{
"source": "jo8937/apache-beam-dataflow-python-geoip-batch",
"score": 2
}
#### File: apache-beam-dataflow-python-geoip-batch/geoip/geoip_mapper.py
```python
from __future__ import absolute_import
import sys
import argparse
import logging
import apache_beam as beam
from apache_beam.transforms.core import ParDo
from google.cloud import bigquery
from google.cloud.bigquery import SchemaField
class GeoIpFn(beam.DoFn):
gi = None
def process(self, element):
if self.gi is None:
from resources.loader import load_geoip
self.gi = load_geoip()
tablerow = element
client_ip = tablerow.get("ip")
try:
if client_ip:
country = self.gi.country_code_by_addr(client_ip)
tablerow["geoIpCountry"] = country
        except Exception:
            print("not found")
yield tablerow
def schemaConvert(schemaFields):
return ",".join(["%s:%s" % (f.name, f.field_type) for f in schemaFields])
def run(projectId, src_dataset, src_tablename, dest_dataset, dest_tablename, gcs_location_prefix, jobname):
from apache_beam.options.pipeline_options import PipelineOptions, GoogleCloudOptions, StandardOptions, SetupOptions, WorkerOptions
dataset = bigquery.Client(project=projectId).dataset(src_dataset)
src_table = dataset.table(src_tablename)
src_table.reload()
dest_schema = src_table.schema
dest_schema.append(SchemaField('geoIpCountry', 'STRING')) # add custom field name
options = PipelineOptions()
google_cloud_options = options.view_as(GoogleCloudOptions)
google_cloud_options.project = projectId
google_cloud_options.job_name = jobname
google_cloud_options.staging_location = gcs_location_prefix + 'staging'
google_cloud_options.temp_location = gcs_location_prefix + 'temp'
worker_options = options.view_as(WorkerOptions)
worker_options.num_workers = 32
worker_options.machine_type = "n1-standard-1" # https://cloud.google.com/compute/docs/machine-types
worker_options.autoscaling_algorithm = "NONE" # "THROUGHPUT_BASED"
setup_options = options.view_as(SetupOptions)
setup_options.setup_file = "./setup.py"
setup_options.save_main_session = False
options.view_as(StandardOptions).runner = 'DataflowRunner'
with beam.Pipeline(options=options) as p:
rows = (p | 'ReadBQ' >> beam.io.Read(beam.io.BigQuerySource(table=src_tablename, dataset=src_dataset))
| 'GeoIP' >> beam.ParDo(GeoIpFn())
)
rows | 'WriteBQ' >> beam.io.Write(
beam.io.BigQuerySink(
table = dest_tablename,
dataset= dest_dataset,
schema=schemaConvert(dest_schema),
create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run(*sys.argv[1:])
```
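The `gi` class attribute above lazily loads the GeoIP database on first use inside `process`. Beam 2.13+ added a `DoFn.setup()` hook that runs once per DoFn instance before any bundle is processed, which is a more idiomatic place for that load. A minimal sketch, reusing the repo's `resources.loader.load_geoip` helper:
```python
import apache_beam as beam

class GeoIpSetupFn(beam.DoFn):
    def setup(self):
        # Runs once per DoFn instance, before any elements are processed.
        from resources.loader import load_geoip  # helper shipped with this repo
        self.gi = load_geoip()

    def process(self, element):
        client_ip = element.get("ip")
        if client_ip:
            try:
                element["geoIpCountry"] = self.gi.country_code_by_addr(client_ip)
            except Exception:
                pass  # leave the row unannotated if the lookup fails
        yield element
```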
{
"source": "joabakk/pyPolar",
"score": 3
}
#### File: joabakk/pyPolar/pyPolar.py
```python
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import pynmea2
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
f = StringIO(data * 2)
sr = pynmea2.NMEAStreamReader(f)
assert len(sr.next()) == 1
assert len(sr.next()) == 1
assert len(sr.next()) == 0
    print(f)
'''
current on board~
streamreader = pynmea2.NMEAStreamReader("/dev/pty23")
while 1:
for msg in streamreader.next():
print msg
parsedRaw = pynmea2.parse(msg)
print parsedRaw
#see issue in https://code.google.com/p/pynmea/issues/detail?id=3 on $ position
'''
'''
#from pynmea:
from pynmea.streamer import NMEAStreamer
with open('example_data_file.txt', 'r') as data_file:
streamer = NMEAStreamer(data_file)
next_data = streamer.get_objects()
data = []
while next_data:
data += next_data
next_data = streamer(read)
'''
```
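For reference, parsing a single sentence (rather than feeding a stream reader) goes through `pynmea2.parse`; a quick sketch using the same GGA sentence as the test above:
```python
import pynmea2

msg = pynmea2.parse("$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D")
print(msg.timestamp)   # fix time parsed from the sentence
print(msg.latitude)    # signed decimal degrees (negative for S)
print(msg.longitude)
```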
{
"source": "Joabsonlg/ddm-rest-api",
"score": 2
}
#### File: ddm-rest-api/shops/serializers.py
```python
from users.serializers import UserCreateSerializer
from rest_framework import serializers
from shops.models import Category, Product, Shop
class ShopSerializer(serializers.ModelSerializer):
class Meta:
model = Shop
fields = '__all__'
class ProductSerializer(serializers.ModelSerializer):
class Meta:
model = Product
fields = '__all__'
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = '__all__'
class UserShopSerializer(serializers.ModelSerializer):
class Meta:
model = Shop
fields = '__all__'
def to_representation(self, instance):
self.fields['user'] = UserCreateSerializer(read_only=True)
return super(UserShopSerializer, self).to_representation(instance)
```
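The `to_representation` override in `UserShopSerializer` swaps the `user` field to a nested serializer on reads only, so writes can still accept a plain primary key. A hypothetical sketch of the effect (the `Shop` field names here are assumptions, not taken from the actual models):
```python
# Write path: a plain user primary key is still accepted.
serializer = UserShopSerializer(data={"name": "Corner Shop", "user": 1})  # hypothetical fields

# Read path: the same field is rendered as a nested user object, e.g.:
# UserShopSerializer(instance=shop).data
# -> {"id": 3, "name": "Corner Shop", "user": {"id": 1, "username": "alice", ...}}
```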
{
"source": "joacand/Albart",
"score": 3
}
#### File: Albart/lib/GoogleImageDownloader.py
```python
from bs4 import BeautifulSoup
import urllib.request
import os
import json
class GoogleImageDownloader:
def __init__(self):
self.user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' \
'Chrome/43.0.2357.134 Safari/537.36'
def download_image(self, image_query, output_path, file_name):
image_query = '+'.join(image_query.split())
url = "https://www.google.com/search?q=" + image_query + "&source=lnms&tbm=isch"
soup = self._get_soup(url, {'User-Agent': self.user_agent})
soup_finding = soup.find("div", {"class": "rg_meta"})
try:
img_url, file_type = json.loads(soup_finding.text)["ou"], json.loads(soup_finding.text)["ity"]
except AttributeError as e:
print('Could not find result for %s. Error: %s' % (image_query, e))
return False
try:
req = urllib.request.Request(img_url)
req.add_header('User-Agent', self.user_agent)
raw_img = urllib.request.urlopen(req).read()
if len(file_type) == 0:
file_to_open = os.path.join(output_path, file_name + ".jpg")
else:
file_to_open = os.path.join(output_path, file_name + "." + file_type)
with open(file_to_open, 'wb') as f:
f.write(raw_img)
except urllib.request.URLError as e:
print('Could not load (URLError) %s. Error: %s' % (img_url, e))
return False
except (OSError, IOError) as e:
print('Could not write image (OSError or IOError) %s. Error: %s' % (img_url, e))
return False
return True
@staticmethod
def _get_soup(url, header):
return BeautifulSoup(urllib.request.urlopen(urllib.request.Request(url, headers=header)), 'html.parser')
```
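A minimal usage sketch for the class above (note that Google changes its results markup regularly, so the `rg_meta` selector may no longer match current pages):
```python
downloader = GoogleImageDownloader()
ok = downloader.download_image("nevermind album cover", "/tmp", "cover")
print("saved" if ok else "no image found")
```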
{
"source": "joacand/HandelsbankenYNABConverter",
"score": 3
}
#### File: joacand/HandelsbankenYNABConverter/Converter.py
```python
import argparse
import re
def getTransactionsList(data):
transactions = []
for htmlLine in data:
entries = htmlLine.split(';')
del entries[0] # Remove Reskontradatum
if (len(entries)>1):
transaction=[]
for entry in entries:
es = stripHtml(entry)
if (es != ""):
transaction.append(es)
transactions.append(list(filter(None, transaction)))
transactions = list(filter(None, transactions))
transactions.sort(reverse=True)
return transactions
def stripHtml(entry):
    es = re.findall(r'>.*?<', entry)
while ("><" in es):
es.remove("><")
for n,i in enumerate(es):
es[n] = i[1:-1]
return es[0] if len(es) > 0 else ""
def parseArgs():
parser = argparse.ArgumentParser(description='Converts a Handelsbanken Excel file to YNAB friendly CSV')
parser.add_argument('-i','--input', help='The input file name', required=True)
parser.add_argument('-o','--output', help='The output file name', required=True)
return parser.parse_args()
def main():
args = parseArgs()
input = args.input
output = args.output
print("Converting " + input + " to " + output + "\n")
with open (input) as inputFile:
data = inputFile.readlines()
transactions = getTransactionsList(data)
open(output, 'w').close()
with open(output, 'a') as outputFile:
outputFile.write("Date,Payee,Category,Memo,Outflow,Inflow\n")
for t in transactions[1:]:
outputFile.write(t[0]+","+t[1]+",,,")
flow = float(t[2].replace(',','.').replace(' ',''))
if (flow < 0):
outputFile.write(str(abs(flow))+",")
else:
outputFile.write(",")
outputFile.write(str(flow))
outputFile.write("\n")
print("Added "+str(t))
print("\nFinished converting "+input+"\nOutput file is "+output)
if __name__ == '__main__':
    main()
```
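The converter hinges on `stripHtml` returning the first text node found between tags in a cell; a quick sketch of its behavior:
```python
print(stripHtml("<td><span>2021-01-15</span></td>"))  # -> '2021-01-15'
print(stripHtml("no tags here"))                      # -> ''
```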
{
"source": "Joacchim/BookMyComics",
"score": 3
}
#### File: Joacchim/BookMyComics/setup.py
```python
import sys
def read_file(path):
try:
with open(path, 'r') as f:
return f.read()
except Exception as e:
print('Failed to read "{}": {}'.format(path, e))
return None
def write_file(content, path):
try:
with open(path, 'w') as f:
f.write(content)
return 0
except Exception as e:
        print('Failed to write into "{}": {}'.format(path, e))
return 4
def switch_manifest(browser):
content = read_file('browsers/{}.json'.format(browser))
if content is None:
print('aborting...')
return 3
return write_file(content, 'web-extension/manifest.json')
def show_options():
print("Browser must be passed (either 'firefox' or 'chrome'). Example:")
print("> python setup.py firefox")
def main():
argv = sys.argv[1:]
if len(argv) != 1:
show_options()
return 1
if argv[0] == '-h' or argv[0] == '--help':
show_options()
return 0
if argv[0] not in ["firefox", "chrome"]:
print("Invalid browser passed")
show_options()
return 2
return switch_manifest(argv[0])
if __name__ == "__main__":
sys.exit(main())
```
#### File: tests/func/test_sidebar_display.py
```python
import pytest
from .utils.extension import Extension
from selenium.webdriver.support.ui import WebDriverWait
EXT = Extension()
SIDEBAR_WIDTH = 206
COLLAPSED_SIDEBAR_HEIGHT = 58
@pytest.mark.order(after='test_webext_loads')
class TestSidebarDisplay:
@staticmethod
def test_default_hidden(controller, unique_reader):
unique_reader.home()
assert controller.sidebar.loaded
# Ensures that the size is reduced to avoid overlapping on the page
# -> This guarantees that the users can continue clicking on the page's
# button when the sidebar is hidden.
size = controller.sidebar.size
assert size['height'] == COLLAPSED_SIDEBAR_HEIGHT and size['width'] == SIDEBAR_WIDTH
# Ensures that the content of the sidebar is hidden
assert controller.sidebar.hidden
@staticmethod
@pytest.mark.order(after='test_default_hidden')
def test_displayed(controller, unique_reader):
unique_reader.home()
assert controller.sidebar.loaded
if controller.sidebar.hidden:
controller.sidebar.toggle()
controller.sidebar.wait_for_text('<', 'hide-but')
# Ensures that the size is expanded (as opposed to reduced when hidden)
# Otherwise, the display won't be clearly visible for the user
size = controller.sidebar.size
assert size['height'] > COLLAPSED_SIDEBAR_HEIGHT and size['width'] == SIDEBAR_WIDTH
        # Ensures that the content of the sidebar is displayed
assert not controller.sidebar.hidden
@staticmethod
@pytest.mark.order(after='test_default_hidden')
def test_toggle(controller, unique_reader):
unique_reader.home()
assert controller.sidebar.loaded
# Ensures that the content of the sidebar is toggled
init_state = controller.sidebar.hidden
controller.sidebar.toggle()
assert init_state != controller.sidebar.hidden
controller.sidebar.toggle()
assert init_state == controller.sidebar.hidden
```
#### File: utils/support/mangafox.py
```python
import random
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from . import SupportBase
from .. import RetriableError, retry, check_predicate
class FanFoxNavBar:
def __init__(self, driver_wrapper):
self._wrapper = driver_wrapper
self._driver = driver_wrapper.driver
self._pages = None
self._pageIdx = -1
self._chapter_prev = None
self._chapter_next = None
self.update()
def update(self):
# pages = self._driver.find_elements(by=By.CSS_SELECTOR, value='div.pager-list-left > span > a')
# chapters = self._driver.find_elements(by=By.CSS_SELECTOR, value='div.pager-list-left > a.chapter')
pager = self._get_pager()
if not pager:
return False
self._pages = pager.find_elements(by=By.CSS_SELECTOR, value='span > a')
chapters = pager.find_elements(by=By.CSS_SELECTOR, value='a.chapter')
self._chapter_prev = None
self._chapter_next = None
for button in chapters:
if "Pre" in button.text:
self._chapter_prev = button
if "Next" in button.text:
self._chapter_next = button
self._pageIdx = self._get_current_page_idx()
return True
    def _get_pager(self):
        pagers = self._driver.find_elements(by=By.CLASS_NAME, value='pager-list-left')
        for pager in pagers:
            if pager.get_property('childElementCount') > 0:
                return pager
        return None
def _get_current_page_idx(self):
curPageIdx = -1
for i in range(len(self._pages)):
if 'active' in self._pages[i].get_attribute('class'):
curPageIdx = i
break
return curPageIdx
def prev_chapter(self):
if not self._chapter_prev:
return False
self._wrapper.ensure_click(self._chapter_prev)
return self.update()
def first_page(self):
if not self._pages or self._pageIdx == 0:
return False
self._wrapper.ensure_click(self._pages[1])
return self.update()
def has_prev_page(self):
return self._chapter_prev is not None
def prev_page(self):
if not self._pages or self._pageIdx <= 0:
return False
self._wrapper.ensure_click(self._pages[0]) # Click on the "<" button
return self.update()
def has_next_page(self):
return self._chapter_next is not None
def next_page(self):
if not self._pages or self._pageIdx == (len(self._pages) - 1):
return False
        self._wrapper.ensure_click(self._pages[-1])  # Click on the ">" button
return self.update()
def last_page(self):
if not self._pages or self._pageIdx == (len(self._pages) - 1):
return False
self._wrapper.ensure_click(self._pages[len(self._pages) - 2])
return self.update()
def next_chapter(self):
if not self._chapter_next:
return False
self._wrapper.ensure_click(self._chapter_next)
return self.update()
class FanFoxDriver(SupportBase):
name = "mangafox"
def __init__(self, *args, **kwargs):
super(FanFoxDriver, self).__init__(*args, **kwargs)
self._navbar = FanFoxNavBar(self._wrapper)
def home(self):
"""
Loads the homepage of the reader
"""
self._driver.get('https://fanfox.net')
# Bypass a common popup acting as a layer on top of the page...
try:
self._driver.find_element(by=By.CSS_SELECTOR, value='.lb-win-con > a > img').click()
except NoSuchElementException:
# fine, we only need to handle it once
pass
@retry(abort=True)
@check_predicate(RetriableError, "Could not load random comic")
def load_random(self):
# First, go to the website
self.home()
# Select a list of "candidate chapters links" (with the href values,
# which prevents invalidating the candidates by loading a new page)
mangas = self._driver.find_elements(by=By.CSS_SELECTOR, value='.manga-list-4-list > li')
print(f'checking out {len(mangas)} mangas')
candidates = []
while len(mangas):
manga = mangas.pop(random.randrange(len(mangas)))
chapters = manga.find_elements(by=By.CSS_SELECTOR, value='.manga-list-4-item-part > li > a')
# Guarantee that we take a chapter which has both a "prev" and a "next"
if len(chapters) < 3:
print('Skipping, not enough chapters')
continue
candidates.append(chapters[1].get_attribute('href'))
# Check random candidates until they fit the bill:
# - Same website (base url)
# - Has a navigation bar
print(f'checking out {len(candidates)} candidates')
while len(candidates):
candidate = candidates.pop(random.randrange(len(candidates)))
self._driver.get(candidate)
if self._driver.current_url == 'https://fanfox.net':
print('Failed ? Still on homepage')
continue
print(self._driver.current_url)
if len(self._driver.current_url.split('/')) != 7:
print('Does not support URLS with volume part for now:'
f' {len(self._driver.current_url.split("/"))} parts')
continue
# Now, select a page which has both "next" and "prev" pages (ie:
# neither first nor last), but first we need to ensure the DOM has been
# properly updated, and that the required select is present (it's added
# dynamically)
try:
pages = self._driver.find_elements(by=By.CSS_SELECTOR, value='div > div > span > a')
except NoSuchElementException:
print('Failed? No navbar')
continue
if len(pages) <= 2:
# This might mean that we're looking at one-page chapters for the
# manga. As the reader supports it, let's go with this.
self._navbar.update()
else:
# Only retain half the links (the navigation buttons are present twice
# in a page, on top of the scan and underneath it), and click on a
# page link which is neither the first nor the last.
pages = pages[0:int(len(pages)/2)]
self._wrapper.ensure_click(pages[random.randrange(1, len(pages) - 2)])
self._navbar.update()
return
raise RuntimeError("No manga with enough chapters nor with link on mangafox")
def has_prev_page(self):
return self._navbar.has_prev_page()
def prev_page(self):
if not self._navbar.prev_page():
if self._navbar.prev_chapter():
if not self._navbar.last_page():
print('Already at earliest page, cannot go to previous')
return
def has_next_page(self):
return self._navbar.has_next_page()
def next_page(self):
if not self._navbar.next_page():
if not self._navbar.next_chapter():
print('Already at latest chapter, cannot go to next')
return
def get_comic_name(self):
"""
Extracts the comic name from the current URL
"""
url = self._driver.current_url
if 'fanfox.net' not in url:
return None
return url.split('/')[4]
def get_chapter(self):
"""
Returns the chapter number of the current loaded page.
"""
parts = [p for p in self._driver.current_url.split('/') if p]
return parts[4].replace('c', '')
def get_page(self):
"""
Returns the page number of the current loaded page.
"""
parts = [p for p in self._driver.current_url.split('/') if p]
return parts[5]
```
#### File: BookMyComics/tools/logs_codes.py
```python
from utils import get_all_js_files, get_file_content
import sys
STRINGS_PATH = 'web-extension/strings.js'
def check_error_codes(file_path, error_codes, string_codes, errors):
content = get_file_content(file_path)
if content is None:
return
for index, line in enumerate(content.splitlines()):
line = line.strip()
start_log = 'LOGS.' in line
start_loc = 'LOCALIZATION.' in line
if start_loc or start_log:
line = line.split('LOGS.')[1] if start_log else line.split('LOCALIZATION.')[1]
code = line.split("'")
if len(code) < 2:
continue
code = code[1].split("'")[0]
if code.startswith('E'):
if code not in error_codes:
errors.append('[{}:{}]: Unknown error code "{}"'.format(file_path, index + 1,
code))
else:
error_codes[code]['usage'] += 1
if error_codes[code]['string'] not in string_codes:
errors.append('[{}:{}]: Unknown string code "{}" used in error code "{}"'
.format(file_path, index + 1, error_codes[code]['string'],
code))
elif code.startswith('S'):
if code not in string_codes:
errors.append('[{}:{}]: Unknown string code "{}"'.format(file_path, index + 1,
code))
else:
string_codes[code]['usage'] += 1
def get_all_defined_strings_and_error_codes(errors):
error_codes = {}
string_codes = {}
is_in_errors = False
is_reading_errors = False
is_in_logs = False
is_reading_logs = False
content = get_file_content(STRINGS_PATH)
if content is None:
return error_codes, string_codes
for index, line in enumerate(content.splitlines()):
line = line.strip()
if is_reading_errors is True:
if line.startswith("//"):
continue
if not line.startswith("'E"):
is_reading_errors = False
is_in_errors = False
continue
error_code = line.split("'")[1].split("'")[0]
string_code = line.split(":")[1].split("'")[1].split("'")[0]
if error_code in error_codes:
errors.append("[{}:{}]: error code '{}' is duplicated with line {}"
.format(STRINGS_PATH, index + 1, error_code,
error_codes[error_code]['line']))
continue
error_codes[error_code] = {'line': index + 1, 'string': string_code, 'usage': 0}
entry = string_codes.setdefault(string_code, {'usage': 0})
entry['usage'] += 1
string_codes[string_code] = entry
elif is_in_errors is True:
is_reading_errors = line.startswith('this.ERRORS = {')
elif line.startswith('function Logs('):
is_in_logs = False
is_in_errors = True
elif is_reading_logs:
if line.startswith("//"):
continue
if line.startswith('};'):
is_reading_logs = False
is_in_logs = False
continue
if not line.startswith("'S"):
continue
string_code = line.split("'")[1].split("'")[0]
if string_code in string_codes:
errors.append("[{}:{}]: string code '{}' is duplicated with line {}"
.format(STRINGS_PATH, index + 1, string_code,
string_codes[string_code]['line']))
continue
entry = string_codes.setdefault(string_code, {'usage': 0})
entry.update({'line': index + 1, 'usage': entry['usage'] + 1})
string_codes[string_code] = entry
elif is_in_logs is True:
is_reading_logs = line.startswith('this.STRINGS = {')
elif line.startswith('function Localization('):
is_in_logs = True
is_in_errors = False
return error_codes, string_codes
def check_usage(codes, kind, errors):
for key in codes:
if codes[key]['usage'] == 0:
errors.append('Unused {}: "{}" from [{}:{}]'.format(kind, key, STRINGS_PATH,
codes[key]['line']))
def main_func():
errors = []
print("=> Getting error codes and string codes...")
error_codes, string_codes = get_all_defined_strings_and_error_codes(errors)
print("<= Done")
print("Found {} error codes".format(len(error_codes)))
print("Found {} string codes".format(len(string_codes)))
print("=> Getting all js files...")
all_js_files = get_all_js_files('web-extension', STRINGS_PATH)
print("<= Done")
print("Found {} js files".format(len(all_js_files)))
print("=> Checking all js files...")
for js_file in all_js_files:
print("==> Checking '{}'...".format(js_file))
check_error_codes(js_file, error_codes, string_codes, errors)
print("<= Done")
check_usage(string_codes, 'string code', errors)
check_usage(error_codes, 'error code', errors)
if len(errors) > 0:
print("=== ERRORS ===")
for error in errors:
print("=> {}".format(error))
else:
print("=== NO ERROR FOUND ===")
return len(errors)
if __name__ == "__main__":
sys.exit(main_func())
```
{
"source": "Joacchim/pytest-order",
"score": 2
}
#### File: pytest-order/tests/test_class_marks.py
```python
def test_ordinal_class_marks(item_names_for):
tests_content = (
"""
import pytest
@pytest.mark.order(1)
class Test1:
def test_1(self): pass
def test_2(self): pass
@pytest.mark.order(0)
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test2::test_1", "Test2::test_2", "Test1::test_1", "Test1::test_2"
]
def test_after_class_mark(item_names_for):
tests_content = (
"""
import pytest
@pytest.mark.order(after="Test2")
class Test1:
def test_1(self): pass
def test_2(self): pass
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test2::test_1", "Test2::test_2", "Test1::test_1", "Test1::test_2"
]
def test_invalid_class_mark(item_names_for, capsys):
tests_content = (
"""
import pytest
@pytest.mark.order(after="Test3")
class Test1:
def test_1(self): pass
def test_2(self): pass
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test1::test_1", "Test1::test_2", "Test2::test_1", "Test2::test_2"
]
out, err = capsys.readouterr()
assert (
"WARNING: cannot execute 'test_2' relative to others: "
"'Test3' - ignoring the marker"
in out
)
def test_before_class_mark(item_names_for):
tests_content = (
"""
import pytest
class Test1:
def test_1(self): pass
def test_2(self): pass
@pytest.mark.order(before="Test1")
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test2::test_1", "Test2::test_2", "Test1::test_1", "Test1::test_2"
]
def test_after_class_marks_for_single_test_in_class(item_names_for):
tests_content = (
"""
import pytest
@pytest.mark.order(after="Test2::test_1")
class Test1:
def test_1(self): pass
def test_2(self): pass
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test2::test_1", "Test1::test_1", "Test1::test_2", "Test2::test_2"
]
def test_before_class_marks_for_single_test_in_class(item_names_for):
tests_content = (
"""
import pytest
class Test1:
def test_1(self): pass
def test_2(self): pass
@pytest.mark.order(before="Test1::test_2")
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test1::test_1", "Test2::test_1", "Test2::test_2", "Test1::test_2"
]
def test_after_class_marks_for_single_test(item_names_for):
tests_content = (
"""
import pytest
@pytest.mark.order(after="test_1")
class Test1:
def test_1(self): pass
def test_2(self): pass
def test_1(): pass
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"test_1",
"Test1::test_1",
"Test1::test_2",
"Test2::test_1",
"Test2::test_2",
]
def test_before_class_marks_for_single_test(item_names_for):
tests_content = (
"""
import pytest
def test_1(): pass
class Test1:
def test_1(self): pass
def test_2(self): pass
@pytest.mark.order(before="test_1")
class Test2:
def test_1(self): pass
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test2::test_1",
"Test2::test_2",
"test_1",
"Test1::test_1",
"Test1::test_2",
]
def test_rel_class_mark_with_order_mark(item_names_for):
tests_content = (
"""
import pytest
class Test1:
def test_1(self): pass
def test_2(self): pass
@pytest.mark.order(before="Test1")
class Test2:
@pytest.mark.order(2)
def test_1(self): pass
@pytest.mark.order(1)
def test_2(self): pass
"""
)
assert item_names_for(tests_content) == [
"Test2::test_2",
"Test2::test_1",
"Test1::test_1",
"Test1::test_2",
]
```
{
"source": "JoachimC/magicbox_distance",
"score": 3
}
#### File: test/integration/test_load_shapefile_networkx_native.py
```python
import unittest
import networkx as nx
class TestLoadColumbiaRoadsNetworkXNative(unittest.TestCase):
def test_load(self):
# https://data.humdata.org/dataset/d8f6feda-6755-4e84-bd14-5c719bc5f37a (hotosm_col_roads_lines_shp.zip)
roads_file = "/Users/joachim/Downloads/hotosm_col_roads_lines_shp/hotosm_col_roads_lines.shp"
# todo : ImportError: read_shp requires OGR: http://www.gdal.org/
G = nx.read_shp(roads_file)
if __name__ == '__main__':
unittest.main()
```
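`nx.read_shp` was deprecated in NetworkX 2.6 and removed in 3.0. A hedged sketch of a replacement, reading the shapefile with GeoPandas and building the graph from line coordinates (assumes the layer contains plain `LineString` geometries):
```python
import geopandas as gpd
import networkx as nx

roads_file = "hotosm_col_roads_lines.shp"  # same dataset as above
gdf = gpd.read_file(roads_file)
G = nx.Graph()
for line in gdf.geometry:
    # .coords raises on MultiLineString; explode() the GeoDataFrame first if needed.
    coords = list(line.coords)
    for start, end in zip(coords[:-1], coords[1:]):
        G.add_edge(start, end)
```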
{
"source": "JoachimCoenen/Datapack-Editor",
"score": 2
}
#### File: gui/editors/datapackFilesEditor.py
```python
import os
from typing import Optional
from PyQt5.QtGui import QIcon
from Cat.CatPythonGUI.AutoGUI.propertyDecorators import Validator, ValidatorResult
from Cat.CatPythonGUI.GUI import adjustOverlap, maskCorners, CORNERS, SizePolicy, NO_MARGINS
from Cat.CatPythonGUI.GUI.pythonGUI import EditorBase
from Cat.Serializable import SerializableContainer, Serialized
from Cat.icons import icons
from Cat.utils import DeferredCallOnceMethod, openOrCreate
from gui.datapackEditorGUI import DatapackEditorGUI, LocalFilesPropInfo, ContextMenuEntries, FilesTreeItem, createNewFileGUI, createNewFolderGUI, createNewFolder
from model.Model import World, Datapack
from model.datapackContents import isNamespaceValid, NAME_SPACE_VAR
from model.utils import Position
from model.pathUtils import FilePath, normalizeDirSeparators
from session.session import getSession
class DatapackFilesEditor(EditorBase[World]):
def _openFunc(self, filePath: FilePath, selectedPosition: Optional[Position] = None):
self.window()._tryOpenOrSelectDocument(filePath, selectedPosition)
@DeferredCallOnceMethod(delay=0) # needed to avoid transferring keyPresses (Return Key) to another widget, if a focusChange happens.
def _onDoubleClick(self, data: FilesTreeItem):
if data.isFile:
self._openFunc(data.filePaths[0].fullPath)
return True
return False
def _renameFileOrFolder(self, data: FilesTreeItem):
if data.isFile:
path = data.filePaths[0].fullPath
else:
path = data.folderPath
if path is None:
return
path = path[0], path[1].rstrip('/')
lPath, __, name = path[1].rpartition('/')
newName, isOk = self._gui.askUserInput(f"rename '{name}'", name)
if isOk:
newPath = (path[0], f'{lPath}/{newName}')
joinedNewPath = os.path.join(*newPath)
if os.path.exists(joinedNewPath):
self._gui.showInformationDialog(f"The name \"{newName}\" cannot be used.", "Another file with the same name already exists.")
else:
try:
os.rename(os.path.join(*path), joinedNewPath)
except OSError as e:
getSession().showAndLogError(e)
else:
data.label = newName
# update paths of opened files:
for fe2 in data.filePaths:
filePath = fe2.fullPath
doc = getSession().documents.getDocument(filePath)
if doc is not None:
pathLen = len(path[1])
if doc.filePath[1].startswith(path[1]):
newFilePath = newPath[0], newPath[1] + doc.filePath[1][pathLen:]
doc.filePath = newFilePath
view = getSession().documents._getViewForDocument(doc)
if view is not None:
view.onDocumentsChanged.emit()
self.redraw('DatapackFilesEditor._renameFileOrFolder(...)')
def _deleteFileFunc(self, path: FilePath):
_, __, name = path[1].rstrip('/').rpartition('/')
if self._gui.askUser(f"Delete file '{name}'?", 'this cannot be undone!'):
try:
os.unlink(os.path.join(*path))
except OSError as e:
getSession().showAndLogError(e)
self.redraw('DatapackFilesEditor._deleteFileFunc(...)')
# TODO: maybe close opened file?
def _newDatapackDialog(self):
pass
def _createNewDatapackGUI(self) -> None:
def datapackPathFromName(name: str):
return normalizeDirSeparators(os.path.join(getSession().world.path, 'datapacks', name))
def validateName(name: str) -> Optional[ValidatorResult]:
datapackPath = datapackPathFromName(name)
if os.path.exists(datapackPath):
return ValidatorResult(f"Another datapack with the same name already exists.", 'error')
return None
def validateNamespace(namespace: str) -> Optional[ValidatorResult]:
if not isNamespaceValid(namespace):
return ValidatorResult(f"Not a valid namespace.\nNamespaces mut only contain:\n"
f" - Numbers (0-9)\n"
f" - Lowercase letters (a-z)\n"
f" - Underscore (_)\n"
f" - Hyphen/minus (-)\n"
f" - dot (.)\n", 'error')
return None
class Context(SerializableContainer):
name: str = Serialized(default='new Datapack', decorators=[Validator(validateName)])
namespace: str = Serialized(default='new_datapack', decorators=[Validator(validateNamespace)])
def guiFunc(gui: DatapackEditorGUI, context: Context) -> Context:
gui.propertyField(context, Context.name)
gui.propertyField(context, Context.namespace)
return context
context = Context()
while True:
context, isOk = self._gui.askUserInput(f"new Datapack", context, guiFunc)
if not isOk:
return
isValid = validateName(context.name) is None and validateNamespace(context.namespace) is None
if isValid:
break
datapackPath = datapackPathFromName(context.name)
try:
with openOrCreate(f"{datapackPath}/pack.mcmeta", 'w') as f:
f.write(
'{\n'
' "pack": {\n'
' "pack_format": 6,\n'
' "description": "[{"text":" """ + context.name + """ ","color":"white"}{"text":"\\nCreated with","color":"white"},{"text":"Data Pack Editor","color":"yellow"}] "\n'
' }\n'
'}')
for folder in getSession().datapackData.structure.values():
folderPath = f"data/{context.namespace}/{folder.folder}"
createNewFolder(datapackPath, folderPath)
for file in folder.generation.initialFiles:
fileNS = file.namespace.replace(NAME_SPACE_VAR, context.namespace)
filePath = f"{datapackPath}/data/{fileNS}/{folder.folder}{file.name}"
with openOrCreate(filePath, 'w') as f:
f.write(file.contents.replace(NAME_SPACE_VAR, context.namespace))
except OSError as e:
getSession().showAndLogError(e)
else:
self.redraw('DatapackFilesEditor._createNewDatapackGUI(...)')
def _onContextMenu(self, data: FilesTreeItem, column: int):
if not data.filePaths:
return
isMutable = not data.isImmutable
if isinstance(data.filePaths[0], FilesTreeItem):
# we have a data pack:
folderPath = data.folderPath
if folderPath is None:
return
if isinstance(folderPath, tuple):
folderPath = folderPath[0]
with self._gui.popupMenu(atMousePosition=True) as menu:
menu.addItem('edit description', lambda: self._openFunc((folderPath, 'pack.mcmeta')))
# menu.addItem('rename Folder', lambda: self._renameFileOrFolder(data), enabled=isMutable)
menu.addSeparator()
menu.addItems(ContextMenuEntries.pathItems(folderPath))
elif data.isFile:
filePath = data.filePaths[0].fullPath
with self._gui.popupMenu(atMousePosition=True) as menu:
menu.addItem('rename File', lambda: self._renameFileOrFolder(data), enabled=isMutable)
menu.addItem('delete File', lambda: self._deleteFileFunc(filePath), enabled=isMutable)
menu.addSeparator()
menu.addItems(ContextMenuEntries.fileItems(filePath, openFunc=self._openFunc))
else:
folderPath = data.folderPath
if folderPath is None:
return
with self._gui.popupMenu(atMousePosition=True) as menu:
menu.addItem('new File', lambda p=folderPath: createNewFileGUI(p, self._gui, self._openFunc), enabled=isMutable)
menu.addItem('new Folder', lambda p=folderPath: createNewFolderGUI(p, self._gui), enabled=isMutable)
menu.addItem('rename Folder', lambda: self._renameFileOrFolder(data), enabled=isMutable)
menu.addItem('delete Folder', lambda: self._deleteFileFunc(folderPath), enabled=isMutable)
menu.addSeparator()
menu.addItems(ContextMenuEntries.pathItems(folderPath))
def _iconMaker(self, data: FilesTreeItem, column: int) -> QIcon:
if data.isFile:
return icons.file_code
elif data.isArchive:
return icons.archive
return icons.folderInTree
def OnGUI(self, gui: DatapackEditorGUI) -> None:
with gui.vLayout(verticalSpacing=0):
gui.filteredProjectsFilesTree3(
self.model().datapacks,
[
LocalFilesPropInfo(Datapack.files, 'data/', 'data'),
],
isImmutable=Datapack.isZipped.get,
onDoubleClick=self._onDoubleClick,
onContextMenu=self._onContextMenu,
iconMaker=self._iconMaker,
overlap=adjustOverlap(self.overlap(), (None, None, None, 0)),
roundedCorners=maskCorners(self.roundedCorners(), CORNERS.TOP),
)
with gui.hPanel(
horizontalSpacing=0,
contentsMargins=NO_MARGINS,
overlap=adjustOverlap(self.overlap(), (None, 1, None, None)),
roundedCorners=maskCorners(self.roundedCorners(), CORNERS.BOTTOM),
):
gui.addHSpacer(5, SizePolicy.Expanding)
if gui.toolButton(
icon=icons.add,
tip="create new Datapack",
overlap=adjustOverlap(self.overlap(), (0, 1, None, None)),
roundedCorners=maskCorners(self.roundedCorners(), CORNERS.BOTTOM_RIGHT),
enabled=getSession().hasOpenedWorld
):
self._createNewDatapackGUI()
__all__ = [
'DatapackFilesEditor'
]
```
#### File: gui/lexers/jsonLexer.py
```python
from __future__ import annotations
from Cat.CatPythonGUI.GUI.codeEditor import AutoCompletionTree, CodeEditorLexer
from gui.lexers.documentLexer import DocumentLexerBase2
@CodeEditorLexer('MCJson', forceOverride=True)
class LexerJson(DocumentLexerBase2):
# defaultStyles = {style[0]: style[1] for style in styles.items()}
# styleIndices: dict[str, int] = {name: i for i, name in enumerate(styles.keys())}
def __init__(self, parent=None):
# Initialize superclass
super().__init__(parent)
# self._lastStylePos: int = 0
def autoCompletionTree(self) -> AutoCompletionTree:
return self._api.autoCompletionTree
def setAutoCompletionTree(self, value: AutoCompletionTree):
self._api.autoCompletionTree = value
def language(self):
return "JSON"
def description(self, style):
return "Custom lexer for the Minecrafts .json files"
def wordCharacters(self) -> str:
return "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-~^@#$%&:/"
def autoCompletionWordSeparators(self) -> list[str]:
return ['.'] # ':', '#', '.']
def init():
pass # Don't delete!
```
#### File: gui/lexers/mcFunctionStyler.py
```python
from __future__ import annotations
import enum
from abc import abstractmethod, ABC
from dataclasses import dataclass, field
from typing import Type, Optional, ClassVar
from Cat.utils import Decorator
from Cat.utils.collections_ import AddToDictDecorator
from gui.lexers.styler import DEFAULT_STYLE_ID, StyleId, CatStyler, registerStyler
from model.commands.argumentTypes import *
from model.commands.command import MCFunction, ParsedComment, ParsedCommand, KeywordSchema, ArgumentSchema, CommandPart, ParsedArgument
from model.utils import LanguageId
class Style(enum.Enum):
Default = DEFAULT_STYLE_ID
Command = DEFAULT_STYLE_ID + 1
String = DEFAULT_STYLE_ID + 2
Number = DEFAULT_STYLE_ID + 3
Constant = DEFAULT_STYLE_ID + 4
TargetSelector = DEFAULT_STYLE_ID + 5
Operator = DEFAULT_STYLE_ID + 6
Keyword = DEFAULT_STYLE_ID + 7
Complex = DEFAULT_STYLE_ID + 8
Comment = DEFAULT_STYLE_ID + 9
Error = DEFAULT_STYLE_ID + 10
# KeyWord = 14
# Variable = 11
# BuiltinFunction = 17
_allArgumentTypeStyles: dict[str, Optional[Style]] = {
BRIGADIER_BOOL.name: Style.Constant,
BRIGADIER_DOUBLE.name: Style.Number,
BRIGADIER_FLOAT.name: Style.Number,
BRIGADIER_INTEGER.name: Style.Number,
BRIGADIER_LONG.name: Style.Number,
BRIGADIER_STRING.name: Style.String,
MINECRAFT_ANGLE.name: Style.Number,
MINECRAFT_BLOCK_POS.name: Style.Number,
MINECRAFT_BLOCK_PREDICATE.name: Style.Complex,
MINECRAFT_BLOCK_STATE.name: Style.Complex,
MINECRAFT_COLOR.name: Style.Constant,
MINECRAFT_COLUMN_POS.name: Style.Number,
MINECRAFT_COMPONENT.name: Style.Complex,
MINECRAFT_DIMENSION.name: Style.String,
MINECRAFT_ENTITY.name: Style.TargetSelector,
MINECRAFT_ENTITY_ANCHOR.name: Style.Constant,
MINECRAFT_ENTITY_SUMMON.name: Style.String,
MINECRAFT_FLOAT_RANGE.name: Style.Number,
MINECRAFT_FUNCTION.name: Style.String,
MINECRAFT_GAME_PROFILE.name: Style.TargetSelector,
MINECRAFT_INT_RANGE.name: Style.Number,
MINECRAFT_ITEM_ENCHANTMENT.name: Style.String,
MINECRAFT_ITEM_PREDICATE.name: Style.Complex,
MINECRAFT_ITEM_SLOT.name: Style.Constant,
MINECRAFT_ITEM_STACK.name: Style.Complex,
MINECRAFT_MESSAGE.name: Style.String,
MINECRAFT_MOB_EFFECT.name: Style.String,
MINECRAFT_NBT_COMPOUND_TAG.name: Style.Complex,
MINECRAFT_NBT_PATH.name: Style.Complex,
MINECRAFT_NBT_TAG.name: Style.Complex,
MINECRAFT_OBJECTIVE.name: Style.String,
MINECRAFT_OBJECTIVE_CRITERIA.name: Style.String,
MINECRAFT_OPERATION.name: Style.Operator,
MINECRAFT_PARTICLE.name: Style.Complex,
MINECRAFT_PREDICATE.name: Style.String,
MINECRAFT_RESOURCE_LOCATION.name: Style.String,
MINECRAFT_ROTATION.name: Style.Number,
MINECRAFT_SCORE_HOLDER.name: Style.TargetSelector,
MINECRAFT_SCOREBOARD_SLOT.name: Style.Constant,
MINECRAFT_SWIZZLE.name: Style.Constant,
MINECRAFT_TEAM.name: Style.Constant,
MINECRAFT_TIME.name: Style.Number,
MINECRAFT_UUID.name: Style.String,
MINECRAFT_VEC2.name: Style.Number,
MINECRAFT_VEC3.name: Style.Number,
DPE_ADVANCEMENT.name: Style.String,
DPE_COMPARE_OPERATION.name: Style.Operator,
DPE_BIOME_ID.name: Style.String,
}
@dataclass
class ArgumentStyler(ABC):
# setStyling: StylingFunc
# innerStylers: dict[Type[Node], CatStyler]
commandStyler: MCCommandStyler
# offset: int
@classmethod
@abstractmethod
def localLanguages(cls) -> list[LanguageId]:
pass
@abstractmethod
def style(self, argument: ParsedArgument) -> None:
pass
_argumentStylers: dict[str, Type[ArgumentStyler]] = {}
argumentStyler = Decorator(AddToDictDecorator(_argumentStylers))
@registerStyler
@dataclass
class MCCommandStyler(CatStyler[CommandPart]):
@property
def localStyles(self) -> dict[str, StyleId]:
styles = {
Style.Default.name: self.offset + Style.Default.value,
Style.Command.name: self.offset + Style.Command.value,
Style.String.name: self.offset + Style.String.value,
Style.Number.name: self.offset + Style.Number.value,
Style.Constant.name: self.offset + Style.Constant.value,
Style.TargetSelector.name: self.offset + Style.TargetSelector.value,
Style.Operator.name: self.offset + Style.Operator.value,
Style.Keyword.name: self.offset + Style.Keyword.value,
Style.Complex.name: self.offset + Style.Complex.value,
Style.Comment.name: self.offset + Style.Comment.value,
Style.Error.name: self.offset + Style.Error.value,
}
return styles
argumentStylers: dict[str, ArgumentStyler] = field(init=False, repr=False, compare=False)
@classmethod
def localInnerLanguages(cls) -> list[LanguageId]:
return [LanguageId('JSON')]
@property
def localStylesCount(self) -> int:
return self._localStylesCount
@classmethod
def language(cls) -> LanguageId:
return LanguageId('MCCommand')
def __post_init__(self):
self.DEFAULT_STYLE: StyleId = self.offset + Style.Default.value
# self.NULL_STYLE: StyleId = self.offset + Style.sabotage.null.value
# self.BOOLEAN_STYLE: StyleId = self.offset + Style.boolean.value
# self.NUMBER_STYLE: StyleId = self.offset + Style.number.value
# self.STRING_STYLE: StyleId = self.offset + Style.string.value
# self.KEY_STYLE: StyleId = self.offset + Style.key.value
# self.INVALID_STYLE: StyleId = self.offset + Style.invalid.value
self._localStylesCount = 11
self.argumentStylers = {
name: argStylerCls(self) for name, argStylerCls in _argumentStylers.items()
}
def styleNode(self, data: CommandPart) -> int:
if isinstance(data, MCFunction):
return self.styleMCFunction(data)
elif isinstance(data, ParsedComment):
return self.styleComment(data)
else:
return self.styleCommand(data)
def styleMCFunction(self, function: MCFunction) -> int:
end = function.span.start
for child in function.children:
if child is None:
continue
elif isinstance(child, ParsedComment):
end = self.styleComment(child)
else:
end = self.styleCommand(child)
return end
def styleComment(self, comment: ParsedComment) -> int:
self.setStyling(comment.span.slice, Style.Comment.value)
return comment.span.end.index
def styleCommand(self, command: ParsedCommand) -> int:
argument: CommandPart = command
span = command.span.slice
while argument is not None:
if isinstance(argument, ParsedCommand):
style = Style.Command
span = slice(argument.start.index, argument.start.index + len(argument.name))
else:
argument: ParsedArgument
span = argument.span.slice
schema = argument.schema
if isinstance(schema, KeywordSchema):
style = Style.Keyword
elif isinstance(schema, ArgumentSchema):
if isinstance(schema.type, LiteralsArgumentType):
style = Style.Constant
else:
typeName = schema.typeName
# style = _allArgumentTypeStyles.get(typeName, Style.Error)
styler = self.argumentStylers.get(typeName, None)
if styler is None:
style = Style.Error
else:
styler.style(argument)
argument = argument.next
continue
else:
style = Style.Error
self.setStyling(span, style.value)
argument = argument.next
return span.stop
def addSimpleArgumentStyler(style: Style, *, forArgTypes: list[ArgumentType]) -> None:
styleId = style.value
class SimpleArgumentStyler(ArgumentStyler):
STYLE: ClassVar[StyleId] = styleId
@classmethod
def localLanguages(cls) -> list[LanguageId]:
return []
def style(self, argument: ParsedArgument) -> None:
self.commandStyler.setStyling(argument.span.slice, styleId)
for argType in forArgTypes:
argumentStyler(argType.name)(SimpleArgumentStyler)
addSimpleArgumentStyler(Style.Complex, forArgTypes=[
MINECRAFT_BLOCK_PREDICATE,
MINECRAFT_BLOCK_STATE,
MINECRAFT_COMPONENT,
MINECRAFT_ITEM_PREDICATE,
MINECRAFT_ITEM_STACK,
MINECRAFT_NBT_COMPOUND_TAG,
MINECRAFT_NBT_PATH,
MINECRAFT_NBT_TAG,
MINECRAFT_PARTICLE,
])
addSimpleArgumentStyler(Style.Constant, forArgTypes=[
BRIGADIER_BOOL,
MINECRAFT_COLOR,
MINECRAFT_ENTITY_ANCHOR,
MINECRAFT_ITEM_SLOT,
MINECRAFT_SCOREBOARD_SLOT,
MINECRAFT_SWIZZLE,
MINECRAFT_TEAM,
])
addSimpleArgumentStyler(Style.Number, forArgTypes=[
BRIGADIER_DOUBLE,
BRIGADIER_FLOAT,
BRIGADIER_INTEGER,
BRIGADIER_LONG,
MINECRAFT_ANGLE,
MINECRAFT_BLOCK_POS,
MINECRAFT_COLUMN_POS,
MINECRAFT_FLOAT_RANGE,
MINECRAFT_INT_RANGE,
MINECRAFT_ROTATION,
MINECRAFT_TIME,
MINECRAFT_VEC2,
MINECRAFT_VEC3,
])
addSimpleArgumentStyler(Style.Operator, forArgTypes=[
MINECRAFT_OPERATION,
DPE_COMPARE_OPERATION,
])
addSimpleArgumentStyler(Style.String, forArgTypes=[
BRIGADIER_STRING,
MINECRAFT_DIMENSION,
MINECRAFT_ENTITY_SUMMON,
MINECRAFT_FUNCTION,
MINECRAFT_ITEM_ENCHANTMENT,
MINECRAFT_MESSAGE,
MINECRAFT_MOB_EFFECT,
MINECRAFT_OBJECTIVE,
MINECRAFT_OBJECTIVE_CRITERIA,
MINECRAFT_PREDICATE,
MINECRAFT_RESOURCE_LOCATION,
MINECRAFT_UUID,
DPE_ADVANCEMENT,
DPE_BIOME_ID,
])
addSimpleArgumentStyler(Style.TargetSelector, forArgTypes=[
MINECRAFT_ENTITY,
MINECRAFT_GAME_PROFILE,
MINECRAFT_SCORE_HOLDER,
])
@argumentStyler(MINECRAFT_COMPONENT.name, forceOverride=True)
class ComponentStyler(ArgumentStyler):
@classmethod
def localLanguages(cls) -> list[LanguageId]:
return [LanguageId('JSON')]
def style(self, argument: ParsedArgument) -> None:
idx = self.commandStyler.styleForeignNode(argument.value)
if idx == argument.value.span.start:
self.commandStyler.setStyling(argument.span.slice, Style.Complex.value)
```
#### File: gui/lexers/styler.py
```python
from __future__ import annotations
from abc import ABC, abstractmethod
from collections import deque, OrderedDict
from dataclasses import dataclass
from typing import TypeVar, NewType, Protocol, Generic, Type, Optional
from Cat.utils.graphs import semiTopologicalSort
from Cat.utils.logging_ import logError
from model.parsing.tree import Node
from model.utils import LanguageId
_TNode = TypeVar('_TNode', bound=Node)
_TStyler = TypeVar('_TStyler', bound='CatStyler')
StyleId = NewType('StyleId', int)
DEFAULT_STYLE_ID: StyleId = StyleId(0)
class StylingFunc(Protocol):
def __call__(self, span: slice, style: StyleId) -> None:
...
@dataclass
class CatStyler(Generic[_TNode], ABC):
setStyling: StylingFunc
# innerStylers: dict[Type[Node], CatStyler]
innerStylers: dict[str, CatStyler]
offset: int
@classmethod
@abstractmethod
def language(cls) -> LanguageId:
pass
@property
@abstractmethod
def localStylesCount(self) -> int:
pass
# @classmethod
# @abstractmethod
# def localInnerStylers(cls) -> list[Type[CatStyler]]:
# pass
@classmethod
@abstractmethod
def localInnerLanguages(cls) -> list[LanguageId]:
pass
# @classmethod
# def createStyler(cls, setStyling: StylingFunc):
# # def collectAllInnerStylers(cls, setStyling: StylingFunc):
# toHandle: deque[Type[CatStyler]] = deque()
#
# allInnerStylerTypes: list[Type[CatStyler]] = []
# stylerTypesByLang: dict[str, Type[CatStyler]] = {}
# innerLanguagesByLang: dict[str, list[str]] = {}
#
# toHandle.append(cls)
# while toHandle:
# stylerCls = toHandle.pop()
# if stylerCls.language() in stylerTypesByLang:
# continue
# allInnerStylerTypes.append(stylerCls)
# stylerTypesByLang[stylerCls.language()] = stylerCls
# localInnerStylers = stylerCls.localInnerStylers()
#
# innerLanguagesByLang[stylerCls.language()] = [l.language() for l in localInnerStylers]
# toHandle.extend(localInnerStylers)
#
# localInnerStylersByLang = {
# lang: [stylerTypesByLang[il] for il in innerLangs]
# for lang, innerLangs in innerLanguagesByLang.items()
# }
#
# sortedStylerTypes: list[Type[CatStyler]] = semiTopologicalSort(
# cast(Type[CatStyler], cls),
# allInnerStylerTypes,
# getDestinations=lambda x: localInnerStylersByLang[x.language()],
# getId=lambda x: x.language()
# )
#
# allStylers: OrderedDict[str, CatStyler] = OrderedDict()
#
# offset = 0
# for stylerCls in sortedStylerTypes:
# styler = stylerCls(
# setStyling,
# allStylers,
# offset
# )
# allStylers[styler.language()] = styler
#
# return list(allStylers.values())[0]
@classmethod
def _getStyler(cls, language: LanguageId) -> Optional[Type[CatStyler]]:
if language == cls.language():
return cls
else:
return getStylerCls(language)
@classmethod
def _allInnerLanguages(cls) -> list[LanguageId]:
# def collectAllInnerStylers(cls, setStyling: StylingFunc):
toHandle: deque[LanguageId] = deque()
allInnerLangs: list[LanguageId] = []
seenLangs: set[LanguageId] = set()
# stylerTypesByLang: dict[str, Type[CatStyler]] = {}
innerLanguagesByLang: dict[LanguageId, list[LanguageId]] = {}
toHandle.append(cls.language())
while toHandle:
language = toHandle.pop()
if language in seenLangs:
continue
seenLangs.add(language)
allInnerLangs.append(language)
stylerCls = cls._getStyler(language)
if stylerCls is not None:
localInnerLangs = stylerCls.localInnerLanguages()
innerLanguagesByLang[language] = localInnerLangs
toHandle.extend(localInnerLangs)
sortedLanguages: list[LanguageId] = semiTopologicalSort(
cls.language(),
allInnerLangs,
getDestinations=innerLanguagesByLang.get,
getId=lambda x: x
)
return sortedLanguages
@classmethod
def createStyler(cls: Type[_TStyler], setStyling: StylingFunc) -> _TStyler:
sortedLanguages = cls._allInnerLanguages()
allStylers: OrderedDict[LanguageId, CatStyler] = OrderedDict()
offset = 0
for language in sortedLanguages:
stylerCls = cls._getStyler(language)
            if stylerCls is None:
                logError(f"CatStyler: No Styler found for language {language!r} while creating inner stylers for {cls}")
                continue
styler = stylerCls(
setStyling,
allStylers,
offset
)
offset += styler.localStylesCount
allStylers[styler.language()] = styler
return list(allStylers.values())[0]
@abstractmethod
def styleNode(self, node: _TNode) -> int:
pass
def styleForeignNode(self, node: Node) -> int:
styler = self.innerStylers.get(type(node).language)
if styler is not None:
return styler.styleNode(node)
else:
return node.span.start.index
@property
@abstractmethod
def localStyles(self) -> dict[str, StyleId]:
pass
@property
def allStyles(self) -> dict[str, StyleId]:
allStyles = {}
for language, styler in self.innerStylers.items():
innerStyles = styler.localStyles
for name, styleId in innerStyles.items():
allStyles[f'{language}:{name}'] = styleId
return allStyles
# def setStyling(self, length: int, style: int) -> None:
# assert (length >= 0)
# doc = self.document()
# if doc is not None:
# text = doc.content[self._lastStylePos:self._lastStylePos + length]
# self._lastStylePos += length
# length = len(bytearray(text, "utf-8"))
#
# super(LexerJson, self).setStyling(length, style)
__allCatStylers: dict[LanguageId, Type[CatStyler]] = {}
def registerStyler(stylerCls: Type[CatStyler]):
__allCatStylers[stylerCls.language()] = stylerCls
return stylerCls
def getStylerCls(language: LanguageId) -> Optional[Type[CatStyler]]:
return __allCatStylers.get(language)
def getStyler(language: LanguageId, setStyling: StylingFunc) -> Optional[CatStyler]:
stylerCls = getStylerCls(language)
if stylerCls is None:
return None
styler = stylerCls.createStyler(setStyling)
return styler
```
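A minimal sketch of wiring a styler up with a stand-in styling callback, using only names defined above (`parsed_function` is a hypothetical, previously parsed node, and the styler for `'MCCommand'` must have been registered by importing its module):
```python
spans: list[tuple[slice, StyleId]] = []

def record_styling(span: slice, style: StyleId) -> None:
    spans.append((span, style))

styler = getStyler(LanguageId('MCCommand'), record_styling)
if styler is not None:
    styler.styleNode(parsed_function)  # fills `spans` via the callback
    print(styler.allStyles)            # all style names, prefixed with their language
```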
#### File: model/commands/command.py
```python
from __future__ import annotations
from abc import ABC
from dataclasses import dataclass, field
from typing import TypeVar, Union, Optional, Sequence, Any, Generic, ClassVar
from Cat.utils import Singleton
from model.commands.argumentTypes import ArgumentType, BRIGADIER_STRING, LiteralsArgumentType
from model.parsing.tree import Schema, Node
from model.utils import Position
@dataclass
class Named(ABC):
name: str
@dataclass
class CommandPartSchema(Schema, Named, ABC):
description: str = field(default='')
next: list[CommandPartSchema] = field(default_factory=list)
@dataclass
class KeywordSchema(CommandPartSchema):
@property
def asString(self) -> str:
return f'{self.name}'
@dataclass
class ArgumentSchema(CommandPartSchema):
type: ArgumentType = field(default=BRIGADIER_STRING)
@property
def typeName(self) -> str:
return self.type.name
subType: Optional[ArgumentType] = field(default=None)
args: Optional[dict[str, Union[Any, None]]] = field(default=None)
next: Sequence[CommandPartSchema] = field(default_factory=list)
@property
def asString(self) -> str:
if isinstance(self.type, LiteralsArgumentType):
return f"({'|'.join(opt for opt in self.type.options)})"
return f'<{self.name}: {self.typeName}>'
@dataclass
class SwitchSchema(CommandPartSchema):
options: list[Union[KeywordSchema, TerminalSchema]] = field(default_factory=list)
next: Sequence[CommandPartSchema] = field(default_factory=list)
@property
def asString(self) -> str:
options = '|'.join(opt.asString for opt in self.options)
return f"({options})"
@dataclass
class TerminalSchema(Singleton, CommandPartSchema):
@property
def asString(self) -> str:
return 'END'
TERMINAL = TerminalSchema(name='Terminal')
@dataclass
class CommandsRoot(Singleton, CommandPartSchema):
@property
def asString(self) -> str:
return f'<execute: Command>'
COMMANDS_ROOT = CommandsRoot(name='Command')
def formatPossibilities(possibilities: Sequence[CommandPartSchema]) -> str:
isOptional = TERMINAL in possibilities
possibilities2 = [p for p in possibilities if p is not TERMINAL]
isMany = len(possibilities2) > 1
result = '|'.join(p.asString.strip('()') for p in possibilities2)
if isOptional:
return f"[{result}]"
elif isMany:
return f"({result})"
else:
return result
@dataclass
class CommandSchema(CommandPartSchema):
# name: str = field(default='')
# description: str = field(default='')
opLevel: Union[int, str] = field(default=0)
availableInSP: bool = field(default=True)
availableInMP: bool = field(default=True)
removed: bool = field(default=False)
    removedVersion: Optional[str] = field(default=None)  # the version in which this command was removed, if it has been removed
    removedComment: str = field(default='')
    deprecated: bool = field(default=False)
    deprecatedVersion: Optional[str] = field(default=None)  # the version in which this command was deprecated, if it has been deprecated
    deprecatedComment: str = field(default='')
next: list[CommandPartSchema] = field(default_factory=list[CommandPartSchema])
@property
def asString(self) -> str:
return f"{self.name} {formatPossibilities(self.next)}"
@dataclass
class CommentSchema(CommandPartSchema):
@property
def asString(self) -> str:
return f"<Comment>"
@dataclass
class MCFunctionSchema(Singleton, CommandPartSchema):
@property
def asString(self) -> str:
return f'<MCFunction>'
_TCommandPartSchema = TypeVar('_TCommandPartSchema', bound=CommandPartSchema)
@dataclass
class CommandPart(Node['CommandPart', _TCommandPartSchema], Generic[_TCommandPartSchema]):
source: str = field(repr=False)
@property
def content(self) -> str:
return self.source[self.span.slice]
_next: Optional[CommandPart] = field(default=None, init=False)
_prev: Optional[CommandPart] = field(default=None, init=False, repr=False)
language: ClassVar[str] = 'MCCommand'
@property
def next(self) -> Optional[CommandPart]:
return self._next
@next.setter
def next(self, value: Optional[CommandPart]):
oldVal = self._next
if oldVal is not None:
oldVal._prev = None
if value is not None:
value._prev = self
self._next = value
@property
def prev(self) -> Optional[CommandPart]:
return self._prev
@property
def start(self) -> Position:
return self.span.start
@property
def end(self) -> Position:
return self.span.end
@dataclass
class ParsedComment(CommandPart[CommentSchema]):
pass
@dataclass
class ParsedCommand(CommandPart[CommandSchema]):
name: str
@dataclass
class ParsedArgument(CommandPart[ArgumentSchema]):
value: Any
@dataclass
class MCFunction(CommandPart[MCFunctionSchema]):
children: list[Union[ParsedCommand, ParsedComment]] = field(default_factory=list)
@property
def commands(self) -> list[ParsedCommand]:
return [c for c in self.children if isinstance(c, ParsedCommand)]
@property
def comments(self) -> list[ParsedComment]:
return [c for c in self.children if isinstance(c, ParsedComment)]
```
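For illustration, `asString` and `formatPossibilities` render a schema roughly like a usage string. A small sketch with hand-built schemas (invented names, and assuming the `Schema` base adds no required fields):
```python
run_kw = KeywordSchema(name='run')
target = ArgumentSchema(name='targets')  # type defaults to BRIGADIER_STRING
print(run_kw.asString)                          # -> 'run'
print(target.asString)                          # -> '<targets: ...>' with the type name from argumentTypes
print(formatPossibilities([run_kw, TERMINAL]))  # -> '[run]'; TERMINAL marks the tail as optional
```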
#### File: model/commands/snbt.py
```python
import functools as ft
from typing import Optional
from nbtlib import Parser, tokenize, Base, Byte, Short, Int, Long, Float, Double, String, List, Compound, ByteArray, IntArray, LongArray, Path, \
ListIndex, CompoundMatch, InvalidLiteral
from nbtlib.path import can_be_converted_to_int, NamedKey, extend_accessors
from Cat.utils import escapeForXml
from Cat.utils.collections_ import AddToDictDecorator, getIfKeyIssubclass, OrderedDict
from model.commands.stringReader import StringReader
from model.commands.utils import CommandSyntaxError
from model.nbt.snbtParser import SNBTParser, SNBTError
from model.nbt.tags import NBTTag, ByteTag, ShortTag, IntTag, LongTag, FloatTag, DoubleTag, StringTag, ListTag, CompoundTag, ByteArrayTag, IntArrayTag, LongArrayTag
from model.utils import Span
def _parseNBTTagBare(sr: StringReader, *, errorsIO: list[CommandSyntaxError]) -> Optional[Base]:
# parse_nbt('{foo: [hello, world], bar: [I; 1, 2, 3]}')
sr.save()
literal = sr.source[sr.cursor:]
if not literal:
sr.rollback()
return None
try:
parser = Parser(tokenize(literal))
tag = parser.parse()
cursor = parser.token_span[1]
# account for whitespace at the end:
strVal = literal[:cursor]
strVal = strVal.rstrip()
cursor = len(strVal)
sr.cursor += cursor
except InvalidLiteral as ex:
if ex.args[0] != (0, 1):
message = ex.args[1]
start = ex.args[0][0] + sr.cursor
stop = ex.args[0][1] + sr.cursor
begin = sr.posFromColumn(start)
end = sr.posFromColumn(stop)
errorsIO.append(CommandSyntaxError(escapeForXml(message), Span(begin, end), style='error'))
sr.rollback()
return None
return tag
def parseNBTTag1(sr: StringReader, *, errorsIO: list[CommandSyntaxError]) -> Optional[NBTTag]:
tag: Optional[Base] = _parseNBTTagBare(sr, errorsIO=errorsIO)
if tag is None:
return None
result = convertTag(tag)
return result
def parseNBTTag2(sr: StringReader, *, errorsIO: list[CommandSyntaxError]) -> Optional[NBTTag]:
# parse_nbt('{foo: [hello, world], bar: [I; 1, 2, 3]}')
sr.save()
literal = sr.source[sr.cursor:]
if not literal:
sr.rollback()
return None
parser = SNBTParser(literal, ignoreTrailingChars=True)
tag = parser.parseNBTTag()
for ex in parser.errors:
message = ex.message
start = ex.span.start.index + sr.cursor
stop = ex.span.end.index + sr.cursor
begin = sr.posFromColumn(start)
end = sr.posFromColumn(stop)
style = ex.style
errorsIO.append(SNBTError(message, Span(begin, end), style=style))
if tag is not None:
cursor = parser._last.span.end.index
sr.cursor += cursor
else:
sr.rollback()
return tag
parseNBTTag = parseNBTTag2
class InvalidPath(ValueError):
"""Raised when creating an invalid nbt path."""
def __str__(self):
return f"{self.args[1]} at position {self.args[0][0]}"
def parse_accessors(parser: Parser, literal: str):
while True:
try:
tag = parser.parse()
except InvalidLiteral as exc:
raise InvalidPath(exc.args[0], f"Invalid path. ({exc.args[1]})") from exc
if isinstance(tag, String):
if parser.current_token.type == "QUOTED_STRING":
yield NamedKey(tag[:])
else:
yield from (NamedKey(key) for key in tag.split(".") if key)
elif isinstance(tag, List):
if not tag:
yield ListIndex(index=None)
elif len(tag) != 1:
raise InvalidPath(None, "Brackets should only contain one element")
elif issubclass(tag.subtype, Compound):
yield ListIndex(index=None)
yield CompoundMatch(tag[0])
elif issubclass(tag.subtype, Int) or can_be_converted_to_int(tag[0]):
yield ListIndex(int(tag[0]))
else:
raise InvalidPath(
None, "Brackets should only contain an integer or a compound"
)
elif isinstance(tag, Compound):
yield CompoundMatch(tag)
elif parser.current_token.type == "NUMBER":
yield from (
NamedKey(key) for key in parser.current_token.value.split(".") if key
)
else:
raise InvalidPath(None, f"Invalid path element {tag}")
try:
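			# the path literal ends at the first space/tab following a completed accessor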
nextCharPos = parser.token_span[1] - 1
if nextCharPos < len(literal) and literal[nextCharPos] in {' ', '\t'}:
break
parser.next()
except InvalidLiteral:
break
def _parseNBTPathBare(sr: StringReader, *, errorsIO: list[CommandSyntaxError]) -> Optional[Path]:
sr.save()
literal = sr.source[sr.cursor:]
parser = None
try:
parser = Parser(tokenize(literal))
except InvalidLiteral:
accessorsIter = ()
else:
accessorsIter = parse_accessors(parser, literal)
try:
accessors = ()
for accessor in accessorsIter:
accessors = extend_accessors(accessors, accessor)
path = Path.from_accessors(accessors)
if parser is not None:
cursor = parser.token_span[1]
if cursor != len(literal):
cursor -= 1
sr.cursor += cursor
except (InvalidLiteral, InvalidPath) as ex:
message = ex.args[1]
if ex.args[0] is None:
begin, end = sr.currentSpan.asTuple
else:
start = ex.args[0][0] + sr.cursor
stop = ex.args[0][1] + sr.cursor
begin = sr.posFromColumn(start)
end = sr.posFromColumn(stop)
errorsIO.append(CommandSyntaxError(escapeForXml(message), Span(begin, end), style='error'))
sr.rollback()
return None
return path
def parseNBTPath(sr: StringReader, *, errorsIO: list[CommandSyntaxError]) -> Optional[Path]:
path: Optional[Path] = _parseNBTPathBare(sr, errorsIO=errorsIO)
if path is None:
return None
return path
# result = convertTag(path)
# return result
__tagConverters: dict = {}
TagConverter = AddToDictDecorator(__tagConverters)
getTagConverter = ft.partial(getIfKeyIssubclass, __tagConverters)
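# Converter registry: @TagConverter(cls) maps an nbtlib tag class to its converter function;
# getIfKeyIssubclass resolves the lookup by issubclass rather than exact type, so subclasses
# can fall back to a base class converter.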
def convertTag(tag: Base) -> NBTTag:
tagType = type(tag)
converter = getTagConverter(tagType)
return converter(tag)
# @TagConverter(Boolean)
# def convertBooleanTag(tag: Boolean) -> BooleanTag:
# pass
@TagConverter(Byte)
def convertByteTag(tag: Byte) -> ByteTag:
return ByteTag(int(tag))
@TagConverter(Short)
def convertShortTag(tag: Short) -> ShortTag:
return ShortTag(int(tag))
@TagConverter(Int)
def convertIntTag(tag: Int) -> IntTag:
return IntTag(int(tag))
@TagConverter(Long)
def convertLongTag(tag: Long) -> LongTag:
return LongTag(int(tag))
@TagConverter(Float)
def convertFloatTag(tag: Float) -> FloatTag:
return FloatTag(float(tag))
@TagConverter(Double)
def convertDoubleTag(tag: Double) -> DoubleTag:
return DoubleTag(float(tag))
@TagConverter(String)
def convertStringTag(tag: String) -> StringTag:
return StringTag(tag)
@TagConverter(List)
def convertListTag(tag: List) -> ListTag:
return ListTag([convertTag(t) for t in tag])
@TagConverter(Compound)
def convertCompoundTag(tag: Compound) -> CompoundTag:
return CompoundTag(OrderedDict((n, convertTag(t)) for n, t in tag.items()))
@TagConverter(ByteArray)
def convertByteArrayTag(tag: ByteArray) -> ByteArrayTag:
return ByteArrayTag([convertByteTag(t) for t in tag])
@TagConverter(IntArray)
def convertIntArrayTag(tag: IntArray) -> IntArrayTag:
return IntArrayTag([convertIntTag(t) for t in tag])
@TagConverter(LongArray)
def convertLongArrayTag(tag: LongArray) -> LongArrayTag:
return LongArrayTag([convertLongTag(t) for t in tag])
```
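A minimal usage sketch for the parsers above (not part of the original file); it assumes the `StringReader(source, line, lineStart, fullSource)` construction used in `datapack/json/contexts.py` below, and that collected errors expose `.message`, as `parseNBTTag2` itself relies on:
```python
from model.commands.snbt import parseNBTTag, parseNBTPath
from model.commands.stringReader import StringReader
from model.commands.utils import CommandSyntaxError

source = '{foo: [hello, world], bar: [I; 1, 2, 3]}'
sr = StringReader(source, 0, 0, source)
errors: list[CommandSyntaxError] = []
tag = parseNBTTag(sr, errorsIO=errors)  # a CompoundTag on success, None on failure

pathSource = 'Inventory[{Slot: 0b}].id'
psr = StringReader(pathSource, 0, 0, pathSource)
path = parseNBTPath(psr, errorsIO=errors)  # an nbtlib Path on success, None on failure

for error in errors:
	print(error.message)
```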
#### File: datapack/json/contexts.py
```python
from dataclasses import replace
from typing import Optional, Iterable
from PyQt5.QtWidgets import QWidget
from Cat.utils.logging_ import logError
from model.commands.stringReader import StringReader
from model.datapack.json.argTypes import *
from model.datapackContents import ResourceLocationNode, ResourceLocationSchema
from model.json.core import *
from model.json.jsonContext import jsonStringContext, JsonStringContext
from model.messages import *
from model.parsing.contextProvider import Suggestions, validateTree, getSuggestions, getDocumentation, onIndicatorClicked, getClickableRanges
from model.utils import GeneralError, Position, Span, MDStr
@jsonStringContext(MINECRAFT_RESOURCE_LOCATION.name)
class ResourceLocationHandler(JsonStringContext):
def schema(self, node: JsonString) -> ResourceLocationSchema:
schema = node.schema
if isinstance(schema, JsonStringSchema):
args = schema.args
schema = (args or {}).get('schema')
else:
schema = None
if schema is None:
schema = ResourceLocationSchema('', 'any')
if not isinstance(schema, ResourceLocationSchema):
			logError(f"invalid 'schema' argument for JsonArgType '{MINECRAFT_RESOURCE_LOCATION.name}' in JsonStringSchema: {schema}. Expected an instance of ResourceLocationSchema.")
schema = ResourceLocationSchema('', 'any')
return schema
def prepare(self, node: JsonString, errorsIO: list[GeneralError]) -> None:
sr = StringReader(node.data, 0, 0, node.data)
allowTag = True
location = sr.readResourceLocation(allowTag=allowTag)
if len(location) != len(node.data):
errorsIO.append(JsonSemanticsError(EXPECTED_BUT_GOT_MSG.format(MINECRAFT_RESOURCE_LOCATION.name, node.data), node.span))
# node.parsedValue = node.data
# return
schema = self.schema(node)
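		# node.span covers the quoted JSON string; move one column inward on each side
		# (the end only when the closing quote is actually present) so the resource
		# location span covers just the string contents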
start = node.span.start
start = replace(start, column=start.column + 1, index=start.index + 1)
end = node.span.end
if len(node.data) + 2 == node.span.length:
end = replace(end, column=end.column - 1, index=end.index - 1)
span = Span(start, end)
location = ResourceLocationNode.fromString(location, span, schema)
node.parsedValue = location
def validate(self, node: JsonString, errorsIO: list[JsonSemanticsError]) -> None:
if isinstance(node.parsedValue, ResourceLocationNode):
validateTree(node.parsedValue, '', errorsIO)
# def getSuggestions2(self, ai: ArgumentSchema, contextStr: str, cursorPos: int, replaceCtx: str) -> Suggestions:
# return self.context.getSuggestions(contextStr, cursorPos, replaceCtx)
	def getSuggestions(self, node: JsonString, pos: Position, replaceCtx: str) -> Suggestions:
		return getSuggestions(node.parsedValue, '', pos, replaceCtx)
def getDocumentation(self, node: JsonString, position: Position) -> MDStr:
tips = []
valueDoc = getDocumentation(node.parsedValue, '', position)
if valueDoc:
tips.append(valueDoc)
propertyDoc = super(ResourceLocationHandler, self).getDocumentation(node, position)
if propertyDoc:
tips.append(propertyDoc)
return MDStr('\n\n'.join(tips)) # '\n<br>\n'.join(tips)
def getClickableRanges(self, node: JsonString) -> Optional[Iterable[Span]]:
return getClickableRanges(node.parsedValue, '')
def onIndicatorClicked(self, node: JsonString, pos: Position, window: QWidget) -> None:
return onIndicatorClicked(node.parsedValue, '', pos, window)
def init() -> None:
pass
```
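A hypothetical sketch of registering another handler with the same `@jsonStringContext` decorator pattern. The argument-type name, the toy validation rule, and the assumption that `prepare`/`validate` are the only required overrides are all illustrative, not part of the project:
```python
from model.json.core import JsonString, JsonSemanticsError
from model.json.jsonContext import jsonStringContext, JsonStringContext
from model.utils import GeneralError

@jsonStringContext('minecraft:example_uuid')  # hypothetical JsonArgType name
class ExampleUUIDHandler(JsonStringContext):
	def prepare(self, node: JsonString, errorsIO: list[GeneralError]) -> None:
		node.parsedValue = node.data  # no sub-parsing needed for this sketch
	def validate(self, node: JsonString, errorsIO: list[JsonSemanticsError]) -> None:
		if node.data.count('-') != 4:  # toy check standing in for real UUID validation
			errorsIO.append(JsonSemanticsError(f"invalid UUID: {node.data!r}", node.span))
```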
#### File: model/datapack/version6.py
```python
from model.datapack.dpVersion import registerDPVersion, DPVersion
from model.datapack.json.schemas.rawJsonText import RAW_JSON_TEXT_SCHEMA
from model.datapack.json.schemas.tags import *
from model.datapackContents import NAME_SPACE_VAR, EntryHandlerInfo, DatapackContents, GenerationInfo, DefaultFileInfo, \
buildFunctionMeta, buildEntryHandlers, buildJsonMeta, buildNbtMeta
from model.json.core import JsonSchema
def initPlugin() -> None:
registerDPVersion(version6)
from model.datapack.json import contexts
contexts.init()
LOAD_JSON_CONTENTS = f"""{{
"values": [
"{NAME_SPACE_VAR}:load"
]
}}"""
TICK_JSON_CONTENTS = f"""{{
"values": [
"{NAME_SPACE_VAR}:tick"
]
}}"""
LOAD_MCFUNCTION_CONTENTS = f"say loading {NAME_SPACE_VAR} ..."
TICK_MCFUNCTION_CONTENTS = "# add commands here..."
DATAPACK_CONTENTS: list[EntryHandlerInfo] = [
# TagInfos:
EntryHandlerInfo(
'tags/blocks/',
'.json',
True,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='tags/blocks'),
DatapackContents.tags.blocks
),
EntryHandlerInfo(
'tags/entity_types/',
'.json',
True,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='tags/entity_types'),
DatapackContents.tags.entity_types
),
EntryHandlerInfo(
'tags/fluids/',
'.json',
True,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='tags/fluids'),
DatapackContents.tags.fluids
),
EntryHandlerInfo(
'tags/functions/',
'.json',
True,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='tags/functions'),
DatapackContents.tags.functions,
GenerationInfo(
initialFiles=[
DefaultFileInfo(
'load.json',
'minecraft',
LOAD_JSON_CONTENTS
),
DefaultFileInfo(
'tick.json',
'minecraft',
TICK_JSON_CONTENTS
),
]
)
),
EntryHandlerInfo(
'tags/game_events/',
'.json',
True,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='tags/game_events'),
DatapackContents.tags.game_events
),
EntryHandlerInfo(
'tags/items/',
'.json',
True,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='tags/items'),
DatapackContents.tags.items
),
# WorldGenInfos:
EntryHandlerInfo(
'worldgen/biome/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/biome'),
DatapackContents.worldGen.biome
),
EntryHandlerInfo(
'worldgen/configured_carver/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/configured_carver'),
DatapackContents.worldGen.configured_carver
),
EntryHandlerInfo(
'worldgen/configured_feature/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/configured_feature'),
DatapackContents.worldGen.configured_feature
),
EntryHandlerInfo(
'worldgen/configured_structure_feature/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/configured_structure_feature'),
DatapackContents.worldGen.configured_structure_feature
),
EntryHandlerInfo(
'worldgen/configured_surface_builder/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/configured_surface_builder'),
DatapackContents.worldGen.configured_surface_builder
),
EntryHandlerInfo(
'worldgen/noise_settings/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/noise_settings'),
DatapackContents.worldGen.noise_settings
),
EntryHandlerInfo(
'worldgen/processor_list/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/processor_list'),
DatapackContents.worldGen.processor_list
),
EntryHandlerInfo(
'worldgen/template_pool/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='worldgen/template_pool'),
DatapackContents.worldGen.template_pool
),
# DatapackContents:
EntryHandlerInfo(
'advancements/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='advancements'),
DatapackContents.advancements
),
EntryHandlerInfo(
'functions/',
'.mcfunction',
False,
lambda fp, rl: buildFunctionMeta(fp, rl),
DatapackContents.functions,
GenerationInfo(
initialFiles=[
DefaultFileInfo(
'load.mcfunction',
NAME_SPACE_VAR,
LOAD_MCFUNCTION_CONTENTS
),
DefaultFileInfo(
'tick.mcfunction',
NAME_SPACE_VAR,
TICK_MCFUNCTION_CONTENTS
),
]
)
),
EntryHandlerInfo(
'item_modifiers/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='item_modifiers'),
DatapackContents.item_modifiers
),
EntryHandlerInfo(
'loot_tables/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='loot_tables'),
DatapackContents.loot_tables
),
EntryHandlerInfo(
'predicates/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='predicates'),
DatapackContents.predicates
),
EntryHandlerInfo(
'recipes/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='recipes'),
DatapackContents.recipes
),
EntryHandlerInfo(
'structures/',
'.nbt',
False,
lambda fp, rl: buildNbtMeta(fp, rl),
DatapackContents.structures
),
EntryHandlerInfo(
'dimension/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='dimension'),
DatapackContents.dimension
),
EntryHandlerInfo(
'dimension_type/',
'.json',
False,
lambda fp, rl: buildJsonMeta(fp, rl, schemaId='dimension_type'),
DatapackContents.dimension_type
),
]
DATAPACK_JSON_SCHEMAS: dict[str, JsonSchema] = {
'rawJsonText': RAW_JSON_TEXT_SCHEMA,
'tags/blocks': TAGS_BLOCKS,
'tags/entity_types': TAGS_ENTITY_TYPES,
'tags/fluids': TAGS_FLUIDS,
'tags/functions': TAGS_FUNCTIONS,
'tags/game_events': TAGS_GAME_EVENTS,
'tags/items': TAGS_ITEMS,
}
version6 = DPVersion(
name='6',
structure=buildEntryHandlers(DATAPACK_CONTENTS),
jsonSchemas=DATAPACK_JSON_SCHEMAS
)
```
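A hypothetical sketch of extending the structure above with one more folder; the folder name, schema id, and target attribute are placeholders, not part of version 6:
```python
EXTRA_CONTENTS: list[EntryHandlerInfo] = [
	EntryHandlerInfo(
		'damage_type/',  # illustrative folder
		'.json',
		False,
		lambda fp, rl: buildJsonMeta(fp, rl, schemaId='damage_type'),
		DatapackContents.advancements,  # placeholder attribute for the sketch
	),
]

version6_variant = DPVersion(
	name='6-variant',
	structure=buildEntryHandlers(DATAPACK_CONTENTS + EXTRA_CONTENTS),
	jsonSchemas=dict(DATAPACK_JSON_SCHEMAS),
)
# registerDPVersion(version6_variant)  # would be called from an initPlugin() like above
```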
#### File: model/data/version1_17.py
```python
from model.commands.argumentTypes import *
from model.data.mcVersions import registerMCVersion, MCVersion, Gamerule
from model.data.mcd import fillFromMinecraftData
from model.data.v1_17.commands import fillCommandsFor1_17
from model.data.v1_17.generatedBlockStates import BLOCK_STATES_BY_BLOCK
from model.datapackContents import ResourceLocation
def initPlugin() -> None:
registerMCVersion(version1_17)
# extracted from the Minecraft jar:
BLOCKS: set[ResourceLocation] = {
ResourceLocation.fromString('acacia_button'),
ResourceLocation.fromString('acacia_door'),
ResourceLocation.fromString('acacia_fence'),
ResourceLocation.fromString('acacia_fence_gate'),
ResourceLocation.fromString('acacia_leaves'),
ResourceLocation.fromString('acacia_log'),
ResourceLocation.fromString('acacia_planks'),
ResourceLocation.fromString('acacia_pressure_plate'),
ResourceLocation.fromString('acacia_sapling'),
ResourceLocation.fromString('acacia_sign'),
ResourceLocation.fromString('acacia_slab'),
ResourceLocation.fromString('acacia_stairs'),
ResourceLocation.fromString('acacia_trapdoor'),
ResourceLocation.fromString('acacia_wall_sign'),
ResourceLocation.fromString('acacia_wood'),
ResourceLocation.fromString('activator_rail'),
ResourceLocation.fromString('air'),
ResourceLocation.fromString('allium'),
ResourceLocation.fromString('amethyst_block'),
ResourceLocation.fromString('amethyst_cluster'),
ResourceLocation.fromString('ancient_debris'),
ResourceLocation.fromString('andesite'),
ResourceLocation.fromString('andesite_slab'),
ResourceLocation.fromString('andesite_stairs'),
ResourceLocation.fromString('andesite_wall'),
ResourceLocation.fromString('anvil'),
ResourceLocation.fromString('attached_melon_stem'),
ResourceLocation.fromString('attached_pumpkin_stem'),
ResourceLocation.fromString('azalea'),
ResourceLocation.fromString('azalea_leaves'),
ResourceLocation.fromString('azure_bluet'),
ResourceLocation.fromString('bamboo'),
ResourceLocation.fromString('bamboo_sapling'),
ResourceLocation.fromString('barrel'),
ResourceLocation.fromString('barrier'),
ResourceLocation.fromString('basalt'),
ResourceLocation.fromString('beacon'),
ResourceLocation.fromString('bedrock'),
ResourceLocation.fromString('bee_nest'),
ResourceLocation.fromString('beehive'),
ResourceLocation.fromString('beetroots'),
ResourceLocation.fromString('bell'),
ResourceLocation.fromString('big_dripleaf'),
ResourceLocation.fromString('big_dripleaf_stem'),
ResourceLocation.fromString('birch_button'),
ResourceLocation.fromString('birch_door'),
ResourceLocation.fromString('birch_fence'),
ResourceLocation.fromString('birch_fence_gate'),
ResourceLocation.fromString('birch_leaves'),
ResourceLocation.fromString('birch_log'),
ResourceLocation.fromString('birch_planks'),
ResourceLocation.fromString('birch_pressure_plate'),
ResourceLocation.fromString('birch_sapling'),
ResourceLocation.fromString('birch_sign'),
ResourceLocation.fromString('birch_slab'),
ResourceLocation.fromString('birch_stairs'),
ResourceLocation.fromString('birch_trapdoor'),
ResourceLocation.fromString('birch_wall_sign'),
ResourceLocation.fromString('birch_wood'),
ResourceLocation.fromString('black_banner'),
ResourceLocation.fromString('black_bed'),
ResourceLocation.fromString('black_candle'),
ResourceLocation.fromString('black_candle_cake'),
ResourceLocation.fromString('black_carpet'),
ResourceLocation.fromString('black_concrete'),
ResourceLocation.fromString('black_concrete_powder'),
ResourceLocation.fromString('black_glazed_terracotta'),
ResourceLocation.fromString('black_shulker_box'),
ResourceLocation.fromString('black_stained_glass'),
ResourceLocation.fromString('black_stained_glass_pane'),
ResourceLocation.fromString('black_terracotta'),
ResourceLocation.fromString('black_wall_banner'),
ResourceLocation.fromString('black_wool'),
ResourceLocation.fromString('blackstone'),
ResourceLocation.fromString('blackstone_slab'),
ResourceLocation.fromString('blackstone_stairs'),
ResourceLocation.fromString('blackstone_wall'),
ResourceLocation.fromString('blast_furnace'),
ResourceLocation.fromString('blue_banner'),
ResourceLocation.fromString('blue_bed'),
ResourceLocation.fromString('blue_candle'),
ResourceLocation.fromString('blue_candle_cake'),
ResourceLocation.fromString('blue_carpet'),
ResourceLocation.fromString('blue_concrete'),
ResourceLocation.fromString('blue_concrete_powder'),
ResourceLocation.fromString('blue_glazed_terracotta'),
ResourceLocation.fromString('blue_ice'),
ResourceLocation.fromString('blue_orchid'),
ResourceLocation.fromString('blue_shulker_box'),
ResourceLocation.fromString('blue_stained_glass'),
ResourceLocation.fromString('blue_stained_glass_pane'),
ResourceLocation.fromString('blue_terracotta'),
ResourceLocation.fromString('blue_wall_banner'),
ResourceLocation.fromString('blue_wool'),
ResourceLocation.fromString('bone_block'),
ResourceLocation.fromString('bookshelf'),
ResourceLocation.fromString('brain_coral'),
ResourceLocation.fromString('brain_coral_block'),
ResourceLocation.fromString('brain_coral_fan'),
ResourceLocation.fromString('brain_coral_wall_fan'),
ResourceLocation.fromString('brewing_stand'),
ResourceLocation.fromString('brick_slab'),
ResourceLocation.fromString('brick_stairs'),
ResourceLocation.fromString('brick_wall'),
ResourceLocation.fromString('bricks'),
ResourceLocation.fromString('brown_banner'),
ResourceLocation.fromString('brown_bed'),
ResourceLocation.fromString('brown_candle'),
ResourceLocation.fromString('brown_candle_cake'),
ResourceLocation.fromString('brown_carpet'),
ResourceLocation.fromString('brown_concrete'),
ResourceLocation.fromString('brown_concrete_powder'),
ResourceLocation.fromString('brown_glazed_terracotta'),
ResourceLocation.fromString('brown_mushroom'),
ResourceLocation.fromString('brown_mushroom_block'),
ResourceLocation.fromString('brown_shulker_box'),
ResourceLocation.fromString('brown_stained_glass'),
ResourceLocation.fromString('brown_stained_glass_pane'),
ResourceLocation.fromString('brown_terracotta'),
ResourceLocation.fromString('brown_wall_banner'),
ResourceLocation.fromString('brown_wool'),
ResourceLocation.fromString('bubble_column'),
ResourceLocation.fromString('bubble_coral'),
ResourceLocation.fromString('bubble_coral_block'),
ResourceLocation.fromString('bubble_coral_fan'),
ResourceLocation.fromString('bubble_coral_wall_fan'),
ResourceLocation.fromString('budding_amethyst'),
ResourceLocation.fromString('cactus'),
ResourceLocation.fromString('cake'),
ResourceLocation.fromString('calcite'),
ResourceLocation.fromString('campfire'),
ResourceLocation.fromString('candle'),
ResourceLocation.fromString('candle_cake'),
ResourceLocation.fromString('carrots'),
ResourceLocation.fromString('cartography_table'),
ResourceLocation.fromString('carved_pumpkin'),
ResourceLocation.fromString('cauldron'),
ResourceLocation.fromString('cave_air'),
ResourceLocation.fromString('cave_vines'),
ResourceLocation.fromString('cave_vines_plant'),
ResourceLocation.fromString('chain'),
ResourceLocation.fromString('chain_command_block'),
ResourceLocation.fromString('chest'),
ResourceLocation.fromString('chipped_anvil'),
ResourceLocation.fromString('chiseled_deepslate'),
ResourceLocation.fromString('chiseled_nether_bricks'),
ResourceLocation.fromString('chiseled_polished_blackstone'),
ResourceLocation.fromString('chiseled_quartz_block'),
ResourceLocation.fromString('chiseled_red_sandstone'),
ResourceLocation.fromString('chiseled_sandstone'),
ResourceLocation.fromString('chiseled_stone_bricks'),
ResourceLocation.fromString('chorus_flower'),
ResourceLocation.fromString('chorus_plant'),
ResourceLocation.fromString('clay'),
ResourceLocation.fromString('coal_block'),
ResourceLocation.fromString('coal_ore'),
ResourceLocation.fromString('coarse_dirt'),
ResourceLocation.fromString('cobbled_deepslate'),
ResourceLocation.fromString('cobbled_deepslate_slab'),
ResourceLocation.fromString('cobbled_deepslate_stairs'),
ResourceLocation.fromString('cobbled_deepslate_wall'),
ResourceLocation.fromString('cobblestone'),
ResourceLocation.fromString('cobblestone_slab'),
ResourceLocation.fromString('cobblestone_stairs'),
ResourceLocation.fromString('cobblestone_wall'),
ResourceLocation.fromString('cobweb'),
ResourceLocation.fromString('cocoa'),
ResourceLocation.fromString('command_block'),
ResourceLocation.fromString('comparator'),
ResourceLocation.fromString('composter'),
ResourceLocation.fromString('conduit'),
ResourceLocation.fromString('copper_block'),
ResourceLocation.fromString('copper_ore'),
ResourceLocation.fromString('cornflower'),
ResourceLocation.fromString('cracked_deepslate_bricks'),
ResourceLocation.fromString('cracked_deepslate_tiles'),
ResourceLocation.fromString('cracked_nether_bricks'),
ResourceLocation.fromString('cracked_polished_blackstone_bricks'),
ResourceLocation.fromString('cracked_stone_bricks'),
ResourceLocation.fromString('crafting_table'),
ResourceLocation.fromString('creeper_head'),
ResourceLocation.fromString('creeper_wall_head'),
ResourceLocation.fromString('crimson_button'),
ResourceLocation.fromString('crimson_door'),
ResourceLocation.fromString('crimson_fence'),
ResourceLocation.fromString('crimson_fence_gate'),
ResourceLocation.fromString('crimson_fungus'),
ResourceLocation.fromString('crimson_hyphae'),
ResourceLocation.fromString('crimson_nylium'),
ResourceLocation.fromString('crimson_planks'),
ResourceLocation.fromString('crimson_pressure_plate'),
ResourceLocation.fromString('crimson_roots'),
ResourceLocation.fromString('crimson_sign'),
ResourceLocation.fromString('crimson_slab'),
ResourceLocation.fromString('crimson_stairs'),
ResourceLocation.fromString('crimson_stem'),
ResourceLocation.fromString('crimson_trapdoor'),
ResourceLocation.fromString('crimson_wall_sign'),
ResourceLocation.fromString('crying_obsidian'),
ResourceLocation.fromString('cut_copper'),
ResourceLocation.fromString('cut_copper_slab'),
ResourceLocation.fromString('cut_copper_stairs'),
ResourceLocation.fromString('cut_red_sandstone'),
ResourceLocation.fromString('cut_red_sandstone_slab'),
ResourceLocation.fromString('cut_sandstone'),
ResourceLocation.fromString('cut_sandstone_slab'),
ResourceLocation.fromString('cyan_banner'),
ResourceLocation.fromString('cyan_bed'),
ResourceLocation.fromString('cyan_candle'),
ResourceLocation.fromString('cyan_candle_cake'),
ResourceLocation.fromString('cyan_carpet'),
ResourceLocation.fromString('cyan_concrete'),
ResourceLocation.fromString('cyan_concrete_powder'),
ResourceLocation.fromString('cyan_glazed_terracotta'),
ResourceLocation.fromString('cyan_shulker_box'),
ResourceLocation.fromString('cyan_stained_glass'),
ResourceLocation.fromString('cyan_stained_glass_pane'),
ResourceLocation.fromString('cyan_terracotta'),
ResourceLocation.fromString('cyan_wall_banner'),
ResourceLocation.fromString('cyan_wool'),
ResourceLocation.fromString('damaged_anvil'),
ResourceLocation.fromString('dandelion'),
ResourceLocation.fromString('dark_oak_button'),
ResourceLocation.fromString('dark_oak_door'),
ResourceLocation.fromString('dark_oak_fence'),
ResourceLocation.fromString('dark_oak_fence_gate'),
ResourceLocation.fromString('dark_oak_leaves'),
ResourceLocation.fromString('dark_oak_log'),
ResourceLocation.fromString('dark_oak_planks'),
ResourceLocation.fromString('dark_oak_pressure_plate'),
ResourceLocation.fromString('dark_oak_sapling'),
ResourceLocation.fromString('dark_oak_sign'),
ResourceLocation.fromString('dark_oak_slab'),
ResourceLocation.fromString('dark_oak_stairs'),
ResourceLocation.fromString('dark_oak_trapdoor'),
ResourceLocation.fromString('dark_oak_wall_sign'),
ResourceLocation.fromString('dark_oak_wood'),
ResourceLocation.fromString('dark_prismarine'),
ResourceLocation.fromString('dark_prismarine_slab'),
ResourceLocation.fromString('dark_prismarine_stairs'),
ResourceLocation.fromString('daylight_detector'),
ResourceLocation.fromString('dead_brain_coral'),
ResourceLocation.fromString('dead_brain_coral_block'),
ResourceLocation.fromString('dead_brain_coral_fan'),
ResourceLocation.fromString('dead_brain_coral_wall_fan'),
ResourceLocation.fromString('dead_bubble_coral'),
ResourceLocation.fromString('dead_bubble_coral_block'),
ResourceLocation.fromString('dead_bubble_coral_fan'),
ResourceLocation.fromString('dead_bubble_coral_wall_fan'),
ResourceLocation.fromString('dead_bush'),
ResourceLocation.fromString('dead_fire_coral'),
ResourceLocation.fromString('dead_fire_coral_block'),
ResourceLocation.fromString('dead_fire_coral_fan'),
ResourceLocation.fromString('dead_fire_coral_wall_fan'),
ResourceLocation.fromString('dead_horn_coral'),
ResourceLocation.fromString('dead_horn_coral_block'),
ResourceLocation.fromString('dead_horn_coral_fan'),
ResourceLocation.fromString('dead_horn_coral_wall_fan'),
ResourceLocation.fromString('dead_tube_coral'),
ResourceLocation.fromString('dead_tube_coral_block'),
ResourceLocation.fromString('dead_tube_coral_fan'),
ResourceLocation.fromString('dead_tube_coral_wall_fan'),
ResourceLocation.fromString('deepslate'),
ResourceLocation.fromString('deepslate_brick_slab'),
ResourceLocation.fromString('deepslate_brick_stairs'),
ResourceLocation.fromString('deepslate_brick_wall'),
ResourceLocation.fromString('deepslate_bricks'),
ResourceLocation.fromString('deepslate_coal_ore'),
ResourceLocation.fromString('deepslate_copper_ore'),
ResourceLocation.fromString('deepslate_diamond_ore'),
ResourceLocation.fromString('deepslate_emerald_ore'),
ResourceLocation.fromString('deepslate_gold_ore'),
ResourceLocation.fromString('deepslate_iron_ore'),
ResourceLocation.fromString('deepslate_lapis_ore'),
ResourceLocation.fromString('deepslate_redstone_ore'),
ResourceLocation.fromString('deepslate_tile_slab'),
ResourceLocation.fromString('deepslate_tile_stairs'),
ResourceLocation.fromString('deepslate_tile_wall'),
ResourceLocation.fromString('deepslate_tiles'),
ResourceLocation.fromString('detector_rail'),
ResourceLocation.fromString('diamond_block'),
ResourceLocation.fromString('diamond_ore'),
ResourceLocation.fromString('diorite'),
ResourceLocation.fromString('diorite_slab'),
ResourceLocation.fromString('diorite_stairs'),
ResourceLocation.fromString('diorite_wall'),
ResourceLocation.fromString('dirt'),
ResourceLocation.fromString('dirt_path'),
ResourceLocation.fromString('dispenser'),
ResourceLocation.fromString('dragon_egg'),
ResourceLocation.fromString('dragon_head'),
ResourceLocation.fromString('dragon_wall_head'),
ResourceLocation.fromString('dried_kelp_block'),
ResourceLocation.fromString('dripstone_block'),
ResourceLocation.fromString('dropper'),
ResourceLocation.fromString('emerald_block'),
ResourceLocation.fromString('emerald_ore'),
ResourceLocation.fromString('enchanting_table'),
ResourceLocation.fromString('end_gateway'),
ResourceLocation.fromString('end_portal'),
ResourceLocation.fromString('end_portal_frame'),
ResourceLocation.fromString('end_rod'),
ResourceLocation.fromString('end_stone'),
ResourceLocation.fromString('end_stone_brick_slab'),
ResourceLocation.fromString('end_stone_brick_stairs'),
ResourceLocation.fromString('end_stone_brick_wall'),
ResourceLocation.fromString('end_stone_bricks'),
ResourceLocation.fromString('ender_chest'),
ResourceLocation.fromString('exposed_copper'),
ResourceLocation.fromString('exposed_cut_copper'),
ResourceLocation.fromString('exposed_cut_copper_slab'),
ResourceLocation.fromString('exposed_cut_copper_stairs'),
ResourceLocation.fromString('farmland'),
ResourceLocation.fromString('fern'),
ResourceLocation.fromString('fire'),
ResourceLocation.fromString('fire_coral'),
ResourceLocation.fromString('fire_coral_block'),
ResourceLocation.fromString('fire_coral_fan'),
ResourceLocation.fromString('fire_coral_wall_fan'),
ResourceLocation.fromString('fletching_table'),
ResourceLocation.fromString('flower_pot'),
ResourceLocation.fromString('flowering_azalea'),
ResourceLocation.fromString('flowering_azalea_leaves'),
ResourceLocation.fromString('frosted_ice'),
ResourceLocation.fromString('furnace'),
ResourceLocation.fromString('gilded_blackstone'),
ResourceLocation.fromString('glass'),
ResourceLocation.fromString('glass_pane'),
ResourceLocation.fromString('glow_lichen'),
ResourceLocation.fromString('glowstone'),
ResourceLocation.fromString('gold_block'),
ResourceLocation.fromString('gold_ore'),
ResourceLocation.fromString('granite'),
ResourceLocation.fromString('granite_slab'),
ResourceLocation.fromString('granite_stairs'),
ResourceLocation.fromString('granite_wall'),
ResourceLocation.fromString('grass'),
ResourceLocation.fromString('grass_block'),
ResourceLocation.fromString('gravel'),
ResourceLocation.fromString('gray_banner'),
ResourceLocation.fromString('gray_bed'),
ResourceLocation.fromString('gray_candle'),
ResourceLocation.fromString('gray_candle_cake'),
ResourceLocation.fromString('gray_carpet'),
ResourceLocation.fromString('gray_concrete'),
ResourceLocation.fromString('gray_concrete_powder'),
ResourceLocation.fromString('gray_glazed_terracotta'),
ResourceLocation.fromString('gray_shulker_box'),
ResourceLocation.fromString('gray_stained_glass'),
ResourceLocation.fromString('gray_stained_glass_pane'),
ResourceLocation.fromString('gray_terracotta'),
ResourceLocation.fromString('gray_wall_banner'),
ResourceLocation.fromString('gray_wool'),
ResourceLocation.fromString('green_banner'),
ResourceLocation.fromString('green_bed'),
ResourceLocation.fromString('green_candle'),
ResourceLocation.fromString('green_candle_cake'),
ResourceLocation.fromString('green_carpet'),
ResourceLocation.fromString('green_concrete'),
ResourceLocation.fromString('green_concrete_powder'),
ResourceLocation.fromString('green_glazed_terracotta'),
ResourceLocation.fromString('green_shulker_box'),
ResourceLocation.fromString('green_stained_glass'),
ResourceLocation.fromString('green_stained_glass_pane'),
ResourceLocation.fromString('green_terracotta'),
ResourceLocation.fromString('green_wall_banner'),
ResourceLocation.fromString('green_wool'),
ResourceLocation.fromString('grindstone'),
ResourceLocation.fromString('hanging_roots'),
ResourceLocation.fromString('hay_block'),
ResourceLocation.fromString('heavy_weighted_pressure_plate'),
ResourceLocation.fromString('honey_block'),
ResourceLocation.fromString('honeycomb_block'),
ResourceLocation.fromString('hopper'),
ResourceLocation.fromString('horn_coral'),
ResourceLocation.fromString('horn_coral_block'),
ResourceLocation.fromString('horn_coral_fan'),
ResourceLocation.fromString('horn_coral_wall_fan'),
ResourceLocation.fromString('ice'),
ResourceLocation.fromString('infested_chiseled_stone_bricks'),
ResourceLocation.fromString('infested_cobblestone'),
ResourceLocation.fromString('infested_cracked_stone_bricks'),
ResourceLocation.fromString('infested_deepslate'),
ResourceLocation.fromString('infested_mossy_stone_bricks'),
ResourceLocation.fromString('infested_stone'),
ResourceLocation.fromString('infested_stone_bricks'),
ResourceLocation.fromString('iron_bars'),
ResourceLocation.fromString('iron_block'),
ResourceLocation.fromString('iron_door'),
ResourceLocation.fromString('iron_ore'),
ResourceLocation.fromString('iron_trapdoor'),
ResourceLocation.fromString('jack_o_lantern'),
ResourceLocation.fromString('jigsaw'),
ResourceLocation.fromString('jukebox'),
ResourceLocation.fromString('jungle_button'),
ResourceLocation.fromString('jungle_door'),
ResourceLocation.fromString('jungle_fence'),
ResourceLocation.fromString('jungle_fence_gate'),
ResourceLocation.fromString('jungle_leaves'),
ResourceLocation.fromString('jungle_log'),
ResourceLocation.fromString('jungle_planks'),
ResourceLocation.fromString('jungle_pressure_plate'),
ResourceLocation.fromString('jungle_sapling'),
ResourceLocation.fromString('jungle_sign'),
ResourceLocation.fromString('jungle_slab'),
ResourceLocation.fromString('jungle_stairs'),
ResourceLocation.fromString('jungle_trapdoor'),
ResourceLocation.fromString('jungle_wall_sign'),
ResourceLocation.fromString('jungle_wood'),
ResourceLocation.fromString('kelp'),
ResourceLocation.fromString('kelp_plant'),
ResourceLocation.fromString('ladder'),
ResourceLocation.fromString('lantern'),
ResourceLocation.fromString('lapis_block'),
ResourceLocation.fromString('lapis_ore'),
ResourceLocation.fromString('large_amethyst_bud'),
ResourceLocation.fromString('large_fern'),
ResourceLocation.fromString('lava'),
ResourceLocation.fromString('lava_cauldron'),
ResourceLocation.fromString('lectern'),
ResourceLocation.fromString('lever'),
ResourceLocation.fromString('light'),
ResourceLocation.fromString('light_blue_banner'),
ResourceLocation.fromString('light_blue_bed'),
ResourceLocation.fromString('light_blue_candle'),
ResourceLocation.fromString('light_blue_candle_cake'),
ResourceLocation.fromString('light_blue_carpet'),
ResourceLocation.fromString('light_blue_concrete'),
ResourceLocation.fromString('light_blue_concrete_powder'),
ResourceLocation.fromString('light_blue_glazed_terracotta'),
ResourceLocation.fromString('light_blue_shulker_box'),
ResourceLocation.fromString('light_blue_stained_glass'),
ResourceLocation.fromString('light_blue_stained_glass_pane'),
ResourceLocation.fromString('light_blue_terracotta'),
ResourceLocation.fromString('light_blue_wall_banner'),
ResourceLocation.fromString('light_blue_wool'),
ResourceLocation.fromString('light_gray_banner'),
ResourceLocation.fromString('light_gray_bed'),
ResourceLocation.fromString('light_gray_candle'),
ResourceLocation.fromString('light_gray_candle_cake'),
ResourceLocation.fromString('light_gray_carpet'),
ResourceLocation.fromString('light_gray_concrete'),
ResourceLocation.fromString('light_gray_concrete_powder'),
ResourceLocation.fromString('light_gray_glazed_terracotta'),
ResourceLocation.fromString('light_gray_shulker_box'),
ResourceLocation.fromString('light_gray_stained_glass'),
ResourceLocation.fromString('light_gray_stained_glass_pane'),
ResourceLocation.fromString('light_gray_terracotta'),
ResourceLocation.fromString('light_gray_wall_banner'),
ResourceLocation.fromString('light_gray_wool'),
ResourceLocation.fromString('light_weighted_pressure_plate'),
ResourceLocation.fromString('lightning_rod'),
ResourceLocation.fromString('lilac'),
ResourceLocation.fromString('lily_of_the_valley'),
ResourceLocation.fromString('lily_pad'),
ResourceLocation.fromString('lime_banner'),
ResourceLocation.fromString('lime_bed'),
ResourceLocation.fromString('lime_candle'),
ResourceLocation.fromString('lime_candle_cake'),
ResourceLocation.fromString('lime_carpet'),
ResourceLocation.fromString('lime_concrete'),
ResourceLocation.fromString('lime_concrete_powder'),
ResourceLocation.fromString('lime_glazed_terracotta'),
ResourceLocation.fromString('lime_shulker_box'),
ResourceLocation.fromString('lime_stained_glass'),
ResourceLocation.fromString('lime_stained_glass_pane'),
ResourceLocation.fromString('lime_terracotta'),
ResourceLocation.fromString('lime_wall_banner'),
ResourceLocation.fromString('lime_wool'),
ResourceLocation.fromString('lodestone'),
ResourceLocation.fromString('loom'),
ResourceLocation.fromString('magenta_banner'),
ResourceLocation.fromString('magenta_bed'),
ResourceLocation.fromString('magenta_candle'),
ResourceLocation.fromString('magenta_candle_cake'),
ResourceLocation.fromString('magenta_carpet'),
ResourceLocation.fromString('magenta_concrete'),
ResourceLocation.fromString('magenta_concrete_powder'),
ResourceLocation.fromString('magenta_glazed_terracotta'),
ResourceLocation.fromString('magenta_shulker_box'),
ResourceLocation.fromString('magenta_stained_glass'),
ResourceLocation.fromString('magenta_stained_glass_pane'),
ResourceLocation.fromString('magenta_terracotta'),
ResourceLocation.fromString('magenta_wall_banner'),
ResourceLocation.fromString('magenta_wool'),
ResourceLocation.fromString('magma_block'),
ResourceLocation.fromString('medium_amethyst_bud'),
ResourceLocation.fromString('melon'),
ResourceLocation.fromString('melon_stem'),
ResourceLocation.fromString('moss_block'),
ResourceLocation.fromString('moss_carpet'),
ResourceLocation.fromString('mossy_cobblestone'),
ResourceLocation.fromString('mossy_cobblestone_slab'),
ResourceLocation.fromString('mossy_cobblestone_stairs'),
ResourceLocation.fromString('mossy_cobblestone_wall'),
ResourceLocation.fromString('mossy_stone_brick_slab'),
ResourceLocation.fromString('mossy_stone_brick_stairs'),
ResourceLocation.fromString('mossy_stone_brick_wall'),
ResourceLocation.fromString('mossy_stone_bricks'),
ResourceLocation.fromString('moving_piston'),
ResourceLocation.fromString('mushroom_stem'),
ResourceLocation.fromString('mycelium'),
ResourceLocation.fromString('nether_brick_fence'),
ResourceLocation.fromString('nether_brick_slab'),
ResourceLocation.fromString('nether_brick_stairs'),
ResourceLocation.fromString('nether_brick_wall'),
ResourceLocation.fromString('nether_bricks'),
ResourceLocation.fromString('nether_gold_ore'),
ResourceLocation.fromString('nether_portal'),
ResourceLocation.fromString('nether_quartz_ore'),
ResourceLocation.fromString('nether_sprouts'),
ResourceLocation.fromString('nether_wart'),
ResourceLocation.fromString('nether_wart_block'),
ResourceLocation.fromString('netherite_block'),
ResourceLocation.fromString('netherrack'),
ResourceLocation.fromString('note_block'),
ResourceLocation.fromString('oak_button'),
ResourceLocation.fromString('oak_door'),
ResourceLocation.fromString('oak_fence'),
ResourceLocation.fromString('oak_fence_gate'),
ResourceLocation.fromString('oak_leaves'),
ResourceLocation.fromString('oak_log'),
ResourceLocation.fromString('oak_planks'),
ResourceLocation.fromString('oak_pressure_plate'),
ResourceLocation.fromString('oak_sapling'),
ResourceLocation.fromString('oak_sign'),
ResourceLocation.fromString('oak_slab'),
ResourceLocation.fromString('oak_stairs'),
ResourceLocation.fromString('oak_trapdoor'),
ResourceLocation.fromString('oak_wall_sign'),
ResourceLocation.fromString('oak_wood'),
ResourceLocation.fromString('observer'),
ResourceLocation.fromString('obsidian'),
ResourceLocation.fromString('orange_banner'),
ResourceLocation.fromString('orange_bed'),
ResourceLocation.fromString('orange_candle'),
ResourceLocation.fromString('orange_candle_cake'),
ResourceLocation.fromString('orange_carpet'),
ResourceLocation.fromString('orange_concrete'),
ResourceLocation.fromString('orange_concrete_powder'),
ResourceLocation.fromString('orange_glazed_terracotta'),
ResourceLocation.fromString('orange_shulker_box'),
ResourceLocation.fromString('orange_stained_glass'),
ResourceLocation.fromString('orange_stained_glass_pane'),
ResourceLocation.fromString('orange_terracotta'),
ResourceLocation.fromString('orange_tulip'),
ResourceLocation.fromString('orange_wall_banner'),
ResourceLocation.fromString('orange_wool'),
ResourceLocation.fromString('oxeye_daisy'),
ResourceLocation.fromString('oxidized_copper'),
ResourceLocation.fromString('oxidized_cut_copper'),
ResourceLocation.fromString('oxidized_cut_copper_slab'),
ResourceLocation.fromString('oxidized_cut_copper_stairs'),
ResourceLocation.fromString('packed_ice'),
ResourceLocation.fromString('peony'),
ResourceLocation.fromString('petrified_oak_slab'),
ResourceLocation.fromString('pink_banner'),
ResourceLocation.fromString('pink_bed'),
ResourceLocation.fromString('pink_candle'),
ResourceLocation.fromString('pink_candle_cake'),
ResourceLocation.fromString('pink_carpet'),
ResourceLocation.fromString('pink_concrete'),
ResourceLocation.fromString('pink_concrete_powder'),
ResourceLocation.fromString('pink_glazed_terracotta'),
ResourceLocation.fromString('pink_shulker_box'),
ResourceLocation.fromString('pink_stained_glass'),
ResourceLocation.fromString('pink_stained_glass_pane'),
ResourceLocation.fromString('pink_terracotta'),
ResourceLocation.fromString('pink_tulip'),
ResourceLocation.fromString('pink_wall_banner'),
ResourceLocation.fromString('pink_wool'),
ResourceLocation.fromString('piston'),
ResourceLocation.fromString('piston_head'),
ResourceLocation.fromString('player_head'),
ResourceLocation.fromString('player_wall_head'),
ResourceLocation.fromString('podzol'),
ResourceLocation.fromString('pointed_dripstone'),
ResourceLocation.fromString('polished_andesite'),
ResourceLocation.fromString('polished_andesite_slab'),
ResourceLocation.fromString('polished_andesite_stairs'),
ResourceLocation.fromString('polished_basalt'),
ResourceLocation.fromString('polished_blackstone'),
ResourceLocation.fromString('polished_blackstone_brick_slab'),
ResourceLocation.fromString('polished_blackstone_brick_stairs'),
ResourceLocation.fromString('polished_blackstone_brick_wall'),
ResourceLocation.fromString('polished_blackstone_bricks'),
ResourceLocation.fromString('polished_blackstone_button'),
ResourceLocation.fromString('polished_blackstone_pressure_plate'),
ResourceLocation.fromString('polished_blackstone_slab'),
ResourceLocation.fromString('polished_blackstone_stairs'),
ResourceLocation.fromString('polished_blackstone_wall'),
ResourceLocation.fromString('polished_deepslate'),
ResourceLocation.fromString('polished_deepslate_slab'),
ResourceLocation.fromString('polished_deepslate_stairs'),
ResourceLocation.fromString('polished_deepslate_wall'),
ResourceLocation.fromString('polished_diorite'),
ResourceLocation.fromString('polished_diorite_slab'),
ResourceLocation.fromString('polished_diorite_stairs'),
ResourceLocation.fromString('polished_granite'),
ResourceLocation.fromString('polished_granite_slab'),
ResourceLocation.fromString('polished_granite_stairs'),
ResourceLocation.fromString('poppy'),
ResourceLocation.fromString('potatoes'),
ResourceLocation.fromString('potted_acacia_sapling'),
ResourceLocation.fromString('potted_allium'),
ResourceLocation.fromString('potted_azalea_bush'),
ResourceLocation.fromString('potted_azure_bluet'),
ResourceLocation.fromString('potted_bamboo'),
ResourceLocation.fromString('potted_birch_sapling'),
ResourceLocation.fromString('potted_blue_orchid'),
ResourceLocation.fromString('potted_brown_mushroom'),
ResourceLocation.fromString('potted_cactus'),
ResourceLocation.fromString('potted_cornflower'),
ResourceLocation.fromString('potted_crimson_fungus'),
ResourceLocation.fromString('potted_crimson_roots'),
ResourceLocation.fromString('potted_dandelion'),
ResourceLocation.fromString('potted_dark_oak_sapling'),
ResourceLocation.fromString('potted_dead_bush'),
ResourceLocation.fromString('potted_fern'),
ResourceLocation.fromString('potted_flowering_azalea_bush'),
ResourceLocation.fromString('potted_jungle_sapling'),
ResourceLocation.fromString('potted_lily_of_the_valley'),
ResourceLocation.fromString('potted_oak_sapling'),
ResourceLocation.fromString('potted_orange_tulip'),
ResourceLocation.fromString('potted_oxeye_daisy'),
ResourceLocation.fromString('potted_pink_tulip'),
ResourceLocation.fromString('potted_poppy'),
ResourceLocation.fromString('potted_red_mushroom'),
ResourceLocation.fromString('potted_red_tulip'),
ResourceLocation.fromString('potted_spruce_sapling'),
ResourceLocation.fromString('potted_warped_fungus'),
ResourceLocation.fromString('potted_warped_roots'),
ResourceLocation.fromString('potted_white_tulip'),
ResourceLocation.fromString('potted_wither_rose'),
ResourceLocation.fromString('powder_snow'),
ResourceLocation.fromString('powder_snow_cauldron'),
ResourceLocation.fromString('powered_rail'),
ResourceLocation.fromString('prismarine'),
ResourceLocation.fromString('prismarine_brick_slab'),
ResourceLocation.fromString('prismarine_brick_stairs'),
ResourceLocation.fromString('prismarine_bricks'),
ResourceLocation.fromString('prismarine_slab'),
ResourceLocation.fromString('prismarine_stairs'),
ResourceLocation.fromString('prismarine_wall'),
ResourceLocation.fromString('pumpkin'),
ResourceLocation.fromString('pumpkin_stem'),
ResourceLocation.fromString('purple_banner'),
ResourceLocation.fromString('purple_bed'),
ResourceLocation.fromString('purple_candle'),
ResourceLocation.fromString('purple_candle_cake'),
ResourceLocation.fromString('purple_carpet'),
ResourceLocation.fromString('purple_concrete'),
ResourceLocation.fromString('purple_concrete_powder'),
ResourceLocation.fromString('purple_glazed_terracotta'),
ResourceLocation.fromString('purple_shulker_box'),
ResourceLocation.fromString('purple_stained_glass'),
ResourceLocation.fromString('purple_stained_glass_pane'),
ResourceLocation.fromString('purple_terracotta'),
ResourceLocation.fromString('purple_wall_banner'),
ResourceLocation.fromString('purple_wool'),
ResourceLocation.fromString('purpur_block'),
ResourceLocation.fromString('purpur_pillar'),
ResourceLocation.fromString('purpur_slab'),
ResourceLocation.fromString('purpur_stairs'),
ResourceLocation.fromString('quartz_block'),
ResourceLocation.fromString('quartz_bricks'),
ResourceLocation.fromString('quartz_pillar'),
ResourceLocation.fromString('quartz_slab'),
ResourceLocation.fromString('quartz_stairs'),
ResourceLocation.fromString('rail'),
ResourceLocation.fromString('raw_copper_block'),
ResourceLocation.fromString('raw_gold_block'),
ResourceLocation.fromString('raw_iron_block'),
ResourceLocation.fromString('red_banner'),
ResourceLocation.fromString('red_bed'),
ResourceLocation.fromString('red_candle'),
ResourceLocation.fromString('red_candle_cake'),
ResourceLocation.fromString('red_carpet'),
ResourceLocation.fromString('red_concrete'),
ResourceLocation.fromString('red_concrete_powder'),
ResourceLocation.fromString('red_glazed_terracotta'),
ResourceLocation.fromString('red_mushroom'),
ResourceLocation.fromString('red_mushroom_block'),
ResourceLocation.fromString('red_nether_brick_slab'),
ResourceLocation.fromString('red_nether_brick_stairs'),
ResourceLocation.fromString('red_nether_brick_wall'),
ResourceLocation.fromString('red_nether_bricks'),
ResourceLocation.fromString('red_sand'),
ResourceLocation.fromString('red_sandstone'),
ResourceLocation.fromString('red_sandstone_slab'),
ResourceLocation.fromString('red_sandstone_stairs'),
ResourceLocation.fromString('red_sandstone_wall'),
ResourceLocation.fromString('red_shulker_box'),
ResourceLocation.fromString('red_stained_glass'),
ResourceLocation.fromString('red_stained_glass_pane'),
ResourceLocation.fromString('red_terracotta'),
ResourceLocation.fromString('red_tulip'),
ResourceLocation.fromString('red_wall_banner'),
ResourceLocation.fromString('red_wool'),
ResourceLocation.fromString('redstone_block'),
ResourceLocation.fromString('redstone_lamp'),
ResourceLocation.fromString('redstone_ore'),
ResourceLocation.fromString('redstone_torch'),
ResourceLocation.fromString('redstone_wall_torch'),
ResourceLocation.fromString('redstone_wire'),
ResourceLocation.fromString('repeater'),
ResourceLocation.fromString('repeating_command_block'),
ResourceLocation.fromString('respawn_anchor'),
ResourceLocation.fromString('rooted_dirt'),
ResourceLocation.fromString('rose_bush'),
ResourceLocation.fromString('sand'),
ResourceLocation.fromString('sandstone'),
ResourceLocation.fromString('sandstone_slab'),
ResourceLocation.fromString('sandstone_stairs'),
ResourceLocation.fromString('sandstone_wall'),
ResourceLocation.fromString('scaffolding'),
ResourceLocation.fromString('sculk_sensor'),
ResourceLocation.fromString('sea_lantern'),
ResourceLocation.fromString('sea_pickle'),
ResourceLocation.fromString('seagrass'),
ResourceLocation.fromString('shroomlight'),
ResourceLocation.fromString('shulker_box'),
ResourceLocation.fromString('skeleton_skull'),
ResourceLocation.fromString('skeleton_wall_skull'),
ResourceLocation.fromString('slime_block'),
ResourceLocation.fromString('small_amethyst_bud'),
ResourceLocation.fromString('small_dripleaf'),
ResourceLocation.fromString('smithing_table'),
ResourceLocation.fromString('smoker'),
ResourceLocation.fromString('smooth_basalt'),
ResourceLocation.fromString('smooth_quartz'),
ResourceLocation.fromString('smooth_quartz_slab'),
ResourceLocation.fromString('smooth_quartz_stairs'),
ResourceLocation.fromString('smooth_red_sandstone'),
ResourceLocation.fromString('smooth_red_sandstone_slab'),
ResourceLocation.fromString('smooth_red_sandstone_stairs'),
ResourceLocation.fromString('smooth_sandstone'),
ResourceLocation.fromString('smooth_sandstone_slab'),
ResourceLocation.fromString('smooth_sandstone_stairs'),
ResourceLocation.fromString('smooth_stone'),
ResourceLocation.fromString('smooth_stone_slab'),
ResourceLocation.fromString('snow'),
ResourceLocation.fromString('snow_block'),
ResourceLocation.fromString('soul_campfire'),
ResourceLocation.fromString('soul_fire'),
ResourceLocation.fromString('soul_lantern'),
ResourceLocation.fromString('soul_sand'),
ResourceLocation.fromString('soul_soil'),
ResourceLocation.fromString('soul_torch'),
ResourceLocation.fromString('soul_wall_torch'),
ResourceLocation.fromString('spawner'),
ResourceLocation.fromString('sponge'),
ResourceLocation.fromString('spore_blossom'),
ResourceLocation.fromString('spruce_button'),
ResourceLocation.fromString('spruce_door'),
ResourceLocation.fromString('spruce_fence'),
ResourceLocation.fromString('spruce_fence_gate'),
ResourceLocation.fromString('spruce_leaves'),
ResourceLocation.fromString('spruce_log'),
ResourceLocation.fromString('spruce_planks'),
ResourceLocation.fromString('spruce_pressure_plate'),
ResourceLocation.fromString('spruce_sapling'),
ResourceLocation.fromString('spruce_sign'),
ResourceLocation.fromString('spruce_slab'),
ResourceLocation.fromString('spruce_stairs'),
ResourceLocation.fromString('spruce_trapdoor'),
ResourceLocation.fromString('spruce_wall_sign'),
ResourceLocation.fromString('spruce_wood'),
ResourceLocation.fromString('sticky_piston'),
ResourceLocation.fromString('stone'),
ResourceLocation.fromString('stone_brick_slab'),
ResourceLocation.fromString('stone_brick_stairs'),
ResourceLocation.fromString('stone_brick_wall'),
ResourceLocation.fromString('stone_bricks'),
ResourceLocation.fromString('stone_button'),
ResourceLocation.fromString('stone_pressure_plate'),
ResourceLocation.fromString('stone_slab'),
ResourceLocation.fromString('stone_stairs'),
ResourceLocation.fromString('stonecutter'),
ResourceLocation.fromString('stripped_acacia_log'),
ResourceLocation.fromString('stripped_acacia_wood'),
ResourceLocation.fromString('stripped_birch_log'),
ResourceLocation.fromString('stripped_birch_wood'),
ResourceLocation.fromString('stripped_crimson_hyphae'),
ResourceLocation.fromString('stripped_crimson_stem'),
ResourceLocation.fromString('stripped_dark_oak_log'),
ResourceLocation.fromString('stripped_dark_oak_wood'),
ResourceLocation.fromString('stripped_jungle_log'),
ResourceLocation.fromString('stripped_jungle_wood'),
ResourceLocation.fromString('stripped_oak_log'),
ResourceLocation.fromString('stripped_oak_wood'),
ResourceLocation.fromString('stripped_spruce_log'),
ResourceLocation.fromString('stripped_spruce_wood'),
ResourceLocation.fromString('stripped_warped_hyphae'),
ResourceLocation.fromString('stripped_warped_stem'),
ResourceLocation.fromString('structure_block'),
ResourceLocation.fromString('structure_void'),
ResourceLocation.fromString('sugar_cane'),
ResourceLocation.fromString('sunflower'),
ResourceLocation.fromString('sweet_berry_bush'),
ResourceLocation.fromString('tall_grass'),
ResourceLocation.fromString('tall_seagrass'),
ResourceLocation.fromString('target'),
ResourceLocation.fromString('terracotta'),
ResourceLocation.fromString('tinted_glass'),
ResourceLocation.fromString('tnt'),
ResourceLocation.fromString('torch'),
ResourceLocation.fromString('trapped_chest'),
ResourceLocation.fromString('tripwire'),
ResourceLocation.fromString('tripwire_hook'),
ResourceLocation.fromString('tube_coral'),
ResourceLocation.fromString('tube_coral_block'),
ResourceLocation.fromString('tube_coral_fan'),
ResourceLocation.fromString('tube_coral_wall_fan'),
ResourceLocation.fromString('tuff'),
ResourceLocation.fromString('turtle_egg'),
ResourceLocation.fromString('twisting_vines'),
ResourceLocation.fromString('twisting_vines_plant'),
ResourceLocation.fromString('vine'),
ResourceLocation.fromString('void_air'),
ResourceLocation.fromString('wall_torch'),
ResourceLocation.fromString('warped_button'),
ResourceLocation.fromString('warped_door'),
ResourceLocation.fromString('warped_fence'),
ResourceLocation.fromString('warped_fence_gate'),
ResourceLocation.fromString('warped_fungus'),
ResourceLocation.fromString('warped_hyphae'),
ResourceLocation.fromString('warped_nylium'),
ResourceLocation.fromString('warped_planks'),
ResourceLocation.fromString('warped_pressure_plate'),
ResourceLocation.fromString('warped_roots'),
ResourceLocation.fromString('warped_sign'),
ResourceLocation.fromString('warped_slab'),
ResourceLocation.fromString('warped_stairs'),
ResourceLocation.fromString('warped_stem'),
ResourceLocation.fromString('warped_trapdoor'),
ResourceLocation.fromString('warped_wall_sign'),
ResourceLocation.fromString('warped_wart_block'),
ResourceLocation.fromString('water'),
ResourceLocation.fromString('water_cauldron'),
ResourceLocation.fromString('waxed_copper_block'),
ResourceLocation.fromString('waxed_cut_copper'),
ResourceLocation.fromString('waxed_cut_copper_slab'),
ResourceLocation.fromString('waxed_cut_copper_stairs'),
ResourceLocation.fromString('waxed_exposed_copper'),
ResourceLocation.fromString('waxed_exposed_cut_copper'),
ResourceLocation.fromString('waxed_exposed_cut_copper_slab'),
ResourceLocation.fromString('waxed_exposed_cut_copper_stairs'),
ResourceLocation.fromString('waxed_oxidized_copper'),
ResourceLocation.fromString('waxed_oxidized_cut_copper'),
ResourceLocation.fromString('waxed_oxidized_cut_copper_slab'),
ResourceLocation.fromString('waxed_oxidized_cut_copper_stairs'),
ResourceLocation.fromString('waxed_weathered_copper'),
ResourceLocation.fromString('waxed_weathered_cut_copper'),
ResourceLocation.fromString('waxed_weathered_cut_copper_slab'),
ResourceLocation.fromString('waxed_weathered_cut_copper_stairs'),
ResourceLocation.fromString('weathered_copper'),
ResourceLocation.fromString('weathered_cut_copper'),
ResourceLocation.fromString('weathered_cut_copper_slab'),
ResourceLocation.fromString('weathered_cut_copper_stairs'),
ResourceLocation.fromString('weeping_vines'),
ResourceLocation.fromString('weeping_vines_plant'),
ResourceLocation.fromString('wet_sponge'),
ResourceLocation.fromString('wheat'),
ResourceLocation.fromString('white_banner'),
ResourceLocation.fromString('white_bed'),
ResourceLocation.fromString('white_candle'),
ResourceLocation.fromString('white_candle_cake'),
ResourceLocation.fromString('white_carpet'),
ResourceLocation.fromString('white_concrete'),
ResourceLocation.fromString('white_concrete_powder'),
ResourceLocation.fromString('white_glazed_terracotta'),
ResourceLocation.fromString('white_shulker_box'),
ResourceLocation.fromString('white_stained_glass'),
ResourceLocation.fromString('white_stained_glass_pane'),
ResourceLocation.fromString('white_terracotta'),
ResourceLocation.fromString('white_tulip'),
ResourceLocation.fromString('white_wall_banner'),
ResourceLocation.fromString('white_wool'),
ResourceLocation.fromString('wither_rose'),
ResourceLocation.fromString('wither_skeleton_skull'),
ResourceLocation.fromString('wither_skeleton_wall_skull'),
ResourceLocation.fromString('yellow_banner'),
ResourceLocation.fromString('yellow_bed'),
ResourceLocation.fromString('yellow_candle'),
ResourceLocation.fromString('yellow_candle_cake'),
ResourceLocation.fromString('yellow_carpet'),
ResourceLocation.fromString('yellow_concrete'),
ResourceLocation.fromString('yellow_concrete_powder'),
ResourceLocation.fromString('yellow_glazed_terracotta'),
ResourceLocation.fromString('yellow_shulker_box'),
ResourceLocation.fromString('yellow_stained_glass'),
ResourceLocation.fromString('yellow_stained_glass_pane'),
ResourceLocation.fromString('yellow_terracotta'),
ResourceLocation.fromString('yellow_wall_banner'),
ResourceLocation.fromString('yellow_wool'),
ResourceLocation.fromString('zombie_head'),
ResourceLocation.fromString('zombie_wall_head'),
}
# compiled from the Minecraft wiki:
FLUIDS: set[ResourceLocation] = {
ResourceLocation.fromString('empty'),
ResourceLocation.fromString('flowing_lava'),
ResourceLocation.fromString('flowing_water'),
ResourceLocation.fromString('lava'),
ResourceLocation.fromString('water'),
}
# compiled from the Minecraft wiki:
ITEMS: set[ResourceLocation] = {
	ResourceLocation.fromString('air'),  # arguably not a real item, but included for completeness
ResourceLocation.fromString('acacia_boat'),
ResourceLocation.fromString('amethyst_shard'),
ResourceLocation.fromString('apple'),
ResourceLocation.fromString('armor_stand'),
ResourceLocation.fromString('arrow'),
ResourceLocation.fromString('axolotl_bucket'),
ResourceLocation.fromString('axolotl_spawn_egg'),
ResourceLocation.fromString('baked_potato'),
ResourceLocation.fromString('bat_spawn_egg'),
ResourceLocation.fromString('bee_spawn_egg'),
ResourceLocation.fromString('beef'),
ResourceLocation.fromString('beetroot'),
ResourceLocation.fromString('beetroot_seeds'),
ResourceLocation.fromString('beetroot_soup'),
ResourceLocation.fromString('birch_boat'),
ResourceLocation.fromString('black_dye'),
ResourceLocation.fromString('blaze_powder'),
ResourceLocation.fromString('blaze_rod'),
ResourceLocation.fromString('blaze_spawn_egg'),
ResourceLocation.fromString('blue_dye'),
ResourceLocation.fromString('bone'),
ResourceLocation.fromString('bone_meal'),
ResourceLocation.fromString('book'),
ResourceLocation.fromString('bow'),
ResourceLocation.fromString('bowl'),
ResourceLocation.fromString('bread'),
ResourceLocation.fromString('brick'),
ResourceLocation.fromString('brown_dye'),
ResourceLocation.fromString('bucket'),
ResourceLocation.fromString('bundle'),
ResourceLocation.fromString('carrot'),
ResourceLocation.fromString('carrot_on_a_stick'),
ResourceLocation.fromString('cat_spawn_egg'),
ResourceLocation.fromString('cave_spider_spawn_egg'),
ResourceLocation.fromString('chainmail_boots'),
ResourceLocation.fromString('chainmail_chestplate'),
ResourceLocation.fromString('chainmail_helmet'),
ResourceLocation.fromString('chainmail_leggings'),
ResourceLocation.fromString('charcoal'),
ResourceLocation.fromString('chest_minecart'),
ResourceLocation.fromString('chicken'),
ResourceLocation.fromString('chicken_spawn_egg'),
ResourceLocation.fromString('chorus_fruit'),
ResourceLocation.fromString('clay_ball'),
ResourceLocation.fromString('clock'),
ResourceLocation.fromString('coal'),
ResourceLocation.fromString('cocoa_beans'),
ResourceLocation.fromString('cod'),
ResourceLocation.fromString('cod_bucket'),
ResourceLocation.fromString('cod_spawn_egg'),
ResourceLocation.fromString('command_block_minecart'),
ResourceLocation.fromString('compass'),
ResourceLocation.fromString('cooked_beef'),
ResourceLocation.fromString('cooked_chicken'),
ResourceLocation.fromString('cooked_cod'),
ResourceLocation.fromString('cooked_mutton'),
ResourceLocation.fromString('cooked_porkchop'),
ResourceLocation.fromString('cooked_rabbit'),
ResourceLocation.fromString('cooked_salmon'),
ResourceLocation.fromString('cookie'),
ResourceLocation.fromString('copper_ingot'),
ResourceLocation.fromString('cow_spawn_egg'),
ResourceLocation.fromString('creeper_banner_pattern'),
ResourceLocation.fromString('creeper_spawn_egg'),
ResourceLocation.fromString('crossbow'),
ResourceLocation.fromString('cyan_dye'),
ResourceLocation.fromString('dark_oak_boat'),
ResourceLocation.fromString('debug_stick'),
ResourceLocation.fromString('diamond'),
ResourceLocation.fromString('diamond_axe'),
ResourceLocation.fromString('diamond_boots'),
ResourceLocation.fromString('diamond_chestplate'),
ResourceLocation.fromString('diamond_helmet'),
ResourceLocation.fromString('diamond_hoe'),
ResourceLocation.fromString('diamond_horse_armor'),
ResourceLocation.fromString('diamond_leggings'),
ResourceLocation.fromString('diamond_pickaxe'),
ResourceLocation.fromString('diamond_shovel'),
ResourceLocation.fromString('diamond_sword'),
ResourceLocation.fromString('dolphin_spawn_egg'),
ResourceLocation.fromString('donkey_spawn_egg'),
ResourceLocation.fromString('dragon_breath'),
ResourceLocation.fromString('dried_kelp'),
ResourceLocation.fromString('drowned_spawn_egg'),
ResourceLocation.fromString('egg'),
ResourceLocation.fromString('elder_guardian_spawn_egg'),
ResourceLocation.fromString('elytra'),
ResourceLocation.fromString('emerald'),
ResourceLocation.fromString('enchanted_book'),
ResourceLocation.fromString('enchanted_golden_apple'),
ResourceLocation.fromString('end_crystal'),
ResourceLocation.fromString('ender_eye'),
ResourceLocation.fromString('ender_pearl'),
ResourceLocation.fromString('enderman_spawn_egg'),
ResourceLocation.fromString('endermite_spawn_egg'),
ResourceLocation.fromString('evoker_spawn_egg'),
ResourceLocation.fromString('experience_bottle'),
ResourceLocation.fromString('feather'),
ResourceLocation.fromString('fermented_spider_eye'),
ResourceLocation.fromString('filled_map'),
ResourceLocation.fromString('fire_charge'),
ResourceLocation.fromString('firework_rocket'),
ResourceLocation.fromString('firework_star'),
ResourceLocation.fromString('fishing_rod'),
ResourceLocation.fromString('flint'),
ResourceLocation.fromString('flint_and_steel'),
ResourceLocation.fromString('flower_banner_pattern'),
ResourceLocation.fromString('fox_spawn_egg'),
ResourceLocation.fromString('furnace_minecart'),
ResourceLocation.fromString('ghast_spawn_egg'),
ResourceLocation.fromString('ghast_tear'),
ResourceLocation.fromString('glass_bottle'),
ResourceLocation.fromString('glistering_melon_slice'),
ResourceLocation.fromString('globe_banner_pattern'),
ResourceLocation.fromString('glow_berries'),
ResourceLocation.fromString('glow_ink_sac'),
ResourceLocation.fromString('glow_item_frame'),
ResourceLocation.fromString('glow_squid_spawn_egg'),
ResourceLocation.fromString('glowstone_dust'),
ResourceLocation.fromString('goat_spawn_egg'),
ResourceLocation.fromString('gold_ingot'),
ResourceLocation.fromString('gold_nugget'),
ResourceLocation.fromString('golden_apple'),
ResourceLocation.fromString('golden_axe'),
ResourceLocation.fromString('golden_boots'),
ResourceLocation.fromString('golden_carrot'),
ResourceLocation.fromString('golden_chestplate'),
ResourceLocation.fromString('golden_helmet'),
ResourceLocation.fromString('golden_hoe'),
ResourceLocation.fromString('golden_horse_armor'),
ResourceLocation.fromString('golden_leggings'),
ResourceLocation.fromString('golden_pickaxe'),
ResourceLocation.fromString('golden_shovel'),
ResourceLocation.fromString('golden_sword'),
ResourceLocation.fromString('gray_dye'),
ResourceLocation.fromString('green_dye'),
ResourceLocation.fromString('guardian_spawn_egg'),
ResourceLocation.fromString('gunpowder'),
ResourceLocation.fromString('heart_of_the_sea'),
ResourceLocation.fromString('hoglin_spawn_egg'),
ResourceLocation.fromString('honey_bottle'),
ResourceLocation.fromString('honeycomb'),
ResourceLocation.fromString('hopper_minecart'),
ResourceLocation.fromString('horse_spawn_egg'),
ResourceLocation.fromString('husk_spawn_egg'),
ResourceLocation.fromString('ink_sac'),
ResourceLocation.fromString('iron_axe'),
ResourceLocation.fromString('iron_boots'),
ResourceLocation.fromString('iron_chestplate'),
ResourceLocation.fromString('iron_helmet'),
ResourceLocation.fromString('iron_hoe'),
ResourceLocation.fromString('iron_horse_armor'),
ResourceLocation.fromString('iron_ingot'),
ResourceLocation.fromString('iron_leggings'),
ResourceLocation.fromString('iron_nugget'),
ResourceLocation.fromString('iron_pickaxe'),
ResourceLocation.fromString('iron_shovel'),
ResourceLocation.fromString('iron_sword'),
ResourceLocation.fromString('item_frame'),
ResourceLocation.fromString('jungle_boat'),
ResourceLocation.fromString('knowledge_book'),
ResourceLocation.fromString('lapis_lazuli'),
ResourceLocation.fromString('lava_bucket'),
ResourceLocation.fromString('lead'),
ResourceLocation.fromString('leather'),
ResourceLocation.fromString('leather_boots'),
ResourceLocation.fromString('leather_chestplate'),
ResourceLocation.fromString('leather_helmet'),
ResourceLocation.fromString('leather_horse_armor'),
ResourceLocation.fromString('leather_leggings'),
ResourceLocation.fromString('light_blue_dye'),
ResourceLocation.fromString('light_gray_dye'),
ResourceLocation.fromString('lime_dye'),
ResourceLocation.fromString('lingering_potion'),
ResourceLocation.fromString('llama_spawn_egg'),
ResourceLocation.fromString('magenta_dye'),
ResourceLocation.fromString('magma_cream'),
ResourceLocation.fromString('magma_cube_spawn_egg'),
ResourceLocation.fromString('map'),
ResourceLocation.fromString('melon_seeds'),
ResourceLocation.fromString('melon_slice'),
ResourceLocation.fromString('milk_bucket'),
ResourceLocation.fromString('minecart'),
ResourceLocation.fromString('mojang_banner_pattern'),
ResourceLocation.fromString('mooshroom_spawn_egg'),
ResourceLocation.fromString('mule_spawn_egg'),
ResourceLocation.fromString('mushroom_stew'),
ResourceLocation.fromString('music_disc_11'),
ResourceLocation.fromString('music_disc_13'),
ResourceLocation.fromString('music_disc_blocks'),
ResourceLocation.fromString('music_disc_cat'),
ResourceLocation.fromString('music_disc_chirp'),
ResourceLocation.fromString('music_disc_far'),
ResourceLocation.fromString('music_disc_mall'),
ResourceLocation.fromString('music_disc_mellohi'),
ResourceLocation.fromString('music_disc_pigstep'),
ResourceLocation.fromString('music_disc_stal'),
ResourceLocation.fromString('music_disc_strad'),
ResourceLocation.fromString('music_disc_wait'),
ResourceLocation.fromString('music_disc_ward'),
ResourceLocation.fromString('mutton'),
ResourceLocation.fromString('name_tag'),
ResourceLocation.fromString('nautilus_shell'),
ResourceLocation.fromString('nether_brick'),
ResourceLocation.fromString('nether_star'),
ResourceLocation.fromString('nether_wart'),
ResourceLocation.fromString('netherite_axe'),
ResourceLocation.fromString('netherite_boots'),
ResourceLocation.fromString('netherite_chestplate'),
ResourceLocation.fromString('netherite_helmet'),
ResourceLocation.fromString('netherite_hoe'),
ResourceLocation.fromString('netherite_ingot'),
ResourceLocation.fromString('netherite_leggings'),
ResourceLocation.fromString('netherite_pickaxe'),
ResourceLocation.fromString('netherite_scrap'),
ResourceLocation.fromString('netherite_shovel'),
ResourceLocation.fromString('netherite_sword'),
ResourceLocation.fromString('oak_boat'),
ResourceLocation.fromString('ocelot_spawn_egg'),
ResourceLocation.fromString('orange_dye'),
ResourceLocation.fromString('painting'),
ResourceLocation.fromString('panda_spawn_egg'),
ResourceLocation.fromString('paper'),
ResourceLocation.fromString('parrot_spawn_egg'),
ResourceLocation.fromString('phantom_membrane'),
ResourceLocation.fromString('phantom_spawn_egg'),
ResourceLocation.fromString('pig_spawn_egg'),
ResourceLocation.fromString('piglin_banner_pattern'),
ResourceLocation.fromString('piglin_brute_spawn_egg'),
ResourceLocation.fromString('piglin_spawn_egg'),
ResourceLocation.fromString('pillager_spawn_egg'),
ResourceLocation.fromString('pink_dye'),
ResourceLocation.fromString('poisonous_potato'),
ResourceLocation.fromString('polar_bear_spawn_egg'),
ResourceLocation.fromString('popped_chorus_fruit'),
ResourceLocation.fromString('porkchop'),
ResourceLocation.fromString('potato'),
ResourceLocation.fromString('potion'),
ResourceLocation.fromString('powder_snow_bucket'),
ResourceLocation.fromString('prismarine_crystals'),
ResourceLocation.fromString('prismarine_shard'),
ResourceLocation.fromString('pufferfish'),
ResourceLocation.fromString('pufferfish_bucket'),
ResourceLocation.fromString('pufferfish_spawn_egg'),
ResourceLocation.fromString('pumpkin_pie'),
ResourceLocation.fromString('pumpkin_seeds'),
ResourceLocation.fromString('purple_dye'),
ResourceLocation.fromString('quartz'),
ResourceLocation.fromString('rabbit'),
ResourceLocation.fromString('rabbit_foot'),
ResourceLocation.fromString('rabbit_hide'),
ResourceLocation.fromString('rabbit_spawn_egg'),
ResourceLocation.fromString('rabbit_stew'),
ResourceLocation.fromString('ravager_spawn_egg'),
ResourceLocation.fromString('raw_copper'),
ResourceLocation.fromString('raw_gold'),
ResourceLocation.fromString('raw_iron'),
ResourceLocation.fromString('red_dye'),
ResourceLocation.fromString('redstone'),
ResourceLocation.fromString('rotten_flesh'),
ResourceLocation.fromString('saddle'),
ResourceLocation.fromString('salmon'),
ResourceLocation.fromString('salmon_bucket'),
ResourceLocation.fromString('salmon_spawn_egg'),
ResourceLocation.fromString('scute'),
ResourceLocation.fromString('shears'),
ResourceLocation.fromString('sheep_spawn_egg'),
ResourceLocation.fromString('shield'),
ResourceLocation.fromString('shulker_shell'),
ResourceLocation.fromString('shulker_spawn_egg'),
ResourceLocation.fromString('silverfish_spawn_egg'),
ResourceLocation.fromString('skeleton_horse_spawn_egg'),
ResourceLocation.fromString('skeleton_spawn_egg'),
ResourceLocation.fromString('skull_banner_pattern'),
ResourceLocation.fromString('slime_ball'),
ResourceLocation.fromString('slime_spawn_egg'),
ResourceLocation.fromString('snowball'),
ResourceLocation.fromString('spectral_arrow'),
ResourceLocation.fromString('spider_eye'),
ResourceLocation.fromString('spider_spawn_egg'),
ResourceLocation.fromString('splash_potion'),
ResourceLocation.fromString('spruce_boat'),
ResourceLocation.fromString('spyglass'),
ResourceLocation.fromString('squid_spawn_egg'),
ResourceLocation.fromString('stick'),
ResourceLocation.fromString('stone_axe'),
ResourceLocation.fromString('stone_hoe'),
ResourceLocation.fromString('stone_pickaxe'),
ResourceLocation.fromString('stone_shovel'),
ResourceLocation.fromString('stone_sword'),
ResourceLocation.fromString('stray_spawn_egg'),
ResourceLocation.fromString('strider_spawn_egg'),
ResourceLocation.fromString('string'),
ResourceLocation.fromString('sugar'),
ResourceLocation.fromString('suspicious_stew'),
ResourceLocation.fromString('sweet_berries'),
ResourceLocation.fromString('tipped_arrow'),
ResourceLocation.fromString('tnt_minecart'),
ResourceLocation.fromString('totem_of_undying'),
ResourceLocation.fromString('trader_llama_spawn_egg'),
ResourceLocation.fromString('trident'),
ResourceLocation.fromString('tropical_fish'),
ResourceLocation.fromString('tropical_fish_bucket'),
ResourceLocation.fromString('tropical_fish_spawn_egg'),
ResourceLocation.fromString('turtle_helmet'),
ResourceLocation.fromString('turtle_spawn_egg'),
ResourceLocation.fromString('vex_spawn_egg'),
ResourceLocation.fromString('villager_spawn_egg'),
ResourceLocation.fromString('vindicator_spawn_egg'),
ResourceLocation.fromString('wandering_trader_spawn_egg'),
ResourceLocation.fromString('warped_fungus_on_a_stick'),
ResourceLocation.fromString('water_bucket'),
ResourceLocation.fromString('wheat'),
ResourceLocation.fromString('wheat_seeds'),
ResourceLocation.fromString('white_dye'),
ResourceLocation.fromString('witch_spawn_egg'),
ResourceLocation.fromString('wither_skeleton_spawn_egg'),
ResourceLocation.fromString('wolf_spawn_egg'),
ResourceLocation.fromString('wooden_axe'),
ResourceLocation.fromString('wooden_hoe'),
ResourceLocation.fromString('wooden_pickaxe'),
ResourceLocation.fromString('wooden_shovel'),
ResourceLocation.fromString('wooden_sword'),
ResourceLocation.fromString('writable_book'),
ResourceLocation.fromString('written_book'),
ResourceLocation.fromString('yellow_dye'),
ResourceLocation.fromString('zoglin_spawn_egg'),
ResourceLocation.fromString('zombie_horse_spawn_egg'),
ResourceLocation.fromString('zombie_spawn_egg'),
ResourceLocation.fromString('zombie_villager_spawn_egg'),
ResourceLocation.fromString('zombified_piglin_spawn_egg'),
}
# compiled from the Minecraft wiki:
ENTITIES_MOBS: set[ResourceLocation] = {
ResourceLocation.fromString('axolotl'),
ResourceLocation.fromString('bat'),
ResourceLocation.fromString('bee'),
ResourceLocation.fromString('blaze'),
ResourceLocation.fromString('cat'),
ResourceLocation.fromString('cave_spider'),
ResourceLocation.fromString('chicken'),
ResourceLocation.fromString('cod'),
ResourceLocation.fromString('cow'),
ResourceLocation.fromString('creeper'),
ResourceLocation.fromString('dolphin'),
ResourceLocation.fromString('donkey'),
ResourceLocation.fromString('drowned'),
ResourceLocation.fromString('elder_guardian'),
ResourceLocation.fromString('ender_dragon'),
ResourceLocation.fromString('enderman'),
ResourceLocation.fromString('endermite'),
ResourceLocation.fromString('evoker'),
ResourceLocation.fromString('fox'),
ResourceLocation.fromString('ghast'),
ResourceLocation.fromString('giant'),
ResourceLocation.fromString('glow_squid'),
ResourceLocation.fromString('goat'),
ResourceLocation.fromString('guardian'),
ResourceLocation.fromString('hoglin'),
ResourceLocation.fromString('horse'),
ResourceLocation.fromString('husk'),
ResourceLocation.fromString('illusioner'),
ResourceLocation.fromString('iron_golem'),
ResourceLocation.fromString('llama'),
ResourceLocation.fromString('magma_cube'),
ResourceLocation.fromString('mooshroom'),
ResourceLocation.fromString('mule'),
ResourceLocation.fromString('ocelot'),
ResourceLocation.fromString('panda'),
ResourceLocation.fromString('parrot'),
ResourceLocation.fromString('phantom'),
ResourceLocation.fromString('pig'),
ResourceLocation.fromString('piglin'),
ResourceLocation.fromString('piglin_brute'),
ResourceLocation.fromString('pillager'),
ResourceLocation.fromString('polar_bear'),
ResourceLocation.fromString('pufferfish'),
ResourceLocation.fromString('rabbit'),
ResourceLocation.fromString('ravager'),
ResourceLocation.fromString('salmon'),
ResourceLocation.fromString('sheep'),
ResourceLocation.fromString('shulker'),
ResourceLocation.fromString('silverfish'),
ResourceLocation.fromString('skeleton'),
ResourceLocation.fromString('skeleton_horse'),
ResourceLocation.fromString('slime'),
ResourceLocation.fromString('snow_golem'),
ResourceLocation.fromString('spider'),
ResourceLocation.fromString('squid'),
ResourceLocation.fromString('stray'),
ResourceLocation.fromString('strider'),
ResourceLocation.fromString('trader_llama'),
ResourceLocation.fromString('tropical_fish'),
ResourceLocation.fromString('turtle'),
ResourceLocation.fromString('vex'),
ResourceLocation.fromString('villager'),
ResourceLocation.fromString('vindicator'),
ResourceLocation.fromString('wandering_trader'),
ResourceLocation.fromString('witch'),
ResourceLocation.fromString('wither'),
ResourceLocation.fromString('wither_skeleton'),
ResourceLocation.fromString('wolf'),
ResourceLocation.fromString('zoglin'),
ResourceLocation.fromString('zombie'),
ResourceLocation.fromString('zombie_horse'),
ResourceLocation.fromString('zombie_villager'),
ResourceLocation.fromString('zombified_piglin'),
}
# compiled from the Minecraft wiki:
ENTITIES_MISC: set[ResourceLocation] = {
ResourceLocation.fromString('area_effect_cloud'),
ResourceLocation.fromString('armor_stand'),
ResourceLocation.fromString('end_crystal'),
ResourceLocation.fromString('evoker_fangs'),
ResourceLocation.fromString('fishing_bobber'),
ResourceLocation.fromString('item_frame'),
ResourceLocation.fromString('leash_knot'),
ResourceLocation.fromString('lightning_bolt'),
ResourceLocation.fromString('painting'),
ResourceLocation.fromString('marker'),
}
# compiled from the Minecraft wiki:
ENTITIES_PROJECTILES: set[ResourceLocation] = {
ResourceLocation.fromString('arrow'),
ResourceLocation.fromString('dragon_fireball'),
ResourceLocation.fromString('egg'),
ResourceLocation.fromString('ender_pearl'),
ResourceLocation.fromString('experience_bottle'),
ResourceLocation.fromString('eye_of_ender'),
ResourceLocation.fromString('fireball'),
ResourceLocation.fromString('firework_rocket'),
ResourceLocation.fromString('llama_spit'),
ResourceLocation.fromString('potion'),
ResourceLocation.fromString('shulker_bullet'),
ResourceLocation.fromString('small_fireball'),
ResourceLocation.fromString('snowball'),
ResourceLocation.fromString('spectral_arrow'),
ResourceLocation.fromString('trident'),
ResourceLocation.fromString('wither_skull'),
}
# compiled from the Minecraft wiki:
ENTITIES_VEHICLES: set[ResourceLocation] = {
ResourceLocation.fromString('boat'),
ResourceLocation.fromString('chest_minecart'),
ResourceLocation.fromString('command_block_minecart'),
ResourceLocation.fromString('furnace_minecart'),
ResourceLocation.fromString('hopper_minecart'),
ResourceLocation.fromString('minecart'),
ResourceLocation.fromString('spawner_minecart'),
ResourceLocation.fromString('tnt_minecart'),
}
# compiled from the Minecraft wiki:
ENTITIES_BLOCKS: set[ResourceLocation] = {
ResourceLocation.fromString('falling_block'),
ResourceLocation.fromString('tnt'),
}
# compiled from the Minecraft wiki:
ENTITIES_ITEMS: set[ResourceLocation] = {
ResourceLocation.fromString('experience_orb'),
ResourceLocation.fromString('item'),
}
# compiled from the Minecraft wiki:
ENTITIES: set[ResourceLocation] = {
*ENTITIES_MOBS,
*ENTITIES_MISC,
*ENTITIES_PROJECTILES,
*ENTITIES_VEHICLES,
*ENTITIES_BLOCKS,
*ENTITIES_ITEMS,
}
# compiled from the Minecraft wiki:
EFFECTS: set[ResourceLocation] = {
ResourceLocation.fromString('absorption'),
ResourceLocation.fromString('bad_omen'),
ResourceLocation.fromString('blindness'),
ResourceLocation.fromString('conduit_power'),
ResourceLocation.fromString('dolphins_grace'),
ResourceLocation.fromString('fire_resistance'),
ResourceLocation.fromString('glowing'),
ResourceLocation.fromString('haste'),
ResourceLocation.fromString('health_boost'),
ResourceLocation.fromString('hero_of_the_village'),
ResourceLocation.fromString('hunger'),
ResourceLocation.fromString('instant_damage'),
ResourceLocation.fromString('instant_health'),
ResourceLocation.fromString('invisibility'),
ResourceLocation.fromString('jump_boost'),
ResourceLocation.fromString('levitation'),
ResourceLocation.fromString('luck'),
ResourceLocation.fromString('mining_fatigue'),
ResourceLocation.fromString('nausea'),
ResourceLocation.fromString('night_vision'),
ResourceLocation.fromString('poison'),
ResourceLocation.fromString('regeneration'),
ResourceLocation.fromString('resistance'),
ResourceLocation.fromString('saturation'),
ResourceLocation.fromString('slow_falling'),
ResourceLocation.fromString('slowness'),
ResourceLocation.fromString('speed'),
ResourceLocation.fromString('strength'),
ResourceLocation.fromString('unluck'),
ResourceLocation.fromString('water_breathing'),
ResourceLocation.fromString('weakness'),
ResourceLocation.fromString('wither'),
}
# compiled from the Minecraft wiki:
ENCHANTMENTS: set[ResourceLocation] = {
ResourceLocation.fromString('aqua_affinity'),
ResourceLocation.fromString('bane_of_arthropods'),
ResourceLocation.fromString('binding_curse'),
ResourceLocation.fromString('blast_protection'),
ResourceLocation.fromString('channeling'),
ResourceLocation.fromString('depth_strider'),
ResourceLocation.fromString('efficiency'),
ResourceLocation.fromString('feather_falling'),
ResourceLocation.fromString('fire_aspect'),
ResourceLocation.fromString('fire_protection'),
ResourceLocation.fromString('flame'),
ResourceLocation.fromString('fortune'),
ResourceLocation.fromString('frost_walker'),
ResourceLocation.fromString('impaling'),
ResourceLocation.fromString('infinity'),
ResourceLocation.fromString('knockback'),
ResourceLocation.fromString('looting'),
ResourceLocation.fromString('loyalty'),
ResourceLocation.fromString('luck_of_the_sea'),
ResourceLocation.fromString('lure'),
ResourceLocation.fromString('mending'),
ResourceLocation.fromString('multishot'),
ResourceLocation.fromString('piercing'),
ResourceLocation.fromString('power'),
ResourceLocation.fromString('projectile_protection'),
ResourceLocation.fromString('protection'),
ResourceLocation.fromString('punch'),
ResourceLocation.fromString('quick_charge'),
ResourceLocation.fromString('respiration'),
ResourceLocation.fromString('riptide'),
ResourceLocation.fromString('sharpness'),
ResourceLocation.fromString('silk_touch'),
ResourceLocation.fromString('smite'),
ResourceLocation.fromString('soul_speed'),
ResourceLocation.fromString('sweeping'),
ResourceLocation.fromString('thorns'),
ResourceLocation.fromString('unbreaking'),
ResourceLocation.fromString('vanishing_curse'),
}
# compiled from the Minecraft wiki:
BIOMES: set[ResourceLocation] = {
ResourceLocation.fromString('badlands'),
ResourceLocation.fromString('badlands_plateau'),
ResourceLocation.fromString('bamboo_jungle'),
ResourceLocation.fromString('bamboo_jungle_hills'),
ResourceLocation.fromString('basalt_deltas'),
ResourceLocation.fromString('beach'),
ResourceLocation.fromString('birch_forest'),
ResourceLocation.fromString('birch_forest_hills'),
ResourceLocation.fromString('cold_ocean'),
ResourceLocation.fromString('crimson_forest'),
ResourceLocation.fromString('dark_forest'),
ResourceLocation.fromString('dark_forest_hills'),
ResourceLocation.fromString('deep_cold_ocean'),
ResourceLocation.fromString('deep_frozen_ocean'),
ResourceLocation.fromString('deep_lukewarm_ocean'),
ResourceLocation.fromString('deep_ocean'),
ResourceLocation.fromString('deep_warm_ocean'),
ResourceLocation.fromString('desert'),
ResourceLocation.fromString('desert_hills'),
ResourceLocation.fromString('desert_lakes'),
ResourceLocation.fromString('dripstone_caves'),
ResourceLocation.fromString('end_barrens'),
ResourceLocation.fromString('end_highlands'),
ResourceLocation.fromString('end_midlands'),
ResourceLocation.fromString('eroded_badlands'),
ResourceLocation.fromString('flower_forest'),
ResourceLocation.fromString('forest'),
ResourceLocation.fromString('frozen_ocean'),
ResourceLocation.fromString('frozen_river'),
ResourceLocation.fromString('giant_spruce_taiga'),
ResourceLocation.fromString('giant_spruce_taiga_hills'),
ResourceLocation.fromString('giant_tree_taiga'),
ResourceLocation.fromString('giant_tree_taiga_hills'),
ResourceLocation.fromString('gravelly_mountains'),
ResourceLocation.fromString('ice_spikes'),
ResourceLocation.fromString('jungle'),
ResourceLocation.fromString('jungle_edge'),
ResourceLocation.fromString('jungle_hills'),
ResourceLocation.fromString('lukewarm_ocean'),
ResourceLocation.fromString('lush_caves'),
ResourceLocation.fromString('modified_badlands_plateau'),
ResourceLocation.fromString('modified_gravelly_mountains'),
ResourceLocation.fromString('modified_jungle'),
ResourceLocation.fromString('modified_jungle_edge'),
ResourceLocation.fromString('modified_wooded_badlands_plateau'),
ResourceLocation.fromString('mountain_edge'),
ResourceLocation.fromString('mountains'),
ResourceLocation.fromString('mushroom_field_shore'),
ResourceLocation.fromString('mushroom_fields'),
ResourceLocation.fromString('nether_wastes'),
ResourceLocation.fromString('ocean'),
ResourceLocation.fromString('plains'),
ResourceLocation.fromString('river'),
ResourceLocation.fromString('savanna'),
ResourceLocation.fromString('savanna_plateau'),
ResourceLocation.fromString('shattered_savanna'),
ResourceLocation.fromString('shattered_savanna_plateau'),
ResourceLocation.fromString('small_end_islands'),
ResourceLocation.fromString('snowy_beach'),
ResourceLocation.fromString('snowy_mountains'),
ResourceLocation.fromString('snowy_taiga'),
ResourceLocation.fromString('snowy_taiga_hills'),
ResourceLocation.fromString('snowy_taiga_mountains'),
ResourceLocation.fromString('snowy_tundra'),
ResourceLocation.fromString('soul_sand_valley'),
ResourceLocation.fromString('stone_shore'),
ResourceLocation.fromString('sunflower_plains'),
ResourceLocation.fromString('swamp'),
ResourceLocation.fromString('swamp_hills'),
ResourceLocation.fromString('taiga'),
ResourceLocation.fromString('taiga_hills'),
ResourceLocation.fromString('taiga_mountains'),
ResourceLocation.fromString('tall_birch_forest'),
ResourceLocation.fromString('tall_birch_hills'),
ResourceLocation.fromString('the_end'),
ResourceLocation.fromString('the_void'),
ResourceLocation.fromString('warm_ocean'),
ResourceLocation.fromString('warped_forest'),
ResourceLocation.fromString('wooded_badlands_plateau'),
ResourceLocation.fromString('wooded_hills'),
ResourceLocation.fromString('wooded_mountains'),
}
# compiled from the Minecraft wiki:
PARTICLES: set[ResourceLocation] = {
ResourceLocation.fromString('ambient_entity_effect'),
ResourceLocation.fromString('angry_villager'),
ResourceLocation.fromString('ash'),
ResourceLocation.fromString('barrier'),
ResourceLocation.fromString('block'),
ResourceLocation.fromString('bubble'),
ResourceLocation.fromString('bubble_column_up'),
ResourceLocation.fromString('bubble_pop'),
ResourceLocation.fromString('campfire_cosy_smoke'),
ResourceLocation.fromString('campfire_signal_smoke'),
ResourceLocation.fromString('cloud'),
ResourceLocation.fromString('composter'),
ResourceLocation.fromString('crimson_spore'),
ResourceLocation.fromString('crit'),
ResourceLocation.fromString('current_down'),
ResourceLocation.fromString('damage_indicator'),
ResourceLocation.fromString('dolphin'),
ResourceLocation.fromString('dragon_breath'),
ResourceLocation.fromString('dripping_dripstone_lava'),
ResourceLocation.fromString('dripping_dripstone_water'),
ResourceLocation.fromString('dripping_honey'),
ResourceLocation.fromString('dripping_lava'),
ResourceLocation.fromString('dripping_obsidian_tear'),
ResourceLocation.fromString('dripping_water'),
ResourceLocation.fromString('dust'),
ResourceLocation.fromString('dust_color_transition'),
ResourceLocation.fromString('effect'),
ResourceLocation.fromString('elder_guardian'),
ResourceLocation.fromString('electric_spark'),
ResourceLocation.fromString('enchant'),
ResourceLocation.fromString('enchanted_hit'),
ResourceLocation.fromString('end_rod'),
ResourceLocation.fromString('entity_effect'),
ResourceLocation.fromString('explosion'),
ResourceLocation.fromString('explosion_emitter'),
ResourceLocation.fromString('falling_dripstone_lava'),
ResourceLocation.fromString('falling_dripstone_water'),
ResourceLocation.fromString('falling_dust'),
ResourceLocation.fromString('falling_honey'),
ResourceLocation.fromString('falling_lava'),
ResourceLocation.fromString('falling_nectar'),
ResourceLocation.fromString('falling_obsidian_tear'),
ResourceLocation.fromString('falling_spore_blossom'),
ResourceLocation.fromString('falling_water'),
ResourceLocation.fromString('firework'),
ResourceLocation.fromString('fishing'),
ResourceLocation.fromString('flame'),
ResourceLocation.fromString('flash'),
ResourceLocation.fromString('glow'),
ResourceLocation.fromString('glow_squid_ink'),
ResourceLocation.fromString('happy_villager'),
ResourceLocation.fromString('heart'),
ResourceLocation.fromString('instant_effect'),
ResourceLocation.fromString('item'),
ResourceLocation.fromString('item_slime'),
ResourceLocation.fromString('item_snowball'),
ResourceLocation.fromString('landing_honey'),
ResourceLocation.fromString('landing_lava'),
ResourceLocation.fromString('landing_obsidian_tear'),
ResourceLocation.fromString('large_smoke'),
ResourceLocation.fromString('lava'),
ResourceLocation.fromString('light'),
ResourceLocation.fromString('mycelium'),
ResourceLocation.fromString('nautilus'),
ResourceLocation.fromString('note'),
ResourceLocation.fromString('poof'),
ResourceLocation.fromString('portal'),
ResourceLocation.fromString('rain'),
ResourceLocation.fromString('scrape'),
ResourceLocation.fromString('smoke'),
ResourceLocation.fromString('sneeze'),
ResourceLocation.fromString('snowflake'),
ResourceLocation.fromString('soul'),
	ResourceLocation.fromString('soul_fire_flame'),
ResourceLocation.fromString('spit'),
ResourceLocation.fromString('spore_blossom_air'),
ResourceLocation.fromString('splash'),
ResourceLocation.fromString('squid_ink'),
ResourceLocation.fromString('sweep_attack'),
ResourceLocation.fromString('totem_of_undying'),
ResourceLocation.fromString('underwater'),
ResourceLocation.fromString('vibration'),
ResourceLocation.fromString('warped_spore'),
ResourceLocation.fromString('wax_off'),
ResourceLocation.fromString('wax_on'),
ResourceLocation.fromString('white_ash'),
ResourceLocation.fromString('witch'),
}
# compiled from the Minecraft wiki:
DIMENSIONS: set[ResourceLocation] = {
ResourceLocation.fromString('overworld'),
ResourceLocation.fromString('the_nether'),
ResourceLocation.fromString('the_end'),
}
# compiled from the Minecraft wiki:
STRUCTURES: set[str] = {
'jungle_pyramid',
'village',
'endcity',
'ruined_portal',
'igloo',
'stronghold',
'bastion_remnant',
'desert_pyramid',
'nether_fossil',
'buried_treasure',
'mansion',
'shipwreck',
'monument',
'swamp_hut',
'fortress',
'pillager_outpost',
'ocean_ruin',
'mineshaft',
}
# compiled from the Minecraft wiki:
SLOTS: dict[str, int] = {
'armor.chest': 102,
'armor.feet': 100,
'armor.head': 103,
'armor.legs': 101,
'weapon': 98,
'weapon.mainhand': 98,
'weapon.offhand': 99,
**{f'container.{sn}': 0 + sn for sn in range(0, 53 + 1)}, # 0-53 0-53
**{f'enderchest.{sn}': 200 + sn for sn in range(0, 26 + 1)}, # 0-26 200-226
**{f'hotbar.{sn}': 0 + sn for sn in range(0, 8 + 1)}, # 0-8 0-8
**{f'inventory.{sn}': 9 + sn for sn in range(0, 26 + 1)}, # 0-26 9-35
'horse.saddle': 400,
'horse.chest': 499,
'horse.armor': 401,
**{f'horse.{sn}': 500 + sn for sn in range(0, 14 + 1)}, # 0-14 500-514
**{f'villager.{sn}': 300 + sn for sn in range(0, 7 + 1)}, # 0-7 300-307
}
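# Illustrative examples of the mapping above (comment added, not in the original
# source): 'hotbar.3' -> 3, 'inventory.0' -> 9, 'inventory.26' -> 35,
# 'enderchest.13' -> 213, 'villager.7' -> 307.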
GAMERULES = [
Gamerule(
name='announceAdvancements',
description="Whether advancements should be announced in chat",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='commandBlockOutput',
description="Whether command blocks should notify admins when they perform commands",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='disableElytraMovementCheck',
description="Whether the server should skip checking player speed when the player is wearing elytra. Often helps with jittering due to lag in multiplayer.",
type=BRIGADIER_BOOL,
defaultValue='false',
),
Gamerule(
name='disableRaids',
description="Whether raids are disabled.",
type=BRIGADIER_BOOL,
defaultValue='false',
),
Gamerule(
name='doDaylightCycle',
description="Whether the daylight cycle and moon phases progress",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doEntityDrops',
description="Whether entities that are not mobs should have drops",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doFireTick',
description="Whether fire should spread and naturally extinguish",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doInsomnia',
description="Whether phantoms can spawn in the nighttime",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doImmediateRespawn',
description="Players respawn immediately without showing the death screen",
type=BRIGADIER_BOOL,
defaultValue='false',
),
Gamerule(
name='doLimitedCrafting',
description="Whether players should be able to craft only those recipes that they've unlocked first",
type=BRIGADIER_BOOL,
defaultValue='false',
),
Gamerule(
name='doMobLoot',
description="Whether mobs should drop items and experience orbs",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doMobSpawning',
description="Whether mobs should naturally spawn. Does not affect monster spawners.",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doPatrolSpawning',
description="Whether patrols can spawn",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doTileDrops',
description="Whether blocks should have drops",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doTraderSpawning',
description="Whether wandering traders can spawn",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='doWeatherCycle',
description="Whether the weather can change naturally. The /weather command can still change weather.",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='drowningDamage',
description="Whether the player should take damage when drowning",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='fallDamage',
description="Whether the player should take fall damage",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='fireDamage',
description="Whether the player should take damage in fire, lava, campfires, or on magma blocks[Java Edition only][1].",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='forgiveDeadPlayers',
description="Makes angered neutral mobs stop being angry when the targeted player dies nearby",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='freezeDamage',
description="Whether the player should take damage when inside powder snow",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='keepInventory',
description="Whether the player should keep items and experience in their inventory after death",
type=BRIGADIER_BOOL,
defaultValue='false',
),
Gamerule(
name='logAdminCommands',
description="Whether to log admin commands to server log",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='maxCommandChainLength',
description="The maximum length of a chain of commands that can be executed during one tick. Applies to command blocks and functions.",
type=BRIGADIER_INTEGER,
defaultValue='65536',
),
Gamerule(
name='maxEntityCramming',
description="The maximum number of pushable entities a mob or player can push, before taking 3♥♥ suffocation damage per half-second. Setting to 0 or lower disables the rule. Damage affects survival-mode or adventure-mode players, and all mobs but bats. Pushable entities include non-spectator-mode players, any mob except bats, as well as boats and minecarts.",
type=BRIGADIER_INTEGER,
defaultValue='24',
),
Gamerule(
name='mobGriefing',
description="Whether creepers, zombies, endermen, ghasts, withers, ender dragons, rabbits, sheep, villagers, silverfish, snow golems, and end crystals should be able to change blocks and whether mobs can pick up items, which also disables bartering. This also affects the capability of zombie-like creatures like zombified piglins and drowned to pathfind to turtle eggs.",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='naturalRegeneration',
description="Whether the player can regenerate health naturally if their hunger is full enough (doesn't affect external healing, such as golden apples, the Regeneration effect, etc.)",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='playersSleepingPercentage',
description="What percentage of players must sleep to skip the night.",
type=BRIGADIER_INTEGER,
defaultValue='100',
),
Gamerule(
name='randomTickSpeed',
description="How often a random block tick occurs (such as plant growth, leaf decay, etc.) per chunk section per game tick. 0 and negative values disables random ticks, higher numbers increase random ticks. Setting to a high integer results in high speeds of decay and growth. Numbers over 4096 make plant growth or leaf decay instantaneous.",
type=BRIGADIER_INTEGER,
defaultValue='3',
),
Gamerule(
name='reducedDebugInfo',
description="Whether the debug screen shows all or reduced information; and whether the effects of F3 + B (entity hitboxes) and F3 + G (chunk boundaries) are shown.",
type=BRIGADIER_BOOL,
defaultValue='false',
),
Gamerule(
name='sendCommandFeedback',
description="Whether the feedback from commands executed by a player should show up in chat. Also affects the default behavior of whether command blocks store their output text",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='showDeathMessages',
description="Whether death messages are put into chat when a player dies. Also affects whether a message is sent to the pet's owner when the pet dies.",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='spawnRadius',
description="The number of blocks outward from the world spawn coordinates that a player spawns in when first joining a server or when dying without a personal spawnpoint.",
type=BRIGADIER_INTEGER,
defaultValue='10',
),
Gamerule(
name='spectatorsGenerateChunks',
description="Whether players in spectator mode can generate chunks",
type=BRIGADIER_BOOL,
defaultValue='true',
),
Gamerule(
name='universalAnger',
description="Makes angered neutral mobs attack any nearby player, not just the player that angered them. Works best if forgiveDeadPlayers is disabled.",
type=BRIGADIER_BOOL,
defaultValue='false',
),
]
version1_17 = MCVersion(
name='1.17',
blocks=BLOCKS,
fluids=FLUIDS,
items=ITEMS,
entities=set(),
# Entities(
# mobs=ENTITIES_MOBS,
# misc=ENTITIES_MISC,
# projectiles=ENTITIES_PROJECTILES,
# vehicles=ENTITIES_VEHICLES,
# blocks=ENTITIES_BLOCKS,
# items=ENTITIES_ITEMS,
# ),
effects=EFFECTS,
enchantments=ENCHANTMENTS,
biomes=BIOMES,
particles=PARTICLES,
dimensions=DIMENSIONS,
structures=STRUCTURES,
gameEvents=set(), # introduced in version 1.19
slots=SLOTS,
blockStates=BLOCK_STATES_BY_BLOCK,
gamerules=GAMERULES,
commands={},
)
fillCommandsFor1_17(version1_17)
fillFromMinecraftData(version1_17)
```
#### File: model/json/lexer.py
```python
from collections import deque
from dataclasses import dataclass, field
from enum import Enum
from typing import NamedTuple
from string import whitespace as WHITESPACE, ascii_letters
from model.utils import Span, GeneralError, Position, Message
INCOMPLETE_ESCAPE_MSG = Message("Incomplete escape at end of string", 0)
SINGLE_QUOTED_STRING_MSG = Message("JSON standard does not allow single quoted strings", 0)
EXPECTED_END_OF_STRING_MSG = Message("Expected end of string", 0)
MISSING_CLOSING_QUOTE_MSG = Message("Missing closing quote", 0)
# TOO_MANY_DECIMAL_POINTS_MSG = Message("Too many decimal points in number", 0)
# MINUS_SIGN_INSIDE_NUMBER_MSG = Message("Minus sign in between number", 0)
INVALID_NUMBER_MSG = Message("Minus sign in between number `{0}`", 1)
UNKNOWN_LITERAL_MSG = Message("Unknown literal `{0}`", 1)
ILLEGAL_CHARS_MSG = Message("Illegal characters `{0}`", 1)
EMPTY_STRING_MSG = Message("Cannot parse empty string", 0)
Char = str
class TokenType(Enum):
default = 0
null = 1
boolean = 2
number = 3
string = 4
left_bracket = 5
left_brace = 6
right_bracket = 7
right_brace = 8
comma = 9
colon = 10
invalid = 11
eof = 12
@property
def asString(self) -> str:
return _TOKEN_TYPE_STR_REP[self]
_TOKEN_TYPE_STR_REP = {
TokenType.default: "default",
TokenType.null: "null",
TokenType.boolean: "boolean",
TokenType.number: "number",
TokenType.string: "string",
TokenType.left_bracket: "'['",
TokenType.left_brace: "'{'",
TokenType.right_bracket: "']'",
TokenType.right_brace: "'}'",
TokenType.comma: "','",
TokenType.colon: "':'",
TokenType.invalid: "invalid",
TokenType.eof: "end of file",
}
class Token(NamedTuple):
"""Represents a Token extracted by the parser"""
value: str
type: TokenType
span: Span
# isValid: bool = True
class JsonTokenizeError(GeneralError):
pass
WHITESPACE_NO_LF = ' \t\r\v\f'
"""without the line feed character (\\n)"""
CR_LF = '\r\n'
"""carriage return & line feed (\\r\\n)"""
@dataclass
class JsonTokenizer:
source: str
allowMultilineStr: bool
cursor: int = 0
line: int = 0
lineStart: int = 0
totalLength: int = field(default=-1)
tokens: deque[Token] = field(default_factory=deque)
errors: list[JsonTokenizeError] = field(default_factory=list)
_errorsNextToken: list[tuple[Message, tuple, str]] = field(default_factory=list, init=False)
def __post_init__(self):
if self.totalLength == -1:
self.totalLength = len(self.source)
@property
def position(self) -> Position:
return Position(self.line, self.cursor-self.lineStart, self.cursor)
@property
def char(self) -> Char:
return self.source[self.cursor]
def advanceLine(self) -> None:
self.lineStart = self.cursor
self.line += 1
def addToken(self, start: Position, tokenType: TokenType) -> Token:
end = self.position
span = Span(start, end)
token = Token(self.source[start.index:end.index], tokenType, span)
self.tokens.append(token)
# add errors:
if self._errorsNextToken:
for msg, args, style in self._errorsNextToken:
self.errorLastToken(msg, *args, style=style)
self._errorsNextToken.clear()
return token
def addToken2(self, start: Position, value: str, tokenType: TokenType) -> Token:
end = self.position
span = Span(start, end)
token = Token(value, tokenType, span)
self.tokens.append(token)
# add errors:
if self._errorsNextToken:
for msg, args, style in self._errorsNextToken:
self.errorLastToken(msg, *args, style=style)
self._errorsNextToken.clear()
return token
def errorNextToken(self, msg: Message, *args, style: str = 'error') -> None:
self._errorsNextToken.append((msg, args, style))
def errorLastToken(self, msg: Message, *args, style: str = 'error') -> None:
msgStr = msg.format(*args)
if self.tokens:
span = self.tokens[-1].span # maybe self.tokens[0] ?
else:
span = Span()
self.errors.append(JsonTokenizeError(msgStr, span, style=style))
def consumeWhitespace(self) -> None:
cursor: int = self.cursor
source: str = self.source
length: int = self.totalLength
while cursor < length:
if source[cursor] in WHITESPACE_NO_LF:
cursor += 1
elif source[cursor] == '\n':
cursor += 1
self.lineStart = cursor
self.line += 1
else:
break
self.cursor = cursor
def extract_string(tkz: JsonTokenizer) -> None:
"""Extracts a single string token from JSON string"""
start = tkz.position
quote = tkz.source[tkz.cursor]
if quote == "'":
tkz.errorNextToken(SINGLE_QUOTED_STRING_MSG)
tkz.cursor += 1 # opening '"'
while tkz.cursor < tkz.totalLength:
char = tkz.source[tkz.cursor]
tkz.cursor += 1
if char == '\\':
if tkz.cursor == tkz.totalLength or tkz.source[tkz.cursor] in CR_LF:
tkz.addToken(start, TokenType.string)
tkz.errorLastToken(INCOMPLETE_ESCAPE_MSG)
return
else:
tkz.cursor += 1
continue
elif char == quote:
tkz.addToken(start, TokenType.string)
return
elif char == '\n':
if tkz.allowMultilineStr:
tkz.advanceLine()
else:
tkz.cursor -= 1 # '\n' is not part of the string
break
tkz.addToken(start, TokenType.string)
tkz.errorLastToken(MISSING_CLOSING_QUOTE_MSG)
def extract_number(tkz: JsonTokenizer) -> None:
"""Extracts a single number token (eg. 42, -12.3) from JSON string"""
start = tkz.position
decimal_point_found = False
exponent_found = False
isValid = True
if tkz.cursor < tkz.totalLength and tkz.source[tkz.cursor] in '-+':
tkz.cursor += 1
while tkz.cursor < tkz.totalLength:
char = tkz.source[tkz.cursor]
tkz.cursor += 1
if char.isdigit():
continue
elif char == '.':
if decimal_point_found:
isValid = False
decimal_point_found = True
continue
		elif char in '+-':
			# a sign inside a number is only valid directly after an exponent marker;
			# `char` is at cursor - 1, so the character preceding it is at cursor - 2.
			# (checking tkz.source[tkz.cursor] here would look at the *next* character
			# and could raise an IndexError at the end of the input)
			if tkz.source[tkz.cursor - 2] not in 'eE':
				isValid = False
			continue
elif char in 'eE':
if exponent_found:
isValid = False
exponent_found = True
continue
elif char in ',' + WHITESPACE + '}]"\':=[{\\':
tkz.cursor -= 1
break
else:
continue
token = tkz.addToken(start, TokenType.number)
if not isValid:
tkz.errorLastToken(INVALID_NUMBER_MSG, token.value)
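# Illustrative examples (comment added, not in the original source): '42',
# '-12.3' and '1e+5' tokenize as valid numbers, while '1.2.3' (second decimal
# point) and '12-3' (sign not preceded by an exponent marker) still yield a
# single number token but flag it with INVALID_NUMBER_MSG.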
_TOKEN_TYPE_FOR_SPECIAL = {
'true': TokenType.boolean,
'false': TokenType.boolean,
'null': TokenType.null,
}
def extract_special(tkz: JsonTokenizer) -> None:
"""Extracts true, false and null from JSON string"""
start = tkz.position
tkz.cursor += 1 # first letter
while tkz.cursor < tkz.totalLength and tkz.source[tkz.cursor].isalpha():
tkz.cursor += 1
word = tkz.source[start.index:tkz.cursor]
tkType = _TOKEN_TYPE_FOR_SPECIAL.get(word, TokenType.invalid)
token = tkz.addToken2(start, word, tkType)
if token.type is TokenType.invalid:
tkz.errorLastToken(UNKNOWN_LITERAL_MSG, token.value)
def extract_illegal(tkz: JsonTokenizer) -> None:
"""Extracts illegal characters from JSON string"""
start = tkz.position
tkz.cursor += 1 # first character
while tkz.cursor < tkz.totalLength:
char = tkz.source[tkz.cursor]
if char.isalnum():
break
if char in WHITESPACE:
break
if char in '[]{},:+-.':
break
tkz.cursor += 1
token = tkz.addToken(start, TokenType.invalid)
if token.type is TokenType.invalid:
tkz.errorLastToken(ILLEGAL_CHARS_MSG, repr(token.value))
def extract_operator(tkz: JsonTokenizer) -> None:
start = tkz.position
char = tkz.source[tkz.cursor]
tkz.cursor += 1
tkz.addToken2(start, char, _TOKEN_TYPE_FOR_OPERATOR[char])
_TOKEN_TYPE_FOR_OPERATOR = {
'[': TokenType.left_bracket,
']': TokenType.right_bracket,
'{': TokenType.left_brace,
'}': TokenType.right_brace,
',': TokenType.comma,
':': TokenType.colon,
}
_OPERATOR_FOR_TOKEN_TYPE = {v: k for k, v in _TOKEN_TYPE_FOR_OPERATOR.items()}
_TOKEN_EXTRACTORS_BY_CHAR = {
# **{c: extract_operator for c in '[]{},:'},
**{c: extract_string for c in '"\''},
	**{c: extract_special for c in ascii_letters},  # 'e' & 'E' are overridden by the extract_number mapping on the next line
**{c: extract_number for c in '0123456789+-.eE'},
}
def tokenizeJson(source: str, allowMultilineStr: bool, *, cursor: int = 0, line: int = 0, lineStart: int = 0, totalLength: int = -1) -> tuple[deque[Token], list[GeneralError]]:
"""Converts a JSON string into a queue of tokens"""
tkz = JsonTokenizer(source, allowMultilineStr, cursor, line, lineStart, totalLength)
tkz.consumeWhitespace()
while tkz.cursor < tkz.totalLength:
char = tkz.source[tkz.cursor]
if char == '"':
extract_string(tkz)
elif char in '[]{},:':
extract_operator(tkz)
# elif char in '0123456789+-.eE':
# extract_number(tkz)
elif char in 'tfn':
extract_special(tkz)
# elif char == "'":
# extract_string(tkz)
else:
extractor = _TOKEN_EXTRACTORS_BY_CHAR.get(char, extract_illegal)
extractor(tkz)
tkz.consumeWhitespace()
tkz.addToken2(tkz.position, '', TokenType.eof)
# if char in '[]{},:':
# start = tkz.position
# tkz.cursor += 1
# tkz.addToken(start, _TOKEN_TYPE_FOR_OPERATOR[char])
# elif char == '"':
# extract_string(tkz)
# elif char.isdigit() or char in '+-':
# extract_number(tkz)
# elif char.isalpha():
# extract_special(tkz)
# else:
# extract_illegal(tkz)
#
# tkz.consumeWhitespace()
	if len(tkz.tokens) == 1:  # only the eof token was added, so the source contained nothing to parse
		tkz.errorLastToken(EMPTY_STRING_MSG)
return tkz.tokens, tkz.errors
```
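A minimal usage sketch for the tokenizer above, assuming the `model` package of this repository is importable as-is; the input string is an illustrative example, not taken from the source:
```python
from model.json.lexer import tokenizeJson, TokenType

# tokenize a small JSON document; allowMultilineStr=False treats a bare line
# break inside a string as a missing closing quote
tokens, errors = tokenizeJson('{"key": [1, 2.5e+3, true, null]}', allowMultilineStr=False)
for token in tokens:
	print(token.type.name, repr(token.value))
assert tokens[-1].type is TokenType.eof  # the stream is always terminated by an eof token
assert not errors  # valid input produces no tokenize errors
```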
#### File: model/json/parser.py
```python
from ast import literal_eval
from collections import deque
from dataclasses import dataclass, field
from typing import Optional, AbstractSet, Union
from Cat.utils.collections_ import OrderedMultiDict
from Cat.utils.profiling import ProfiledFunction
from model.json.lexer import Token, tokenizeJson, TokenType
from model.json.core import *
from model.json.schema import enrichWithSchema
from model.messages import *
from model.utils import GeneralError, Span, MDStr
# class ParseError(Exception):
# """Error thrown when invalid JSON tokens are parsed"""
# def __init__(self, data: JsonParseError):
# super(ParseError, self).__init__(str(data))
# self.data: JsonParseError = data
@dataclass
class ParserData:
tokens: deque[Token]
lastToken: Optional[Token] = None
errors: list[JsonParseError] = field(default_factory=list)
@property
def hasTokens(self) -> bool:
return bool(self.tokens)
def accept(self, tokenType: TokenType) -> Optional[Token]:
if len(self.tokens) == 0:
span = self.lastToken.span if self.lastToken is not None else Span()
self.errors.append(JsonParseError(UNEXPECTED_EOF_MSG.format(), span))
return self.lastToken
token = self.tokens.popleft()
self.lastToken = token
if token.type is not tokenType:
msg = EXPECTED_BUT_GOT_MSG.format(tokenType.asString, token.value)
self.errors.append(JsonParseError(msg, token.span))
return token
def acceptAnyOf(self, tokenTypes: AbstractSet[TokenType], name: str = None) -> Optional[Token]:
if len(self.tokens) == 0:
span = self.lastToken.span if self.lastToken is not None else Span()
self.errors.append(JsonParseError(UNEXPECTED_EOF_MSG.format(), span))
return self.lastToken
token = self.tokens.popleft()
self.lastToken = token
if token.type not in tokenTypes:
if name is None:
name = ' | '.join(tk.asString for tk in tokenTypes)
msg = EXPECTED_BUT_GOT_MSG.format(name, token.value)
self.errors.append(JsonParseError(msg, token.span))
return token
def acceptAny(self) -> Optional[Token]:
if len(self.tokens) == 0:
span = self.lastToken.span if self.lastToken is not None else Span()
self.errors.append(JsonParseError(UNEXPECTED_EOF_MSG.format(), span))
return None
token = self.tokens.popleft()
self.lastToken = token
return self.lastToken
def pop(self) -> Token:
token = self.tokens.popleft()
self.lastToken = token
return token
def parse_object(psr: ParserData, schema: Optional[JsonObjectSchema]) -> JsonObject:
"""Parses an object out of JSON tokens"""
objData: OrderedMultiDict[str, JsonProperty] = OrderedMultiDict()
isObjectSchema = isinstance(schema, JsonObjectSchema)
start = psr.lastToken
token = psr.acceptAnyOf({TokenType.right_brace, TokenType.string})
if token.type is TokenType.eof:
return JsonObject(Span(start.span.start, token.span.end), schema, objData)
# special case:
if token.type is TokenType.right_brace:
return JsonObject(Span(start.span.start, token.span.end), schema, objData)
while token is not None:
if token.type == TokenType.string:
key = parse_string(psr, JSON_KEY_SCHEMA)
else:
if token.type == TokenType.comma:
token = psr.accept(TokenType.string)
continue
if token.type == TokenType.right_brace:
break
key = JsonString(token.span, JSON_KEY_SCHEMA, '')
propertySchema = schema.propertiesDict.get(key.data) if isObjectSchema else None
valueSchema = propertySchema.value if propertySchema is not None else None
if token.type != TokenType.colon:
token = psr.accept(TokenType.colon)
if token.type is TokenType.eof:
value = JsonInvalid(Span(psr.lastToken.span.end, token.span.end), valueSchema, '')
objData.add(key.data, JsonProperty(Span(key.span.start, value.span.end), propertySchema, key, value))
break
elif token.type != TokenType.colon:
if token.type == TokenType.comma:
value = JsonInvalid(psr.lastToken.span, valueSchema, '')
objData.add(key.data, JsonProperty(Span(key.span.start, token.span.start), propertySchema, key, value))
token = psr.accept(TokenType.string)
continue
if token.type == TokenType.right_brace:
value = JsonInvalid(psr.lastToken.span, valueSchema, '')
objData.add(key.data, JsonProperty(Span(key.span.start, token.span.start), propertySchema, key, value))
break
			# otherwise: fall through and try to parse a value anyway
psr.acceptAnyOf(_PARSERS.keys())
value = _internalParseTokens(psr, valueSchema)
token = psr.acceptAnyOf({TokenType.comma, TokenType.right_brace})
if token.type is TokenType.eof:
objData.add(key.data, JsonProperty(Span(key.span.start, token.span.end), propertySchema, key, value))
break
objData.add(key.data, JsonProperty(Span(key.span.start, value.span.end), propertySchema, key, value))
if token.type is TokenType.comma:
token = psr.accept(TokenType.string)
continue
if token.type == TokenType.right_brace:
break
if token.type is TokenType.eof:
token = psr.lastToken
return JsonObject(Span(start.span.start, token.span.end), schema, objData)
def parse_array(psr: ParserData, schema: Optional[JsonArraySchema]) -> JsonArray:
"""Parses an array out of JSON tokens"""
arrayData: list[JsonData] = []
elementSchema = schema.element if isinstance(schema, JsonArraySchema) else None
start = psr.lastToken
token = psr.acceptAnyOf({TokenType.right_bracket, *_PARSERS.keys()})
if token.type is TokenType.eof:
return JsonArray(Span(start.span.start, token.span.end), schema, arrayData)
# special case:
if token.type is TokenType.right_bracket:
return JsonArray(Span(start.span.start, token.span.end), schema, arrayData)
while token is not None:
value = _internalParseTokens(psr, elementSchema)
arrayData.append(value)
token = psr.acceptAnyOf({TokenType.comma, TokenType.right_bracket})
if token.type is TokenType.eof:
break
if token.type is TokenType.comma:
token = psr.acceptAnyOf(_PARSERS.keys())
continue
if token.type == TokenType.right_bracket:
break
if token.type is TokenType.eof:
token = psr.lastToken
return JsonArray(Span(start.span.start, token.span.end), schema, arrayData)
def parse_string(psr: ParserData, schema: Optional[Union[JsonStringSchema, JsonKeySchema]]) -> JsonString:
"""Parses a string out of a JSON token"""
token = psr.lastToken
string = token.value
if '\\' in string:
chars: list[str] = []
index = 1
end = len(token.value) - 1
while index < end:
char = token.value[index]
if char != '\\':
chars.append(char)
index += 1
				continue
next_char = token.value[index+1]
if next_char == 'u':
hex_string = token.value[index+2:index+6]
try:
unicode_char = literal_eval(f'"\\u{hex_string}"')
				except SyntaxError:
					psr.errors.append(JsonParseError(MDStr(f"Invalid unicode escape: `\\u{hex_string}`"), token.span))
					unicode_char = f'\\u{hex_string}'  # fall back to keeping the raw escape text
chars.append(unicode_char)
index += 6
continue
if next_char in ('"', '/', '\\'):
chars.append(next_char)
elif next_char == 'b':
chars.append('\b')
elif next_char == 'f':
chars.append('\f')
elif next_char == 'n':
chars.append('\n')
elif next_char == 'r':
chars.append('\r')
elif next_char == 't':
chars.append('\t')
else:
psr.errors.append(JsonParseError(MDStr(f"Unknown escape sequence: `{token.value}`"), token.span))
index += 2
string = ''.join(chars)
elif string:
if string[0] == '"':
string = string[1:].removesuffix('"')
elif string[0] == "'":
string = string[1:].removesuffix("'")
return JsonString(token.span, schema, string)
def parse_number(psr: ParserData, schema: Optional[JsonSchema]) -> JsonNumber:
"""Parses a number out of a JSON token"""
token = psr.lastToken
try:
		if token.value.lstrip('+-').isdigit():  # signed integers should also parse as int, not float
number = int(token.value)
else:
number = float(token.value)
return JsonNumber(token.span, schema, number)
except ValueError as err:
psr.errors.append(JsonParseError(MDStr(f"Invalid number: `{token.value}`"), token.span))
return JsonNumber(token.span, schema, 0)
BOOLEAN_TOKENS = {
'true': True,
'false': False,
}
def parse_boolean(psr: ParserData, schema: Optional[JsonSchema]) -> JsonBool:
"""Parses a boolean out of a JSON token"""
token = psr.lastToken
value = BOOLEAN_TOKENS[token.value]
return JsonBool(token.span, schema, value)
def parse_null(psr: ParserData, schema: Optional[JsonSchema]) -> JsonNull:
	"""Parses a null value out of a JSON token"""
token = psr.lastToken
return JsonNull(token.span, schema)
_PARSERS = {
TokenType.left_bracket: parse_array,
TokenType.left_brace: parse_object,
TokenType.string: parse_string,
TokenType.number: parse_number,
TokenType.boolean: parse_boolean,
TokenType.null: parse_null,
}
def _internalParseTokens(psr: ParserData, schema: Optional[JsonArraySchema]) -> JsonData:
"""Recursive JSON parse implementation"""
token = psr.lastToken
if isinstance(schema, JsonUnionSchema):
schema = schema.optionsDict.get(token.type, schema)
parser = _PARSERS.get(token.type)
if parser is not None:
value = parser(psr, schema)
return value
else:
return JsonInvalid(token.span, schema, token.value)
def parseJsonTokens(tokens: deque[Token], schema: Optional[JsonSchema]) -> tuple[Optional[JsonData], list[GeneralError]]:
"""Recursive JSON parse implementation"""
psr = ParserData(tokens)
token = psr.acceptAnyOf(_PARSERS.keys())
if token is not None:
data = _internalParseTokens(psr, schema)
enrichWithSchema(data, schema)
else:
data = None
return data, psr.errors
@ProfiledFunction(enabled=False)
def parseJsonStr(json_string: str, allowMultilineStr: bool, schema: Optional[JsonSchema], *, cursor: int = 0, line: int = 0, lineStart: int = 0, totalLength: int = -1) -> tuple[Optional[JsonData], list[GeneralError]]:
"""Parses a JSON string into a Python object"""
tokens, errors = tokenizeJson(json_string, allowMultilineStr, cursor=cursor, line=line, lineStart=lineStart, totalLength=totalLength)
value, errors2 = parseJsonTokens(tokens, schema)
errors += errors2
if len(tokens) > 1:
errors.append(JsonParseError(
MDStr(f"Invalid JSON at `{tokens[0].value}`"),
tokens[0].span
))
return value, errors
```
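A minimal usage sketch of the entry point above (hypothetical; assumes the Datapack-Editor packages are importable — without a schema every node's `schema` is `None`):
```python
# Hypothetical usage sketch of parseJsonStr, for illustration only.
data, errors = parseJsonStr('{"name": "example", "pinned": true}', allowMultilineStr=False, schema=None)
if errors:
    for error in errors:
        print(error)  # JsonParseError instances carrying a message and a span
else:
    print(data)  # a JsonObject whose values are JsonString/JsonBool leaves
```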
#### File: model/json/validator.py
```python
from dataclasses import replace
from typing import Protocol, Type
from Cat.utils.collections_ import AddToDictDecorator, getIfKeyIssubclass
from model.json.core import *
from model.json.core import JsonInvalid, JsonSemanticsError
from model.json.jsonContext import getJsonStringContext
from model.messages import *
from model.utils import Message, Span
EXPECTED_ARGUMENT_SEPARATOR_MSG = Message("Expected whitespace to end one argument, but found trailing data: `{0}`", 1)
NO_JSON_SCHEMA_MSG = Message("No JSON Schema for {0}", 1)
NO_JSON_SCHEMA_VALIDATOR_MSG = Message("No JSON Schema validator for {0}", 1)
MISSING_JSON_STRING_HANDLER_MSG = Message("Missing JsonStringHandler for type `{0}`", 1)
DUPLICATE_PROPERTY_MSG = Message("Duplicate property `{0}`", 1)
UNKNOWN_PROPERTY_MSG = Message("Unknown property `{0}`", 1)
MISSING_MANDATORY_PROPERTY_MSG = Message("Missing mandatory property `{0}`", 1)
def wrongTypeError(expected: JsonSchema, got: JsonData):
msg = EXPECTED_BUT_GOT_MSG.format(expected.asString, got.typeName)
return JsonSemanticsError(msg, got.span)
def validateJson(data: JsonData) -> list[JsonSemanticsError]:
errors: list[JsonSemanticsError] = list[JsonSemanticsError]()
if data.schema is not None:
_validateInternal(data, errorsIO=errors)
else:
msg = NO_JSON_SCHEMA_MSG.format(data.typeName)
errors.append(JsonSemanticsError(msg, Span(data.span.start)))
return errors
def _validateInternal(data: JsonData, *, errorsIO: list[JsonSemanticsError]) -> None:
if data.schema is None:
msg = NO_JSON_SCHEMA_MSG.format(data.typeName)
errorsIO.append(JsonSemanticsError(msg, data.span))
return
validator = getSchemaValidator(type(data), None)
if validator is None:
msg = NO_JSON_SCHEMA_VALIDATOR_MSG.format(data.typeName)
errorsIO.append(JsonSemanticsError(msg, data.span, 'info'))
else:
validator(data, errorsIO=errorsIO)
class ValidatorFunc(Protocol):
def __call__(self, data: JsonData, *, errorsIO: list[JsonSemanticsError]) -> None:
pass
VALIDATORS_FOR_SCHEMAS: dict[Type[JsonData], ValidatorFunc] = {}
schemaValidator = AddToDictDecorator(VALIDATORS_FOR_SCHEMAS)
def getSchemaValidator(key: Type[JsonData], default):
return getIfKeyIssubclass(VALIDATORS_FOR_SCHEMAS, key, default)
@schemaValidator(JsonInvalid)
def validateJsonInvalid(data: JsonInvalid, *, errorsIO: list[JsonSemanticsError]) -> None:
errorsIO.append(wrongTypeError(data.schema, data))
return
@schemaValidator(JsonNull)
def validateJsonNull(data: JsonNull, *, errorsIO: list[JsonSemanticsError]) -> None:
if not isinstance(data.schema, JsonNullSchema):
errorsIO.append(wrongTypeError(data.schema, data))
return
@schemaValidator(JsonBool)
def validateJsonBool(data: JsonBool, *, errorsIO: list[JsonSemanticsError]) -> None:
if not isinstance(data.schema, JsonBoolSchema):
errorsIO.append(wrongTypeError(data.schema, data))
return
@schemaValidator(JsonNumber)
def validateJsonNumber(data: JsonNumber, *, errorsIO: list[JsonSemanticsError]) -> None:
if not isinstance(data.schema, JsonNumberSchema):
errorsIO.append(wrongTypeError(data.schema, data))
return
if isinstance(data.schema, JsonIntSchema) and type(data.data) is float:
msg = EXPECTED_BUT_GOT_MSG.format('integer', 'float')
errorsIO.append(JsonSemanticsError(msg, data.span))
if not data.schema.min <= data.data <= data.schema.max:
msg = NUMBER_OUT_OF_BOUNDS_MSG.format(data.schema.min, data.schema.max)
errorsIO.append(JsonSemanticsError(msg, data.span))
@schemaValidator(JsonString)
def validateJsonString(data: JsonString, *, errorsIO: list[JsonSemanticsError]) -> None:
if not isinstance(data.schema, JsonStringSchema):
errorsIO.append(wrongTypeError(data.schema, data))
return
# TODO: validation of JsonString using JsonStringSchema.type
if data.schema.type is not None:
argumentHandler = getJsonStringContext(data.schema.type.name)
if argumentHandler is not None:
argumentHandler.validate(data, errorsIO)
else:
errorsIO.append(JsonSemanticsError(INTERNAL_ERROR_MSG.format(MISSING_JSON_STRING_HANDLER_MSG, data.schema.type.name), data.span, style='info'))
@schemaValidator(JsonArray)
def validateJsonArray(data: JsonArray, *, errorsIO: list[JsonSemanticsError]) -> None:
if not isinstance(data.schema, JsonArraySchema):
errorsIO.append(wrongTypeError(data.schema, data))
return
for element in data.data:
_validateInternal(element, errorsIO=errorsIO)
@schemaValidator(JsonObject)
def validateJsonObject(data: JsonObject, *, errorsIO: list[JsonSemanticsError]) -> None:
if not isinstance(data.schema, JsonObjectSchema):
errorsIO.append(wrongTypeError(data.schema, data))
return
validatedProps: set[str] = set()
key: str
prop: JsonProperty
for key, prop in data.data.items():
if key in validatedProps:
msg = DUPLICATE_PROPERTY_MSG.format(repr(key))
errorsIO.append(JsonSemanticsError(msg, prop.key.span))
else:
validatedProps.add(key)
if key not in data.schema.propertiesDict:
msg = UNKNOWN_PROPERTY_MSG.format(repr(key))
errorsIO.append(JsonSemanticsError(msg, prop.key.span))
continue
_validateInternal(prop.value, errorsIO=errorsIO)
for propSchema in data.schema.properties:
if propSchema.name not in validatedProps and propSchema.mandatory:
msg = MISSING_MANDATORY_PROPERTY_MSG.format(repr(propSchema.name))
end = data.span.end
start = replace(end, column=end.column - 1, index=end.index - 1)
errorsIO.append(JsonSemanticsError(msg, Span(start, end)))
```
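As a rough illustration of how this validator pairs with the parser above (hypothetical; `my_schema` is a placeholder for a schema built from the `model.json.core` types, whose constructors are not shown here):
```python
# Hypothetical sketch: parse with a schema, then run the semantic checks.
# validateJson reports a NO_JSON_SCHEMA_MSG info entry when data.schema is None,
# so a schema is needed for meaningful type/property validation.
data, parse_errors = parseJsonStr(text, allowMultilineStr=False, schema=my_schema)
if data is not None:
    semantic_errors = validateJson(data)  # wrong types, unknown/missing properties, ...
```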
#### File: Datapack-Editor/model/Model.py
```python
from __future__ import annotations
import os
import traceback
from json import JSONDecodeError
from typing import TypeVar, Optional
from Cat.CatPythonGUI.AutoGUI import propertyDecorators as pd
from Cat.Serializable import RegisterContainer, SerializableContainer, Serialized, Computed, ComputedCached, SerializedPropertyABC
from Cat.extensions import FilesChangedDependency, SingleFileChangedDependency
from Cat.utils.collections_ import OrderedDict
from Cat.utils.profiling import logError
from model.datapackContents import ResourceLocation, collectAllEntries, DatapackContents, MetaInfo, choicesFromResourceLocations
from model.parsing.contextProvider import Suggestions
from model.pathUtils import getAllFilesFromSearchPath, fileNameFromFilePath, FilePathTpl
from settings import applicationSettings
_TT = TypeVar('_TT')
@RegisterContainer
class PackDescription(SerializableContainer):
__slots__ = ()
pack_format: int = Serialized(default=0)
description: str = Serialized(default='')
@RegisterContainer
class Pack(SerializableContainer):
__slots__ = ()
pack: PackDescription = Serialized(default_factory=PackDescription)
@RegisterContainer
class Datapack(SerializableContainer):
__slots__ = ()
def __typeCheckerInfo___(self):
# giving the type checker a helping hand...
self.name: str = ''
self.meta: PackDescription = PackDescription()
self.files: list[FilePathTpl] = []
self.allLocalMcFunctions: OrderedDict[ResourceLocation, FilePathTpl] = OrderedDict()
self.contents: DatapackContents = DatapackContents()
path: str = Serialized(default='')
isZipped: bool = ComputedCached(getInitValue=lambda s: os.path.isfile(s.path), dependencies_=[path])
@ComputedCached(dependencies_=[path])
def name(self) -> str:
return fileNameFromFilePath(self.path)
@pd.Framed()
@ComputedCached(dependencies_=[SingleFileChangedDependency(path.map(lambda p: os.path.join(p, 'pack.mcmeta'), str))])
def meta(self) -> PackDescription:
descriptionPath = os.path.join(self.path, 'pack.mcmeta')
try:
with open(descriptionPath, "r") as f:
text = f.read()
pack = Pack.fromJSON(text, onError=logError)
except (JSONDecodeError, FileNotFoundError, AttributeError, TypeError) as e:
logError(f"Unable to load meta information for datapack at '{self.path}'': \n{traceback.format_exc()}")
return PackDescription()
return pack.pack
description: str = meta.description
@ComputedCached(dependencies_=[FilesChangedDependency(path, 'data/**')])
def files(self) -> list[FilePathTpl]:
allLocalFiles: list[FilePathTpl] = []
pathInFolder = 'data/**'
pathInZip = 'data/**'
rawLocalFiles = getAllFilesFromSearchPath(self.path, pathInFolder, pathInZip, extensions=tuple(), excludes=None)
partitioner = pathInFolder[:-2]
localFiles = []
for jf in rawLocalFiles:
if isinstance(jf, tuple):
localFiles.append(jf)
else:
jfPart = jf.rpartition(partitioner)
localFiles.append((jfPart[0], jfPart[1] + jfPart[2],))
allLocalFiles.extend(localFiles)
return allLocalFiles
@ComputedCached(dependencies_=[FilesChangedDependency(path, 'data/**')])
def allLocalMcFunctions(self) -> OrderedDict[ResourceLocation, FilePathTpl]:
result = OrderedDict[ResourceLocation, FilePathTpl]()
for fullPath in self.files:
dpPath, filePath = fullPath
if filePath.startswith('data/'):
filePath = filePath.removeprefix('data/')
else:
continue
namespace, _, path = filePath.partition('/')
if path.startswith('functions/'):
path = path.removeprefix('functions/')
path = path.removesuffix('.mcfunction')
isTag = False
elif path.startswith('tags/functions/'):
path = path.removeprefix('tags/functions/')
path = path.removesuffix('.json')
isTag = True
else:
continue
resourceLocation = ResourceLocation(namespace, path, isTag)
result[resourceLocation] = filePath
return result
@ComputedCached(dependencies_=[FilesChangedDependency(path, 'data/**')])
def contents(self) -> DatapackContents:
contents = DatapackContents()
# The former hard-coded allEntryHandlers list (EntryHandlerInfo entries for the
# tag folders, the worldgen folders, advancements, functions, item_modifiers,
# loot_tables, predicates, recipes, structures, dimension and dimension_type)
# was removed; the handlers now come from the session's datapack data, see below.
from session.session import getSession
allEntryHandlers = getSession().datapackData.structure
collectAllEntries(self.files, allEntryHandlers, contents)
return contents
@RegisterContainer
class World(SerializableContainer):
__slots__ = ()
def __typeCheckerInfo___(self):
# giving the type checker a helping hand...
self.name: str = ''
self.datapackPaths: list[str] = []
self.datapacks: list[Datapack] = []
path: str = Serialized(default='', decorators=[pd.FolderPath()])
@Computed()
def isValid(self) -> bool:
return len(self.path) > 0 and os.path.isdir(self.path)
@ComputedCached(dependencies_=[path])
def name(self) -> str:
return fileNameFromFilePath(self.path)
@pd.List()
@ComputedCached(dependencies_=[path, FilesChangedDependency(path, 'datapacks/*'), Computed(default_factory=lambda: applicationSettings.minecraft.executable)])
def datapackPaths(self) -> list[str]:
datapacksPath = os.path.join(self.path, 'datapacks/')
try:
datapackFiles = [os.path.join(datapacksPath, f) for f in os.listdir(datapacksPath)]
except (JSONDecodeError, FileNotFoundError, AttributeError, TypeError) as e:
logError(f'Unable to find datapacks: \n{traceback.format_exc()}')
return []
minecraftExec = applicationSettings.minecraft.executable
if minecraftExec and os.path.isfile(minecraftExec):
datapackFiles.append(minecraftExec)
return datapackFiles
_oldDatapacks: list[Datapack] = Serialized(default_factory=list, shouldSerialize=False, shouldPrint=False, decorators=[pd.NoUI()])
@pd.List()
@ComputedCached(dependencies_=[datapackPaths])
def datapacks(self) -> list[Datapack]:
oldDps = {dp.name: dp for dp in self._oldDatapacks}
datapacks = [
Datapack.create(path=p)
for p in self.datapackPaths
]
datapacks = [oldDps.get(dp.name, dp) for dp in datapacks]
self._oldDatapacks = datapacks.copy()
return datapacks
def choicesForDatapackContents(text: str, prop: SerializedPropertyABC[Datapack, OrderedDict[ResourceLocation, MetaInfo]]) -> Suggestions:
from session.session import getSession
locations = [b for dp in getSession().world.datapacks for b in prop.get(dp)]
return choicesFromResourceLocations(text, locations)
def metaInfoFromDatapackContents(rl: ResourceLocation, prop: SerializedPropertyABC[Datapack, OrderedDict[ResourceLocation, MetaInfo]]) -> Optional[MetaInfo]:
from session.session import getSession
for dp in getSession().world.datapacks:
# TODO: show prompt, when there are multiple files this applies to.
if (file := prop.get(dp).get(rl)) is not None:
return file
return None
```
#### File: Datapack-Editor/session/session.py
```python
from __future__ import annotations
import gc
import os
import traceback
from json import JSONDecodeError
from typing import NewType, Callable
from Cat.CatPythonGUI.AutoGUI import propertyDecorators as pd
from Cat.Serializable import SerializableContainer, RegisterContainer, Serialized, Computed
from Cat.utils import getExePath, openOrCreate, format_full_exc
from Cat.utils.profiling import logError
from Cat.utils.signals import CatSignal, CatBoundSignal
from model.Model import World
from model.data.mcVersions import MCVersion, getMCVersion
from model.datapack.dpVersion import DPVersion, getDPVersion
from session.documentHandling import DocumentsManager
from settings import applicationSettings
WindowId = NewType('WindowId', str)
@RegisterContainer
class Session(SerializableContainer):
"""docstring for Session"""
__slots__ = ()
def __typeCheckerInfo___(self):
# giving the type checker a helping hand...
pass
world: World = Serialized(default_factory=World)
hasOpenedWorld: bool = Computed(getInitValue=lambda s: bool(s.world.isValid))
documents: DocumentsManager = Serialized(default_factory=DocumentsManager, decorators=[pd.NoUI()])
minecraftData: MCVersion = Computed(default_factory=lambda: getMCVersion(applicationSettings.minecraft.version))
datapackData: DPVersion = Computed(default_factory=lambda: getDPVersion(applicationSettings.minecraft.dpVersion))
def closeWorld(self) -> None:
world = self.world
world.reset()
# resetAllGlobalCaches()
gc.collect()
def openWorld(self, newWorldPath: str) -> None:
self.closeWorld()
self.world.path = newWorldPath
onError: CatBoundSignal[Session, Callable[[Exception, str], None]] = CatSignal[Callable[[Exception, str], None]]('onError')
def showAndLogError(self, e: Exception, title: str = 'Error') -> None:
self.onError.emit(e, title)
__session = Session()
__session.onError.connect('logError', lambda e, title: logError(f'{title}:', format_full_exc(e)))
def getSession() -> Session:
return __session
def setSession(session: Session):
global __session
assert isinstance(session, Session)
if session is not __session:
__session.reset()
__session.copyFrom(session)
def getSessionFilePath() -> str:
return os.path.join(os.path.dirname(getExePath()), 'sessions', 'session1.json')
def loadSessionFromFile(filePath: str = None) -> None:
def _logError(ex, s):
logError(ex, s)
raise ex
if filePath is None:
filePath = getSessionFilePath()
try:
with open(filePath, "r") as inFile:
session = getSession().fromJSON(inFile.read(), onError=_logError)
setSession(session)
# deferredSelf = getSession().fromJSONDefer(inFile.read(), onError=_logError)
# setSession(next(deferredSelf))
# next(deferredSelf)
except (JSONDecodeError, FileNotFoundError, AttributeError, TypeError, RuntimeError) as e:
logError(f'Unable to load session: \n{traceback.format_exc()}')
def saveSessionToFile(filePath: str = None) -> None:
if filePath is None:
filePath = getSessionFilePath()
with openOrCreate(filePath, "w") as outFile:
getSession().toJSON(outFile)
from model import commands
commands.setGetSession(getSession)
``` |
{
"source": "JoachimFlottorp/pajbot",
"score": 3
} |
#### File: apiwrappers/authentication/access_token.py
```python
import datetime
from abc import ABC, abstractmethod
import pajbot
class AccessToken(ABC):
SHOULD_REFRESH_THRESHOLD = 0.9
"""Fraction between 0 and 1 indicating what fraction/percentage of the specified full validity period
should actually be utilized. E.g. if this is set to 0.9, the implementation will refresh the token
once at least 90% of the full validity period (expires_in) is over."""
def __init__(self, access_token, created_at, expires_in, token_type, refresh_token, scope):
self.access_token = access_token
self.created_at = created_at
# can both be None
self.expires_in = expires_in
if self.expires_in is not None:
self.expires_at = self.created_at + self.expires_in
else:
self.expires_at = None
self.token_type = token_type
# can be None
self.refresh_token = refresh_token
# always a list, can be empty list
self.scope = scope
@abstractmethod
def can_refresh(self):
pass
def should_refresh(self):
"""Returns True if less than 10% of the token's lifetime remains, False otherwise"""
if not self.can_refresh():
return False
# intended lifetime of the token
if self.expires_at is not None:
expires_after = self.expires_at - self.created_at
else:
# this is a token that never expires
# because we don't want any issues, refresh it anyways
expires_after = datetime.timedelta(hours=1)
# how much time has passed since token creation
token_age = pajbot.utils.now() - self.created_at
# maximum token age before token should be refreshed (90% of the total token lifetime)
max_token_age = expires_after * self.SHOULD_REFRESH_THRESHOLD
# expired?
return token_age >= max_token_age
def jsonify(self):
"""serialize for storage"""
if self.expires_in is None:
expires_in_milliseconds = None
else:
expires_in_milliseconds = self.expires_in.total_seconds() * 1000
return {
"access_token": self.access_token,
"created_at": self.created_at.timestamp() * 1000,
"expires_in": expires_in_milliseconds,
"token_type": self.token_type,
"refresh_token": self.refresh_token,
"scope": self.scope,
}
@classmethod
def from_json(cls, json_data):
"""deserialize json produced by jsonify()"""
if json_data["expires_in"] is None:
expires_in = None
else:
expires_in = datetime.timedelta(milliseconds=json_data["expires_in"])
return cls(
access_token=json_data["access_token"],
created_at=pajbot.utils.datetime_from_utc_milliseconds(json_data["created_at"]),
expires_in=expires_in,
token_type=json_data["token_type"],
refresh_token=json_data["refresh_token"],
scope=json_data["scope"],
)
@classmethod
def from_api_response(cls, response):
"""Construct new object from twitch response json data"""
# expires_in is only missing for old Client-IDs to which twitch will respond with
# infinitely-lived tokens (the "expires_in" field is absent in that case).
expires_in_seconds = response.get("expires_in", None)
if expires_in_seconds is None:
expires_in = None
else:
expires_in = datetime.timedelta(seconds=expires_in_seconds)
return cls(
access_token=response["access_token"],
created_at=pajbot.utils.now(),
expires_in=expires_in,
token_type=response["token_type"],
refresh_token=response.get("refresh_token", None),
scope=response.get("scope", []),
)
@abstractmethod
def refresh(self, api):
pass
class UserAccessToken(AccessToken):
def can_refresh(self):
return self.refresh_token is not None
def refresh(self, api):
if not self.can_refresh():
raise ValueError("This user access token cannot be refreshed, it has no refresh token")
return api.refresh_user_access_token(self.refresh_token)
@staticmethod
def from_implicit_auth_flow_token(access_token):
return UserAccessToken(
access_token=access_token,
created_at=None,
expires_in=None,
token_type="bearer",
refresh_token=None,
scope=[],
)
class AppAccessToken(AccessToken):
def can_refresh(self):
return True
def refresh(self, api):
return api.get_app_access_token(self.scope)
```
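The 90 % rule in `should_refresh` reduces to simple timedelta arithmetic; a self-contained sketch of the same check (independent of pajbot, for illustration only):
```python
import datetime

SHOULD_REFRESH_THRESHOLD = 0.9

def should_refresh(created_at: datetime.datetime, expires_in: datetime.timedelta) -> bool:
    # refresh once at least 90% of the validity period has elapsed
    token_age = datetime.datetime.now(datetime.timezone.utc) - created_at
    return token_age >= expires_in * SHOULD_REFRESH_THRESHOLD

# a token issued 55 minutes ago with a 1-hour lifetime is past the 54-minute mark
issued = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=55)
assert should_refresh(issued, datetime.timedelta(hours=1))
```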
#### File: pajbot/managers/twitter.py
```python
from __future__ import annotations
from typing import TYPE_CHECKING, List, Optional, Union
import datetime
import json
import logging
import threading
from pajbot.managers.db import DBManager
from pajbot.models.twitter import TwitterUser
from pajbot.utils import now, stringify_tweet, time_since, tweet_provider_stringify_tweet
import tweepy
from autobahn.twisted.websocket import WebSocketClientFactory, WebSocketClientProtocol
from twisted.internet.protocol import ReconnectingClientFactory
if TYPE_CHECKING:
from pajbot.bot import Bot
from pajbot.models.sock import HandlerParam
log = logging.getLogger(__name__)
class ClientProtocol(WebSocketClientProtocol):
def __init__(self, manager: PBTwitterManager) -> None:
super().__init__()
self.manager = manager
def onOpen(self) -> None:
self.manager.client = self
if self.manager.tweepy is None:
log.warning(
"Unable to initialize tweet-provider connection since local twitter credentials are not configured"
)
return
user_ids: List[int] = []
for screen_name in self.manager.relevant_users:
try:
user_id = self.manager.tweepy.get_user(screen_name=screen_name).id
except tweepy.errors.NotFound:
log.warn(f"Twitter user {screen_name} does not exist")
continue
except:
log.exception("Unhandled exception from tweepy.get_user (v1)")
continue
user_ids.append(user_id)
msg = {"type": "set_subscriptions", "data": user_ids}
self.sendMessage(json.dumps(msg).encode("utf8"))
def onMessage(self, payload: bytes, isBinary: bool) -> None:
if isBinary:
return
message = json.loads(payload)
if message["type"] == "tweet":
tweet = message["data"]
if (
tweet["user"]["screen_name"].lower() in self.manager.relevant_users
and not tweet["text"].startswith("RT ")
and tweet["in_reply_to_screen_name"] is None
):
tweet_message = tweet_provider_stringify_tweet(tweet)
self.manager.bot.say(f"B) New cool tweet from {tweet['user']['screen_name']}: {tweet_message}")
log.debug(f"Got tweet: {message['data']}")
else:
log.debug(f"Unhandled message from tweet-provider: {message}")
def onClose(self, wasClean: bool, code: int, reason: str) -> None:
log.info(f"Disconnected from tweet-provider: {reason}")
class ClientFactory(WebSocketClientFactory, ReconnectingClientFactory):
maxDelay = 30
manager: Optional[PBTwitterManager] = None
def buildProtocol(self, addr):
if self.manager is None:
raise ValueError("ClientFactory's manager not initialized")
proto = ClientProtocol(self.manager)
proto.factory = self
return proto
def clientConnectionFailed(self, connector, reason) -> None:
log.debug(f"Connection failed to PBTwitterManager: {reason}")
self.retry(connector)
def clientConnectionLost(self, connector, reason) -> None:
log.debug(f"Connection lost to PBTwitterManager: {reason}")
self.retry(connector)
class MyStreamListener(tweepy.Stream):
def __init__(self, bot: Bot):
self.relevant_users: List[str] = []
self.bot = bot
if "twitter" not in bot.config:
return
twitter_config = bot.config["twitter"]
super().__init__(
twitter_config["consumer_key"],
twitter_config["consumer_secret"],
twitter_config["access_token"],
twitter_config["access_token_secret"],
)
def on_status(self, status: tweepy.models.Status) -> None:
if (
status.user.screen_name.lower() in self.relevant_users
and not status.text.startswith("RT ")
and status.in_reply_to_screen_name is None
):
log.debug("On status from tweepy: %s", status.text)
tweet_message = stringify_tweet(status)
self.bot.say(f"B) New cool tweet from {status.user.screen_name}: {tweet_message}")
def on_request_error(self, status_code: int) -> None:
log.warning("Unhandled in twitter stream: %s", status_code)
super().on_request_error(status_code)  # tweepy v4 renamed on_error to on_request_error
class GenericTwitterManager:
def __init__(self, bot: Bot) -> None:
self.bot = bot
self.twitter_api: Optional[tweepy.API] = None
self.twitter_client: Optional[tweepy.Client] = None
self.listener: Union[None, MyStreamListener, PBTwitterManager] = None
if self.bot:
self.bot.socket_manager.add_handler("twitter.follow", self.on_twitter_follow)
self.bot.socket_manager.add_handler("twitter.unfollow", self.on_twitter_unfollow)
if "twitter" not in bot.config:
return
twitter_config = bot.config["twitter"]
self.use_twitter_stream = "streaming" in twitter_config and twitter_config["streaming"] == "1"
try:
self.twitter_auth = tweepy.OAuthHandler(twitter_config["consumer_key"], twitter_config["consumer_secret"])
self.twitter_auth.set_access_token(twitter_config["access_token"], twitter_config["access_token_secret"])
self.twitter_api = tweepy.API(self.twitter_auth)
self.twitter_client = tweepy.Client(
twitter_config["bearer_token"],
self.twitter_auth.consumer_key,
self.twitter_auth.consumer_secret,
self.twitter_auth.access_token,
self.twitter_auth.access_token_secret
)
except:
log.exception("Twitter authentication failed.")
def on_twitter_follow(self, _data: HandlerParam) -> None:
log.info("TWITTER FOLLOW")
self.reload()
def on_twitter_unfollow(self, _data: HandlerParam) -> None:
log.info("TWITTER UNFOLLOW")
self.reload()
def reload(self) -> None:
if self.listener:
self.listener.relevant_users = []
with DBManager.create_session_scope() as db_session:
for user in db_session.query(TwitterUser):
if user.username is None:
log.warning(f"Twitter user with DB ID {user.id} has a null username")
continue
self.listener.relevant_users.append(user.username)
def follow_user(self, username: str) -> bool:
"""Add `username` to our relevant_users list."""
if not self.listener:
log.error("No twitter listener set up")
return False
if username in self.listener.relevant_users:
log.warning(f"Already following {username}")
return False
with DBManager.create_session_scope() as db_session:
db_session.add(TwitterUser(username))
self.listener.relevant_users.append(username)
log.info(f"Now following {username}")
return True
def unfollow_user(self, username: str) -> bool:
"""Stop following `username`, if we are following him."""
if not self.listener:
log.error("No twitter listener set up")
return False
if username not in self.listener.relevant_users:
log.warning(f"Trying to unfollow someone we are not following (2) {username}")
return False
self.listener.relevant_users.remove(username)
with DBManager.create_session_scope() as db_session:
user = db_session.query(TwitterUser).filter_by(username=username).one_or_none()
if not user:
log.warning("Trying to unfollow someone we are not following")
return False
db_session.delete(user)
log.info(f"No longer following {username}")
return True
def get_last_tweet(self, username: str) -> str:
if self.twitter_api and self.twitter_client:
try:
id = self.twitter_client.get_user(username=username, tweet_fields=["id"])
public_tweets = self.twitter_client.get_users_tweets(
id.data["id"], max_results=5,
tweet_fields=["created_at","text","in_reply_to_user_id","entities"], expansions=["entities.mentions.username"])
for tweet in public_tweets.data:
if tweet["text"].startswith("RT "):
return
# [TODO](JoachimFlottorp) Fix this lol.
# if public_tweets.includes:
# if not public_tweets.includes[idx].in_reply_to_screen_name is None:
# return
# Tweepy returns naive datetime object (but it's always UTC)
# .replace() makes it timezone-aware :)
created_at = tweet["created_at"].replace(tzinfo=datetime.timezone.utc)
tweet_message = stringify_tweet(tweet)
return f"{tweet_message} ({time_since(now().timestamp(), created_at.timestamp(), time_format='short')} ago)"
except Exception:
log.exception("Exception caught while getting last tweet")
return "FeelsBadMan"
else:
return "Twitter not set up FeelsBadMan"
return "FeelsBadMan"
def quit(self) -> None:
pass
# TwitterManager loads live tweets from Twitter's Streaming API
class TwitterManager(GenericTwitterManager):
def __init__(self, bot: Bot) -> None:
super().__init__(bot)
self.twitter_stream: Optional[MyStreamListener] = None
if "twitter" not in bot.config:
return
try:
if self.use_twitter_stream:
self.check_twitter_connection()
bot.execute_every(60 * 5, self.check_twitter_connection)
except:
log.exception("Twitter authentication failed.")
def initialize_twitter_stream(self) -> None:
if self.twitter_stream is None:
self.twitter_stream = MyStreamListener(self.bot)
self.listener = self.twitter_stream
self.reload()
def _run_twitter_stream(self) -> None:
if self.twitter_api is None:
log.warn("Unable to run twitter stream: local twitter client not configured")
return
self.initialize_twitter_stream()
if self.twitter_stream is None:
log.warn("Unable to run twitter stream: twitter stream failed to initialize")
return
user_ids = []
with DBManager.create_session_scope() as db_session:
for user in db_session.query(TwitterUser):
try:
twitter_user: tweepy.User = self.twitter_api.get_user(screen_name=user.username)  # v1 API: returns a tweepy.User with id_str
except tweepy.errors.NotFound:
log.warn(f"Twitter user {user.username} does not exist")
continue
except:
log.exception("Unhandled exception from tweepy.get_user (v1)")
continue
user_ids.append(twitter_user.id_str)
if not user_ids:
return
try:
self.twitter_stream.filter(follow=user_ids, threaded=False)
except:
log.exception("Exception caught in twitter stream _run")
def check_twitter_connection(self) -> None:
"""Check if the twitter stream is running.
If it's not running, try to restart it.
"""
if self.twitter_stream and self.twitter_stream.running:
return
try:
t = threading.Thread(target=self._run_twitter_stream, name="Twitter")
t.daemon = True
t.start()
except:
log.exception("Caught exception while checking twitter connection")
def quit(self) -> None:
if self.twitter_stream:
self.twitter_stream.disconnect()
# PBTwitterManager reads live tweets from a pajbot tweet-provider (https://github.com/pajbot/tweet-provider) instead of Twitter's streaming API
class PBTwitterManager(GenericTwitterManager):
client: Optional[ClientProtocol] = None
tweepy: Optional[tweepy.API] = None
def __init__(self, bot: Bot) -> None:
super().__init__(bot)
self.relevant_users: List[str] = []
PBTwitterManager.bot = bot
PBTwitterManager.tweepy = self.twitter_api
self.listener = self
if "twitter" not in bot.config:
return
self.reload()
log.info("pajbot twitter manager initialized")
from twisted.internet import reactor
twitter_config = bot.config["twitter"]
tweet_provider_host = twitter_config.get("tweet_provider_host", "127.0.0.1")
tweet_provider_port = int(twitter_config.get("tweet_provider_port", 2356))
tweet_provider_protocol = twitter_config.get("tweet_provider_protocol", "ws")
factory = ClientFactory(f"{tweet_provider_protocol}://{tweet_provider_host}:{tweet_provider_port}")
factory.manager = self
reactor.connectTCP(tweet_provider_host, tweet_provider_port, factory) # type:ignore
def follow_user(self, username: str) -> bool:
if self.twitter_api is None:
log.warn("Unable to forward follow to twitter_manager: local twitter client not configured")
return False
ws_client = PBTwitterManager.client
if ws_client is None:
log.warn("Unable to forward follow to twitter_manager: not connected")
return False
ret = super().follow_user(username)
if ret is True:
try:
user = self.twitter_api.get_user(screen_name=username)  # v1 API: User has a numeric .id
except tweepy.errors.NotFound:
log.warn(f"Twitter user {username} does not exist")
return False
except:
log.exception("Unhandled exception from tweepy.get_user (v1)")
return False
msg = {"type": "insert_subscriptions", "data": [user.id]}
ws_client.sendMessage(json.dumps(msg).encode("utf8"))
return ret
def unfollow_user(self, username: str) -> bool:
if self.twitter_api is None:
log.warn("Unable to forward unfollow to twitter_manager: local twitter client not configured")
return False
ws_client = PBTwitterManager.client
if ws_client is None:
log.warn("Unable to forward unfollow to twitter_manager: not connected")
return False
ret = super().unfollow_user(username)
if ret is True:
try:
user = self.twitter_api.get_user(screen_name=username)  # v1 API: User has a numeric .id
except tweepy.errors.NotFound:
log.warn(f"Twitter user {username} does not exist")
return False
except:
log.exception("Unhandled exception from tweepy.get_user (v1)")
return False
msg = {"type": "remove_subscriptions", "data": [user.id]}
ws_client.sendMessage(json.dumps(msg).encode("utf8"))
return ret
```
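For reference, the JSON messages `PBTwitterManager` exchanges with the tweet-provider websocket, as used above (field layout inferred from this file only):
```python
import json

# client -> provider: replace, extend or shrink the followed user-id set
set_subs = {"type": "set_subscriptions", "data": [12345, 67890]}
insert_sub = {"type": "insert_subscriptions", "data": [11111]}
remove_sub = {"type": "remove_subscriptions", "data": [11111]}

# provider -> client: a tweet event, handled in ClientProtocol.onMessage
tweet_event = {
    "type": "tweet",
    "data": {
        "text": "hello chat",
        "in_reply_to_screen_name": None,
        "user": {"screen_name": "pajlada"},
    },
}

payload = json.dumps(set_subs).encode("utf8")  # what sendMessage() transmits
```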
#### File: migration_revisions/db/0016_delete_hsbet_module.py
```python
def up(cursor, bot):
cursor.execute("DROP TABLE hsbet_game, hsbet_bet")
cursor.execute("DROP TYPE hsbet_outcome")
```
#### File: pajbot/models/moderation_action.py
```python
from typing import Dict, Optional, Union
import logging
from contextlib import contextmanager
from dataclasses import dataclass
log = logging.getLogger(__name__)
# @dataclass: https://stackoverflow.com/a/62699260/4464702 (Python 3.7 feature)
@dataclass
class Untimeout:
pass
@dataclass
class Unban:
pass
@dataclass
class Timeout:
duration: int
reason: Optional[str]
once: bool
@dataclass
class Ban:
reason: Optional[str]
# Type alias
ModerationAction = Union[Untimeout, Unban, Timeout, Ban]
def _combine_reasons(a: Optional[str], b: Optional[str]) -> Optional[str]:
if a is None and b is None:
return None
if a is None:
return b
if b is None:
return a
return f"{a} + {b}"
class ModerationActions:
# Maps login -> action to execute
actions: Dict[str, ModerationAction]
def __init__(self) -> None:
super().__init__()
self.actions = {}
def add(self, login: str, action: ModerationAction) -> None:
if login not in self.actions:
self.actions[login] = action
return
existing_action = self.actions[login]
if isinstance(action, Ban):
if isinstance(existing_action, Ban):
# combine the two
self.actions[login] = Ban(reason=_combine_reasons(existing_action.reason, action.reason))
else:
# ban wins over lower-tier action
self.actions[login] = action
return
if isinstance(action, Timeout):
if isinstance(existing_action, Ban):
# Existing action is higher-tier
return
if isinstance(existing_action, Timeout):
# combine the two
self.actions[login] = Timeout(
duration=max(action.duration, existing_action.duration),
reason=_combine_reasons(existing_action.reason, action.reason),
once=existing_action.once and action.once,
)
else:
# timeout wins over lower-tier action
self.actions[login] = action
return
if isinstance(action, Unban):
if isinstance(existing_action, Ban) or isinstance(existing_action, Timeout):
# Existing action is higher-tier
return
if isinstance(existing_action, Unban):
# two unbans, nothing to combine
pass
else:
# unban wins over lower-tier untimeout
self.actions[login] = action
return
# we have an untimeout action
# if the current action was higher-tier we wouldn't have to do anything
# if the current action was also an untimeout we wouldn't have to do anything
# there are no tiers below an untimeout
# the case where there is no pre-existing action is already handled way at the top of this method
# so, in essence, there is nothing to do here.
def execute(self, bot) -> None:
for login, action in self.actions.items():
if isinstance(action, Ban):
bot.ban_login(login, action.reason)
if isinstance(action, Timeout):
bot.timeout_login(login, action.duration, action.reason, action.once)
if isinstance(action, Unban):
bot.unban_login(login)
if isinstance(action, Untimeout):
bot.untimeout_login(login)
@contextmanager
def new_message_processing_scope(bot):
bot.thread_locals.moderation_actions = ModerationActions()
try:
yield
finally:
mod_actions = bot.thread_locals.moderation_actions
bot.thread_locals.moderation_actions = None
try:
mod_actions.execute(bot)
except:
log.exception("Failed to execute moderation actions after message processing scope ended")
```
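A hypothetical usage sketch of the precedence rules above (ban > timeout > unban > untimeout), assuming a `bot` object exposing the moderation helpers this module calls:
```python
# Hypothetical sketch; "bot" must provide ban_login/timeout_login/unban_login/untimeout_login.
from pajbot.models.moderation_action import ModerationActions, Timeout, Ban, Unban

actions = ModerationActions()
actions.add("alice", Timeout(duration=600, reason="spam", once=False))
actions.add("alice", Timeout(duration=300, reason="links", once=True))
actions.add("bob", Unban())
actions.add("bob", Ban(reason="bot account"))

# alice: one combined Timeout(duration=600, reason="spam + links", once=False)
# bob: the Ban wins over the lower-tier Unban
actions.execute(bot)
```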
#### File: modules/basic/debug.py
```python
import collections
import logging
from pajbot.managers.db import DBManager
from pajbot.models.command import Command, CommandExample
from pajbot.models.user import User
from pajbot.modules import BaseModule, ModuleType
from pajbot.modules.basic import BasicCommandsModule
log = logging.getLogger(__name__)
class DebugModule(BaseModule):
ID = __name__.split(".")[-1]
NAME = "Debug"
DESCRIPTION = "Debug commands and users"
CATEGORY = "Feature"
ENABLED_DEFAULT = True
MODULE_TYPE = ModuleType.TYPE_ALWAYS_ENABLED
PARENT_MODULE = BasicCommandsModule
HIDDEN = True
@staticmethod
def debug_command(bot, source, message, **rest):
if not message or len(message) <= 0:
bot.whisper(source, "Usage: !debug command (COMMAND_ID|COMMAND_ALIAS)")
return False
try:
command_id = int(message)
except ValueError:
command_id = -1
command = None
if command_id == -1:
potential_cmd = "".join(message.split(" ")[:1]).lower()
if potential_cmd in bot.commands:
command = bot.commands[potential_cmd]
else:
for _, potential_cmd in bot.commands.items():
if potential_cmd.id == command_id:
command = potential_cmd
break
if command is None:
bot.whisper(source, "No command found with the given parameters.")
return False
data = collections.OrderedDict()
data["id"] = command.id
data["level"] = command.level
data["type"] = command.action.type if command.action is not None else "???"
data["cost"] = command.cost
data["cd_all"] = command.delay_all
data["cd_user"] = command.delay_user
data["mod_only"] = command.mod_only
data["sub_only"] = command.sub_only
if data["type"] == "message":
data["response"] = command.action.response
elif data["type"] == "func" or data["type"] == "rawfunc":
data["cb"] = command.action.cb.__name__
bot.whisper(source, ", ".join(["%s=%s" % (key, value) for (key, value) in data.items()]))
@staticmethod
def debug_user(bot, source, message, **options):
if not message or len(message) <= 0:
bot.whisper(source, "Usage: !debug user USERNAME")
return False
username = message.split(" ")[0]
with DBManager.create_session_scope() as db_session:
user = User.find_by_user_input(db_session, username)
if user is None:
bot.whisper(source, "No user with this username found.")
return False
# TODO the time_in_chat_ properties could be displayed in a more user-friendly way
# current output format is time_in_chat_online=673800.0, time_in_chat_offline=7651200.0
data = user.jsonify()
bot.whisper(source, ", ".join([f"{key}={value}" for (key, value) in data.items()]))
def load_commands(self, **options):
self.commands["debug"] = Command.multiaction_command(
level=100,
delay_all=0,
delay_user=0,
default=None,
commands={
"command": Command.raw_command(
self.debug_command,
level=250,
description="Debug a command",
examples=[
CommandExample(
None,
"Debug a command",
chat="user:!debug command ping\n"
"bot>user: id=210, level=100, type=message, cost=0, cd_all=10, cd_user=30, mod_only=False, sub_only=False, response=Snusbot has been online for $(tb:bot_uptime)",
description="",
).parse()
],
),
"user": Command.raw_command(
self.debug_user,
level=250,
description="Debug a user",
examples=[
CommandExample(
None,
"Debug a user",
chat="user:!debug user snusbot\n"
"bot>user: id=123, login=snusbot, name=Snusbot, level=100, num_lines=45, points=225, tokens=0, last_seen=2016-04-05 17:56:23 CEST, last_active=2016-04-05 17:56:07 CEST, ignored=False, banned=False",
description="",
).parse()
],
),
},
)
```
#### File: modules/basic/selftimeout.py
```python
from __future__ import annotations
from typing import TYPE_CHECKING, Any
import logging
import random
from pajbot.models.command import Command, CommandExample
from pajbot.modules import BaseModule, ModuleSetting
from pajbot.modules.basic import BasicCommandsModule
if TYPE_CHECKING:
from pajbot.bot import Bot
from pajbot.models.user import User
log = logging.getLogger(__name__)
class SelfTimeoutModule(BaseModule):
ID = __name__.rsplit(".", maxsplit=1)[-1]
NAME = "Self timeout"
DESCRIPTION = "Allows users to timeout themselves based on a random duration."
CATEGORY = "Feature"
PARENT_MODULE = BasicCommandsModule
SETTINGS = [
ModuleSetting(
key="subscribers_only",
label="Only allow subscribers to use the !selftimeout command.",
type="boolean",
required=True,
default=False,
),
ModuleSetting(
key="vip_only",
label="Only allow VIPs to use the !selftimeout command.",
type="boolean",
required=True,
default=False,
),
ModuleSetting(
key="global_cd",
label="Global cooldown (seconds)",
type="number",
required=True,
placeholder="",
default=5,
constraints={"min_value": 0, "max_value": 120},
),
ModuleSetting(
key="user_cd",
label="Per-user cooldown (seconds)",
type="number",
required=True,
placeholder="",
default=15,
constraints={"min_value": 0, "max_value": 240},
),
ModuleSetting(
key="level",
label="Level required to use the command",
type="number",
required=True,
placeholder="",
default=100,
constraints={"min_value": 100, "max_value": 2000},
),
ModuleSetting(
key="command_name",
label="Command name (e.g. selftimeout)",
type="text",
required=True,
placeholder="Command name (no !)",
default="selftimeout",
constraints={"min_str_len": 2, "max_str_len": 15},
),
ModuleSetting(
key="low_value",
label="Lowest number to select from",
type="number",
required=True,
placeholder="0",
default=0,
constraints={"min_value": 0},
),
ModuleSetting(
key="high_value",
label="Highest number to select to",
type="number",
required=True,
placeholder="100",
default=100,
constraints={"min_value": 1},
),
ModuleSetting(
key="timeout_unit",
label="Choose the timeout format to use. Maximum Twitch timeout limits are enforced.",
type="options",
required=False,
default="Minutes",
options=["Seconds", "Minutes", "Hours", "Days", "Weeks"],
),
ModuleSetting(
key="zero_response",
label="Additional text to say when the user gets a 0. Text is disabled for moderator rolls.",
type="text",
required=False,
placeholder="You're safe! For now... PRChase",
default="You're safe! For now... PRChase",
constraints={"max_str_len": 100},
),
]
def load_commands(self, **options) -> None:
self.commands[self.settings["command_name"].lower().replace("!", "").replace(" ", "")] = Command.raw_command(
self.selftimeout,
sub_only=self.settings["subscribers_only"],
delay_all=self.settings["global_cd"],
delay_user=self.settings["user_cd"],
level=self.settings["level"],
examples=[
CommandExample(
None,
"Get timed out for a random duration",
chat="user:!selftimeout",
description="You don't get confirmation, only a timeout.",
).parse(),
],
)
# We're converting timeout times to seconds in order to avoid having to specify the unit to Twitch
def seconds_conversion(self, random_value: int) -> int:
if self.settings["timeout_unit"] == "Seconds":
return random_value
if self.settings["timeout_unit"] == "Minutes":
return random_value * 60
if self.settings["timeout_unit"] == "Hours":
return random_value * 3600
if self.settings["timeout_unit"] == "Days":
return random_value * 86400
if self.settings["timeout_unit"] == "Weeks":
return random_value * 604800
# Could raise an exception here instead too
return 0
def selftimeout(self, bot: Bot, source: User, event: Any, **rest) -> bool:
if self.settings["subscribers_only"] and not source.subscriber:
return True
if self.settings["vip_only"] and not source.vip:
return True
if source.moderator is True:
return True
random_value = random.randint(self.settings["low_value"], self.settings["high_value"])
standard_response = f"You got a {random_value}"
if random_value == 0 and self.settings["zero_response"] != "":
bot.send_message_to_user(
source, f"{standard_response}. {self.settings['zero_response']}", event, method="reply"
)
else:
timeout_length = self.seconds_conversion(random_value)
# Check if timeout value is over Twitch's maximum
timeout_length = min(timeout_length, 1209600)
bot.timeout(source, timeout_length, f"{standard_response}!", once=True)
return True
```
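The unit conversion above is a plain multiplier table; an equivalent sketch (not part of the module) that also shows the two-week Twitch cap applied in `selftimeout`:
```python
# Equivalent lookup-table version of seconds_conversion, for illustration only.
SECONDS_PER_UNIT = {
    "Seconds": 1,
    "Minutes": 60,
    "Hours": 3600,
    "Days": 86400,
    "Weeks": 604800,
}

def to_timeout_seconds(value: int, unit: str) -> int:
    seconds = value * SECONDS_PER_UNIT.get(unit, 0)
    return min(seconds, 1209600)  # Twitch maximum: 14 days

assert to_timeout_seconds(3, "Weeks") == 1209600  # capped from 1814400
```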
#### File: modules/chat_alerts/livealert.py
```python
import logging
from pajbot.managers.handler import HandlerManager
from pajbot.modules import BaseModule, ModuleSetting
from pajbot.modules.chat_alerts import ChatAlertModule
log = logging.getLogger(__name__)
class LiveAlertModule(BaseModule):
ID = __name__.split(".")[-1]
NAME = "Live Alert"
DESCRIPTION = "Prints a message in chat when the streamer goes live"
CATEGORY = "Feature"
ENABLED_DEFAULT = False
PARENT_MODULE = ChatAlertModule
SETTINGS = [
ModuleSetting(
key="live_message",
label="Message to post when streamer goes live | Available arguments: {streamer}, {game}, {title}",
type="text",
required=True,
placeholder="{streamer} is now live! PogChamp Streaming {game}: {title}",
default="{streamer} is now live! PogChamp Streaming {game}: {title}",
constraints={"max_str_len": 400},
),
ModuleSetting(
key="extra_message",
label="Extra message to post after the initial live message is posted. Leave empty to disable | Available arguments: {streamer}",
type="text",
required=False,
placeholder="@{streamer} TWEET THAT YOU'RE LIVE OMGScoots",
default="",
constraints={"max_str_len": 400},
),
]
def __init__(self, bot):
super().__init__(bot)
def on_stream_start(self, **rest):
live_chat_message = self.settings["live_message"]
streamer = self.bot.streamer_display
game = self.bot.stream_manager.game
title = self.bot.stream_manager.title
self.bot.say(live_chat_message.format(streamer=streamer, game=game, title=title))
if self.settings["extra_message"] != "":
self.bot.say(self.settings["extra_message"].format(streamer=streamer))
def enable(self, bot):
HandlerManager.add_handler("on_stream_start", self.on_stream_start)
def disable(self, bot):
HandlerManager.remove_handler("on_stream_start", self.on_stream_start)
```
#### File: pajbot/utils/wait_for_redis_data_loaded.py
```python
from __future__ import annotations
from typing import TYPE_CHECKING
import logging
import time
from redis import BusyLoadingError
if TYPE_CHECKING:
from pajbot.managers.redis import RedisType
log = logging.getLogger(__name__)
def wait_for_redis_data_loaded(redis: RedisType) -> None:
while True:
try:
redis.ping()
except BusyLoadingError:
log.warning("Redis not done loading, will retry in 2 seconds...")
time.sleep(2)
continue
break
```
#### File: web/common/menu.py
```python
from __future__ import annotations
from typing import Any, Dict, List, Union
import logging
from pajbot.web.utils import get_cached_enabled_modules
log = logging.getLogger(__name__)
class MenuItem:
def __init__(
self,
href: Union[str, List[MenuItem]],
menu_id: str,
caption: str,
enabled: bool = True,
level: int = 100,
) -> None:
self.href = href
self.id = menu_id
self.caption = caption
self.enabled = enabled
self.level = level
self.type = "single"
if isinstance(self.href, list):
self.type = "multi"
def init(app):
@app.context_processor
def menu() -> Dict[str, Any]:
enabled_modules = get_cached_enabled_modules()
# Menu items that are shown for normal users
menu_items: List[MenuItem] = [
MenuItem("/", "home", "Home"),
MenuItem("/commands", "commands", "Commands"),
MenuItem("/points", "points", "Points", "chatters_refresh" in enabled_modules),
MenuItem("/stats", "stats", "Stats"),
MenuItem("/decks", "decks", "Decks", "deck" in enabled_modules),
MenuItem("/playsounds", "user_playsounds", "Playsounds", "playsound" in enabled_modules),
]
# Menu items that are shown to admin when in an /admin page
admin_menu_items: List[MenuItem] = [
MenuItem("/", "home", "Home"),
MenuItem("/admin", "admin_home", "Admin Home"),
MenuItem(
[
MenuItem("/admin/banphrases", "admin_banphrases", "Banphrases"),
MenuItem("/admin/links/blacklist", "admin_links_blacklist", "Blacklisted links"),
MenuItem("/admin/links/whitelist", "admin_links_whitelist", "Whitelisted links"),
],
"filters",
"Filters",
),
MenuItem("/admin/commands", "admin_commands", "Commands"),
MenuItem("/admin/timers", "admin_timers", "Timers"),
MenuItem("/admin/moderators", "admin_moderators", "Moderators"),
MenuItem("/admin/modules", "admin_modules", "Modules"),
MenuItem("/admin/playsounds", "admin_playsounds", "Playsounds", "playsound" in enabled_modules),
MenuItem("/admin/streamer", "admin_streamer", "Streamer Info"),
]
data = {
"enabled_modules": enabled_modules,
"nav_bar_header": menu_items,
"nav_bar_admin_header": admin_menu_items,
}
return data
``` |
{
"source": "JoachimKuebart-TomTom/conan",
"score": 2
} |
#### File: toolchains/ios/test_using_cmake.py
```python
import platform
import textwrap
import unittest
import pytest
from conan.tools.cmake import CMakeToolchain
from conans.test.utils.tools import TestClient
from ._utils import create_library
@pytest.mark.skipif(platform.system() != "Darwin", reason="Requires XCode")
class ToolchainiOSTestCase(unittest.TestCase):
def setUp(self):
self.t = TestClient()
create_library(self.t)
self._conanfile = textwrap.dedent("""
from conans import ConanFile
from conan.tools.cmake import CMake, CMakeToolchain
class Library(ConanFile):
name = 'hello'
version = '1.0'
settings = 'os', 'arch', 'compiler', 'build_type'
exports_sources = 'hello.h', 'hello.cpp', 'CMakeLists.txt'
options = {{'shared': [True, False]}}
default_options = {{'shared': False}}
def generate(self):
tc = CMakeToolchain(self, generator={generator})
tc.generate()
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
self.run("lipo -info Release-iphoneos/libhello.a")
def package(self):
cmake = CMake(self)
cmake.install()
""")
self.t.save({
'ios_profile': textwrap.dedent("""
[settings]
os=iOS
os.version=12.0
arch=armv8
compiler=apple-clang
compiler.version=12.0
compiler.libcxx=libc++
build_type=Release
""")
})
def test_xcode_generator(self):
""" Simplest approach:
https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html#cross-compiling-for-ios-tvos-or-watchos
"""
self.t.save({'conanfile.py': self._conanfile.format(generator='"Xcode"')})
# Build in the cache
self.t.run('create . --profile:build=default --profile:host=ios_profile')
self.assertIn("Non-fat file: Release-iphoneos/libhello.a is architecture: arm64", self.t.out)
# Build locally
self.t.run('install . --profile:host=ios_profile --profile:build=default')
self.t.run_command('cmake . -G"Xcode" -DCMAKE_TOOLCHAIN_FILE={}'.format(CMakeToolchain.filename))
self.t.run_command('cmake --build . --config Release')
self.t.run_command("lipo -info Release-iphoneos/libhello.a")
self.assertIn("Non-fat file: Release-iphoneos/libhello.a is architecture: arm64", self.t.out)
def test_unix_makefiles_generator(self):
pass
```
#### File: toolchains/meson/test_test.py
```python
import os
import platform
import pytest
import textwrap
from conans.test.assets.sources import gen_function_cpp
from conans.test.functional.toolchains.meson._base import TestMesonBase
@pytest.mark.tool_pkg_config
@pytest.mark.skipif(platform.system() == "Windows", reason="Doesn't work in Windows")
class MesonTest(TestMesonBase):
_test_package_meson_build = textwrap.dedent("""
project('test_package', 'cpp')
hello = dependency('hello', version : '>=0.1')
test_package = executable('test_package', 'test_package.cpp', dependencies: hello)
test('test package', test_package)
""")
_test_package_conanfile_py = textwrap.dedent("""
import os
from conans import ConanFile
from conan.tools.meson import Meson, MesonToolchain
class TestConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
generators = "pkg_config"
def generate(self):
tc = MesonToolchain(self)
tc.generate()
def build(self):
meson = Meson(self)
meson.configure()
meson.build()
def test(self):
meson = Meson(self)
meson.configure()
meson.test()
""")
def test_reuse(self):
self.t.run("new hello/0.1 -s")
test_package_cpp = gen_function_cpp(name="main", includes=["hello"], calls=["hello"])
self.t.save({os.path.join("test_package", "conanfile.py"): self._test_package_conanfile_py,
os.path.join("test_package", "meson.build"): self._test_package_meson_build,
os.path.join("test_package", "test_package.cpp"): test_package_cpp})
self.t.run("create . hello/0.1@ %s" % self._settings_str)
self._check_binary()
```
#### File: toolchains/microsoft/vcvars_test.py
```python
import platform
import textwrap
import os
import pytest
from conans.test.utils.tools import TestClient
@pytest.mark.skipif(platform.system() not in ["Windows"], reason="Requires Windows")
@pytest.mark.parametrize("group", ["build", "run", None])
def test_vcvars_generator(group):
client = TestClient(path_with_spaces=False)
conanfile = textwrap.dedent("""
from conans import ConanFile
from conan.tools.microsoft import VCVars
class TestConan(ConanFile):
settings = "os", "compiler", "arch", "build_type"
def generate(self):
VCVars(self).generate({})
""".format('group="{}"'.format(group) if group else ""))
client.save({"conanfile.py": conanfile})
client.run('install . -s os=Windows -s compiler="msvc" -s compiler.version=19.1 '
'-s compiler.cppstd=14 -s compiler.runtime=static')
assert os.path.exists(os.path.join(client.current_folder, "conanvcvars.bat"))
if group in ("build", None):
bat_contents = client.load("conanbuild.bat")
assert "conanvcvars.bat" in bat_contents
else:
assert not os.path.exists(os.path.join(client.current_folder, "conanbuild.bat"))
@pytest.mark.skipif(platform.system() not in ["Windows"], reason="Requires Windows")
def test_vcvars_generator_string():
client = TestClient(path_with_spaces=False)
conanfile = textwrap.dedent("""
from conans import ConanFile
class TestConan(ConanFile):
generators = "VCVars"
settings = "os", "compiler", "arch", "build_type"
""")
client.save({"conanfile.py": conanfile})
client.run('install . -s os=Windows -s compiler="msvc" -s compiler.version=19.1 '
'-s compiler.cppstd=14 -s compiler.runtime=static')
assert os.path.exists(os.path.join(client.current_folder, "conanvcvars.bat"))
```
#### File: tools/microsoft/test_msbuild.py
```python
import mock
import os
import textwrap
from mock import Mock
from conan.tools.microsoft import MSBuild, MSBuildToolchain
from conans.model.conf import ConfDefinition
from conans.model.env_info import EnvValues
from conans.test.utils.mocks import ConanFileMock, MockSettings
from conans.tools import load
from conans import ConanFile, Settings
def test_msbuild_cpu_count():
c = ConfDefinition()
c.loads(textwrap.dedent("""\
tools.microsoft.msbuild:max_cpu_count=23
tools.build:processes=10
"""))
settings = MockSettings({"build_type": "Release",
"compiler": "gcc",
"compiler.version": "7",
"os": "Linux",
"arch": "x86_64"})
conanfile = ConanFileMock()
conanfile.settings = settings
conanfile.conf = c.get_conanfile_conf(None)
msbuild = MSBuild(conanfile)
cmd = msbuild.command('project.sln')
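    # With both confs set, tools.microsoft.msbuild:max_cpu_count takes
    # precedence over tools.build:processes, hence the expected /m:23 below.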
assert '/m:23' in cmd
def test_msbuild_toolset():
settings = Settings({"build_type": ["Release"],
"compiler": {"msvc": {"version": ["19.3"]}},
"os": ["Windows"],
"arch": ["x86_64"]})
conanfile = ConanFile(Mock(), None)
conanfile.settings = "os", "compiler", "build_type", "arch"
conanfile.initialize(settings, EnvValues())
conanfile.settings.build_type = "Release"
conanfile.settings.compiler = "msvc"
conanfile.settings.compiler.version = "19.3"
conanfile.settings.os = "Windows"
conanfile.settings.arch = "x86_64"
msbuild = MSBuildToolchain(conanfile)
assert 'v143' in msbuild.toolset
def test_msbuild_standard():
settings = Settings({"build_type": ["Release"],
"compiler": {"msvc": {"version": ["19.3"], "cppstd": ["20"]}},
"os": ["Windows"],
"arch": ["x86_64"]})
conanfile = ConanFile(Mock(), None)
conanfile.folders.set_base_generators(".")
conanfile.install_folder = os.getcwd()
conanfile.conf = ConfDefinition()
conanfile.settings = "os", "compiler", "build_type", "arch"
conanfile.initialize(settings, EnvValues())
conanfile.settings.build_type = "Release"
conanfile.settings.compiler = "msvc"
conanfile.settings.compiler.version = "19.3"
conanfile.settings.compiler.cppstd = "20"
conanfile.settings.os = "Windows"
conanfile.settings.arch = "x86_64"
msbuild = MSBuildToolchain(conanfile)
with mock.patch("conan.tools.microsoft.visual.vcvars_path", mock.MagicMock(return_value=".")):
msbuild.generate()
assert '<LanguageStandard>stdcpp20</LanguageStandard>' in load('conantoolchain_release_x64.props')
```
#### File: tools/gnu/autotools.py
```python
import os
from conan.tools.files import load_toolchain_args
from conan.tools.gnu.make import make_jobs_cmd_line_arg
from conan.tools.microsoft import unix_path
from conans.client.build import join_arguments
class Autotools(object):
def __init__(self, conanfile):
self._conanfile = conanfile
toolchain_file_content = load_toolchain_args(self._conanfile.generators_folder)
self._configure_args = toolchain_file_content.get("configure_args")
self._make_args = toolchain_file_content.get("make_args")
def configure(self, build_script_folder=None):
"""
http://jingfenghanmax.blogspot.com.es/2010/09/configure-with-host-target-and-build.html
https://gcc.gnu.org/onlinedocs/gccint/Configure-Terms.html
"""
# FIXME: Conan 2.0 Are we keeping the "should_XXX" properties???
if not self._conanfile.should_configure:
return
source = self._conanfile.source_folder
if build_script_folder:
source = os.path.join(self._conanfile.source_folder, build_script_folder)
configure_cmd = "{}/configure".format(source)
configure_cmd = unix_path(self._conanfile, configure_cmd)
cmd = "{} {}".format(configure_cmd, self._configure_args)
self._conanfile.output.info("Calling:\n > %s" % cmd)
self._conanfile.run(cmd)
def make(self, target=None):
make_program = self._conanfile.conf["tools.gnu:make_program"]
if make_program is None:
make_program = "mingw32-make" if self._use_win_mingw() else "make"
str_args = self._make_args
jobs = ""
if "-j" not in str_args and "nmake" not in make_program.lower():
jobs = make_jobs_cmd_line_arg(self._conanfile) or ""
command = join_arguments([make_program, target, str_args, jobs])
self._conanfile.run(command)
def install(self):
if not self._conanfile.should_install:
return
self.make(target="install")
def _use_win_mingw(self):
if hasattr(self._conanfile, 'settings_build'):
os_build = self._conanfile.settings_build.get_safe('os')
else:
os_build = self._conanfile.settings.get_safe("os")
if os_build == "Windows":
compiler = self._conanfile.settings.get_safe("compiler")
sub = self._conanfile.settings.get_safe("os.subsystem")
if sub in ("cygwin", "msys2", "msys") or compiler == "qcc":
return False
else:
if self._conanfile.win_bash:
return False
return True
return False
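# Hypothetical usage sketch inside a conanfile's build() method (assumes the
# matching toolchain generator already wrote the saved configure/make
# arguments); illustrative, not part of the original module:
#   autotools = Autotools(self)
#   autotools.configure()
#   autotools.make()
#   autotools.install()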
```
#### File: tools/google/toolchain.py
```python
from conan.tools._check_build_profile import check_using_build_profile
from conan.tools.files import save_toolchain_args
class BazelToolchain(object):
def __init__(self, conanfile):
self._conanfile = conanfile
check_using_build_profile(self._conanfile)
def generate(self):
save_toolchain_args({
"bazel_config": self._conanfile.conf["tools.google.bazel:config"],
"bazelrc_path": self._conanfile.conf["tools.google.bazel:bazelrc_path"]
})
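# Hypothetical usage sketch (assumes conanfile.conf provides the
# tools.google.bazel entries); illustrative, not part of the original module:
#   toolchain = BazelToolchain(conanfile)
#   toolchain.generate()  # persists bazel_config and bazelrc_path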
``` |
{
"source": "JoachimLandtmeters/pNEUMA",
"score": 3
} |
#### File: pNEUMA/pneumapackage/logger.py
```python
import logging
import pneumapackage.settings
import inspect
import os
import datetime
def log(message, level=None, print_message=False):
"""
    Record a message in the log file and/or print it to the console
    Parameters
    ----------
    message : string
        the message to record
    level : int
        logging level of the message
    print_message : bool
        whether to also print the message to the console
    Returns
    -------
    None
"""
if level is None:
level = pneumapackage.settings.log_level
if pneumapackage.settings.log_to_file:
# create a new logger with the calling script's name, or access the existing one
frm = inspect.stack()[1]
mod = inspect.getmodule(frm[0])
logger = get_logger(mod.__name__)
if level == logging.DEBUG:
logger.debug(message)
elif level == logging.INFO:
logger.info(message)
elif level == logging.WARNING:
logger.warning(message)
elif level == logging.ERROR:
logger.error(message)
if print_message:
print(message)
def get_logger(name):
"""
Create a logger or return the current one if already instantiated.
Parameters
----------
    name : string
        name of the logger
    Returns
    -------
    logging.Logger
"""
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
filename = pneumapackage.settings.log_filename
# if a logger with this name is not already set up
if not getattr(logger, 'handler_set', None):
# get today's date and construct a log filename
todays_date = datetime.datetime.today().strftime('%Y_%m_%d')
log_filename = os.path.join(pneumapackage.settings.log_folder, '{}_{}.log'.format(filename, todays_date))
# if the logs folder does not already exist, create it
if not os.path.exists(pneumapackage.settings.log_folder):
os.makedirs(pneumapackage.settings.log_folder)
# create file handler and log formatter and set them up
handler = logging.FileHandler(log_filename, encoding='utf-8')
formatter = logging.Formatter('%(asctime)s %(levelname)s @%(name)s.py: %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.handler_set = True
return logger
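# Hypothetical usage sketch (assumes pneumapackage.settings enables
# log_to_file); illustrative, not part of the original module:
#   log('trajectory data loaded', level=logging.INFO, print_message=True)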
``` |
{
"source": "joachimlindborg/onion_omega",
"score": 3
} |
#### File: joachimlindborg/onion_omega/STK08-temp-sensor.py
```python
import time
from temperatureSensor import TemperatureSensor
import oneWire
# setup onewire and polling interval
oneWireGpio = 19 # set the sensor GPIO
pollingInterval = 1 # seconds
def __main__():
# check if 1-Wire is setup in the kernel
if not oneWire.setupOneWire(str(oneWireGpio)):
print "Kernel module could not be inserted. Please reboot and try again."
return -1
# get the address of the temperature sensor
# it should be the only device connected in this experiment
sensorAddress = oneWire.scanOneAddress()
# instantiate the temperature sensor object
sensor = TemperatureSensor("oneWire", { "address": sensorAddress, "gpio": oneWireGpio })
if not sensor.ready:
print "Sensor was not set up correctly. Please make sure that your sensor is firmly connected to the GPIO specified above and try again."
return -1
# infinite loop - runs main program code continuously
while 1:
# check and print the temperature
value = sensor.readValue()
print "T = " + str(value) + " C"
time.sleep(pollingInterval)
if __name__ == '__main__':
__main__()
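# Illustrative console output while running (the value will vary; the format
# is set by the print statement above):
#   T = 21.5 C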
``` |
{
"source": "joachimlindborg/PowerClient",
"score": 3
} |
#### File: PowerClient/power_client/main.py
```python
import machine
import time  # needed for time.sleep_ms() in blink()
blinkpin = machine.Pin(2, machine.Pin.OUT)
def toggle(state=None):
blinkpin.value(not blinkpin.value())
def blink(times=1,pause=300):
while times > 0:
times-=1
toggle()
time.sleep_ms(pause)
toggle()
time.sleep_ms(pause)
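# Example: blink(times=3, pause=150) flashes the on-board LED three times
# with 150 ms on/off intervals.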
SCL_PIN_ID=5
SDA_PIN_ID=4
I2C_FREQ=100000
#scl = machine.Pin(SCL_PIN_ID,machine.Pin.OUT)
scl = machine.Pin(SCL_PIN_ID)
sda = machine.Pin(SDA_PIN_ID)
#i2c = machine.I2C(scl = machine.Pin(SCL_PIN_ID,machine.Pin.OUT),
i2c = machine.I2C(scl = machine.Pin(SCL_PIN_ID),
sda = machine.Pin(SDA_PIN_ID),
freq = I2C_FREQ)
import powertrend
#from powertrend import *
D=powertrend.Powertrend(i2c)
D.main_run()
``` |
{
"source": "JoachimLippold/BayerShopper.SendIssues",
"score": 2
} |
#### File: JoachimLippold/BayerShopper.SendIssues/bayershopper_sendissues.py
```python
u"""
Upload of the issues to Salesforce
TODO: check whether the tour date is correct
"""
import os, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'classes')))
import datetime
import logging
import json
from optparse import OptionParser
from configparser import SafeConfigParser
from simple_salesforce import Salesforce, SalesforceLogin, SalesforceAuthenticationFailed
import requests
from issues import Issues
from classes.salesforce_connect import SalesforceConnect
class App(object):
u"""Hauptklasse der Applikation. Hier werden die grundlegenden Applikationsglobalen Variablen initialisiert.
"""
APPNAME = os.path.splitext(os.path.abspath(sys.argv[0]))[0]
""" private """
    _instance, _session, _session_id, _sf_instance = (None,)*4
_loggingLevels = { logging.NOTSET: "NOTSET", logging.DEBUG: "DEBUG", logging.INFO: "INFO",
logging.WARNING: "WARNING", logging.ERROR: "ERROR", logging.CRITICAL: "CRITICAL" }
""" public """
config, logger, options, args, session, salesforce = (None,)*6
def __init__(self):
self.initConfig()
self.initOptionParser()
self.initLogging()
self.initSalesforce()
#self.checkArguments()
def initConfig(self):
u"""
        Read the configuration.
        Reads the configuration from a file named <SCRIPTNAME>.cfg that is
        located in the same directory as the script.
        The configuration file has the following structure:
        <pre>
        [salesforce]
        soapUsername = <SALESFORCE-USER>
        soapPassword = <PASSWORD>
        soapSecurityToken = <SALESFORCE-SECURITY-TOKEN>
        soapSandbox = False|True
        soapVersion = <VERSION> ; currently 38.0
        [logging]
        formatstring = %%(asctime)s - %%(filename)s - %%(funcName)s - %%(levelname)s - %%(message)s
        </pre>
        The 'salesforce' section contains the credentials for Bayer's
        Salesforce server. The [logging] section defines the format of the
        log string.
"""
self.config = SafeConfigParser()
self.config.readfp(open(self.APPNAME + '.cfg'))
def initLogging(self):
u"""
        Initialize logging to a file.
        Log messages can be written to an external file with
        self.logger.<LEVEL>. The log level is set with the -v or --verbose
        parameter when the script is invoked. The default level is 'ERROR'.
        The following levels are available, in ascending order:
        * DEBUG
        * INFO
        * WARNING
        * ERROR
        * CRITICAL
        Only messages at or above the configured log level are emitted. If
        'WARNING' is set, for example, only messages with the levels
        'WARNING', 'ERROR' and 'CRITICAL' are written; 'DEBUG' and 'INFO'
        are suppressed.
        By default the file name is the script name with the extension .log.
"""
try:
loggingLevel = next(key for key, value in self._loggingLevels.items() if value == self.options.verbose)
except (StopIteration,):
loggingLevel = logging.NOTSET
logging.basicConfig(filename=self.options.logging, format=self.config.get('logging', 'formatstring'),
filemode='a')
self.logger = logging.getLogger(self.APPNAME + ".logger")
self.logger.setLevel(loggingLevel)
self.logger.debug("options = {:s}" . format(str(self.options)))
def initOptionParser(self):
u"""
        Initialize the option parser.
        The script can be invoked with various options, which are handled by
        the OptionParser. The following options are currently available:
        -v, --verbose <LOGLEVEL>
        Loglevel: [DEBUG, INFO, WARNING, ERROR, CRITICAL]
        -l, --logging <LOGFILE>
        Name of the log file. Default is <SCRIPTNAME>.log
        -h, --help
        Help text
"""
USAGE = "usage: %prog [options] /path/to/excelfile tourdate"
DESCRIPTION = u"""Kommandozeilentool zum Upload der Issues zum Salesforce-Server von Bayer.
Das Skript erwartet als Parameter den Pfad zur Excel-Datei und das Datum zu dem die Inspections markiert
wurden. Die Excel-Datei muss dabei mindestens 4 Spalten enthalten. Die Spalte "A" enthält entweder
die Salesforce-Id der Apotheke oder die Salesforce-Id der Inspection. Das Skript ermittelt selbstständig,
welche ID angegeben wurde. Die Spalten "B" bis "D" enthalten die Issue-Texte. Die Reihenfolge der Spalten
spielt keine Rolle, jedoch müssen die Spaltennamen mit "AD", "SW" oder "BT" beginnen.
"""
VERSION = "1.0"
parser = OptionParser(usage=USAGE, version=VERSION, description=DESCRIPTION)
parser.add_option("-v", "--verbose", dest="verbose", default="ERROR",
choices=[value for key, value in self._loggingLevels.items()],
help="Loglevel: [" + ', '.join([value for key, value in self._loggingLevels.items()]) + ")")
parser.add_option("-l", "--logging", dest="logging", default=self.APPNAME + ".log",
help="Name and path of logfile")
parser.add_option("-q", "--quiet", dest="quiet", action="store_true", help="don't show progress")
(self.options, self.args) = parser.parse_args()
def checkArguments(self):
if len(self.args) < 2:
self.logger.critical('Too few arguments')
            sys.exit('Too few arguments.')
if not os.path.isfile(self.args[0]):
self.logger.critical('File not found: {:s}' . format(self.args[0]))
            sys.exit('File \'{:s}\' not found' . format(self.args[0]))
try:
date = datetime.datetime.strptime(self.args[1], '%d.%m.%Y')
except ValueError as msg:
self.logger.critical('{:s} is not a valid date' . format(self.args[1]))
            sys.exit('\'{:s}\' is not a valid date' . format(self.args[1]))
def initSalesforce(self):
u"""
        Initializes the Salesforce connection.
        Opens a connection to the Salesforce server and establishes a
        corresponding session. Salesforce can then be accessed with
        app.salesforce.<OBJECT>.<METHOD>().
        Example:
        app.salesforce.Shopper_Inspection__c.update(<INSPECTION_ID>,
        { <KEY>: <VALUE>[, <KEY>: <VALUE>[, ...]] })
        performs an update on a record of the Shopper_Inspection__c table.
"""
self.session = requests.Session()
try:
self._session_id, self._sf_instance = SalesforceLogin(
username=self.config.get('salesforce', 'soapUsername'),
password=self.config.get('salesforce', 'soapPassword'),
sf_version=self.config.get('salesforce', 'soapVersion'))
#sandbox=(self.config.get('salesforce', 'soapSandbox') == 'True'))
except SalesforceAuthenticationFailed as e:
self.logger.critical("login to salesforce failed: {:s}" . format(e.message))
print("Login to salesforce failed: {:s}".format(e.message))
exit()
self.salesforce = Salesforce(instance=self._sf_instance, session_id=self._session_id, session=self.session)
def __new__(self, *args, **kwargs):
u"""
        Instantiate the main class as a singleton. Bad style, I know, but I do not know of anything better... :-)
"""
if not self._instance:
self._instance = super(App, self).__new__(self, *args, **kwargs)
return self._instance
def printProgressBar(self, iteration, total, prefix = '', suffix = '', decimals = 1, length = 70, fill = '#'):
u"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
"""
percent = ("{0:." + str(decimals) + "f}") . format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
sys.stdout.write('\r{:s} [{:s}] {:s}% {:s}' . format(prefix, bar, percent, suffix))
sys.stdout.flush()
if iteration == total:
sys.stdout.write("\n\n")
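    # printProgressBar example: self.printProgressBar(5, 10, prefix='Upload')
    # renders a half-filled bar at 50.0% on a single, continuously
    # rewritten line.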
if __name__ == '__main__':
app = App()
app.logger.debug("options: {0}, args: {1}" . format(app.options, app.args))
sfc = SalesforceConnect(app, app.args[1])
results = sfc.getInspectionIds()
issues = Issues(app, app.args[0], app.args[1])
``` |
{
"source": "joachimmetz/artifacts-kb",
"score": 3
} |
#### File: artifacts-kb/tools/check_artifacts.py
```python
import argparse
import logging
import os
import sys
from artifacts import definitions as artifacts_definitions
from artifacts import reader as artifacts_reader
from artifacts import registry as artifacts_registry
from dfimagetools import helpers as dfimagetools_helpers
from dfvfs.helpers import command_line as dfvfs_command_line
from dfvfs.helpers import volume_scanner as dfvfs_volume_scanner
from dfvfs.lib import errors as dfvfs_errors
from artifactsrc import volume_scanner
def Main():
"""The main program function.
Returns:
bool: True if successful or False if not.
"""
argument_parser = argparse.ArgumentParser(description=(
'Checks artifact definitions on a storage media image.'))
argument_parser.add_argument(
'--artifact_definitions', '--artifact-definitions',
dest='artifact_definitions', type=str, metavar='PATH', action='store',
help=('Path to a directory or file containing the artifact definition '
'.yaml files.'))
argument_parser.add_argument(
'--back_end', '--back-end', dest='back_end', action='store',
metavar='NTFS', default=None, help='preferred dfVFS back-end.')
argument_parser.add_argument(
'--partitions', '--partition', dest='partitions', action='store',
type=str, default=None, help=(
'Define partitions to be processed. A range of partitions can be '
'defined as: "3..5". Multiple partitions can be defined as: "1,3,5" '
'(a list of comma separated values). Ranges and lists can also be '
'combined as: "1,3..5". The first partition is 1. All partitions '
'can be specified with: "all".'))
argument_parser.add_argument(
'--snapshots', '--snapshot', dest='snapshots', action='store', type=str,
default=None, help=(
'Define snapshots to be processed. A range of snapshots can be '
'defined as: "3..5". Multiple snapshots can be defined as: "1,3,5" '
'(a list of comma separated values). Ranges and lists can also be '
'combined as: "1,3..5". The first snapshot is 1. All snapshots can '
'be specified with: "all".'))
argument_parser.add_argument(
'--volumes', '--volume', dest='volumes', action='store', type=str,
default=None, help=(
'Define volumes to be processed. A range of volumes can be defined '
'as: "3..5". Multiple volumes can be defined as: "1,3,5" (a list '
'of comma separated values). Ranges and lists can also be combined '
'as: "1,3..5". The first volume is 1. All volumes can be specified '
'with: "all".'))
argument_parser.add_argument(
'-w', '--windows_version', '--windows-version',
dest='windows_version', action='store', metavar='Windows XP',
default=None, help='string that identifies the Windows version.')
argument_parser.add_argument(
'source', nargs='?', action='store', metavar='image.raw',
default=None, help='path of the storage media image.')
options = argument_parser.parse_args()
if not options.source:
print('Path to source storage media image is missing.')
print('')
argument_parser.print_help()
print('')
return False
if not options.artifact_definitions:
print('Path to artifact definitions is missing.')
print('')
argument_parser.print_help()
print('')
return False
dfimagetools_helpers.SetDFVFSBackEnd(options.back_end)
logging.basicConfig(
level=logging.INFO, format='[%(levelname)s] %(message)s')
registry = artifacts_registry.ArtifactDefinitionsRegistry()
reader = artifacts_reader.YamlArtifactsReader()
if os.path.isdir(options.artifact_definitions):
registry.ReadFromDirectory(reader, options.artifact_definitions)
elif os.path.isfile(options.artifact_definitions):
registry.ReadFromFile(reader, options.artifact_definitions)
mediator = dfvfs_command_line.CLIVolumeScannerMediator()
scanner = volume_scanner.ArtifactDefinitionsVolumeScanner(
registry, mediator=mediator)
volume_scanner_options = dfvfs_volume_scanner.VolumeScannerOptions()
volume_scanner_options.partitions = mediator.ParseVolumeIdentifiersString(
options.partitions)
if options.snapshots == 'none':
volume_scanner_options.snapshots = ['none']
else:
volume_scanner_options.snapshots = mediator.ParseVolumeIdentifiersString(
options.snapshots)
volume_scanner_options.volumes = mediator.ParseVolumeIdentifiersString(
options.volumes)
try:
if not scanner.ScanForOperatingSystemVolumes(
options.source, options=volume_scanner_options):
print('Unable to retrieve an operating system volume from: {0:s}.'.format(
options.source))
print('')
return False
definitions_with_check_results = {}
for artifact_definition in registry.GetDefinitions():
group_only = True
for source in artifact_definition.sources:
if source.type_indicator != (
artifacts_definitions.TYPE_INDICATOR_ARTIFACT_GROUP):
group_only = False
break
if group_only:
# Not interested in results of group-only artifact definitions.
continue
check_result = scanner.CheckArtifactDefinition(artifact_definition)
if check_result.number_of_file_entries:
definitions_with_check_results[artifact_definition.name] = check_result
except dfvfs_errors.ScannerError as exception:
print('[ERROR] {0!s}'.format(exception), file=sys.stderr)
print('')
return False
except KeyboardInterrupt:
print('Aborted by user.', file=sys.stderr)
print('')
return False
  print('Artifact definitions found:')
for name, check_result in sorted(definitions_with_check_results.items()):
text = '* {0:s} [results: {1:d}]'.format(
name, check_result.number_of_file_entries)
if check_result.data_formats:
text = '{0:s} [formats: {1:s}]'.format(
text, ', '.join(sorted(check_result.data_formats)))
print(text)
print('')
return True
if __name__ == '__main__':
if not Main():
sys.exit(1)
else:
sys.exit(0)
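# Example invocation (illustrative paths):
#   python check_artifacts.py --artifact_definitions ./artifacts/data image.raw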
``` |
{
"source": "joachimmetz/dfvfs-snippets",
"score": 3
} |
#### File: dfvfs-snippets/scripts/list_file_entries.py
```python
import abc
import argparse
import logging
import sys
from dfvfs.analyzer import analyzer
from dfvfs.analyzer import fvde_analyzer_helper
from dfvfs.helpers import command_line
from dfvfs.helpers import volume_scanner
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.lib import errors
from dfvfs.resolver import resolver
from scripts import helpers
try:
# Disable experimental FVDE support.
analyzer.Analyzer.DeregisterHelper(fvde_analyzer_helper.FVDEAnalyzerHelper())
except KeyError:
pass
class FileEntryLister(volume_scanner.VolumeScanner):
"""File entry lister."""
_NON_PRINTABLE_CHARACTERS = list(range(0, 0x20)) + list(range(0x7f, 0xa0))
_ESCAPE_CHARACTERS = str.maketrans({
value: '\\x{0:02x}'.format(value)
for value in _NON_PRINTABLE_CHARACTERS})
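  # Example: a path segment containing a line feed (0x0a) is rendered as
  # "\x0a" so that every entry stays on a single output line.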
def __init__(self, mediator=None):
"""Initializes a file entry lister.
Args:
mediator (VolumeScannerMediator): a volume scanner mediator.
"""
super(FileEntryLister, self).__init__(mediator=mediator)
self._list_only_files = False
def _GetDisplayPath(self, path_spec, path_segments, data_stream_name):
"""Retrieves a path to display.
Args:
path_spec (dfvfs.PathSpec): path specification of the file entry.
path_segments (list[str]): path segments of the full path of the file
entry.
data_stream_name (str): name of the data stream.
Returns:
str: path to display.
"""
display_path = ''
if path_spec.HasParent():
parent_path_spec = path_spec.parent
if parent_path_spec and parent_path_spec.type_indicator == (
dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION):
display_path = ''.join([display_path, parent_path_spec.location])
path_segments = [
segment.translate(self._ESCAPE_CHARACTERS) for segment in path_segments]
display_path = ''.join([display_path, '/'.join(path_segments)])
if data_stream_name:
data_stream_name = data_stream_name.translate(self._ESCAPE_CHARACTERS)
display_path = ':'.join([display_path, data_stream_name])
return display_path or '/'
def _ListFileEntry(
self, file_system, file_entry, parent_path_segments, output_writer):
"""Lists a file entry.
Args:
file_system (dfvfs.FileSystem): file system that contains the file entry.
file_entry (dfvfs.FileEntry): file entry to list.
parent_path_segments (str): path segments of the full path of the parent
file entry.
output_writer (StdoutWriter): output writer.
"""
path_segments = parent_path_segments + [file_entry.name]
display_path = self._GetDisplayPath(file_entry.path_spec, path_segments, '')
if not self._list_only_files or file_entry.IsFile():
output_writer.WriteFileEntry(display_path)
# TODO: print data stream names.
for sub_file_entry in file_entry.sub_file_entries:
self._ListFileEntry(
file_system, sub_file_entry, path_segments, output_writer)
def ListFileEntries(self, base_path_specs, output_writer):
"""Lists file entries in the base path specification.
Args:
base_path_specs (list[dfvfs.PathSpec]): source path specification.
output_writer (StdoutWriter): output writer.
"""
for base_path_spec in base_path_specs:
file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
if file_entry is None:
path_specification_string = helpers.GetPathSpecificationString(
base_path_spec)
logging.warning(
'Unable to open base path specification:\n{0:s}'.format(
path_specification_string))
return
self._ListFileEntry(file_system, file_entry, [], output_writer)
class OutputWriter(object):
"""Output writer interface."""
def __init__(self, encoding='utf-8'):
"""Initializes an output writer.
Args:
encoding (Optional[str]): input encoding.
"""
super(OutputWriter, self).__init__()
self._encoding = encoding
self._errors = 'strict'
def _EncodeString(self, string):
"""Encodes the string.
Args:
string (str): string to encode.
Returns:
bytes: encoded string.
"""
try:
# Note that encode() will first convert string into a Unicode string
# if necessary.
encoded_string = string.encode(self._encoding, errors=self._errors)
except UnicodeEncodeError:
if self._errors == 'strict':
logging.error(
'Unable to properly write output due to encoding error. '
'Switching to error tolerant encoding which can result in '
'non Basic Latin (C0) characters to be replaced with "?" or '
'"\\ufffd".')
self._errors = 'replace'
encoded_string = string.encode(self._encoding, errors=self._errors)
return encoded_string
@abc.abstractmethod
def Close(self):
"""Closes the output writer object."""
@abc.abstractmethod
def Open(self):
"""Opens the output writer object."""
@abc.abstractmethod
def WriteFileEntry(self, path):
"""Writes the file path.
Args:
path (str): path of the file.
"""
class FileOutputWriter(OutputWriter):
"""Output writer that writes to a file."""
def __init__(self, path, encoding='utf-8'):
"""Initializes an output writer.
Args:
path (str): name of the path.
encoding (Optional[str]): input encoding.
"""
super(FileOutputWriter, self).__init__(encoding=encoding)
self._file_object = None
self._path = path
def Close(self):
"""Closes the output writer object."""
self._file_object.close()
def Open(self):
"""Opens the output writer object."""
# Using binary mode to make sure to write Unix end of lines, so we can
# compare output files cross-platform.
self._file_object = open(self._path, 'wb') # pylint: disable=consider-using-with
def WriteFileEntry(self, path):
"""Writes the file path to file.
Args:
path (str): path of the file.
"""
string = '{0:s}\n'.format(path)
encoded_string = self._EncodeString(string)
self._file_object.write(encoded_string)
class StdoutWriter(OutputWriter):
"""Output writer that writes to stdout."""
def Close(self):
"""Closes the output writer object."""
def Open(self):
"""Opens the output writer object."""
def WriteFileEntry(self, path):
"""Writes the file path to stdout.
Args:
path (str): path of the file.
"""
print(path)
def Main():
"""The main program function.
Returns:
bool: True if successful or False if not.
"""
argument_parser = argparse.ArgumentParser(description=(
'Lists file entries in a directory or storage media image.'))
argument_parser.add_argument(
'--back_end', '--back-end', dest='back_end', action='store',
metavar='NTFS', default=None, help='preferred dfVFS back-end.')
argument_parser.add_argument(
'--output_file', '--output-file', dest='output_file', action='store',
metavar='source.hashes', default=None, help=(
'path of the output file, default is to output to stdout.'))
argument_parser.add_argument(
'--partitions', '--partition', dest='partitions', action='store',
type=str, default=None, help=(
'Define partitions to be processed. A range of '
'partitions can be defined as: "3..5". Multiple partitions can '
'be defined as: "1,3,5" (a list of comma separated values). '
'Ranges and lists can also be combined as: "1,3..5". The first '
'partition is 1. All partitions can be specified with: "all".'))
argument_parser.add_argument(
'--snapshots', '--snapshot', dest='snapshots', action='store', type=str,
default=None, help=(
'Define snapshots to be processed. A range of snapshots can be '
'defined as: "3..5". Multiple snapshots can be defined as: "1,3,5" '
'(a list of comma separated values). Ranges and lists can also be '
'combined as: "1,3..5". The first snapshot is 1. All snapshots can '
'be specified with: "all".'))
argument_parser.add_argument(
'--volumes', '--volume', dest='volumes', action='store', type=str,
default=None, help=(
'Define volumes to be processed. A range of volumes can be defined '
'as: "3..5". Multiple volumes can be defined as: "1,3,5" (a list '
'of comma separated values). Ranges and lists can also be combined '
'as: "1,3..5". The first volume is 1. All volumes can be specified '
'with: "all".'))
argument_parser.add_argument(
'source', nargs='?', action='store', metavar='image.raw',
default=None, help='path of the directory or storage media image.')
options = argument_parser.parse_args()
if not options.source:
print('Source value is missing.')
print('')
argument_parser.print_help()
print('')
return False
helpers.SetDFVFSBackEnd(options.back_end)
logging.basicConfig(
level=logging.INFO, format='[%(levelname)s] %(message)s')
if options.output_file:
output_writer = FileOutputWriter(options.output_file)
else:
output_writer = StdoutWriter()
try:
output_writer.Open()
except IOError as exception:
print('Unable to open output writer with error: {0!s}.'.format(
exception))
print('')
return False
mediator = command_line.CLIVolumeScannerMediator()
file_entry_lister = FileEntryLister(mediator=mediator)
volume_scanner_options = volume_scanner.VolumeScannerOptions()
volume_scanner_options.partitions = mediator.ParseVolumeIdentifiersString(
options.partitions)
if options.snapshots == 'none':
volume_scanner_options.snapshots = ['none']
else:
volume_scanner_options.snapshots = mediator.ParseVolumeIdentifiersString(
options.snapshots)
volume_scanner_options.volumes = mediator.ParseVolumeIdentifiersString(
options.volumes)
return_value = True
try:
base_path_specs = file_entry_lister.GetBasePathSpecs(
options.source, options=volume_scanner_options)
if not base_path_specs:
print('No supported file system found in source.')
print('')
return False
file_entry_lister.ListFileEntries(base_path_specs, output_writer)
print('')
print('Completed.')
except errors.ScannerError as exception:
return_value = False
print('')
print('[ERROR] {0!s}'.format(exception))
except KeyboardInterrupt:
return_value = False
print('')
print('Aborted by user.')
output_writer.Close()
return return_value
if __name__ == '__main__':
if not Main():
sys.exit(1)
else:
sys.exit(0)
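# Example invocation (illustrative):
#   python -m scripts.list_file_entries --partitions all image.raw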
``` |
{
"source": "joachimmetz/dfwinreg",
"score": 3
} |
#### File: dfwinreg/tests/regf.py
```python
import unittest
from dfwinreg import errors
from dfwinreg import regf
from tests import test_lib
class FakePyREGFKey(object):
"""Fake pyregf key for testing."""
def __init__(self):
"""Initializes a fake pyregf key."""
super(FakePyREGFKey, self).__init__()
self.number_of_sub_keys = 1
# pylint: disable=invalid-name,redundant-returns-doc,unused-argument
def get_last_written_time_as_integer(self):
"""Retrieves the last written time as an integer.
Returns:
int: last written time, which will be 0 for testing.
"""
return 0
def get_sub_key(self, sub_key_index):
"""Retrieves a specific sub key.
Returns:
pyregf.key: sub key, which will be None for testing.
"""
return None
class FakePyREGFValue(object):
"""Fake pyregf value for testing.
Attributes:
name (str): name of the value.
type (str): value type.
"""
def __init__(self, name='Test', value_type='REG_SZ'):
"""Initializes a fake pyregf value.
Args:
name (Optional[str]): name of the value.
value_type (Optional[str]): value type.
"""
super(FakePyREGFValue, self).__init__()
self.name = name
self.type = value_type
# pylint: disable=missing-raises-doc
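  # The data property below raises unconditionally so that tests can
  # exercise the value read error path.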
@property
def data(self):
"""bytes: value data."""
raise IOError('raised for testing purposes.')
class REGFWinRegistryFileTest(test_lib.BaseTestCase):
"""Tests for the REGF Windows Registry file."""
# pylint: disable=protected-access
# TODO: add tests for _GetCurrentControlSetKey
# TODO: add tests for _GetCurrentControlSetKeyPath
# TODO: add tests for _GetKeyByPathFromFile
def testOpenClose(self):
"""Tests the Open and Close functions."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
registry_file.Close()
def testGetRootKey(self):
"""Tests the GetRootKey function."""
# Test GetRootKey on NTUSER.DAT file
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetRootKey()
self.assertIsNotNone(registry_key)
self.assertIsInstance(registry_key, regf.REGFWinRegistryKey)
self.assertEqual(registry_key.path, '\\')
finally:
registry_file.Close()
# Test GetRootKey on SYSTEM file
test_path = self._GetTestFilePath(['SYSTEM'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetRootKey()
self.assertIsNotNone(registry_key)
self.assertIsInstance(registry_key, regf.VirtualREGFWinRegistryKey)
self.assertEqual(registry_key.path, '\\')
finally:
registry_file.Close()
# Test GetRootKey on NTUSER.DAT.LOG file
registry_file = regf.REGFWinRegistryFile()
test_path = self._GetTestFilePath(['NTUSER.DAT.LOG'])
self._SkipIfPathNotExists(test_path)
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
root_key = registry_file.GetRootKey()
self.assertIsNone(root_key)
finally:
registry_file.Close()
def testGetKeyByPath(self):
"""Tests the GetKeyByPath function."""
test_path = self._GetTestFilePath(['SYSTEM'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.name, '')
self.assertEqual(registry_key.path, key_path)
key_path = '\\ControlSet001'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.name, 'ControlSet001')
self.assertEqual(registry_key.path, key_path)
registry_key = registry_file.GetKeyByPath('ControlSet001')
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.name, 'ControlSet001')
self.assertEqual(registry_key.path, key_path)
key_path = '\\CurrentControlSet'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.name, 'CurrentControlSet')
self.assertEqual(registry_key.path, key_path)
key_path = '\\CurrentControlSet\\Enum'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.name, 'Enum')
self.assertEqual(registry_key.path, key_path)
key_path = '\\Bogus'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNone(registry_key)
finally:
registry_file.Close()
def testRecurseKeys(self):
"""Tests the RecurseKeys function."""
dat_test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(dat_test_path)
log_test_path = self._GetTestFilePath(['NTUSER.DAT.LOG'])
self._SkipIfPathNotExists(log_test_path)
registry_file = regf.REGFWinRegistryFile()
with open(dat_test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_keys = list(registry_file.RecurseKeys())
finally:
registry_file.Close()
self.assertEqual(len(registry_keys), 1597)
registry_file = regf.REGFWinRegistryFile()
with open(log_test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_keys = list(registry_file.RecurseKeys())
finally:
registry_file.Close()
self.assertEqual(len(registry_keys), 0)
class REGFWinRegistryKeyTest(test_lib.BaseTestCase):
"""Tests for the REGF Windows Registry key."""
# pylint: disable=protected-access
def testProperties(self):
"""Tests the properties functions."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\Software'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertIsNone(registry_key.class_name)
self.assertEqual(registry_key.name, 'Software')
self.assertEqual(registry_key.number_of_subkeys, 7)
self.assertEqual(registry_key.number_of_values, 0)
self.assertEqual(registry_key.offset, 4372)
self.assertEqual(registry_key.path, key_path)
self.assertIsNotNone(registry_key.last_written_time)
timestamp = registry_key.last_written_time.timestamp
self.assertEqual(timestamp, 131205170396534120)
registry_key._pyregf_key = FakePyREGFKey()
self.assertIsNotNone(registry_key.last_written_time)
date_time_string = (
registry_key.last_written_time.CopyToDateTimeString())
self.assertEqual(date_time_string, 'Not set')
finally:
registry_file.Close()
def testGetSubkeyByIndex(self):
"""Tests the GetSubkeyByIndex function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile(
key_path_prefix='HKEY_CURRENT_USER')
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetRootKey()
key_name = 'AppEvents'
sub_registry_key = registry_key.GetSubkeyByIndex(0)
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, key_name)
expected_key_path = 'HKEY_CURRENT_USER\\AppEvents'
self.assertEqual(sub_registry_key.path, expected_key_path)
with self.assertRaises(IndexError):
registry_key.GetSubkeyByIndex(-1)
finally:
registry_file.Close()
def testGetSubkeyByName(self):
"""Tests the GetSubkeyByName function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile(
key_path_prefix='HKEY_CURRENT_USER')
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetRootKey()
self.assertIsNotNone(registry_key)
key_name = 'Software'
sub_registry_key = registry_key.GetSubkeyByName(key_name)
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, key_name)
expected_key_path = 'HKEY_CURRENT_USER\\Software'
self.assertEqual(sub_registry_key.path, expected_key_path)
key_name = 'Bogus'
sub_registry_key = registry_key.GetSubkeyByName(key_name)
self.assertIsNone(sub_registry_key)
finally:
registry_file.Close()
def testGetSubkeyByPath(self):
"""Tests the GetSubkeyByPath function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile(
key_path_prefix='HKEY_CURRENT_USER')
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetRootKey()
key_path = 'Software\\Microsoft'
sub_registry_key = registry_key.GetSubkeyByPath(key_path)
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'Microsoft')
expected_key_path = 'HKEY_CURRENT_USER\\Software\\Microsoft'
self.assertEqual(sub_registry_key.path, expected_key_path)
key_path = 'Software\\Bogus'
sub_registry_key = registry_key.GetSubkeyByPath(key_path)
self.assertIsNone(sub_registry_key)
finally:
registry_file.Close()
def testGetSubkeys(self):
"""Tests the GetSubkeys function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\Software'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
sub_registry_keys = list(registry_key.GetSubkeys())
self.assertEqual(len(sub_registry_keys), 7)
finally:
registry_file.Close()
def testGetValueByName(self):
"""Tests the GetValueByName function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetKeyByPath('\\Console')
self.assertIsNotNone(registry_key)
value_name = 'ColorTable14'
registry_value = registry_key.GetValueByName(value_name)
self.assertIsNotNone(registry_value)
self.assertEqual(registry_value.name, value_name)
value_name = 'Bogus'
registry_value = registry_key.GetValueByName(value_name)
self.assertIsNone(registry_value)
# Test retrieving the default (or nameless) value.
registry_key = registry_file.GetKeyByPath(
'\\AppEvents\\EventLabels\\.Default')
self.assertIsNotNone(registry_key)
registry_value = registry_key.GetValueByName('')
self.assertIsNotNone(registry_value)
self.assertIsNone(registry_value.name)
finally:
registry_file.Close()
def testGetValues(self):
"""Tests the GetValues function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\Console'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
values = list(registry_key.GetValues())
self.assertEqual(len(values), 37)
finally:
registry_file.Close()
def testRecurseKeys(self):
"""Tests the RecurseKeys function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\Software'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
registry_keys = list(registry_key.RecurseKeys())
finally:
registry_file.Close()
self.assertEqual(len(registry_keys), 1219)
class VirtualREGFWinRegistryKeyTest(test_lib.BaseTestCase):
"""Tests for the virtual REGF Windows Registry key."""
# pylint: disable=protected-access
def testGetSubkeyByIndex(self):
"""Tests the GetSubkeyByIndex function."""
test_path = self._GetTestFilePath(['SYSTEM'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.path, key_path)
self.assertIsInstance(registry_key, regf.VirtualREGFWinRegistryKey)
self.assertEqual(registry_key.number_of_subkeys, 9)
sub_registry_key = registry_key.GetSubkeyByIndex(0)
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'ControlSet001')
self.assertIsInstance(sub_registry_key, regf.REGFWinRegistryKey)
sub_registry_key = registry_key.GetSubkeyByIndex(8)
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'CurrentControlSet')
self.assertIsInstance(sub_registry_key, regf.VirtualREGFWinRegistryKey)
self.assertEqual(sub_registry_key.number_of_subkeys, 5)
with self.assertRaises(IndexError):
registry_key.GetSubkeyByIndex(9)
finally:
registry_file.Close()
def testGetSubkeyByName(self):
"""Tests the GetSubkeyByName function."""
test_path = self._GetTestFilePath(['SYSTEM'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.path, key_path)
self.assertIsInstance(registry_key, regf.VirtualREGFWinRegistryKey)
self.assertEqual(registry_key.number_of_subkeys, 9)
sub_registry_key = registry_key.GetSubkeyByName('ControlSet001')
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'ControlSet001')
self.assertIsInstance(sub_registry_key, regf.REGFWinRegistryKey)
sub_registry_key = registry_key.GetSubkeyByName('CurrentControlSet')
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'CurrentControlSet')
self.assertIsInstance(sub_registry_key, regf.VirtualREGFWinRegistryKey)
self.assertEqual(sub_registry_key.number_of_subkeys, 5)
# Ensure the virtual CurrentControlSet key does not return another copy
# of itself.
test_registry_key = sub_registry_key.GetSubkeyByName(
'CurrentControlSet')
self.assertIsNone(test_registry_key)
finally:
registry_file.Close()
def testGetSubkeyByPath(self):
"""Tests the GetSubkeyByPath function."""
test_path = self._GetTestFilePath(['SYSTEM'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.path, key_path)
self.assertIsInstance(registry_key, regf.VirtualREGFWinRegistryKey)
self.assertEqual(registry_key.number_of_subkeys, 9)
sub_registry_key = registry_key.GetSubkeyByPath('\\ControlSet001')
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'ControlSet001')
self.assertIsInstance(sub_registry_key, regf.REGFWinRegistryKey)
sub_registry_key = registry_key.GetSubkeyByPath(
'\\ControlSet001\\Enum')
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'Enum')
self.assertEqual(sub_registry_key.path, '\\ControlSet001\\Enum')
self.assertIsInstance(sub_registry_key, regf.REGFWinRegistryKey)
sub_registry_key = registry_key.GetSubkeyByPath('\\CurrentControlSet')
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'CurrentControlSet')
self.assertIsInstance(sub_registry_key, regf.VirtualREGFWinRegistryKey)
sub_registry_key = registry_key.GetSubkeyByPath(
'\\CurrentControlSet\\Enum')
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.name, 'Enum')
self.assertEqual(sub_registry_key.path, '\\CurrentControlSet\\Enum')
self.assertIsInstance(sub_registry_key, regf.REGFWinRegistryKey)
finally:
registry_file.Close()
def testGetSubkeys(self):
"""Tests the GetSubkeys function."""
test_path = self._GetTestFilePath(['SYSTEM'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
key_path = '\\'
registry_key = registry_file.GetKeyByPath(key_path)
self.assertIsNotNone(registry_key)
self.assertEqual(registry_key.path, key_path)
self.assertIsInstance(registry_key, regf.VirtualREGFWinRegistryKey)
self.assertEqual(registry_key.number_of_subkeys, 9)
expected_subkey_names = [
'ControlSet001',
'DriverDatabase',
'HardwareConfig',
'MountedDevices',
'RNG',
'Select',
'Setup',
'WPA',
'CurrentControlSet']
subkey_names = [subkey.name for subkey in registry_key.GetSubkeys()]
self.assertEqual(subkey_names, expected_subkey_names)
sub_registry_key = registry_key.GetSubkeyByPath('\\CurrentControlSet')
self.assertIsNotNone(sub_registry_key)
self.assertEqual(sub_registry_key.number_of_subkeys, 5)
expected_subkey_names = [
'Control',
'Enum',
'Hardware Profiles',
'Policies',
'Services']
subkey_names = [subkey.name for subkey in sub_registry_key.GetSubkeys()]
self.assertEqual(subkey_names, expected_subkey_names)
finally:
registry_file.Close()
class REGFWinRegistryValueTest(test_lib.BaseTestCase):
"""Tests for the REGF Windows Registry value."""
# pylint: disable=protected-access
def testProperties(self):
"""Tests the properties functions on a NTUSER.DAT file."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetKeyByPath('\\Console')
self.assertIsNotNone(registry_key)
value_name = 'ColorTable14'
registry_value = registry_key.GetValueByName(value_name)
expected_data = b'\xff\xff\x00\x00'
self.assertIsNotNone(registry_value)
self.assertEqual(registry_value.data_type, 4)
self.assertEqual(registry_value.data_type_string, 'REG_DWORD_LE')
self.assertEqual(registry_value.GetDataAsObject(), 65535)
self.assertEqual(registry_value.name, value_name)
self.assertEqual(registry_value.offset, 105212)
self.assertEqual(registry_value.data, expected_data)
registry_key = registry_file.GetKeyByPath(
'\\AppEvents\\EventLabels\\CriticalBatteryAlarm')
self.assertIsNotNone(registry_key)
value_name = 'DispFileName'
registry_value = registry_key.GetValueByName(value_name)
expected_data = (
b'@\x00m\x00m\x00r\x00e\x00s\x00.\x00d\x00l\x00l\x00,\x00-\x005'
b'\x008\x002\x007\x00\x00\x00')
self.assertIsNotNone(registry_value)
self.assertEqual(registry_value.data_type, 1)
self.assertEqual(registry_value.data_type_string, 'REG_SZ')
self.assertEqual(registry_value.GetDataAsObject(), '@mmres.dll,-5827')
self.assertEqual(registry_value.name, value_name)
self.assertEqual(registry_value.offset, 62028)
self.assertEqual(registry_value.data, expected_data)
registry_key = registry_file.GetKeyByPath('\\Control Panel\\Appearance')
self.assertIsNotNone(registry_key)
value_name = 'SchemeLangID'
registry_value = registry_key.GetValueByName(value_name)
expected_data = b'\x00\x00'
self.assertIsNotNone(registry_value)
self.assertEqual(registry_value.data_type, 3)
self.assertEqual(registry_value.data_type_string, 'REG_BINARY')
self.assertEqual(registry_value.GetDataAsObject(), expected_data)
self.assertEqual(registry_value.name, value_name)
self.assertEqual(registry_value.offset, 46468)
self.assertEqual(registry_value.data, expected_data)
registry_value._pyregf_value = FakePyREGFValue()
with self.assertRaises(errors.WinRegistryValueError):
_ = registry_value.data
finally:
registry_file.Close()
def testGetDataAsObject(self):
"""Tests the GetDataAsObject function."""
test_path = self._GetTestFilePath(['NTUSER.DAT'])
self._SkipIfPathNotExists(test_path)
registry_file = regf.REGFWinRegistryFile()
with open(test_path, 'rb') as file_object:
registry_file.Open(file_object)
try:
registry_key = registry_file.GetKeyByPath('\\Console')
self.assertIsNotNone(registry_key)
registry_value = registry_key.GetValueByName('ColorTable14')
self.assertIsNotNone(registry_value)
data_object = registry_value.GetDataAsObject()
self.assertEqual(data_object, 65535)
registry_value._pyregf_value = FakePyREGFValue(value_type='REG_SZ')
with self.assertRaises(errors.WinRegistryValueError):
registry_value.GetDataAsObject()
registry_value._pyregf_value = FakePyREGFValue(
value_type='REG_DWORD_LE')
with self.assertRaises(errors.WinRegistryValueError):
registry_value.GetDataAsObject()
registry_value._pyregf_value = FakePyREGFValue(
value_type='REG_MULTI_SZ')
with self.assertRaises(errors.WinRegistryValueError):
registry_value.GetDataAsObject()
# Test REG_MULTI_SZ without additional empty string.
registry_key = registry_file.GetKeyByPath(
'\\Control Panel\\International\\User Profile')
self.assertIsNotNone(registry_key)
registry_value = registry_key.GetValueByName('Languages')
self.assertIsNotNone(registry_value)
data_object = registry_value.GetDataAsObject()
self.assertEqual(len(data_object), 1)
# Test REG_MULTI_SZ with additional empty string.
registry_key = registry_file.GetKeyByPath(
'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\'
'Discardable\\PostSetup\\ShellNew')
self.assertIsNotNone(registry_key)
registry_value = registry_key.GetValueByName('Classes')
self.assertIsNotNone(registry_value)
data_object = registry_value.GetDataAsObject()
self.assertEqual(len(data_object), 9)
finally:
registry_file.Close()
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joachimmetz/imagetools",
"score": 2
} |
#### File: imagetools/dfimagetools/artifact_filters.py
```python
import logging
from artifacts import definitions as artifacts_definitions
from dfvfs.helpers import file_system_searcher as dfvfs_file_system_searcher
from dfimagetools import path_resolver
class ArtifactDefinitionFiltersGenerator(object):
"""Generator of filters based on artifact definitions."""
def __init__(self, artifacts_registry, environment_variables, user_accounts):
"""Initializes an artifact definition filters generator.
Args:
artifacts_registry (artifacts.ArtifactDefinitionsRegistry): artifact
definitions registry.
environment_variables (list[EnvironmentVariable]): environment variables.
user_accounts (list[UserAccount]): user accounts.
"""
super(ArtifactDefinitionFiltersGenerator, self).__init__()
self._artifacts_registry = artifacts_registry
self._environment_variables = environment_variables
self._path_resolver = path_resolver.PathResolver()
self._user_accounts = user_accounts
def _BuildFindSpecsFromArtifactDefinition(self, name):
"""Builds find specifications from an artifact definition.
Args:
name (str): name of the artifact definition.
Yields:
dfvfs.FindSpec: file system (dfVFS) find specification.
"""
definition = self._artifacts_registry.GetDefinitionByName(name)
if not definition:
definition = self._artifacts_registry.GetDefinitionByAlias(name)
if not definition:
logging.warning('Undefined artifact definition: {0:s}'.format(name))
else:
for source in definition.sources:
source_type = source.type_indicator
if source_type not in (
artifacts_definitions.TYPE_INDICATOR_ARTIFACT_GROUP,
artifacts_definitions.TYPE_INDICATOR_DIRECTORY,
artifacts_definitions.TYPE_INDICATOR_FILE,
artifacts_definitions.TYPE_INDICATOR_PATH):
continue
if source_type == artifacts_definitions.TYPE_INDICATOR_DIRECTORY:
logging.warning((
'Use of deprecated source type: directory in artifact '
'definition: {0:s}').format(name))
if source_type == artifacts_definitions.TYPE_INDICATOR_ARTIFACT_GROUP:
for source_name in set(source.names):
for find_spec in self._BuildFindSpecsFromArtifactDefinition(
source_name):
yield find_spec
elif source_type in (
artifacts_definitions.TYPE_INDICATOR_DIRECTORY,
artifacts_definitions.TYPE_INDICATOR_FILE,
artifacts_definitions.TYPE_INDICATOR_PATH):
for source_path in set(source.paths):
for find_spec in self._BuildFindSpecsFromFileSourcePath(
source_path, source.separator):
yield find_spec
def _BuildFindSpecsFromFileSourcePath(self, source_path, path_separator):
"""Builds find specifications from a file source type.
Args:
source_path (str): file system path defined by the source.
path_separator (str): file system path segment separator.
Yields:
dfvfs.FindSpec: file system (dfVFS) find specification.
"""
for path_glob in self._path_resolver.ExpandGlobStars(
source_path, path_separator):
for path in self._path_resolver.ExpandUsersVariable(
path_glob, path_separator, self._user_accounts):
if '%' in path:
path = self._path_resolver.ExpandEnvironmentVariables(
path, path_separator, self._environment_variables)
if not path.startswith(path_separator):
continue
try:
find_spec = dfvfs_file_system_searcher.FindSpec(
case_sensitive=False, location_glob=path,
location_separator=path_separator)
except ValueError as exception:
logging.error((
'Unable to build find specification for path: "{0:s}" with '
'error: {1!s}').format(path, exception))
continue
yield find_spec
def GetFindSpecs(self, names):
"""Retrieves find specifications for one or more artifact definitions.
Args:
names (list[str]): names of the artifact definitions to filter on.
Yields:
dfvfs.FindSpec: file system (dfVFS) find specification.
"""
for name in set(names):
for find_spec in self._BuildFindSpecsFromArtifactDefinition(name):
yield find_spec
```
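For reference, a minimal usage sketch of the filters generator. The registry setup follows the artifacts library's reader/registry API; the definitions directory and the artifact name are illustrative assumptions, not part of the module above.

```python
# Hedged sketch: the definitions path and artifact name are illustrative.
from artifacts import reader as artifacts_reader
from artifacts import registry as artifacts_registry

from dfimagetools import artifact_filters

registry = artifacts_registry.ArtifactDefinitionsRegistry()
reader = artifacts_reader.YamlArtifactsReader()
registry.ReadFromDirectory(reader, '/usr/share/artifacts')  # assumed path

generator = artifact_filters.ArtifactDefinitionFiltersGenerator(
    registry, environment_variables=[], user_accounts=[])

# GetFindSpecs yields one dfVFS FindSpec per matching source path.
for find_spec in generator.GetFindSpecs(['WindowsSystemRegistryFiles']):
    print(find_spec)
```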
#### File: imagetools/dfimagetools/bodyfile.py
```python
from dfdatetime import definitions as dfdatetime_definitions
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.vfs import ntfs_attribute as dfvfs_ntfs_attribute
class BodyfileGenerator(object):
"""Bodyfile generator."""
_NON_PRINTABLE_CHARACTERS = list(range(0, 0x20)) + list(range(0x7f, 0xa0))
_ESCAPE_CHARACTERS = {
'/': '\\/',
':': '\\:',
'\\': '\\\\',
'|': '\\|'}
_ESCAPE_CHARACTERS.update({
value: '\\x{0:02x}'.format(value)
for value in _NON_PRINTABLE_CHARACTERS})
_FILE_TYPES = {
0x1000: 'p',
0x2000: 'c',
0x4000: 'd',
0x6000: 'b',
0xa000: 'l',
0xc000: 's'}
_FILE_ATTRIBUTE_READONLY = 1
_FILE_ATTRIBUTE_SYSTEM = 4
_TIMESTAMP_FORMAT_STRINGS = {
dfdatetime_definitions.PRECISION_1_NANOSECOND: '{0:d}.{1:09d}',
dfdatetime_definitions.PRECISION_100_NANOSECONDS: '{0:d}.{1:07d}',
dfdatetime_definitions.PRECISION_1_MICROSECOND: '{0:d}.{1:06d}',
dfdatetime_definitions.PRECISION_1_MILLISECOND: '{0:d}.{1:03d}'}
def __init__(self):
"""Initializes a bodyfile generator."""
super(BodyfileGenerator, self).__init__()
self._bodyfile_escape_characters = str.maketrans(self._ESCAPE_CHARACTERS)
def _GetFileAttributeFlagsString(self, file_type, file_attribute_flags):
"""Retrieves a bodyfile string representation of file attributes flags.
Args:
file_type (str): bodyfile file type identifier.
file_attribute_flags (int): file attribute flags.
Returns:
str: bodyfile representation of the file attributes flags.
"""
string_parts = [file_type, 'r', '-', 'x', 'r', '-', 'x', 'r', '-', 'x']
if (not file_attribute_flags & self._FILE_ATTRIBUTE_READONLY and
not file_attribute_flags & self._FILE_ATTRIBUTE_SYSTEM):
string_parts[2] = 'w'
string_parts[5] = 'w'
string_parts[8] = 'w'
return ''.join(string_parts)
def _GetModeString(self, mode):
"""Retrieves a bodyfile string representation of a mode.
Args:
mode (int): mode.
Returns:
str: bodyfile representation of the mode.
"""
string_parts = 10 * ['-']
if mode & 0x0001:
string_parts[9] = 'x'
if mode & 0x0002:
string_parts[8] = 'w'
if mode & 0x0004:
string_parts[7] = 'r'
if mode & 0x0008:
string_parts[6] = 'x'
if mode & 0x0010:
string_parts[5] = 'w'
if mode & 0x0020:
string_parts[4] = 'r'
if mode & 0x0040:
string_parts[3] = 'x'
if mode & 0x0080:
string_parts[2] = 'w'
if mode & 0x0100:
string_parts[1] = 'r'
string_parts[0] = self._FILE_TYPES.get(mode & 0xf000, '-')
return ''.join(string_parts)
def _GetTimestamp(self, date_time):
"""Retrieves a bodyfile timestamp representation of a date time value.
Args:
date_time (dfdatetime.DateTimeValues): date time value.
Returns:
str: bodyfile timestamp representation of the date time value.
"""
if not date_time:
return ''
posix_timestamp, fraction_of_second = (
date_time.CopyToPosixTimestampWithFractionOfSecond())
format_string = self._TIMESTAMP_FORMAT_STRINGS.get(
date_time.precision, '{0:d}')
return format_string.format(posix_timestamp, fraction_of_second)
def GetEntries(self, file_entry, path_segments):
"""Retrieves bodyfile entry representations of a file entry.
Args:
file_entry (dfvfs.FileEntry): file entry.
path_segments (list[str]): path segments of the full path of the file entry.
Yields:
str: bodyfile entry.
"""
file_attribute_flags = None
parent_file_reference = None
if file_entry.type_indicator == dfvfs_definitions.TYPE_INDICATOR_NTFS:
mft_attribute_index = getattr(file_entry.path_spec, 'mft_attribute', None)
if mft_attribute_index is not None:
fsntfs_file_entry = file_entry.GetNTFSFileEntry()
file_attribute_flags = fsntfs_file_entry.file_attribute_flags
parent_file_reference = (
fsntfs_file_entry.get_parent_file_reference_by_attribute_index(
mft_attribute_index))
stat_attribute = file_entry.GetStatAttribute()
if stat_attribute.inode_number is None:
inode_string = ''
elif file_entry.type_indicator == dfvfs_definitions.TYPE_INDICATOR_NTFS:
inode_string = '{0:d}-{1:d}'.format(
stat_attribute.inode_number & 0xffffffffffff,
stat_attribute.inode_number >> 48)
else:
inode_string = '{0:d}'.format(stat_attribute.inode_number)
if file_entry.type_indicator != dfvfs_definitions.TYPE_INDICATOR_NTFS:
mode = getattr(stat_attribute, 'mode', None) or 0
mode_string = self._GetModeString(mode)
else:
if file_entry.entry_type == dfvfs_definitions.FILE_ENTRY_TYPE_DIRECTORY:
file_type = 'd'
elif file_entry.entry_type == dfvfs_definitions.FILE_ENTRY_TYPE_LINK:
file_type = 'l'
else:
file_type = '-'
if file_attribute_flags is None:
mode_string = ''.join([file_type] + (9 * ['-']))
else:
mode_string = self._GetFileAttributeFlagsString(
file_type, file_attribute_flags)
owner_identifier = ''
if stat_attribute.owner_identifier is not None:
owner_identifier = str(stat_attribute.owner_identifier)
group_identifier = ''
if stat_attribute.group_identifier is not None:
group_identifier = str(stat_attribute.group_identifier)
size = str(file_entry.size)
access_time = self._GetTimestamp(file_entry.access_time)
creation_time = self._GetTimestamp(file_entry.creation_time)
change_time = self._GetTimestamp(file_entry.change_time)
modification_time = self._GetTimestamp(file_entry.modification_time)
# TODO: add support to calculate MD5
md5_string = '0'
path_segments = [
segment.translate(self._bodyfile_escape_characters)
for segment in path_segments]
file_entry_name_value = '/'.join(path_segments) or '/'
if not file_entry.link:
name_value = file_entry_name_value
else:
if file_entry.type_indicator == dfvfs_definitions.TYPE_INDICATOR_NTFS:
path_segments = file_entry.link.split('\\')
else:
path_segments = file_entry.link.split('/')
file_entry_link = '/'.join([
segment.translate(self._bodyfile_escape_characters)
for segment in path_segments])
name_value = '{0:s} -> {1:s}'.format(
file_entry_name_value, file_entry_link)
yield '|'.join([
md5_string, name_value, inode_string, mode_string, owner_identifier,
group_identifier, size, access_time, modification_time, change_time,
creation_time])
for data_stream in file_entry.data_streams:
if data_stream.name:
data_stream_name = data_stream.name.translate(
self._bodyfile_escape_characters)
data_stream_name_value = ':'.join([
file_entry_name_value, data_stream_name])
yield '|'.join([
md5_string, data_stream_name_value, inode_string, mode_string,
owner_identifier, group_identifier, size, access_time,
modification_time, change_time, creation_time])
for attribute in file_entry.attributes:
if isinstance(attribute, dfvfs_ntfs_attribute.FileNameNTFSAttribute):
if (attribute.name == file_entry.name and
attribute.parent_file_reference == parent_file_reference):
attribute_name_value = '{0:s} ($FILE_NAME)'.format(
file_entry_name_value)
access_time = self._GetTimestamp(attribute.access_time)
creation_time = self._GetTimestamp(attribute.creation_time)
change_time = self._GetTimestamp(attribute.entry_modification_time)
modification_time = self._GetTimestamp(attribute.modification_time)
yield '|'.join([
md5_string, attribute_name_value, inode_string, mode_string,
owner_identifier, group_identifier, size, access_time,
modification_time, change_time, creation_time])
```
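A short, hedged sketch of how the bodyfile generator is driven; the file entries and path segments are assumed to come from FileEntryLister (see file_entry_lister.py below), and each yielded value is one pipe-delimited bodyfile line per file name and data stream.

```python
# Hedged sketch: file_entry_lister and base_path_specs are assumed to be
# set up as in file_entry_lister.py below.
from dfimagetools import bodyfile

bodyfile_generator = bodyfile.BodyfileGenerator()
for file_entry, path_segments in file_entry_lister.ListFileEntries(
    base_path_specs):
  for bodyfile_entry in bodyfile_generator.GetEntries(
      file_entry, path_segments):
    print(bodyfile_entry)
```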
#### File: imagetools/dfimagetools/file_entry_lister.py
```python
import logging
import re
from dfvfs.helpers import file_system_searcher
from dfvfs.helpers import volume_scanner
from dfvfs.helpers import windows_path_resolver
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as dfvfs_path_spec_factory
from dfvfs.resolver import resolver as dfvfs_resolver
from dfvfs.volume import factory as dfvfs_volume_system_factory
from dfimagetools import bodyfile
from dfimagetools import decorators
class FileEntryLister(volume_scanner.VolumeScanner):
"""File entry lister."""
_UNICODE_SURROGATES_RE = re.compile('[\ud800-\udfff]')
_WINDOWS_DIRECTORIES = frozenset([
'C:\\Windows',
'C:\\WINNT',
'C:\\WTSRV',
'C:\\WINNT35',
])
def __init__(self, mediator=None):
"""Initializes a file entry lister.
Args:
mediator (dfvfs.VolumeScannerMediator): a volume scanner mediator.
"""
super(FileEntryLister, self).__init__(mediator=mediator)
self._bodyfile_generator = bodyfile.BodyfileGenerator()
self._list_only_files = False
def _GetBasePathSegments(self, base_path_spec):
"""Retrieves the base path segments.
Args:
base_path_spec (dfvfs.PathSpec): source path specification.
Returns:
list[str]: path segments.
"""
if not base_path_spec.HasParent() or not base_path_spec.parent:
return ['']
if base_path_spec.parent.type_indicator in (
dfvfs_definitions.TYPE_INDICATOR_APFS_CONTAINER,
dfvfs_definitions.TYPE_INDICATOR_GPT,
dfvfs_definitions.TYPE_INDICATOR_LVM):
volume_system = dfvfs_volume_system_factory.Factory.NewVolumeSystem(
base_path_spec.parent.type_indicator)
volume_system.Open(base_path_spec.parent)
volume = volume_system.GetVolumeByIdentifier(
base_path_spec.parent.location[1:])
if base_path_spec.parent.type_indicator == (
dfvfs_definitions.TYPE_INDICATOR_GPT):
volume_identifier_prefix = 'gpt'
else:
volume_identifier_prefix = volume_system.VOLUME_IDENTIFIER_PREFIX
volume_identifier = volume.GetAttribute('identifier')
volume_path_segment = '{0:s}{{{1:s}}}'.format(
volume_identifier_prefix, volume_identifier.value)
return ['', volume_path_segment]
if base_path_spec.parent.type_indicator == (
dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION):
return base_path_spec.parent.location.split('/')
return ['']
def _GetPathSpecificationString(self, path_spec):
"""Retrieves a printable string representation of the path specification.
Args:
path_spec (dfvfs.PathSpec): path specification.
Returns:
str: printable string representation of the path specification.
"""
path_spec_string = path_spec.comparable
if self._UNICODE_SURROGATES_RE.search(path_spec_string):
path_spec_string = path_spec_string.encode(
'utf-8', errors='surrogateescape')
path_spec_string = path_spec_string.decode(
'utf-8', errors='backslashreplace')
return path_spec_string
def _ListFileEntry(self, file_system, file_entry, parent_path_segments):
"""Lists a file entry.
Args:
file_system (dfvfs.FileSystem): file system that contains the file entry.
file_entry (dfvfs.FileEntry): file entry to list.
parent_path_segments (list[str]): path segments of the full path of the parent
file entry.
Yields:
tuple[dfvfs.FileEntry, list[str]]: file entry and path segments.
"""
path_segments = list(parent_path_segments)
if not file_entry.IsRoot():
path_segments.append(file_entry.name)
if not self._list_only_files or file_entry.IsFile():
yield file_entry, path_segments
for sub_file_entry in file_entry.sub_file_entries:
for result in self._ListFileEntry(
file_system, sub_file_entry, path_segments):
yield result
@decorators.deprecated
def GetBodyfileEntries(self, file_entry, path_segments):
"""Retrieves bodyfile entry representations of a file entry.
Args:
file_entry (dfvfs.FileEntry): file entry.
path_segments (list[str]): path segments of the full path of the file entry.
Returns:
generator[str]: bodyfile entry generator.
"""
return self._bodyfile_generator.GetEntries(file_entry, path_segments)
def GetWindowsDirectory(self, base_path_spec):
"""Retrieves the Windows directory from the base path specification.
Args:
base_path_spec (dfvfs.PathSpec): source path specification.
Returns:
str: path of the Windows directory or None if not available.
"""
if base_path_spec.type_indicator == dfvfs_definitions.TYPE_INDICATOR_OS:
mount_point = base_path_spec
else:
mount_point = base_path_spec.parent
file_system = dfvfs_resolver.Resolver.OpenFileSystem(base_path_spec)
path_resolver = windows_path_resolver.WindowsPathResolver(
file_system, mount_point)
for windows_path in self._WINDOWS_DIRECTORIES:
windows_path_spec = path_resolver.ResolvePath(windows_path)
if windows_path_spec is not None:
return windows_path
return None
def ListFileEntries(self, base_path_specs):
"""Lists file entries in the base path specifications.
Args:
base_path_specs (list[dfvfs.PathSpec]): source path specifications.
Yields:
tuple[dfvfs.FileEntry, list[str]]: file entry and path segments.
"""
for base_path_spec in base_path_specs:
file_system = dfvfs_resolver.Resolver.OpenFileSystem(base_path_spec)
file_entry = dfvfs_resolver.Resolver.OpenFileEntry(base_path_spec)
if file_entry is None:
path_specification_string = self._GetPathSpecificationString(
base_path_spec)
logging.warning('Unable to open base path specification:\n{0:s}'.format(
path_specification_string))
return
base_path_segments = self._GetBasePathSegments(base_path_spec)
for result in self._ListFileEntry(
file_system, file_entry, base_path_segments):
yield result
def ListFileEntriesWithFindSpecs(self, base_path_specs, find_specs):
"""Lists file entries in the base path specifications.
This method filters file entries based on the find specifications.
Args:
base_path_specs (list[dfvfs.PathSpec]): source path specifications.
find_specs (list[dfvfs.FindSpec]): find specifications.
Yields:
tuple[dfvfs.FileEntry, list[str]]: file entry and path segments.
"""
for base_path_spec in base_path_specs:
file_system = dfvfs_resolver.Resolver.OpenFileSystem(base_path_spec)
if dfvfs_path_spec_factory.Factory.IsSystemLevelTypeIndicator(
base_path_spec.type_indicator):
mount_point = base_path_spec
else:
mount_point = base_path_spec.parent
base_path_segments = self._GetBasePathSegments(base_path_spec)
searcher = file_system_searcher.FileSystemSearcher(
file_system, mount_point)
for path_spec in searcher.Find(find_specs=find_specs):
file_entry = dfvfs_resolver.Resolver.OpenFileEntry(path_spec)
path_segments = file_system.SplitPath(path_spec.location)
full_path_segments = list(base_path_segments)
full_path_segments.extend(path_segments)
yield file_entry, full_path_segments
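# Hedged usage sketch (comment form): the image path is illustrative and
# GetBasePathSpecs is inherited from the dfVFS VolumeScanner base class.
#
#   lister = FileEntryLister()
#   base_path_specs = lister.GetBasePathSpecs('/tmp/image.raw')
#   for file_entry, path_segments in lister.ListFileEntries(base_path_specs):
#       print('/'.join(path_segments))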
``` |
{
"source": "joachimmetz/l2tdevtools",
"score": 2
} |
#### File: l2tdevtools/helpers/project.py
```python
import logging
import os
from l2tdevtools import project_config
from l2tdevtools.review_helpers import cli
class ProjectHelper(cli.CLIHelper):
"""Helper for interacting with a project.
Attributes:
project_name (str): name of the project.
"""
_AUTHORS_FILE_HEADER = [
'# Names should be added to this file with this pattern:',
'#',
'# For individuals:',
'# Name (email address)',
'#',
'# For organizations:',
'# Organization (fnmatch pattern)',
'#',
'# See python fnmatch module documentation for more information.',
'',
'Google Inc. (<EMAIL>)']
SUPPORTED_PROJECTS = frozenset([
'acstore',
'artifacts',
'clitooltester',
'dfdatetime',
'dfkinds',
'dfimagetools',
'dftimewolf',
'dfvfs',
'dfvfs-snippets',
'dfwinreg',
'dtfabric',
'dtformats',
'esedb-kb',
'l2tdevtools',
'l2tdocs',
'l2tscaffolder',
'olecf-kb',
'plaso',
'timesketch',
'turbinia',
'vstools',
'winevt-kb',
'winreg-kb'])
def __init__(self, project_path):
"""Initializes a project helper.
Args:
project_path (str): path to the project.
Raises:
ValueError: if the project name is not supported.
"""
super(ProjectHelper, self).__init__()
self._project_definition = None
self.project_name = self._GetProjectName(project_path)
@property
def version_file_path(self):
"""str: path of the version file."""
return os.path.join(self.project_name, '__init__.py')
def _GetProjectName(self, project_path):
"""Retrieves the project name from the script path.
Args:
project_path (str): path to the root of the project.
Returns:
str: project name.
Raises:
ValueError: if the project name is not supported.
"""
project_name = os.path.abspath(project_path)
project_name = os.path.basename(project_name)
# The review.py check is needed for the l2tdevtools tests.
if (project_name != 'review.py' and
project_name not in self.SUPPORTED_PROJECTS):
raise ValueError('Unsupported project name: {0:s}.'.format(project_name))
return project_name
def _ReadFileContents(self, path):
"""Reads the contents of a file.
Args:
path (str): path of the file.
Returns:
bytes: file content or None if not available.
"""
if not os.path.exists(path):
logging.error('Missing file: {0:s}'.format(path))
return None
try:
with open(path, 'rb') as file_object:
file_contents = file_object.read()
except IOError as exception:
logging.error('Unable to read file with error: {0!s}'.format(exception))
return None
try:
file_contents = file_contents.decode('utf-8')
except UnicodeDecodeError as exception:
logging.error('Unable to read file with error: {0!s}'.format(exception))
return None
return file_contents
def ReadDefinitionFile(self):
"""Reads the project definitions file (project_name.ini).
Returns:
ProjectDefinition: project definition.
"""
if self._project_definition is None:
project_file = '{0:s}.ini'.format(self.project_name)
project_reader = project_config.ProjectDefinitionReader()
with open(project_file, 'r') as file_object:
self._project_definition = project_reader.Read(file_object)
return self._project_definition
```
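A hedged usage sketch; the checkout path is illustrative and must point at the root of one of the SUPPORTED_PROJECTS, and ReadDefinitionFile expects the project_name.ini file in the current working directory.

```python
# Hedged sketch: the project path is an illustrative assumption.
from l2tdevtools.helpers import project

helper = project.ProjectHelper('/home/user/Projects/plaso')
print(helper.project_name)       # plaso
print(helper.version_file_path)  # plaso/__init__.py

project_definition = helper.ReadDefinitionFile()
```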
#### File: tests/build_helpers/test_lib.py
```python
import logging
import os
import tarfile
from l2tdevtools import source_helper
class TestSourceHelper(source_helper.SourceHelper):
"""Test helper to manage project source code."""
def __init__(self, project_name, project_definition, project_version):
"""Initializes a source helper.
Args:
project_name (str): name of the project.
project_definition (ProjectDefinition): project definition.
project_version (str): version of the project source code.
"""
super(TestSourceHelper, self).__init__(project_name, project_definition)
self._project_version = project_version
self._source_directory_path = '{0:s}-{1:s}'.format(
project_name, project_version)
self._source_package_filename = '{0:s}-{1:s}.tar.gz'.format(
project_name, project_version)
# pylint: disable=redundant-returns-doc
def _CreateFromTar(self, source_filename):
"""Creates the source directory from a .tar source package.
Args:
source_filename (str): filename of the source package.
Returns:
str: name of the source directory or None if no files can be extracted
from the .tar.gz source package.
"""
with tarfile.open(source_filename, 'r:*', encoding='utf-8') as archive:
directory_name = ''
for tar_info in archive.getmembers():
filename = getattr(tar_info, 'name', None)
if isinstance(filename, bytes):
try:
filename = filename.decode('utf8')
except UnicodeDecodeError:
logging.warning(
'Unable to decode filename in tar file: {0:s}'.format(
source_filename))
continue
if filename is None:
logging.warning('Missing filename in tar file: {0:s}'.format(
source_filename))
continue
if not directory_name:
# Note that this will set directory name to an empty string
# if filename starts with a /.
directory_name, _, _ = filename.partition('/')
if not directory_name or directory_name.startswith('..'):
logging.error(
'Unsupported directory name in tar file: {0:s}'.format(
source_filename))
return None
if os.path.exists(directory_name):
break
logging.info('Extracting: {0:s}'.format(source_filename))
elif not filename.startswith(directory_name):
logging.warning(
'Skipping: {0:s} in tar file: {1:s}'.format(
filename, source_filename))
continue
archive.extract(tar_info)
return directory_name
def Create(self):
"""Creates the source directory.
Returns:
str: name of the source directory or None on error.
"""
# TODO: use shutil.unpack_archive(test_path, temp_directory) when Python 2
# support has been removed.
return self._CreateFromTar(self._source_package_filename)
def GetProjectIdentifier(self):
"""Retrieves the project identifier for a given project name.
Returns:
str: project identifier or None on error.
"""
return 'com.github.log2timeline.{0:s}'.format(self.project_name)
def GetProjectVersion(self):
"""Retrieves the version number for a given project name.
Returns:
str: version number or None on error.
"""
return self._project_version
def GetSourceDirectoryPath(self):
"""Retrieves the path of the source directory.
Returns:
str: path of the source directory or None if not available.
"""
return self._source_directory_path
def GetSourcePackageFilename(self):
"""Retrieves the filename of the source package.
This function downloads the source package if not done so previously.
Returns:
str: filename of the source package or None if not available.
"""
return self._source_package_filename
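# Hedged usage sketch (comment form): the version string and the presence
# of a matching .tar.gz fixture are illustrative test assumptions.
#
#   source_helper = TestSourceHelper('pytsk3', project_definition, '20220101')
#   source_directory = source_helper.Create()  # extracts pytsk3-20220101.tar.gz
#   assert source_directory == 'pytsk3-20220101'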
``` |
{
"source": "joachimmetz/pytsk",
"score": 2
} |
#### File: joachimmetz/pytsk/setup.py
```python
from __future__ import print_function
import copy
import glob
import re
import os
import subprocess
import sys
import time
from setuptools import setup, Command, Extension
from setuptools.command.build_ext import build_ext
from setuptools.command.sdist import sdist
import distutils.ccompiler
from distutils import errors
from distutils import log
from distutils.ccompiler import new_compiler
from distutils.dep_util import newer_group
try:
from distutils.command.bdist_msi import bdist_msi
except ImportError:
bdist_msi = None
try:
from distutils.command.bdist_rpm import bdist_rpm
except ImportError:
bdist_rpm = None
import generate_bindings
import run_tests
version_tuple = (sys.version_info[0], sys.version_info[1])
if version_tuple < (3, 5):
print((
'Unsupported Python version: {0:s}, version 3.5 or higher '
'required.').format(sys.version))
sys.exit(1)
if not bdist_msi:
BdistMSICommand = None
else:
class BdistMSICommand(bdist_msi):
"""Custom handler for the bdist_msi command."""
def run(self):
"""Builds an MSI."""
# Make a deepcopy of distribution so the following version changes
# only apply to bdist_msi.
self.distribution = copy.deepcopy(self.distribution)
# bdist_msi does not support the library version so we add ".1"
# as a work around.
self.distribution.metadata.version += ".1"
bdist_msi.run(self)
if not bdist_rpm:
BdistRPMCommand = None
else:
class BdistRPMCommand(bdist_rpm):
"""Custom handler for the bdist_rpm command."""
def make_spec_file(self, spec_file):
"""Make an RPM Spec file."""
# Note that bdist_rpm can be an old style class.
if issubclass(BdistRPMCommand, object):
spec_file = super(BdistRPMCommand, self)._make_spec_file()
else:
spec_file = bdist_rpm._make_spec_file(self)
if sys.version_info[0] < 3:
python_package = 'python2'
else:
python_package = 'python3'
description = []
requires = ''
summary = ''
in_description = False
python_spec_file = []
for line in iter(spec_file):
if line.startswith('Summary: '):
summary = line
elif line.startswith('BuildRequires: '):
line = 'BuildRequires: {0:s}-setuptools, {0:s}-devel'.format(
python_package)
elif line.startswith('Requires: '):
requires = line[10:]
if python_package == 'python3':
requires = requires.replace('python-', 'python3-')
requires = requires.replace('python2-', 'python3-')
elif line.startswith('%description'):
in_description = True
elif line.startswith('python setup.py build'):
if python_package == 'python3':
line = '%py3_build'
else:
line = '%py2_build'
elif line.startswith('python setup.py install'):
if python_package == 'python3':
line = '%py3_install'
else:
line = '%py2_install'
elif line.startswith('%files'):
lines = [
'%files -n {0:s}-%{{name}}'.format(python_package),
'%defattr(644,root,root,755)',
'%license LICENSE',
'%doc README']
if python_package == 'python3':
lines.extend([
'%{_libdir}/python3*/site-packages/*.so',
'%{_libdir}/python3*/site-packages/pytsk3*.egg-info/*',
'',
'%exclude %{_prefix}/share/doc/*'])
else:
lines.extend([
'%{_libdir}/python2*/site-packages/*.so',
'%{_libdir}/python2*/site-packages/pytsk3*.egg-info/*',
'',
'%exclude %{_prefix}/share/doc/*'])
python_spec_file.extend(lines)
break
elif line.startswith('%prep'):
in_description = False
python_spec_file.append(
'%package -n {0:s}-%{{name}}'.format(python_package))
if python_package == 'python2':
python_spec_file.extend([
'Obsoletes: python-pytsk3 < %{version}',
'Provides: python-pytsk3 = %{version}'])
if requires:
python_spec_file.append('Requires: {0:s}'.format(requires))
python_spec_file.extend([
'{0:s}'.format(summary),
'',
'%description -n {0:s}-%{{name}}'.format(python_package)])
python_spec_file.extend(description)
elif in_description:
# Ignore leading white lines in the description.
if not description and not line:
continue
description.append(line)
python_spec_file.append(line)
return python_spec_file
def _make_spec_file(self):
"""Generates the text of an RPM spec file.
Returns:
list[str]: lines of text.
"""
return self.make_spec_file(
bdist_rpm._make_spec_file(self))
class BuildExtCommand(build_ext):
"""Custom handler for the build_ext command."""
def build_extension(self, extension):
"""Builds the extension.
Args:
extension: distutils Extension object.
"""
if (extension.sources is None or
not isinstance(extension.sources, (list, tuple))):
raise errors.DistutilsSetupError((
'in \'ext_modules\' option (extension \'{0:s}\'), '
'\'sources\' must be present and must be '
'a list of source filenames').format(extension.name))
extension_path = self.get_ext_fullpath(extension.name)
depends = extension.sources + extension.depends
if not (self.force or newer_group(depends, extension_path, 'newer')):
log.debug('skipping \'%s\' extension (up-to-date)', extension.name)
return
log.info('building \'%s\' extension', extension.name)
# C and C++ source files need to be compiled separately otherwise
# the extension will not build on Mac OS.
c_sources = []
cxx_sources = []
for source in extension.sources:
if source.endswith('.c'):
c_sources.append(source)
else:
cxx_sources.append(source)
objects = []
for lang, sources in (('c', c_sources), ('c++', cxx_sources)):
extra_args = extension.extra_compile_args or []
if lang == 'c++':
if self.compiler.compiler_type == 'msvc':
extra_args.append('/EHsc')
else:
extra_args.append('-std=c++14')
macros = extension.define_macros[:]
for undef in extension.undef_macros:
macros.append((undef,))
compiled_objects = self.compiler.compile(
sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=extension.include_dirs,
debug=self.debug,
extra_postargs=extra_args,
depends=extension.depends)
objects.extend(compiled_objects)
self._built_objects = objects[:]
if extension.extra_objects:
objects.extend(extension.extra_objects)
extra_args = extension.extra_link_args or []
# When MinGW32 is used statically link libgcc and libstdc++.
if self.compiler.compiler_type == 'mingw32':
extra_args.extend(['-static-libgcc', '-static-libstdc++'])
# Now link the object files together into a "shared object" --
# of course, first we have to figure out all the other things
# that go into the mix.
# Detect target language, if not provided
language = extension.language or self.compiler.detect_language(sources)
self.compiler.link_shared_object(
objects, extension_path,
libraries=self.get_libraries(extension),
library_dirs=extension.library_dirs,
runtime_library_dirs=extension.runtime_library_dirs,
extra_postargs=extra_args,
export_symbols=self.get_export_symbols(extension),
debug=self.debug,
build_temp=self.build_temp,
target_lang=language)
def configure_source(self, compiler):
"""Configures the source.
Args:
compiler: distutils compiler object.
"""
define_macros = [("HAVE_TSK_LIBTSK_H", "")]
if compiler.compiler_type == "msvc":
define_macros.extend([
("WIN32", "1"),
("UNICODE", "1"),
("NOMINMAX", "1"),
("_CRT_SECURE_NO_WARNINGS", "1")])
# TODO: ("GUID_WINDOWS", "1"),
else:
# We want to build as much as possible self contained Python
# binding.
command = [
"sh", "configure", "--disable-java", "--disable-multithreading",
"--without-afflib", "--without-libewf", "--without-libvhdi",
"--without-libvmdk", "--without-zlib"]
output = subprocess.check_output(command, cwd="sleuthkit")
print_line = False
for line in output.split(b"\n"):
line = line.rstrip()
if line == b"configure:":
print_line = True
if print_line:
if sys.version_info[0] >= 3:
line = line.decode("ascii")
print(line)
define_macros.extend([
("HAVE_CONFIG_H", "1"),
("LOCALEDIR", "\"/usr/share/locale\"")])
self.libraries = ["stdc++"]
self.define = define_macros
def run(self):
compiler = new_compiler(compiler=self.compiler)
# pylint: disable=attribute-defined-outside-init
self.configure_source(compiler)
libtsk_path = os.path.join("sleuthkit", "tsk")
if not os.access("pytsk3.cpp", os.R_OK):
# Generate the Python binding code (pytsk3.cpp).
libtsk_header_files = [
os.path.join(libtsk_path, "libtsk.h"),
os.path.join(libtsk_path, "base", "tsk_base.h"),
os.path.join(libtsk_path, "fs", "tsk_fs.h"),
os.path.join(libtsk_path, "img", "tsk_img.h"),
os.path.join(libtsk_path, "vs", "tsk_vs.h"),
"tsk3.h"]
print("Generating bindings...")
generate_bindings.generate_bindings(
"pytsk3.cpp", libtsk_header_files, initialization="tsk_init();")
build_ext.run(self)
class SDistCommand(sdist):
"""Custom handler for generating source dist."""
def run(self):
libtsk_path = os.path.join("sleuthkit", "tsk")
# sleuthkit submodule is not there, probably because this has been
# freshly checked out.
if not os.access(libtsk_path, os.R_OK):
subprocess.check_call(["git", "submodule", "init"])
subprocess.check_call(["git", "submodule", "update"])
if not os.path.exists(os.path.join("sleuthkit", "configure")):
raise RuntimeError(
"Missing: sleuthkit/configure run 'setup.py build' first.")
sdist.run(self)
class UpdateCommand(Command):
"""Update sleuthkit source.
This is normally only run by packagers to make a new release.
"""
_SLEUTHKIT_GIT_TAG = "4.11.1"
version = time.strftime("%Y%m%d")
timezone_minutes, _ = divmod(time.timezone, 60)
timezone_hours, timezone_minutes = divmod(timezone_minutes, 60)
# If timezone_hours is -1 %02d will format as -1 instead of -01
# hence we detect the sign and force a leading zero.
if timezone_hours < 0:
timezone_string = "-%02d%02d" % (-timezone_hours, timezone_minutes)
else:
timezone_string = "+%02d%02d" % (timezone_hours, timezone_minutes)
version_pkg = "%s %s" % (
time.strftime("%a, %d %b %Y %H:%M:%S"), timezone_string)
user_options = [("use-head", None, (
"Use the latest version of Sleuthkit checked into git (HEAD) instead of "
"tag: {0:s}".format(_SLEUTHKIT_GIT_TAG)))]
def initialize_options(self):
self.use_head = False
def finalize_options(self):
self.use_head = bool(self.use_head)
files = {
"sleuthkit/Makefile.am": [
("SUBDIRS = .+", "SUBDIRS = tsk"),
],
"class_parser.py": [
('VERSION = "[^"]+"', 'VERSION = "%s"' % version),
],
"dpkg/changelog": [
(r"pytsk3 \([^\)]+\)", "pytsk3 (%s-1)" % version),
("(<[^>]+>).+", r"\1 %s" % version_pkg),
],
}
def patch_sleuthkit(self):
"""Applies patches to the SleuthKit source code."""
for filename, rules in iter(self.files.items()):
filename = os.path.join(*filename.split("/"))
with open(filename, "r") as file_object:
data = file_object.read()
for search, replace in rules:
data = re.sub(search, replace, data)
with open(filename, "w") as fd:
fd.write(data)
patch_files = [
"sleuthkit-{0:s}-configure.ac".format(self._SLEUTHKIT_GIT_TAG)]
for patch_file in patch_files:
patch_file = os.path.join("patches", patch_file)
if not os.path.exists(patch_file):
print("No such patch file: {0:s}".format(patch_file))
continue
patch_file = os.path.join("..", patch_file)
subprocess.check_call(["git", "apply", patch_file], cwd="sleuthkit")
def run(self):
subprocess.check_call(["git", "stash"], cwd="sleuthkit")
subprocess.check_call(["git", "submodule", "init"])
subprocess.check_call(["git", "submodule", "update"])
print("Updating sleuthkit")
subprocess.check_call(["git", "reset", "--hard"], cwd="sleuthkit")
subprocess.check_call(["git", "clean", "-x", "-f", "-d"], cwd="sleuthkit")
subprocess.check_call(["git", "checkout", "master"], cwd="sleuthkit")
subprocess.check_call(["git", "pull"], cwd="sleuthkit")
if self.use_head:
print("Pulling from HEAD")
else:
print("Pulling from tag: {0:s}".format(self._SLEUTHKIT_GIT_TAG))
subprocess.check_call(["git", "fetch", "--force", "--tags"], cwd="sleuthkit")
git_tag_path = "tags/sleuthkit-{0:s}".format(self._SLEUTHKIT_GIT_TAG)
subprocess.check_call(["git", "checkout", git_tag_path], cwd="sleuthkit")
self.patch_sleuthkit()
compiler_type = distutils.ccompiler.get_default_compiler()
if compiler_type != "msvc":
subprocess.check_call(["./bootstrap"], cwd="sleuthkit")
# Now derive the version based on the date.
with open("version.txt", "w") as fd:
fd.write(self.version)
libtsk_path = os.path.join("sleuthkit", "tsk")
# Generate the Python binding code (pytsk3.cpp).
libtsk_header_files = [
os.path.join(libtsk_path, "libtsk.h"),
os.path.join(libtsk_path, "base", "tsk_base.h"),
os.path.join(libtsk_path, "fs", "tsk_fs.h"),
os.path.join(libtsk_path, "img", "tsk_img.h"),
os.path.join(libtsk_path, "vs", "tsk_vs.h"),
"tsk3.h"]
print("Generating bindings...")
generate_bindings.generate_bindings(
"pytsk3.cpp", libtsk_header_files, initialization="tsk_init();")
class ProjectBuilder(object):
"""Class to help build the project."""
def __init__(self, project_config, argv):
"""Initializes a project builder object."""
self._project_config = project_config
self._argv = argv
# The path to the sleuthkit/tsk directory.
self._libtsk_path = os.path.join("sleuthkit", "tsk")
# Paths under the sleuthkit/tsk directory which contain files we need
# to compile.
self._sub_library_names = ["base", "docs", "fs", "img", "pool", "util", "vs"]
# The args for the extension builder.
self.extension_args = {
"include_dirs": ["talloc", self._libtsk_path, "sleuthkit", "."],
"library_dirs": []}
# The sources to build.
self._source_files = [
"class.cpp", "error.cpp", "tsk3.cpp", "pytsk3.cpp", "talloc/talloc.c"]
# Path to the top of the unpacked sleuthkit sources.
self._sleuthkit_path = "sleuthkit"
def build(self):
"""Build everything."""
# Fetch all c and cpp files from the subdirs to compile.
extension_file = os.path.join(
self._libtsk_path, "auto", "guid.cpp")
self._source_files.append(extension_file)
for library_name in self._sub_library_names:
for extension in ("*.c", "*.cpp"):
extension_glob = os.path.join(
self._libtsk_path, library_name, extension)
self._source_files.extend(glob.glob(extension_glob))
# Sort the source files to make sure they are in consistent order when
# building.
source_files = sorted(self._source_files)
ext_modules = [Extension("pytsk3", source_files, **self.extension_args)]
setup(
cmdclass={
"build_ext": BuildExtCommand,
"bdist_msi": BdistMSICommand,
"bdist_rpm": BdistRPMCommand,
"sdist": SDistCommand,
"update": UpdateCommand},
ext_modules=ext_modules,
**self._project_config)
if __name__ == "__main__":
__version__ = open("version.txt").read().strip()
setup_args = dict(
name="pytsk3",
version=__version__,
description="Python bindings for the sleuthkit",
long_description=(
"Python bindings for the sleuthkit (http://www.sleuthkit.org/)"),
license="Apache 2.0",
url="https://github.com/py4n6/pytsk/",
author="<NAME> and <NAME>",
author_email="<EMAIL>, <EMAIL>",
zip_safe=False)
ProjectBuilder(setup_args, sys.argv).build()
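# Typical invocation sequence (hedged, comment form): "update" fetches and
# patches the SleuthKit submodule and regenerates pytsk3.cpp via
# UpdateCommand; "build" then compiles the extension via BuildExtCommand.
#
#   python setup.py update
#   python setup.py build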
``` |
{
"source": "joachimmetz/UnifiedLogReader",
"score": 3
} |
#### File: UnifiedLogReader/scripts/tracev3_decompress.py
```python
import binascii
import os
import struct
import sys
import lz4.block
def DecompressFile(input_path, output_path):
try:
with open(input_path, 'rb') as trace_file:
with open(output_path, 'wb') as out_file:
index = 0
header = trace_file.read(4)
while header:
begin_pos = trace_file.tell() - 4
trace_file.seek(begin_pos + 8)
struct_len = struct.unpack('<Q', trace_file.read(8))[0]
#print "index={} pos=0x{:X}".format(index, begin_pos), binascii.hexlify(header)
trace_file.seek(begin_pos)
block_data = trace_file.read(16 + struct_len)
if header == b'\x00\x10\x00\x00': # header
out_file.write(block_data) # boot_uuid header, write to output directly
elif header[0] == b'\x0B':
out_file.write(block_data) # uncompressed, write to output directly
elif header[0] == b'\x0D':
if block_data[16:20] in [b'bv41', b'bv4-']:
uncompressed = b''
last_uncompressed = b''
chunk_start = 16 # bv** offset
chunk_header = block_data[chunk_start:chunk_start + 4]
while (struct_len > chunk_start) and (chunk_header != b'bv4$'):
if chunk_header == b'bv41':
uncompressed_size, compressed_size = struct.unpack('<II', block_data[chunk_start + 4:chunk_start + 12])
last_uncompressed = lz4.block.decompress(block_data[chunk_start + 12: chunk_start + 12 + compressed_size], uncompressed_size, dict=last_uncompressed)
chunk_start += 12 + compressed_size
uncompressed += last_uncompressed
elif chunk_header == b'bv4-':
uncompressed_size = struct.unpack('<I', block_data[chunk_start + 4:chunk_start + 8])[0]
uncompressed += block_data[chunk_start + 8:chunk_start + 8 + uncompressed_size]
chunk_start += 8 + uncompressed_size
else:
print 'Unknown compression value {} @ 0x{:X} - {}'.format(binascii.hexlify(chunk_header), begin_pos + chunk_start, chunk_header)
break
chunk_header = block_data[chunk_start:chunk_start + 4]
###
out_file.write(block_data[0:8]) # Same Header !
out_file.write(struct.pack('<Q', len(uncompressed))) # New size
out_file.write(uncompressed)
else:
print 'Unknown compression type', binascii.hexlify(block_data[16:20])
else:
print 'Unknown header value encountered : {}, struct_len=0x{:X}'.format(binascii.hexlify(header), struct_len)
out_file.write(block_data) # Same header and data, written unchanged
if struct_len % 8: # Go to QWORD boundary on input
struct_len += 8 - (struct_len % 8)
if out_file.tell() % 8: # Go to QWORD boundary on output
out_file.write(b'\x00\x00\x00\x00\x00\x00\x00'[0:(8-out_file.tell() % 8)])
trace_file.seek(begin_pos + 16 + struct_len)
header = trace_file.read(4)
index += 1
except Exception as ex:
print 'Exception', str(ex)
return False
return True
def RecurseDecompressFiles(input_path):
files = os.listdir(input_path)
for file_name in files:
input_file_path = os.path.join(input_path, file_name)
if file_name.lower().endswith('.tracev3'):
print "Processing file - ", input_file_path
DecompressFile(input_file_path, input_file_path + ".dec")
elif os.path.isdir(input_file_path):
RecurseDecompressFiles(input_file_path)
if len(sys.argv) == 1:
print "Not enough arguments, provide the traceV3 file's path or a folder path to recurse extract tracev3 files"
else:
input_path = sys.argv[1]
if os.path.isdir(input_path):
RecurseDecompressFiles(input_path)
else:
print "Processing file - ", input_path
DecompressFile(input_path, input_path + ".dec")
```
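A hedged usage sketch; note the script has no __main__ guard, so its command-line block runs at import time and in practice the script is invoked directly (paths are illustrative).

```python
# Direct invocation (the script has no __main__ guard):
#   python tracev3_decompress.py /path/to/logdata.tracev3
#
# Equivalent call if DecompressFile is lifted into its own module:
DecompressFile('/tmp/logdata.tracev3', '/tmp/logdata.tracev3.dec')
```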
#### File: UnifiedLogReader/UnifiedLog/resources.py
```python
from __future__ import unicode_literals
from UnifiedLog import logger
class Catalog(object):
def __init__(self):
super(Catalog, self).__init__()
self.ContinuousTime = 0
self.FileObjects = []
self.Strings = ''
self.ProcInfos = []
self.ChunkMetaInfo = []
def GetProcInfoById(self, id):
for proc_info in self.ProcInfos:
if proc_info.id == id:
return proc_info
# Not found!
logger.error("ProcInfo with id={} not found".format(id))
return None
class ChunkMeta(object):
def __init__(self, continuous_time_first, continuous_time_last, chunk_len, compression_alg):
super(ChunkMeta, self).__init__()
self.continuous_time_first = continuous_time_first
self.continuous_time_last = continuous_time_last
self.length_of_chunk = chunk_len # Chunk to follow
self.compression_alg = compression_alg # 0x100 (256) = lz4
self.ProcInfo_Ids = []
self.StringIndexes = []
self.ProcInfos = {} # key = pid
self.Strings = {} # key = string offset
class ExtraFileReference(object):
'''Extra file reference object. Some ProcInfos have messages in more than one uuidtext file'''
def __init__(self, data_size, uuid_file_index, u2, v_offset, id):
super(ExtraFileReference, self).__init__()
self.data_size = data_size # data size
self.uuid_file_index = uuid_file_index
self.unknown2 = u2
self.v_offset = v_offset # virtual offset
self.id = id
class ProcInfo(object):
def __init__(self, id, flags, uuid_file_index, dsc_file_index, proc_id1, proc_id2, pid, euid, u6, num_extra_uuid_refs, u8, num_subsys_cat_elements, u9, extra_file_refs):
super(ProcInfo, self).__init__()
self.id = id
self.flags = flags
self.uuid_file_index = uuid_file_index
self.dsc_file_index = dsc_file_index
self.proc_id1 = proc_id1 # usually same as pid (but not always!)
self.proc_id2 = proc_id2 # secondary pid like unique value for getting unique entries when 2 proc_info have same pid
self.pid = pid
self.euid = euid
self.unk_val6 = u6
self.num_extra_uuid_refs = num_extra_uuid_refs
self.unk_val8 = u8
self.num_subsys_cat_elements = num_subsys_cat_elements
self.unk_val9 = u9
self.items = {} # key = item_id, val = (subsystem, category)
self.extra_file_refs = extra_file_refs # In addition to self.uuid_file_index
def GetSubSystemAndCategory(self, sc_id):
sc = self.items.get(sc_id, None)
if sc:
return (sc[0], sc[1])
# Not found!
logger.error("Could not find subsystem_category_id={}".format(sc_id))
return ('','')
class Timesync(object):
def __init__(self, header):
super(Timesync, self).__init__()
self.header = header
self.items = []
#self.items_dict = {} # unused , use later for optimization
class TimesyncHeader(object):
def __init__(self, sig, unk1, boot_uuid, ts_numer, ts_denom, ts, bias, is_dst):
super(TimesyncHeader, self).__init__()
self.signature = sig
self.unknown1 = unk1
self.boot_uuid = boot_uuid
self.ts_numerator = ts_numer
self.ts_denominator = ts_denom
self.time_stamp = ts
self.bias_minutes = bias
self.is_dst = (is_dst == 1) # 1 = DST
class TimesyncItem(object):
'''Timesync item object'''
def __init__(self, ts_unknown, cont_time, ts, bias, is_dst):
super(TimesyncItem, self).__init__()
#self.signature = sig # "Ts " = sig?
self.ts_unknown = ts_unknown
self.continuousTime = cont_time
self.time_stamp = ts
self.bias_minutes = bias
self.is_dst = (is_dst == 1) # 1 = DST
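# Hedged usage sketch (comment form; identifiers are illustrative):
#
#   catalog = Catalog()
#   catalog.ProcInfos.append(proc_info)  # parsed from a tracev3 catalog chunk
#   found = catalog.GetProcInfoById(3)   # logs an error when not found
#   if found:
#       subsystem, category = found.GetSubSystemAndCategory(1)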
``` |
{
"source": "JoachimRohde/netbeans",
"score": 3
} |
#### File: truffle/scripts/DebuggerBase.py
```python
class TestObject:
def addAO(self): self.ao = "AO"
def fnc1():
a = 20
o = TestObject()
o.addAO()
arr = []
arr = [5, 4, 3, 2, 1]
return 30
def fnc2(n):
n1 = n + 1
f2 = 0
if n1 <= 10:
f2 = fnc2(n1) + 1
return f2
ga = 6
fnc1()
for i in range(1, 10):
fnc2(i)
``` |
{
"source": "joachimth/CarPi",
"score": 2
} |
#### File: CarPi/CarPiCommons/CarPiConfig.py
```python
from ConfigParser import ConfigParser
from CarPiLogging import log, init_logging_from_config
from os import environ
class ConfigFileNotFoundError(Exception):
pass
def init_config(filepath):
"""
Creates a new ConfigParser instance and reads the given file
:param str filepath: Configuration File's Path
:return ConfigParser:
"""
log('Reading Config File {} ...'.format(filepath))
config = ConfigParser()
try:
config.readfp(open(filepath))
except IOError:
raise ConfigFileNotFoundError
init_logging_from_config(config)
return config
def init_config_env(env_name, default_names=['config.cnf']):
"""
Creates a new ConfigParser instance and reads the file given
by an Environmental Variable. If variable does not exist
a default value will be used.
:param str env_name: Name of Environmental Variable
:param list of str default_names: Default Name (default: 'config.cnf')
:return ConfigParser:
"""
if env_name in environ:
return init_config(environ[env_name])
else:
c = None
for default_name in default_names:
try:
c = init_config(default_name)
return c
except ConfigFileNotFoundError:
log('Could not find {} ...'.format(default_name))
if not c:
log('Failed to load Configuration File!')
raise ConfigFileNotFoundError('Failed to load Configuration File!')
else:
# This will probably never happen...
return c
if __name__ == "__main__":
log("This script is not intended to be run standalone!")
```
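A hedged usage sketch of the environment-driven loader; the variable name and fallback file names are illustrative.

```python
# Hedged sketch: variable name and fallback file names are assumptions.
from CarPiConfig import init_config_env, ConfigFileNotFoundError

try:
    config = init_config_env(
        'CARPI_CONFIG', default_names=['gps-daemon.conf', 'config.cnf'])
except ConfigFileNotFoundError:
    print('No configuration file found, aborting')
```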
#### File: CarPi/CarPiCommons/RedisKeys.py
```python
class GpsRedisKeys:
KEY_LATITUDE = 'GPS.Latitude' # type: str
KEY_LONGITUDE = 'GPS.Longitude' # type: str
KEY_ALTITUDE = 'GPS.Altitude' # type: str
KEY_FIX_MODE = 'GPS.FixMode' # type: str
KEY_EPX = 'GPS.EPX' # type: str
KEY_EPY = 'GPS.EPY' # type: str
KEY_EPV = 'GPS.EPV' # type: str
KEY_EPT = 'GPS.EPT' # type: str
KEY_EPD = 'GPS.EPD' # type: str
KEY_EPS = 'GPS.EPS' # type: str
KEY_EPC = 'GPS.EPC' # type: str
KEY_TIME = 'GPS.Time' # type: str
KEY_CLIMB = 'GPS.Climb' # type: str
KEY_TRACK = 'GPS.Track' # type: str
KEY_SPEED = 'GPS.Speed' # type: str
KEY_SPEED_KMH = 'GPS.Speed.KMH' # type: str
KEY_SPEED_MPH = 'GPS.Speed.MPH' # type: str
KEY_LAST_UPDATED = 'GPS.LastUpdated' # type: str
KEY_LOCATION_COUNTRY = 'GPS.Location.Country' # type: str
KEY_LOCATION_CITY = 'GPS.Location.City' # type: str
KEY_LOCATION_ADMIN1 = 'GPS.Location.Admin1' # type: str
KEY_LOCATION_ADMIN2 = 'GPS.Location.Admin2' # type: str
KEY_TRIP_A_RECORDING = 'Trip.A.ID'
KEY_ALIVE = 'DaemonAlive.GPS' # type: str
KEYS = [
KEY_ALIVE,
KEY_LATITUDE,
KEY_LONGITUDE,
KEY_ALTITUDE,
KEY_FIX_MODE,
KEY_EPX,
KEY_EPY,
KEY_EPV,
KEY_EPT,
KEY_EPD,
KEY_EPS,
KEY_EPC,
KEY_TIME,
KEY_CLIMB,
KEY_TRACK,
KEY_SPEED,
KEY_SPEED_KMH,
KEY_SPEED_MPH,
KEY_LAST_UPDATED
]
class PersistentGpsRedisKeys:
KEY_ODO = 'GPS.ODO'
KEY_TRIP_A = 'GPS.Trip.A'
KEY_TRIP_B = 'GPS.Trip.B'
KEY_TRIP_A_RECORDING = 'Trip.A.ID'
KEYS = [
KEY_ODO,
KEY_TRIP_A,
KEY_TRIP_B,
KEY_TRIP_A_RECORDING
]
class NetworkInfoRedisKeys:
KEY_ETH0_IP = 'Net.eth0.IP' # type: str
KEY_WLAN0_IP = 'Net.wlan0.IP' # type: str
KEY_WLAN0_STRENGTH = 'Net.wlan0.Strength' # type: str
KEY_WLAN0_SSID = 'Net.wlan0.SSID' # type: str
KEY_WLAN1_IP = 'Net.wlan1.IP' # type: str
KEY_WLAN1_STRENGTH = 'Net.wlan1.Strength' # type: str
KEY_WLAN1_SSID = 'Net.wlan1.SSID' # type: str
KEY_ALIVE = 'DaemonAlive.Net' # type: str
KEYS = [
KEY_ALIVE,
KEY_ETH0_IP,
KEY_WLAN0_IP,
KEY_WLAN0_STRENGTH,
KEY_WLAN0_SSID,
KEY_WLAN1_IP,
KEY_WLAN1_STRENGTH,
KEY_WLAN1_SSID
]
class MpdDataRedisKeys:
KEY_STATE = 'MPD.State' # type: str
KEY_SONG_TITLE = 'MPD.CurrentSong.Title' # type: str
KEY_SONG_ARTIST = 'MPD.CurrentSong.Artist' # type: str
KEY_SONG_ALBUM = 'MPD.CurrentSong.Album' # type: str
KEY_SONG_LENGTH = 'MPD.CurrentSong.Length' # type: str
KEY_SONG_LENGTH_FORMATTED = 'MPD.CurrentSong.Length.Formatted' # type: str
KEY_CURRENT_TIME = 'MPD.CurrentTime' # type: str
KEY_CURRENT_TIME_FORMATTED = 'MPD.CurrentTime.Formatted' # type: str
KEY_VOLUME = 'MPD.Volume' # type: str
KEY_RANDOM = 'MPD.Random' # type: str
KEY_REPEAT = 'MPD.Repeat' # type: str
KEY_ALIVE = 'DaemonAlive.MPD' # type: str
KEYS = [
KEY_ALIVE,
KEY_SONG_TITLE,
KEY_SONG_ARTIST,
KEY_SONG_ALBUM,
KEY_CURRENT_TIME,
KEY_CURRENT_TIME_FORMATTED,
KEY_VOLUME
]
class MpdCommandRedisKeys:
KEY_ALIVE = MpdDataRedisKeys.KEY_ALIVE
# Commands
COMMAND_PLAY = 'CommandRequest(MPD.Play)'
COMMAND_PAUSE = 'CommandRequest(MPD.Pause)'
COMMAND_STOP = 'CommandRequest(MPD.Stop)'
COMMAND_NEXT = 'CommandRequest(MPD.Next)'
COMMAND_PREV = 'CommandRequest(MPD.Prev)'
# Parameters
# COMMAND_PAUSE
PARAM_PAUSE_VALUE = 'PauseValue'
COMMANDS = [
COMMAND_PLAY,
COMMAND_PAUSE,
COMMAND_STOP,
COMMAND_NEXT,
COMMAND_PREV
]
PARAMS = {
# COMMAND_PLAY: [],
COMMAND_PAUSE: [ PARAM_PAUSE_VALUE ],
# COMMAND_STOP: []
}
class ObdRedisKeys:
KEY_ALIVE = 'OBD.State'
KEY_BATTERY_VOLTAGE = 'OBD.BatteryVoltage'
KEY_ENGINE_LOAD = 'OBD.EngineLoad'
KEY_COOLANT_TEMP = 'OBD.CoolantTemp'
KEY_INTAKE_MAP = 'OBD.IntakeMAP'
KEY_ENGINE_RPM = 'OBD.RPM'
KEY_VEHICLE_SPEED = 'OBD.Speed'
KEY_INTAKE_TEMP = 'OBD.IntakeTemp'
KEY_O2_SENSOR_FAEQV = 'OBD.O2Sensor.FuelAirEqRatio'
KEY_O2_SENSOR_CURRENT = 'OBD.O2Sensor.Current'
KEY_FUELSYS_1_STATUS = 'OBD.FuelSystem1.Status'
KEY_FUELSYS_2_STATUS = 'OBD.FuelSystem2.Status'
KEY_MIL_STATUS = 'OBD.MIL'
KEY_DTC_COUNT = 'OBD.DTCCount'
KEY_CURRENT_DTCS = 'OBD.DTCs.Current'
KEY_PENDING_DTCS = 'OBD.DTCs.Pending'
KEYS = [
KEY_ALIVE,
KEY_BATTERY_VOLTAGE,
KEY_ENGINE_LOAD,
KEY_INTAKE_MAP,
KEY_ENGINE_RPM,
KEY_VEHICLE_SPEED,
KEY_INTAKE_TEMP,
KEY_O2_SENSOR_FAEQV,
KEY_O2_SENSOR_CURRENT
]
def prepare_dict(keys, default_value=None):
prepared = {}
for key in keys:
prepared[key] = default_value
return prepared
if __name__ == "__main__":
print("This script is not intended to be run standalone!")
```
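A hedged sketch of prepare_dict feeding a pipelined Redis write (see RedisUtils.set_piped in the next file).

```python
# Hedged sketch: builds an all-None value dictionary for the GPS keys
# before filling in fresh readings.
from RedisKeys import GpsRedisKeys, prepare_dict

gps_data = prepare_dict(GpsRedisKeys.KEYS, default_value=None)
gps_data[GpsRedisKeys.KEY_SPEED_KMH] = 42.5
gps_data[GpsRedisKeys.KEY_FIX_MODE] = 3
```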
#### File: CarPi/CarPiCommons/RedisUtils.py
```python
from redis import Redis, exceptions
from CarPiLogging import log
from CarPiThreading import CarPiThread
from ConfigParser import ConfigParser, NoOptionError
from time import sleep
# Config Sections and Keys
RCONFIG_SECTION = 'Redis'
RCONFIG_PERSISTENT_SECTION = 'Persistent_Redis'
RCONFIG_KEY_HOST = 'host'
RCONFIG_KEY_PORT = 'port'
RCONFIG_KEY_DB = 'db'
RCONFIG_KEY_EXPIRE = 'expire'
RCONFIG_VALUE_EXPIRE = None
RCONFIG_VALUE_EXPIRE_COMMANDS = 5
def _get_redis(config, section):
"""
:param ConfigParser config:
:param str section:
:return Redis:
"""
return Redis(host=config.get(section, RCONFIG_KEY_HOST),
port=config.getint(section, RCONFIG_KEY_PORT),
db=config.get(section, RCONFIG_KEY_DB),
socket_connect_timeout=5)
def get_redis(config):
"""
Returns the default Redis connection
:param ConfigParser config:
:return Redis:
"""
global RCONFIG_VALUE_EXPIRE
try:
RCONFIG_VALUE_EXPIRE = config.getint(RCONFIG_SECTION, RCONFIG_KEY_EXPIRE)
log("The Redis values will expire after {} seconds.".format(RCONFIG_VALUE_EXPIRE))
except NoOptionError:
log("The Redis values will not expire.")
RCONFIG_VALUE_EXPIRE = None
except ValueError:
log("The provided default Expire value is invalid! No expiration will be set.")
RCONFIG_VALUE_EXPIRE = None
return _get_redis(config, RCONFIG_SECTION)
def get_persistent_redis(config):
"""
Returns the Persistent Redis Connection
:param ConfigParser config:
:return Redis:
"""
return _get_redis(config, RCONFIG_PERSISTENT_SECTION)
def get_piped(r, keys):
"""
Creates a Pipeline and requests all listed items at once.
Returns a dictionary with the key-value pairs being equivalent
to the stored values in Redis.
:param Redis r:
:param list of str keys:
:return dict of (str, str):
"""
data_dict = {}
pipe = r.pipeline()
for key in keys:
pipe.get(key)
data_dict[key] = None
data = pipe.execute()
for i, item in enumerate(data):
data_dict[keys[i]] = item
return data_dict
def set_piped(r, data_dict):
"""
Creates a Pipeline and sends all listed items at once.
Returns a dictionary with the key-value pairs containing the
result of each operation.
:param Redis r:
:param dict of (str, object) data_dict:
:return dict of (str, str):
"""
keys = []
result_dict = {}
pipe = r.pipeline()
for key, value in data_dict.iteritems():
if type(key) is tuple or type(key) is list:
for i in range(len(key)):
if value[i] is None:
pipe.delete(key[i])
else:
pipe.set(key[i], str(value[i]), ex=RCONFIG_VALUE_EXPIRE)
result_dict[key[i]] = None
keys.append(key[i])
else:
if value is None:
pipe.delete(key)
elif type(value) is tuple or type(value) is list:
pipe.set(key, '|'.join(value), ex=RCONFIG_VALUE_EXPIRE)
else:
pipe.set(key, str(value), ex=RCONFIG_VALUE_EXPIRE)
result_dict[key] = None
keys.append(key)
data = pipe.execute()
for i, item in enumerate(data):
result_dict[keys[i]] = item
return result_dict
def incr_piped(r, data_dict):
"""
Same as set_piped, but uses INCRBY instead of SET.
Increases <key> by <value>.
Note that INCRBY does not support expiration so this will
not be taken into account
:param Redis r:
:param dict of (str, object) data_dict:
:return dict of (str, str):
"""
keys = []
result_dict = {}
pipe = r.pipeline()
for key, value in data_dict.iteritems():
if value is None:
pipe.delete(key)
else:
pipe.incrbyfloat(key, value)
result_dict[key] = None
keys.append(key)
data = pipe.execute()
for i, item in enumerate(data):
result_dict[keys[i]] = item
return result_dict
def get_command_param_key(command, param_name):
return command + '.Param:' + param_name
def send_command_request(r, command, params=None):
"""
Creates a new Command Request and sends it to Redis for
a request processor to process
:param Redis r: Redis instance
:param str command: Command Name
:param dict of str, object params: Optional Command params
:return:
"""
pipe = r.pipeline()
pipe.set(command, True, ex=RCONFIG_VALUE_EXPIRE_COMMANDS)
if params:
for key, value in params.iteritems():
if value is not None:
param_key = get_command_param_key(command, key)
pipe.set(param_key, value, ex=RCONFIG_VALUE_EXPIRE_COMMANDS)
pipe.execute()
def set_command_as_handled(r, command):
"""
Removes a Command Request from Redis and thus marks it as handled
:param Redis r: Redis instance
:param str command: Command Name
"""
pipe = r.pipeline()
pipe.delete(command)
pipe.execute()
def get_command_params(r, command, params, delete_after_request=True):
"""
Returns one or more parameters of a given command
:param Redis r: Redis instance
:param str command: Command Name
:param str|list of str params: Paramter Name or list of Parameter Names to request
:param bool delete_after_request: If True, all requested parameters will be deleted after execution
:return str|dict of str, str:
"""
if isinstance(params, list):
output = {}
keys = []
key_map = {}
for key in params:
output[key] = None
param_key = get_command_param_key(command, key)
keys.append(param_key)
key_map[param_key] = key
out = get_piped(r, keys)
for key, value in out.iteritems():
output[key_map[key]] = value
if delete_after_request:
pipe = r.pipeline()
for key in keys:
r.delete(key)
pipe.execute()
return output
else:
return r.get(get_command_param_key(command, params))
def load_synced_value(r, pr, key):
"""
:param Redis r: Redis instance
:param Redis pr: Persistent Redis instance
:param str key:
:return str:
"""
o = get_piped(pr, [key])
if key in o and o[key]:
set_piped(r, {key: o[key]})
return o[key]
else:
r.delete(key)
return None
def save_synced_value(r, pr, key, value):
"""
:param Redis r: Redis instance
:param Redis pr: Persistent Redis instance
:param str key:
:param str|None value:
:return str:
"""
if value:
s = {key: value}
set_piped(r, s)
set_piped(pr, s)
else:
r.delete(key)
pr.delete(key)
def check_command_requests(r, commands):
"""
Checks a list of commands for a pending request
:param Redis r: Redis instance
:param list of str commands: List of Commands
:return:
"""
return get_piped(r, commands)
class RedisBackgroundFetcher(CarPiThread):
"""
Redis Background Data Fetcher
"""
RETRIES = 5
RETRY_INTERVAL = 0.5
def __init__(self, r, keys_to_fetch, fetch_interval=0.1):
"""
:param Redis r:
:param list of str keys_to_fetch:
:param int fetch_interval:
"""
CarPiThread.__init__(self, fetch_interval)
self.keys_to_fetch = keys_to_fetch
self._r = r
self._running = True
self._interval = fetch_interval
self._current_data = {} # type: dict of (str, str)
self._retries = RedisBackgroundFetcher.RETRIES
def _fetch_data(self):
# Creates a copy so a user interaction does not cause problems
keys = self.keys_to_fetch[:]
new_data = get_piped(self._r, keys)
self._current_data = new_data
def get_current_data(self):
return self._current_data
def _do(self):
try:
self._fetch_data()
self._retries = RedisBackgroundFetcher.RETRIES
except (exceptions.ConnectionError, exceptions.TimeoutError):
if self._retries == 0:
log("Failed to reconnect to Redis after {} retries!".format(RedisBackgroundFetcher.RETRIES))
raise
else:
log("Connection to Redis lost, skipping and trying again in {} seconds ({} more times) ..."
.format(RedisBackgroundFetcher.RETRY_INTERVAL, self._retries))
self._retries -= 1
sleep(RedisBackgroundFetcher.RETRY_INTERVAL)
except SystemExit:
log("SystemExit has been requested, stopping Fetcher Thread ...")
self._running = False
class CarPiControlThread(CarPiThread):
def __init__(self, redis, commands, parameters, interval):
"""
:param Redis redis: Redis instance
:param list of str commands:
:param dict of str, list of str parameters:
:param int|float interval:
"""
CarPiThread.__init__(self, interval)
self._redis = redis
self._commands = commands # type: list str
self._parameters = parameters # type: dict str, list str
self._command_implementation = self._map_command_implementations(commands) # type: dict str, function
def _map_command_implementations(self, commands):
"""
:param list of str commands:
:return dict of str, function:
"""
raise NotImplementedError
def _do(self):
commands_to_execute = {}
# Get all commands which are requested
requested_commands = check_command_requests(self._redis, self._commands)
for command, val in requested_commands.iteritems():
if not val:
continue
if command in self._parameters:
# Get Parameter Values
params = self._parameters[command]
commands_to_execute[command] = get_command_params(self._redis,
command,
params)
else:
# Execute without Parameters
commands_to_execute[command] = True
# Execute Commands
for command, params in commands_to_execute.iteritems():
if isinstance(params, dict):
self._execute_command(command, params)
else:
self._execute_command(command)
set_command_as_handled(self._redis, command)
def _execute_command(self, command, params=None):
if command in self._command_implementation:
fun = self._command_implementation[command]
if params:
fun(params)
else:
fun()
else:
log("No function found for Redis Command {}!".format(command))
if __name__ == "__main__":
print("This script is not intended to be run standalone!")
```
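A hedged end-to-end sketch of the pipelined helpers; configuration is assumed to come from CarPiConfig.init_config_env and the [Redis] section of the config file.

```python
# Hedged sketch: CARPI_CONFIG and the key values are assumptions.
from CarPiConfig import init_config_env
from RedisKeys import GpsRedisKeys
from RedisUtils import get_redis, get_piped, set_piped

config = init_config_env('CARPI_CONFIG')
r = get_redis(config)

set_piped(r, {GpsRedisKeys.KEY_SPEED_KMH: 88.0,
              GpsRedisKeys.KEY_FIX_MODE: 3})
values = get_piped(r, [GpsRedisKeys.KEY_SPEED_KMH, GpsRedisKeys.KEY_FIX_MODE])
print(values[GpsRedisKeys.KEY_SPEED_KMH])
```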
#### File: CarPi/CarPiDaemons/Obd2DataParser.py
```python
from CarPiLogging import log
from RedisKeys import ObdRedisKeys
class ObdPidParserUnknownError(Exception):
def __init__(self, type, val=None):
"""
:param str type: OBD PID
:param str val: (optional) value received to parse
"""
log("Unknown or unimplemented OBD PID {} (Value was: {})".format(type, val))
self.type = type
self.val = val
def trim_obd_value(v):
"""
Trims unneeded data from an OBD response
:param str v:
:return str:
"""
if not v or len(v) < 4:
return ''
else:
return v[4:]
def prepare_value(v):
"""
:param str v:
:return str:
"""
log('Preparing value {}'.format(v))
a = v.split('|')
if len(a) >= 2 and a[1] != '>':
log('Returning {} for {}'.format(a[1], v))
return a[1]
else:
log('Returning NONE for {}'.format(v))
return None
def parse_value(type, val):
"""
Parses a given OBD value of a given type (PID)
and returns the parsed value.
If the PID is unknown / not implemented a PIDParserUnknownError
will be raised including the type which was unknown
:param type:
:param val:
:return:
"""
if type in PARSER_MAP:
prep_val = prepare_value(val)
out = PARSER_MAP[type](prep_val)
log('For {} entered {}, got {} out'.format(type, prep_val, out))
return out
else:
raise ObdPidParserUnknownError(type, val)
def parse_obj(o):
"""
Parses a given dictionary with the key being the OBD PID and the value its
returned value by the OBD interface
:param dict o:
:return:
"""
r = {}
for k, v in o.items():
r[k] = parse_value(k, v)
return r
def transform_obj(o):
r = {}
for k, v in o.items():
        if isinstance(v, tuple):
keys = OBD_REDIS_MAP[k]
r[keys[0]] = v[0]
r[keys[1]] = v[1]
else:
r[OBD_REDIS_MAP[k]] = v
r[ObdRedisKeys.KEY_ALIVE] = 1
return r
def parse_atrv(v):
"""
Parses the battery voltage and returns it in [Volt] as float with 1 decimal place
:param str v: e.g. "12.3V"
:return float:
"""
try:
return float(v.replace('V', ''))
except ValueError:
return float('nan')
def parse_0101(v):
"""
Parses the DTC status and returns two elements.
https://en.wikipedia.org/wiki/OBD-II_PIDs#Mode_1_PID_01
:param v:
:return bool, int:
"""
    tv = trim_obd_value(v)
    mil_status = None  # type: bool
    num_dtc = None  # type: int
    try:
        byte_a = int(tv[:2], 16)  # parse the trimmed value, not the raw response
        mil_status = byte_a >= 0x80  # bit A7 flags the MIL ("check engine") lamp
        num_dtc = byte_a % 0x80  # bits A6-A0 hold the number of stored DTCs
    except ValueError:
        mil_status = None
        num_dtc = None
return mil_status, num_dtc
def parse_0103(v):
"""
Parses the fuel system status and returns an array with two elements (one for
each fuel system).
The returned values are converted to decimal integers and returned as is.
The fuel system values are described here:
https://en.wikipedia.org/wiki/OBD-II_PIDs#Mode_1_PID_03
:param str v: e.g. "41030100"
:return int, int:
"""
tv = trim_obd_value(v) # trimmed value
status_1, status_2 = None, None
try:
        status_1 = int(tv[:2], 16)
except ValueError:
status_1 = None
try:
        status_2 = int(tv[2:4], 16)
except ValueError:
status_2 = None
return status_1, status_2
def parse_0104(v):
"""
    Parses the calculated engine load and returns it as a percentage from 0 - 100
    :param str v: e.g. "410405"
    :return float: e.g. 1.96
"""
try:
val = int(trim_obd_value(v), 16)
return val / 2.55
except ValueError:
return float('nan')
def parse_010B(v):
"""
Parses Intake MAP and returns it in [kPa] as an integer from 0 - 255
:param str v:
:return int:
"""
try:
return int(trim_obd_value(v), 16)
except ValueError:
return float('nan')
def parse_010C(v):
"""
Parses Engine RPM and returns it in [RPM] as a float from 0 - 16383.75
:param str v:
:return float:
"""
try:
val = int(trim_obd_value(v), 16)
        return val / 4.0  # float division keeps the 0.25 RPM resolution
except ValueError:
return float('nan')
def parse_010D(v):
"""
Parses Vehicle Speed and returns it in [km/h] as an integer from 0 - 255
:param str v:
:return int:
"""
try:
return int(trim_obd_value(v), 16)
except ValueError:
return float('nan')
def parse_010F(v):
"""
Parses Intake Air Temperature and returns it in [degrees C] as an integer from -40 - 215
:param str v:
:return int:
"""
try:
val = int(trim_obd_value(v), 16)
return val - 40
except ValueError:
return float('nan')
def parse_0134_013B(v):
"""
Parses the O2 Sensor Value (0134 - 013B) and returns two values parsed from it:
1. Fuel-Air Equivalence [Ratio] as a float from 0 - 2
2. Current in [mA] as a float from -128 - 128
:param str v:
:return tuple of float, float:
"""
try:
trim_val = trim_obd_value(v)
        val_ab = int(trim_val[0:4], 16)  # bytes A,B: equivalence ratio word
        val_cd = int(trim_val[4:8], 16)  # bytes C,D: current word
        return (2.0 / 65536) * val_ab, (val_cd / 256.0) - 128
except ValueError:
return float('nan'), float('nan')
DTC_SOURCES = {
0: 'P',
1: 'C',
2: 'B',
3: 'U'
}
NO_DTC = 'P0000'
def parse_03(v):
"""
Parses stored DTC codes and returns them as a list.
01,01,0001|0001,0001 => U1111
A7,6: Error Source
0 P Powertrain
1 C Chassis
2 B Body
3 U Network
A5,4: 2nd DTC Char => as number (0-3)
A3,0: 3rd DTC Char => as number (0-F)
B7,4: 4th DTC Char => as number (0-F)
B3,0: 5th DTC Char => as number (0-F)
:param str v:
:return list:
"""
dtcs = []
trim_v = trim_obd_value(v)
if trim_v:
# print('input: {}'.format(trim_v))
for i in range(0, len(trim_v), 4):
try:
byte_a = int(trim_v[i:i + 2], 16)
byte_b = int(trim_v[i + 2:i + 4], 16)
# print(' - bytes: {},{}'.format(byte_a, byte_b))
err_src = byte_a / 64
err_src_code = DTC_SOURCES[err_src]
# print(' Err Src: {} ({})'.format(err_src, err_src_code))
dtc_c2 = byte_a % 64 / 16
dtc_c3 = byte_a % 16
dtc_c4 = byte_b / 16
dtc_c5 = byte_b % 16
# print(' {}, {}, {}, {}'.format(dtc_c2, dtc_c3, dtc_c4, dtc_c5))
dtc = '{}{}{}{}{}'.format(err_src_code, dtc_c2, dtc_c3, dtc_c4, dtc_c5)
# print('=> {}'.format(dtc))
if dtc != NO_DTC:
dtcs.append(dtc)
except ValueError:
pass
return dtcs
PARSER_MAP = {
'ATRV': parse_atrv,
'0101': parse_0101,
'0103': parse_0103,
'0104': parse_0104,
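    # 0105 (coolant temp) deliberately reuses parse_010F: both decode "A - 40"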
'0105': parse_010F,
'010B': parse_010B,
'010C': parse_010C,
'010D': parse_010D,
'010F': parse_010F,
'0134': parse_0134_013B,
'0135': parse_0134_013B,
'0136': parse_0134_013B,
'0137': parse_0134_013B,
'0138': parse_0134_013B,
'0139': parse_0134_013B,
'013A': parse_0134_013B,
'013B': parse_0134_013B,
'03': parse_03,
'07': parse_03
}
OBD_REDIS_MAP = {
'ATRV': ObdRedisKeys.KEY_BATTERY_VOLTAGE,
'0101': (ObdRedisKeys.KEY_MIL_STATUS, ObdRedisKeys.KEY_DTC_COUNT),
'0103': (ObdRedisKeys.KEY_FUELSYS_1_STATUS, ObdRedisKeys.KEY_FUELSYS_2_STATUS),
'0104': ObdRedisKeys.KEY_ENGINE_LOAD,
'0105': ObdRedisKeys.KEY_COOLANT_TEMP,
'010B': ObdRedisKeys.KEY_INTAKE_MAP,
'010C': ObdRedisKeys.KEY_ENGINE_RPM,
'010D': ObdRedisKeys.KEY_VEHICLE_SPEED,
'010F': ObdRedisKeys.KEY_INTAKE_TEMP,
'0134': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'0135': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'0136': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'0137': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'0138': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'0139': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'013A': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'013B': (ObdRedisKeys.KEY_O2_SENSOR_FAEQV, ObdRedisKeys.KEY_O2_SENSOR_CURRENT),
'03': ObdRedisKeys.KEY_CURRENT_DTCS,
'07': ObdRedisKeys.KEY_PENDING_DTCS
}
if __name__ == "__main__":
print("This script is not intended to be run standalone!")
```
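To see how the pieces above fit together, here is a small sketch; the raw strings mimic the `echo|payload` framing that `prepare_value()` strips, and the byte values are invented.

```python
# Hypothetical raw responses in the 'echo|payload' framing prepare_value() expects.
sample = {
    '010C': '010C|410C1AF8',  # engine RPM: 0x1AF8 / 4.0 = 1726.0 RPM
    '010D': '010D|410D32',    # vehicle speed: 0x32 = 50 km/h
}
parsed = parse_obj(sample)            # {'010C': 1726.0, '010D': 50}
redis_fields = transform_obj(parsed)  # remapped to Redis keys, plus KEY_ALIVE
```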
#### File: CarPi/CarPiUI/PygameUtils.py
```python
import pygame
from os import environ
from CarPiLogging import log
from ConfigParser import ConfigParser
IO_CONFIG_SECTION = 'IO'
IO_CONFIG_KEY_OUTPUT_DEVICE = 'output'
IO_CONFIG_KEY_MOUSE_DRIVER = 'mouse_driver'
IO_CONFIG_KEY_MOUSE_DEVICE = 'mouse_device'
ENV_OUTPUT = 'SDL_FBDEV'
ENV_MOUSE_DRIVER = 'SDL_MOUSEDRV'
ENV_MOUSE_DEVICE = 'SDL_MOUSEDEV'
UI_CONFIG_SECTION = 'UI'
UI_CONFIG_KEY_SHOW_MOUSE = 'show_mouse'
UI_CONFIG_KEY_FULLSCREEN = 'fullscreen'
UI_CONFIG_KEY_RES_WIDTH = 'res_width'
UI_CONFIG_KEY_RES_HEIGHT = 'res_height'
def init_pygame(config):
"""
:param ConfigParser config:
:return:
"""
log('Initializing PyGame ...')
init_passed, init_failed = pygame.init()
log('PyGame initialized using Ver. {}, {} modules loaded, {} modules failed'.format(pygame.ver,
init_passed,
init_failed))
mouse_visible = False
if config.has_option(UI_CONFIG_SECTION, UI_CONFIG_KEY_SHOW_MOUSE):
mouse_visible = config.getboolean(UI_CONFIG_SECTION,
UI_CONFIG_KEY_SHOW_MOUSE)
pygame.mouse.set_visible(mouse_visible)
pygame.display.set_mode((config.getint(UI_CONFIG_SECTION, UI_CONFIG_KEY_RES_WIDTH),
config.getint(UI_CONFIG_SECTION, UI_CONFIG_KEY_RES_HEIGHT)))
def init_io(config):
"""
:param ConfigParser config:
:return:
"""
log("Configuring PyGame IO ...")
if ENV_OUTPUT not in environ:
if config.has_option(IO_CONFIG_SECTION, IO_CONFIG_KEY_OUTPUT_DEVICE):
environ[ENV_OUTPUT] = config.get(IO_CONFIG_SECTION,
IO_CONFIG_KEY_OUTPUT_DEVICE)
if ENV_MOUSE_DEVICE not in environ:
if config.has_option(IO_CONFIG_SECTION, IO_CONFIG_KEY_MOUSE_DEVICE):
environ[ENV_MOUSE_DEVICE] = config.get(IO_CONFIG_SECTION,
IO_CONFIG_KEY_MOUSE_DEVICE)
if ENV_MOUSE_DRIVER not in environ:
if config.has_option(IO_CONFIG_SECTION, IO_CONFIG_KEY_MOUSE_DRIVER):
environ[ENV_MOUSE_DRIVER] = config.get(IO_CONFIG_SECTION,
IO_CONFIG_KEY_MOUSE_DRIVER)
def load_image(path, convert_alpha=True):
image = None
try:
log('Loading Image {} ...'.format(path))
with open(path) as f:
image = pygame.image.load(f) # type: pygame.Surface
if image and convert_alpha:
image = image.convert_alpha(image)
except IOError:
log('Could not load image {}!'.format(path))
except pygame.error as err:
        log('PyGame failed while loading "{}": {}'.format(path, err.message))
return image
if __name__ == "__main__":
print("This script is not intended to be run standalone!")
``` |
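One ordering caveat worth illustrating: SDL only honours the `SDL_*` environment variables if they are set before the display is initialised, so `init_io()` must run before `init_pygame()`. A minimal sketch, assuming a config file `carpi.conf` (an invented name) with the `[IO]` and `[UI]` sections used above:

```python
# Order matters: SDL reads these env vars during init. 'carpi.conf' is invented.
from ConfigParser import ConfigParser

config = ConfigParser()
config.read('carpi.conf')

init_io(config)      # sets SDL_FBDEV / SDL_MOUSE* before pygame comes up
init_pygame(config)  # init modules, mouse visibility and display mode
```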
{
"source": "JoachimVeulemans/honours-data",
"score": 3
} |
#### File: honours-data/backend/fileManager.py
```python
import json
import os
from os import listdir
from os.path import isfile, join
from json import JSONDecodeError
from shutil import copyfile
class FileReader:
def __init__(self, file: str):
if not os.path.isfile(file):
fileLoc = open(file, "w")
fileLoc.close()
self.file = file
def get_json(self):
f = open(self.file, 'r')
lines = str(f.readline())
f.close()
if lines == "":
lines = '{"Ideas": []}'
try:
return_value = json.loads(lines)
except JSONDecodeError as error:
print(error)
return_value = "Je hebt geen geldige JSON meegegeven met je POST request. Dit is wat je gestuurd hebt: " + lines
return return_value
class FilesReader:
def __init__(self, path: str):
self.path = path
def get_list(self):
onlyfiles = [f for f in listdir(self.path) if isfile(join(self.path, f))]
correctFiles = []
for file in onlyfiles:
            if file.endswith(".txt"):
correctFiles.append(file.split(".txt")[0])
return correctFiles
class FileWriter:
def __init__(self, file):
if not os.path.isfile(file):
fileLoc = open(file, "w")
fileLoc.close()
self.file = file
def write_lines(self, lines: []):
for line in lines:
self.write_line(line)
def write_line(self, line: str):
f = open(self.file, "w")
line = str(line)
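        # Best-effort conversion of Python repr quoting to JSON double quotes;
        # json.dumps would be more robust, but this keeps the existing behaviour.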
line = line.replace("{'", '{\"')
line = line.replace("'}", '\"}')
line = line.replace("':", '\":')
line = line.replace(", '", ', \"')
line = line.replace(": '", ': \"')
line = line.replace("', ", '\", ')
line = line.replace("['", '[\"')
line = line.replace("']", '\"]')
f.write(line)
f.close()
def clear(self):
f = open(self.file, "w")
f.close()
def remove(self, delete_path):
copyfile(self.file, delete_path)
os.remove(self.file)
``` |
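A minimal round-trip sketch for the classes above; the file name `ideas.txt` is an invented example:

```python
# Round-trip sketch; the file name 'ideas.txt' is a made-up example.
writer = FileWriter('ideas.txt')
writer.write_line({'Ideas': [{'title': 'demo'}]})  # dict repr rewritten to JSON quoting

reader = FileReader('ideas.txt')
print(reader.get_json())  # {'Ideas': [{'title': 'demo'}]}
```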
{
"source": "JoachimVeulemans/music-classifier",
"score": 3
} |
#### File: backend/tests/test_api.py
```python
import sys, os
import json
import unittest
import hashlib
sys.path.append(os.path.join(sys.path[0],'..'))
from application import app
from parameterized import parameterized, parameterized_class
class TestApi(unittest.TestCase):
def setUp(self):
self.headers = {
'ContentType': 'application/json',
'dataType': 'json'
}
def test_dummy(self):
test = 'ok'
self.assertEqual(test, 'ok')
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "JoachimVeulemans/secure-messaging-platform",
"score": 3
} |
#### File: secure-messaging-platform/backend/generalEncryptionFunctions.py
```python
import hashlib
import os
from generalfunctions import AuthException
from debug import debug
from base64 import *
import Crypto.Random
from Crypto import Random
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.Hash import SHA512, SHA384, SHA256, SHA, MD5
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
from cryptography.fernet import Fernet
SALT_SIZE = 16
path_to_file = os.getenv('FILES')
if path_to_file is None:
path_to_file = os.path.dirname(os.path.abspath(__file__)) + "/data/"
else:
path_to_file = path_to_file[5:]
path_to_public_keys = path_to_file + "publicKeys/pub_key_"
path_to_private_keys = path_to_file + "privateKeys/priv_key_"
def get_salt():
return str(Crypto.Random.get_random_bytes(SALT_SIZE))
def generate_AES_key():
return Fernet.generate_key()
# see link https://stackoverflow.com/questions/12524994/encrypt-decrypt-using-pycrypto-aes-256#comment75446564_21928790
class AESCipher(object):
def __init__(self, key):
self.key = hashlib.sha256(key).digest()
def encrypt(self, raw):
raw = self._pad(raw)
iv = Random.new().read(AES.block_size)
cipher = AES.new(self.key, AES.MODE_CBC, iv)
return b64encode(iv + cipher.encrypt(raw))
def decrypt(self, enc):
enc = b64decode(enc)
iv = enc[:AES.block_size]
cipher = AES.new(self.key, AES.MODE_CBC, iv)
return self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')
@staticmethod
def _pad(s):
return s + (AES.block_size - len(s) % AES.block_size) * chr(AES.block_size - len(s) % AES.block_size)
@staticmethod
def _unpad(s):
return s[:-ord(s[len(s) - 1:])]
# encryptedpass = "<PASSWORD>"
# https://stackoverflow.com/questions/51228645/how-can-i-encrypt-with-a-rsa-private-key-in-python
def newkeys(keysize):
random_generator = Random.new().read
key = RSA.generate(keysize, random_generator)
private, public = key, key.publickey()
return public, private
def importKey(externKey):
return RSA.importKey(externKey)
def getpublickey(priv_key):
return priv_key.publickey()
def encrypt(message, pub_key):
# RSA encryption protocol according to PKCS#1 OAEP
cipher = PKCS1_OAEP.new(pub_key)
return cipher.encrypt(message)
def decrypt(ciphertext, priv_key):
# RSA encryption protocol according to PKCS#1 OAEP
cipher = PKCS1_OAEP.new(priv_key)
return cipher.decrypt(ciphertext).decode('utf-8')
def sign(message, priv_key, hashAlg="SHA-256"):
global hash
hash = hashAlg
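    # The chosen algorithm name is stored in a module-level global (shadowing the
    # built-in `hash`) so that verify() below picks the same digest type.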
signer = PKCS1_v1_5.new(priv_key)
if (hash == "SHA-512"):
digest = SHA512.new()
elif (hash == "SHA-384"):
digest = SHA384.new()
elif (hash == "SHA-256"):
digest = SHA256.new()
elif (hash == "SHA-1"):
digest = SHA.new()
else:
digest = MD5.new()
digest.update(message.encode('utf-8'))
return signer.sign(digest)
def verify(message, signature, pub_key):
signer = PKCS1_v1_5.new(pub_key)
if (hash == "SHA-512"):
digest = SHA512.new()
elif (hash == "SHA-384"):
digest = SHA384.new()
elif (hash == "SHA-256"):
digest = SHA256.new()
elif (hash == "SHA-1"):
digest = SHA.new()
else:
digest = MD5.new()
digest.update(message.encode('utf-8'))
return signer.verify(digest, signature)
@debug
def full_encrypt(message: str, priv_sender, pub_receiver):
"""
:param message:
:param priv_sender:
:param pub_receiver:
:return: aes_encrypted_message, rsa_encrypted_aes_key, signature
"""
aes_key = generate_AES_key()
    cipher = AESCipher(aes_key)
    aes_encr_mess = cipher.encrypt(message)
encr_aes_key = encrypt(aes_key, pub_receiver)
signature = b64encode(sign(message, priv_sender, "md-5"))
return aes_encr_mess, encr_aes_key, signature
@debug
def full_decrypt(files: (), pub_sender, priv_receiver):
"""
:param files:
:param pub_sender:
:param priv_receiver:
:return: original_message or raises AuthException
"""
aes_key = decrypt(files[1], priv_receiver)
    cipher = AESCipher(aes_key.encode())
    orig_mess = cipher.decrypt(files[0])
if verify(orig_mess, b64decode(files[2]), pub_sender):
return orig_mess
else:
raise AuthException("this message was not intended for you.")
def generate_keys(user_id, pass_word_hash):
keys = newkeys(2048)
#curpath = os.path.dirname(os.path.dirname(os.path.abspath(os.curdir)))
pub_key_export = keys[0].exportKey("PEM")
#file = open(curpath + path_to_public_keys + str(user_id) + ".pem", "wb+")
file = open(path_to_public_keys + str(user_id) + ".pem", "wb+")
file.write(pub_key_export)
file.close()
priv_key_exp = keys[1].exportKey("PEM", passphrase=<PASSWORD>)
#file = open(curpath + path_to_private_keys + str(user_id) + ".pem", "wb+")
file = open(path_to_private_keys + str(user_id) + ".pem", "wb+")
file.write(priv_key_exp)
file.close()
def get_pub_key(user_id):
#curpath = os.path.dirname(os.path.dirname(os.path.abspath(os.curdir)))
file = open(path_to_public_keys + str(user_id) + ".pem", 'rb')
pub_key = RSA.importKey(file.read())
file.close()
return pub_key
def get_priv_key(user_id, pass_word_hash):
#curpath = os.path.dirname(os.path.dirname(os.path.abspath(os.curdir)))
file = open(path_to_private_keys + str(user_id) + ".pem", 'rb')
priv_key = RSA.importKey(file.read(), pass_word_hash)
file.close()
return priv_key
def get_keys(user_id, pass_word_hash):
return get_pub_key(user_id), get_priv_key(user_id, pass_word_hash)
# if __name__ == "__main__":
# print("----setup----")
# message = "deze tekst wil ik encrypteren"
# print("message", message)
# aes_key = "dit is mn key"
# ciphter = AESCipher(aes_key)
# print("aes_key", aes_key)
# print("----encryption----")
# enc = ciphter.encrypt(message)
# print("geincrypteerde text", enc)
# rsa_keyA = newkeys(2048)
# rsa_keyB = newkeys(2048)
# encr_aes_key = encrypt(aes_key, rsa_keyB[0])
# print("geincrypteerde aes key", encr_aes_key)
# signature = b64encode(sign(message,rsa_keyA[1], "md-5"))
# print("signature", signature)
# print("----decryption----")
# decr_aes_key = decrypt(encr_aes_key, rsa_keyB[1])
# print("decrypteerde_aes_key", decr_aes_key)
# cipher = AESCipher(decr_aes_key)
# orig_message = cipher.decrypt(enc)
# print("original_message", orig_message)
# verify = verify(message, b64decode(signature), rsa_keyA[0])
# print("veriffy", verify)
```
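A self-contained sketch of the signing helpers above (no key files involved); the message text is arbitrary, and the boolean return of `verify()` assumes the classic PyCrypto-style `PKCS1_v1_5` API used in this module:

```python
# Sign/verify round trip using the helpers above; the message text is arbitrary.
pub, priv = newkeys(2048)
signature = sign('hello secure world', priv, 'SHA-256')
print(verify('hello secure world', signature, pub))  # True with matching message
print(verify('tampered message', signature, pub))    # False (PyCrypto-style bool API)
```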
#### File: backend/tests/test_api.py
```python
import sys, os
import json
import unittest
import hashlib
sys.path.append(os.path.join(sys.path[0],'..'))
from application import app
from parameterized import parameterized, parameterized_class
from usermanager import get_salt
class TestApi(unittest.TestCase):
def setUp(self):
self.myapp = app.test_client()
self.myapp.testing = True
self.headers = {
'ContentType': 'application/json',
'dataType': 'json'
}
def test_api_not_found_should_return_json(self):
rv = self.myapp.get('/')
self.assertEqual(rv.status, '404 NOT FOUND')
self.assertTrue(b'Insert a valid api' in rv.data)
@parameterized.expand([
('/users/me'),
('/messages/me'),
('/users/5cd6dba004078d00c65c25cc'),
('/messages/sent/me'),
])
def test_api_login_required_get_should_return_not_loggedin_json(self, route):
rv = self.myapp.get(route)
self.assertEqual(rv.status, '401 UNAUTHORIZED')
@parameterized.expand([('iets', 'd1f9809507ddd406f5d55541c515fcc88c6734175808df2f3e0e4fa4d01535ee')])
def test_api_login_should_return_true_when_credentials_are_correct(self, user, password):
login_json = {'username': user, 'password': password}
response = self.myapp.post('/login', content_type='application/json', data=json.dumps(login_json))
        self.assertEqual(response.status, '200 OK', 'Response code should be 200 when credentials are correct')
self.assertEqual(json.loads(response.get_data()), 'true', 'Should login when credentials are correct')
@parameterized.expand([('iets', 'wrong_password')])
def test_api_login_should_return_false_when_credentials_are_incorrect(self, user, password):
login_json = {'username': user, 'password': password}
response = self.myapp.post('/login', content_type='application/json', data=json.dumps(login_json))
        self.assertEqual(response.status, '200 OK', 'Response code should be 200 even when credentials are incorrect')
        self.assertEqual(json.loads(response.get_data()), 'false', 'Should not login when credentials are incorrect')
@parameterized.expand([('iets', 'iets')])
def test_api_should_handle_when_no_content_type_for_json_is_set_on_post_request(self, user, password):
login_json = {'username': user, 'password': password}
response = self.myapp.post('/login', data=login_json)
json_response = json.loads(response.get_data())
self.assertEqual(response.status, '400 BAD REQUEST')
        self.assertIn('bad_request_exception', json_response)
if __name__ == '__main__':
unittest.main()
```
#### File: secure-messaging-platform/backend/User.py
```python
import os
from time import sleep
from fileManager import FileReader
from flask_login import UserMixin
from generalEncryptionFunctions import newkeys, generate_keys, get_keys, full_encrypt, get_pub_key, full_decrypt, get_priv_key
from generalfunctions import generate_id, is_valid_email, EmailException, is_valid_id, IDException
path_to_file = os.getenv('FILES')
if path_to_file is None:
path_to_file = os.path.dirname(os.path.abspath(__file__)) + "/data/"
path_to_file += "users.txt"
class User(UserMixin):
id: str
username: str
firstname: str
lastname: str
email: str
password: str
def __init__(self, id: str = "", username: str = "", firstname: str = "", lastname: str = "", email: str = "",
password: str = "", salt: str = ""):
# if not is_valid_id(id):
# raise IDException()
self.id = id
self.username = username
self.firstname = firstname
self.lastname = lastname
# if not is_valid_email(email):
# raise EmailException
self.email = email
self.password = password
self.salt = salt
def as_dict(self):
return {
"_id": self.id,
"username": self.username,
"firstname": self.firstname,
"lastname": self.lastname,
"email": self.email,
"password": <PASSWORD>,
"salt": self.salt
}
def get_private_key(self):
raise NotImplementedError
def get_public_key(self, user_id):
raise NotImplementedError
@staticmethod
def get(id: str):
if is_valid_id(id):
fileReader = FileReader(path_to_file)
for user_attributes in fileReader.read_words_by_line(","):
user_id = user_attributes[0]
if not is_valid_id(user_id):
raise IDException
if user_id == id:
return User(user_attributes[0], user_attributes[1], user_attributes[2],
user_attributes[3], user_attributes[4], user_attributes[5])
else:
raise IDException()
def as_new_user(data):
id = generate_id()
generate_keys(id, data['password'])
return User(id, data['username'], data['firstname'], data['lastname'], data['email'], data['password'], data['salt'])
def as_existing_user(data):
return User(data['id'], data['username'], data['firstname'], data['lastname'], data['email'], data['password'], data['salt'])
# if __name__ == "__main__":
# sender = as_new_user({"username": "sender", "firstname": "firstnametest", "lastname": "lastnametest", "email": "<EMAIL>", "password": "<PASSWORD>", "salt": "<PASSWORD>"})
# receiver = as_new_user({"username": "receiver", "firstname": "firstnametest", "lastname": "lastnametest", "email": "<EMAIL>", "password": "<PASSWORD>", "salt": "<PASSWORD>"})
# sleep(2)
# keys_sender = get_keys(sender.id, sender.password)
# pub_key_receiver = get_pub_key(receiver.id)
#
# files = full_encrypt("deze message wil ik encrypteren.", keys_sender[1], pub_key_receiver)
#
# pub_send = get_pub_key(sender.id)
# priv_receiver = get_priv_key(receiver.id, receiver.password)
# message = full_decrypt(files, pub_send, priv_receiver)
# print(message)
``` |
{
"source": "JoachimVeulemans/toxic-comments-classifier",
"score": 3
} |
#### File: toxic-comments-classifier/backend/KimCNN.py
```python
import torch.nn as nn
import torch
from torch.autograd import Variable
import torch.nn.functional as F
class KimCNN(nn.Module):
def __init__(self, embed_num, embed_dim, class_num, kernel_num, kernel_sizes, dropout, static):
super(KimCNN, self).__init__()
V = embed_num
D = embed_dim
C = class_num
Co = kernel_num
Ks = kernel_sizes
self.static = static
self.embed = nn.Embedding(V, D)
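        # NOTE: forward() below expects input that is already embedded, i.e. of
        # shape (batch, seq_len, embed_dim); self.embed is defined but not applied there.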
self.convs1 = nn.ModuleList([nn.Conv2d(1, Co, (K, D)) for K in Ks])
self.dropout = nn.Dropout(dropout)
self.fc1 = nn.Linear(len(Ks) * Co, C)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
if self.static:
x = Variable(x)
x = x.unsqueeze(1) # (N, Ci, W, D)
x = [F.relu(conv(x)).squeeze(3) for conv in self.convs1] # [(N, Co, W), ...]*len(Ks)
x = [F.max_pool1d(i, i.size(2)).squeeze(2) for i in x] # [(N, Co), ...]*len(Ks)
x = torch.cat(x, 1)
x = self.dropout(x) # (N, len(Ks)*Co)
logit = self.fc1(x) # (N, C)
output = self.sigmoid(logit)
return output
``` |
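A quick shape check for the module above; every hyper-parameter below is a made-up example value. Note that `forward()` expects already-embedded input of shape `(batch, seq_len, embed_dim)`:

```python
# Shape sanity check; all hyper-parameters below are invented example values.
import torch

model = KimCNN(embed_num=10000, embed_dim=300, class_num=6,
               kernel_num=100, kernel_sizes=[3, 4, 5], dropout=0.5, static=True)
x = torch.randn(8, 50, 300)  # (batch, seq_len, embed_dim), already embedded
out = model(x)               # sigmoid outputs, one per class
print(out.shape)             # torch.Size([8, 6])
```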
{
"source": "joachimwolff/scHiCExplorer",
"score": 2
} |
#### File: scHiCExplorer/docs/conf.py
```python
import sys
import os
from unittest.mock import Mock
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
MOCK_MODULES = ['numpy', 'numpy.core', 'numpy.core.multiarray', 'numpy.distutils.core', 'pandas', 'pysam', 'intervaltree',
'scipy', 'scipy.sparse', 'scipy.stats', 'scipy.ndimage',
'matplotlib', 'matplotlib.pyplot', 'matplotlib.gridspec', 'matplotlib.ticker',
'matplotlib.textpath', 'matplotlib.patches', 'matplotlib.colors', 'matplotlib.cm',
'mpl_toolkits', 'mpl_toolkits.axisartist', 'mpl_toolkits.mplot3d', 'mpl_toolkits.axes_grid1',
'Bio', 'Bio.Seq', 'Bio.Alphabet', 'pyBigWig', 'tables', 'pytables', 'future', 'past', 'past.builtins',
'future.utils', 'cooler', 'logging', 'unidecode', 'hic2cool', 'hicmatrix', 'hicmatrix.HiCMatrix',
'hicmatrix.lib', 'krbalancing', 'fit_nbinom', 'pybedtools', 'numpy.float', 'numpy.dtype', 'sparse_neighbors_search',
'sklearn.cluster', 'errno', 'shutil', 'gzip', 'sklearn', 'sklearn.neighbors', 'sklearn.decomposition', 'hicexplorer', 'h5py',
'hicexplorer.utilities', 'hicexplorer.hicMergeMatrixBins', 'hicexplorer.hicPCA']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
autodoc_mock_imports = MOCK_MODULES
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinxarg.ext'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'scHiCExplorer'
copyright = u'2019, <NAME>'
author = u'<NAME>'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
def get_version():
import re
try:
f = open("../schicexplorer/_version.py")
except EnvironmentError:
return None
for line in f.readlines():
mo = re.match("__version__ = '([^']+)'", line)
if mo:
ver = mo.group(1)
return ver
return None
version = get_version()
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
```
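As a tiny sanity check, the regex in `get_version()` above matches lines of the following (hypothetical) form:

```python
import re
# Hypothetical _version.py line; mirrors the pattern used in get_version().
mo = re.match("__version__ = '([^']+)'", "__version__ = '5'")
assert mo is not None and mo.group(1) == '5'
```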
#### File: scHiCExplorer/schicexplorer/scHicAdjustMatrix.py
```python
import argparse
import os
from multiprocessing import Process, Queue
import time
import logging
log = logging.getLogger(__name__)
import cooler
import numpy as np
import gc
from hicmatrix import HiCMatrix as hm
import pandas as pd
from schicexplorer._version import __version__
from hicmatrix.lib import MatrixFileHandler
from copy import deepcopy
from schicexplorer.utilities import cell_name_list
def parse_arguments(args=None):
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=False,
        description='scHicAdjustMatrix is a tool to keep or remove a list of chromosomes in all Hi-C matrices stored in the scool file.'
)
parserRequired = parser.add_argument_group('Required arguments')
parserRequired.add_argument('--matrix', '-m',
help='The matrix to adjust in the scool format.',
required=True)
parserRequired.add_argument('--outFileName', '-o',
help='File name to save the adjusted matrix.',
required=True)
parserOpt = parser.add_argument_group('Optional arguments')
parserOpt.add_argument('--chromosomes', '-c',
nargs='+',
help='List of chromosomes to keep / remove')
parserOpt.add_argument('--createSubmatrix', '-cs',
type=int,
help='Keep only first n matrices and remove the rest. Good for test data creation.')
parserOpt.add_argument('--action',
                           help='Keep or remove the specified list of chromosomes.',
default='keep',
choices=['keep', 'remove']
)
parserOpt.add_argument('--threads', '-t',
help='Number of threads. Using the python multiprocessing module.',
required=False,
default=4,
type=int)
parserOpt.add_argument('--help', '-h', action='help', help='show this help message and exit')
parserOpt.add_argument('--version', action='version',
version='%(prog)s {}'.format(__version__))
return parser
def compute_adjust_matrix(pMatrixName, pMatricesList, pArgs, pListIds, pInvertedMap, pInvertedLogic, pQueue):
pixels_list = []
keep_matrices = []
for i, matrix in enumerate(pMatricesList):
        if i % 100 == 0:  # coarse progress logging
log.debug('processed {} out of {}'.format(i, len(pMatricesList)))
try:
cooler_obj = cooler.Cooler(pMatrixName + '::' + matrix)
pixels = cooler_obj.pixels()[:]
indices = pixels['bin1_id'].apply(lambda x: x in pListIds)
if pInvertedLogic:
indices = ~indices
pixels = pixels[indices].reset_index(drop=True)
indices = pixels['bin2_id'].apply(lambda x: x in pListIds)
if pInvertedLogic:
indices = ~indices
pixels = pixels[indices].reset_index(drop=True)
for key, value in pInvertedMap.items():
pixels['bin1_id'].replace(to_replace=key, value=value, inplace=True)
pixels['bin2_id'].replace(to_replace=key, value=value, inplace=True)
pixels_list.append(pixels)
keep_matrices.append(True)
except Exception as e:
keep_matrices.append(False)
log.debug('exception: {}'.format(e))
log.debug('pixels {}'.format(pixels[:5]))
continue
pQueue.put([pixels_list, keep_matrices])
return
def main(args=None):
args = parse_arguments().parse_args(args)
matrices_name = args.matrix
threads = args.threads
matrices_list = cell_name_list(matrices_name)
    if args.createSubmatrix is not None and args.chromosomes is None:
for matrix in matrices_list[:args.createSubmatrix]:
cooler.fileops.cp(args.matrix + '::' + matrix, args.outFileName + '::' + matrix)
exit(0)
input_count_matrices = len(matrices_list)
if threads > len(matrices_list):
threads = len(matrices_list)
# load bin ids only once
cooler_obj_external = cooler.Cooler(matrices_name + '::' + matrices_list[0])
bins = cooler_obj_external.bins()[:]
# apply the inverted operation if the number of values is less
# the idea is that for
# indices = pixels['bin1_id'].apply(lambda x: x in pListIds)
# the search time is less if the list pListIds is shorter
# therefore the drop must be inverted too
apply_inverted = False
if args.action == 'keep':
list_ids = bins.index[bins['chrom'].apply(lambda x: x in args.chromosomes)].tolist()
list_inverted_logic_ids = bins.index[bins['chrom'].apply(lambda x: x not in args.chromosomes)].tolist()
bins_new = bins[bins['chrom'].apply(lambda x: x in args.chromosomes)].reset_index()
else:
list_ids = bins.index[bins['chrom'].apply(lambda x: x not in args.chromosomes)].tolist()
list_inverted_logic_ids = bins.index[bins['chrom'].apply(lambda x: x in args.chromosomes)].tolist()
bins_new = bins[bins['chrom'].apply(lambda x: x not in args.chromosomes)].reset_index()
if len(list_inverted_logic_ids) < len(list_ids):
apply_inverted = True
list_ids = list_inverted_logic_ids
dict_values = bins_new['index'].to_dict()
inv_map = {}
for k, v in dict_values.items():
if k == v:
continue
inv_map[v] = k
bins_new.drop(['index'], axis=1, inplace=True)
all_data_collected = False
thread_done = [False] * threads
pixels_thread = [None] * threads
keep_thread = [None] * threads
matricesPerThread = len(matrices_list) // threads
queue = [None] * threads
process = [None] * threads
for i in range(threads):
if i < threads - 1:
matrices_name_list = matrices_list[i * matricesPerThread:(i + 1) * matricesPerThread]
else:
matrices_name_list = matrices_list[i * matricesPerThread:]
queue[i] = Queue()
process[i] = Process(target=compute_adjust_matrix, kwargs=dict(
pMatrixName=matrices_name,
pMatricesList=matrices_name_list,
pArgs=args,
pListIds=list_ids,
pInvertedMap=inv_map,
pInvertedLogic=apply_inverted,
pQueue=queue[i]
)
)
process[i].start()
while not all_data_collected:
for i in range(threads):
if queue[i] is not None and not queue[i].empty():
pixels_thread[i], keep_thread[i] = queue[i].get()
queue[i] = None
process[i].join()
process[i].terminate()
process[i] = None
thread_done[i] = True
all_data_collected = True
for thread in thread_done:
if not thread:
all_data_collected = False
time.sleep(1)
pixels_list = [item for sublist in pixels_thread for item in sublist]
keep_list = [item for sublist in keep_thread for item in sublist]
matrices_list = np.array(matrices_list)
mask = np.array(keep_list)
matrices_list = matrices_list[mask]
matrixFileHandler = MatrixFileHandler(pFileType='scool')
matrixFileHandler.matrixFile.bins = bins_new
matrixFileHandler.matrixFile.pixel_list = pixels_list
matrixFileHandler.matrixFile.name_list = matrices_list
matrixFileHandler.save(args.outFileName, pSymmetric=True, pApplyCorrection=False)
broken_count = input_count_matrices - np.sum(np.array(keep_list))
print('Out of {} matrices, {} were removed because they were broken.'.format(input_count_matrices, broken_count))
```
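Because `main()` accepts an argument list, the tool can be driven from Python as well as from the shell; a sketch with invented file and chromosome names:

```python
# Programmatic invocation sketch; file and chromosome names are invented.
from schicexplorer import scHicAdjustMatrix

scHicAdjustMatrix.main(['--matrix', 'cells.scool',
                        '--outFileName', 'cells_chr1_chr2.scool',
                        '--chromosomes', 'chr1', 'chr2',
                        '--action', 'keep',
                        '--threads', '4'])
```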
#### File: scHiCExplorer/schicexplorer/scHicCluster.py
```python
import argparse
import os
from multiprocessing import Process, Queue
import time
import logging
log = logging.getLogger(__name__)
from scipy import linalg
import cooler
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.cm import get_cmap
from sklearn.cluster import KMeans, SpectralClustering
from sklearn.neighbors import NearestNeighbors
from sklearn.decomposition import PCA
from hicmatrix import HiCMatrix as hm
import numpy as np
from scipy.sparse import csr_matrix
from holoviews.plotting.util import process_cmap
from schicexplorer._version import __version__
from schicexplorer.utilities import cell_name_list, create_csr_matrix_all_cells
def parse_arguments(args=None):
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=False,
        description='scHicCluster uses kmeans or spectral clustering to associate each cell with a cluster and therefore with its cell cycle stage. '
                    'The clustering can be run on the raw data, on a kNN computed via the exact euclidean distance, or via PCA. '
                    'Please also consider the other clustering and dimension reduction approaches of the scHicExplorer suite; they may give better results, '
                    'be faster, or demand less memory.'
)
parserRequired = parser.add_argument_group('Required arguments')
# define the arguments
parserRequired.add_argument('--matrix', '-m',
help='The single cell Hi-C interaction matrices to cluster. Needs to be in scool format',
metavar='scool scHi-C matrix',
required=True)
parserRequired.add_argument('--numberOfClusters', '-c',
                                help='Number of clusters to compute',
required=False,
default=12,
type=int)
parserRequired.add_argument('--clusterMethod', '-cm',
help='Algorithm to cluster the Hi-C matrices',
choices=['spectral', 'kmeans'],
default='spectral')
parserOpt = parser.add_argument_group('Optional arguments')
parserOpt.add_argument('--chromosomes',
                           help='List of chromosomes to include',
nargs='+')
parserOpt.add_argument('--intraChromosomalContactsOnly', '-ic',
help='This option loads only the intra-chromosomal contacts. Can improve the cluster result if data is very noisy.',
action='store_true')
parserOpt.add_argument('--additionalPCA', '-pca',
help='Computes PCA on top of a k-nn. Can improve the cluster result.',
action='store_true')
parserOpt.add_argument('--dimensionsPCA', '-dim_pca',
help='The number of dimensions from the PCA matrix that should be considered for clustering. Can improve the cluster result.',
default=20,
type=int)
parserOpt.add_argument('--dimensionReductionMethod', '-drm',
                           help='Dimension reduction method: none, knn (euclidean distance), or pca',
choices=['none', 'knn', 'pca'],
default='none')
parserOpt.add_argument('--createScatterPlot', '-csp',
help='Create a scatter plot for the clustering, the x and y are the first and second principal component of the computed k-nn graph.',
required=False,
default=None)
parserOpt.add_argument('--numberOfNearestNeighbors', '-k',
                           help='Number of nearest neighbors to compute for the knn graph. The effective value is the smaller of this and the number of provided cells.',
required=False,
default=100,
type=int)
parserOpt.add_argument('--dpi', '-d',
help='The dpi of the scatter plot.',
required=False,
default=300,
type=int)
parserOpt.add_argument('--outFileName', '-o',
help='File name to save the resulting clusters',
required=True,
default='clusters.txt')
parserOpt.add_argument('--cell_coloring_type', '-cct',
                           help='A two column list: first column the cell names as stored in the scool file, second column the associated cell type used for coloring the scatter plot',
required=False)
parserOpt.add_argument('--cell_coloring_batch', '-ccb',
                           help='A two column list: first column the cell names as stored in the scool file, second column the associated batch label used for coloring the scatter plot',
required=False)
parserOpt.add_argument('--latexTable', '-lt',
                           help='Write the overlap statistics of clusters vs. cell types as a LaTeX table to this file; requires --cell_coloring_type.')
parserOpt.add_argument('--figuresize',
                           help='Size of the figure in inches (x-size, y-size).',
type=float,
nargs=2,
default=(15, 6),
metavar=('x-size', 'y-size'))
parserOpt.add_argument('--colorMap',
help='Color map to use for the heatmap, supported are the categorical colormaps from holoviews: '
'http://holoviews.org/user_guide/Colormaps.html',
default='glasbey_dark')
parserOpt.add_argument('--fontsize',
help='Fontsize in the plot for x and y axis.',
type=float,
default=15)
parserOpt.add_argument('--threads', '-t',
help='Number of threads. Using the python multiprocessing module.',
required=False,
default=8,
type=int)
parserOpt.add_argument('--help', '-h', action='help', help='show this help message and exit')
parserOpt.add_argument('--version', action='version',
version='%(prog)s {}'.format(__version__))
return parser
def main(args=None):
args = parse_arguments().parse_args(args)
outputFolder = os.path.dirname(os.path.abspath(args.outFileName)) + '/'
log.debug('outputFolder {}'.format(outputFolder))
if args.cell_coloring_type:
cell_name_cell_type_dict = {}
cell_type_color_dict = {}
color_cell_type_dict = {}
cell_type_counter = 0
with open(args.cell_coloring_type, 'r') as file:
for i, line in enumerate(file.readlines()):
line = line.strip()
try:
cell_name, cell_type = line.split('\t')
except Exception:
cell_name, cell_type = line.split(' ')
cell_name_cell_type_dict[cell_name] = cell_type
if cell_type not in cell_type_color_dict:
cell_type_color_dict[cell_type] = cell_type_counter
color_cell_type_dict[cell_type_counter] = cell_type
cell_type_counter += 1
if args.cell_coloring_batch:
cell_name_cell_type_dict_batch = {}
cell_type_color_dict_batch = {}
color_cell_type_dict_batch = {}
cell_type_counter_batch = 0
with open(args.cell_coloring_batch, 'r') as file:
for i, line in enumerate(file.readlines()):
line = line.strip()
try:
cell_name, cell_type = line.split('\t')
except Exception:
cell_name, cell_type = line.split(' ')
cell_name_cell_type_dict_batch[cell_name] = cell_type
if cell_type not in cell_type_color_dict_batch:
cell_type_color_dict_batch[cell_type] = cell_type_counter_batch
color_cell_type_dict_batch[cell_type_counter_batch] = cell_type
cell_type_counter_batch += 1
raw_file_name = os.path.splitext(os.path.basename(args.outFileName))[0]
neighborhood_matrix, matrices_list = create_csr_matrix_all_cells(args.matrix, args.threads, args.chromosomes, outputFolder, raw_file_name, args.intraChromosomalContactsOnly)
reduce_to_dimension = neighborhood_matrix.shape[0] - 1
if args.dimensionReductionMethod == 'knn':
if args.numberOfNearestNeighbors > reduce_to_dimension:
args.numberOfNearestNeighbors = reduce_to_dimension
nbrs = NearestNeighbors(n_neighbors=args.numberOfNearestNeighbors, algorithm='ball_tree', n_jobs=args.threads).fit(neighborhood_matrix)
neighborhood_matrix = nbrs.kneighbors_graph(mode='distance')
if args.additionalPCA:
pca = PCA(n_components=min(neighborhood_matrix.shape) - 1)
neighborhood_matrix = pca.fit_transform(neighborhood_matrix.todense())
if args.dimensionsPCA:
args.dimensionsPCA = min(args.dimensionsPCA, neighborhood_matrix.shape[0])
neighborhood_matrix = neighborhood_matrix[:, :args.dimensionsPCA]
elif args.dimensionReductionMethod == 'pca':
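        # PCA here: eigendecomposition of the covariance matrix over all cells,
        # keeping the leading reduce_to_dimension eigenvectors.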
corrmatrix = np.cov(neighborhood_matrix.todense())
evals, eigs = linalg.eig(corrmatrix)
neighborhood_matrix = eigs[:, :reduce_to_dimension].transpose()
if args.clusterMethod == 'spectral':
spectralClustering_object = SpectralClustering(n_clusters=args.numberOfClusters, n_jobs=args.threads,
n_neighbors=reduce_to_dimension, affinity='nearest_neighbors', random_state=0, eigen_solver="arpack")
labels_clustering = spectralClustering_object.fit_predict(neighborhood_matrix)
elif args.clusterMethod == 'kmeans':
kmeans_object = KMeans(n_clusters=args.numberOfClusters, random_state=0, n_jobs=args.threads, precompute_distances=True)
labels_clustering = kmeans_object.fit_predict(neighborhood_matrix)
if args.colorMap:
colors = process_cmap(args.colorMap)
if args.cell_coloring_type:
if len(colors) < len(cell_type_color_dict):
            log.error('The chosen colormap offers too few colors for the number of cell types.')
exit(1)
labels_clustering_cell_type = []
for cell_name in matrices_list:
labels_clustering_cell_type.append(cell_type_color_dict[cell_name_cell_type_dict[cell_name]])
labels_clustering_cell_type = np.array(labels_clustering_cell_type)
log.debug('labels_clustering_cell_type: {}'.format(len(labels_clustering_cell_type)))
log.debug('matrices_list: {}'.format(len(matrices_list)))
label_x = 'PC1'
label_y = 'PC2'
if args.createScatterPlot:
if args.dimensionReductionMethod == 'none':
            log.warning('Raw matrix clustering scatter plot needs to compute a PCA, which can require a large amount (> 100 GB) of memory.')
log.debug('args.additionalPCA {}'.format(args.additionalPCA))
log.debug('args.dimensionReductionMethod {}'.format(args.dimensionReductionMethod))
if args.dimensionReductionMethod == 'none' or (args.dimensionReductionMethod == 'knn' and not args.additionalPCA):
log.debug('compute pca')
pca = PCA(n_components=min(neighborhood_matrix.shape) - 1)
neighborhood_matrix_knn = pca.fit_transform(neighborhood_matrix.todense())
log.debug('compute pca')
else:
log.debug('already computed pca')
neighborhood_matrix_knn = neighborhood_matrix
if args.cell_coloring_type:
plt.figure(figsize=(args.figuresize[0], args.figuresize[1]))
for i, color in enumerate(colors[:len(cell_type_color_dict)]):
mask = labels_clustering_cell_type == i
log.debug('plot cluster: {} {}'.format(color_cell_type_dict[i], np.sum(mask)))
plt.scatter(neighborhood_matrix_knn[:, 0].T[mask], neighborhood_matrix_knn[:, 1].T[mask], color=color, label=str(color_cell_type_dict[i]), s=20, alpha=0.7)
plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left', fontsize=args.fontsize)
plt.xticks([])
plt.yticks([])
plt.xlabel(label_x, fontsize=args.fontsize)
plt.ylabel(label_y, fontsize=args.fontsize)
if '.' not in args.createScatterPlot:
args.createScatterPlot += '.png'
scatter_plot_name = '.'.join(args.createScatterPlot.split('.')[:-1]) + '_cell_color.' + args.createScatterPlot.split('.')[-1]
plt.tight_layout()
plt.savefig(scatter_plot_name, dpi=args.dpi)
plt.close()
if args.cell_coloring_batch:
if len(colors) < len(cell_type_color_dict_batch):
            log.error('The chosen colormap offers too few colors for the number of batches.')
exit(1)
labels_clustering_cell_type_batch = []
for cell_name in matrices_list:
labels_clustering_cell_type_batch.append(cell_type_color_dict_batch[cell_name_cell_type_dict_batch[cell_name]])
labels_clustering_cell_type_batch = np.array(labels_clustering_cell_type_batch)
log.debug('labels_clustering_cell_type: {}'.format(len(labels_clustering_cell_type_batch)))
log.debug('matrices_list: {}'.format(len(matrices_list)))
plt.figure(figsize=(args.figuresize[0], args.figuresize[1]))
for i, color in enumerate(colors[:len(cell_type_color_dict_batch)]):
mask = labels_clustering_cell_type_batch == i
log.debug('plot cluster: {} {}'.format(color_cell_type_dict_batch[i], np.sum(mask)))
plt.scatter(neighborhood_matrix_knn[:, 0].T[mask], neighborhood_matrix_knn[:, 1].T[mask], color=color, label=str(color_cell_type_dict_batch[i]), s=20, alpha=0.7)
plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left', fontsize=args.fontsize)
plt.xticks([])
plt.yticks([])
plt.xlabel(label_x, fontsize=args.fontsize)
plt.ylabel(label_y, fontsize=args.fontsize)
if '.' not in args.createScatterPlot:
args.createScatterPlot += '.png'
scatter_plot_name = '.'.join(args.createScatterPlot.split('.')[:-1]) + '_cell_color_batch.' + args.createScatterPlot.split('.')[-1]
plt.tight_layout()
plt.savefig(scatter_plot_name, dpi=args.dpi)
plt.close()
plt.figure(figsize=(args.figuresize[0], args.figuresize[1]))
for i, color in enumerate(colors[:args.numberOfClusters]):
mask = labels_clustering == i
plt.scatter(neighborhood_matrix_knn[:, 0].T[mask], neighborhood_matrix_knn[:, 1].T[mask], color=color, label=str(i), s=20, alpha=0.7)
        plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left', fontsize=args.fontsize)
plt.xticks([])
plt.yticks([])
plt.xlabel(label_x, fontsize=args.fontsize)
plt.ylabel(label_y, fontsize=args.fontsize)
if '.' not in args.createScatterPlot:
args.createScatterPlot += '.png'
scatter_plot_name = '.'.join(args.createScatterPlot.split('.')[:-1]) + '.' + args.createScatterPlot.split('.')[-1]
plt.tight_layout()
plt.savefig(scatter_plot_name, dpi=args.dpi)
plt.close()
if args.latexTable and args.cell_coloring_type:
        # compute the overlap of each cell type with the clusters found
computed_clusters = set(labels_clustering)
cell_type_amounts_dict = {}
# percentage_threshold = 0.8
for threshold in [0.7, 0.8, 0.9]:
cell_type_amounts_dict[threshold] = {}
with open(args.latexTable, 'w') as matches_file:
header = '\\begin{table}[!htb]\n\\footnotesize\n\\begin{tabular}{|l'
body = '\\hline Cluster '
for i in range(len(color_cell_type_dict)):
mask_cell_type = labels_clustering_cell_type == i
header += '|c'
body += '& ' + str(color_cell_type_dict[i]) + ' (' + str(np.sum(mask_cell_type)) + ' cells)'
header += '|}\n'
body += '\\\\\n'
# body = ''
for i in computed_clusters:
body += '\\hline Cluster ' + str(i)
mask_computed_clusters = labels_clustering == i
body += ' (' + str(np.sum(mask_computed_clusters)) + ' cells)'
for j in range(len(cell_type_color_dict)):
mask_cell_type = labels_clustering_cell_type == j
mask = mask_computed_clusters & mask_cell_type
number_of_matches = np.sum(mask)
body += '& ' + str(number_of_matches)
if number_of_matches != 1:
body += ' cells / '
else:
body += ' cell / '
body += '{:.2f}'.format((number_of_matches / np.sum(mask_computed_clusters)) * 100) + ' \\% '
for threshold in [0.7, 0.8, 0.9]:
if number_of_matches / np.sum(mask_computed_clusters) >= threshold:
if color_cell_type_dict[j] in cell_type_amounts_dict[threshold]:
cell_type_amounts_dict[threshold][color_cell_type_dict[j]] += number_of_matches
else:
cell_type_amounts_dict[threshold][color_cell_type_dict[j]] = number_of_matches
else:
if color_cell_type_dict[j] in cell_type_amounts_dict[threshold]:
continue
else:
cell_type_amounts_dict[threshold][color_cell_type_dict[j]] = 0
body += '\\\\\n'
body += '\\hline ' + '&' * len(cell_type_color_dict) + '\\\\\n'
for threshold in [0.7, 0.8, 0.9]:
body += '\\hline Correct identified $>{}\\%$'.format(int(threshold * 100))
for i in range(len(cell_type_color_dict)):
mask_cell_type = labels_clustering_cell_type == i
if color_cell_type_dict[i] in cell_type_amounts_dict[threshold]:
body += '& ' + str(cell_type_amounts_dict[threshold][color_cell_type_dict[i]]) + ' / ' + str(np.sum(mask_cell_type)) + ' ('
body += '{:.2f}'.format((cell_type_amounts_dict[threshold][color_cell_type_dict[i]] / np.sum(mask_cell_type)) * 100)
else:
body += '& ' + str(0) + ' / ' + str(np.sum(mask_cell_type)) + ' ('
body += '{:.2f}'.format(0 / np.sum(mask_cell_type))
body += ' \\%)'
body += '\\\\\n'
body += '\\hline \n'
body += '\\end{tabular}\n\\caption{}\n\\end{table}'
matches_file.write(header)
matches_file.write(body)
matrices_cluster = list(zip(matrices_list, labels_clustering))
np.savetxt(args.outFileName, matrices_cluster, fmt="%s")
```
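As with the other tools of the suite, `main()` takes an argument list, so a clustering run can be scripted; file names and parameter values below are invented:

```python
# Programmatic invocation sketch; file names and parameters are invented.
from schicexplorer import scHicCluster

scHicCluster.main(['--matrix', 'cells.scool',
                   '--numberOfClusters', '12',
                   '--clusterMethod', 'spectral',
                   '--dimensionReductionMethod', 'knn',
                   '--outFileName', 'clusters.txt',
                   '--threads', '8'])
```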
#### File: scHiCExplorer/schicexplorer/scHicMergeMatrixBins.py
```python
import argparse
import numpy as np
import cooler
import logging
log = logging.getLogger(__name__)
from multiprocessing import Process, Queue
import time
from hicmatrix import HiCMatrix as hm
from hicexplorer.hicMergeMatrixBins import running_window_merge, merge_bins
from schicexplorer._version import __version__
from hicmatrix.lib import MatrixFileHandler
from schicexplorer.utilities import cell_name_list
def parse_arguments(args=None):
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=False,
description='Merges bins from a Hi-C matrix. For example, '
'using a matrix containing 5kb bins, a matrix '
'of 50kb bins can be derived using --numBins 10. '
)
parserRequired = parser.add_argument_group('Required arguments')
parserRequired.add_argument('--matrix', '-m',
help='Matrix to reduce in scool format.',
metavar='matrix.scool',
required=True)
parserRequired.add_argument('--outFileName', '-o',
help='File name to save the resulting matrix. '
'The output is also a .scool file. But don\'t add '
'the suffix.',
required=True)
parserRequired.add_argument('--numBins', '-nb',
help='Number of bins to merge.',
metavar='int',
type=int,
required=True)
parserOpt = parser.add_argument_group('Optional arguments')
parserOpt.add_argument('--runningWindow',
                           help='merge using a running window of length '
                                '--numBins; --numBins must be an odd number.',
action='store_true')
parserOpt.add_argument('--threads', '-t',
help='Number of threads. Using the python multiprocessing module.',
required=False,
default=4,
type=int)
parserOpt.add_argument('--help', '-h', action='help', help='show this help message and exit')
parserOpt.add_argument('--version', action='version',
version='%(prog)s {}'.format(__version__))
return parser
def compute_merge(pMatrixName, pMatrixList, pRunningWindow, pNumBins, pQueue):
out_queue_list = []
try:
for matrix in pMatrixList:
hic = hm.hiCMatrix(pMatrixName + '::' + matrix)
if pRunningWindow:
merged_matrix = running_window_merge(hic, pNumBins)
else:
merged_matrix = merge_bins(hic, pNumBins)
matrixFileHandlerOutput = MatrixFileHandler(pFileType='cool', pMatrixFile=matrix, pEnforceInteger=False, pFileWasH5=False)
matrixFileHandlerOutput.set_matrix_variables(merged_matrix.matrix, merged_matrix.cut_intervals, merged_matrix.nan_bins,
merged_matrix.correction_factors, merged_matrix.distance_counts)
out_queue_list.append(matrixFileHandlerOutput)
pQueue.put(out_queue_list)
except Exception as exp:
pQueue.put(["Fail: {}".format(str(exp))])
return
def main(args=None):
args = parse_arguments().parse_args(args)
threads = args.threads
merged_matrices = [None] * threads
matrices_list = cell_name_list(args.matrix)
if len(matrices_list) < threads:
threads = len(matrices_list)
all_data_collected = False
thread_done = [False] * threads
length_index = [None] * threads
length_index[0] = 0
matricesPerThread = len(matrices_list) // threads
queue = [None] * threads
process = [None] * threads
for i in range(threads):
if i < threads - 1:
matrices_name_list = matrices_list[i * matricesPerThread:(i + 1) * matricesPerThread]
length_index[i + 1] = length_index[i] + len(matrices_name_list)
else:
matrices_name_list = matrices_list[i * matricesPerThread:]
queue[i] = Queue()
process[i] = Process(target=compute_merge, kwargs=dict(
pMatrixName=args.matrix,
pMatrixList=matrices_name_list,
pRunningWindow=args.runningWindow,
pNumBins=args.numBins,
pQueue=queue[i]
)
)
process[i].start()
fail_flag = False
fail_message = ''
while not all_data_collected:
for i in range(threads):
if queue[i] is not None and not queue[i].empty():
# log.debug('i {}'.format(i))
# log.debug('len(queue) {}'.format(len(queue)))
# log.debug('len(merged_matrices) {}'.format(len(merged_matrices)))
merged_matrices[i] = queue[i].get()
if isinstance(merged_matrices[i][0], str) and merged_matrices[i][0].startswith('Fail: '):
fail_flag = True
fail_message = merged_matrices[i][0]
queue[i] = None
process[i].join()
process[i].terminate()
process[i] = None
thread_done[i] = True
all_data_collected = True
for thread in thread_done:
if not thread:
all_data_collected = False
time.sleep(1)
if fail_flag:
log.error('{}'.format(fail_message))
exit(1)
matrixFileHandlerObjects_list = [item for sublist in merged_matrices for item in sublist]
matrixFileHandler = MatrixFileHandler(pFileType='scool')
matrixFileHandler.matrixFile.coolObjectsList = matrixFileHandlerObjects_list
matrixFileHandler.save(args.outFileName, pSymmetric=True, pApplyCorrection=False)
```
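The resolution arithmetic is simply output bin size = input bin size x --numBins (e.g. 10 kb x 5 = 50 kb), as in this invocation sketch with invented file names:

```python
# Invocation sketch; a 10 kb scool merged 5x yields 50 kb bins. Names are invented.
from schicexplorer import scHicMergeMatrixBins

scHicMergeMatrixBins.main(['--matrix', 'cells_10kb.scool',
                           '--outFileName', 'cells_50kb.scool',
                           '--numBins', '5',
                           '--threads', '4'])
```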
#### File: scHiCExplorer/schicexplorer/scHicQualityControl.py
```python
import argparse
from multiprocessing import Process, Queue
import time
import os
import logging
log = logging.getLogger(__name__)
import cooler
from hicmatrix import HiCMatrix as hm
from hicmatrix.lib import MatrixFileHandler
from datetime import datetime
import numpy as np
from scipy.sparse import csr_matrix
import h5py
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from schicexplorer._version import __version__
from schicexplorer.utilities import cell_name_list
def parse_arguments(args=None):
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=False,
        description='scHicQualityControl computes the read coverage and contact density of each cell in a scool matrix, removes cells below the given thresholds, and writes QC plots and a report.'
)
parserRequired = parser.add_argument_group('Required arguments')
parserRequired.add_argument('--matrix', '-m',
help='The single cell Hi-C interaction matrices to investigate for QC. Needs to be in scool format',
metavar='scool scHi-C matrix',
required=True)
parserOpt = parser.add_argument_group('Optional arguments')
parserOpt.add_argument('--outputScool', '-o',
help='scool matrix which contains only the filtered matrices',
default='filtered_matrices.scool')
parserOpt.add_argument('--minimumReadCoverage',
help='Remove all samples with a lower read coverage as this value.',
required=False,
default=1000000,
type=int)
parserOpt.add_argument('--minimumDensity',
help='Remove all samples with a lower density as this value. The density is given by: number of non-zero interactions / all possible interactions.',
required=False,
default=0.001,
type=float)
parserOpt.add_argument('--maximumRegionToConsider',
help='To compute the density, consider only this genomic distance around the diagonal.',
required=False,
default=30000000,
type=int)
parserOpt.add_argument('--chromosomes', '-c',
nargs='+',
                           help='List of chromosomes that a cell must contain to be kept. Other chromosomes/contigs and scaffolds which may exist are not deleted; use scHicAdjustMatrix for this.')
parserOpt.add_argument('--outFileNameDensity', '-od',
help='File name of the density histogram',
required=False,
default='density.png')
parserOpt.add_argument('--outFileNameReadCoverage', '-or',
help='File name of the read coverage',
required=False,
default='readCoverage.png')
parserOpt.add_argument('--outFileNameQCReport', '-oqc',
help='File name of the quality report',
required=False,
default='qc_report.txt')
parserOpt.add_argument('--plotOnly',
help='Do not create a new matrix, create only the plots.',
action='store_true')
parserOpt.add_argument('--runChromosomeCheck',
                           help='Run the data integrity check for the chromosomes.',
action='store_true')
parserOpt.add_argument('--dpi', '-d',
help='The dpi of the plot.',
required=False,
default=300,
type=int)
parserOpt.add_argument('--threads', '-t',
help='Number of threads. Using the python multiprocessing module.',
required=False,
default=4,
type=int)
parserOpt.add_argument('--help', '-h', action='help', help='show this help message and exit')
parserOpt.add_argument('--version', action='version',
version='%(prog)s {}'.format(__version__))
return parser
def compute_read_coverage_sparsity(pMatrixName, pMatricesList, pXDimension, pMaximumRegionToConsider, pQueue):
read_coverage = []
sparsity = []
    log.debug('read coverage and sparsity')
hic_ma = hm.hiCMatrix(pMatrixFile=pMatrixName + '::' + pMatricesList[0])
bin_size = hic_ma.getBinSize()
shape_x = hic_ma.matrix.shape[0]
for i, matrix in enumerate(pMatricesList):
matrixFileHandler = MatrixFileHandler(pFileType='cool', pMatrixFile=pMatrixName + '::' + matrix, pLoadMatrixOnly=True)
_matrix, cut_intervals, nan_bins, \
distance_counts, correction_factors = matrixFileHandler.load()
max_distance = pMaximumRegionToConsider // bin_size
instances = _matrix[0]
features = _matrix[1]
distances = np.absolute(instances - features)
mask = distances <= max_distance
sparsity_length = len(_matrix[2][mask])
sparsity.append(sparsity_length / (shape_x * max_distance))
# only upper half is loaded --> times 2
read_coverage_sum = _matrix[2].sum() * 2
# minus the double main diagonal
mask = distances == 0
read_coverage_sum -= _matrix[2][mask].sum()
read_coverage.append(read_coverage_sum)
pQueue.put([read_coverage, sparsity])
def compute_contains_all_chromosomes(pMatrixName, pMatricesList, pChromosomes, pQueue):
keep_matrices_chromosome_names = []
for i, matrix in enumerate(pMatricesList):
ma = hm.hiCMatrix(pMatrixName + '::' + matrix)
if pChromosomes is None:
pChromosomes = list(ma.chrBinBoundaries)
try:
ma.keepOnlyTheseChr(pChromosomes)
keep_matrices_chromosome_names.append(1)
except Exception:
keep_matrices_chromosome_names.append(0)
pQueue.put(keep_matrices_chromosome_names)
def main(args=None):
args = parse_arguments().parse_args(args)
matrices_name = args.matrix
threads = args.threads
matrices_list = cell_name_list(matrices_name)
all_samples_number = len(matrices_list)
if args.runChromosomeCheck:
#####################################################
# Detect broken chromosomes and remove these matrices
#####################################################
keep_matrices_thread = [None] * threads
all_data_collected = False
thread_done = [False] * threads
length_index = [None] * threads
length_index[0] = 0
matricesPerThread = len(matrices_list) // threads
queue = [None] * threads
process = [None] * threads
for i in range(threads):
if i < threads - 1:
matrices_name_list = matrices_list[i * matricesPerThread:(i + 1) * matricesPerThread]
length_index[i + 1] = length_index[i] + len(matrices_name_list)
else:
matrices_name_list = matrices_list[i * matricesPerThread:]
queue[i] = Queue()
process[i] = Process(target=compute_contains_all_chromosomes, kwargs=dict(
pMatrixName=matrices_name,
pMatricesList=matrices_name_list,
pChromosomes=args.chromosomes,
pQueue=queue[i]
)
)
process[i].start()
while not all_data_collected:
for i in range(threads):
if queue[i] is not None and not queue[i].empty():
worker_result = queue[i].get()
keep_matrices_thread[i] = worker_result
queue[i] = None
process[i].join()
process[i].terminate()
process[i] = None
thread_done[i] = True
all_data_collected = True
for thread in thread_done:
if not thread:
all_data_collected = False
time.sleep(1)
keep_matrices_chromosome_names = np.array([item for sublist in keep_matrices_thread for item in sublist], dtype=bool)
matrices_name_chromosome_names = np.array(matrices_list)
matrices_list = matrices_name_chromosome_names[keep_matrices_chromosome_names]
matrices_remove = matrices_name_chromosome_names[~keep_matrices_chromosome_names]
#######################################
read_coverage_thread = [None] * threads
sparsity_thread = [None] * threads
all_data_collected = False
thread_done = [False] * threads
length_index = [None] * threads
length_index[0] = 0
matricesPerThread = len(matrices_list) // threads
queue = [None] * threads
process = [None] * threads
for i in range(threads):
if i < threads - 1:
matrices_name_list = matrices_list[i * matricesPerThread:(i + 1) * matricesPerThread]
length_index[i + 1] = length_index[i] + len(matrices_name_list)
else:
matrices_name_list = matrices_list[i * matricesPerThread:]
queue[i] = Queue()
process[i] = Process(target=compute_read_coverage_sparsity, kwargs=dict(
pMatrixName=matrices_name,
pMatricesList=matrices_name_list,
pXDimension=len(matrices_list),
pMaximumRegionToConsider=args.maximumRegionToConsider,
pQueue=queue[i]
)
)
process[i].start()
while not all_data_collected:
for i in range(threads):
if queue[i] is not None and not queue[i].empty():
worker_result = queue[i].get()
read_coverage_thread[i] = worker_result[0]
sparsity_thread[i] = worker_result[1]
queue[i] = None
process[i].join()
process[i].terminate()
process[i] = None
thread_done[i] = True
all_data_collected = True
for thread in thread_done:
if not thread:
all_data_collected = False
time.sleep(1)
read_coverage = np.array([item for sublist in read_coverage_thread for item in sublist])
sparsity = np.array([item for sublist in sparsity_thread for item in sublist])
plt.close()
plt.hist(read_coverage, bins=100)
plt.suptitle('Read coverage of {}'.format(os.path.basename(args.matrix)), fontsize=12)
plt.grid(True)
if args.minimumReadCoverage > 0:
plt.axvline(args.minimumReadCoverage, color='r', linestyle='dashed', linewidth=1)
plt.title('Matrices with a read coverage < {} are removed.'.format(args.minimumReadCoverage), fontsize=10)
plt.xlabel('Read coverage')
plt.ylabel('Frequency')
plt.savefig(args.outFileNameReadCoverage, dpi=args.dpi)
plt.close()
plt.hist(sparsity, bins=100)
plt.suptitle('Density of {}'.format(os.path.basename(args.matrix)), fontsize=12)
if args.minimumDensity > 0:
plt.title('Matrices with a density < {} are removed.'.format(args.minimumDensity), fontsize=10)
plt.grid(True)
plt.xlabel('Density')
plt.ylabel('Frequency')
if args.minimumDensity > 0:
plt.axvline(args.minimumDensity, color='r', linestyle='dashed', linewidth=1)
plt.savefig(args.outFileNameDensity, dpi=args.dpi)
plt.close()
mask_read_coverage = read_coverage >= args.minimumReadCoverage
mask_sparsity = sparsity >= args.minimumDensity
mask = np.logical_and(mask_read_coverage, mask_sparsity)
matrices_list_filtered = np.array(matrices_list)[mask]
sum_read_coverage = np.sum(~mask_read_coverage)
sum_sparsity = np.sum(~mask_sparsity)
if not args.plotOnly:
np.savetxt('accepted_matrices.txt', matrices_list_filtered, fmt="%s")
np.savetxt('rejected_matrices.txt', np.array(matrices_list)[~mask], fmt="%s")
if os.path.exists(args.outputScool):
os.remove(args.outputScool)
cooler.fileops.cp(args.matrix + '::/bins', args.outputScool + '::/bins')
cooler.fileops.cp(args.matrix + '::/chroms', args.outputScool + '::/chroms')
with cooler.util.open_hdf5(args.matrix) as source:
attributes_dict = {}
for k, v in source.attrs.items():
attributes_dict[k] = v
attributes_dict['ncells'] = len(matrices_list_filtered)
attributes_dict['creation-date'] = datetime.now().isoformat()
with h5py.File(args.outputScool, "r+") as f:
h5 = f['/']
h5.attrs.update(attributes_dict)
content_bins_ln = ['chrom', 'start', 'end']
for matrix in matrices_list_filtered:
cooler.fileops.cp(args.matrix + '::' + matrix + '/pixels', args.outputScool + '::' + matrix + '/pixels')
cooler.fileops.cp(args.matrix + '::' + matrix + '/indexes', args.outputScool + '::' + matrix + '/indexes')
cooler.fileops.ln(args.outputScool + '::' + '/chroms', args.outputScool + '::' + matrix + '/chroms')
cooler.fileops.ln(args.outputScool + '::' + '/bins/chrom', args.outputScool + '::' + matrix + '/bins/chrom')
cooler.fileops.ln(args.outputScool + '::' + '/bins/start', args.outputScool + '::' + matrix + '/bins/start')
cooler.fileops.ln(args.outputScool + '::' + '/bins/end', args.outputScool + '::' + matrix + '/bins/end')
group_dataset_list = cooler.fileops.ls(args.matrix + '::' + matrix + '/bins/')
for datatype in group_dataset_list:
last_element = datatype.split('/')[-1]
                if last_element not in content_bins_ln and last_element != '':
cooler.fileops.cp(args.matrix + '::' + matrix + '/bins/' + last_element, args.outputScool + '::' + matrix + '/bins/' + last_element)
with cooler.util.open_hdf5(args.matrix) as source: # , cooler.util.open_hdf5(args.outputScool + '::' + matrix) as destination:
attributes_dict = {}
for k, v in source[matrix].attrs.items():
attributes_dict[k] = v
with h5py.File(args.outputScool, "r+") as f:
h5 = f[matrix]
h5.attrs.update(attributes_dict)
##################
# Create QC report
##################
header = '# QC report for single-cell Hi-C data generated by scHiCExplorer ' + __version__ + '\n'
matrix_statistics = 'scHi-C sample contained {} cells:\n'.format(all_samples_number)
if args.runChromosomeCheck:
        matrices_bad_chromosomes = 'Number of removed matrices containing bad chromosomes: {}\n'.format(len(matrices_remove))
matrices_low_read_coverage = 'Number of removed matrices due to low read coverage (< {}): {}\n'.format(args.minimumReadCoverage, sum_read_coverage)
matrices_too_sparse = 'Number of removed matrices due to too many zero bins (< {} density, within {} relative genomic distance): {}\n'.format(args.minimumDensity, args.maximumRegionToConsider, sum_sparsity)
    matrix_qc = '{} samples passed the quality control. Note that matrices with a low read coverage are often also the ones with a low density, so the two removal criteria can overlap.'.format(len(matrices_list_filtered))
with open(args.outFileNameQCReport, 'w') as file:
file.write(header)
file.write(matrix_statistics)
if args.runChromosomeCheck:
file.write(matrices_bad_chromosomes)
file.write(matrices_low_read_coverage)
file.write(matrices_too_sparse)
file.write(matrix_qc)
```
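To make the two QC measures concrete, here is the arithmetic from `compute_read_coverage_sparsity` applied to a tiny hand-made COO matrix (all numbers are made up): density counts the non-zero entries within `maximumRegionToConsider` of the diagonal relative to the number of possible entries in that band, and read coverage doubles the upper-triangle counts and subtracts the doubled main diagonal once.
```python
import numpy as np

bin_size = 1_000_000
maximum_region_to_consider = 30_000_000
shape_x = 100  # number of bins in the (square) matrix

# upper-triangle COO triplets: row indices, column indices, counts
instances = np.array([0, 0, 1, 5, 40])
features = np.array([0, 3, 1, 80, 45])
data = np.array([10, 2, 7, 1, 4])

max_distance = maximum_region_to_consider // bin_size
distances = np.absolute(instances - features)

# density: non-zero entries within the band around the diagonal,
# relative to the number of possible entries in that band
mask = distances <= max_distance
density = len(data[mask]) / (shape_x * max_distance)

# read coverage: total counts; the upper half is doubled, then the
# doubled main diagonal is subtracted once
read_coverage = data.sum() * 2 - data[distances == 0].sum()
print(density, read_coverage)
```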
#### File: schicexplorer/test/test_scHicConsensusMatrices.py
```python
import warnings
warnings.simplefilter(action="ignore", category=RuntimeWarning)
warnings.simplefilter(action="ignore", category=PendingDeprecationWarning)
import pytest
import os
from tempfile import NamedTemporaryFile, mkdtemp
import cooler
import numpy.testing as nt
from hicmatrix import HiCMatrix as hm
from schicexplorer import scHicConsensusMatrices
ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test-data/")
def test_consensus_matrices():
outfile = NamedTemporaryFile(suffix='.scool', delete=False)
outfile.close()
args = "--matrix {} --outFileName {} -t {} -c {}".format(ROOT + 'test_matrix.scool',
outfile.name, 4, ROOT + 'scHicConsensusMatrices/cluster_kmeans.txt').split()
scHicConsensusMatrices.main(args)
test_data_matrix = ROOT + 'scHicConsensusMatrices/consensus_matrix.scool'
matrices_list_test_data = cooler.fileops.list_coolers(test_data_matrix)
matrices_list_created = cooler.fileops.list_coolers(outfile.name)
matrices_list_test_data = sorted(matrices_list_test_data)
matrices_list_created = sorted(matrices_list_created)
assert len(matrices_list_test_data) == len(matrices_list_created)
for test_matrix, created_matrix in zip(matrices_list_test_data, matrices_list_created):
test = hm.hiCMatrix(test_data_matrix + '::' + test_matrix)
created = hm.hiCMatrix(outfile.name + '::' + created_matrix)
nt.assert_almost_equal(test.matrix.data, created.matrix.data, decimal=5)
nt.assert_equal(test.cut_intervals, created.cut_intervals)
os.unlink(outfile.name)
def test_version():
args = "--version".split()
with pytest.raises(SystemExit) as pytest_wrapped_e:
scHicConsensusMatrices.main(args)
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 0
def test_help():
args = "--help".split()
with pytest.raises(SystemExit) as pytest_wrapped_e:
scHicConsensusMatrices.main(args)
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 0
``` |
{
"source": "joachimwolff/sparse-neighbors-search",
"score": 2
} |
#### File: joachimwolff/sparse-neighbors-search/run_test.py
```python
from sparse_neighbors_search import MinHash
from sparse_neighbors_search import MinHashClassifier
from sparse_neighbors_search import WtaHash
from sparse_neighbors_search import WtaHashClassifier
import numpy as np
from sklearn.neighbors import NearestNeighbors
from scipy.sparse import csr_matrix
from scipy.sparse import coo_matrix
from eden.converter.graph.gspan import gspan_to_eden
from eden.graph import Vectorizer
import os.path
import time
import pickle
from scipy.io import mmwrite
from scipy.io import mmread
def load_bursi():
# if not os.path.isfile('/home/wolffj/data/minhash/dataset_bursi'):
# graphs = None
# if not os.path.isfile('/home/wolffj/data/minhash/bursi.gspan'):
# graphs = gspan_to_eden( 'http://www.bioinf.uni-freiburg.de/~costa/bursi.gspan' )
# # pickle.dump(graphs, open('/home/wolffj/data/minhash/bursi.gspan', 'wb'))
# else:
# graphs = pickle.load(open('/home/wolffj/data/minhash/bursi.gspan', 'rb'))
# vectorizer = Vectorizer( r=2,d=5 )
# dataset = vectorizer.transform( graphs )
# pickle.dump(dataset, open('/home/wolffj/data/minhash/dataset_bursi', 'wb'))
# mmwrite(open("bursi.mtx", 'w+'), dataset)
# return dataset
# else:
# return pickle.load(open('/home/wolffj/data/minhash/dataset_bursi', 'rb'))
return mmread(open("bursi.mtx", 'r'))
if __name__ == "__main__":
dataset = load_bursi()
# data = []
# print dataset.get_shape()
# for i in xrange(dataset.get_shape()[0]):
# if i < 100:
# data.append(dataset.getrow(i))
# print data
# dataset = np.vstack(data)
# dataset = dataset.tocsr()[:100]
# mmwrite(open("bursi_test.mtx", 'w+'), dataset)
# print type(data)
start = time.time()
sklearn = NearestNeighbors(n_neighbors=5, n_jobs=8)
sklearn.fit(dataset)
end = time.time()
    print("fitting: ", end - start)
start = time.time()
neighbors_sklearn = sklearn.kneighbors(return_distance=False)
end = time.time()
    print(neighbors_sklearn)
    print('neighbors computing time: ', end - start)
start = time.time()
minhash = MinHash(n_neighbors=5, number_of_cores=8)
minhash.fit(dataset)
end = time.time()
    print("fitting: ", end - start)
start = time.time()
neighbors_minHash = minhash.kneighbors(return_distance=False)
end = time.time()
    print(neighbors_minHash)
    print('neighbors computing time: ', end - start)
    accuracy = 0
    for i in range(len(neighbors_minHash)):
        accuracy += len(np.intersect1d(neighbors_minHash[i], neighbors_sklearn[i]))
    print("Accuracy: ", accuracy / float(len(neighbors_minHash) * len(neighbors_sklearn[0])))
# n_neighbors_minHash = MinHash(n_neighbors = 4)
# mmwrite(open("bursi_neighbors.mtx", 'w+'), neighbors)
# mmwrite(open("bursi_values.mtx", 'w+'), dataset)
```
#### File: sparse_neighbors_search/neighbors/wtaHashClassifier.py
```python
__author__ = 'joachimwolff'
from collections import Counter
import numpy as np
from numpy import asarray
from sklearn.utils import check_array
from sklearn.utils import check_X_y
from sklearn.metrics import accuracy_score
import logging
from .wtaHash import WtaHash
class WtaHashClassifier():
"""Classifier implementing the k-nearest neighbors vote on sparse data sets.
Based on a dimension reduction with minimum hash functions.
Parameters
----------
n_neighbors : int, optional (default = 5)
Number of neighbors to use by default for :meth:`k_neighbors` queries.
fast : {True, False}, optional (default = False)
- True: will only use an inverse index to compute a k_neighbor query.
- False: an inverse index is used to preselect instances, and these are used to get
the original data from the data set to answer a k_neighbor query. The
original data is stored in the memory.
number_of_hash_functions : int, optional (default = '400')
Number of hash functions to use for computing the inverse index.
max_bin_size : int, optional (default = 50)
The number of maximal collisions for one hash value of one hash function. If one value of a hash function
has more collisions, this value will be ignored.
minimal_blocks_in_common : int, optional (default = 1)
The minimal number of hash collisions two instances have to be in common to be recognised. Everything less
will be ignored.
shingle_size : int, optional (default = 4)
Reduction factor for the signature size.
E.g. number_of_hash_functions=400 and shingle_size=4 --> Size of the signature will be 100
excess_factor : int, optional (default = 5)
Factor to return more neighbors internally as defined with n_neighbors. Factor is useful to increase the
precision of the :meth:`algorithm=exact` version of the implementation.
E.g.: n_neighbors = 5, excess_factor = 5. Internally n_neighbors*excess_factor = 25 neighbors will be returned.
Now the reduced data set for sklearn.NearestNeighbors is of size 25 and not 5.
number_of_cores : int, optional
Number of cores that should be used for openmp. If your system doesn't support openmp, this value
will have no effect. If it supports openmp and it is not defined, the maximum number of cores is used.
chunk_size : int, optional
Number of elements one cpu core should work on. If it is set to "0" the default behaviour of openmp is used;
e.g. for an 8-core cpu, the chunk_size is set to 8. Every core will get 8 elements, process these and get
another 8 elements until everything is done. If you set chunk_size to "-1" all cores
are getting the same amount of data at once; e.g. 8-core cpu and 128 elements to process, every core will
get 16 elements at once.
Notes
-----
The documentation is copied from scikit-learn and was only extend for a few cases. All examples are available there.
Original documentation is available at: http://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsClassifier.html#sklearn.neighbors.KNeighborsClassifier
Sources:
Basic algorithm:
http://en.wikipedia.org/wiki/K-nearest_neighbor_algorithm
Idea behind implementation:
https://en.wikipedia.org/wiki/Locality-sensitive_hashing
Implementation is using scikit learn:
http://scikit-learn.org/dev/index.html
http://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsClassifier.html#sklearn.neighbors.KNeighborsClassifier
Algorithm based on:
    Heyne, S., Costa, F., Rose, D., & Backofen, R. (2012).
GraphClust: alignment-free structural clustering of local RNA secondary structures.
Bioinformatics, 28(12), i224-i232.
http://bioinformatics.oxfordjournals.org/content/28/12/i224.full.pdf+html"""
def __init__(self, n_neighbors=5, radius=1.0, fast=False, number_of_hash_functions=400,
max_bin_size=50, minimal_blocks_in_common=1, shingle_size=4, excess_factor=5,
similarity=False, number_of_cores=None, chunk_size=None, prune_inverse_index=-1,
prune_inverse_index_after_instance=-1.0, remove_hash_function_with_less_entries_as=-1,
block_size=5, shingle=0, store_value_with_least_sigificant_bit=0,
rangeK_wta=20, speed_optimized=None, accuracy_optimized=None): # cpu_gpu_load_balancing=0,
cpu_gpu_load_balancing = 0
self._wtaHash = WtaHash(n_neighbors=n_neighbors, radius=radius,
fast=fast, number_of_hash_functions=number_of_hash_functions,
max_bin_size=max_bin_size, minimal_blocks_in_common=minimal_blocks_in_common,
shingle_size=shingle_size, excess_factor=excess_factor,
similarity=similarity, number_of_cores=number_of_cores, chunk_size=chunk_size, prune_inverse_index=prune_inverse_index,
prune_inverse_index_after_instance=prune_inverse_index_after_instance,
remove_hash_function_with_less_entries_as=remove_hash_function_with_less_entries_as,
block_size=block_size, shingle=shingle,
store_value_with_least_sigificant_bit=store_value_with_least_sigificant_bit,
cpu_gpu_load_balancing=cpu_gpu_load_balancing,
speed_optimized=speed_optimized, rangeK_wta=rangeK_wta, accuracy_optimized=accuracy_optimized)
def __del__(self):
del self._wtaHash
def fit(self, X, y):
"""Fit the model using X as training data.
Parameters
----------
X : {array-like, sparse matrix}
Training data, shape = [n_samples, n_features]
y : {array-like, sparse matrix}
Target values of shape = [n_samples] or [n_samples, n_outputs]"""
self._wtaHash.fit(X, y)
def partial_fit(self, X, y):
"""Extend the model by X as additional training data.
Parameters
----------
X : {array-like, sparse matrix}
Training data. Shape = [n_samples, n_features]
y : {array-like, sparse matrix}
Target values of shape = [n_samples] or [n_samples, n_outputs]"""
self._wtaHash.partial_fit(X, y)
def kneighbors(self, X=None, n_neighbors=None, return_distance=True, fast=None, pAbsoluteNumbers=None):
"""Finds the K-neighbors of a point.
Returns distance
Parameters
----------
X : array-like, last dimension same as that of fit data, optional
The query point or points.
If not provided, neighbors of each indexed point are returned.
In this case, the query point is not considered its own neighbor.
n_neighbors : int
Number of neighbors to get (default is the value
passed to the constructor).
return_distance : boolean, optional. Defaults to True.
If False, distances will not be returned
fast : {True, False}, optional (default = False)
- True: will only use an inverse index to compute a k_neighbor query.
- False: an inverse index is used to preselect instances, and these are used to get
the original data from the data set to answer a k_neighbor query. The
original data is stored in the memory.
If not defined, default value given by constructor is used.
Returns
-------
dist : array, shape = [n_samples, distance]
Array representing the lengths to points, only present if
return_distance=True
ind : array, shape = [n_samples, neighbors]
Indices of the nearest points in the population matrix."""
return self._wtaHash.kneighbors(X=X, n_neighbors=n_neighbors, return_distance=return_distance, fast=fast, pAbsoluteNumbers=pAbsoluteNumbers)
def kneighbors_graph(self, X=None, n_neighbors=None, mode='connectivity', fast=None, pAbsoluteNumbers=None):
"""Computes the (weighted) graph of k-Neighbors for points in X
Parameters
----------
X : array-like, last dimension same as that of fit data, optional
The query point or points.
If not provided, neighbors of each indexed point are returned.
In this case, the query point is not considered its own neighbor.
n_neighbors : int
Number of neighbors for each sample.
(default is value passed to the constructor).
mode : {'connectivity', 'distance'}, optional
Type of returned matrix: 'connectivity' will return the
connectivity matrix with ones and zeros, in 'distance' the
edges are Euclidean distance between points.
fast : {True, False}, optional (default = False)
- True: will only use an inverse index to compute a k_neighbor query.
- False: an inverse index is used to preselect instances, and these are used to get
the original data from the data set to answer a k_neighbor query. The
original data is stored in the memory.
If not defined, default value given by constructor is used.
Returns
-------
A : sparse matrix in CSR format, shape = [n_samples, n_samples_fit]
n_samples_fit is the number of samples in the fitted data
A[i, j] is assigned the weight of edge that connects i to j."""
return self._wtaHash.kneighbors_graph(X=X, n_neighbors=n_neighbors, mode=mode, fast=fast, pAbsoluteNumbers=pAbsoluteNumbers)
def predict(self, X, n_neighbors=None, fast=None, similarity=None, pAbsoluteNumbers=None):
"""Predict the class labels for the provided data
Parameters
----------
X : array of shape [n_samples, n_features]
A 2-D array representing the test points.
Returns
-------
y : array of shape [n_samples] or [n_samples, n_outputs]
Class labels for each data sample.
"""
neighbors = self._wtaHash.kneighbors(X=X, n_neighbors=n_neighbors,
return_distance=False,
fast=fast, similarity=similarity, pAbsoluteNumbers=pAbsoluteNumbers)
result_classification = []
for instance in neighbors:
y_value = []
for instance_ in instance:
if instance_ != -1:
# get all class labels
# y_value.append(y_values[instance_])
y_value.append(self._wtaHash._getY()[instance_])
            if len(y_value) > 0:
                # take the most frequent class label among the neighbors
                # (Counter.keys() is not ordered by frequency)
                result_classification.append(Counter(y_value).most_common(1)[0][0])
else:
result_classification.append(-1)
return asarray(result_classification)
def predict_proba(self, X, n_neighbors=None, fast=None, similarity=None, pAbsoluteNumbers=None):
"""Return probability estimates for the test data X.
Parameters
----------
X : array, shape = (n_samples, n_features)
A 2-D array representing the test points.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
of such arrays if n_outputs > 1.
The class probabilities of the input samples. Classes are ordered
by lexicographic order.
"""
neighbors = self._wtaHash.kneighbors(X=X, n_neighbors=n_neighbors,
return_distance=False,
fast=fast, similarity=similarity, pAbsoluteNumbers=pAbsoluteNumbers)
# y_values = self._getYValues(candidate_list)
number_of_classes = len(set(self._wtaHash._getY()))
result_classification = []
for instance in neighbors:
y_value = []
for instance_ in instance:
if instance_ != -1:
# get all class labels
y_value.append(self._wtaHash._getY()[instance_])
            if len(y_value) > 0:
                # sort class labels by frequency
                y_proba = [0.0] * number_of_classes
                sorted_classes = Counter(y_value)
                # count frequency of all classes
                total_class_count = 0
                for value in sorted_classes.values():
                    total_class_count += value
                # compute probability by frequency / all_frequencies
                for key, value in sorted_classes.items():
                    y_proba[key] = value / float(total_class_count)
                result_classification.append(y_proba[:])
            else:
                # keep the output aligned with the input rows when no neighbor was found
                result_classification.append([0.0] * number_of_classes)
        return asarray(result_classification)
def score(self, X, y, sample_weight=None, fast=None):
"""Returns the mean accuracy on the given test data and labels.
In multi-label classification, this is the subset accuracy
which is a harsh metric since you require for each sample that
each label set be correctly predicted.
Parameters
----------
X : array-like, shape = (n_samples, n_features)
Test samples.
y : array-like, shape = (n_samples) or (n_samples, n_outputs)
True labels for X.
sample_weight : array-like, shape = [n_samples], optional
Sample weights.
Returns
-------
score : float
Mean accuracy of self.predict(X) wrt. y.
"""
return accuracy_score(y, self.predict(X, fast=fast), sample_weight=sample_weight)
# def _getYValues(self, candidate_list):
# if self.nearestNeighbors._y_is_csr:
# return self.nearestNeighbors._y[candidate_list]
# else:
# y_list = []
# for i in xrange(len(candidate_list)):
# y_list.append(self.nearestNeighbors._y[candidate_list[i]])
# return y_list
``` |
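A usage sketch for the classifier on sparse binary data; it assumes the compiled `sparse_neighbors_search` package is installed, and the data here is random and purely illustrative:
```python
import numpy as np
from scipy.sparse import csr_matrix
from sparse_neighbors_search import WtaHashClassifier

X = csr_matrix(np.random.binomial(1, 0.05, size=(200, 1000)))
y = np.random.randint(0, 2, 200)

classifier = WtaHashClassifier(n_neighbors=5, number_of_hash_functions=400)
classifier.fit(X[:150], y[:150])
predictions = classifier.predict(X[150:])
print(classifier.score(X[150:], y[150:]))
```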
{
"source": "joaco18/maia-italy-aia",
"score": 3
} |
#### File: joaco18/maia-italy-aia/2022-03-15-image-normalization.py
```python
import cv2
import utils
import numpy as np
from pathlib import Path
examples_folder = Path("../project configuration/OpenCV 4 - C++/example_images")
def main():
img = cv2.imread(str(examples_folder/'lowcontrast.png'), cv2.IMREAD_GRAYSCALE)
assert img is not None, 'Couldn\'t load the image'
utils.cv2_imshow("Original image", img, 'gray')
freqs, vals = np.histogram(img.flatten(), 255)
utils.hist_plot(vals, freqs)
img = (img - img.min()) / (img.max() - img.min())
# cv2.normalize(img, img, 0, 255, cv2.NORM_MINMAX)
utils.cv2_imshow("Normalized image", img, 'gray')
freqs, vals = np.histogram(img.flatten(), 255)
utils.hist_plot(vals, freqs)
if __name__ == '__main__':
main()
```
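As a quick sanity check, the manual min-max formula used above is equivalent to `cv2.normalize` with `NORM_MINMAX` (shown here scaled to 0..1 on a small deterministic array):
```python
import cv2
import numpy as np

img = (np.arange(16, dtype=np.uint8) * 7).reshape(4, 4)  # values 0..105

manual = (img - img.min()) / (img.max() - img.min())
via_cv2 = cv2.normalize(img.astype(np.float64), None, 0, 1, cv2.NORM_MINMAX)
assert np.allclose(manual, via_cv2)
```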
#### File: joaco18/maia-italy-aia/2022-03-16-histogram-equalization.py
```python
import cv2
import utils
import time
import numpy as np
from numba import jit
@jit(nopython=True)
def our_hist_numba(img: np.ndarray):
# Histogram
freqs = np.zeros((256,))
for y in range(img.shape[0]):
for x in range(img.shape[1]):
freqs[img[y, x]] += 1
return freqs
@jit(nopython=True)
def our_cdf_numba(img: np.ndarray):
# Histogram
freqs = our_hist_numba(img)
# Histogram normalization
freqs = freqs / img.size
return np.cumsum(freqs)
def our_cdf(img: np.ndarray):
# Histogram
freqs = np.zeros((256,))
for y in range(img.shape[0]):
for x in range(img.shape[1]):
freqs[img[y, x]] += 1
# Histogram normalization
freqs = freqs / img.size
return np.cumsum(freqs)
def equalize_hist(img: np.ndarray):
"""
Equalization of 8-bit grayscale image
Args:
img (np.ndarray): one channel image to equalize.
"""
assert len(img.shape) == 2, \
"In equalize_hist: Multichannel images not supported"
# CDF time performace:
# start = time.time()
# cdf = our_cdf_numba(img)
# print(f'Numba func: {time.time()-start}')
# start = time.time()
# cdf = our_cdf(img)
# print(f'No-numba func: {time.time()-start}')
# CDF
cdf = our_cdf_numba(img)
# Hist equalization LUT
cdf = (cdf * 255).astype(int)
return cdf[img]
def main():
img = cv2.imread(
str(utils.EXAMPLES_DIR/'lightning_gray.jpg'),
cv2.IMREAD_GRAYSCALE
)
freqs = our_hist_numba(img)
intensities = np.arange(0, 257)
utils.cv2_imshow('Original Image', img, cmap='gray')
utils.hist_plot(intensities, freqs, 'Original Image Histogram')
res = img.copy()
cv2.normalize(img, res, 0, 255, cv2.NORM_MINMAX)
freqs = our_hist_numba(res)
utils.cv2_imshow('Normalized Image', res, cmap='gray')
utils.hist_plot(intensities, freqs, 'Normalized Image Histogram')
    clahe_filter = cv2.createCLAHE(10)
    res = clahe_filter.apply(img)
    freqs = our_hist_numba(res)
    utils.cv2_imshow('Clahe Enhanced Image', res, cmap='gray')
    utils.hist_plot(intensities, freqs, 'Clahe Enhanced Image Histogram')
res = equalize_hist(img)
freqs = our_hist_numba(res)
utils.cv2_imshow('Equalized Image', res, cmap='gray')
utils.hist_plot(intensities, freqs, 'Equalized Image Histogram')
if __name__ == '__main__':
main()
```
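The CDF-to-LUT mapping in `equalize_hist` can be checked in isolation against OpenCV's built-in `equalizeHist`; small off-by-one rounding differences are expected because OpenCV normalizes the CDF slightly differently:
```python
import cv2
import numpy as np

img = np.random.randint(0, 256, size=(64, 64), dtype=np.uint8)

freqs = np.bincount(img.ravel(), minlength=256)
cdf = np.cumsum(freqs / img.size)   # normalized cumulative histogram
lut = (cdf * 255).astype(np.uint8)  # the equalization lookup table
ours = lut[img]

reference = cv2.equalizeHist(img)
print(np.abs(ours.astype(int) - reference.astype(int)).max())
```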
#### File: joaco18/maia-italy-aia/2022-03-17-denoise-salt-and-papper.py
```python
import cv2
import utils
import numpy as np
from functools import partial
def salt_and_pepper_denoise_callback(val, img: np.ndarray, win_name: str):
img_corrupted = img.copy()
salt_pepper_perc = \
cv2.getTrackbarPos('salt and pepper percentage', win_name)
median_filter_size = \
cv2.getTrackbarPos('median filter size', win_name)
number_of_pixels = int(salt_pepper_perc/100 * img.size)
y_coords = np.random.randint(0, img.shape[0] - 1, number_of_pixels)
x_coords = np.random.randint(0, img.shape[1] - 1, number_of_pixels)
values = np.random.randint(0, 2, number_of_pixels) * 255
img_corrupted[y_coords, x_coords] = values
cv2.imshow(win_name, img_corrupted)
if median_filter_size % 2:
cv2.medianBlur(img_corrupted, median_filter_size, img_corrupted)
cv2.imshow('Restored', img_corrupted)
def main():
win_name = 'Salt-and-pepper demo'
salt_and_pepper_perc = 10
median_filter_size = 1
img = cv2.imread(
str(utils.EXAMPLES_DIR/'lena.png'),
cv2.IMREAD_GRAYSCALE
)
cv2.namedWindow(win_name)
partial_callback = partial(
salt_and_pepper_denoise_callback, img=img, win_name=win_name)
cv2.createTrackbar(
'salt and pepper percentage', win_name,
salt_and_pepper_perc, 100, partial_callback
)
cv2.createTrackbar(
'median filter size', win_name,
median_filter_size, 50, partial_callback
)
salt_and_pepper_denoise_callback(0, img, win_name)
cv2.waitKey(0)
if __name__ == '__main__':
main()
```
#### File: joaco18/maia-italy-aia/2022-03-23-gradient-image.py
```python
import cv2
import utils
import numpy as np
def frame_processor(img: np.ndarray):
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
dx = cv2.Sobel(img_gray, cv2.CV_32F, 1, 0)
dy = cv2.Sobel(img_gray, cv2.CV_32F, 0, 1)
mag = cv2.magnitude(dx, dy)
cv2.normalize(mag, mag, 0, 255, cv2.NORM_MINMAX)
    mag = mag.astype('uint8')
    # brighten; convertScaleAbs saturates instead of wrapping around at 255
    return cv2.convertScaleAbs(mag, alpha=2)
# # Aditional "cartoonization"
# median_filtered = cv2.medianBlur(img, 7)
# _, mask = cv2.threshold(mag, 30, 255, cv2.THRESH_BINARY)
# black = np.zeros(median_filtered.shape)
# median_filtered[mask == 255, :] = black[mask == 255, :]
# return median_filtered
def main():
utils.process_video_stream(frame_processor=frame_processor)
if __name__ == '__main__':
main()
```
#### File: joaco18/maia-italy-aia/2022-04-05-hough-line-detection.py
```python
import cv2
import utils
import numpy as np
from functools import partial
def edge_detection_grad_callback(
val, img: np.ndarray, win_name: str
):
stdevX10 = cv2.getTrackbarPos('stdev', win_name)
threshold = cv2.getTrackbarPos('threshold', win_name)
alpha1 = cv2.getTrackbarPos('alpha1', win_name)
alpha0 = cv2.getTrackbarPos('alpha0', win_name)
global img_edges
if stdevX10 > 0:
img_edges = cv2.GaussianBlur(img, (0, 0), stdevX10/10., stdevX10/10.)
else:
img_edges = img.copy()
img_dx = cv2.Sobel(img_edges, cv2.CV_32F, 1, 0)
img_dy = cv2.Sobel(img_edges, cv2.CV_32F, 0, 1)
mag, angle = cv2.cartToPolar(img_dx, img_dy, angleInDegrees=True)
    # keep pixels whose gradient direction lies within [alpha0, alpha1]
    # and whose gradient magnitude exceeds the threshold
    angle_mask = np.logical_and(angle >= alpha0, angle <= alpha1)
    magnitude_mask = mag > threshold
    img_edges = np.where(np.logical_and(angle_mask, magnitude_mask), 255, 0).astype('uint8')
cv2.imshow(win_name, img_edges)
def hough_callback(val, img: np.ndarray, win_name: str):
drho = cv2.getTrackbarPos('drho', win_name)
dtheta = cv2.getTrackbarPos('dtheta', win_name)
accum = cv2.getTrackbarPos('accum', win_name)
n = cv2.getTrackbarPos('n', win_name)
if drho <= 0:
return
if dtheta <= 0:
return
if accum <= 0:
return
img_copy = img.copy()
    lines = cv2.HoughLines(img_edges.astype('uint8'), drho, dtheta * np.pi / 180.0, accum)  # theta resolution in radians
n = n if n < len(lines) else len(lines)
for [[rho, theta]] in lines[:n]:
        if (theta < (np.pi / 4.)) or (theta > 3. * np.pi / 4.):
            # near-vertical line: intersect with the top and bottom image borders
            pt1 = (int(rho / np.cos(theta)), 0)
            pt2 = (
                int((rho - img_copy.shape[0] * np.sin(theta)) / np.cos(theta)),
                int(img_copy.shape[0])
            )
            cv2.line(img_copy, pt1, pt2, (0, 0, 255), 1)
        else:
            # near-horizontal line: intersect with the left and right image borders
            pt1 = (0, int(rho / np.sin(theta)))
            pt2 = (
                int(img_copy.shape[1]),
                int((rho - img_copy.shape[1] * np.cos(theta)) / np.sin(theta))
            )
            cv2.line(img_copy, pt1, pt2, (0, 0, 255), 2, cv2.LINE_AA)
cv2.imshow(win_name, img_copy)
def main():
stdevX10 = 10
threshold = 60
alpha0 = 0
alpha1 = 360
drho = 1
dtheta = 1
accum = 1
n = 10
img = cv2.imread(
str(utils.EXAMPLES_DIR / 'road.jpg'),
cv2.IMREAD_GRAYSCALE
)
grad_win_name = 'Edge detection (gradient)'
cv2.namedWindow(grad_win_name)
partial_grad_callback = partial(
edge_detection_grad_callback, img=img, win_name=grad_win_name
)
cv2.createTrackbar(
'stdev', grad_win_name, stdevX10, 100, partial_grad_callback
)
cv2.createTrackbar(
'threshold', grad_win_name, threshold, 100, partial_grad_callback
)
cv2.createTrackbar(
'alpha0', grad_win_name, alpha0, 360, partial_grad_callback
)
cv2.createTrackbar(
'alpha1', grad_win_name, alpha1, 360, partial_grad_callback
)
edge_detection_grad_callback(0, img, grad_win_name)
cv2.waitKey(0)
hough_win_name = 'Edge detection (Hough)'
partial_hough_callback = partial(
hough_callback, img=img, win_name=hough_win_name
)
cv2.namedWindow(hough_win_name)
cv2.createTrackbar('drho', hough_win_name, drho, 100, partial_hough_callback)
cv2.createTrackbar('dtheta', hough_win_name, dtheta, 100, partial_hough_callback)
cv2.createTrackbar('accum', hough_win_name, accum, 100, partial_hough_callback)
cv2.createTrackbar('n', hough_win_name, n, 100, partial_hough_callback)
hough_callback(0, img, hough_win_name)
cv2.waitKey(0)
if __name__ == '__main__':
main()
```
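The endpoint formulas in `hough_callback` follow from the normal form of a Hough line, x·cos(θ) + y·sin(θ) = ρ: near-vertical lines are intersected with the horizontal image borders, the rest with the vertical borders. A standalone version of that computation:
```python
import numpy as np

def line_endpoints(rho, theta, width, height):
    if theta < np.pi / 4. or theta > 3. * np.pi / 4.:
        # closer to vertical: intersect with y = 0 and y = height
        p1 = (int(rho / np.cos(theta)), 0)
        p2 = (int((rho - height * np.sin(theta)) / np.cos(theta)), height)
    else:
        # closer to horizontal: intersect with x = 0 and x = width
        p1 = (0, int(rho / np.sin(theta)))
        p2 = (width, int((rho - width * np.cos(theta)) / np.sin(theta)))
    return p1, p2

print(line_endpoints(100.0, np.deg2rad(80.0), 640, 480))
```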
#### File: joaco18/maia-italy-aia/2022-04-05-tools-opening-by-reconstruction.py
```python
import cv2
import utils
import numpy as np
def main():
input_img = cv2.imread(
str(utils.EXAMPLES_DIR/'tools.png'),
cv2.IMREAD_GRAYSCALE
)
utils.cv2_imshow('Original image', input_img, 'gray')
freq = utils.our_hist_numba(input_img)
utils.hist_plot(np.arange(256), freq)
T = utils.get_triangle_auto_threshold(freq)
print(f'Triangle T = {T}')
_, binarized_img_triangle = cv2.threshold(
input_img, T, 255, cv2.THRESH_BINARY
)
utils.cv2_imshow('Triangle binarized image', binarized_img_triangle, 'gray')
SE = cv2.getStructuringElement(cv2.MORPH_RECT, (10, 10))
eroded_img = cv2.morphologyEx(binarized_img_triangle, cv2.MORPH_ERODE, SE)
utils.cv2_imshow('Triangle after erosion', eroded_img, 'gray')
marker_cur = eroded_img.copy()
marker_prev = np.zeros(marker_cur.shape)
mask = binarized_img_triangle.copy()
while np.count_nonzero(marker_cur - marker_prev) > 0:
marker_prev = marker_cur.copy()
SE = cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3))
marker_cur = cv2.dilate(marker_cur, SE)
marker_cur[mask == 0] = 0
cv2.imshow('Reconstruction in progress', marker_cur)
cv2.waitKey(100)
utils.cv2_imshow('Reconstruction result', marker_cur, 'gray')
cv2.waitKey(0)
if __name__ == '__main__':
main()
```
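The dilate-then-mask loop above is morphological reconstruction by dilation (geodesic dilation iterated to stability). A compact sketch of the same operation on a toy image:
```python
import numpy as np
import cv2

def reconstruct(marker, mask, se=None):
    if se is None:
        se = cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3))
    prev = np.zeros_like(marker)
    cur = marker.copy()
    while np.count_nonzero(cur != prev):
        prev = cur
        cur = cv2.dilate(cur, se)
        cur[mask == 0] = 0  # geodesic constraint: never grow past the mask
    return cur

marker = np.zeros((9, 9), np.uint8); marker[4, 4] = 255
mask = np.zeros((9, 9), np.uint8); mask[2:7, 2:7] = 255
print(np.count_nonzero(reconstruct(marker, mask)))  # fills the whole 5x5 block
```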
#### File: joaco18/maia-italy-aia/2022-04-20-ilumination-correction.py
```python
import cv2
import utils
import numpy as np
# TODO: NOT WORKING PROPERLY
def main():
img = cv2.imread(
str(utils.EXAMPLES_DIR/'rice.png'),
cv2.IMREAD_GRAYSCALE
)
_, bin = cv2.threshold(
img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU
)
cv2.imshow('Binarized on original', bin)
cv2.waitKey(0)
IF = cv2.morphologyEx(
img, cv2.MORPH_OPEN,
cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (40, 40))
)
cv2.imshow('IF', IF)
cv2.waitKey(0)
th = img - IF
cv2.imshow('Tophat', th)
cv2.waitKey(0)
_, th = cv2.threshold(th, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
cv2.imshow('Binarized after tophat', th)
cv2.waitKey(0)
avg_if = np.mean(IF)
cv2.imshow('Original Image', img)
cv2.waitKey(0)
cv2.imshow('Corrected image', th + avg_if)
cv2.waitKey(0)
if __name__ == '__main__':
main()
```
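Since the script flags itself as not working, here is a sketch of the standard top-hat illumination correction it appears to aim for; the file path is a hypothetical local copy of the example image, and `cv2.subtract` is used so the uint8 arithmetic saturates instead of wrapping:
```python
import cv2
import numpy as np

img = cv2.imread('rice.png', cv2.IMREAD_GRAYSCALE)  # hypothetical local path
se = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (40, 40))
background = cv2.morphologyEx(img, cv2.MORPH_OPEN, se)
tophat = cv2.subtract(img, background)  # saturating subtraction avoids wrap-around
# keep the corrected image grayscale; add the mean background level back
corrected = np.clip(tophat.astype(int) + int(background.mean()), 0, 255).astype(np.uint8)
_, binarized = cv2.threshold(corrected, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
cv2.imshow('Corrected image', corrected)
cv2.waitKey(0)
```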
#### File: joaco18/maia-italy-aia/2022-04-28-mean-shift.py
```python
import cv2
import utils
def main():
input_img = cv2.imread(str(utils.EXAMPLES_DIR/'retina.png'))
result = cv2.pyrMeanShiftFiltering(input_img, 2, 30, 0)
cv2.imshow('Original', input_img)
cv2.waitKey(0)
cv2.imshow('Mean Shift result', result)
cv2.waitKey(0)
cv2.imwrite(str(utils.EXAMPLES_DIR/'retina.MS.png'), result)
if __name__ == '__main__':
main()
``` |
{
"source": "joacoc/materialize",
"score": 2
} |
#### File: ci/test/build.py
```python
from pathlib import Path
from materialize import mzbuild, spawn
from materialize.xcompile import Arch
def main() -> None:
repo = mzbuild.Repository(Path("."))
# Build and push any images that are not already available on Docker Hub,
# so they are accessible to other build agents.
print("--- Acquiring mzbuild images")
deps = repo.resolve_dependencies(image for image in repo if image.publish)
deps.ensure()
annotate_buildkite_with_tags(repo.rd.arch, deps)
def annotate_buildkite_with_tags(arch: Arch, deps: mzbuild.DependencySet) -> None:
tags = "\n".join([f"* `{dep.spec()}`" for dep in deps])
markdown = f"""<details><summary>{arch} Docker tags produced in this build</summary>
{tags}
</details>"""
spawn.runv(
["buildkite-agent", "annotate", "--style=info", f"--context=build-{arch}"],
stdin=markdown.encode(),
)
if __name__ == "__main__":
main()
```
#### File: test/kafka-multi-broker/mzcompose.py
```python
import time
from materialize.mzcompose import Composition
from materialize.mzcompose.services import (
Kafka,
Materialized,
SchemaRegistry,
Testdrive,
Zookeeper,
)
SERVICES = [
Zookeeper(),
Kafka(name="kafka1", broker_id=1, offsets_topic_replication_factor=2),
Kafka(name="kafka2", broker_id=2, offsets_topic_replication_factor=2),
Kafka(name="kafka3", broker_id=3, offsets_topic_replication_factor=2),
SchemaRegistry(
kafka_servers=[("kafka1", "9092"), ("kafka2", "9092"), ("kafka3", "9092")]
),
Materialized(),
Testdrive(
entrypoint=[
"testdrive",
"--schema-registry-url=http://schema-registry:8081",
"--materialize-url=postgres://materialize@materialized:6875",
"--kafka-option=acks=all",
"--seed=1",
]
),
]
def workflow_default(c: Composition) -> None:
c.start_and_wait_for_tcp(
services=[
"zookeeper",
"kafka1",
"kafka2",
"kafka3",
"schema-registry",
"materialized",
]
)
c.run("testdrive", "--kafka-addr=kafka2", "01-init.td")
time.sleep(10)
c.kill("kafka1")
time.sleep(10)
c.run("testdrive", "--kafka-addr=kafka2,kafka3", "--no-reset", "02-after-leave.td")
c.up("kafka1")
time.sleep(10)
c.run("testdrive", "--kafka-addr=kafka1", "--no-reset", "03-after-join.td")
``` |
{
"source": "joacocruz6/cc3501-tarea2",
"score": 3
} |
#### File: joacocruz6/cc3501-tarea2/Fondo.py
```python
from CC3501Utils import *
class Fondo(Figura):
def __init__(self,pos=Vector(0.0,0.0),rgb=(138.0/255,138.0/255,138.0/255)):
super().__init__(pos,rgb)
def figura(self):
"""
        Draws the background
:return: None
"""
ancho=15
alto=13
dx=53.0
dy=46.0
(r,g,b)=self.color
for i in range(ancho):
for j in range(alto):
glColor3f(83.0/255,83.0/255,85.0/255)
glBegin(GL_QUADS)
glVertex2f(i*dx,j*dy)
glVertex2f(i*dx,j*dy+dy)
glVertex2f(i*dx+dx,j*dy+dy)
glVertex2f(i*dx+dx,j*dy)
glEnd()
glColor3f(r,g,b)
glBegin(GL_QUADS)
glVertex2f(i * dx+2.0, j * dy+2.0)
glVertex2f(i * dx+2.0, j * dy + dy-2.0)
glVertex2f(i * dx + dx-2.0, j * dy + dy-2.0)
glVertex2f(i * dx + dx-2.0, j * dy+2.0)
glEnd()
```
#### File: joacocruz6/cc3501-tarea2/Player.py
```python
from CC3501Utils import *
import math as m
class Player(Figura):
#inicializador
def __init__(self,pos: Vector,rgb=(1.0,1.0,51.0/255),vida=True,numero=1):
        self.vida=vida # True if alive
self.centro=Vector(pos.x+(53.0/2),pos.y+(46.0/2))
self.numero=numero
if self.numero==1:
self.color_camisa=(1.0,1.0,1.0)
if self.numero==2:
self.color_camisa=(43.0/255,185/255,0.0)
super().__init__(pos,rgb)
def figura(self):
"""
        Draws the model on screen
:return: None
"""
(r,g,b)=self.color
(cr,cg,cb)=self.color_camisa
        # discretization
#################
###Homero (P1)###
#################
if self.numero==1:
dx = 53.0 / 16
dy = 46.0 / 20
######################
####contorno negro####
######################
glColor3f(0.0, 0.0, 0.0)
# torso
glBegin(GL_POLYGON)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(8 * dx, 19 * dy)
glVertex2f(11 * dx, 19 * dy)
glVertex2f(11 * dx, 18 * dy)
glVertex2f(12 * dx, 18 * dy)
glVertex2f(12 * dx, 16 * dy)
glVertex2f(13 * dx, 16 * dy)
glVertex2f(13 * dx, 12 * dy)
glVertex2f(14 * dx, 12 * dy)
glVertex2f(14 * dx, 11 * dy)
glVertex2f(15 * dx, 11 * dy)
glVertex2f(15 * dx, 8 * dy)
glEnd()
glBegin(GL_POLYGON)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(8 * dx, 19 * dy)
glVertex2f(5 * dx, 19 * dy)
glVertex2f(5 * dx, 18 * dy)
glVertex2f(4 * dx, 18 * dy)
glVertex2f(4 * dx, 16 * dy)
glVertex2f(3 * dx, 16 * dy)
glVertex2f(3 * dx, 12 * dy)
glVertex2f(2 * dx, 12 * dy)
glVertex2f(2 * dx, 11 * dy)
glVertex2f(dx, 11 * dy)
glVertex2f(dx, 8 * dy)
glEnd()
            # ---legs---
glBegin(GL_QUADS)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(12 * dx, 8 * dy)
glVertex2f(12 * dx, 2 * dy)
glVertex2f(8 * dx, 2 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(4 * dx, 8 * dy)
glVertex2f(4 * dx, 2 * dy)
glVertex2f(8 * dx, 2 * dy)
glEnd()
            # remaining ones (details)
glBegin(GL_QUADS)
glVertex2f(5 * dx, dy)
glVertex2f(5 * dx, 2 * dy)
glVertex2f(7 * dx, 2 * dy)
glVertex2f(7 * dx, dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(9 * dx, dy)
glVertex2f(9 * dx, 2 * dy)
glVertex2f(11 * dx, 2 * dy)
glVertex2f(11 * dx, dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(2 * dx, 7 * dy)
glVertex2f(2 * dx, 8 * dy)
glVertex2f(4 * dx, 8 * dy)
glVertex2f(4 * dx, 7 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(12 * dx, 7 * dy)
glVertex2f(12 * dx, 8 * dy)
glVertex2f(14 * dx, 8 * dy)
glVertex2f(14 * dx, 7 * dy)
glEnd()
glColor3f(138.0 / 255, 138.0 / 255, 138.0 / 255)
glBegin(GL_QUADS)
glVertex2f(3 * dx, 13 * dy)
glVertex2f(3 * dx, 14 * dy)
glVertex2f(4 * dx, 14 * dy)
glVertex2f(4 * dx, 13 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(12 * dx, 13 * dy)
glVertex2f(12 * dx, 14 * dy)
glVertex2f(13 * dx, 14 * dy)
glVertex2f(13 * dx, 13 * dy)
glEnd()
# --------------------
            ####figure####
            # shoes
glColor3f(0.2, 0.2, 0.2)
glBegin(GL_QUADS)
glVertex2f(5 * dx, 2 * dy)
glVertex2f(5 * dx, 3 * dy)
glVertex2f(7 * dx, 3 * dy)
glVertex2f(7 * dx, 2 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(9 * dx, 2 * dy)
glVertex2f(9 * dx, 3 * dy)
glVertex2f(11 * dx, 3 * dy)
glVertex2f(11 * dx, 2 * dy)
glEnd()
            # trousers
glColor3f(75.0 / 255, 146.0 / 255, 226.0 / 255)
glBegin(GL_QUADS)
glVertex2f(5 * dx, 3 * dy)
glVertex2f(5 * dx, 6 * dy)
glVertex2f(7 * dx, 6 * dy)
glVertex2f(7 * dx, 3 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(9 * dx, 3 * dy)
glVertex2f(9 * dx, 6 * dy)
glVertex2f(11 * dx, 6 * dy)
glVertex2f(11 * dx, 3 * dy)
glEnd()
            ###shirt###
glColor3f(cr,cg,cb)
glBegin(GL_QUADS)
glVertex2f(5 * dx, 6 * dy)
glVertex2f(5 * dx, 10 * dy)
glVertex2f(11 * dx, 10 * dy)
glVertex2f(11 * dx, 6 * dy)
glEnd()
            # ---right---
glBegin(GL_QUADS)
glVertex2f(2 * dx, 10 * dy)
glVertex2f(2 * dx, 11 * dy)
glVertex2f(3 * dx, 11 * dy)
glVertex2f(3 * dx, 10 * dy)
glEnd()
glBegin(GL_POLYGON)
glVertex2f(3 * dx, 10 * dy)
glVertex2f(3 * dx, 12 * dy)
glVertex2f(5 * dx, 12 * dy)
glVertex2f(5 * dx, 11 * dy)
glVertex2f(6 * dx, 11 * dy)
glVertex2f(6 * dx, 10 * dy)
glEnd()
            # ---left---
glBegin(GL_QUADS)
glVertex2f(10 * dx, 10 * dy)
glVertex2f(10 * dx, 11 * dy)
glVertex2f(11 * dx, 11 * dy)
glVertex2f(11 * dx, 10 * dy)
glEnd()
glBegin(GL_POLYGON)
glVertex2f(11 * dx, 10 * dy)
glVertex2f(11 * dx, 12 * dy)
glVertex2f(13 * dx, 12 * dy)
glVertex2f(13 * dx, 11 * dy)
glVertex2f(14 * dx, 11 * dy)
glVertex2f(14 * dx, 10 * dy)
glEnd()
            # skin
glColor3f(r, g, b)
            # ---Hands---
glBegin(GL_QUADS)
glVertex2f(2 * dx, 8 * dy)
glVertex2f(2 * dx, 10 * dy)
glVertex2f(4 * dx, 10 * dy)
glVertex2f(4 * dx, 8 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(12 * dx, 8 * dy)
glVertex2f(12 * dx, 10 * dy)
glVertex2f(14 * dx, 10 * dy)
glVertex2f(14 * dx, 8 * dy)
glEnd()
            #---head---
            #nose
glBegin(GL_QUADS)
glVertex2f(7*dx,13*dy)
glVertex2f(7*dx,14*dy)
glVertex2f(9*dx,14*dy)
glVertex2f(9*dx,13*dy)
glEnd()
            #sides
glBegin(GL_QUADS)
glVertex2f(5*dx,12*dy)
glVertex2f(5*dx,14*dy)
glVertex2f(6*dx,14*dy)
glVertex2f(6*dx,12*dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(10 * dx, 12 * dy)
glVertex2f(10 * dx, 14 * dy)
glVertex2f(11 * dx, 14 * dy)
glVertex2f(11 * dx, 12 * dy)
glEnd()
            #forehead
glBegin(GL_QUADS)
glVertex2f(5*dx,14*dy)
glVertex2f(5*dx,18*dy)
glVertex2f(11*dx,18*dy)
glVertex2f(11*dx,14*dy)
glEnd()
            #ears
glBegin(GL_QUADS)
glVertex2f(4*dx,14*dy)
glVertex2f(4*dx,15*dy)
glVertex2f(5*dx,15*dy)
glVertex2f(5*dx,14*dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(11 * dx, 14 * dy)
glVertex2f(11 * dx, 15 * dy)
glVertex2f(12 * dx, 15 * dy)
glVertex2f(12 * dx, 14 * dy)
glEnd()
            # ---beard---
glColor3f(171.0/255,82.0/255,4.0/255)
glBegin(GL_QUADS)
glVertex2f(5*dx,11*dy)
glVertex2f(5*dx,12*dy)
glVertex2f(10*dx,12*dy)
glVertex2f(10*dx,11*dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(7*dx,12*dy)
glVertex2f(7*dx,13*dy)
glVertex2f(9*dx,13*dy)
glVertex2f(9*dx,12*dy)
glEnd()
################
###Marge (P2)###
################
if self.numero==2:
dx=53.0/16
dy=46.0/24
##############
            ###outline###
##############
glColor3f(0.0, 0.0, 0.0)
# torso
glBegin(GL_POLYGON)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(8 * dx, 23 * dy)
glVertex2f(11 * dx, 23 * dy)
glVertex2f(11 * dx, 22 * dy)
glVertex2f(12 * dx, 22 * dy)
glVertex2f(12 * dx, 21 * dy)
glVertex2f(13 * dx, 21 * dy)
glVertex2f(13 * dx, 12 * dy)
glVertex2f(14 * dx, 12 * dy)
glVertex2f(14 * dx, 11 * dy)
glVertex2f(15 * dx, 11 * dy)
glVertex2f(15 * dx, 8 * dy)
glEnd()
glBegin(GL_POLYGON)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(8 * dx, 23 * dy)
glVertex2f(5 * dx, 23 * dy)
glVertex2f(5 * dx, 22 * dy)
glVertex2f(4 * dx, 22 * dy)
glVertex2f(4 * dx, 21 * dy)
glVertex2f(3 * dx, 21 * dy)
glVertex2f(3 * dx, 12 * dy)
glVertex2f(2 * dx, 12 * dy)
glVertex2f(2 * dx, 11 * dy)
glVertex2f(dx, 11 * dy)
glVertex2f(dx, 8 * dy)
glEnd()
            # ---legs---
glBegin(GL_QUADS)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(12 * dx, 8 * dy)
glVertex2f(12 * dx, 2 * dy)
glVertex2f(8 * dx, 2 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(8 * dx, 8 * dy)
glVertex2f(4 * dx, 8 * dy)
glVertex2f(4 * dx, 2 * dy)
glVertex2f(8 * dx, 2 * dy)
glEnd()
            # remaining ones (details)
glBegin(GL_QUADS)
glVertex2f(5 * dx, dy)
glVertex2f(5 * dx, 2 * dy)
glVertex2f(7 * dx, 2 * dy)
glVertex2f(7 * dx, dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(9 * dx, dy)
glVertex2f(9 * dx, 2 * dy)
glVertex2f(11 * dx, 2 * dy)
glVertex2f(11 * dx, dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(2 * dx, 7 * dy)
glVertex2f(2 * dx, 8 * dy)
glVertex2f(4 * dx, 8 * dy)
glVertex2f(4 * dx, 7 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(12 * dx, 7 * dy)
glVertex2f(12 * dx, 8 * dy)
glVertex2f(14 * dx, 8 * dy)
glVertex2f(14 * dx, 7 * dy)
glEnd()
glColor3f(138.0 / 255, 138.0 / 255, 138.0 / 255)
glBegin(GL_QUADS)
glVertex2f(3 * dx, 13 * dy)
glVertex2f(3 * dx, 14 * dy)
glVertex2f(4 * dx, 14 * dy)
glVertex2f(4 * dx, 13 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(12 * dx, 13 * dy)
glVertex2f(12 * dx, 14 * dy)
glVertex2f(13 * dx, 14 * dy)
glVertex2f(13 * dx, 13 * dy)
glEnd()
############
            ###figure###
############
            #---shoes---#
glColor3f(235.0/255, 0.0, 0.0)
glBegin(GL_QUADS)
glVertex2f(5 * dx, 2 * dy)
glVertex2f(5 * dx, 3 * dy)
glVertex2f(7 * dx, 3 * dy)
glVertex2f(7 * dx, 2 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(9 * dx, 2 * dy)
glVertex2f(9 * dx, 3 * dy)
glVertex2f(11 * dx, 3 * dy)
glVertex2f(11 * dx, 2 * dy)
glEnd()
            #---dress---#
glColor3f(cr,cg,cb)
glBegin(GL_QUADS)
glVertex2f(5*dx,3*dy)
glVertex2f(5*dx,10*dy)
glVertex2f(11*dx,10*dy)
glVertex2f(11*dx,3*dy)
glEnd()
            # ---right---
            # skin
glColor3f(r, g, b)
glBegin(GL_QUADS)
glVertex2f(2 * dx, 10 * dy)
glVertex2f(2 * dx, 11 * dy)
glVertex2f(3 * dx, 11 * dy)
glVertex2f(3 * dx, 10 * dy)
glEnd()
glBegin(GL_POLYGON)
glVertex2f(3 * dx, 10 * dy)
glVertex2f(3 * dx, 12 * dy)
glVertex2f(5 * dx, 12 * dy)
glVertex2f(5 * dx, 11 * dy)
glVertex2f(6 * dx, 11 * dy)
glVertex2f(6 * dx, 10 * dy)
glEnd()
            # ---left---
glBegin(GL_QUADS)
glVertex2f(10 * dx, 10 * dy)
glVertex2f(10 * dx, 11 * dy)
glVertex2f(11 * dx, 11 * dy)
glVertex2f(11 * dx, 10 * dy)
glEnd()
glBegin(GL_POLYGON)
glVertex2f(11 * dx, 10 * dy)
glVertex2f(11 * dx, 12 * dy)
glVertex2f(13 * dx, 12 * dy)
glVertex2f(13 * dx, 11 * dy)
glVertex2f(14 * dx, 11 * dy)
glVertex2f(14 * dx, 10 * dy)
glEnd()
            # ---Hands---
glBegin(GL_QUADS)
glVertex2f(2 * dx, 8 * dy)
glVertex2f(2 * dx, 10 * dy)
glVertex2f(4 * dx, 10 * dy)
glVertex2f(4 * dx, 8 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(12 * dx, 8 * dy)
glVertex2f(12 * dx, 10 * dy)
glVertex2f(14 * dx, 10 * dy)
glVertex2f(14 * dx, 8 * dy)
glEnd()
            #---head---#
            #mouth
glBegin(GL_QUADS)
glVertex2f(6*dx,11*dy)
glVertex2f(6*dx,12*dy)
glVertex2f(10*dx,12*dy)
glVertex2f(10*dx,11*dy)
glEnd()
            # nose
glBegin(GL_QUADS)
glVertex2f(7 * dx, 12 * dy)
glVertex2f(7 * dx, 14 * dy)
glVertex2f(9 * dx, 14 * dy)
glVertex2f(9 * dx, 12 * dy)
glEnd()
            # sides
glBegin(GL_QUADS)
glVertex2f(5 * dx, 12 * dy)
glVertex2f(5 * dx, 14 * dy)
glVertex2f(6 * dx, 14 * dy)
glVertex2f(6 * dx, 12 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(10 * dx, 12 * dy)
glVertex2f(10 * dx, 14 * dy)
glVertex2f(11 * dx, 14 * dy)
glVertex2f(11 * dx, 12 * dy)
glEnd()
            # forehead + ears
glBegin(GL_QUADS)
glVertex2f(4 * dx, 14 * dy)
glVertex2f(4 * dx, 16 * dy)
glVertex2f(12 * dx, 16 * dy)
glVertex2f(12 * dx, 14 * dy)
glEnd()
            #---hair---#
glColor3f(0.0,12.0/255,175.0/255)
glBegin(GL_QUADS)
glVertex2f(4*dx,15*dy)
glVertex2f(4*dx,16*dy)
glVertex2f(5*dx,16*dy)
glVertex2f(5*dx,15*dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(11 * dx, 15 * dy)
glVertex2f(11 * dx, 16 * dy)
glVertex2f(12 * dx, 16 * dy)
glVertex2f(12 * dx, 15 * dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(4*dx,16*dy)
glVertex2f(4*dx,21*dy)
glVertex2f(12*dx,21*dy)
glVertex2f(12*dx,16*dy)
glEnd()
glBegin(GL_QUADS)
glVertex2f(5*dx,21*dy)
glVertex2f(5*dx,22*dy)
glVertex2f(11*dx,22*dy)
glVertex2f(11*dx,21*dy)
glEnd()
def updatecenter(self):
"""
        Updates the player's center after moving
:return: None
"""
self.centro=Vector(self.pos.x+53.0/2,self.pos.y+46.0/2)
    # movement: moves one grid cell per call; direccion is -1 or 1 (subtract or add)
def moverx(self,direccion: int):
self.pos=sumar(self.pos,Vector(direccion*53.0,0.0))
self.dibujar()
self.updatecenter()
def movery(self,direccion:int):
self.pos=sumar(self.pos,Vector(0.0,direccion*46.0))
self.updatecenter()
def getpos(self):
"""
        Returns the position as an (x, y) tuple
:return: tuple
"""
return self.pos.cartesianas()
def getcenter(self):
"""
        Returns the center as an (x, y) tuple
:return: tuple
"""
return self.centro.cartesianas()
def getlife(self):
"""
        True if alive
:return: boolean
"""
return self.vida
def setlife(self,v):
"""
        Sets the life value to v
:param v: boolean
:return: None
"""
self.vida=v
def normalizar_camisa(self):
"""
        When a power-up ends, resets the shirt color to its normal value
:return: None
"""
if self.numero==1:
self.color_camisa=(1.0,1.0,1.0)
if self.numero==2:
self.color_camisa=(43.0/255,185/255,0.0)
def setcoloracion(self,rgb: tuple):
"""
        Changes the shirt color
:param rgb: tuple
:return: None
"""
self.color_camisa=rgb
``` |
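Both `Fondo.figura` and `Player.figura` repeat the same four-vertex `glBegin`/`glEnd` pattern dozens of times. A small helper that could factor that pattern out (hypothetical, not part of the original assignment code):
```python
from OpenGL.GL import glBegin, glEnd, glColor3f, glVertex2f, GL_QUADS

def draw_cell(x0, y0, x1, y1, rgb):
    """Draw an axis-aligned quad from (x0, y0) to (x1, y1)."""
    glColor3f(*rgb)
    glBegin(GL_QUADS)
    glVertex2f(x0, y0)
    glVertex2f(x0, y1)
    glVertex2f(x1, y1)
    glVertex2f(x1, y0)
    glEnd()

# e.g. the shoes of player 1 would become:
# draw_cell(5 * dx, 2 * dy, 7 * dx, 3 * dy, (0.2, 0.2, 0.2))
```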
{
"source": "joacocruz6/RevisadorTareas",
"score": 2
} |
#### File: src/tester/CPPTester.py
```python
import os
import sys
import subprocess
from pathlib import Path
"""
First we do some pathing options
"""
src_directory = Path('.').resolve().parent
project_directory = Path('.').resolve().parent.parent
# sys.path entries must be plain strings
sys.path.append(str(src_directory))
sys.path.append(str(project_directory))
"""
Some imports
"""
try:
from .AbstractTester import AbstractTester
except ModuleNotFoundError:
try:
from AbstractTester import AbstractTester
except ModuleNotFoundError as e:
print(e)
from options import TestCase, TestOptions
class CPPTester(AbstractTester):
def __init__(self,options: TestOptions,homework: str):
super().__init__(options,homework)
self._outputFile = homework.split('.cpp')[0]+'.out'
def compileFile(self):
subprocess.run(["g++",self.getHomework(),"-o",self._outputFile],cwd=self._cwd)
def run(self):
        for i in range(len(self.getOptions())):
p = subprocess.run(['./'+self._outputFile],input=self.getOptions()[i].getTestOptions(),universal_newlines=True,stdout=subprocess.PIPE,cwd=self._cwd)
self.getProcess().append(p)
``` |
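Stripped of the class wrapper, the compile-and-feed-stdin pattern that `CPPTester` automates looks like this (file names and working directory are hypothetical):
```python
import subprocess

subprocess.run(["g++", "sum.cpp", "-o", "sum.out"], cwd="homeworks")
p = subprocess.run(["./sum.out"], input="2 3\n", universal_newlines=True,
                   stdout=subprocess.PIPE, cwd="homeworks")
print(p.stdout)
```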
{
"source": "joaconigro/plugin.audio.tuneinradio",
"score": 3
} |
#### File: resources/lib/streamtheworld.py
```python
from random import choice
import urllib.request, urllib.error, urllib.parse
import xml.dom.minidom as minidom
class StreamTheWorld:
## Example XML document we are parsing follows, as the minidom code is so beautiful to follow
# http://playerservices.streamtheworld.com/api/livestream?version=1.4&mount=CARACOL_RADIOAAC&lang=EN
#
#<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
#<live_stream_config version="1.4">
# <mountpoints>
# <mountpoint>
# <status>
# <status-code>200</status-code>
# <status-message>OK</status-message>
# </status>
#
# <transports>
# <transport>http</transport>
# </transports>
#
# <servers>
# <server sid="3653">
# <ip>3653.live.streamtheworld.com</ip>
# <ports>
# <port type="http">80</port>
# <port type="http">3690</port>
# <port type="http">443</port>
# </ports>
# </server>
#
# <server sid="1351">
# <ip>1351.live.streamtheworld.com</ip>
# <ports>
# <port type="http">80</port>
# <port type="http">3690</port>
# <port type="http">443</port>
# </ports>
# </server>
# </servers>
#
# <mount>CARACOL_RADIOAAC</mount>
# <format>FLV</format>
# <bitrate>32000</bitrate>
# <media-format container="flv" cuepoints="andoxml">
# <audio index="0" samplerate="44100" codec="heaacv2" bitrate="32000" channels="2"/>
# </media-format>
# <authentication>0</authentication>
# <timeout>0</timeout>
# </mountpoint>
# </mountpoints>
#</live_stream_config>
''' Parse streamtheworld URL to HTTP Stream'''
def __init__(self, cs):
self.__cs__ = cs
return
def __validate_callsign(self, cs, acc=True):
'''
Normal callsign format is 'WWWWAAA', where 'WWWW' is the radio station
callsign and 'AAA' is always 'AAC'.
'''
if not cs or not isinstance(cs, str):
raise ValueError('callsign \'%s\' is not a string.' % cs)
if len(cs) < 6:
raise ValueError('callsign \'%s\' is too short.' % cs)
if acc and not cs.endswith('AAC'):
cs = cs + 'AAC'
return cs
def __make_request(self, callsign):
''' Make a Call to StreamTheWorld API v1.5'''
host = 'playerservices.streamtheworld.com'
req = urllib.request.Request(
'http://%s/api/livestream?version=1.5&mount=%s&lang=en' %
(host, callsign))
req.add_header('User-Agent', 'Mozilla/5.0')
return req
def __t(self, element):
'''get the text of a DOM element'''
return element.firstChild.data
def __check_status(self, ele):
''' should only be one status element inside a mountpoint'''
status = ele.getElementsByTagName('status')[0]
if self.__t(status.getElementsByTagName('status-code')[0]) != '200':
msg = self.__t(status.getElementsByTagName('status-message')[0])
raise Exception('Error locating stream: ' + msg)
def __create_stream_urls(self, srcfile):
''' Return an array with all URLs'''
doc = minidom.parse(srcfile)
mp = doc.getElementsByTagName('mountpoint')[0]
self.__check_status(mp)
mt = self.__t(mp.getElementsByTagName('mount')[0])
allurls = []
for s in mp.getElementsByTagName('server'):
# a thing of beauty, right?
ip = self.__t(s.getElementsByTagName('ip')[0])
ports = [self.__t(p) for p in s.getElementsByTagName('port')]
# yes, it is always HTTP. We see ports 80, 443, and 3690 usually
urls = ['http://%s:%s/%s' % (ip, p, mt) for p in ports]
allurls.extend(urls)
return allurls
def get_stream_url(self, cs):
''' Get one URL from CS'''
try:
callsign = self.__validate_callsign(cs)
req = self.__make_request(callsign)
result = urllib.request.urlopen(req)
urls = self.__create_stream_urls(result)
except:
callsign = self.__validate_callsign(cs, False)
req = self.__make_request(callsign)
result = urllib.request.urlopen(req)
urls = self.__create_stream_urls(result)
if len(urls) > 0:
u = choice(urls)
if not u.endswith('_SC'):
u = u + '_SC'
return u
``` |
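A minimal usage sketch for the class above (not part of the original file): it assumes the module is importable as `resources.lib.streamtheworld`, that network access is available, and that the example callsign `CARACOL_RADIO` from the XML comment is still served by the API.
```python
# Hypothetical usage; CARACOL_RADIO comes from the example XML above and
# may no longer be live, so treat this as an illustration only.
from resources.lib.streamtheworld import StreamTheWorld

stw = StreamTheWorld('CARACOL_RADIO')
url = stw.get_stream_url('CARACOL_RADIO')
print(url)  # e.g. http://3653.live.streamtheworld.com:80/CARACOL_RADIOAAC_SC
```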
{
"source": "joacorapela/svGPFA",
"score": 2
} |
#### File: svGPFA/ci/test_svPosteriorOnIndPoints.py
```python
import pdb
import sys
import os
from scipy.io import loadmat
import torch
sys.path.append("../src")
import stats.svGPFA.svPosteriorOnIndPoints
import utils.svGPFA.initUtils
def test_buildQSigma():
tol = 1e-5
dataFilename = os.path.join(os.path.dirname(__file__), "data/get_full_from_lowplusdiag.mat")
mat = loadmat(dataFilename)
nLatents = mat['q_sqrt'].shape[0]
nTrials = mat['q_sqrt'][(0,0)].shape[2]
qSVec0 = [torch.from_numpy(mat['q_sqrt'][(i,0)]).type(torch.DoubleTensor).permute(2,0,1) for i in range(nLatents)]
qSDiag0 = [torch.from_numpy(mat['q_diag'][(i,0)]).type(torch.DoubleTensor).permute(2,0,1) for i in range(nLatents)]
srQSigma0Vecs = utils.svGPFA.initUtils.getSRQSigmaVec(qSVec=qSVec0, qSDiag=qSDiag0)
q_sigma = [torch.from_numpy(mat['q_sigma'][(0,k)]).permute(2,0,1) for k in range(nLatents)]
qMu0 = [[] for i in range(nLatents)]
params0 = {"qMu0": qMu0, "srQSigma0Vecs": srQSigma0Vecs}
qU = stats.svGPFA.svPosteriorOnIndPoints.SVPosteriorOnIndPoints()
qU.setInitialParams(initialParams=params0)
qSigma = qU.buildQSigma()
error = torch.tensor([(qSigma[k]-q_sigma[k]).norm() for k in range(len(qSigma))]).sum()
assert(error<tol)
if __name__=="__main__":
test_buildQSigma()
```
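For context, a self-contained sketch of the property `buildQSigma` is tested on above: a covariance assembled as Sigma = L Lᵀ from a packed square-root factor is positive semidefinite by construction. The packed-lower-triangular layout below is an assumption for illustration, not necessarily the repo's exact `lowplusdiag` format.
```python
import torch

# Illustration only: build Sigma = L @ L.T from a packed lower-triangular
# factor and check that it is positive semidefinite, which is the guarantee
# the square-root parameterization of the variational covariance provides.
nInd = 4
srVec = torch.randn(nInd * (nInd + 1) // 2, dtype=torch.double)
L = torch.zeros(nInd, nInd, dtype=torch.double)
rows, cols = torch.tril_indices(nInd, nInd)
L[rows, cols] = srVec
Sigma = L @ L.T
assert (torch.linalg.eigvalsh(Sigma) >= -1e-12).all()
```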
#### File: svGPFA/scripts/doPlotCIFsTruePythonAndMatlab_pythonSim.py
```python
import sys
import os
import torch
import pdb
import pickle
import argparse
import configparser
import scipy.io
import matplotlib.pyplot as plt
from scipy.io import loadmat
sys.path.append("../src")
import plot.svGPFA.plotUtilsPlotly
import utils.svGPFA.miscUtils
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("mEstNumber", help="Matlab's estimation number", type=int)
parser.add_argument("trialToPlot", help="Trial to plot", type=int)
parser.add_argument("neuronToPlot", help="Neuron to plot", type=int)
args = parser.parse_args()
mEstNumber = args.mEstNumber
trialToPlot = args.trialToPlot
neuronToPlot = args.neuronToPlot
marker = "x"
tLabel = "True"
ylim = [-6, 2]
nResamples = 10000
# pLabelPattern = "$\text{Python} (R^2={:.02f})$"
# mLabelPattern = "$\text{Matlab} (R^2={:.02f})$"
pLabelPattern = "Python (R<sup>2</sup>={:.02f})"
mLabelPattern = "Matlab (R<sup>2</sup>={:.02f})"
mEstParamsFilename = "../../matlabCode/scripts/results/{:08d}-pointProcessEstimationParams.ini".format(mEstNumber)
mEstConfig = configparser.ConfigParser()
mEstConfig.read(mEstParamsFilename)
pEstNumber = int(mEstConfig["data"]["pEstNumber"])
pEstimMetaDataFilename = "results/{:08d}_estimation_metaData.ini".format(pEstNumber)
pEstConfig = configparser.ConfigParser()
pEstConfig.read(pEstimMetaDataFilename)
pSimNumber = int(pEstConfig["simulation_params"]["simResNumber"])
pSimResFilename = "results/{:08d}_simRes.pickle".format(pSimNumber)
mModelSaveFilename = "../../matlabCode/scripts/results/{:08d}-pointProcessEstimationRes.mat".format(mEstNumber)
pModelSaveFilename = "results/{:08d}_estimatedModel.pickle".format(pEstNumber)
figFilenamePattern = "figures/{:08d}-{:08d}-truePythonMatlabCIFsPointProcess.{{:s}}".format(mEstNumber, pEstNumber)
with open(pSimResFilename, "rb") as f: simRes = pickle.load(f)
nTrials = len(simRes["latents"])
nLatents = len(simRes["latents"][0])
nSamples = len(simRes["times"][0])
tTimes = simRes["times"][0]
tLatents = torch.empty((nTrials, nSamples, nLatents), dtype=torch.double)
for r in range(nTrials):
for k in range(nLatents):
tLatents[r,:,k] = simRes["latents"][r][k]
tC = simRes["C"]
nNeurons = tC.shape[0]
td = simRes["d"]
tCIFs = utils.svGPFA.miscUtils.getCIFs(C=tC, d=td, latents=tLatents)
loadRes = scipy.io.loadmat(mModelSaveFilename)
mTimes = torch.from_numpy(loadRes["testTimes"][:,0]).type(torch.DoubleTensor).squeeze()
mMeanLatents_tmp = torch.from_numpy(loadRes["meanEstimatedLatents"]).type(torch.DoubleTensor)
# mMeanLatents_tmp = torch.reshape(mMeanLatents_tmp, (-1, nTrials, nLatents))
mMeanLatents = torch.empty((nTrials, nSamples, nLatents), dtype=torch.double)
for r in range(nTrials):
for k in range(nLatents):
mMeanLatents[r,:,k] = mMeanLatents_tmp[:,k,r]
mVarLatents_tmp = torch.from_numpy(loadRes["varEstimatedLatents"]).type(torch.DoubleTensor)
# mVarLatents_tmp = torch.reshape(mVarLatents_tmp, (-1, nTrials, nLatents))
mVarLatents = torch.empty((nTrials, nSamples, nLatents), dtype=torch.double)
for r in range(nTrials):
for k in range(nLatents):
mVarLatents[r,:,k] = mVarLatents_tmp[:,k,r]
mC = torch.from_numpy(loadRes["m"]["prs"][0,0]["C"][0,0]).type(torch.DoubleTensor)
md = torch.from_numpy(loadRes["m"]["prs"][0,0]["b"][0,0]).type(torch.DoubleTensor)
mCIFs = utils.svGPFA.miscUtils.getCIFs(C=mC, d=md, latents=mMeanLatents)
with open(pModelSaveFilename, "rb") as f: res = pickle.load(f)
pModel = res["model"]
embeddingParams = pModel.getSVEmbeddingParams()
pC = embeddingParams[0]
pd = embeddingParams[1]
with torch.no_grad():
pTestMuK, _ = pModel.predictLatents(newTimes=mTimes)
pCIFs = utils.svGPFA.miscUtils.getCIFs(C=pC, d=pd, latents=pTestMuK)
pTimes = mTimes
tCIF = tCIFs[trialToPlot,:,neuronToPlot]
mCIF = mCIFs[trialToPlot,:,neuronToPlot]
pCIF = pCIFs[trialToPlot,:,neuronToPlot]
meanTCIF = torch.mean(tCIF)
ssTot = torch.sum((tCIF-meanTCIF)**2)
pSSRes = torch.sum((pCIF-tCIF)**2)
mSSRes = torch.sum((mCIF-tCIF)**2)
pR2 = (1-(pSSRes/ssTot)).item()
mR2 = (1-(mSSRes/ssTot)).item()
pLabel = pLabelPattern.format(pR2)
mLabel = mLabelPattern.format(mR2)
fig = plot.svGPFA.plotUtilsPlotly.\
getPlotTruePythonAndMatlabCIFs(tTimes=tTimes,
tCIF=tCIF,
tLabel=tLabel,
pTimes=pTimes,
pCIF=pCIF,
pLabel=pLabel,
mTimes=mTimes,
mCIF=mCIF,
mLabel=mLabel,
title="Trial {:d}, Neuron {:d}".format(trialToPlot, neuronToPlot),
)
fig.write_image(figFilenamePattern.format("png"))
fig.write_html(figFilenamePattern.format("html"))
fig.show()
pdb.set_trace()
if __name__=="__main__":
main(sys.argv)
```
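A minimal sketch of the goodness-of-fit measure the script reports: R² = 1 − SSres/SStot between a true and an estimated CIF, shown here on synthetic stand-in data.
```python
import torch

# Stand-in data; the script above applies the same formula to the true,
# Python-estimated, and Matlab-estimated CIFs of one trial and neuron.
trueCIF = torch.linspace(1.0, 5.0, 100)
estCIF = trueCIF + 0.1 * torch.randn(100)
ssTot = torch.sum((trueCIF - trueCIF.mean())**2)
ssRes = torch.sum((estCIF - trueCIF)**2)
r2 = (1 - ssRes / ssTot).item()
print("R^2 = {:.02f}".format(r2))
```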
#### File: svGPFA/scripts/doPlotLatentsSamplesFromEstimatedModel.py
```python
import sys
import pdb
import argparse
import pickle
import torch
import plotly.graph_objs as go
sys.path.append("../src")
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("estNumber", help="Estimation number", type=int)
parser.add_argument("nSamples", help="Number of samples to plot", type=int)
parser.add_argument("--trialToPlot", default=0, help="Trial to plot", type=int)
parser.add_argument("--latentToPlot", default=0, help="Latent to plot", type=int)
parser.add_argument("--startTimeToPlot", help="Start time to plot", default=0.0, type=float)
parser.add_argument("--endTimeToPlot", help="End time to plot", default=4.0, type=float)
parser.add_argument("--sampleRateInPlot", help="Sample rate in plot", default=1e+2, type=float)
parser.add_argument("--nodge", help="Kernel covariance matrix on inducing points nodge", default=1e-3, type=float)
parser.add_argument("--modelFilenamePattern", default="results/{:08d}_estimatedModel.pickle", help="Estimated model filename pattern")
parser.add_argument("--figFilenamePattern", default="figures/{:08d}_sampledLatents_trial{:d}_latent{:d}_nSamples{:d}.{:s}", help="Figure filename pattern")
args = parser.parse_args()
estNumber = args.estNumber
nSamples = args.nSamples
trialToPlot = args.trialToPlot
latentToPlot = args.latentToPlot
startTimeToPlot = args.startTimeToPlot
endTimeToPlot = args.endTimeToPlot
sampleRateInPlot = args.sampleRateInPlot
nodge = args.nodge
modelFilenamePattern = args.modelFilenamePattern
figFilenamePattern = args.figFilenamePattern
modelFilename = modelFilenamePattern.format(estNumber)
with open(modelFilename, "rb") as f: modelRes = pickle.load(f)
model = modelRes["model"]
nTrials = model.getIndPointsLocs()[0].shape[0]
timesOneTrial = torch.arange(start=startTimeToPlot, end=endTimeToPlot, step=1.0/sampleRateInPlot)
times = torch.empty(nTrials, len(timesOneTrial), 1)
for r in range(nTrials):
times[r,:,0] = timesOneTrial
# r for trial, k for latent, n for sample number, t for time
# latentsSamples[r][k,n,t]
# latentsMeans[r][k,t]
# latentsSTDs[r][k,t]
latentSamples, latentsMeans, latentsSTDs = model._eLL._svEmbeddingAllTimes._svPosteriorOnLatents.sample(times=times, nSamples=nSamples, regFactor=nodge)
fig = go.Figure()
for n in range(nSamples):
trace = go.Scatter(
x=times[trialToPlot,:,0],
y=latentSamples[trialToPlot][latentToPlot, n, :],
# line=dict(color='rgb(0,100,80)'),
# line=dict(color='blue'),
mode='lines+markers',
name="sample {:03d}".format(n),
showlegend=True,
)
fig.add_trace(trace)
fig.update_xaxes(title_text="Time (sec)")
fig.update_yaxes(title_text="Latent Value")
fig.write_image(figFilenamePattern.format(estNumber, trialToPlot, latentToPlot, nSamples, "png"))
fig.write_html(figFilenamePattern.format(estNumber, trialToPlot, latentToPlot, nSamples, "html"))
fig.show()
pdb.set_trace()
if __name__=="__main__":
main(sys.argv)
```
#### File: svGPFA/scripts/doPlotLatentsTruePythonAndMatlab.py
```python
import sys
import os
import torch
import pdb
import pickle
import argparse
import configparser
import matplotlib.pyplot as plt
from scipy.io import loadmat
sys.path.append("../src")
import plot.svGPFA.plotUtilsPlotly
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("pEstNumber", help="Python's estimation number", type=int)
parser.add_argument("trialToPlot", help="Trial to plot", type=int)
parser.add_argument("--deviceName", help="name of device (cpu or cuda)", default="cpu")
args = parser.parse_args()
pEstNumber = args.pEstNumber
trialToPlot = args.trialToPlot
deviceName = args.deviceName
marker = 'x'
pEstimMetaDataFilename = "results/{:08d}_leasSimulation_estimation_metaData_{:s}.ini".format(pEstNumber, deviceName)
pEstConfig = configparser.ConfigParser()
pEstConfig.read(pEstimMetaDataFilename)
mEstNumber = int(pEstConfig["data"]["mEstNumber"])
mEstParamsFilename = "../../matlabCode/scripts/results/{:08d}-pointProcessEstimationParams.ini".format(mEstNumber)
mEstConfig = configparser.ConfigParser()
mEstConfig.read(mEstParamsFilename)
mSimNumber = int(mEstConfig["data"]["simulationNumber"])
mSimFilename = "../../matlabCode/scripts/results/{:08d}-pointProcessSimulation.mat".format(mSimNumber)
mModelSaveFilename = "../../matlabCode/scripts/results/{:08d}-pointProcessEstimationRes.mat".format(mEstNumber)
pModelSaveFilename = "results/{:08d}_leasSimulation_estimatedModel_cpu.pickle".format(pEstNumber)
staticFigFilename = "figures/{:08d}_truePythonMatlabLatentsPointProcess_trial{:d}.png".format(pEstNumber, trialToPlot)
dynamicFigFilename = "figures/{:08d}_truePythonMatlabLatentsPointProcess_trial{:d}.html".format(pEstNumber, trialToPlot)
loadRes = loadmat(mSimFilename)
nLatents = loadRes["trueLatents"].shape[1]
nTrials = loadRes["trueLatents"].shape[0]
nSamples = loadRes["testTimes"][:,0].shape[0]
tTimes = torch.from_numpy(loadRes["testTimes"][:,0]).type(torch.DoubleTensor)
tLatents_tmp = [[torch.from_numpy(loadRes["trueLatents"][t,l]).type(torch.DoubleTensor).squeeze() for l in range(nLatents)] for t in range(nTrials)]
tLatents = torch.empty((nTrials, nSamples, nLatents))
for t in range(nTrials):
for l in range(nLatents):
tLatents[t,:,l] = tLatents_tmp[t][l]
loadRes = loadmat(mModelSaveFilename)
mTimes = torch.from_numpy(loadRes["testTimes"][:,0]).type(torch.DoubleTensor).squeeze()
mMeanLatents_tmp = torch.from_numpy(loadRes["meanEstimatedLatents"]).type(torch.DoubleTensor)
mMeanLatents = torch.empty((nTrials, nSamples, nLatents))
for t in range(nTrials):
for l in range(nLatents):
mMeanLatents[t,:,l] = mMeanLatents_tmp[:,l,t]
mVarLatents_tmp = torch.from_numpy(loadRes["varEstimatedLatents"]).type(torch.DoubleTensor)
mVarLatents = torch.empty((nTrials, nSamples, nLatents))
for t in range(nTrials):
for l in range(nLatents):
mVarLatents[t,:,l] = mVarLatents_tmp[:,l,t]
with open(pModelSaveFilename, "rb") as f: res = pickle.load(f)
pModel = res["model"]
with torch.no_grad():
pTestMuK, pTestVarK = pModel.predictLatents(newTimes=mTimes)
pTimes = mTimes
fig = plot.svGPFA.plotUtilsPlotly.\
getPlotTruePythonAndMatlabLatentsPlotly(tTimes=tTimes,
tLatents=tLatents,
pTimes=pTimes,
pMuK=pTestMuK,
pVarK=pTestVarK,
mTimes=mTimes,
mMuK=mMeanLatents,
mVarK=mVarLatents,
trialToPlot=trialToPlot,
)
fig.write_image(staticFigFilename)
fig.write_html(dynamicFigFilename)
fig.show()
pdb.set_trace()
if __name__=="__main__":
main(sys.argv)
```
#### File: svGPFA/scripts/doPlotLowerBoundVsLengthscaleAndPeriodGenerativeParams.py
```python
import sys
import pdb
import argparse
import pickle
import configparser
import torch
import numpy as np
import plotly.io as pio
sys.path.append("../src")
import utils.svGPFA.initUtils
import utils.svGPFA.configUtils
import utils.svGPFA.miscUtils
import stats.kernels
import stats.svGPFA.svGPFAModelFactory
import plot.svGPFA.plotUtilsPlotly
def getReferenceParams(model, latent):
kernelsParams = model.getKernelsParams()
refParams = kernelsParams[latent]
refParams = [refParams[i].clone() for i in range(len(refParams))]
return refParams
def updateKernelParams(model, period, lengthscale, latent):
kernelsParams = model.getKernelsParams()
kernelsParams[latent][0] = lengthscale
kernelsParams[latent][1] = period
model.buildKernelsMatrices()
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("simResNumber", help="simulation result number", type=int)
parser.add_argument("indPointsLocsKMSRegEpsilon", help="regularization epsilong for the inducing points locations covariance", type=float)
parser.add_argument("--latent", help="Parameter latent number", type=int, default=0)
parser.add_argument("--lowerBoundQuantile", help="Quantile of the smallest lower bount to plot", type=float, default=0.5)
parser.add_argument("--lengthscaleStartValue", help="Center value to plot for lengthscale parameter", type=float)
parser.add_argument("--lengthscaleScale", help="Scale for lengthscale parameter", type=float)
parser.add_argument("--lengthscaleScaledDT", help="Scaled half width for the lengthscale parameter", type=float, default=1.0)
parser.add_argument("--lengthscaleNSamples", help="Number of samples for lengthscale parameter", type=float, default=100)
parser.add_argument("--periodStartValue", help="Center value for period parameter", type=float)
parser.add_argument("--periodScale", help="Scale for period parameter", type=float)
parser.add_argument("--periodScaledDT", help="Scaled half width for period parameter", type=float)
parser.add_argument("--periodNSamples", help="Number of samples for period parameter", type=float, default=6.5)
parser.add_argument("--zMin", help="Minimum z value", type=float, default=None)
parser.add_argument("--zMax", help="Minimum z value", type=float, default=None)
parser.add_argument("--nQuad", help="Number of quadrature points", type=int, default=200)
args = parser.parse_args()
simResNumber = args.simResNumber
indPointsLocsKMSRegEpsilon = args.indPointsLocsKMSRegEpsilon
latent = args.latent
lowerBoundQuantile = args.lowerBoundQuantile
lengthscaleStartValue = args.lengthscaleStartValue
lengthscaleScale = args.lengthscaleScale
lengthscaleScaledDT = args.lengthscaleScaledDT
lengthscaleNSamples = args.lengthscaleNSamples
periodStartValue = args.periodStartValue
periodScale = args.periodScale
periodScaledDT = args.periodScaledDT
periodNSamples = args.periodNSamples
zMin = args.zMin
zMax = args.zMax
# load data and initial values
simResConfigFilename = "results/{:08d}_simulation_metaData.ini".format(simResNumber)
simResConfig = configparser.ConfigParser()
simResConfig.read(simResConfigFilename)
simInitConfigFilename = simResConfig["simulation_params"]["simInitConfigFilename"]
simResFilename = simResConfig["simulation_results"]["simResFilename"]
simInitConfig = configparser.ConfigParser()
simInitConfig.read(simInitConfigFilename)
nLatents = int(simInitConfig["control_variables"]["nLatents"])
nNeurons = int(simInitConfig["control_variables"]["nNeurons"])
trialsLengths = [float(s) for s in simInitConfig["control_variables"]["trialsLengths"][1:-1].split(",")]
nTrials = len(trialsLengths)
firstIndPointLoc = float(simInitConfig["control_variables"]["firstIndPointLoc"])
nQuad = args.nQuad
with open(simResFilename, "rb") as f: simRes = pickle.load(f)
spikesTimes = simRes["spikes"]
KzzChol = simRes["KzzChol"]
indPointsMeans = simRes["indPointsMeans"]
C, d = utils.svGPFA.configUtils.getLinearEmbeddingParams(CFilename=simInitConfig["embedding_params"]["C_filename"], dFilename=simInitConfig["embedding_params"]["d_filename"])
legQuadPoints, legQuadWeights = utils.svGPFA.miscUtils.getLegQuadPointsAndWeights(nQuad=nQuad, trialsLengths=trialsLengths)
baseKernels = utils.svGPFA.configUtils.getKernels(nLatents=nLatents, config=simInitConfig, forceUnitScale=True)
baseParams = baseKernels[0].getParams()
kernel = stats.kernels.PeriodicKernel(scale=1.0, lengthscaleScale=lengthscaleScale, periodScale=periodScale)
kernel.setParams(params=torch.tensor([baseParams[0]*lengthscaleScale, baseParams[1]*periodScale]))
kernels = [kernel]
kernelsParams0 = utils.svGPFA.initUtils.getKernelsParams0(kernels=kernels, noiseSTD=0.0)
# Z0 = utils.svGPFA.initUtils.getIndPointLocs0(nIndPointsPerLatent=nIndPointsPerLatent, trialsLengths=trialsLengths, firstIndPointLoc=firstIndPointLoc)
Z0 = utils.svGPFA.configUtils.getIndPointsLocs0(nLatents=nLatents, nTrials=nTrials, config=simInitConfig)
nIndPointsPerLatent = [Z0[k].shape[1] for k in range(nLatents)]
# patch to accommodate Lea's equal number of inducing points across trials
qMu0 = [[] for k in range(nLatents)]
for k in range(nLatents):
qMu0[k] = torch.empty((nTrials, nIndPointsPerLatent[k], 1), dtype=torch.double)
for r in range(nTrials):
qMu0[k][r,:,:] = indPointsMeans[r][k]
# end patch
srQSigma0Vecs = utils.svGPFA.initUtils.getSRQSigmaVecsFromSRMatrices(srMatrices=KzzChol)
qUParams0 = {"qMu0": qMu0, "srQSigma0Vecs": srQSigma0Vecs}
kmsParams0 = {"kernelsParams0": kernelsParams0,
"inducingPointsLocs0": Z0}
qKParams0 = {"svPosteriorOnIndPoints": qUParams0,
"kernelsMatricesStore": kmsParams0}
qHParams0 = {"C0": C, "d0": d}
initialParams = {"svPosteriorOnLatents": qKParams0,
"svEmbedding": qHParams0}
quadParams = {"legQuadPoints": legQuadPoints,
"legQuadWeights": legQuadWeights}
# create model
model = stats.svGPFA.svGPFAModelFactory.SVGPFAModelFactory.buildModel(
conditionalDist=stats.svGPFA.svGPFAModelFactory.PointProcess,
linkFunction=stats.svGPFA.svGPFAModelFactory.ExponentialLink,
embeddingType=stats.svGPFA.svGPFAModelFactory.LinearEmbedding,
kernels=kernels)
model.setMeasurements(measurements=spikesTimes)
model.setInitialParams(initialParams=initialParams)
model.setQuadParams(quadParams=quadParams)
model.setIndPointsLocsKMSRegEpsilon(indPointsLocsKMSRegEpsilon=indPointsLocsKMSRegEpsilon)
model.buildKernelsMatrices()
refParams = getReferenceParams(model=model, latent=latent)
refParamsLowerBound = model.eval()
lengthscaleScaledStartValue = lengthscaleStartValue*lengthscaleScale
lengthscaleScaledEndValue = lengthscaleScaledStartValue+lengthscaleScaledDT*lengthscaleNSamples
lengthscaleScaledValues = np.arange(lengthscaleScaledStartValue, lengthscaleScaledEndValue, lengthscaleScaledDT)
periodScaledStartValue = periodStartValue*periodScale
periodScaledEndValue = periodScaledStartValue+periodScaledDT*periodNSamples
periodScaledValues = np.arange(periodScaledStartValue, periodScaledEndValue, periodScaledDT)
allLowerBoundValues = []
allUnlengthscaleScaledValues = []
allUnperiodScaledValues = []
for i in range(len(periodScaledValues)):
print("Processing period {:f} ({:d}/{:d})".format(periodScaledValues[i]/periodScale, i, len(periodScaledValues)))
for j in range(len(lengthscaleScaledValues)):
updateKernelParams(model=model, lengthscale=lengthscaleScaledValues[j], period=periodScaledValues[i], latent=latent)
lowerBound = model.eval()
if(torch.isinf(lowerBound).item()):
pdb.set_trace()
allLowerBoundValues.append(lowerBound.item())
allUnperiodScaledValues.append(periodScaledValues[i]/periodScale)
allUnlengthscaleScaledValues.append(lengthscaleScaledValues[j]/lengthscaleScale)
title = "Kernel Periodic, Latent {:d}, Epsilon {:f}".format(latent, indPointsLocsKMSRegEpsilon)
figFilenamePattern = "figures/{:08d}_generativeParams_epsilon{:f}_kernel_periodic_latent{:d}.{{:s}}".format(simResNumber, indPointsLocsKMSRegEpsilon, latent)
fig = plot.svGPFA.plotUtilsPlotly.getPlotLowerBoundVsTwoParamsParam(param1Values=allUnperiodScaledValues, param2Values=allUnlengthscaleScaledValues, lowerBoundValues=allLowerBoundValues, refParam1=refParams[0], refParam2=refParams[1], refParamText="Generative Value", refParamsLowerBound=refParamsLowerBound, title=title, lowerBoundQuantile=lowerBoundQuantile, param1Label="Period", param2Label="Lengthscale", lowerBoundLabel="Lower Bound", zMin=zMin, zMax=zMax)
fig.write_image(figFilenamePattern.format("png"))
fig.write_html(figFilenamePattern.format("html"))
pio.renderers.default = "browser"
fig.show()
pdb.set_trace()
if __name__=="__main__":
main(sys.argv)
```
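Schematically, the script performs a 2D grid scan of the lower bound over (period, lengthscale) around the generative values. The sketch below shows the same pattern with a hypothetical objective standing in for `model.eval()`.
```python
import numpy as np

def lowerBound(period, lengthscale):
    # hypothetical stand-in for the svGPFA lower bound
    return -((period - 5.0)**2 + (lengthscale - 2.0)**2)

periods = np.arange(4.0, 6.0, 0.25)
lengthscales = np.arange(1.0, 3.0, 0.25)
values = [(p, l, lowerBound(p, l)) for p in periods for l in lengthscales]
best = max(values, key=lambda v: v[2])
print("best (period, lengthscale): ({:.2f}, {:.2f})".format(best[0], best[1]))
```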
#### File: svGPFA/scripts/doPlotPeriodicKernelParamsForBatchPython.py
```python
import sys
import os
import pdb
import pickle
import argparse
import csv
import numpy as np
import plotly.graph_objs as go
import plotly.io as pio
pio.renderers.default = "browser"
sys.path.append("../src")
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("--labelsAndEstNumbersFilename", help="Filename containing the labels ane model estimation numbers of all models to plot",
default="../slurm/data/labelsEstNumbers20secSim.csv")
parser.add_argument("--modelFilenamePattern", help="Filename of the pickle file where the model was saved", default="results/{:08d}_estimatedModel.pickle")
parser.add_argument("--figFilenamePattern", help="Filename pattern of the plot figure", default="figures/periodicKernelParamsOfModelBatch.{:s}")
parser.add_argument("--xlabel", help="Figure xlabel", default="Period")
parser.add_argument("--ylabel", help="Figure ylabel", default="Lengthscale")
args = parser.parse_args()
labelsAndEstNumbersFilename = args.labelsAndEstNumbersFilename
modelFilenamePattern = args.modelFilenamePattern
figFilenamePattern = args.figFilenamePattern
xlabel = args.xlabel
ylabel = args.ylabel
fig = go.Figure()
with open(labelsAndEstNumbersFilename) as f:
csvReader = csv.reader(f, delimiter=" ")
for row in csvReader:
label = row[0]
modelFilename = row[1]
with open(modelFilename, "rb") as f: res = pickle.load(f)
iterationModelParams = res["iterationModelParams"]
trace = go.Scatter(
x=iterationModelParams[:,1], # period
y=iterationModelParams[:,0], # lengthscale
# mode="lines+markers",
mode="markers",
name=label,
showlegend=True,
)
fig.add_trace(trace)
fig.update_yaxes(title_text=ylabel)
fig.update_xaxes(title_text=xlabel)
fig.write_image(figFilenamePattern.format("png"))
fig.write_html(figFilenamePattern.format("html"))
fig.show()
if __name__=="__main__":
main(sys.argv)
```
#### File: svGPFA/scripts/doPlotSVGPFAEstimates.py
```python
import sys
import os
import pdb
import math
from scipy.io import loadmat
import torch
import pickle
import argparse
import configparser
import pandas as pd
import sklearn.metrics
# import statsmodels.tsa.stattools
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
sys.path.append("../src")
import stats.pointProcess.tests
import utils.svGPFA.configUtils
import utils.svGPFA.miscUtils
import plot.svGPFA.plotUtils
import plot.svGPFA.plotUtilsPlotly
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("estResNumber", help="estimation result number", type=int)
parser.add_argument("--trialToPlot", help="trial to plot", type=int, default=0)
parser.add_argument("--neuronToPlot", help="neuron to plot", type=int, default=0)
parser.add_argument("--dtCIF", help="neuron to plot", type=float, default=1e-3)
parser.add_argument("--ksTestGamma", help="gamma value for KS test", type=int, default=10)
parser.add_argument("--nTestPoints", help="number of test points where to plot latents", type=int, default=2000)
args = parser.parse_args()
estResNumber = args.estResNumber
trialToPlot = args.trialToPlot
neuronToPlot = args.neuronToPlot
dtCIF = args.dtCIF
ksTestGamma = args.ksTestGamma
nTestPoints = args.nTestPoints
estimResMetaDataFilename = "results/{:08d}_estimation_metaData.ini".format(estResNumber)
modelSaveFilename = "results/{:08d}_estimatedModel.pickle".format(estResNumber)
lowerBoundHistVsIterNoFigFilenamePattern = "figures/{:08d}_lowerBoundHistVSIterNo.{{:s}}".format(estResNumber)
lowerBoundHistVsElapsedTimeFigFilenamePattern = "figures/{:08d}_lowerBoundHistVsElapsedTime.{{:s}}".format(estResNumber)
latentsFigFilenamePattern = "figures/{:08d}_estimatedLatent_trial{:03d}_neuron{:03d}.{{:s}}".format(estResNumber, trialToPlot, neuronToPlot)
ksTestTimeRescalingNumericalCorrectionFigFilename = "figures/{:08d}_ksTestTimeRescaling_numericalCorrection_trial{:03d}_neuron{:03d}.png".format(estResNumber, trialToPlot, neuronToPlot)
trueAndEstimatedCIFsFigFilenamePattern = "figures/{:08d}_trueAndEstimatedCIFs_trial{:03d}_neuron{:03d}.{{:s}}".format(estResNumber, trialToPlot, neuronToPlot)
rocFigFilename = "figures/{:08d}_rocAnalysis_trial{:03d}_neuron{:03d}.png".format(estResNumber, trialToPlot, neuronToPlot)
kernelsParamsFigFilenamePattern = "figures/{:08d}_trueAndEstimatedKernelsParams.{{:s}}".format(estResNumber)
embeddingParamsFigFilenamePattern = "figures/{:08d}_trueAndEstimatedEmbeddingParams.{{:s}}".format(estResNumber)
ksTestTimeRescalingAnalyticalCorrectionFigFilename = "figures/{:08d}_ksTestTimeRescaling_analyticalCorrection_trial{:03d}_neuron{:03d}.png".format(estResNumber, trialToPlot, neuronToPlot)
timeRescalingDiffCDFsFigFilename = "figures/{:08d}_timeRescalingDiffCDFs_analyticalCorrection_trial{:03d}_neuron{:03d}.png".format(estResNumber, trialToPlot, neuronToPlot)
timeRescaling1LagScatterPlotFigFilename = "figures/{:08d}_timeRescaling1LagScatterPlot_analyticalCorrection_trial{:03d}_neuron{:03d}.png".format(estResNumber, trialToPlot, neuronToPlot)
timeRescalingACFFigFilename = "figures/{:08d}_timeRescalingACF_analyticalCorrection_trial{:03d}_neuron{:03d}.png".format(estResNumber, trialToPlot, neuronToPlot)
estimResConfig = configparser.ConfigParser()
estimResConfig.read(estimResMetaDataFilename)
simResNumber = int(estimResConfig["simulation_params"]["simResNumber"])
simResConfigFilename = "results/{:08d}_simulation_metaData.ini".format(simResNumber)
simResConfig = configparser.ConfigParser()
simResConfig.read(simResConfigFilename)
simInitConfigFilename = simResConfig["simulation_params"]["simInitConfigFilename"]
simInitConfig = configparser.ConfigParser()
simInitConfig.read(simInitConfigFilename)
nLatents = int(simInitConfig["control_variables"]["nLatents"])
nNeurons = int(simInitConfig["control_variables"]["nNeurons"])
dtCIF = float(simInitConfig["control_variables"]["dtCIF"])
trialsLengths = [float(s) for s in simInitConfig["control_variables"]["trialsLengths"][1:-1].split(",")]
nTrials = len(trialsLengths)
simResFilename = simResConfig["simulation_results"]["simResFilename"]
CFilename = simInitConfig["embedding_params"]["C_filename"]
dFilename = simInitConfig["embedding_params"]["d_filename"]
C, d = utils.svGPFA.configUtils.getLinearEmbeddingParams(CFilename=CFilename, dFilename=dFilename)
tIndPointsLocs = utils.svGPFA.configUtils.getIndPointsLocs0(nLatents=nLatents, nTrials=nTrials, config=simInitConfig)
with open(simResFilename, "rb") as f: simRes = pickle.load(f)
spikesTimes = simRes["spikes"]
trueLatentsSamples = simRes["latents"]
simCIFsValues = simRes["cifValues"]
trueLatentsSamples = [trueLatentsSamples[r][:nLatents,:] for r in range(nTrials)]
trueLatentsTimes = simRes["times"]
trueLatentsMeans = simRes["latentsMeans"]
trueLatentsMeans = [trueLatentsMeans[r][:nLatents,:] for r in range(nTrials)]
trueLatentsSTDs = simRes["latentsSTDs"]
trueLatentsSTDs = [trueLatentsSTDs[r][:nLatents,:] for r in range(nTrials)]
timesTrueValues = torch.linspace(0, torch.max(torch.tensor(trialsLengths)), trueLatentsSamples[0].shape[1])
testTimes = torch.linspace(0, torch.max(torch.tensor(spikesTimes[0][0])), nTestPoints)
with open(modelSaveFilename, "rb") as f: estResults = pickle.load(f)
lowerBoundHist = estResults["lowerBoundHist"]
elapsedTimeHist = estResults["elapsedTimeHist"]
model = estResults["model"]
# plot lower bound history
fig = plot.svGPFA.plotUtilsPlotly.getPlotLowerBoundHist(lowerBoundHist=lowerBoundHist)
fig.write_image(lowerBoundHistVsIterNoFigFilenamePattern.format("png"))
fig.write_html(lowerBoundHistVsIterNoFigFilenamePattern.format("html"))
fig = plot.svGPFA.plotUtilsPlotly.getPlotLowerBoundHist(elapsedTimeHist=elapsedTimeHist, lowerBoundHist=lowerBoundHist)
fig.write_image(lowerBoundHistVsElapsedTimeFigFilenamePattern.format("png"))
fig.write_html(lowerBoundHistVsElapsedTimeFigFilenamePattern.format("html"))
# plot true and estimated latents
testMuK, testVarK = model.predictLatents(newTimes=trueLatentsTimes[0])
eIndPointsLocs = model.getIndPointsLocs()
fig = plot.svGPFA.plotUtilsPlotly.getPlotTrueAndEstimatedLatents(tTimes=trueLatentsTimes[0], tLatentsSamples=trueLatentsSamples, tLatentsMeans=trueLatentsMeans, tLatentsSTDs=trueLatentsSTDs, tIndPointsLocs=tIndPointsLocs, eTimes=trueLatentsTimes[0], eLatentsMeans=testMuK, eLatentsSTDs=torch.sqrt(testVarK), eIndPointsLocs=eIndPointsLocs, trialToPlot=trialToPlot)
fig.write_image(latentsFigFilenamePattern.format("png"))
fig.write_html(latentsFigFilenamePattern.format("html"))
# KS test time rescaling with numerical correction
T = torch.tensor(trialsLengths).max()
oneTrialCIFTimes = torch.arange(0, T, dtCIF)
cifTimes = torch.unsqueeze(torch.ger(torch.ones(nTrials), oneTrialCIFTimes), dim=2)
with torch.no_grad():
emcifValues = model.computeCIFsMeans(times=cifTimes)
epmcifValues = model.computeExpectedCIFs(times=cifTimes)
spikesTimesKS = spikesTimes[trialToPlot][neuronToPlot]
cifTimesKS = cifTimes[trialToPlot,:,0]
cifValuesKS = epmcifValues[trialToPlot][neuronToPlot]
title = "Trial {:d}, Neuron {:d} ({:d} spikes)".format(trialToPlot, neuronToPlot, len(spikesTimesKS))
diffECDFsX, diffECDFsY, estECDFx, estECDFy, simECDFx, simECDFy, cb = stats.pointProcess.tests.KSTestTimeRescalingNumericalCorrection(spikesTimes=spikesTimesKS, cifTimes=oneTrialCIFTimes, cifValues=cifValuesKS, gamma=ksTestGamma)
plot.svGPFA.plotUtils.plotResKSTestTimeRescalingNumericalCorrection(diffECDFsX=diffECDFsX, diffECDFsY=diffECDFsY, estECDFx=estECDFx, estECDFy=estECDFy, simECDFx=simECDFx, simECDFy=simECDFy, cb=cb, figFilename=ksTestTimeRescalingNumericalCorrectionFigFilename, title=title)
plt.close("all")
# CIF
fig = plot.svGPFA.plotUtilsPlotly.getPlotSimulatedAndEstimatedCIFs(tTimes=timesTrueValues, tCIF=simCIFsValues[trialToPlot][neuronToPlot], tLabel="True", eMeanTimes=oneTrialCIFTimes, eMeanCIF=emcifValues[trialToPlot][neuronToPlot], eMeanLabel="Mean", ePosteriorMeanTimes=oneTrialCIFTimes, ePosteriorMeanCIF=epmcifValues[trialToPlot][neuronToPlot], ePosteriorMeanLabel="Posterior Mean", title=title)
fig.write_image(trueAndEstimatedCIFsFigFilenamePattern.format("png"))
fig.write_html(trueAndEstimatedCIFsFigFilenamePattern.format("html"))
# ROC predictive analysis
pk = cifValuesKS*dtCIF
bins = pd.interval_range(start=0, end=int(T), periods=len(pk))
cutRes, _ = pd.cut(spikesTimesKS, bins=bins, retbins=True)
Y = torch.from_numpy(cutRes.value_counts().values)
fpr, tpr, thresholds = sklearn.metrics.roc_curve(Y, pk, pos_label=1)
roc_auc = sklearn.metrics.auc(fpr, tpr)
plot.svGPFA.plotUtils.plotResROCAnalysis(fpr=fpr, tpr=tpr, auc=roc_auc, title=title, figFilename=rocFigFilename)
plt.close("all")
# plot model params
# tLatentsMeansFuncs = utils.svGPFA.configUtils.getLatentsMeansFuncs(nLatents=nLatents, nTrials=nTrials, config=simInitConfig)
# trialsTimes = utils.svGPFA.miscUtils.getTrialsTimes(trialsLengths=trialsLengths, dt=dtCIF)
# tLatentsMeans = utils.svGPFA.miscUtils.getLatentsMeanFuncsSamples(latentsMeansFuncs=tLatentsMeansFuncs, trialsTimes=trialsTimes, dtype=C.dtype)
# kernelsParams = model.getKernelsParams()
# kernels = utils.svGPFA.configUtils.getKernels(nLatents=nLatents, config=simInitConfig, forceUnitScale=True)
# with torch.no_grad():
# latentsMeans, _ = model.predictLatents(newTimes=trialsTimes[0])
# fig = plot.svGPFA.plotUtilsPlotly.getPlotTrueAndEstimatedKernelsParams(trueKernels=kernels, estimatedKernelsParams=kernelsParams)
# fig.write_image(kernelsParamsFigFilenamePattern.format("png"))
# fig.write_html(kernelsParamsFigFilenamePattern.format("html"))
#
estimatedC, estimatedD = model.getSVEmbeddingParams()
fig = plot.svGPFA.plotUtilsPlotly.getPlotTrueAndEstimatedEmbeddingParams(trueC=C, trueD=d, estimatedC=estimatedC, estimatedD=estimatedD)
fig.write_image(embeddingParamsFigFilenamePattern.format("png"))
fig.write_html(embeddingParamsFigFilenamePattern.format("html"))
# KS test time rescaling with analytical correction
t0 = math.floor(cifTimesKS.min())
tf = math.ceil(cifTimesKS.max())
dt = (cifTimesKS[1]-cifTimesKS[0]).item()
utSRISIs, uCDF, cb, utRISIs = stats.pointProcess.tests.KSTestTimeRescalingAnalyticalCorrectionUnbinned(spikesTimes=spikesTimesKS, cifValues=cifValuesKS, t0=t0, tf=tf, dt=dt)
sUTRISIs, _ = torch.sort(utSRISIs)
plot.svGPFA.plotUtils.plotResKSTestTimeRescalingAnalyticalCorrection(sUTRISIs=sUTRISIs, uCDF=uCDF, cb=cb, title=title, figFilename=ksTestTimeRescalingAnalyticalCorrectionFigFilename)
plt.close("all")
plot.svGPFA.plotUtils.plotDifferenceCDFs(sUTRISIs=sUTRISIs, uCDF=uCDF, cb=cb, figFilename=timeRescalingDiffCDFsFigFilename),
plt.close("all")
plot.svGPFA.plotUtils.plotScatter1Lag(x=utRISIs, title=title, figFilename=timeRescaling1LagScatterPlotFigFilename)
plt.close("all")
# acfRes, confint = statsmodels.tsa.stattools.acf(x=utRISIs, unbiased=True, alpha=0.05)
# plot.svGPFA.plotUtils.plotACF(acf=acfRes, Fs=1/dt, confint=confint, title=title, figFilename=timeRescalingACFFigFilename),
# plt.close("all")
pdb.set_trace()
if __name__=="__main__":
main(sys.argv)
```
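Background for the KS tests above: by the time-rescaling theorem (Brown et al., 2002), if spikes come from a point process with conditional intensity λ(t), the rescaled inter-spike intervals τₖ = ∫λ(u)du between consecutive spikes are i.i.d. Exp(1), so 1 − exp(−τₖ) should be Uniform(0, 1). A self-contained sketch with a toy intensity (not using the repo's `stats.pointProcess.tests` API):
```python
import numpy as np
from scipy.stats import kstest

dt = 1e-3
t = np.arange(0.0, 20.0, dt)
lam = 5.0 + 3.0 * np.sin(2.0 * np.pi * t)      # toy intensity, always > 0
spikes = t[np.random.rand(len(t)) < lam * dt]  # Bernoulli approximation
Lam = np.cumsum(lam) * dt                      # integrated intensity
taus = np.diff(np.interp(spikes, t, Lam))      # rescaled ISIs, ~ Exp(1)
z = 1.0 - np.exp(-taus)                        # ~ Uniform(0, 1)
print(kstest(z, "uniform"))                    # should not reject
```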
#### File: svGPFA/scripts/doSaveCdSimulation.py
```python
import sys
import pdb
import argparse
import math
import numpy as np
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("nNeurons", help="Number of neurons", type=int)
parser.add_argument("nLatents", help="Number of latents", type=int)
parser.add_argument("maxCIF", help="Maximum of CIF", type=float)
parser.add_argument("meanCIF", help="Mean of CIF", type=float)
args = parser.parse_args()
nNeurons = args.nNeurons
nLatents = args.nLatents
maxCIF = args.maxCIF
meanCIF = args.meanCIF
C_filename = "data/C_{:03d}neurons_{:02}latents_{:.02f}maxCIF_{:.02f}meanCIF.csv".format(nNeurons, nLatents, maxCIF, meanCIF)
d_filename = "data/d_{:03d}neurons_{:02}latents_{:.02f}maxCIF_{:.02f}meanCIF.csv".format(nNeurons, nLatents, maxCIF, meanCIF)
d_amplitude = math.log(meanCIF)
C_maxAmplitude = (math.log(maxCIF)-math.log(meanCIF))/nLatents
C = np.random.uniform(low=-1.0, high=1.0, size=(nNeurons, nLatents))*C_maxAmplitude
d = np.ones(nNeurons)*d_amplitude
np.savetxt(C_filename, C, delimiter=",")
np.savetxt(d_filename, d, delimiter=",")
pdb.set_trace()
if __name__=="__main__":
main(sys.argv)
```
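Assuming the simulator applies an exponential link λ = exp(Cx + d) to latents bounded in [−1, 1] (an assumption; the link is not shown in this file), the two amplitudes above make exp(d) the baseline rate and cap the CIF at maxCIF, which the sketch below verifies numerically.
```python
import math
import numpy as np

# Worst case: every latent at +/-1 and every C entry at its extreme gives
# C @ x + d <= nLatents * C_maxAmplitude + log(meanCIF) = log(maxCIF).
nNeurons, nLatents, maxCIF, meanCIF = 20, 3, 40.0, 10.0
C_maxAmplitude = (math.log(maxCIF) - math.log(meanCIF)) / nLatents
C = np.random.uniform(-1.0, 1.0, size=(nNeurons, nLatents)) * C_maxAmplitude
d = np.ones(nNeurons) * math.log(meanCIF)
x = np.random.uniform(-1.0, 1.0, size=nLatents)
cif = np.exp(C @ x + d)
assert cif.max() <= maxCIF + 1e-9
```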
#### File: svGPFA/scripts/doSaveConstantdSimulation.py
```python
import sys
import pdb
import argparse
import math
import numpy as np
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("constant", help="Constant value", type=float)
parser.add_argument("nNeurons", help="Number of neurons", type=int)
args = parser.parse_args()
constant = args.constant
nNeurons = args.nNeurons
d_filename = "data/d_constant_{:.2f}constant_{:03d}neurons.csv".format(constant, nNeurons)
d = np.ones(nNeurons)*constant
np.savetxt(d_filename, d, delimiter=",")
pdb.set_trace()
if __name__=="__main__":
main(sys.argv)
```
#### File: plot/svGPFA/plotUtilsPlotly.py
```python
import pdb
import math
import torch
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import plotly.graph_objs as go
import plotly.subplots
import plotly
# import plotly.io as pio
import plotly.express as px
# spike rates and times
def getPlotSpikeRatesForAllTrialsAndAllNeurons(spikesRates, xlabel="Neuron", ylabel="Average Spike Rate (Hz)", legendLabelPattern = "Trial {:d}"):
nTrials = spikesRates.shape[0]
nNeurons = spikesRates.shape[1]
data = []
layout = {
"xaxis": {"title": xlabel},
"yaxis": {"title": ylabel},
}
neuronsIndices = np.arange(nNeurons)
for r in range(nTrials):
data.append(
{
"type": "scatter",
"mode": "lines+markers",
"name": legendLabelPattern.format(r),
"x": neuronsIndices,
"y": spikesRates[r,:]
},
)
fig = go.Figure(
data=data,
layout=layout,
)
return fig
def getSimulatedSpikesTimesPlotMultipleTrials(spikesTimes, xlabel="Time (sec)", ylabel="Neuron", titlePattern="Trial {:d}"):
nTrials = len(spikesTimes)
subplotsTitles = ["trial={:d}".format(r) for r in range(nTrials)]
fig = plotly.subplots.make_subplots(rows=nTrials, cols=1, shared_xaxes=True, shared_yaxes=True, subplot_titles=subplotsTitles)
for r in range(nTrials):
for n in range(len(spikesTimes[r])):
trace = go.Scatter(
x=spikesTimes[r][n].numpy(),
y=n*np.ones(len(spikesTimes[r][n])),
mode="markers",
marker=dict(size=3, color="black"),
showlegend=False,
# hoverinfo="skip",
)
fig.add_trace(trace, row=r+1, col=1)
if r==nTrials-1:
fig.update_xaxes(title_text=xlabel, row=r+1, col=1)
if r==math.floor(nTrials/2):
fig.update_yaxes(title_text=ylabel, row=r+1, col=1)
fig.update_layout(
{
"plot_bgcolor": "rgba(0, 0, 0, 0)",
"paper_bgcolor": "rgba(0, 0, 0, 0)",
}
)
return fig
def getSimulatedSpikesTimesPlotOneTrial(spikesTimes, title, xlabel="Time (sec)", ylabel="Neuron"):
fig = go.Figure()
for n in range(len(spikesTimes)):
trace = go.Scatter(
x=spikesTimes[n].numpy(),
y=n*np.ones(len(spikesTimes[n])),
mode="markers",
marker=dict(size=3, color="black"),
showlegend=False,
# hoverinfo="skip",
)
fig.add_trace(trace)
fig.update_xaxes(title_text=xlabel)
fig.update_yaxes(title_text=ylabel)
fig.update_layout(title=title)
fig.update_layout(
{
"plot_bgcolor": "rgba(0, 0, 0, 0)",
"paper_bgcolor": "rgba(0, 0, 0, 0)",
}
)
return fig
# embedding
def getPlotTrueAndEstimatedEmbeddingParams(trueC, trueD,
estimatedC, estimatedD,
linestyleTrue="solid",
linestyleEstimated="dash",
marker="asterisk",
xlabel="Neuron Index",
ylabel="Coefficient Value"):
figDic = {
"data": [],
"layout": {
"xaxis": {"title": xlabel},
"yaxis": {"title": ylabel},
},
}
neuronIndices = np.arange(trueC.shape[0])
for i in range(estimatedC.shape[1]):
figDic["data"].append(
{
"type": "scatter",
"name": "true C[{:d}]".format(i),
"x": neuronIndices,
"y": trueC[:,i],
"line": {"dash": linestyleTrue},
# "marker_symbol": marker,
},
)
figDic["data"].append(
{
"type": "scatter",
"name": "estimated C[{:d}]".format(i),
"x": neuronIndices,
"y": estimatedC[:,i],
"line": {"dash": linestyleEstimated},
# "marker_symbol": marker,
},
)
figDic["data"].append(
{
"type": "scatter",
"name": "true d",
"x": neuronIndices,
"y": trueD[:,0],
"line": {"dash": linestyleTrue},
# "marker_symbol": marker,
},
)
figDic["data"].append(
{
"type": "scatter",
"name": "estimated d",
"x": neuronIndices,
"y": estimatedD,
"line": {"dash": linestyleEstimated},
# "marker_symbol": marker,
},
)
fig = go.Figure(
data=figDic["data"],
layout=figDic["layout"],
)
return fig
def getSimulatedEmbeddingPlot(times, samples, means, stds, title,
cbAlpha = 0.2,
cbFillColorPattern="rgba(0,0,255,{:f})",
samplesLineColor="black",
meanLineColor="blue",
xlabel="Time (sec)",
ylabel="Embedding"):
# tSamples[r], tMeans[r], tSTDs[r],
# eMean[r], eSTDs[r] \in nNeurons x nSamples
# pio.renderers.default = "browser"
#
ci = 1.96*stds
x = times
x_rev = x.flip(dims=[0])
yMeans = means
ySamples = samples
yMeans_upper = yMeans + ci
yMeans_lower = yMeans - ci
yMeans_lower = yMeans_lower.flip(dims=[0])
x = x.detach().numpy()
yMeans = yMeans.detach().numpy()
ySamples = ySamples.detach().numpy()
yMeans_upper = yMeans_upper.detach().numpy()
yMeans_lower = yMeans_lower.detach().numpy()
traceCB = go.Scatter(
x=np.concatenate((x, x_rev)),
y=np.concatenate((yMeans_upper, yMeans_lower)),
fill="tozerox",
fillcolor=cbFillColorPattern.format(cbAlpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="True",
)
traceMean = go.Scatter(
x=x,
y=yMeans,
line=dict(color=meanLineColor),
mode="lines",
name="Mean",
showlegend=True,
)
traceSamples = go.Scatter(
x=x,
y=ySamples,
line=dict(color=samplesLineColor),
mode="lines",
name="Sample",
showlegend=True,
)
fig = go.Figure()
fig.add_trace(traceCB)
fig.add_trace(traceMean)
fig.add_trace(traceSamples)
fig.update_yaxes(title_text=ylabel)
fig.update_xaxes(title_text=xlabel)
fig.update_layout(title=title)
return fig
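# Note on the confidence-band construction used throughout this module
# (added comment, not in the original source): Plotly has no native band
# trace, so each band is drawn as a filled polygon -- the x values are
# concatenated with their reverse, the upper bound with the reversed lower
# bound, and fill="tozerox" shades the enclosed region.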
def getPlotTrueAndEstimatedEmbedding(tTimes, tSamples, tMeans, tSTDs,
eTimes, eMeans, eSTDs,
CBalpha = 0.2,
tCBFillColorPattern="rgba(0,0,255,{:f})",
tSamplesLineColor="black",
tMeanLineColor="blue",
eCBFillColorPattern="rgba(255,0,0,{:f})",
eMeanLineColor="red",
xlabel="Time (sec)",
ylabel="Embedding",
title=""):
# tSamples[r], tMeans[r], tSTDs[r],
# eMean[r], eSTDs[r] \in nNeurons x nSamples
# pio.renderers.default = "browser"
#
eCI = 1.96*eSTDs
xE = eTimes
xE_rev = xE.flip(dims=[0])
yE = eMeans
yE_upper = yE + eCI
yE_lower = yE - eCI
yE_lower = yE_lower.flip(dims=[0])
xE = xE.detach().numpy()
yE = yE.detach().numpy()
yE_upper = yE_upper.detach().numpy()
yE_lower = yE_lower.detach().numpy()
tCI = 1.96*tSTDs
xT = tTimes
xT_rev = xT.flip(dims=[0])
yTMeans = tMeans
yTSamples = tSamples
yTMeans_upper = yTMeans + tCI
yTMeans_lower = yTMeans - tCI
yTMeans_lower = yTMeans_lower.flip(dims=[0])
xT = xT.detach().numpy()
yTMeans = yTMeans.detach().numpy()
yTSamples = yTSamples.detach().numpy()
yTMeans_upper = yTMeans_upper.detach().numpy()
yTMeans_lower = yTMeans_lower.detach().numpy()
traceECB = go.Scatter(
x=np.concatenate((xE, xE_rev)),
y=np.concatenate((yE_upper, yE_lower)),
fill="tozerox",
fillcolor=eCBFillColorPattern.format(CBalpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="Estimated",
)
traceEMean = go.Scatter(
x=xE,
y=yE,
# line=dict(color="rgb(0,100,80)"),
line=dict(color=eMeanLineColor),
mode="lines",
name="Estimated Mean",
showlegend=True,
)
traceTCB = go.Scatter(
x=np.concatenate((xT, xT_rev)),
y=np.concatenate((yTMeans_upper, yTMeans_lower)),
fill="tozerox",
fillcolor=tCBFillColorPattern.format(CBalpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="True",
)
traceTMean = go.Scatter(
x=xT,
y=yTMeans,
line=dict(color=tMeanLineColor),
mode="lines",
name="True Mean",
showlegend=True,
)
traceTSamples = go.Scatter(
x=xT,
y=yTSamples,
line=dict(color=tSamplesLineColor),
mode="lines",
name="True Sample",
showlegend=True,
)
fig = go.Figure()
fig.add_trace(traceECB)
fig.add_trace(traceEMean)
fig.add_trace(traceTCB)
fig.add_trace(traceTMean)
fig.add_trace(traceTSamples)
fig.update_yaxes(title_text=ylabel)
fig.update_xaxes(title_text=xlabel)
fig.update_layout(title=title)
return fig
# inducing points
def getPlotTrueAndEstimatedIndPointsLocs(trueIndPointsLocs,
estimatedIndPointsLocs,
linetypeTrue="solid",
linetypeEstimated="dash",
labelTrue="True",
labelEstimated="Estimated",
marker="asterisk",
xlabel="Inducing Point Index",
ylabel="Inducing Point Location"):
def getTracesOneSetTrueAndEstimatedIndPointsLocs(
trueIndPointsLocs,
estimatedIndPointsLocs,
labelTrue, labelEstimated,
useLegend):
traceTrue = go.Scatter(
y=trueIndPointsLocs,
mode="lines+markers",
name=labelTrue,
line=dict(dash=linetypeTrue),
showlegend=useLegend)
traceEstimated = go.Scatter(
y=estimatedIndPointsLocs,
mode="lines+markers",
name=labelEstimated,
line=dict(dash=linetypeEstimated),
showlegend=useLegend)
return traceTrue, traceEstimated
nLatents = len(trueIndPointsLocs)
nTrials = trueIndPointsLocs[0].shape[0]
fig = plotly.subplots.make_subplots(rows=nTrials, cols=nLatents)
for r in range(nTrials):
for k in range(nLatents):
if r==0 and k==nLatents-1:
useLegend = True
else:
useLegend = False
traceTrue, traceEstimated = getTracesOneSetTrueAndEstimatedIndPointsLocs(trueIndPointsLocs=trueIndPointsLocs[k][r,:,0], estimatedIndPointsLocs=estimatedIndPointsLocs[k][r,:,0], labelTrue=labelTrue, labelEstimated=labelEstimated, useLegend=useLegend)
fig.add_trace(traceTrue, row=r+1, col=k+1)
fig.add_trace(traceEstimated, row=r+1, col=k+1)
fig.update_layout(title="Trial {:d}, Latent {:d}".format(r, k))
fig.update_yaxes(title_text=ylabel, row=nTrials//2+1, col=1)
fig.update_xaxes(title_text=xlabel, row=nTrials, col=nLatents//2+1)
return fig
def getPlotTrueAndEstimatedIndPointsLocsOneTrialOneLatent(
trueIndPointsLocs,
estimatedIndPointsLocs,
title,
linetypeTrue="solid",
linetypeEstimated="dash",
labelTrue="True",
labelEstimated="Estimated",
marker="asterisk",
xlabel="Inducing Point Index",
ylabel="Inducing Point Location"):
def getTracesOneSetTrueAndEstimatedIndPointsLocs(
trueIndPointsLocs,
estimatedIndPointsLocs,
labelTrue, labelEstimated,
useLegend):
traceTrue = go.Scatter(
y=trueIndPointsLocs,
mode="lines+markers",
name=labelTrue,
line=dict(dash=linetypeTrue),
showlegend=useLegend)
traceEstimated = go.Scatter(
y=estimatedIndPointsLocs,
mode="lines+markers",
name=labelEstimated,
line=dict(dash=linetypeEstimated),
showlegend=useLegend)
return traceTrue, traceEstimated
fig = go.Figure()
traceTrue, traceEstimated = getTracesOneSetTrueAndEstimatedIndPointsLocs(trueIndPointsLocs=trueIndPointsLocs, estimatedIndPointsLocs=estimatedIndPointsLocs, labelTrue=labelTrue, labelEstimated=labelEstimated, useLegend=True)
fig.add_trace(traceTrue)
fig.add_trace(traceEstimated)
fig.update_layout(title=title)
fig.update_yaxes(title_text=ylabel)
fig.update_xaxes(title_text=xlabel)
return fig
# variational params
def getPlotTrueAndEstimatedIndPointsMeans(trueIndPointsMeans,
estimatedIndPointsMeans,
linetypeTrue="solid",
linetypeEstimated="dash",
labelTrue="True",
labelEstimated="Estimated",
xlabel="Inducing Point Index",
ylabel="Inducing Point Mean"):
def getTracesOneSetTrueAndEstimatedIndPointsMeans(
trueIndPointsMean,
estimatedIndPointsMean,
labelTrue, labelEstimated,
useLegend):
traceTrue = go.Scatter(
y=trueIndPointsMean,
mode="lines+markers",
name=labelTrue,
line=dict(dash=linetypeTrue),
showlegend=useLegend)
traceEstimated = go.Scatter(
y=estimatedIndPointsMean,
mode="lines+markers",
name=labelEstimated,
line=dict(dash=linetypeEstimated),
showlegend=useLegend)
return traceTrue, traceEstimated
# trueIndPointsMeans[r][k] \in nInd[k]
# qMu[k] \in nTrials x nInd[k] x 1
nTrials = len(trueIndPointsMeans)
nLatents = len(trueIndPointsMeans[0])
fig = plotly.subplots.make_subplots(rows=nTrials, cols=nLatents)
for r in range(nTrials):
for k in range(nLatents):
trueIndPointsMean = trueIndPointsMeans[r][k][:,0]
estimatedIndPointsMean = estimatedIndPointsMeans[k][r,:,0]
if r==0 and k==nLatents-1:
useLegend = True
else:
useLegend = False
traceTrue, traceEstimated = getTracesOneSetTrueAndEstimatedIndPointsMeans(trueIndPointsMean=trueIndPointsMean, estimatedIndPointsMean=estimatedIndPointsMean, labelTrue=labelTrue, labelEstimated=labelEstimated, useLegend=useLegend)
fig.add_trace(traceTrue, row=r+1, col=k+1)
fig.add_trace(traceEstimated, row=r+1, col=k+1)
fig.update_layout(title="Trial {:d}, Latent {:d}".format(r, k))
fig.update_yaxes(title_text=ylabel, row=nTrials//2+1, col=1)
fig.update_xaxes(title_text=xlabel, row=nTrials, col=nLatents//2+1)
return fig
def getPlotTrueAndEstimatedIndPointsMeansOneTrialOneLatent(
trueIndPointsMeans,
estimatedIndPointsMeans,
trueIndPointsSTDs,
estimatedIndPointsSTDs,
title,
cbAlpha = 0.2,
trueCBFillColorPattern="rgba(0,0,255,{:f})",
trueMeanLineColor="blue",
estimatedCBFillColorPattern="rgba(255,0,0,{:f})",
estimatedMeanLineColor="red",
xlabel="Inducing Point Index",
ylabel="Inducing Point Mean"):
tIndPointsIndices = torch.arange(len(trueIndPointsMeans))
eIndPointsIndices = torch.arange(len(estimatedIndPointsMeans))
eCI = 1.96*estimatedIndPointsSTDs
xE = eIndPointsIndices
xE_rev = xE.flip(dims=[0])
yE = estimatedIndPointsMeans
yE_upper = yE + eCI
yE_lower = yE - eCI
yE_lower = yE_lower.flip(dims=[0])
xE = xE.detach().numpy()
yE = yE.detach().numpy()
yE_upper = yE_upper.detach().numpy()
yE_lower = yE_lower.detach().numpy()
tCI = 1.96*trueIndPointsSTDs
xT = tIndPointsIndices
xT_rev = xT.flip(dims=[0])
yT = trueIndPointsMeans
yT_upper = yT + tCI
yT_lower = yT - tCI
yT_lower = yT_lower.flip(dims=[0])
xT = xT.detach().numpy()
yT = yT.detach().numpy()
yT_upper = yT_upper.detach().numpy()
yT_lower = yT_lower.detach().numpy()
traceECB = go.Scatter(
x=np.concatenate((xE, xE_rev)),
y=np.concatenate((yE_upper, yE_lower)),
fill="tozerox",
fillcolor=estimatedCBFillColorPattern.format(cbAlpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="Estimated",
)
traceEMean = go.Scatter(
x=xE,
y=yE,
# line=dict(color="rgb(0,100,80)"),
line=dict(color=estimatedMeanLineColor),
mode="lines+markers",
name="Estimated Mean",
showlegend=True,
)
traceTCB = go.Scatter(
x=np.concatenate((xT, xT_rev)),
y=np.concatenate((yT_upper, yT_lower)),
fill="tozerox",
fillcolor=trueCBFillColorPattern.format(cbAlpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="True",
)
traceTMean = go.Scatter(
x=xT,
y=yT,
line=dict(color=trueMeanLineColor),
mode="lines+markers",
name="True Mean",
showlegend=True,
)
fig = go.Figure()
fig.add_trace(traceECB)
fig.add_trace(traceEMean)
fig.add_trace(traceTCB)
fig.add_trace(traceTMean)
fig.update_yaxes(title_text=ylabel)
fig.update_xaxes(title_text=xlabel)
fig.update_layout(title=title)
return fig
# def getTracesOneSetTrueAndEstimatedIndPointsMeans(
# trueIndPointsMeans,
# estimatedIndPointsMeans,
# labelTrue, labelEstimated,
# useLegend):
# traceTrue = go.Scatter(
# y=trueIndPointsMeans,
# mode="lines+markers",
# name=labelTrue,
# line=dict(dash=linetypeTrue),
# showlegend=useLegend)
# traceEstimated = go.Scatter(
# y=estimatedIndPointsMeans,
# mode="lines+markers",
# name=labelEstimated,
# line=dict(dash=linetypeEstimated),
# showlegend=useLegend)
# return traceTrue, traceEstimated
#
# # qMu[k] \in nTrials x nInd[k] x 1
# fig = go.Figure()
# traceTrue, traceEstimated = getTracesOneSetTrueAndEstimatedIndPointsMeans(trueIndPointsMeans=trueIndPointsMeans, estimatedIndPointsMeans=estimatedIndPointsMeans, labelTrue=labelTrue, labelEstimated=labelEstimated, useLegend=True)
# fig.add_trace(traceTrue)
# fig.add_trace(traceEstimated)
# fig.update_layout(title=title)
# fig.update_yaxes(title_text=ylabel)
# fig.update_xaxes(title_text=xlabel)
# return fig
def getPlotTrueAndEstimatedIndPointsCovs(trueIndPointsCovs,
estimatedIndPointsCovs,
linetypeTrue="solid",
linetypeEstimated="dash",
labelTruePattern="True[:,{:d}]",
labelEstimatedPattern="Estimated[:,{:d}]",
colorsList=plotly.colors.qualitative.Plotly,
xlabel="Inducing Point Index",
ylabel="Inducing Points Covariance"):
def getTracesOneSetTrueAndEstimatedIndPointsCovs(
trueIndPointsCov,
estimatedIndPointsCov,
labelTruePattern, labelEstimatedPattern,
useLegend):
nCols = trueIndPointsCov.shape[1]
tracesTrue = [[] for i in range(nCols)]
tracesEstimated = [[] for i in range(nCols)]
for i in range(nCols):
color = colorsList[i%len(colorsList)]
tracesTrue[i] = go.Scatter(
y=trueIndPointsCov[:,i],
mode="lines+markers",
name=labelTruePattern.format(i),
line=dict(dash=linetypeTrue, color=color),
showlegend=useLegend)
tracesEstimated[i] = go.Scatter(
y=estimatedIndPointsCov[:,i],
mode="lines+markers",
name=labelEstimatedPattern.format(i),
line=dict(dash=linetypeEstimated, color=color),
showlegend=useLegend)
return tracesTrue, tracesEstimated
# trueIndPointsCovs[r][k] \in nInd[k]
# qMu[k] \in nTrials x nInd[k] x 1
nTrials = len(trueIndPointsCovs)
nLatents = len(trueIndPointsCovs[0])
fig = plotly.subplots.make_subplots(rows=nTrials, cols=nLatents)
for r in range(nTrials):
for k in range(nLatents):
trueIndPointsCov = trueIndPointsCovs[r][k]
estimatedIndPointsCov = estimatedIndPointsCovs[r][k]
if r==0 and k==nLatents-1:
useLegend = True
else:
useLegend = False
tracesTrue, tracesEstimated = getTracesOneSetTrueAndEstimatedIndPointsCovs(trueIndPointsCov=trueIndPointsCov, estimatedIndPointsCov=estimatedIndPointsCov, labelTruePattern=labelTruePattern, labelEstimatedPattern=labelEstimatedPattern, useLegend=useLegend)
for i in range(len(tracesTrue)):
fig.add_trace(tracesTrue[i], row=r+1, col=k+1)
fig.add_trace(tracesEstimated[i], row=r+1, col=k+1)
fig.update_layout(title="Trial {:d}, Latent {:d}".format(r, k))
fig.update_yaxes(title_text=ylabel, row=nTrials//2+1, col=1)
fig.update_xaxes(title_text=xlabel, row=nTrials, col=nLatents//2+1)
return fig
def getPlotTrueAndEstimatedIndPointsCovsOneTrialOneLatent(
trueIndPointsCov,
estimatedIndPointsCov,
title,
linetypeTrue="solid",
linetypeEstimated="dash",
labelTruePattern="True[:,{:d}]",
labelEstimatedPattern="Estimated[:,{:d}]",
colorsList=plotly.colors.qualitative.Plotly,
xlabel="Inducing Point Index",
ylabel="Inducing Points Covariance"):
def getTracesOneSetTrueAndEstimatedIndPointsCovs(
trueIndPointsCov,
estimatedIndPointsCov,
labelTruePattern, labelEstimatedPattern,
useLegend):
nCols = trueIndPointsCov.shape[1]
tracesTrue = [[] for i in range(nCols)]
tracesEstimated = [[] for i in range(nCols)]
for i in range(nCols):
color = colorsList[i%len(colorsList)]
tracesTrue[i] = go.Scatter(
y=trueIndPointsCov[:,i],
mode="lines+markers",
name=labelTruePattern.format(i),
line=dict(dash=linetypeTrue, color=color),
showlegend=useLegend)
tracesEstimated[i] = go.Scatter(
y=estimatedIndPointsCov[:,i],
mode="lines+markers",
name=labelEstimatedPattern.format(i),
line=dict(dash=linetypeEstimated, color=color),
showlegend=useLegend)
return tracesTrue, tracesEstimated
# trueIndPointsCovs[r][k] \in nInd[k]
# qMu[k] \in nTrials x nInd[k] x 1
fig = go.Figure()
tracesTrue, tracesEstimated = getTracesOneSetTrueAndEstimatedIndPointsCovs(trueIndPointsCov=trueIndPointsCov, estimatedIndPointsCov=estimatedIndPointsCov, labelTruePattern=labelTruePattern, labelEstimatedPattern=labelEstimatedPattern, useLegend=True)
for i in range(len(tracesTrue)):
fig.add_trace(tracesTrue[i])
fig.add_trace(tracesEstimated[i])
fig.update_layout(title=title)
fig.update_yaxes(title_text=ylabel)
fig.update_xaxes(title_text=xlabel)
return fig
# latents
def getPlotTruePythonAndMatlabLatents(tTimes, tLatents,
pTimes, pMuK, pVarK,
mTimes, mMuK, mVarK,
trialToPlot=0,
xlabel="Time (sec)",
ylabelPattern="Latent {:d}"):
# pio.renderers.default = "browser"
nLatents = mMuK.shape[2]
fig = plotly.subplots.make_subplots(rows=nLatents, cols=1, shared_xaxes=True)
# titles = ["Trial {:d}".format(trialToPlot)] + ["" for i in range(nLatents)]
title = "Trial {:d}".format(trialToPlot)
for k in range(nLatents):
trueToPlot = tLatents[trialToPlot,:,k]
pMeanToPlot = pMuK[trialToPlot,:,k]
positiveMSE = torch.mean((trueToPlot-pMeanToPlot)**2)
negativeMSE = torch.mean((trueToPlot+pMeanToPlot)**2)
if negativeMSE<positiveMSE:
pMeanToPlot = -pMeanToPlot
pCIToPlot = 1.96*(pVarK[trialToPlot,:,k].sqrt())
mMeanToPlot = mMuK[trialToPlot,:,k]
positiveMSE = torch.mean((trueToPlot-mMeanToPlot)**2)
negativeMSE = torch.mean((trueToPlot+mMeanToPlot)**2)
if negativeMSE<positiveMSE:
mMeanToPlot = -mMeanToPlot
mCIToPlot = 1.96*(mVarK[trialToPlot,:,k].sqrt())
tLatentToPlot = tLatents[trialToPlot,:,k]
x1 = pTimes
x1_rev = x1.flip(dims=[0])
y1 = pMeanToPlot
y1_upper = y1 + pCIToPlot
y1_lower = y1 - pCIToPlot
# y1_lower = y1_lower[::-1] # negative stride not supported in pytorch
y1_lower = y1_lower.flip(dims=[0])
x2 = mTimes
x2_rev = x2.flip(dims=[0])
y2 = mMeanToPlot
y2_upper = y2 + mCIToPlot
y2_lower = y2 - mCIToPlot
# y2_lower = y2_lower[::-1] # negative stride not supported in pytorch
y2_lower = y2_lower.flip(dims=[0])
x3 = tTimes
y3 = tLatentToPlot
trace1 = go.Scatter(
x=np.concatenate((x1, x1_rev)),
y=np.concatenate((y1_upper, y1_lower)),
fill="tozerox",
fillcolor="rgba(255,0,0,0.2)",
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="Python",
)
trace2 = go.Scatter(
x=np.concatenate((x2, x2_rev)),
y=np.concatenate((y2_upper, y2_lower)),
fill="tozerox",
fillcolor="rgba(0,0,255,0.2)",
line=dict(color="rgba(255,255,255,0)"),
name="Matlab",
showlegend=False,
)
trace3 = go.Scatter(
x=x1,
y=y1,
# line=dict(color="rgb(0,100,80)"),
line=dict(color="red"),
mode="lines",
name="Python",
showlegend=(k==0),
)
trace4 = go.Scatter(
x=x2,
y=y2,
# line=dict(color="rgb(0,176,246)"),
line=dict(color="blue"),
mode="lines",
name="Matlab",
showlegend=(k==0),
)
trace5 = go.Scatter(
x=x3,
y=y3,
line=dict(color="black"),
mode="lines",
name="True",
showlegend=(k==0),
)
fig.add_trace(trace1, row=k+1, col=1)
fig.add_trace(trace2, row=k+1, col=1)
fig.add_trace(trace3, row=k+1, col=1)
fig.add_trace(trace4, row=k+1, col=1)
fig.add_trace(trace5, row=k+1, col=1)
fig.update_yaxes(title_text=ylabelPattern.format(k+1), row=k+1, col=1)
# pdb.set_trace()
fig.update_layout(title_text=title)
fig.update_xaxes(title_text=xlabel, row=3, col=1)
return fig
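# Note (added comment, not in the original source): the positiveMSE /
# negativeMSE comparison above handles the sign indeterminacy of GPFA
# latents -- a latent and its loading column can both flip sign without
# changing the model, so each estimate is aligned to the true latent by
# whichever sign yields the smaller mean-squared error. The same trick is
# used in getPlotTrueAndEstimatedLatents below.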
def getPlotTrueAndEstimatedLatents(tTimes, tLatentsSamples, tLatentsMeans, tLatentsSTDs, tIndPointsLocs,
eTimes, eLatentsMeans, eLatentsSTDs, eIndPointsLocs,
trialToPlot=0,
CBalpha = 0.2,
tCBFillColorPattern="rgba(0,0,255,{:f})",
tSamplesLineColor="black",
tMeanLineColor="blue",
eCBFillColorPattern="rgba(255,0,0,{:f})",
eMeanLineColor="red",
tIndPointsLocsColor="rgba(0,0,255,0.5)",
eIndPointsLocsColor="rgba(255,0,0,0.5)",
xlabel="Time (sec)",
ylabelPattern="Latent {:d}"):
eLatentsMeans = eLatentsMeans.detach()
eLatentsSTDs = eLatentsSTDs.detach()
eIndPointsLocs = [item.detach() for item in eIndPointsLocs]
# pio.renderers.default = "browser"
nLatents = eLatentsMeans.shape[2]
fig = plotly.subplots.make_subplots(rows=nLatents, cols=1, shared_xaxes=True)
    title = "Trial {:d}".format(trialToPlot)
nTrials = len(tLatentsSTDs)
#
# latentsMaxs = [1.96*torch.max(tLatentsSTDs[r]).item() for r in range(nTrials)]
# latentsMaxs.append((torch.max(eLatentsMeans)+1.96*torch.max(eLatentsSTDs)).item())
# ymax = max(latentsMaxs)
#
# latentsMins = [1.96*torch.max(tLatentsSTDs[r]).item() for r in range(nTrials)]
# latentsMins.append((torch.min(eLatentsMeans)-1.96*torch.max(eLatentsSTDs)).item())
# ymin = min(latentsMins)
#
for k in range(nLatents):
tSamplesToPlot = tLatentsSamples[trialToPlot][k,:]
tMeanToPlot = tLatentsMeans[trialToPlot][k,:]
tSTDToPlot = tLatentsSTDs[trialToPlot][k,:]
tCIToPlot = 1.96*tSTDToPlot
eMeanToPlot = eLatentsMeans[trialToPlot,:,k]
eSTDToPlot = eLatentsSTDs[trialToPlot,:,k]
positiveMSE = torch.mean((tMeanToPlot-eMeanToPlot)**2)
negativeMSE = torch.mean((tMeanToPlot+eMeanToPlot)**2)
if negativeMSE<positiveMSE:
eMeanToPlot = -eMeanToPlot
eCIToPlot = 1.96*eSTDToPlot
ymax = max(torch.max(tMeanToPlot+tCIToPlot), torch.max(eMeanToPlot+eCIToPlot))
ymin = min(torch.min(tMeanToPlot-tCIToPlot), torch.min(eMeanToPlot-eCIToPlot))
xE = eTimes
xE_rev = xE.flip(dims=[0])
yE = eMeanToPlot
yE_upper = yE + eCIToPlot
yE_lower = yE - eCIToPlot
yE_lower = yE_lower.flip(dims=[0])
xE = xE.detach().numpy()
yE = yE.detach().numpy()
yE_upper = yE_upper.detach().numpy()
yE_lower = yE_lower.detach().numpy()
xT = tTimes
xT_rev = xT.flip(dims=[0])
yT = tMeanToPlot
yTSamples = tSamplesToPlot
yT_upper = yT + tCIToPlot
yT_lower = yT - tCIToPlot
yT_lower = yT_lower.flip(dims=[0])
xT = xT.detach().numpy()
yT = yT.detach().numpy()
yTSamples = yTSamples.detach().numpy()
yT_upper = yT_upper.detach().numpy()
yT_lower = yT_lower.detach().numpy()
traceECB = go.Scatter(
x=np.concatenate((xE, xE_rev)),
y=np.concatenate((yE_upper, yE_lower)),
fill="tozerox",
fillcolor=eCBFillColorPattern.format(CBalpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="Estimated",
)
traceEMean = go.Scatter(
x=xE,
y=yE,
# line=dict(color="rgb(0,100,80)"),
line=dict(color=eMeanLineColor),
mode="lines",
name="Estimated",
showlegend=(k==0),
)
traceTCB = go.Scatter(
x=np.concatenate((xT, xT_rev)),
y=np.concatenate((yT_upper, yT_lower)),
fill="tozerox",
fillcolor=tCBFillColorPattern.format(CBalpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="True",
)
traceTMean = go.Scatter(
x=xT,
y=yT,
line=dict(color=tMeanLineColor),
mode="lines",
name="True",
showlegend=(k==0),
)
traceTSamples = go.Scatter(
x=xT,
y=yTSamples,
line=dict(color=tSamplesLineColor),
mode="lines",
name="True",
showlegend=(k==0),
)
fig.add_trace(traceECB, row=k+1, col=1)
fig.add_trace(traceEMean, row=k+1, col=1)
fig.add_trace(traceTCB, row=k+1, col=1)
fig.add_trace(traceTMean, row=k+1, col=1)
fig.add_trace(traceTSamples, row=k+1, col=1)
fig.update_yaxes(title_text=ylabelPattern.format(k), row=k+1, col=1)
for n in range(tIndPointsLocs[k].shape[1]):
fig.add_shape(
dict(
type="line",
x0=tIndPointsLocs[k][trialToPlot,n,0],
y0=ymin,
x1=tIndPointsLocs[k][trialToPlot,n,0],
y1=ymax,
line=dict(
color=tIndPointsLocsColor,
width=3
),
),
row=k+1,
col=1,
)
fig.add_shape(
dict(
type="line",
x0=eIndPointsLocs[k][trialToPlot,n,0],
y0=ymin,
x1=eIndPointsLocs[k][trialToPlot,n,0],
y1=ymax,
line=dict(
color=eIndPointsLocsColor,
width=3
),
),
row=k+1,
col=1,
)
fig.update_xaxes(title_text=xlabel, row=nLatents, col=1)
fig.update_layout(title_text=title)
return fig
def getPlotTrueAndEstimatedLatentsOneTrialOneLatent(
tTimes, tLatentsSamples, tLatentsMeans, tLatentsSTDs,
eTimes, eLatentsMeans, eLatentsSTDs,
title,
CBalpha = 0.2,
tCBFillColorPattern="rgba(0,0,255,{:f})",
tSamplesLineColor="black",
tMeanLineColor="blue",
eCBFillColorPattern="rgba(255,0,0,{:f})",
eMeanLineColor="red",
xlabel="Time (sec)",
ylabel="Latent Value"):
# pio.renderers.default = "browser"
fig = go.Figure()
tCI = 1.96*tLatentsSTDs
eCI = 1.96*eLatentsSTDs
ymax = max(torch.max(tLatentsMeans+tCI), torch.max(eLatentsMeans+eCI))
ymin = min(torch.min(tLatentsMeans-tCI), torch.min(eLatentsMeans-eCI))
xE = eTimes
xE_rev = xE.flip(dims=[0])
yE = eLatentsMeans
yE_upper = yE + eCI
yE_lower = yE - eCI
yE_lower = yE_lower.flip(dims=[0])
xE = xE.detach().numpy()
yE = yE.detach().numpy()
yE_upper = yE_upper.detach().numpy()
yE_lower = yE_lower.detach().numpy()
xT = tTimes
xT_rev = xT.flip(dims=[0])
yT = tLatentsMeans
yTSamples = tLatentsSamples
yT_upper = yT + tCI
yT_lower = yT - tCI
yT_lower = yT_lower.flip(dims=[0])
xT = xT.detach().numpy()
yT = yT.detach().numpy()
yTSamples = yTSamples.detach().numpy()
yT_upper = yT_upper.detach().numpy()
yT_lower = yT_lower.detach().numpy()
traceECB = go.Scatter(
x=np.concatenate((xE, xE_rev)),
y=np.concatenate((yE_upper, yE_lower)),
fill="tozerox",
fillcolor=eCBFillColorPattern.format(CBalpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="Estimated",
)
traceEMean = go.Scatter(
x=xE,
y=yE,
# line=dict(color="rgb(0,100,80)"),
line=dict(color=eMeanLineColor),
mode="lines",
name="Estimated",
showlegend=True,
)
traceTCB = go.Scatter(
x=np.concatenate((xT, xT_rev)),
y=np.concatenate((yT_upper, yT_lower)),
fill="tozerox",
fillcolor=tCBFillColorPattern.format(CBalpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
name="True",
)
traceTMean = go.Scatter(
x=xT,
y=yT,
line=dict(color=tMeanLineColor),
mode="lines",
name="True",
showlegend=True,
)
traceTSamples = go.Scatter(
x=xT,
y=yTSamples,
line=dict(color=tSamplesLineColor),
mode="lines",
name="True",
showlegend=True,
)
fig.add_trace(traceECB)
fig.add_trace(traceEMean)
fig.add_trace(traceTCB)
fig.add_trace(traceTMean)
fig.add_trace(traceTSamples)
# for n in range(tIndPointsLocs[k].shape[1]):
# fig.add_shape(
# dict(
# type="line",
# x0=tIndPointsLocs[k][trialToPlot,n,0],
# y0=ymin,
# x1=tIndPointsLocs[k][trialToPlot,n,0],
# y1=ymax,
# line=dict(
# color=tIndPointsLocsColor,
# width=3
# ),
# ),
# row=k+1,
# col=1,
# )
# fig.add_shape(
# dict(
# type="line",
# x0=eIndPointsLocs[k][trialToPlot,n,0],
# y0=ymin,
# x1=eIndPointsLocs[k][trialToPlot,n,0],
# y1=ymax,
# line=dict(
# color=eIndPointsLocsColor,
# width=3
# ),
# ),
# row=k+1,
# col=1,
# )
fig.update_layout(title_text=title)
fig.update_xaxes(title_text=xlabel)
fig.update_yaxes(title_text=ylabel)
return fig
def getPlotTrueAndEstimatedLatentsMeans(trueLatentsMeans,
estimatedLatentsMeans,
trialsTimes,
colorTrue="blue",
colorEstimated="red",
labelTrue="True",
labelEstimated="Estimated",
xlabel="Time (sec)",
ylabel="Latent Value"):
def getTracesOneSetTrueAndEstimatedLatentsMeans(
trueLatentMean,
estimatedLatentMean,
times,
labelTrue, labelEstimated,
useLegend):
traceTrue = go.Scatter(
x=times,
y=trueLatentMean,
mode="lines+markers",
name=labelTrue,
line=dict(color=colorTrue),
showlegend=useLegend)
traceEstimated = go.Scatter(
x=times,
y=estimatedLatentMean,
mode="lines+markers",
name=labelEstimated,
line=dict(color=colorEstimated),
showlegend=useLegend)
return traceTrue, traceEstimated
# trueLatentsMeans[r] \in nLatents x nInd[k]
# qMu[k] \in nTrials x nInd[k] x 1
nTrials = len(trueLatentsMeans)
nLatents = trueLatentsMeans[0].shape[0]
fig = plotly.subplots.make_subplots(rows=nTrials, cols=nLatents)
for r in range(nTrials):
times = trialsTimes[r]
for k in range(nLatents):
trueLatentMean = trueLatentsMeans[r][k,:]
estimatedLatentMean = estimatedLatentsMeans[r,:,k]
if r==0 and k==nLatents-1:
useLegend = True
else:
useLegend = False
traceTrue, traceEstimated = getTracesOneSetTrueAndEstimatedLatentsMeans(
trueLatentMean=trueLatentMean,
estimatedLatentMean=estimatedLatentMean,
times=times,
labelTrue=labelTrue,
labelEstimated=labelEstimated,
useLegend=useLegend)
fig.add_trace(traceTrue, row=r+1, col=k+1)
fig.add_trace(traceEstimated, row=r+1, col=k+1)
fig.update_yaxes(title_text=ylabel, row=nTrials//2+1, col=1)
fig.update_xaxes(title_text=xlabel, row=nTrials, col=nLatents//2+1)
return fig
def getSimulatedLatentsPlot(trialsTimes, latentsSamples, latentsMeans,
latentsSTDs, alpha=0.5, marker="x",
xlabel="Time (sec)", ylabel="Amplitude",
width=1250, height=850,
cbFillColorPattern="rgba(0,100,0,{:f})",
meanLineColor="rgb(0,100,00)",
samplesLineColor="rgb(0,0,0)"):
nTrials = len(latentsSamples)
nLatents = latentsSamples[0].shape[0]
subplotsTitles = ["trial={:d}, latent={:d}".format(r, k) for r in range(nTrials) for k in range(nLatents)]
fig = plotly.subplots.make_subplots(rows=nTrials, cols=nLatents, subplot_titles=subplotsTitles)
for r in range(nTrials):
t = trialsTimes[r].numpy()
t_rev = t[::-1]
for k in range(nLatents):
samples = latentsSamples[r][k,:].numpy()
mean = latentsMeans[r][k,:].numpy()
std = latentsSTDs[r][k,:].numpy()
upper = mean+1.96*std
lower = mean-1.96*std
lower_rev = lower[::-1]
traceCB = go.Scatter(
x=np.concatenate((t, t_rev)),
y=np.concatenate((upper, lower_rev)),
fill="tozerox",
fillcolor=cbFillColorPattern.format(alpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
)
traceMean = go.Scatter(
x=t,
y=mean,
line=dict(color=meanLineColor),
mode="lines",
showlegend=False,
)
traceSamples = go.Scatter(
x=t,
y=samples,
line=dict(color=samplesLineColor),
mode="lines",
showlegend=False,
)
fig.add_trace(traceCB, row=r+1, col=k+1)
fig.add_trace(traceMean, row=r+1, col=k+1)
fig.add_trace(traceSamples, row=r+1, col=k+1)
if r==nTrials-1 and k==math.floor(nLatents/2):
fig.update_xaxes(title_text=xlabel, row=r+1, col=k+1)
if r==math.floor(nTrials/2) and k==0:
fig.update_yaxes(title_text=ylabel, row=r+1, col=k+1)
fig.update_layout(
autosize=False,
width=width,
height=height,
)
return fig
def getSimulatedLatentPlot(times, latentSamples, latentMeans,
latentSTDs, title, alpha=0.2, marker="x",
xlabel="Time (sec)", ylabel="Value",
cbFillColorPattern="rgba(0,0,255,{:f})",
meanLineColor="rgb(0,0,255)",
samplesLineColor="rgb(0,0,0)"):
t = times.numpy()
t_rev = t[::-1]
samples = latentSamples.numpy()
mean = latentMeans.numpy()
std = latentSTDs.numpy()
upper = mean+1.96*std
lower = mean-1.96*std
lower_rev = lower[::-1]
traceCB = go.Scatter(
x=np.concatenate((t, t_rev)),
y=np.concatenate((upper, lower_rev)),
fill="tozerox",
fillcolor=cbFillColorPattern.format(alpha),
line=dict(color="rgba(255,255,255,0)"),
showlegend=False,
)
traceMean = go.Scatter(
x=t,
y=mean,
line=dict(color=meanLineColor),
mode="lines",
showlegend=True,
name="Mean",
)
traceSamples = go.Scatter(
x=t,
y=samples,
line=dict(color=samplesLineColor),
mode="lines",
showlegend=True,
name="Sample",
)
fig = go.Figure()
fig.add_trace(traceCB)
fig.add_trace(traceMean)
fig.add_trace(traceSamples)
fig.update_xaxes(title_text=xlabel)
fig.update_yaxes(title_text=ylabel)
fig.update_layout(title=title)
return fig
# kernels
def getPlotTrueAndEstimatedKernelsParams(trueKernels, estimatedKernelsParams,
colorTrue="blue",
colorEstimated="red",
trueLegend="True",
estimatedLegend="Estimated"):
nLatents = len(trueKernels)
titles = ["Kernel {:d}: {:s}".format(i, trueKernels[i].__class__.__name__) for i in range(nLatents)]
fig = plotly.subplots.make_subplots(rows=nLatents, cols=1, subplot_titles=titles)
for k in range(nLatents):
namedParams = trueKernels[k].getNamedParams()
labels = list(namedParams.keys())
trueParams = [z.item() for z in list(namedParams.values())]
estimatedParams = estimatedKernelsParams[k].tolist()
# we are fixing scale to 1.0. This is not great :(
estimatedParams = [1.0] + estimatedParams
if k==0:
showLegend = True
else:
showLegend = False
traceTrue = go.Bar(x=labels, y=trueParams, name=trueLegend, marker_color=colorTrue, showlegend=showLegend)
traceEstimated = go.Bar(x=labels, y=estimatedParams, name=estimatedLegend, marker_color=colorEstimated, showlegend=showLegend)
fig.append_trace(traceTrue, k+1, 1)
fig.append_trace(traceEstimated, k+1, 1)
fig.update_yaxes(title_text="Parameter Value", row=nLatents//2+1, col=1)
return fig
def getPlotTrueAndEstimatedKernelsParamsOneLatent(
trueKernel,
estimatedKernelParams,
title,
colorTrue="blue",
colorEstimated="red",
trueLegend="True",
estimatedLegend="Estimated"):
fig = go.Figure()
namedParams = trueKernel.getNamedParams()
del namedParams["scale"]
labels = list(namedParams.keys())
trueParams = [z.item() for z in list(namedParams.values())]
estimatedParams = estimatedKernelParams.tolist()
traceTrue = go.Bar(x=labels, y=trueParams, name=trueLegend, marker_color=colorTrue, showlegend=True)
traceEstimated = go.Bar(x=labels, y=estimatedParams, name=estimatedLegend, marker_color=colorEstimated, showlegend=True)
fig.add_trace(traceTrue)
fig.add_trace(traceEstimated)
fig.update_yaxes(title_text="Parameter Value")
fig.update_layout(title=title)
return fig
def getPlotTruePythonAndMatlabKernelsParams(kernelsTypes,
trueKernelsParams,
pythonKernelsParams,
matlabKernelsParams,
colorTrue="blue",
colorPython="red",
colorMatlab="green",
trueLegend="True",
pythonLegend="Python",
matlabLegend="Matlab"):
nLatents = len(trueKernelsParams)
titles = ["Kernel {:d}: {:s}".format(k, kernelsTypes[k]) for k in range(nLatents)]
fig = plotly.subplots.make_subplots(rows=nLatents, cols=1, subplot_titles=titles)
for k in range(nLatents):
trueParams = trueKernelsParams[k].tolist()
pythonParams = pythonKernelsParams[k].tolist()
matlabParams = matlabKernelsParams[k].tolist()
if k==0:
showLegend = True
else:
showLegend = False
if kernelsTypes[k]=="PeriodicKernel":
labels = ["Length Scale", "Period"]
elif kernelsTypes[k]=="ExponentialQuadraticKernel":
labels = ["Length Scale"]
else:
raise RuntimeError("Invalid kernel type {:s}".format(kernelsTypes[k]))
traceTrue = go.Bar(x=labels, y=trueParams, name=trueLegend, marker_color=colorTrue, showlegend=showLegend)
tracePython = go.Bar(x=labels, y=pythonParams, name=pythonLegend, marker_color=colorPython, showlegend=showLegend)
traceMatlab = go.Bar(x=labels, y=matlabParams, name=matlabLegend, marker_color=colorMatlab, showlegend=showLegend)
fig.append_trace(traceTrue, k+1, 1)
fig.append_trace(tracePython, k+1, 1)
fig.append_trace(traceMatlab, k+1, 1)
fig.update_yaxes(title_text="Parameter Value", row=nLatents//2+1, col=1)
return fig
def getPlotLowerBoundVsTwoParamsParam(param1Values,
param2Values,
lowerBoundValues,
refParam1,
refParam2,
refParamsLowerBound,
refParamText,
title,
lowerBoundQuantile = 0.5,
param1Label="Parameter 1",
param2Label="Parameter 2",
lowerBoundLabel="Lower Bound",
markerSize=3.0,
markerOpacity=0.8,
markerColorscale="Viridis",
zMin=None, zMax=None,
):
data = {"x": param1Values, "y": param2Values, "z": lowerBoundValues}
df = pd.DataFrame(data)
if zMin is None:
zMin = df.z.quantile(lowerBoundQuantile)
if zMax is None:
zMax = df.z.max()
dfTrimmed = df[df.z>zMin]
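    # drop lower-bound values below the quantile so early, far-from-optimum
    # evaluations don't flatten the colour scale of the 3D scatter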
# fig = go.Figure(data=[go.Scatter3d(x=param1Values, y=param2Values, z=lowerBoundValues, mode="markers")])
# fig = go.Figure(data=[go.Scatter3d(x=dfTrimmed.x, y=dfTrimmed.y, z=dfTrimmed.z, mode="markers")])
# fig.update_layout(scene=dict(zaxis=dict(range=[df.z.max()-1000,df.z.max()+500],)),width=700,)
# fig = px.scatter_3d(dfTrimmed, x='x', y='y', z='z')
fig = go.Figure(data=[go.Scatter3d(x=dfTrimmed.x, y=dfTrimmed.y,
z=dfTrimmed.z, mode="markers",
marker=dict(size=markerSize,
color=dfTrimmed.z,
colorscale=markerColorscale,
opacity=markerOpacity)) ])
# fig = go.Figure(go.Mesh3d(x=dfTrimmed.x, y=dfTrimmed.y, z=dfTrimmed.z))
# fig.add_trace(
# go.Scatter3d(
# x=[refParam1],
# y=[refParam2],
# z=[refParamsLowerBound],
# type="scatter3d", text=[refParamText], mode="text",
# )
# )
fig.update_layout(title=title, scene = dict(xaxis_title = param1Label, yaxis_title = param2Label, zaxis_title = lowerBoundLabel,))
# fig.update_layout(scene = dict(zaxis = dict(range=[zMin,zMax]),))
# fig.update_layout(scene = dict(zaxis=dict(range=[df.z.max()-1000,df.z.max()+500],),),)
# pio.renderers.default = "browser"
# fig.show()
# pdb.set_trace()
return fig
# CIF
def getPlotTruePythonAndMatlabCIFs(tTimes, tCIF, tLabel,
pTimes, pCIF, pLabel,
mTimes, mCIF, mLabel,
xlabel="Time (sec)",
ylabel="CIF",
title=""
):
# pio.renderers.default = "browser"
figDic = {
"data": [],
"layout": {
"xaxis": {"title": xlabel},
"yaxis": {"title": ylabel},
"title": {"text": title},
},
}
figDic["data"].append(
{
"type": "scatter",
"name": tLabel,
"x": tTimes,
"y": tCIF,
},
)
figDic["data"].append(
{
"type": "scatter",
"name": pLabel,
"x": pTimes,
"y": pCIF,
},
)
figDic["data"].append(
{
"type": "scatter",
"name": mLabel,
"x": mTimes,
"y": mCIF,
},
)
fig = go.Figure(
data=figDic["data"],
layout=figDic["layout"],
)
return fig
def getPlotSimulatedAndEstimatedCIFs(tTimes, tCIF, tLabel,
eMeanTimes=None, eMeanCIF=None, eMeanLabel=None,
ePosteriorMeanTimes=None, ePosteriorMeanCIF=None, ePosteriorMeanLabel=None,
xlabel="Time (sec)", ylabel="CIF", title=""):
# pio.renderers.default = "browser"
figDic = {
"data": [],
"layout": {
"xaxis": {"title": xlabel},
"yaxis": {"title": ylabel},
"title": {"text": title},
},
}
figDic["data"].append(
{
"type": "scatter",
"name": tLabel,
"x": tTimes,
"y": tCIF,
},
)
if eMeanCIF is not None:
figDic["data"].append(
{
"type": "scatter",
"name": eMeanLabel,
"x": eMeanTimes,
"y": eMeanCIF,
},
)
if ePosteriorMeanCIF is not None:
figDic["data"].append(
{
"type": "scatter",
"name": ePosteriorMeanLabel,
"x": ePosteriorMeanTimes,
"y": ePosteriorMeanCIF,
},
)
fig = go.Figure(
data=figDic["data"],
layout=figDic["layout"],
)
return fig
def getPlotCIF(times, values, title="", xlabel="Time (sec)", ylabel="Conditional Intensity Function"):
figDic = {
"data": [],
"layout": {
"title": title,
"xaxis": {"title": xlabel},
"yaxis": {"title": ylabel},
},
}
figDic["data"].append(
{
"type": "scatter",
"x": times,
"y": values,
},
)
fig = go.Figure(
data=figDic["data"],
layout=figDic["layout"],
)
return fig
# Lower bound
def getPlotLowerBoundHist(lowerBoundHist, elapsedTimeHist=None,
xlabelIterNumber="Iteration Number",
xlabelElapsedTime="Elapsed Time (sec)",
ylabel="Lower Bound", marker="cross", linestyle="solid"):
if elapsedTimeHist is None:
trace = go.Scatter(
y=lowerBoundHist,
mode="lines+markers",
line={"color": "red", "dash": linestyle},
marker={"symbol": marker},
showlegend=False,
)
xlabel = xlabelIterNumber
else:
trace = go.Scatter(
x=elapsedTimeHist,
y=lowerBoundHist,
mode="lines+markers",
line={"color": "red", "dash": linestyle},
marker_symbol=marker,
showlegend=False,
)
xlabel = xlabelElapsedTime
fig = go.Figure()
fig.add_trace(trace)
fig.update_xaxes(title_text=xlabel)
fig.update_yaxes(title_text=ylabel)
return fig
def getPlotLowerBoundVsOneParam(paramValues, lowerBoundValues, refParams, title, yMin, yMax, lowerBoundLineColor, refParamsLineColors, percMargin=0.1, xlab="Parameter Value", ylab="Lower Bound"):
if math.isinf(yMin):
yMin = lowerBoundValues.min()
if math.isinf(yMax):
yMax = lowerBoundValues.max()
margin = percMargin*max(abs(yMin), abs(yMax))
yMin = yMin - margin
yMax = yMax + margin
layout = {
"title": title,
"xaxis": {"title": xlab},
# "yaxis": {"title": "Lower Bound"},
"yaxis": {"title": ylab, "range": [yMin, yMax]},
}
data = []
data.append(
{
"type": "scatter",
"mode": "lines+markers",
# "mode": "markers",
"x": paramValues,
"y": lowerBoundValues,
"marker": dict(color=lowerBoundLineColor),
"line": dict(color=lowerBoundLineColor),
"name": "lower bound",
},
)
fig = go.Figure(
data=data,
layout=layout,
)
for i in range(len(refParams)):
fig.add_shape(
# Line Vertical
dict(
type="line",
x0=refParams[i],
y0=yMin,
x1=refParams[i],
y1=yMax,
line=dict(
color=refParamsLineColors[i],
width=3
)
))
return fig
```
#### File: stats/pointProcess/sampler.py
```python
import pdb
import random
import torch
class Sampler:
def sampleInhomogeneousPP_thinning(self, cifTimes, cifValues, T):
""" Thining algorithm to sample from an inhomogeneous point process. Algorithm 2 from <NAME> (2016). Thinning algorithms for simulating Point Prcesses.
:param: cifFun: Intensity function of the point process.
:type: cifFun: function
:param: T: the returned samples of the point process :math:`\in [0, T]`
:type: T: double
:param: nGrid: number of points in the grid used to search for the maximum of cifFun.
:type: nGrid: integer
:return: (inhomogeneous, homogeneous): samples of the inhomogeneous and homogenous point process with cif function cifFun.
:rtype: tuple containing two lists
"""
m = 0
t = [0]
s = [0]
lambdaMax = cifValues.max()
        while s[-1]<T:
            u = torch.rand(1)
            w = -torch.log(u)/lambdaMax # w~exponential(lambdaMax)
            s.append(s[-1]+w) # {sm} homogeneous Poisson process
            D = random.uniform(0, 1)
            cifIndex = (cifTimes-s[-1]).abs().argmin() # nearest grid time to the candidate spike
            approxIntensityAtNewPoissonSpike = cifValues[cifIndex]
            if D<=approxIntensityAtNewPoissonSpike/lambdaMax: # accepting with probability
                                                              # cif(s[-1])/lambdaMax
                t.append(s[-1].item()) # {tn} inhomogeneous Poisson process
                m += 1
if t[-1]<=T:
answer = {"inhomogeneous": t[1:], "homogeneous": s[1:]}
else:
answer = {"inhomogeneous": t[1:-1], "homogeneous": s[1:-1]}
return answer
def sampleInhomogeneousPP_timeRescaling(self, cifTimes, cifValues, T):
""" Time rescaling algorithm to sample from an inhomogeneous point
process. Chapter 2 from <NAME>'s Point Process Notes.
:param: cifFun: cif function of the point process.
:type: cifFun: function
:param: T: the returned samples of the point process :math:`\in [0, T]`
:type: T: double
:param: dt: spike-time resolution.
:type: dt: float
:return: samples of the inhomogeneous point process with cif function cifFun.
:rtype: list
"""
s = []
i = 0
dt = cifTimes[1]-cifTimes[0]
while i<(len(cifTimes)-1):
u = torch.rand(1)
z = -torch.log(u) # z~exponential(1.0)
anInt = cifValues[i]*dt
j = i+1
while j<len(cifTimes) and anInt<=z:
anInt += cifValues[j]*dt
j += 1
if anInt>z:
s.append(cifTimes[j-1].item())
i = j
return s
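# Usage sketch (the values here are made up, not from the original repo): both
# samplers take a discretized CIF -- a grid of times plus the CIF values on that
# grid -- and sampleInhomogeneousPP_thinning takes the same three arguments.
if __name__ == "__main__":
    T = 10.0
    cifTimes = torch.linspace(0.0, T, 1000)
    cifValues = 5.0 + 4.0*torch.sin(cifTimes)
    sampler = Sampler()
    spikes = sampler.sampleInhomogeneousPP_timeRescaling(cifTimes=cifTimes,
                                                         cifValues=cifValues, T=T)
    print("sampled {:d} spikes".format(len(spikes)))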
```
#### File: stats/svGPFA/svGPFAModelFactory.py
```python
import stats.svGPFA.kernelsMatricesStore
import stats.svGPFA.svPosteriorOnIndPoints
import stats.svGPFA.svPosteriorOnLatents
import stats.svGPFA.svEmbedding
import stats.svGPFA.expectedLogLikelihood
import stats.svGPFA.klDivergence
import stats.svGPFA.svLowerBound
#:
PointProcess = 0
#:
Poisson = 1
#:
Gaussian = 2
#:
LinearEmbedding = 100
#:
ExponentialLink = 1000
#:
NonExponentialLink = 1001
class SVGPFAModelFactory:
@staticmethod
def buildModel(conditionalDist, linkFunction, embeddingType, kernels):
if conditionalDist==PointProcess:
if embeddingType==LinearEmbedding:
if linkFunction==ExponentialLink:
qU = stats.svGPFA.svPosteriorOnIndPoints.SVPosteriorOnIndPoints()
indPointsLocsKMS = stats.svGPFA.kernelsMatricesStore.IndPointsLocsKMS()
indPointsLocsAndAllTimesKMS = stats.svGPFA.kernelsMatricesStore.IndPointsLocsAndAllTimesKMS()
indPointsLocsAndAssocTimesKMS = stats.svGPFA.kernelsMatricesStore.IndPointsLocsAndAssocTimesKMS()
qKAllTimes = stats.svGPFA.svPosteriorOnLatents.SVPosteriorOnLatentsAllTimes(
svPosteriorOnIndPoints=qU,
indPointsLocsKMS=indPointsLocsKMS,
indPointsLocsAndTimesKMS=indPointsLocsAndAllTimesKMS)
qKAssocTimes = stats.svGPFA.svPosteriorOnLatents.SVPosteriorOnLatentsAssocTimes(
svPosteriorOnIndPoints=qU,
indPointsLocsKMS=indPointsLocsKMS,
indPointsLocsAndTimesKMS=indPointsLocsAndAssocTimesKMS)
qHAllTimes = stats.svGPFA.svEmbedding.LinearSVEmbeddingAllTimes(
svPosteriorOnLatents=qKAllTimes)
qHAssocTimes = stats.svGPFA.svEmbedding.LinearSVEmbeddingAssocTimes(
svPosteriorOnLatents=qKAssocTimes)
eLL = stats.svGPFA.expectedLogLikelihood.PointProcessELLExpLink(
svEmbeddingAllTimes=qHAllTimes,
svEmbeddingAssocTimes=qHAssocTimes)
klDiv = stats.svGPFA.klDivergence.KLDivergence(indPointsLocsKMS=indPointsLocsKMS,
svPosteriorOnIndPoints=qU)
svlb = stats.svGPFA.svLowerBound.SVLowerBound(eLL=eLL, klDiv=klDiv)
svlb.setKernels(kernels=kernels)
else:
raise ValueError("Invalid linkFunction=%s"%
repr(linkFunction))
else:
raise ValueError("Invalid embeddingType=%s"%
repr(embeddingType))
else:
raise ValueError("Invalid conditionalDist=%s"%
repr(conditionalDist))
return svlb
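# Construction sketch (assumed call; the kernel objects come from elsewhere in
# the stats.svGPFA package and are not defined in this file):
#     model = SVGPFAModelFactory.buildModel(conditionalDist=PointProcess,
#                                           linkFunction=ExponentialLink,
#                                           embeddingType=LinearEmbedding,
#                                           kernels=kernels)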
```
#### File: utils/networking/multiprocessingUtils.py
```python
class FileLock:
def __init__(self, filename):
self.filename = filename
with open(self.filename, 'w') as f:
f.write('done')
def lock(self):
with open(self.filename, 'w') as f:
f.write('working')
def unlock(self):
with open(self.filename, 'w') as f:
f.write('done')
    def is_locked(self):
        with open(self.filename, 'r') as f:
            return f.read() == 'working'
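# Usage sketch (hypothetical filename): the lock state is persisted in a small
# sentinel file, so it is visible across processes, but reads/writes are not atomic.
if __name__ == "__main__":
    lock = FileLock("/tmp/worker.lock")
    lock.lock()
    assert lock.is_locked()
    lock.unlock()
    assert not lock.is_locked()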
``` |
{
"source": "joacosaralegui/deteccion-automatica-de-frases-chequeables",
"score": 3
} |
#### File: deteccion-automatica-de-frases-chequeables/src/dataset.py
```python
import glob
import os
import csv
from nltk.tokenize import sent_tokenize
def save_csv(folder, sentences):
"""
Dado un conjunto de frases con etiquetas las guarda en formato csv
"""
filename = os.path.join(folder, 'frases.csv')
with open(filename, mode='w') as sentences_file:
file_writer = csv.writer(sentences_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
for sentence in sentences:
file_writer.writerow([sentence['sentence'],sentence['target']])
def get_sentences(folder):
"""
Dada la dirección de una carpeta recorre todos los .txt presentes y extrae el texto,
luego lo parsea en frases y extrae las etiquetas de chequeable/no chequeable de cada frase
"""
parsed_sentences = []
files_path = os.path.join(folder, '*.txt')
for filename in glob.glob(files_path):
with open(filename, 'r') as f:
text = f.read()
sentences_tokenized = sent_tokenize(text)
sentences_tokenized = [i for s in sentences_tokenized for i in s.split("\n") if len(i) > 1]
parsed_sentences = parsed_sentences + sentences_tokenized
tagged = get_tagged_sentences(parsed_sentences)
return tagged
def get_sentences_csv(folder):
    # Load all the tagged sentences included in the csv file
filename = os.path.join(folder, 'frases_revisadas.csv')
with open(filename, 'r') as f:
sentences_reader = csv.reader(f, delimiter=',',quotechar='"')
sentences = [
{'target': r[1],
'sentence': r[0]} for r in sentences_reader
]
return sentences
def clean(sentence):
#return sentence.replace(',','').replace('.','').replace(';','').replace('[','').replace(']','').replace("(Aplausos.)","").replace("(aplausos)","").replace("%"," PERCENTAGE ").replace("$", " money ")
return sentence.replace("-","").replace(";","").replace(',','').replace(".","")
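# e.g. clean("En 2019, creamos empleo.") -> "En 2019 creamos empleo"
# (illustrative input; only '-', ';', ',' and '.' are stripped)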
def get_tagged_sentences(sentences):
    # From a list of sentences, find all the fact-checkable tags in each one; otherwise tag it as non fact-checkable
tagged_sentences = []
for sentence in sentences:
sentence = clean(sentence)
no_tags = sentence.replace("<chequeable>","").replace("</chequeable>","")
if "<chequeable>" in sentence or "</chequeable>" in sentence:
tagged_sentences.append({'target': 'fact-checkable', 'sentence': no_tags})
else:
tagged_sentences.append({'target': 'non-fact-checkable', 'sentence': sentence})
FOLDER = os.path.join('..','data','tagged_corpus')
# save to csv
save_csv(FOLDER,tagged_sentences)
return tagged_sentences
if __name__=="__main__":
FOLDER = os.path.join('..','data','tagged_corpus')
get_sentences(FOLDER)
```
#### File: deteccion-automatica-de-frases-chequeables/src/feature_extraction.py
```python
import spacy
from nltk import ngrams
from sklearn.base import BaseEstimator, TransformerMixin
from datetime import datetime
import numpy as np
def is_int(s):
"""
Función helper para detectar si un string es entero
"""
try:
int(s)
return True
except ValueError:
return False
class SpacyFeaturizer:
"""
Featurizer basado en el pipeline de Spacy + USE
"""
def __init__(self, spacy_model="es_core_news_lg",use_model="xx_use_lg"):
"""
Inicializa el pipeline
"""
self.nlp = spacy.load(spacy_model)
self.nlp.add_pipe('universal_sentence_encoder',config={"model_name":use_model})
def featurize(self, frases):
"""
Dado una lista de frases retorna una lista de diccionarios con las features de cada frase
Utiliza la función provista como parametro para extraer los features
"""
feature_dicts = []
docs = self.nlp.pipe(frases)
for doc in docs:
feature_dicts.append(self.get_features_tradicionales(doc))
return feature_dicts
def featurize_embs(self, frases):
"""
Dada una lista de frases retorna lista de embeddings
"""
return [np.array(doc.vector) for doc in self.nlp.pipe(frases)]
def get_features_tradicionales(self, doc, use_ngrams=True):
"""
Dado un Doc de Spacy extrae features relevantes en un diccionario
"""
features = {}
        # Entities
for ent in doc.ents:
features[ent.text.lower()] = True
features[ent.label_] = True
        # N-grams of POS tags
if use_ngrams:
pos_tags = [t.pos_ for t in doc]
sentence_ngrams = ngrams(pos_tags,3)
for ngram in sentence_ngrams:
features[str(ngram)] = True
        # Token analysis
for token in doc:
# Morphology: tense, person, mood, etc
for key,value in token.morph.to_dict().items():
features[key+value] = True
# Lemmas + shapes
if "d" in token.shape_:
                # If it is a year, add a feature according to whether it is in the past or the future
# Today reference date
today = datetime.today()
if token.shape_ == "dddd":
value = int(token.text)
if value > 1950 and value < 2050:
if value < today.year:
features["PAST_YEAR"] = True
elif value > today.year:
features["COMING_YEAR"] = True
else:
features["THIS_YEAR"] = True
                # Always add the shape feature anyway
features[token.shape_] = True
else:
features[token.lemma_.lower()] = True
# POS
features[token.pos_] = True
# Dep
features[token.dep_] = 1
# Text
features[token.text.lower()] = True
#import pdb; pdb.set_trace()
return features
class TraditionalFeatureTransformer(BaseEstimator, TransformerMixin):
"""
Featurizer para las features tradicionales. POS, Lemas, Morfología, Entidades, n-gramas
"""
def __init__(self):
self.featurizer = SpacyFeaturizer()
def fit(self, x, y=None):
return self
def transform(self, data):
return self.featurizer.featurize(data)
class EmbeddingsFeatureTransformer(BaseEstimator, TransformerMixin):
"""
Featurizer de representaciones vectoriales
"""
def __init__(self):
self.featurizer = SpacyFeaturizer()
def fit(self, x, y=None):
return self
def transform(self, data):
return self.featurizer.featurize_embs(data)
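# Usage sketch (the sentence is made up; the es_core_news_lg and xx_use_lg models
# must be installed beforehand, e.g. python -m spacy download es_core_news_lg):
if __name__ == "__main__":
    featurizer = SpacyFeaturizer()
    feats = featurizer.featurize(["En 2019 la inflación superó el 50%"])
    print(len(feats[0]), "features extracted")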
``` |
{
"source": "joacosaralegui/elige-tu-propia-aventura-bot",
"score": 3
} |
#### File: elige-tu-propia-aventura-bot/src/selenium_poll_posting.py
```python
import traceback
import time
from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException, TimeoutException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support.ui import Select
import credentials
class URL:
TWITTER = 'http://twitter.com'
TWITTER_STATUS = 'https://twitter.com/botamoud/status/'
class Constants:
USERNAME = credentials.user
    PASSWORD = credentials.password  # assumed attribute; the original value was redacted
GLOBAL_ENTRY_Q = '#globalentry'
class TwitterLocator:
# login stuff
login_btn = (By.XPATH, "//span[text()='Iniciar sesión']")
username = (By.NAME, "session[username_or_email]")
    password = (By.NAME, "session[password]")  # assumed selector; it was partially redacted
login_confirm = (By.XPATH, "//span[text()='Iniciar sesión']")
# tweet stuff
outer_tweet_box = (By.CLASS_NAME, 'public-DraftStyleDefault-block')
tweet_box = (By.CLASS_NAME, "public-DraftEditor-content")
tweet_btn = (By.XPATH, "//span[text()='Tweet']")
another_tweet = (By.XPATH, "//span[text()='Add another Tweet']")
# poll stuff
poll_btn = (By.XPATH, '//div[@aria-label="Add poll"]')
option_one = (By.NAME, 'Choice1')
option_two = (By.NAME, 'Choice2')
extra_opts = (By.XPATH, '//div[@aria-label="Add a choice"]')
option_three = (By.NAME, 'Choice3')
days = (By.ID, 'Days')
hours = (By.ID, 'Hours')
minutes = (By.ID, 'Minutes')
# etc.
search_input = (By.ID, "search-query")
like_btn = (By.CLASS_NAME, "HeartAnimation")
latest_tweets = (By.PARTIAL_LINK_TEXT, 'Latest')
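# Each locator above is a (By, selector) tuple; PollBot.__getattr__ below looks
# attribute names up in this class dict and waits for the element, so e.g.
# self.tweet_btn resolves to a live WebElement.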
class PollBot(object):
def __init__(self):
self.locator_dictionary = TwitterLocator.__dict__
self.chrome_options = Options()
#self.chrome_options.add_argument("--headless")
self.browser = webdriver.Chrome("/usr/lib/chromium-browser/chromedriver",chrome_options=self.chrome_options)
self.browser.get(URL.TWITTER)
self.timeout = 2
def _find_element(self, *loc):
return self.browser.find_element(*loc)
def __getattr__(self, what):
try:
if what in self.locator_dictionary.keys():
try:
element = WebDriverWait(self.browser, self.timeout).until(
EC.presence_of_element_located(self.locator_dictionary[what])
)
except(TimeoutException, StaleElementReferenceException):
traceback.print_exc()
try:
element = WebDriverWait(self.browser, self.timeout).until(
EC.visibility_of_element_located(self.locator_dictionary[what])
)
except(TimeoutException, StaleElementReferenceException):
traceback.print_exc()
# I could have returned element, however because of lazy loading, I am seeking the element before return
return self._find_element(*self.locator_dictionary[what])
except AttributeError:
super(PollBot, self).__getattribute__("method_missing")(what)
def run(self, post_text, status_id, choice1, choice2, choice3, days, hours, minutes):
self.login()
self.tweet_poll(post_text, status_id, choice1, choice2, choice3, days, hours, minutes)
self.browser.close()
def login(self, username=Constants.USERNAME, password=Constants.PASSWORD):
self.login_btn.click()
time.sleep(1)
self.username.click()
time.sleep(0.1)
self.username.send_keys(username)
time.sleep(0.1)
self.password.click()
time.sleep(0.1)
self.password.send_keys(password)
time.sleep(0.1)
self.login_confirm.click()
time.sleep(0.5)
def tweet_poll(self, post_text, status_id, choice1, choice2, choice3, days, hours, minutes=15):
self.browser.get(URL.TWITTER_STATUS+str(status_id))
self.timeout = 2
# click the tweet box
self.another_tweet.click()
time.sleep(1)
# type the tweet
self.tweet_box.send_keys(post_text)
time.sleep(1)
# make the poll
self.poll_btn.click()
time.sleep(0.1)
self.option_one.click()
time.sleep(0.1)
self.option_one.send_keys(choice1)
time.sleep(0.1)
self.option_two.click()
time.sleep(0.1)
self.option_two.send_keys(choice2)
time.sleep(0.2)
if choice3:
self.extra_opts.click()
time.sleep(0.1)
self.option_three.click()
time.sleep(0.1)
self.option_three.send_keys(choice3)
self.days.click()
time.sleep(0.1)
Select(self.days).select_by_value(str(days))
time.sleep(0.1)
self.hours.click()
time.sleep(0.1)
Select(self.hours).select_by_value(str(hours))
time.sleep(0.1)
self.minutes.click()
time.sleep(0.1)
Select(self.minutes).select_by_value(str(minutes))
time.sleep(0.1)
# send the tweet
self.tweet_btn.click()
time.sleep(2)
if __name__=="__main__":
pollBot = PollBot()
    pollBot.run("Hola", "0", "1", "2", "3", 0, 6, 15)  # "0" is a placeholder status id; run() takes 8 arguments
``` |
{
"source": "joac/redacted",
"score": 3
} |
#### File: redacted/redacted/__init__.py
```python
import threading
from . _redacted import SecretContainer
EXCEPTION_ON_LEAK = False # Instead of printing, raise an exception on insecure access to data
TRACE_SECRET_ACCESS = False # Log stack traces of secure access to secret
REDACTED_STR = "<REDACTED>"
__LOCAL_DATA = threading.local()
# __LOCAL_DATA.unlocked =
class DataLeak(Exception):
"""Raised when you are leaking data"""
class Secret(SecretContainer):
"""Encapsulates binary sensitive data"""
def __init__(self, secret):
self.secret = secret
self.locked = True
def __repr__(self):
return REDACTED_STR
def __format__(self, *args, **kwargs):
        raise DataLeak("Redacted objects can't be unsafely formatted")
def __str__(self):
return REDACTED_STR
class SecureFormatter(object):
"""Provides pythonesque string formating, returning a Secret"""
class SecureSink(object):
"""Provides a way to access stored secret"""
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
pass
def unwrap(self, secret):
secret.locked = False
return secret.secret
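# Usage sketch (assumed flow): a Secret stringifies to <REDACTED>, and the raw
# value is only reachable by explicitly unwrapping it through a SecureSink.
if __name__ == "__main__":
    token = Secret(b"hunter2")
    print(token)  # prints <REDACTED>
    with SecureSink() as sink:
        raw = sink.unwrap(token)  # the actual bytes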
``` |
{
"source": "joac/singing-girl",
"score": 3
} |
#### File: singing-girl/tests/singing_girl_tests.py
```python
import unittest
from singing_girl import sing
from decimal import Decimal
class TestTraductorNumeros(unittest.TestCase):
def assertSing(self, number, text):
        self.assertEqual(sing(number), text)
def test_digitos(self):
self.assertSing(0, 'cero')
self.assertSing(1, 'uno')
self.assertSing(2, 'dos')
self.assertSing(3, 'tres')
self.assertSing(4, 'cuatro')
self.assertSing(5, 'cinco')
self.assertSing(6, 'seis')
self.assertSing(7, 'siete')
self.assertSing(8, 'ocho')
self.assertSing(9, 'nueve')
def test_decenas(self):
self.assertSing(10, 'diez')
self.assertSing(11, 'once')
self.assertSing(12, 'doce')
self.assertSing(13, 'trece')
self.assertSing(14, 'catorce')
self.assertSing(15, 'quince')
self.assertSing(16, 'dieciséis')
self.assertSing(17, 'diecisiete')
self.assertSing(18, 'dieciocho')
self.assertSing(19, 'diecinueve')
self.assertSing(20, 'veinte')
self.assertSing(21, 'veintiuno')
self.assertSing(22, 'veintidós')
self.assertSing(23, 'veintitrés')
self.assertSing(24, 'veinticuatro')
self.assertSing(25, 'veinticinco')
self.assertSing(26, 'veintiséis')
self.assertSing(27, 'veintisiete')
self.assertSing(28, 'veintiocho')
self.assertSing(29, 'veintinueve')
self.assertSing(30, 'treinta')
self.assertSing(37, 'treinta y siete')
self.assertSing(40, 'cuarenta')
self.assertSing(42, 'cuarenta y dos')
self.assertSing(50, 'cincuenta')
self.assertSing(55, 'cincuenta y cinco')
self.assertSing(60, 'sesenta')
self.assertSing(66, 'sesenta y seis')
self.assertSing(70, 'setenta')
self.assertSing(77, 'setenta y siete')
self.assertSing(80, 'ochenta')
self.assertSing(88, 'ochenta y ocho')
self.assertSing(90, 'noventa')
self.assertSing(99, 'noventa y nueve')
def test_centenas(self):
self.assertSing(100, 'cien')
self.assertSing(111, 'ciento once')
self.assertSing(200, 'doscientos')
self.assertSing(222, 'doscientos veintidós')
self.assertSing(300, 'trescientos')
self.assertSing(333, 'trescientos treinta y tres')
self.assertSing(400, 'cuatrocientos')
self.assertSing(444, 'cuatrocientos cuarenta y cuatro')
self.assertSing(500, 'quinientos')
self.assertSing(555, 'quinientos cincuenta y cinco')
self.assertSing(600, 'seiscientos')
self.assertSing(666, 'seiscientos sesenta y seis')
self.assertSing(700, 'setecientos')
self.assertSing(777, 'setecientos setenta y siete')
self.assertSing(800, 'ochocientos')
self.assertSing(888, 'ochocientos ochenta y ocho')
self.assertSing(900, 'novecientos')
self.assertSing(953, 'novecientos cincuenta y tres')
self.assertSing(999, 'novecientos noventa y nueve')
def test_miles(self):
self.assertSing(4326, 'cuatro mil trescientos veintiséis')
self.assertSing(7532, 'siete mil quinientos treinta y dos')
self.assertSing(1014, 'mil catorce')
self.assertSing(21000, 'veintiún mil')
self.assertSing(71000, 'setenta y un mil')
self.assertSing(916543, 'novecientos dieciséis mil quinientos cuarenta y tres')
def test_millones(self):
self.assertSing(10 ** 6, 'un millón')
self.assertSing(10 ** 12, 'un billón')
self.assertSing(10 ** 18, 'un trillón')
self.assertSing(10 ** 24, 'un cuatrillón')
def test_numeros_grandes(self):
self.assertSing(1000000, 'un millón')
self.assertSing(1000021, 'un millón veintiuno')
self.assertSing(41000021, 'cuarenta y un millones veintiuno')
self.assertSing(41000021, 'cuarenta y un millones veintiuno')
self.assertSing(416010015, 'cuatrocientos dieciséis millones diez mil quince')
self.assertSing(123123123123123123123456123456,
'ciento veintitrés mil ciento veintitrés cuatrillones '
'ciento veintitrés mil ciento veintitrés trillones '
'ciento veintitrés mil ciento veintitrés billones '
'ciento veintitrés mil cuatrocientos cincuenta y seis millones '
'ciento veintitrés mil cuatrocientos cincuenta y seis')
def test_decimales(self):
self.assertSing(16.1, 'dieciséis con 10/100')
self.assertSing(16.321, 'dieciséis con 32/100')
self.assertSing(16.80, 'dieciséis con 80/100')
self.assertSing(16.51, 'dieciséis con 51/100')
self.assertSing(1.75, 'uno con 75/100')
self.assertSing(Decimal('123123123123123123123456123456.33'),
'ciento veintitrés mil ciento veintitrés cuatrillones '
'ciento veintitrés mil ciento veintitrés trillones '
'ciento veintitrés mil ciento veintitrés billones '
'ciento veintitrés mil cuatrocientos cincuenta y seis millones '
'ciento veintitrés mil cuatrocientos cincuenta y seis con 33/100')
self.assertSing(Decimal('123123123123123123123456123456.67'),
'ciento veintitrés mil ciento veintitrés cuatrillones '
'ciento veintitrés mil ciento veintitrés trillones '
'ciento veintitrés mil ciento veintitrés billones '
'ciento veintitrés mil cuatrocientos cincuenta y seis millones '
'ciento veintitrés mil cuatrocientos cincuenta y seis con 67/100')
def test_value_error_raised_on_invalid_number(self):
self.assertRaises(ValueError, sing, 10 ** 30)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joadavis/other-fun-projects",
"score": 3
} |
#### File: other-fun-projects/ldraw-model-creation/puck-optimizer.py
```python
class LDrawPuckGen(object):
# in LDU
brick_height = 24
brick_side = 20
model = {}
part_1x1 = "1x1"
part_1x2 = "1x2"
def generate_puck(self, diameter, height):
        # return a model represented by a dict keyed by "x y z" grid positions
# rendered parts will be bounded by the diameter
radius = diameter/2
for h in range(height):
for x in range(diameter):
for z in range(diameter):
# TODO allow defining a different algorithm
if (radius-x)**2 + (radius-z)**2 < radius**2:
# model{x}{y}{z} = 1x1
# if sticking to a grid, can just hash together
# key = f"<KEY>
key = f"{<KEY> # use spaces, easy to split
self.model[key] = self.part_1x1
return self.model
def optimize(self):
# TODO optimize
# return model structure after optimization
return self.model
def create_file(model, filename):
# creates a file containing a valid ldraw format for the model
# example line: 1 4 10 -24 10 1 0 0 0 1 0 0 0 1 3005.dat
# format: 1 <colour> x y z a b c d e f g h i <file>
lines = []
lines.append("0 // Created by puck-optimizer.py, joadavis\n")
prefix_color_red = "1 4 "
suffix_part1x1 = "3005.dat\n"
suffix_part1x2 = "3004.dat\n"
suffix_part2x2 = "3003.dat\n"
suffix_part2x3 = "3002.dat\n"
rotate_0 = " 1 0 0 0 1 0 0 0 1 "
# rotate_90 = " 0 0 -1 0 1 0 1 0 0 "
rotate_90 = " 0 0 1 0 1 0 -1 0 0 "
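    # the nine a..i values form a row-major 3x3 rotation matrix; rotate_90 above
    # is a 90-degree rotation about the vertical Y axis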
for pos_key, part in model.items():
# generate an output line for the part
# TODO use a dict of part to partial line mappings incl. part orientation
if part == LDrawPuckGen.part_1x1:
# pull apart pos_key into x y z
x, y, z = pos_key.split()
suff = suffix_part1x1
rot = rotate_0
# adjust
# look for possible matching neighbors to swap
#nei_1x2 = x + " " + y + " " + str(int(z) + 1)
nei_1x2 = str(int(x) + 1) + " " + y + " " + z
print(nei_1x2)
if nei_1x2 in model.keys() and y == "-2":
# would like to del model[nei_1x2] but...
model[nei_1x2] = "0"
model[pos_key] = LDrawPuckGen.part_1x2
suff = suffix_part1x2
x = int(x) * LDrawPuckGen.brick_side
y = int(y) * LDrawPuckGen.brick_height
z = int(z) * LDrawPuckGen.brick_side
lines.append(prefix_color_red + str(x) + " " + str(y) + " " + str(z) + rot + suff)
# write lines to a file
with open(filename, 'w') as f:
#for eachline in lines:
# f.write(eachline + '\n')
f.writelines(lines)
# first test - simple puck
diameter = 10
height = 3
filename = "./models/3layer-puck.ldr"
generator = LDrawPuckGen()
model = generator.generate_puck(diameter, height)
print(model)
#model = optimize(model)
generator.optimize()
create_file(generator.model, filename)
###
# found out unfortunately that parts are not rotated on a corner but rather positioned and based on the middle of the part
# so replacing two 1x1 with a 1x2 would need to go with half part placement based on the code above. :(
# possible unit tests
# diameter of 1 and height of 1 should just give 1 brick
``` |
{
"source": "joadavis/rpi-coding",
"score": 3
} |
#### File: rpi-coding/halloween-xray-box/xray-box.py
```python
import RPi.GPIO as GPIO
import time
# todo import random and random patterns
pinb = 10
pinc = 11
pind = 15
pinw = 16
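# pins 10/11/15 drive the three scan-wave LEDs (b, c, d) and pin 16 the white
# display light; pins 37/38 are the trigger buttons (physical board numbering)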
GPIO.setmode(GPIO.BOARD)
GPIO.setup(pinb, GPIO.OUT)
GPIO.setup(pinc, GPIO.OUT)
GPIO.setup(pind, GPIO.OUT)
GPIO.setup(pinw, GPIO.OUT)
GPIO.setup(37, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(38, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def doScan(channel):
print("Starting scan")
GPIO.output(pinb, False)
GPIO.output(pinc, False)
GPIO.output(pind, False)
GPIO.output(pinw, False)
# make waves
for i in range(3):
wavetime = .1
#print "bump from b"
GPIO.output(pinb, True)
time.sleep(wavetime)
GPIO.output(pinc, True)
time.sleep(wavetime)
GPIO.output(pind, True)
time.sleep(wavetime)
GPIO.output(pind, False)
time.sleep(wavetime)
GPIO.output(pinc, False)
time.sleep(wavetime)
GPIO.output(pinb, False)
time.sleep(wavetime)
#
GPIO.output(pinb, True)
time.sleep(wavetime)
GPIO.output(pinc, True)
time.sleep(wavetime)
GPIO.output(pind, True)
GPIO.output(pinb, False)
time.sleep(wavetime)
GPIO.output(pinc, False)
time.sleep(wavetime)
GPIO.output(pind, False)
time.sleep(wavetime)
#
#print "bump from d"
GPIO.output(pind, True)
time.sleep(wavetime)
GPIO.output(pinc, True)
time.sleep(wavetime)
GPIO.output(pinb, True)
time.sleep(wavetime)
GPIO.output(pinb, False)
time.sleep(wavetime)
GPIO.output(pinc, False)
time.sleep(wavetime)
GPIO.output(pind, False)
time.sleep(wavetime)
#
GPIO.output(pind, True)
time.sleep(wavetime)
GPIO.output(pinc, True)
time.sleep(wavetime)
GPIO.output(pinb, True)
GPIO.output(pind, False)
time.sleep(wavetime)
GPIO.output(pinc, False)
time.sleep(wavetime)
GPIO.output(pinb, False)
time.sleep(wavetime)
GPIO.output(pinb, False)
GPIO.output(pinc, False)
GPIO.output(pind, False)
print("display")
GPIO.output(pinw, True)
time.sleep(10)
GPIO.output(pinw, False)
print("Scan complete")
def doScanBuildup(channel):
print("Starting scan b")
GPIO.output(pinb, False)
GPIO.output(pinc, False)
GPIO.output(pind, False)
GPIO.output(pinw, False)
# make waves
wavetime = .3
for i in range(5):
wavetime = wavetime - 0.05
# make waves
#print "bump from b"
GPIO.output(pinb, True)
time.sleep(wavetime)
GPIO.output(pinc, True)
time.sleep(wavetime)
GPIO.output(pinb, False)
GPIO.output(pind, True)
time.sleep(wavetime)
GPIO.output(pinc, False)
time.sleep(wavetime)
GPIO.output(pind, False)
time.sleep(wavetime)
GPIO.output(pinb, False)
GPIO.output(pinc, False)
GPIO.output(pind, False)
print("display b")
GPIO.output(pinw, True)
time.sleep(10)
GPIO.output(pinw, False)
print("Scan complete b")
GPIO.add_event_detect(38, GPIO.RISING, callback=doScanBuildup, bouncetime=3000)
while True:
GPIO.wait_for_edge(37, GPIO.FALLING)
print("how'd that happen?")
    doScan(37)  # doScan expects the triggering channel as an argument
# wont ever get here in this version
GPIO.cleanup()
```
#### File: rpi-coding/splendiferous/splendiferous.py
```python
import pygame
import random
#import splendgame
#import splendcards as ards
import splendconstants as ants
# This sets the WIDTH and HEIGHT of each grid location
WIDTH = 20
HEIGHT = 20
# This sets the margin between each cell
MARGIN = 5
# some generic drawing functions --------------
def draw_gem(screen, color, x, y):
#pygame.draw.polygon(screen, color, [[x+10, y+10], [x, y+20], [x+20, y+20]], 5)
pygame.draw.polygon(screen, color, [[x+10, y], [x, y+10], [x, y+20], [x+10, y+30], [x+20, y+20], [x+20, y+10]], 5)
#pygame.draw.polygon(screen, color, [[x+10, y+10], [x, y+20], [x+20, y+20]], 5)
#pygame.draw.polygon(screen, color, [[x+10, y+10], [x, y+20], [x+20, y+20]], 5)
def draw_gem_small(screen, color, x, y):
pygame.draw.polygon(screen, color, [[x+3, y], [x, y+3], [x, y+6], [x+3, y+9], [x+6, y+6], [x+6, y+3]])
# use a 12x20 space
def draw_noble_icon(screen, x, y):
pygame.draw.polygon(screen, ants.WHITE,
[ [x, y], [x+2, y+3], [x + 4, y], [x+6, y+3], [x+8, y],
[x+10, y+3], [x+12, y],
[x+12, y+8], [x+10, y+10],
[x+11, y+12], [x+11, y+20], [x+1, y+20],
[x+1, y+12], [x+2, y+10],
[x, y+8], [x, y] ])
def draw_finger(screen, x, y):
pygame.draw.polygon(screen, ants.WHITE,
[ [x,y], [x+2, y], [x+2, y+5],
[x+8, y+5], [x+8, y+15],
[x, y+15], [x, y] ] )
# some classes --------------------------------
class GameSession(object):
players = []
bank_tokens = []
noble_gallery = None
mines_deck_I = None
mines_deck_II = None
mines_deck_III = None
all_sprites = None
def __init__(self, players):
self.all_sprites = pygame.sprite.Group()
self.noble_gallery = pygame.sprite.Group()
self.mines_deck_I = pygame.sprite.Group()
self.mines_deck_II = pygame.sprite.Group()
self.mines_deck_III = pygame.sprite.Group()
self.mines_backs = pygame.sprite.Group()
for player in players:
player.newGame()
self.players = players
if len(players) == 2:
self.bank_tokens = [5,5,5,5,5,7]
self.noble_gallery.add(Noble_Card(100, 10))
self.noble_gallery.add(Noble_Card(180, 10))
self.noble_gallery.add(Noble_Card(260, 10))
else:
print("unsupported number of players")
# TODO generate mines for each deck
self.mines_deck_III.add( Mine(ants.GEM_RUBY, 3, [0,0,3,7,0], 3, 10, 10) )
# collect up all sprites
self.all_sprites.add(self.noble_gallery)
self.all_sprites.add(self.mines_deck_I)
self.all_sprites.add(self.mines_deck_II)
self.all_sprites.add(self.mines_deck_III)
def generateMines(self, level):
""" level expressed as an int from 1 to 3 """
if level > 3 or level < 1:
# TODO throw an exception
return None
class GenericPlayer(object):
name = "generic"
score = 0
turncount = 0
tokens = [0,0,0,0,0,0]
hand = None
played_sprites = None
played_vals = [0,0,0,0,0]
def __init__(self, name):
self.name = name
def newGame(self):
self.score = 0
self.turncount = 0
def canBuy(self, wants):
""" Given a list of wanted tokens (wild gold will be ignored),
determine if the player has enough tokens.
return a list of what would be left or None
"""
remaining_tokens = list(self.tokens)
for want_token in range(5):
if wants[want_token] > remaining_tokens[want_token]:
# are there enough wild gold to cover deficit?
deficit = wants[want_token] - remaining_tokens[want_token]
if remaining_tokens[5] >= deficit:
remaining_tokens[5] = remaining_tokens[5] - deficit
remaining_tokens[want_token] = 0
else:
return None
else:
remaining_tokens[want_token] = remaining_tokens[want_token] - wants[want_token]
return remaining_tokens
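    # Worked example (made-up numbers): tokens [1,0,2,0,0,1] against wants
    # [2,0,1,0,0] succeeds -- the single wild gold covers the missing diamond --
    # and canBuy returns [0,0,1,0,0,0]; without the gold it would return None.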
class HumanPlayer(GenericPlayer):
def __init__(self, name):
self.name = name # TODO make this a super call
# TODO have a surface class for the background, to make it pretty once
class Token_Bank(pygame.sprite.Sprite):
color = ants.GEM_DIAM
def __init__(self, color, x, y):
super().__init__()
self.color = color
self.image = pygame.Surface([ants.TOKEN_SIZE, ants.TOKEN_SIZE])
pygame.draw.circle(self.image, ants.TOKEN2, [20, 20], 22)
pygame.draw.circle(self.image, ants.TOKEN, [20, 20], 20)
draw_gem(self.image, self.color, 9, 5)
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
class Mine(pygame.sprite.Sprite):
victory_point_value = 0
color = ants.GEM_DIAM
costs = [7,7,7,7,7]
level = 1
faceup = False
def __init__(self, color, vp, costs, level, x, y):
super().__init__()
self.color = color
self.victory_point_value = vp
self.costs = costs
self.image = pygame.Surface([ants.MINE_SIZE, ants.MINE_SIZE])
self.localdraw_back(self.image)
# Fetch the rectangle object that has the dimensions of the image
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
    def make_faceup(self):
        if not self.faceup:
            self.faceup = True
            x, y = self.rect.x, self.rect.y
            self.image = pygame.Surface([ants.MINE_SIZE, ants.MINE_SIZE])
            self.image.fill(self.color)
            self.localdraw(self.image)
            # Fetch the rectangle object that has the dimensions of the image
            self.rect = self.image.get_rect()
            self.rect.x = x
            self.rect.y = y
def update(self):
pass
def localdraw_back(self, screen):
# determine the back color and level
if self.level == 1:
b_color = ants.MINE_BACK_I
b_level = "I"
elif self.level == 2:
b_color = ants.MINE_BACK_II
b_level = "II"
elif self.level == 3:
b_color = ants.MINE_BACK_III
b_level = "III"
self.image = pygame.Surface([ants.MINE_SIZE, ants.MINE_SIZE])
lofont = pygame.font.SysFont("serif", 36)
pygame.draw.rect(self.image, ants.MINE_BACK,
[0, 0, ants.MINE_SIZE, ants.MINE_SIZE])
pygame.draw.polygon(self.image, b_color,
[ [0, ants.MINE_SIZE], [ants.MINE_SIZE - 20, 0],
[ants.MINE_SIZE, 0], [20, ants.MINE_SIZE] ] )
b_text = lofont.render(b_level,
True, ants.WHITE)
self.image.blit(b_text, [(ants.MINE_SIZE // 2) - (b_text.get_width() // 2),
(ants.MINE_SIZE // 2) - (b_text.get_height() // 2)])
def localdraw(self, screen):
lofont = pygame.font.Font(None, 18)
pygame.draw.rect(screen, self.color, [0, 0, ants.MINE_SIZE, 15])
pygame.draw.rect(screen, ants.MINE_BACK, [0, 0 + 15, ants.MINE_SIZE, 45])
draw_gem(screen, self.color, 5, 20)
if self.victory_point_value > 0:
text = lofont.render("+" + str(self.victory_point_value),
True, ants.WHITE)
self.image.blit(text, [45, 3])
def pick_two(max=4):
""" pick a number from 0 to max inclusive, then pick another number from 0 to max inclusive
default from 0 to 4
returns tuple with smallest number first
"""
num1 = random.randint(0, max-1) # why -1? to leave room for the second number
num2 = random.randint(0, max-1)
print(num1, " ", num2)
if num2 >= num1:
num2 = num2 + 1 # add back in the -1 if second number is after first
return (num1, num2)
else:
return (num2, num1) # put the smaller number first
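# e.g. pick_two() might return (1, 3): two distinct indices in 0..4, smaller
# first (illustrative output; the draw is random and a debug print fires each call)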
class Noble_Card(pygame.sprite.Sprite):
victory_point_value = 3
wants = [4, 4, 4, 4, 4] # higher than any expectation
def __init__(self, x, y, wants = []):
        super().__init__() # omitting this will cause an "object has no attribute '_Sprite__g'" error
if wants == []:
num1, num2 = pick_two()
if random.randint(0,1):
# two 4s
self.wants = [0,0,0,0,0]
self.wants[num1] = 4
self.wants[num2] = 4
else:
# three 3s
self.wants = [3,3,3,3,3]
self.wants[num1] = 0
self.wants[num2] = 0
else:
self.wants = wants
print(self.wants)
# image setup
self.image = pygame.Surface([50,50])
self.init_draw(self.image)
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
def update(self):
print("upd")
pass
def init_draw(self, screen):
# now using sprite, so coords relative within sprite image (screen)
# upper left corner x and y then width and height (downward)
pygame.draw.rect(screen, ants.NOBLE_BACK, [0, 0, 50, 50])
infont = pygame.font.Font(None, 18)
# TODO: print wants > 0
# TODO: print victory point value (all the same, but good reminder)
line_offset = 2
for gem in range(len(self.wants)):
if self.wants[gem] > 0:
draw_gem_small(screen, ants.GEM_ORDER[gem], 2, line_offset)
text = infont.render(str(self.wants[gem]), True, ants.WHITE)
screen.blit(text, [12, line_offset - 2])
line_offset = line_offset + 12
draw_noble_icon(screen, 29, 5)
text = infont.render("+" + str(self.victory_point_value),
True, ants.WHITE)
screen.blit(text, [30, 30])
# draw grid lines
def draw_grid(screen, color):
pass
# Setup --------------------------------------
pygame.init()
players = [ HumanPlayer("Player 1"),
HumanPlayer("Player 2") ]
gamesession = GameSession(players);
# Set the width and height of the screen [width,height]
size = [700, 500]
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Splendiferous")
# try defining this in constants
afont = pygame.font.Font(None, 18)
# setup token buttons
diam_token = Token_Bank(ants.GEM_DIAM, 10, 380)
emer_token = Token_Bank(ants.GEM_EMER, 60, 380)
ruby_token = Token_Bank(ants.GEM_RUBY, 110, 380)
onix_token = Token_Bank(ants.GEM_ONIX, 160, 380)
saph_token = Token_Bank(ants.GEM_SAPH, 210, 380)
wild_token = Token_Bank(ants.GEM_WILD, 280, 380)
tokens = [diam_token, emer_token, ruby_token, onix_token, saph_token, wild_token]
token_group = pygame.sprite.Group()
token_group.add(tokens)
test_mine = Mine(ants.GEM_RUBY, 1, [0, 0, 2, 4, 0], 1, 200, 200)
test_mine2 = Mine(ants.GEM_SAPH, 4, [0, 0, 2, 4, 0], 2, 300, 200)
gamesession.mines_deck_I.add(test_mine)
gamesession.mines_deck_II.add(test_mine2)
# Loop until the user clicks the close button.
done = False
# Used to manage how fast the screen updates
clock = pygame.time.Clock()
# Hide the mouse cursor
pygame.mouse.set_visible(0)
# -------- Main Program Loop -----------
while not done:
# ALL EVENT PROCESSING SHOULD GO BELOW THIS COMMENT
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
elif event.type == pygame.MOUSEBUTTONDOWN:
# User clicks the mouse. Get the position
click_pos = pygame.mouse.get_pos()
# Change the x/y screen coordinates to grid coordinates
column = click_pos[0] // (WIDTH + MARGIN)
row = click_pos[1] // (HEIGHT + MARGIN)
# Set that location to zero
#grid[row][column] = 1
print("Click ", click_pos, "Grid coordinates: ", row, column)
# ALL EVENT PROCESSING SHOULD GO ABOVE THIS COMMENT
# ALL GAME LOGIC SHOULD GO BELOW THIS COMMENT
pos = pygame.mouse.get_pos()
x = pos[0]
y = pos[1]
# ALL GAME LOGIC SHOULD GO ABOVE THIS COMMENT
# ALL CODE TO DRAW SHOULD GO BELOW THIS COMMENT
# First, clear the screen to white. Don't put other drawing commands
# above this, or they will be erased with this command.
screen.fill(ants.BACK_GREEN)
for row in range(20):
for column in range(20):
color = ants.BACK_GREEN_LINES
#if grid[row][column] == 1:
# color = GREEN
pygame.draw.rect(screen,
color,
[(MARGIN + WIDTH) * column + MARGIN,
(MARGIN + HEIGHT) * row + MARGIN,
WIDTH, HEIGHT])
#offset = 0
#for token in tokens:
# offset = offset + 50
# token.draw(screen, 10 + offset, 380)
token_group.draw(screen)
#pygame.draw.line(screen, green, [0, 0], [50, 30], 5)
# Draw on the screen a green line from (0, 0) to (50, 75)
# 5 pixels wide.
#pygame.draw.lines(screen, black, False, [[0, 80], [50, 90], [200, 80], [220, 30]], 5)
#pygame.draw.aaline(screen, green, [0, 50], [50, 80], True)
# mouse follow
#draw_gem(screen, ants.GEM_DIAM, x, y)
#draw_gem(screen, ants.GEM_EMER, x+1, y+1)
draw_finger(screen, x, y)
# testin' fun
gamesession.mines_deck_III.sprites()[0].rect.y = gamesession.mines_deck_III.sprites()[0].rect.y + 1
gamesession.all_sprites.draw(screen)
# ALL CODE TO DRAW SHOULD GO ABOVE THIS COMMENT
# Go ahead and update the screen with what we've drawn.
pygame.display.flip()
# Limit to 20 frames per second
clock.tick(60)
# Close the window and quit.
# If you forget this line, the program will 'hang'
# on exit if running from IDLE.
pygame.quit()
``` |
{
"source": "jo-adithya/note-app",
"score": 3
} |
#### File: note-app/app/routes.py
```python
from flask import request, flash, render_template, redirect, url_for
from flask_login import current_user, login_required, login_user, logout_user
from werkzeug.urls import url_parse
from app import app, db
from app.forms import LoginForm, RegistrationForm
from app.models import User
@app.route('/forget')
def forget():
return render_template('forgot.html', title='Forgot Password')
@app.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
# Data Verification
user = User.query.filter_by(username=form.username.data).first()
if user is None or not user.check_password(form.password.data):
flash('Invalid username or password')
            return redirect(url_for('login'))
# Verified
login_user(user, remember=form.remember_me.data)
# Check if user requested a protected url before login
requested_url = request.args.get('next')
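        # only allow relative redirect targets; an absolute URL in ?next=
        # would make this an open-redirect vulnerability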
if not requested_url or url_parse(requested_url).netloc != '':
requested_url = url_for('index')
return redirect(requested_url)
return render_template('login.html', title='Login', form=form)
@app.route('/register', methods=['GET', 'POST'])
def register():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = RegistrationForm()
if form.validate_on_submit():
# Add user to the database
user = User(username=form.username.data, email=form.email.data)
user.set_password(password=form.password.data)
db.session.add(user)
db.session.commit()
return redirect(url_for('login'))
return render_template('register.html', title='Register', form=form)
@app.route('/')
@login_required
def index():
return render_template('index.html', title='Notes')
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
``` |
{
"source": "jo-adithya/tic-tac-toe",
"score": 4
} |
#### File: jo-adithya/tic-tac-toe/main.py
```python
from helper import title, winning_states
from random import choice
import time
import copy
def display_board(board):
for el in board:
print('—————————————')
print(f'| {el[0]} | {el[1]} | {el[2]} |')
print('—————————————')
def transform_board(board, mark):
temp = copy.deepcopy(board)
for row in temp:
for col in enumerate(row):
if col[1] == mark:
row[col[0]] = True
return temp
def player_turn(board):
while True:
try:
move = int(input('Enter Board Number 1 - 9: ')) - 1
if move not in range(9):
print('Please enter a number from 1 - 9...')
continue
elif board[move // 3][move % 3] != ' ':
print('That box is already taken...')
continue
except ValueError:
print('Please enter a number...')
continue
break
return move
def computer_turn(board):
# Check if Computer can win
if (move := check_almost_win(board, 'X')) != None:
print('computer')
return move
# Check if Player can win
if (move := check_almost_win(board, 'O')) != None:
print('player')
return move
# Check if center or corner is empty
corners = []
if board[1][1] == ' ':
return 4
if board[0][0] == ' ':
corners.append(0)
if board[0][2] == ' ':
corners.append(2)
if board[2][0] == ' ':
corners.append(6)
if board[2][2] == ' ':
corners.append(8)
if corners:
return choice(corners)
return choice([board[i].index(' ') + 3 * i for i in range(3) if ' ' in board[i]])
def check_almost_win(board, mark):
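    # Mark the player's cells as True, then compare rows/columns/diagonals
    # against winning_states (imported from helper, assumed to hold
    # two-in-a-row patterns such as [True, True, ' ']); return the flat
    # index of the blank square that would complete the line.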
board = transform_board(board, mark)
transposed_board = list(map(list, zip(*board)))
for i in range(3):
if board[i] in winning_states:
return board[i].index(' ') + i * 3
if transposed_board[i] in winning_states:
return 3 * transposed_board[i].index(' ') + i
if (check_board := [board[0][0], board[1][1], board[2][2]]) in winning_states:
return [0, 4, 8][check_board.index(' ')]
    if (check_board := [board[0][2], board[1][1], board[2][0]]) in winning_states:
return [2, 4, 6][check_board.index(' ')]
def check_win(board, mark):
board = transform_board(board, mark)
transposed_board = list(map(list, zip(*board)))
for i in range(3):
if board[i] == [True, True, True]:
return True
if transposed_board[i] == [True, True, True]:
return True
if [board[0][0], board[1][1], board[2][2]] == [True, True, True]:
return True
if [board[0][2], board[1][1], board[2][0]] == [True, True, True]:
return True
return False
def display_computer_turn():
print('\n\nComputer\'s Turn', end='')
time.sleep(.5)
print('.', end='', flush=True)
time.sleep(.5)
print('.', end='', flush=True)
time.sleep(.5)
print('.', flush=True)
time.sleep(.5)
if __name__ == "__main__":
# Initial Board
print('\n', title, '\n\n')
board = [[' ', ' ', ' '], [' ', ' ', ' '], [' ', ' ', ' ']]
turn = 1
print('Box Numbers:')
display_board([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
time.sleep(2)
display_computer_turn()
board[choice([0,1,2])][choice([0,1,2])] = 'X'
display_board(board)
while turn < 5:
# Ask user for input (move)
print('\n\nPlayer\'s Turn...')
move = player_turn(board)
board[move // 3][move % 3] = 'O'
display_board(board)
if check_win(board, 'O'):
print('Congratulations, You Win!')
break
display_computer_turn()
move = computer_turn(board)
board[move // 3][move % 3] = 'X'
display_board(board)
if check_win(board, 'X'):
print('Computer wins!')
break
turn += 1
else:
print('It\'s a tie!')
``` |
{
"source": "joaduo/uriego",
"score": 3
} |
#### File: joaduo/uriego/logging.py
```python
def debug(msg, *args, **kwargs):
info(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
if args:
msg = msg % args
if kwargs:
msg = msg.format(**kwargs)
print(msg)
```
#### File: joaduo/uriego/main.py
```python
import uasyncio
import riego
import logging
import ujson
import machine
def web_page(msg):
html = """<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body>
<h2>MicroRiego Web Server</h2>
<p>
{msg}
</p>
</body>
</html>"""
return html.format(msg=msg)
class Server:
def __init__(self, host='0.0.0.0', port=80, backlog=5, timeout=20):
self.host = host
self.port = port
self.backlog = backlog
self.timeout = timeout
async def run(self):
logging.info('Awaiting client connection.')
self.cid = 0
self.server = await uasyncio.start_server(self.run_client, self.host, self.port, self.backlog)
async def run_client(self, sreader, swriter):
self.cid += 1
cid = self.cid
logging.info('Got connection from client cid={cid}', cid=cid)
riego.garbage_collect()
try:
request = await uasyncio.wait_for(sreader.readline(), self.timeout)
request_trailer = await uasyncio.wait_for(sreader.read(-1), self.timeout)
logging.info('request={request!r}, cid={cid}', request=request, cid=cid)
verb, path = request.split()[0:2]
try:
resp = serve_request(verb, path, request_trailer)
except UnauthenticatedError as e:
resp = response(401, 'text/html', web_page('%s %r' % (e,e)))
except Exception as e:
resp = response(500, 'text/html', web_page('Exception: %s %r' % (e,e)))
swriter.write(resp)
await swriter.drain()
except uasyncio.TimeoutError:
swriter.write('Timeout')
await swriter.drain()
except Exception as e:
logging.info('Exception e={e}', e=e)
swriter.write('exc={e}'.format(e=e))
await swriter.drain()
logging.info('Client {cid} disconnect.', cid=cid)
swriter.close()
await swriter.wait_closed()
logging.info('Client {cid} socket closed.', cid=cid)
async def close(self):
logging.info('Closing server')
self.server.close()
await self.server.wait_closed()
logging.info('Server closed.')
def response(status, content_type, payload):
status_msg = {200:'OK',
404:'NOT FOUND',
403:'FORBIDDEN',
401:'UNAUTHENTICATED',
500:'SERVER ERROR'}[status]
header = ('HTTP/1.1 %s %s\n' % (status, status_msg) +
'Content-Type: %s\n' % content_type +
'Connection: close\n\n')
return header + payload
class UnauthenticatedError(Exception):
pass
AUTH_TOKEN='<PASSWORD>'
def extract_json(request):
riego.garbage_collect()
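    # the JSON body starts right after the blank line (\r\n\r\n) that ends the HTTP headers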
msg = ujson.loads(request[request.rfind(b'\r\n\r\n')+4:])
if msg.get('auth_token') != AUTH_TOKEN:
raise UnauthenticatedError('Unauthenticated. Send json like {"auth_token":"<secret>", "payload": ...}')
return msg['payload']
POST = b'POST'
def serve_request(verb, path, request_trailer):
logging.info(path)
content_type = 'application/json'
status = 200
if path == b'/task_list':
if verb == POST:
riego.task_list.load_tasks(extract_json(request_trailer))
payload = ujson.dumps(riego.task_list.table_json)
elif path == b'/time':
if verb == POST:
payload = extract_json(request_trailer)
logging.info('set time to {payload}', payload=payload)
machine.RTC().datetime(payload)
payload = ujson.dumps(riego.gmtime())
else:
status = 404
content_type = 'text/html'
payload = web_page('404 Not found')
return response(status, content_type, payload)
def main():
server = Server()
try:
uasyncio.run(server.run())
uasyncio.run(riego.loop_tasks())
finally:
uasyncio.run(server.close())
_ = uasyncio.new_event_loop()
main()
``` |
{
"source": "joaduo/xpath_webdriver",
"score": 2
} |
#### File: xpath_webdriver/xpathwebdriver/solve_settings.py
```python
import rel_imp; rel_imp.init()
import importlib
import logging
import os
logger = logging.getLogger('solve_settings')
class ConfigVar(object):
def __init__(self, doc=None, default=None, parser=None, experimental=False, env_var=None):
self.value = default
self.default = default
self.parser = parser or self._solve_parser()
self.name = None
self.doc = doc
self.experimental = experimental
self._env_var = env_var
@property
def env_var(self):
return self._env_var or 'XPATHWD_' + self.name.upper()
def _solve_parser(self):
parser = type(self.default)
if parser == bool:
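            # bool('False') would be truthy, so eval() is used to turn the
            # strings 'True'/'False' back into booleans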
parser = eval
return parser
def parse(self, value_str):
return self.parser(value_str)
def copy(self, value):
new_cfg = ConfigVar(self.doc, self.default, self.parser, self.experimental, self._env_var)
new_cfg.name = self.name
new_cfg.value = value
return new_cfg
class BaseSettings(object):
def __init__(self):
self._load_env_vars()
def _load_env_vars(self):
'''
Support loading from environment variables
'''
config_vars = self._get_config_vars()
self._wrap_raw_values(config_vars)
for env_var, cfg_var in config_vars.items():
if env_var in os.environ:
logger.debug('Using %s=%r => %s', env_var, os.environ[env_var], cfg_var.name)
setattr(self, cfg_var.name, cfg_var.copy(cfg_var.parse(os.environ[env_var])))
def _wrap_raw_values(self, config_vars):
for cfg in config_vars.values():
name = cfg.name
if hasattr(self, name):
value = getattr(self, name)
if not isinstance(value, ConfigVar):
setattr(self, name, cfg.copy(value))
def _get_config_vars(self):
config = {}
for n in dir(self):
if n.startswith('_'):
continue
cfg_var = getattr(self, n)
if not isinstance(cfg_var, ConfigVar):
cfg_var = self._solve_config_var(n)
if not isinstance(cfg_var, ConfigVar):
                    logger.warning('Config variable %r not supported (misspelled/deprecated?)', n)
continue
cfg_var.name = cfg_var.name or n
config[cfg_var.env_var] = cfg_var
return config
@classmethod
def _solve_config_var(cls, attr):
cfg_var = getattr(cls, attr, None)
if (not isinstance(cfg_var, ConfigVar)
and issubclass(cls.__bases__[0], BaseSettings)):
return cls.__bases__[0]._solve_config_var(attr)
return cfg_var
class SettingsWrapper(object):
'''
Provide the .get(name, default=None) method for accessing an object's
attributes.
Useful for configuration.
'''
def __init__(self, settings):
self._settings = settings
def get_config_vars(self):
return self._settings._get_config_vars()
def get(self, name, default=None):
if hasattr(self._settings, name):
value = getattr(self._settings, name)
value = value.value if isinstance(value, ConfigVar) else value
return value
return default
def set(self, name, value):
setattr(self._settings, name, value)
def register_settings(settings_path):
'''
Register settings given specific module path.
:param settings_path:
'''
if isinstance(settings_path, str):
solve_settings(file_path=settings_path)
else:
register_settings_instance(settings_path)
set_log_level()
def _register_settings_module(mod):
if hasattr(mod, 'Settings'):
settings_cls = mod.Settings
else:
settings_cls = mod.DefaultSettings
register_settings_instance(settings_cls())
global_settings = None
def register_settings_instance(settings):
global global_settings
if not settings:
logging.debug('Empty settings %s, ignoring them', settings)
return
if settings == global_settings:
logging.debug('Settings %s already registered', settings)
return
if global_settings:
logging.debug('Replacing existing settings %r (old) with %r (new)', global_settings, settings)
if not isinstance(settings, SettingsWrapper):
settings = SettingsWrapper(settings)
global_settings = settings
def set_log_level():
# Set the level of the root logger
    # import here due to chicken-egg problem
from .base import XpathWdBase
from .logger import Logger
_set_log_level(XpathWdBase, Logger)
def _set_log_level(base_cls, logger_cls):
logger_cls.default_level = global_settings.get('log_level_default')
base_cls.log.setLevel(global_settings.get('log_level_default'))
def solve_settings(module_name=None, file_path=None):
global global_settings
if not global_settings:
file_path = file_path or os.environ.get('XPATHWD_SETTINGS_FILE')
if file_path:
if not os.path.exists(file_path):
raise LookupError(f'No such settings file {file_path}')
spec = importlib.util.spec_from_file_location('xpathwebdriver_custom_settings_file', file_path)
mod = importlib.util.module_from_spec(spec)
else:
module_name = module_name or os.environ.get('XPATHWD_SETTINGS_MODULE',
'xpathwebdriver.default_settings')
mod = importlib.import_module(module_name)
_register_settings_module(mod)
return global_settings
def smoke_test_module():
from .logger import log_test
global called_once
called_once = True
log_test(solve_settings())
#set_log_level()
if __name__ == "__main__":
smoke_test_module()
``` |
{
"source": "joaeechew/toxic",
"score": 3
} |
#### File: toxic/src/train-model.py
```python
import os
import numpy as np
import pandas as pd
import pickle
from numpy import asarray
from numpy import zeros
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import Model
from keras.models import Sequential
from keras.models import model_from_json
from keras.wrappers.scikit_learn import KerasClassifier
from keras.layers import Input
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import Dropout
from keras.layers import Embedding
from keras.layers.convolutional import Conv1D
from keras.layers.convolutional import MaxPooling1D
from keras.layers.merge import concatenate
# ## Settings
model_name = 'glove_model_'
seed = 42
np.random.seed(seed)
# ## Import data
dir_path = os.path.realpath('..')
path = 'data/processed/train.csv'
full_path = os.path.join(dir_path, path)
df = pd.read_csv(full_path, header=0, index_col=0)
print("Training set has {} rows, {} columns.".format(*df.shape))
# ## Train test split
test_size = 0.2
target = ['toxic', 'severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate']
X = df.drop(target, axis=1)
y = df[target]
corpus = 'comment_text'
Xtrain, Xtest, ytrain, ytest = train_test_split(X, y, test_size=test_size, random_state=seed)
# ## Pre-processing
# prepare tokenizer
t = Tokenizer()
t.fit_on_texts(Xtrain[corpus].astype(str))
#define vocab size and max len
vocab_size = len(t.word_index) + 1
max_length = max([len(s.split()) for s in Xtrain[corpus]])
print('Vocabulary size: %d' % vocab_size)
print('Maximum length: %d' % max_length)
# integer encode the documents
encoded_Xtrain = t.texts_to_sequences(Xtrain[corpus].astype(str))
encoded_Xtest = t.texts_to_sequences(Xtest[corpus].astype(str))
# pad documents
padded_train = pad_sequences(encoded_Xtrain, maxlen=max_length, padding='post')
padded_test = pad_sequences(encoded_Xtest, maxlen=max_length, padding='post')
# load the embedding into memory
embeddings_index = dict()
f = open('/Users/joaeechew/dev/glove.6B/glove.6B.100d.txt', mode='rt', encoding='utf-8')
for line in f:
values = line.split()
word = values[0]
coefs = asarray(values[1:], dtype='float32')
embeddings_index[word] = coefs
f.close()
print('Loaded %s word vectors.' % len(embeddings_index))
# create a weight matrix for words in training docs
embedding_matrix = zeros((vocab_size, 100))
for word, i in t.word_index.items():
embedding_vector = embeddings_index.get(word)
if embedding_vector is not None:
embedding_matrix[i] = embedding_vector
# saving
with open('tokenizer.pickle', 'wb') as handle:
    pickle.dump(t, handle, protocol=pickle.HIGHEST_PROTOCOL)
# ## Model fit
# Function to create model, required for KerasClassifier
def create_model(optimizer='adam', vocab_size=vocab_size, max_length=max_length):
    model = Sequential()
    # initialise the embedding layer with the pre-trained GloVe weight matrix built above
    model.add(Embedding(vocab_size, 100, weights=[embedding_matrix], input_length=max_length))
    model.add(Conv1D(filters=32, kernel_size=8, activation='relu'))
    model.add(MaxPooling1D(pool_size=2))
    model.add(Flatten())
    model.add(Dense(10, activation='relu'))
    model.add(Dense(1, activation='sigmoid'))
    # compile network with the optimizer argument so the grid search parameter takes effect
    model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
# summarize defined model
# model.summary()
# plot_model(model, to_file='model.png', show_shapes=True)
return model
def save_model(model, model_path):
# serialize model to JSON
model_json = model.to_json()
with open(model_path + ".json", "w") as json_file:
json_file.write(model_json)
# serialize weights to HDF5
model.save_weights(model_path + ".h5")
print("Saved model to disk")
model = KerasClassifier(build_fn=create_model, epochs=1, batch_size=10, verbose=1)
# Tuning the model
param_grid = { "clf__optimizer": ['Adam']
}
# Notes:
# - Important parameters: kernel size, no. of feature maps
# - 1-max pooling generally outperforms otehr types of pooling
# - Dropout has little effect
# - Gridsearch across kernel size in the range 1-10
# - Search no. of filters from 100-600 and dropout of 0.0-0.5
# - Explore tanh, relu, linear activation functions
# Define pipeline
pipeline = Pipeline([
('clf', model)
])
# fit the model
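# one independent binary classifier is trained per label, treating the
# multi-label task as six separate binary problems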
for label in target:
print('... Processing {}'.format(label))
y = ytrain[label]
# train the model
grid = GridSearchCV(pipeline, param_grid=param_grid, verbose=1, cv=2)
grid_result = grid.fit(padded_train, y)
# summarize results
print("Best {} : {} using {}".format(label, grid_result.best_score_, grid_result.best_params_))
means = grid_result.cv_results_['mean_test_score']
stds = grid_result.cv_results_['std_test_score']
params = grid_result.cv_results_['params']
for mean, stdev, param in zip(means, stds, params):
print("%f (%f) with: %r" % (mean, stdev, param))
# save the model
trained_model = grid_result.best_estimator_.named_steps['clf'].model
model_path = os.path.join(dir_path, 'models', model_name+label)
save_model(trained_model, model_path)
# ## Evaluation
from sklearn.metrics import log_loss
def load_model(model_path):
# load json and create model
json_file = open(model_path+'.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights(model_path+".h5")
print("Loaded model from disk")
return loaded_model
y_pred = pd.DataFrame(index=ytest.index, columns=target)
scores =[]
for label in target:
print('... Processing {}'.format(label))
model_path = os.path.join(dir_path, 'models', model_name+label)
# load the model
loaded_model = load_model(model_path)
# evaluate model on test dataset
y_pred[label] = loaded_model.predict(padded_test, verbose=1, batch_size=1)
loss = log_loss(ytest[label], y_pred[label])
scores.append(loss)
print("Log loss for {} is {} .".format(label, loss))
print("Combined log loss is {} .".format(np.mean(scores)))
``` |
{
"source": "joaen/je-maya-tools",
"score": 3
} |
#### File: je-maya-tools/utilities/look_at_target.py
```python
import maya.cmds as cmds
import maya.api.OpenMaya as om
def look(looker, target):
looker_transform = looker
target_transform = target
looker_vec_position = cmds.xform(looker_transform, query=True, worldSpace=True, translation=True)
target_position = cmds.xform(target_transform, query=True, worldSpace=True, translation=True)
looker_vec = om.MVector(looker_vec_position)
target_vec = om.MVector(target_position)
world_up_vec = om.MVector(0,1,0)
# Look at direction / forward direction
z_vec = om.MVector(target_vec - looker_vec).normalize()
# Side direction
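    # ('^' is the MVector cross-product operator in maya.api.OpenMaya)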
x_vec = om.MVector(world_up_vec ^ z_vec).normalize()
# Calculate local up direction
y_vec = om.MVector(z_vec ^ x_vec).normalize()
# Compose the rotation matrix using the directions and the position of the looker object
x = [x_vec.x, x_vec.y, x_vec.z, 0]
y = [y_vec.x, y_vec.y, y_vec.z, 0]
z = [z_vec.x, z_vec.y, z_vec.z, 0]
o = [looker_vec.x, looker_vec.y, looker_vec.z, 1]
matrix_list = [x, y, z, o]
look_at_matrix = om.MMatrix(matrix_list)
# Rotate the looker object using the rotation matrix
cmds.xform(looker_transform, matrix=look_at_matrix)
``` |
{
"source": "joafr/litex",
"score": 2
} |
#### File: cpu/zynqmp/core.py
```python
from migen import *
from litex.soc.cores.cpu import CPU
from litex.soc.interconnect import axi
class ZynqMP(CPU):
variants = ["standard"]
family = "aarch64"
name = "zynqmp"
human_name = "Zynq Ultrascale+ MPSoC"
data_width = 64
endianness = "little"
reset_address = 0xc000_0000
gcc_triple = "aarch64-none-elf"
gcc_flags = ""
linker_output_format = "elf64-littleaarch64"
nop = "nop"
io_regions = { # Origin, Length.
0x8000_0000: 0x4000_0000,
0xe000_0000: 0xff_2000_0000 # TODO: there are more details here
}
@property
def mem_map(self):
return {
"sram": 0x0000_0000, # DDR low in fact
"rom": 0xc000_0000, # Quad SPI memory
}
def __init__(self, platform, variant, *args, **kwargs):
super().__init__(*args, **kwargs)
self.platform = platform
self.reset = Signal()
self.periph_buses = [] # Peripheral buses (Connected to main SoC's bus).
self.memory_buses = [] # Memory buses (Connected directly to LiteDRAM).
self.axi_gp_masters = [None] * 3 # General Purpose AXI Masters.
self.clock_domains.cd_ps = ClockDomain()
self.ps_name = "ps"
self.ps_tcl = []
self.config = {'PSU__FPGA_PL0_ENABLE': 1} # enable pl_clk0
rst_n = Signal()
self.cpu_params = dict(
o_pl_clk0=ClockSignal("ps"),
o_pl_resetn0=rst_n
)
self.comb += ResetSignal("ps").eq(~rst_n)
self.ps_tcl.append(f"set ps [create_ip -vendor xilinx.com -name zynq_ultra_ps_e -module_name {self.ps_name}]")
def add_axi_gp_master(self, n=0, data_width=32):
assert n < 3 and self.axi_gp_masters[n] is None
assert data_width in [32, 64, 128]
axi_gpn = axi.AXIInterface(data_width=data_width, address_width=32, id_width=16)
self.config[f'PSU__USE__M_AXI_GP{n}'] = 1
self.config[f'PSU__MAXIGP{n}__DATA_WIDTH'] = data_width
        self.axi_gp_masters[n] = axi_gpn  # store at index n so the assert above catches reuse
xpd = {0 : "fpd", 1 : "fpd", 2 : "lpd"}[n]
self.cpu_params[f"i_maxihpm0_{xpd}_aclk"] = ClockSignal("ps")
layout = axi_gpn.layout_flat()
dir_map = {DIR_M_TO_S: 'o', DIR_S_TO_M: 'i'}
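        # wire every flattened AXI signal to the matching PS instance port,
        # choosing the 'o_'/'i_' prefix from the signal direction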
for group, signal, direction in layout:
sig_name = group + signal
if sig_name in ['bfirst', 'blast', 'rfirst', 'arfirst', 'arlast', 'awfirst', 'awlast', 'wfirst', 'wid']:
continue
direction = dir_map[direction]
self.cpu_params[f'{direction}_maxigp{n}_{group}{signal}'] = getattr(getattr(axi_gpn, group), signal)
return axi_gpn
def do_finalize(self):
if len(self.ps_tcl):
self.ps_tcl.append("set_property -dict [list \\")
for config, value in self.config.items():
self.ps_tcl.append("CONFIG.{} {} \\".format(config, '{{' + str(value) + '}}'))
self.ps_tcl.append(f"] [get_ips {self.ps_name}]")
self.ps_tcl += [
f"generate_target all [get_ips {self.ps_name}]",
f"synth_ip [get_ips {self.ps_name}]"
]
self.platform.toolchain.pre_synthesis_commands += self.ps_tcl
self.specials += Instance(self.ps_name, **self.cpu_params)
``` |
{
"source": "joagonzalez/tateti",
"score": 3
} |
#### File: doc/cielo/cielo.py
```python
import random
from estrella import Estrella
class Cielo():
def __init__(self, filas, columnas):
self.filas = filas
self.columnas = columnas
self.cielo = []
for i in range(filas):
self.cielo.append([])
for j in range(columnas):
self.cielo[i].append(" ")
def poner_estrellas(self, cant_estrellas):
for i in range(cant_estrellas):
x = random.randint(0, self.columnas - 1)
y = random.randint(0, self.filas - 1)
estrella = Estrella(x, y)
self.cielo[y][x] = estrella
def mostrar(self):
for i in range(self.filas):
for j in range(self.columnas):
print(self.cielo[i][j], end="")
print()
```
#### File: tateti/src/test.py
```python
from game import Game, Player, Board
import sys
def test_player():
print("Player() class tests")
dimension = 3
board_player = Board(dimension)
print("Imprimimos tablero vacio: ")
print(board_player)
board_player.update_board([0, 2], 'X')
board_player.update_board([0, 0], 'O')
board_player.update_board([1, 2], 'X')
board_player.update_board([2, 2], 'X')
board_player.update_board([1, 0], 'X')
board_player.update_board([2, 0], 'O')
board_player.update_board([0, 1], 'O')
board_player.update_board([1, 1], 'X')
#board_player.update_board([2, 1], 'X')
print(board_player)
player_1 = Player('Joaquin', 0, 0, 0)
player_2 = Player('Xano', 1, 1, 1)
print(player_1)
print(player_2)
player_1.movement(board_player)
print(board_player)
print(board_player.is_tateti())
def test_board():
print("Board() class tests")
dimension = int(sys.argv[1])
board = Board(dimension)
board_2 = Board(dimension)
print("Imprimimos tablero vacio: ")
print(board)
board.update_board([0, 2], 'X')
board.update_board([0, 0], 'O')
board.update_board([1, 2], 'X')
board.update_board([2, 2], 'X')
board.update_board([1, 0], 'X')
board.update_board([2, 0], 'O')
board.update_board([0, 1], 'O')
board.update_board([1, 1], 'X')
board.update_board([2, 1], 'X')
if dimension == 4:
board.update_board([3, 3], 'X')
print("Imprimimos tablero con contenido: ")
print(board)
print(board.is_tateti())
print(board.get_board())
print(board.get_id())
print(board.get_dimension())
# board_2
print(board_2)
print(board_2.is_tateti())
board_2.update_board([0, 0], 'X')
print(board_2)
print(board_2.is_tateti())
def test_game():
print("Game() class tests")
game_1 = Game('Joaquin', 'Morita', 0, 1, 0, 1, 0, 1)
game_2 = Game('Julia', 'Ramiro', 0, 1, 0, 1, 0, 1)
    print(game_1)
    print()
    print(game_2)
    print()
    print(game_1.get_player('Morita'))
    print()
    print(game_1.get_board())
game_1.movement('Joaquin')
print(game_1.get_board())
board = game_1.get_board()
print(board.get_id())
print(game_1.is_winner())
if __name__ == "__main__":
test_board()
test_player()
test_game()
``` |
{
"source": "joaherrerama/openeo-grassgis-driver",
"score": 2
} |
#### File: openeo_grass_gis_driver/actinia_processing/evi_process.py
```python
from random import randint
import json
from openeo_grass_gis_driver.models.process_graph_schemas import \
ProcessGraph, ProcessGraphNode
from openeo_grass_gis_driver.actinia_processing.base import \
PROCESS_DICT, PROCESS_DESCRIPTION_DICT, Node, \
check_node_parents, DataObject, GrassDataType, \
create_output_name
from openeo_grass_gis_driver.models.process_schemas import \
Parameter, ProcessDescription, ReturnValue, ProcessExample
__license__ = "Apache License, Version 2.0"
__author__ = "<NAME>"
__copyright__ = "Copyright 2019, <NAME>, mundialis"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
# not in the official list
PROCESS_NAME = "evi"
def create_process_description():
    p_red = Parameter(
        description="Any openEO process object that returns a single space-time raster dataset "
        "that contains the RED band for EVI computation.", schema={
            "type": "object", "subtype": "raster-cube"}, optional=False)
    p_nir = Parameter(
        description="Any openEO process object that returns a single space-time raster dataset "
        "that contains the NIR band for EVI computation.", schema={
            "type": "object", "subtype": "raster-cube"}, optional=False)
    p_blue = Parameter(
        description="Any openEO process object that returns a single space-time raster dataset "
        "that contains the BLUE band for EVI computation.", schema={
            "type": "object", "subtype": "raster-cube"}, optional=False)
p_scale = Parameter(description="Scale factor to convert band values",
schema={"type": "object", "subtype": "float"},
optional=True)
rv = ReturnValue(description="Processed EO data.",
schema={"type": "object", "subtype": "raster-cube"})
# Example
arguments = {
"red": {"from_node": "get_red_data"},
"nir": {"from_node": "get_nir_data"},
"blue": {"from_node": "get_blue_data"},
}
node = ProcessGraphNode(process_id=PROCESS_NAME, arguments=arguments)
graph = ProcessGraph(
title="title",
description="description",
process_graph={
"evi_1": node})
examples = [
ProcessExample(
title="Simple example",
description="Simple example",
process_graph=graph)]
pd = ProcessDescription(
id=PROCESS_NAME,
description="Compute the EVI based on the red, nir, and blue bands of the input datasets.",
summary="Compute the EVI based on the red, nir, and blue bands of the input datasets.",
parameters={
"red": p_red,
"nir": p_nir,
"blue": p_blue,
"scale": p_scale},
returns=rv,
examples=examples)
return json.loads(pd.to_json())
PROCESS_DESCRIPTION_DICT[PROCESS_NAME] = create_process_description()
def create_process_chain_entry(
nir_time_series: DataObject,
red_time_series: DataObject,
blue_time_series: DataObject,
scale: float,
output_time_series: DataObject):
"""Create a Actinia process description that uses t.rast.mapcalc to create the EVI time series
:param nir_time_series: The NIR band time series name
:param red_time_series: The RED band time series name
:param blue_time_series: The BLUE band time series name
:param scale: scale factor
:param output_time_series: The name of the output time series
:return: A list of Actinia process chain descriptions
"""
rn = randint(0, 1000000)
pc = [
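        # t.rast.mapcalc evaluates the standard EVI formula
        #   2.5 * (NIR - RED) / (NIR + 6*RED - 7.5*BLUE + 1)
        # with each band rescaled by the 'scale' factor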
{"id": "t_rast_mapcalc_%i" % rn,
"module": "t.rast.mapcalc",
"inputs": [{"param": "expression",
"value": "%(result)s = float(2.5 * %(scale)s * (%(nir)s - %(red)s)/"
"(%(nir)s * %(scale)s + 6.0 * %(red)s * %(scale)s -7.5 * %(blue)s * %(scale)s + 1.0))" % {
"result": output_time_series.grass_name(),
"nir": nir_time_series.grass_name(),
"red": red_time_series.grass_name(),
"blue": blue_time_series.grass_name(),
"scale": scale}},
{"param": "inputs",
"value": "%(nir)s,%(red)s,%(blue)s" % {"nir": nir_time_series.grass_name(),
"red": red_time_series.grass_name(),
"blue": blue_time_series.grass_name()}},
{"param": "basename",
"value": output_time_series.name},
{"param": "output",
"value": output_time_series.grass_name()}]},
{"id": "t_rast_color_%i" % rn,
"module": "t.rast.colors",
"inputs": [{"param": "input",
"value": output_time_series.grass_name()},
{"param": "color",
"value": "ndvi"}]}]
return pc
def get_process_list(node: Node):
"""Analyse the process description and return the Actinia process chain and the name of the processing result
:param node: The process node
:return: (output_objects, actinia_process_list)
"""
input_objects, process_list = check_node_parents(node=node)
output_objects = []
# First analyse the data entries
if "red" not in node.arguments:
raise Exception("Process %s requires parameter <red>" % PROCESS_NAME)
if "nir" not in node.arguments:
raise Exception("Process %s requires parameter <nir>" % PROCESS_NAME)
if "blue" not in node.arguments:
raise Exception("Process %s requires parameter <blue>" % PROCESS_NAME)
# Get the red and nir data separately
red_input_objects = node.get_parent_by_name(
parent_name="red").output_objects
nir_input_objects = node.get_parent_by_name(
parent_name="nir").output_objects
blue_input_objects = node.get_parent_by_name(
parent_name="blue").output_objects
if not red_input_objects:
raise Exception(
"Process %s requires an input strds for band <red>" %
PROCESS_NAME)
if not nir_input_objects:
raise Exception(
"Process %s requires an input strds for band <nir>" %
PROCESS_NAME)
if not blue_input_objects:
raise Exception(
"Process %s requires an input strds for band <blue>" %
PROCESS_NAME)
scale = 1.0
if "scale" in node.arguments:
scale = float(node.arguments["scale"])
red_strds = list(red_input_objects)[-1]
nir_strds = list(nir_input_objects)[-1]
blue_strds = list(blue_input_objects)[-1]
output_objects.extend(list(red_input_objects))
output_objects.extend(list(nir_input_objects))
output_objects.extend(list(blue_input_objects))
output_object = DataObject(
name=create_output_name(red_strds.name, node),
datatype=GrassDataType.STRDS)
output_objects.append(output_object)
node.add_output(output_object=output_object)
pc = create_process_chain_entry(
nir_strds,
red_strds,
blue_strds,
scale,
output_object)
process_list.extend(pc)
return output_objects, process_list
PROCESS_DICT[PROCESS_NAME] = get_process_list
```
#### File: openeo_grass_gis_driver/actinia_processing/run_udf_process.py
```python
import json
from openeo_grass_gis_driver.models.process_graph_schemas import \
ProcessGraphNode, ProcessGraph
from openeo_grass_gis_driver.actinia_processing.base import \
Node, check_node_parents, DataObject, GrassDataType, \
create_output_name
from openeo_grass_gis_driver.actinia_processing.base import \
PROCESS_DICT, PROCESS_DESCRIPTION_DICT
from openeo_grass_gis_driver.models.process_schemas import \
Parameter, ProcessDescription, ReturnValue, ProcessExample
__license__ = "Apache License, Version 2.0"
__author__ = "<NAME>"
__copyright__ = "Copyright 2018, <NAME>, mundialis"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
PROCESS_NAME = "run_udf"
def create_process_description():
p_data = Parameter(description="The data to be passed to the UDF as array or raster data cube.",
schema=[
{
"title": "Raster data cube",
"type": "object",
"subtype": "raster-cube"
},
{
"title": "Array",
"type": "array",
"minItems": 1,
"items": {
"description": "Any data type."
}
},
{
"title": "Single Value",
"description": "A single value of any data type."
}
],
optional=False)
p_udf = Parameter(description="Either source code, an absolute URL or a path to an UDF script.",
schema=[{"description": "URI to an UDF",
"type": "string",
"format": "uri",
"subtype": "uri"},
{"description": "Path to an UDF uploaded to the server.",
"type": "string",
"subtype": "file-path"},
{"description": "Source code as string",
"type": "string",
"subtype": "udf-code"}],
optional=False)
p_runtime = Parameter(
description="An UDF runtime identifier available at the back-end.",
schema={
"type": "string",
"subtype": "udf-runtime"},
optional=False)
p_version = Parameter(description="An UDF runtime version. If set to `null`, "
"the default runtime version specified for each runtime is used.",
schema=[{"type": "string",
"subtype": "udf-runtime-version"},
{"title": "Default runtime version",
"type": "null"}],
optional=True)
p_context = Parameter(
description="Additional data such as configuration options "
"that should be passed to the UDF.", schema={
"type": "object"}, optional=True)
rv = ReturnValue(
description="The data processed by the UDF. Returns a raster data cube "
"if a raster data cube was passed for `data`. If an array was "
"passed for `data`, the returned value is defined by the context "
"and is exactly what the UDF returned.", schema=[
{
"title": "Raster data cube", "type": "object", "subtype": "raster-cube"}, {
"title": "Any", "description": "Any data type."}])
# Example
arguments = {
"data": {"from_node": "get_strds_data"},
"udf": "some source code"}
node = ProcessGraphNode(process_id=PROCESS_NAME, arguments=arguments)
graph = ProcessGraph(
title="title",
description="description",
process_graph={
"run_udf1": node})
examples = [
ProcessExample(
title="Simple example",
description="Simple example",
process_graph=graph)]
pd = ProcessDescription(
id=PROCESS_NAME,
description="Runs an UDF in one of the supported runtime environments.",
summary="Run an UDF",
parameters={
"data": p_data,
"udf": p_udf,
"runtime": p_runtime,
"version": p_version,
"context": p_context},
returns=rv,
examples=examples)
return json.loads(pd.to_json())
PROCESS_DESCRIPTION_DICT[PROCESS_NAME] = create_process_description()
def create_process_chain_entry(input_object, python_file_url,
udf_runtime, udf_version, output_object):
"""Create a Actinia command of the process chain that uses t.rast.udf
:param strds_name: The name of the strds
:param python_file_url: The URL to the python file that defines the UDF
:param output_name: The name of the output raster layer
:return: A Actinia process chain description
"""
# rn = randint(0, 1000000)
pc = {"id": "t_rast_udf",
"module": "t.rast.udf",
"inputs": [{"import_descr": {"source": python_file_url,
"type": "file"},
"param": "pyfile",
"value": "$file::my_py_func"},
{"param": "input",
"value": input_object.grass_name()},
{"param": "output",
"value": output_object.grass_name()}]}
return pc
def get_process_list(node: Node):
"""Analyse the process description and return the Actinia process chain
and the name of the processing result layer which is a single raster layer
:param args: The process description
:return: (output_names, actinia_process_list)
"""
# Get the input description and the process chain to attach this process
input_objects, process_list = check_node_parents(node=node)
output_objects = []
input_objects = node.get_parent_by_name(parent_name="data").output_objects
python_file_url = node.arguments["udf"]
udf_runtime = None
if "runtime" in node.arguments:
udf_runtime = node.arguments["runtime"]
udf_version = None
if "version" in node.arguments:
udf_version = node.arguments["version"]
for input_object in input_objects:
output_object = DataObject(
name=create_output_name(input_object.name, node),
datatype=GrassDataType.STRDS)
output_objects.append(output_object)
pc = create_process_chain_entry(input_object,
python_file_url,
udf_runtime, udf_version,
output_object)
process_list.append(pc)
return output_objects, process_list
PROCESS_DICT[PROCESS_NAME] = get_process_list
``` |
{
"source": "jo-ai-chim/Currency_Predictor_Project",
"score": 3
} |
#### File: Currency_Predictor_Project/data/CP_ETL_pipeline.py
```python
import sys
import pandas as pd
import numpy as np
import yfinance as yf
import pycountry
import sqlite3
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import plotly.express as px
def load_data(path_input_index, path_input_currencies):
'''
INPUT:
df_input_index - filepath to the csv file with the indices to download from yfinance
path_input_currencies - filepath to the csv file with the currencies to download from yfinance
OUTPUT:
df_index - dataframe with the downloaded indices
df_currency - dataframe with the downloaded currencies
This function reads in two csv files as input and creates a dataframe for the indices and the currencies based on that input
'''
# load index input file
df_input_index = pd.read_csv(path_input_index, sep=';', index_col='Index')
# load currency dataset
df_input_currencies = pd.read_csv(path_input_currencies, sep=';')
#create the dataframe for the indices
df_index = pd.DataFrame()
#loop through the input dataframe and append one column to the df_index dataframe
for index, row in df_input_index.iterrows():
# create yfinance object and get historical market data
yf_temp = yf.Ticker(index)
df_temp = yf_temp.history(period="max")
#drop not needed columns of returned dataframe
        df_temp = df_temp.drop(df_temp.columns.difference(['Open', 'High', 'Low', 'Close']), 1)
#rename left column
df_temp = df_temp.rename(columns={'Open': row['Country'] + '_' + index + '_Open',
'High': row['Country'] + '_' + index + '_High',
'Low': row['Country'] + '_' + index + '_Low',
'Close': row['Country'] + '_' + index + '_Close'})
df_index = df_index.join(df_temp, how='outer')
#Loop over the currencies and append one column (named <Currency>) which contains the close value for each currency
df_currency = pd.DataFrame()
list_currencies = df_input_currencies.Currency.tolist()
list_currencies = list(dict.fromkeys(list_currencies))
for currency in list_currencies:
# create yfinance object and get historical market data
yf_temp = yf.Ticker(currency)
df_temp = yf_temp.history(period="max")
#drop not needed columns of returned dataframe
        df_temp = df_temp.drop(df_temp.columns.difference(['Open', 'High', 'Low', 'Close']), 1)
#rename left column
df_temp = df_temp.rename(columns={'Open': currency + '_Open',
'High': currency + '_High',
'Low': currency + '_Low',
'Close': currency + '_Close'})
df_currency = df_currency.join(df_temp, how='outer')
return df_index, df_currency
def clean_df(df, block_size=100, values_from_start=3500, nan_allowed=5/7):
'''
INPUT:
df - dataframe to be checked
block_size - size if the checked blocks
values_from_start - minimum number of values a column must have without any gaps (from the bottom)
nan_allowed - float which states how many Nan values are allowed
OUTPUT:
df_clean - dataframe with only the columns left which fullfill the required input
This function cleans a dataframe by dropping all columns which have to less values
without a gap and afterwards it fills the NaN values in the remaining columns
'''
#Cleaning the loaded data
df_clean = df.copy()
#Eliminate the columns with big gaps in the last values_from_start entries (minimum datapoints for traing the model)
length = df_clean.shape[0]
for i in range(int(values_from_start/block_size)):
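        #keep only columns whose share of NaN values within this block of rows stays below the allowed threshold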
df_clean = df_clean.loc[:, df_clean.iloc[(length-(i+1)*block_size):(length-i*block_size)].isnull().mean() < nan_allowed]
#Use Front fill only for columns with Close in name - nan values in the middle occure only because of bank holiday's
columns_list = df_clean.columns.tolist()
close_columns_list = []
for column in columns_list:
if '_Close' in column:
close_columns_list.append(column)
df_clean[[column]] = df_clean[[column]].fillna(method='ffill')
#drop all rows which still have nan values in a close Column
df_clean = df_clean.dropna(subset=close_columns_list)
#for left NaN values use a backfill over the rows therefore on bank holidays the values will be the entire day constant
df_clean = df_clean.fillna(axis=1, method='bfill')
#drop all rows which still have nan values
df_clean = df_clean.dropna(how='any')
return df_clean
def print_results(df, df_cleaned, subject):
'''
INPUT:
df - original dataframe
df_cleaned - cleaned dataframe
subject - indices or currencies
This function prints the results of the cleaning to the console.
'''
#compare how many datapoints are left
number_values_originally = df.shape[0]
number_values_left = df_cleaned.shape[0]
print('For the ' + subject + ' data ' + str(number_values_left) + ' datapoints are left from ' + str(number_values_originally) + '.')
#get the number of droped and kept indices
number_of_originally_columns = df.shape[1]/4
number_of_kept_columns = df_cleaned.shape[1]/4
print('For the ' + subject + ' ' + str(number_of_kept_columns) + ' from ' + str(number_of_originally_columns) + ' provided ' + subject + ' could be kept.')
def save_data(df_index_clean, df_currency_clean, database_filename):
'''
INPUT:
df_index_clean - cleaned dataframe with the remaining index data
df_currency_clean - cleaned dataframe with the remaining currency data
database_filename - Name of the database the cleaned data should be stored in
This function saves the clean dataset into an sqlite database by using pandas to_sql method combined with the SQLAlchemy library.
'''
engine = create_engine('sqlite:///' + database_filename)
Base = declarative_base()
Base.metadata.drop_all(engine)
df_index_clean.to_sql('index_data', engine, index=True, if_exists='replace')
df_currency_clean.to_sql('currency_data', engine, index=True, if_exists='replace')
def main():
'''
This is the main function which is executed when calling the CP_ETL_pipeline.py file over the console. It reads in the arguments
- path_input_index
- path_input_currencies
- block_size
- values_from_start
- nan_allowed
- database_filename
and executes the functions above. If one argument is missing an error is raised.
'''
if len(sys.argv) == 7:
path_input_index = str(sys.argv[1])
path_input_currencies = str(sys.argv[2])
block_size = int(sys.argv[3])
values_from_start = int(sys.argv[4])
nan_allowed = float(sys.argv[5])
database_filename = str(sys.argv[6])
print('Loading data...\n INDICES: {}\n CURRENCIES: {}'
.format(path_input_index, path_input_currencies))
df_index, df_currency = load_data(path_input_index, path_input_currencies)
print('Cleaning currency data...')
df_currency_clean = clean_df(df_currency, block_size, values_from_start, nan_allowed)
print_results(df_currency, df_currency_clean, 'currencies')
print('Cleaning index data...')
df_index_clean = clean_df(df_index, block_size, values_from_start, nan_allowed)
print_results(df_index, df_index_clean, 'indices')
print('Saving data...\n DATABASE: {}'.format(database_filename))
save_data(df_index_clean, df_currency_clean, database_filename)
print('Cleaned data saved to database!')
else:
print('Please provide the filepaths of the input for the currencies and '\
'indices as the first and second argument respectively, as '\
'well as the blocksize, values from start and allowed nan values '\
'as the third, fourth and fifth argument and the filepath of the '\
'database to save the cleaned data to as the sixth argument. '\
'\n\nExample: python CP_ETL_pipeline.py '\
'Input_index.csv Input_currencies.csv 100 4100 0.7143 '\
'db_currency_predictor.db')
if __name__ == '__main__':
main()
``` |
{
"source": "jo-ai-chim/Project_Google_Trends_Prediction",
"score": 3
} |
#### File: jo-ai-chim/Project_Google_Trends_Prediction/googletrends_prediction.py
```python
from pytrends.request import TrendReq
from time import sleep
import numpy as np
import pandas as pd
import datetime
def create_google_trend_df(kw_list, start_year = 2004):
'''
INPUT:
kw_list - a list of search terms
start_year - the year (as integer) from where on the google trend results should be exported
OUTPUT:
    input_df - dataframe with one column for each term in the kw_list and one row for every date where at least one of the terms has a value in google trends
    This function calls google trends once per half-year window and merges the results into one dataframe
'''
# Login to Google. Only need to run this once, the rest of requests will use the same session.
pytrend = TrendReq(hl='en-US', tz=360, timeout=(10,25), retries=2, backoff_factor=0.1)
#Loop to add each word to the dataframe used for the model
year = datetime.date.today().year
counter = year - start_year
creation_counter_word = 0
for word in kw_list:
        #iterate over half-year windows so that google trends returns a daily value (longer timeframes are downsampled)
creation_counter_date = 0
for i in range(counter + 1):
#create the strings for the two timeslots
time_slot_1 = str(start_year + i) + '-01-01 ' + str(start_year + i) + '-06-30'
time_slot_2 = str(start_year + i) + '-07-01 ' + str(start_year + i) + '-12-31'
#Avoid Error Code 429
sleep(1)
# Create payload and capture API tokens. Only needed for interest_over_time(), interest_by_region() & related_queries()
pytrend.build_payload([word], cat=0, timeframe=time_slot_1,geo='',gprop='')
# Interest Over Time
interest_over_time_df = pytrend.interest_over_time()
#create the goal dataframe or append it to the goal dataframe
if creation_counter_date == 0:
input_df_temp = interest_over_time_df
else:
input_df_temp = input_df_temp.append(interest_over_time_df)
creation_counter_date += 1
# Create payload and capture API tokens. Only needed for interest_over_time(), interest_by_region() & related_queries()
pytrend.build_payload([word], cat=0, timeframe=time_slot_2,geo='',gprop='')
# Interest Over Time
interest_over_time_df = pytrend.interest_over_time()
#create the goal dataframe or append it to the goal dataframe
if creation_counter_date == 0:
input_df_temp = interest_over_time_df
else:
input_df_temp = input_df_temp.append(interest_over_time_df)
creation_counter_date += 1
#Drop isPartial column and merge the dataframe with to the goal data fram
input_df_temp = input_df_temp.drop(['isPartial'], axis=1)
#create goal dataframe or merge the result for the current word to it if it already exists
if creation_counter_word == 0:
input_df = input_df_temp
else:
input_df = input_df.merge(input_df_temp, left_on='date', right_on='date', how='outer')
creation_counter_word += 1
return input_df
``` |
{
"source": "joakimeriksson/ai-smarthome",
"score": 3
} |
#### File: ai-smarthome/roland/read-svz.py
```python
import sys
# Offset from start
startOffset = 128
patch = {
'name':{'offset':8, 'len':16, 'type':'string'}
}
def get_data(name, bytes):
global patch, startOffset
if name in patch:
t = patch[name]['type']
offset = patch[name]['offset']
len = patch[name]['len']
if t == 'string':
return str(bytes[startOffset + offset : startOffset + offset + len], "UTF-8")
return "no-found."
def conv(b):
if b > 30 and b < 127:
c = chr(b)
else:
c = '.'
return c
print("Loading", sys.argv[1])
with open(sys.argv[1], "rb") as f:
bytes_read = f.read()
bytes_read2 = None
if len(sys.argv) > 2:
with open(sys.argv[2], "rb") as f2:
bytes_read2 = f2.read()
s = ""
s2 = ""
d2 = ""
diff = False
i = 0
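# hex-dump 16 bytes per row with an ASCII column; when a second file is given,
# rows that differ are followed by a '#' line showing the other file's bytes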
for adr, b in enumerate(bytes_read):
if i == 0:
print("%04d " % adr, end='')
s = s + conv(b)
if bytes_read2:
b2 = bytes_read2[adr]
s2 = s2 + conv(b2)
d2 = d2 + "%02x" % b2
if b != bytes_read2[adr]:
diff = True
print("%02x" %b, end='')
i = i + 1
if (i == 16):
print(" " + s)
if diff: print("# " + d2 + " " + s2)
i = 0
s = ""
s2 = ""
d2 = ""
diff = False
if bytes_read[0:3] == b'SVZ':
print("")
print("Roland SVZ file")
print("First sound:", get_data('name', bytes_read))
``` |
{
"source": "JoakimEwenson/fastapi-telldus",
"score": 3
} |
#### File: fastapi-telldus/controller/caller.py
```python
import json
import os
from datetime import datetime
from requests_oauthlib import OAuth1Session
# Set up OAuth against Telldus Live API
telldus_oauth1_session = os.environ.get('TELLDUS_OAUTH1_SESSION')
telldus_client_secret = os.environ.get('TELLDUS_CLIENT_SECRET')
telldus_resource_owner_key = os.environ.get('TELLDUS_RESOURCE_OWNER_KEY')
telldus_resource_owner_secret = os.environ.get('TELLDUS_RESOURCE_OWNER_SECRET')
telldus_user = OAuth1Session(telldus_oauth1_session,
client_secret=telldus_client_secret,
resource_owner_key=telldus_resource_owner_key,
resource_owner_secret=telldus_resource_owner_secret)
# Base URL for the API
base_url = "https://api.telldus.com/json"
'''
SensorObject holding the data of a Telldus sensor; attributes are filled in from the API response.
Note that last_updated-values of all sorts are a Unix timestamp and might
need some adjusting to display correct values.
'''
class SensorObject():
sensor_id: str
client_name: str
name: str
last_updated: datetime
ignored: bool
editable: bool
temp_value: float
temp_last_updated: datetime
temp_max_value: float
temp_max_time: datetime
temp_min_value: float
temp_min_time: datetime
humidity_value: float
humidity_last_updated: datetime
humidity_max_value: float
humidity_max_time: datetime
humidity_min_value: float
humidity_min_time: datetime
timezone_offset: int
'''
Function for collecting a list of sensors connected to your Telldus account and fetch latest available information from them.
This function returns a list of SensorObjects to the user.
'''
# TODO: Add error handling and clean up code
def fetch_sensor_list(return_raw=False, return_list=False):
telldus_url = f'{base_url}/sensors/list'
telldus_call = telldus_user.get(telldus_url)
result = json.loads(telldus_call.text)
sensor_list = []
if (return_list):
for res in result['sensor']:
sensor_list.append({
'sensor_id': res['id'],
'sensor_name': res['name'],
'sensor_lastupdate': res['lastUpdated'],
'sensor_model': res['model']
})
else:
for res in result['sensor']:
if (return_raw):
sensor_list.append(fetch_sensor_data(res['id'], True))
else:
sensor_list.append(fetch_sensor_data(res['id']))
return sensor_list
'''
Function for collecting the latest available information from a specified Telldus sensor ID.
Returns a SensorObject containing the information to the user
'''
# TODO: Add error handling and clean up code
def fetch_sensor_data(sensor_id, return_raw=False):
telldus_url = f'{base_url}/sensor/info?id={sensor_id}'
telldus_call = telldus_user.get(telldus_url)
json_data = json.loads(telldus_call.text)
if json_data:
result = SensorObject()
result.sensor_id = json_data['id']
result.name = json_data['name']
result.client_name = json_data['clientName']
result.last_updated = json_data['lastUpdated'] if return_raw else datetime.fromtimestamp(
int(json_data['lastUpdated']))
try:
if json_data['data'][0]['name'] == 'temp':
# Handle temperature values
result.temp_value = float(json_data['data'][0]['value'])
result.temp_max_value = float(json_data['data'][0]['max'])
result.temp_min_value = float(json_data['data'][0]['min'])
# Handle datetime values
if (return_raw):
result.temp_last_updated = json_data['data'][0]['lastUpdated']
result.temp_max_time = json_data['data'][0]['maxTime']
result.temp_min_time = json_data['data'][0]['minTime']
else:
                result.temp_last_updated = datetime.fromtimestamp(
int(json_data['data'][0]['lastUpdated']))
result.temp_max_time = datetime.fromtimestamp(
int(json_data['data'][0]['maxTime']))
result.temp_min_time = datetime.fromtimestamp(
int(json_data['data'][0]['minTime']))
except Exception:
pass
try:
if json_data['data'][1]['name'] == 'humidity':
# Handle humidity values
result.humidity_value = int(json_data['data'][1]['value'])
result.humidity_max_value = int(json_data['data'][1]['max'])
result.humidity_min_value = int(json_data['data'][1]['min'])
# Handle datetime values
if (return_raw):
result.humidity_last_updated = json_data['data'][1]['lastUpdated']
result.humidity_max_time = json_data['data'][1]['maxTime']
result.humidity_min_time = json_data['data'][1]['minTime']
else:
result.humidity_last_updated = datetime.fromtimestamp(
int(json_data['data'][1]['lastUpdated']))
result.humidity_max_time = datetime.fromtimestamp(
int(json_data['data'][1]['maxTime']))
result.humidity_min_time = datetime.fromtimestamp(
int(json_data['data'][1]['minTime']))
except Exception:
pass
result.timezone_offset = json_data['timezoneoffset']
else:
result = SensorObject()
return result
"""
A function for fetching sensor history stored at Telldus
"""
def fetch_sensor_history(sensor_id):
try:
telldus_url = f'{base_url}/sensor/history?id={sensor_id}'
telldus_call = telldus_user.get(telldus_url)
return json.loads(telldus_call.text)
except Exception:
return {'error': 'Error while fetching data.'}
```
#### File: JoakimEwenson/fastapi-telldus/main.py
```python
import os
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from controller.caller import fetch_sensor_data, fetch_sensor_history, fetch_sensor_list
# Initialize FastAPI
app = FastAPI()
origins = [
"*",
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Root will show a listing of sensors connected to supplied API key
# TODO: Add error handling
@app.get("/")
async def read_root():
return fetch_sensor_list(return_raw=True, return_list=True)
# /sensor endpoint will show sensor listing with details
# TODO: Add error handling
@app.get("/sensors")
async def get_sensors():
return fetch_sensor_list(return_raw=True)
# /sensor/info/{sensor_id} will show specific sensor details
# TODO: Add error handling
@app.get("/sensor/info/{sensor_id}")
async def get_sensor_info(sensor_id: int):
    return fetch_sensor_data(sensor_id, return_raw=True)
# /sensor/history/{sensor_id} will show specific sensor history
# TODO: Add error handling
@app.get("/sensor/history/{sensor_id}")
async def get_sensor_history(sensor_id: int):
return {
'sensor_id': sensor_id,
'result': fetch_sensor_history(sensor_id)
}
``` |
{
"source": "JoakimEwenson/pyTelldusCaller",
"score": 3
} |
#### File: JoakimEwenson/pyTelldusCaller/TelldusLogger.py
```python
from TelldusCaller import fetch_sensor_list, fetch_sensor_data
from os import path
from sqlite3 import Error
from datetime import datetime
import time
import sqlite3
import os.path
db_file = 'sensordata.db'
sql_create_sensordata_table = """ CREATE TABLE IF NOT EXISTS sensordata (
id integer PRIMARY KEY,
sensorid integer NOT NULL,
clientName text,
name text,
lastUpdated text,
tempValue real,
tempLastUpdated text,
tempMaxValue real,
tempMaxTime text,
tempMinValue real,
tempMinTime text,
humidityValue real,
humidityLastUpdated text,
humidityMaxValue real,
humidityMaxTime text,
humidityMinValue real,
humidityMinTime text,
timezoneOffset integer
); """
def create_connection(db_file):
""" Create a database connection to the SQLite database
specified by db_file
:param db_file: database file
:return Connection object or None
"""
conn = None
try:
conn = sqlite3.connect(db_file)
return conn
except Error as e:
print(e)
return None
def create_table(conn, create_table_sql):
""" Create a table from the create_table_sql statement
:param conn: Connection object
:param create_table_sql: a CREATE TABLE statement
:return:
"""
try:
c = conn.cursor()
c.execute(create_table_sql)
except Error as e:
print(e)
def insert_sensordata(conn, data):
c = conn.cursor()
for row in data:
        # Parameterized query avoids the quoting and SQL-injection issues of
        # interpolating raw values into the statement.
        c.execute(
            "INSERT INTO sensordata(sensorid,clientName,name,lastUpdated,tempValue,tempLastUpdated,"
            "tempMaxValue,tempMaxTime,tempMinValue,tempMinTime,humidityValue,humidityLastUpdated,"
            "humidityMaxValue,humidityMaxTime,humidityMinValue,humidityMinTime,timezoneOffset) "
            "VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
            (row.sensor_id, row.client_name, row.name, row.last_updated,
             row.temp_value, row.temp_last_updated, row.temp_max_value, row.temp_max_time,
             row.temp_min_value, row.temp_min_time, row.humidity_value, row.humidity_last_updated,
             row.humidity_max_value, row.humidity_max_time, row.humidity_min_value,
             row.humidity_min_time, row.timezone_offset))
conn.commit()
if __name__ == '__main__':
    if not path.exists(db_file):
conn = create_connection(db_file)
if conn is not None:
create_table(conn, sql_create_sensordata_table)
else:
print('Error, could not connect to database')
else:
conn = create_connection(db_file)
# Fetch sensor data
while True:
try:
result = fetch_sensor_list(True)
insert_sensordata(conn, result)
print(f"Successful fetch at {datetime.now()}")
except Error as error:
print(error)
# Sleep for 600 (10 m) s and fetch again
time.sleep(600)
``` |
{
"source": "joakimfors/grasso",
"score": 2
} |
#### File: grasso/grasso/fat32.py
```python
from struct import unpack
class ExtendedBIOSParameterBlock32(object):
length = 476
unpacker = "<IHHIHH12sBBB4s11s8s420sH"
def __init__(self, filesystem):
self.filesystem = filesystem
self.offset = self.filesystem.source.tell()
data = unpack(self.unpacker, self.filesystem.source.read(self.length))
self.sector_per_fat = data[0]
self.mirroring_flags = data[1]
self.version = data[2]
self.root_directory_cluster_number = data[3]
self.file_system_information_sector_number = data[4]
self.backup_boot_sector_number = data[5]
self.reserved = list(data[6])
self.physical_drive_number = data[7]
self.reserved_flags = data[8]
self.extended_boot_signature = data[9]
self.volume_id = list(data[10])
self.volume_label = data[11]
self.file_system_type = data[12]
self.boot_code = data[13]
self.signature = data[14]
def __repr__(self):
return "ExtendedBIOSParameterBlock32(\n" \
" offset=%d,\n" \
" length=%d,\n" \
" sector_per_fat=%d,\n" \
" mirroring_flags=%d,\n" \
" version=%d,\n" \
" root_directory_cluster_number=%d,\n" \
" file_system_information_sector_number=%d,\n" \
" backup_boot_sector_number=%d,\n" \
" reserved=%s,\n" \
" physical_drive_number=%d,\n" \
" reserved_flags=%d,\n" \
" extended_boot_signature=%d,\n" \
" volume_id=%s,\n" \
" volume_label='%s',\n" \
" file_system_type='%s',\n" \
" boot_code=[...],\n" \
" signature=%d,\n" \
")" % (
self.offset,
self.length,
self.sector_per_fat,
self.mirroring_flags,
self.version,
self.root_directory_cluster_number,
self.file_system_information_sector_number,
self.backup_boot_sector_number,
self.reserved,
self.physical_drive_number,
self.reserved_flags,
self.extended_boot_signature,
self.volume_id,
self.volume_label,
self.file_system_type,
self.signature
)
class FileSystemInformationSector32(object):
length = 512
unpacker = "<4s480s4sII12s4s"
def __init__(self, filesystem):
self.filesystem = filesystem
self.offset = self.filesystem.source.tell()
data = unpack(self.unpacker, self.filesystem.source.read(self.length))
self.signature_1 = list(data[0])
self.reserved_1 = list(data[1])
self.signature_2 = list(data[2])
self.free_cluster_count = data[3]
self.most_recent_allocated_cluster_number = data[4]
self.reserved_2 = list(data[5])
self.signature_3 = list(data[6])
def __repr__(self):
return "FileSystemInformationSector32(\n" \
" offset=%d,\n" \
" length=%d,\n" \
" signature_1=%s,\n" \
" reserved_1=[...],\n" \
" signature_2=%s,\n" \
" free_cluster_count=%d,\n" \
" most_recent_allocated_cluster_number=%d,\n" \
" reserved_2=%s,\n" \
" signature_3=%s,\n" \
")" % (
self.offset,
self.length,
self.signature_1,
self.signature_2,
self.free_cluster_count,
self.most_recent_allocated_cluster_number,
self.reserved_2,
self.signature_3
)
class FAT32(object):
def __init__(self, filesystem, length):
self.length = length
self.filesystem = filesystem
self.offset = self.filesystem.source.tell()
source = self.filesystem.source
self.media_descriptor = unpack('<B', source.read(1))[0]
self.ones = unpack('<BBB', source.read(3))
self.end_of_cluster = unpack('<I', source.read(4))[0]
self.next_clusters = {}
self.bad_clusters = {}
        entries = self.length // 4  # each FAT32 entry is 4 bytes
        for i in range(2, entries):
            v = unpack('<I', source.read(4))[0] & 0x0FFFFFFF
            if not v:
                continue
            if 0x00000002 <= v <= 0x0FFFFFEF:
                self.next_clusters[i] = v
            if 0x0FFFFFF8 <= v <= 0x0FFFFFFF:
                self.next_clusters[i] = None
            if v == 0x0FFFFFF7:
                self.bad_clusters[i] = v
def get_chain(self, cluster):
c = cluster
while c:
yield c
c = self.next_clusters[c]
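    # Example (illustrative): walking a file's clusters from its directory
    # entry's start cluster:
    #
    #   for cluster in fat.get_chain(first_cluster):
    #       ...  # map the cluster number to a data-region offset and read it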
def __repr__(self):
return "FAT32(\n" \
" offset=%d,\n" \
" length=%d,\n" \
" media_descriptor=%d,\n" \
" end_of_cluster=0x%X,\n" \
" next_clusters=[...],\n" \
" bad_clusters=[...],\n" \
")" % (
self.offset,
self.length,
self.media_descriptor,
self.end_of_cluster,
)
``` |
{
"source": "JoakimFristedt/stocks-data",
"score": 3
} |
#### File: stocks-data/utils/Stock.py
```python
class Stock():
def __init__(self, tickerSymbol, stockData):
self.tickerSymbol = tickerSymbol
self.stockData = stockData
``` |
{
"source": "JoakimHaurum/DL_Projects",
"score": 2
} |
#### File: dlrepo/models/siamese_net.py
```python
import time
import os
import numpy as np
from keras.models import Model
from keras.layers import Input, Dense, BatchNormalization, Conv2D, Flatten, Activation, MaxPool2D
from keras.optimizers import Adam
from keras.utils import plot_model
from keras import backend as K
from utils.datasets import DATASET, create_pairs
from utils.losses import contrastive_loss
from utils.layers import Distance_Layer
from utils.saving import save_loss_log, save_model
from utils.data_ops import scale
class Siamese_net():
"""
Class for constructing a simple Siamese network to compare two image inputs
"""
name = "SiameseNet"
def __init__(self, epochs, batch_size, dataset, loss_path, result_path, checkpoint_path):
creation_time = time.strftime('%Y%m%d-%H%M%S')
dir_prefix = self.name + "_" + creation_time
self.epochs = epochs
self.batch_size = batch_size
self.dataset = dataset
self.loss_path = loss_path + "/" + dir_prefix
self.result_path = result_path + "/" + dir_prefix
self.checkpoint_path = checkpoint_path + "/" + dir_prefix
if self.dataset.lower() in DATASET:
#Load dataset
            train_x, train_y, test_x, test_y = DATASET[self.dataset.lower()](return_test=True)
num_classes = len(np.unique(train_y))
# Convert values to float32 and convert range to -1-1 from 0-255
train_x = scale(train_x, -1, 1, 0, 255)
test_x = scale(test_x, -1, 1, 0, 255)
# Create training and testing pairs
train_digit_indices = [np.where(train_y == i)[0] for i in range(num_classes)]
self.train_pair_x, self.train_pair_y = create_pairs(train_x, train_digit_indices, num_classes)
test_digit_indices = [np.where(test_y == i)[0] for i in range(num_classes)]
self.test_pair_x, self.test_pair_y = create_pairs(test_x, test_digit_indices, num_classes)
print('image pair shape:', self.train_pair_x.shape)
print('training pair count:', self.train_pair_y.shape[0])
print('testing pair count:', self.test_pair_y.shape[0])
#Set dimensions for input of discriminator
self.input_shape = train_x.shape[1:]
else:
raise NotImplementedError
self.build()
def build(self):
"""
        Builds and compiles the siamese network
"""
self.siamese_net = self.network(self.input_shape)
self.siamese_net.summary()
self.siamese_net.compile(optimizer = Adam(), loss = contrastive_loss, metrics = [self.accuracy_metric])
def network(self, img_shape, base_feature_count = 128, scale_factor = 2):
"""
        Constructs the base network used for each leg in the siamese network and combines them with a distance-based comparison layer
"""
img_in = Input(img_shape)
x = img_in
# for s in range(scale_factor):
# x = Conv2D(base_feature_count*2**s, (3, 3), strides=(1,1), padding='same', name = "Conv2D_"+str(s)+"a")(x)
# x = BatchNormalization(name = "BN_"+str(s)+"a")(x)
# x = Activation("relu", name = "ReLU_"+str(s)+"a")(x)
## x = Conv2D(base_feature_count*2**s, (3, 3), strides=(1,1), padding='same', name = "Conv2D_"+str(s)+"b")(x)
## x = BatchNormalization(name = "BN_"+str(s)+"b")(x)
## x = Activation("relu", name = "ReLU_"+str(s)+"b")(x)
# x = MaxPool2D(name = "MaxPool_"+str(s))(x)
#
# x = Flatten(name = "Flatten")(x)
# x = Dense(128, name = "FC1")(x)
# x = BatchNormalization(name = "BN_"+str(scale_factor+1))(x)
# x = Activation("relu",name = "ReLU_"+str(scale_factor+1))(x)
x = Flatten()(x)
x = Dense(64)(x)
x = BatchNormalization()(x)
x = Activation("relu")(x)
x = Dense(128)(x)
x = BatchNormalization()(x)
x = Activation("relu")(x)
x = Dense(256)(x)
x = BatchNormalization()(x)
x = Activation("relu")(x)
base_net = Model(img_in, x, name = "Base net")
base_net.summary()
input_a = Input(img_shape, name = "Input a")
input_b = Input(img_shape, name = "Input b")
out_a = base_net(input_a)
out_b = base_net(input_b)
l2_distance = Distance_Layer([out_a,out_b])
return Model([input_a, input_b], l2_distance)
def accuracy_metric(self, y_true, y_pred):
"""
        Function for evaluating predictions during training at a fixed threshold (0.5)
"""
return K.mean(K.equal(y_true, K.cast(y_pred < 0.5, y_true.dtype)))
def evaluation_accuracy(self, y_true, y_pred, threshold = 0.5):
"""
        Function for evaluating predictions at variable thresholds
"""
pred = y_pred.ravel() < threshold
return np.mean(pred == y_true)
def fit(self):
"""
        Trains the siamese net based on the inputs supplied when initializing the network object
"""
if not os.path.exists(self.checkpoint_path):
os.makedirs(self.checkpoint_path)
if not os.path.exists(self.result_path):
os.makedirs(self.result_path)
if not os.path.exists(self.loss_path):
os.makedirs(self.loss_path)
hist = self.siamese_net.fit(x = [self.train_pair_x[:,0], self.train_pair_x[:,1]], y = self.train_pair_y, batch_size = self.batch_size, epochs = self.epochs)
training_history = {
"training_loss" : hist.history["loss"],
"training_acc" : hist.history["accuracy_metric"]
}
save_loss_log(self.loss_path, "/loss", training_history)
save_model(self.checkpoint_path, "/SiameseNet", self.siamese_net, self.epochs)
def predict(self, img_pair):
"""
Determines whether the input image pairs are from the same class
"""
return self.siamese_net.predict(img_pair)
def pretty_print(self):
"""
        Prints the supplied configuration values in a readable format
(Need adjustment)
"""
print("\nepochs = \t\t{}\nbatch_size = \t\t{}\ndataset = \t\t{}\
\nloss_path = \t\t{}\nresult_path = \t\t{}\ncheckpoint_path = \t{}\
\ninput height = \t\t{}\ninput width = \t\t{}\ninput channels = \t{}".format(self.epochs,\
self.batch_size,self.dataset,\
self.loss_path,self.result_path,self.checkpoint_path,\
self.input_shape[1],self.input_shape[2],self.input_shape[0]))
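# The contrastive_loss imported above lives in utils.losses and is not shown in
# this file. A minimal sketch of the standard formulation (Hadsell et al. 2006),
# which matches the Distance_Layer output and the 0.5 threshold used in
# accuracy_metric, could look like this -- an illustration, not necessarily the
# repo's exact implementation:
def _contrastive_loss_sketch(y_true, y_pred, margin=1.0):
    # Similar pairs (y_true == 1) are pulled together; dissimilar pairs are
    # pushed apart until their distance exceeds the margin.
    positive = y_true * K.square(y_pred)
    negative = (1.0 - y_true) * K.square(K.maximum(margin - y_pred, 0.0))
    return K.mean(positive + negative)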
if __name__ == "__main__":
sNet = Siamese_net(10, 128, "mnist", "Loss", "Images", "Saved_models")
sNet.pretty_print()
sNet.fit()
``` |
{
"source": "joakimkarlsson/describe_it",
"score": 3
} |
#### File: describe_it/specs/context_spec.py
```python
from describe_it import describe, it, before_each, Fixture, Context
from mock import MagicMock
def empty_describe_fn():
pass
@describe
def context():
f = Fixture()
@before_each
def setup():
f.context = Context(describe_fn=empty_describe_fn,
parent=None)
@describe
def with_before_each_and_after_each_functions():
@before_each
def setup():
f.before_each = MagicMock()
f.after_each = MagicMock()
f.context.add_before_each(f.before_each)
f.context.add_after_each(f.after_each)
@it
def calls_before_each_when_asked():
f.context.run_before_eaches()
f.before_each.assert_called_once_with()
@it
def calls_after_each():
f.context.run_after_eaches()
f.after_each.assert_called_once_with()
@describe
def with_a_child_context():
@before_each
def setup():
f.child_context = Context(describe_fn=empty_describe_fn,
parent=f.context)
@it
def calls_before_each_on_parent_before_running_its_own():
def child_before_each():
f.before_each.assert_called_once_with()
f.child_context.add_before_each(child_before_each)
f.child_context.run_before_eaches()
@it
def calls_after_each_on_parent_after_running_its_own():
def child_after_each():
assert not f.after_each.called
f.child_context.add_after_each(child_after_each)
f.child_context.run_after_eaches()
```
#### File: describe_it/specs/context_testcase_spec.py
```python
from describe_it import describe, it, before_each, Fixture, Context
from describe_it.noseplugin import ContextTestCase
from mock import MagicMock
from nose.tools import assert_raises
from unittest import SkipTest
@describe
def context_testcase():
f = Fixture()
@before_each
def setup():
f.describe_fn = MagicMock()
f.context = Context(describe_fn=f.describe_fn, parent=None)
f.it_fn = MagicMock()
f.it_fn.skip = False
f.context.add_it(f.it_fn)
f.testcase = ContextTestCase(it_fn=f.it_fn)
@it
def calls_it_fn():
f.testcase.run_test()
f.it_fn.assert_called_once_with()
@describe
def context_marked_as_skipped():
@before_each
def setup():
f.context.skip = True
@it
def doesnt_call_it_fn():
try:
f.testcase.run_test()
except SkipTest: # Letting this bubble up would mark this test
# as skipped.
pass
assert not f.it_fn.called
@it
def throws_SkipTest():
assert_raises(SkipTest, f.testcase.run_test)
@describe
def context_has_before_and_after_each():
@before_each
def setup():
f.before_each = MagicMock()
f.context.add_before_each(f.before_each)
f.after_each = MagicMock()
f.context.add_after_each(f.after_each)
@it
def doesnt_call_before_each():
try:
f.testcase.setUp()
except SkipTest: # Letting this bubble up would mark this
# test as skipped.
pass
assert not f.before_each.called
@it
def doesnt_call_after_each():
try:
f.testcase.tearDown()
except SkipTest: # Letting this bubble up would mark this
# test as skipped.
pass
assert not f.after_each.called
@describe
def it_fn_marked_as_skipped():
@before_each
def setup():
f.it_fn.skip = True
@it
def doesnt_call_it_fn():
try:
f.testcase.run_test()
except SkipTest: # Letting this bubble up would mark this test
# as skipped.
pass
assert not f.it_fn.called
@it
def throws_SkipTest():
assert_raises(SkipTest, f.testcase.run_test)
@describe
def context_has_before_and_after_each():
@before_each
def setup():
f.before_each = MagicMock()
f.context.add_before_each(f.before_each)
f.after_each = MagicMock()
f.context.add_after_each(f.after_each)
@it
def doesnt_call_before_each():
try:
f.testcase.setUp()
except SkipTest: # Letting this bubble up would mark this
# test as skipped.
pass
assert not f.before_each.called
@it
def doesnt_call_after_each():
try:
f.testcase.tearDown()
except SkipTest: # Letting this bubble up would mark this
# test as skipped.
pass
assert not f.after_each.called
@describe
def context_has_a_parent():
@before_each
def setup():
f.child_context = Context(describe_fn=f.describe_fn,
parent=f.context)
f.it_fn.context = f.child_context
@it
def calls_it_fn():
f.testcase.run_test()
f.it_fn.assert_called_once_with()
@describe
def parent_is_marked_as_skipped():
@before_each
def setup():
f.context.skip = True
@it
def doesnt_call_it_fn():
try:
f.testcase.run_test()
except SkipTest: # Letting this bubble up would mark this
# test as skipped.
pass
assert not f.it_fn.called
``` |
{
"source": "joakimnordling/migrate-anything",
"score": 3
} |
#### File: custom_storage/migrations/__init__.py
```python
from migrate_anything import configure
class CustomStorage(object):
def __init__(self, file):
self.file = file
def save_migration(self, name, code):
with open(self.file, "a", encoding="utf-8") as file:
file.write("{},{}\n".format(name, code))
def list_migrations(self):
try:
with open(self.file, encoding="utf-8") as file:
return [
line.split(",")
for line in file.readlines()
if line.strip() # Skip empty lines
]
except FileNotFoundError:
return []
def remove_migration(self, name):
migrations = [
migration for migration in self.list_migrations() if migration[0] != name
]
with open(self.file, "w", encoding="utf-8") as file:
for row in migrations:
file.write("{},{}\n".format(*row))
configure(storage=CustomStorage("test.txt"))
```
#### File: mongodb/migrations/01-test.py
```python
from pymongo import MongoClient
client = MongoClient()
db = client.my_db
def up():
db.posts.insert_one(
{
"id": "post-1",
"title": "We're live!",
"content": "This is our first post, yay.",
}
)
db.posts.create_index("id")
def down():
db.posts.drop()
``` |
{
"source": "joakimnordling/pex",
"score": 2
} |
#### File: tools/commands/venv.py
```python
from __future__ import absolute_import
import errno
import os
import shutil
import zipfile
from argparse import ArgumentParser, Namespace
from collections import defaultdict
from textwrap import dedent
from pex import pex_warnings
from pex.common import chmod_plus_x, pluralize, safe_mkdir
from pex.environment import PEXEnvironment
from pex.pex import PEX
from pex.tools.command import Command, Error, Ok, Result
from pex.tools.commands.virtualenv import PipUnavailableError, Virtualenv
from pex.tracer import TRACER
from pex.typing import TYPE_CHECKING
from pex.venv_bin_path import BinPath
if TYPE_CHECKING:
from typing import Iterable, Iterator, Optional, Tuple
# N.B.: We can't use shutil.copytree since we copy from multiple source locations to the same site
# packages directory destination. Since we're forced to stray from the stdlib here, support for
# hardlinks is added to provide a measurable speed up and disk space savings when possible.
def _copytree(
src, # type: str
dst, # type: str
exclude=(), # type: Tuple[str, ...]
):
# type: (...) -> Iterator[Tuple[str, str]]
safe_mkdir(dst)
link = True
for root, dirs, files in os.walk(src, topdown=True, followlinks=False):
if src == root:
dirs[:] = [d for d in dirs if d not in exclude]
files[:] = [f for f in files if f not in exclude]
for d in dirs:
try:
os.mkdir(os.path.join(dst, os.path.relpath(os.path.join(root, d), src)))
except OSError as e:
if e.errno != errno.EEXIST:
raise e
for f in files:
src_entry = os.path.join(root, f)
dst_entry = os.path.join(dst, os.path.relpath(src_entry, src))
yield src_entry, dst_entry
try:
# We only try to link regular files since linking a symlink on Linux can produce
# another symlink, which leaves open the possibility the src_entry target could
# later go missing leaving the dst_entry dangling.
if link and not os.path.islink(src_entry):
try:
os.link(src_entry, dst_entry)
continue
except OSError as e:
if e.errno != errno.EXDEV:
raise e
link = False
shutil.copy(src_entry, dst_entry)
except OSError as e:
if e.errno != errno.EEXIST:
raise e
class CollisionError(Exception):
"""Indicates multiple distributions provided the same file when merging a PEX into a venv."""
def populate_venv_with_pex(
venv, # type: Virtualenv
pex, # type: PEX
bin_path=BinPath.FALSE, # type: BinPath.Value
python=None, # type: Optional[str]
collisions_ok=True, # type: bool
):
# type: (...) -> str
venv_python = python or venv.interpreter.binary
venv_bin_dir = os.path.dirname(python) if python else venv.bin_dir
venv_dir = os.path.dirname(venv_bin_dir) if python else venv.venv_dir
# 1. Populate the venv with the PEX contents.
provenance = defaultdict(list)
def record_provenance(src_to_dst):
# type: (Iterable[Tuple[str, str]]) -> None
for src, dst in src_to_dst:
provenance[dst].append(src)
pex_info = pex.pex_info()
if zipfile.is_zipfile(pex.path()):
record_provenance(
PEXEnvironment(pex.path()).explode_code(
venv.site_packages_dir, exclude=("__main__.py", pex_info.PATH)
)
)
else:
record_provenance(
_copytree(
src=pex.path(),
dst=venv.site_packages_dir,
exclude=(pex_info.internal_cache, pex_info.bootstrap, "__main__.py", pex_info.PATH),
)
)
with open(os.path.join(venv.venv_dir, pex_info.PATH), "w") as fp:
fp.write(pex_info.dump())
for dist in pex.resolve():
record_provenance(
_copytree(src=dist.location, dst=venv.site_packages_dir, exclude=("bin",))
)
dist_bin_dir = os.path.join(dist.location, "bin")
if os.path.isdir(dist_bin_dir):
record_provenance(_copytree(dist_bin_dir, venv.bin_dir))
collisions = {dst: srcs for dst, srcs in provenance.items() if len(srcs) > 1}
if collisions:
message_lines = [
"Encountered {collision} building venv at {venv_dir} from {pex}:".format(
collision=pluralize(collisions, "collision"), venv_dir=venv_dir, pex=pex.path()
)
]
for index, (dst, srcs) in enumerate(collisions.items(), start=1):
message_lines.append(
"{index}. {dst} was provided by:\n\t{srcs}".format(
index=index, dst=dst, srcs="\n\t".join(srcs)
)
)
message = "\n".join(message_lines)
if not collisions_ok:
raise CollisionError(message)
pex_warnings.warn(message)
# 2. Add a __main__ to the root of the venv for running the venv dir like a loose PEX dir
# and a main.py for running as a script.
shebang = "#!{} -sE".format(venv_python)
main_contents = dedent(
"""\
{shebang}
if __name__ == "__main__":
import os
import sys
venv_dir = os.path.abspath(os.path.dirname(__file__))
venv_bin_dir = os.path.join(venv_dir, "bin")
shebang_python = {shebang_python!r}
python = os.path.join(venv_bin_dir, os.path.basename(shebang_python))
def iter_valid_venv_pythons():
# Allow for both the known valid venv pythons and their fully resolved venv path
# version in the case their parent directories contain symlinks.
for python_binary in (python, shebang_python):
yield python_binary
yield os.path.join(
os.path.realpath(os.path.dirname(python_binary)),
os.path.basename(python_binary)
)
current_interpreter_blessed_env_var = "_PEX_SHOULD_EXIT_VENV_REEXEC"
if (
not os.environ.pop(current_interpreter_blessed_env_var, None)
and sys.executable not in tuple(iter_valid_venv_pythons())
):
sys.stderr.write("Re-execing from {{}}\\n".format(sys.executable))
os.environ[current_interpreter_blessed_env_var] = "1"
os.execv(python, [python, "-sE"] + sys.argv)
os.environ["VIRTUAL_ENV"] = venv_dir
sys.path.extend(os.environ.get("PEX_EXTRA_SYS_PATH", "").split(os.pathsep))
bin_path = os.environ.get("PEX_VENV_BIN_PATH", {bin_path!r})
if bin_path != "false":
PATH = os.environ.get("PATH", "").split(os.pathsep)
if bin_path == "prepend":
PATH.insert(0, venv_bin_dir)
elif bin_path == "append":
PATH.append(venv_bin_dir)
else:
sys.stderr.write(
"PEX_VENV_BIN_PATH must be one of 'false', 'prepend' or 'append', given: "
"{{!r}}\\n".format(
bin_path
)
)
sys.exit(1)
os.environ["PATH"] = os.pathsep.join(PATH)
PEX_EXEC_OVERRIDE_KEYS = ("PEX_INTERPRETER", "PEX_SCRIPT", "PEX_MODULE")
pex_overrides = {{
key: os.environ.get(key) for key in PEX_EXEC_OVERRIDE_KEYS if key in os.environ
}}
if len(pex_overrides) > 1:
sys.stderr.write(
"Can only specify one of {{overrides}}; found: {{found}}\\n".format(
overrides=", ".join(PEX_EXEC_OVERRIDE_KEYS),
found=" ".join("{{}}={{}}".format(k, v) for k, v in pex_overrides.items())
)
)
sys.exit(1)
if {strip_pex_env!r}:
for key in list(os.environ):
if key.startswith("PEX_"):
del os.environ[key]
pex_script = pex_overrides.get("PEX_SCRIPT")
if pex_script:
script_path = os.path.join(venv_bin_dir, pex_script)
os.execv(script_path, [script_path] + sys.argv[1:])
pex_interpreter = pex_overrides.get("PEX_INTERPRETER", "").lower() in ("1", "true")
PEX_INTERPRETER_ENTRYPOINT = "code:interact"
entry_point = (
PEX_INTERPRETER_ENTRYPOINT
if pex_interpreter
else pex_overrides.get("PEX_MODULE", {entry_point!r} or PEX_INTERPRETER_ENTRYPOINT)
)
if entry_point == PEX_INTERPRETER_ENTRYPOINT and len(sys.argv) > 1:
args = sys.argv[1:]
arg = args[0]
if arg == "-m":
if len(args) < 2:
sys.stderr.write("Argument expected for the -m option\\n")
sys.exit(2)
entry_point = module = args[1]
sys.argv = args[1:]
# Fall through to entry_point handling below.
else:
filename = arg
sys.argv = args
if arg == "-c":
if len(args) < 2:
sys.stderr.write("Argument expected for the -c option\\n")
sys.exit(2)
filename = "-c <cmd>"
content = args[1]
sys.argv = ["-c"] + args[2:]
elif arg == "-":
content = sys.stdin.read()
else:
with open(arg) as fp:
content = fp.read()
ast = compile(content, filename, "exec", flags=0, dont_inherit=1)
globals_map = globals().copy()
globals_map["__name__"] = "__main__"
globals_map["__file__"] = filename
locals_map = globals_map
{exec_ast}
sys.exit(0)
module_name, _, function = entry_point.partition(":")
if not function:
import runpy
runpy.run_module(module_name, run_name="__main__", alter_sys=True)
else:
import importlib
module = importlib.import_module(module_name)
# N.B.: Functions may be hung off top-level objects in the module namespace,
# e.g.: Class.method; so we drill down through any attributes to the final function
# object.
namespace, func = module, None
for attr in function.split("."):
func = namespace = getattr(namespace, attr)
sys.exit(func())
""".format(
shebang=shebang,
shebang_python=venv_python,
bin_path=bin_path,
strip_pex_env=pex_info.strip_pex_env,
entry_point=pex_info.entry_point,
exec_ast=(
"exec ast in globals_map, locals_map"
if venv.interpreter.version[0] == 2
else "exec(ast, globals_map, locals_map)"
),
)
)
with open(venv.join_path("__main__.py"), "w") as fp:
fp.write(main_contents)
chmod_plus_x(fp.name)
os.symlink(os.path.basename(fp.name), venv.join_path("pex"))
# 3. Re-write any (console) scripts to use the venv Python.
for script in venv.rewrite_scripts(python=venv_python, python_args="-sE"):
TRACER.log("Re-writing {}".format(script))
return shebang
class Venv(Command):
"""Creates a venv from the PEX file."""
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument(
"venv",
nargs=1,
metavar="PATH",
help="The directory to create the virtual environment in.",
)
parser.add_argument(
"-b",
"--bin-path",
choices=[choice.value for choice in BinPath.values],
default=BinPath.FALSE.value,
help="Add the venv bin dir to the PATH in the __main__.py script.",
)
parser.add_argument(
"-f",
"--force",
action="store_true",
default=False,
help="If the venv directory already exists, overwrite it.",
)
parser.add_argument(
"--collisions-ok",
action="store_true",
default=False,
help=(
"Don't error if population of the venv encounters distributions in the PEX file "
"with colliding files, just emit a warning."
),
)
parser.add_argument(
"-p",
"--pip",
action="store_true",
default=False,
help="Add pip to the venv.",
)
parser.add_argument(
"--copies",
action="store_true",
default=False,
help="Create the venv using copies of system files instead of symlinks",
)
parser.add_argument(
"--compile",
action="store_true",
default=False,
help="Compile all `.py` files in the venv.",
)
def run(
self,
pex, # type: PEX
options, # type: Namespace
):
# type: (...) -> Result
venv_dir = options.venv[0]
venv = Virtualenv.create(
venv_dir, interpreter=pex.interpreter, force=options.force, copies=options.copies
)
populate_venv_with_pex(
venv,
pex,
bin_path=BinPath.for_value(options.bin_path),
collisions_ok=options.collisions_ok,
)
if options.pip:
try:
venv.install_pip()
except PipUnavailableError as e:
return Error(
"The virtual environment was successfully created, but Pip was not "
"installed:\n{}".format(e)
)
if options.compile:
pex.interpreter.execute(["-m", "compileall", venv_dir])
return Ok()
``` |
{
"source": "joakimnordling/russs123_Shooter",
"score": 3
} |
#### File: joakimnordling/russs123_Shooter/shooter_tut2.py
```python
import pygame
pygame.init()
SCREEN_WIDTH = 800
SCREEN_HEIGHT = int(SCREEN_WIDTH * 0.8)
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
pygame.display.set_caption('Shooter')
#set framerate
clock = pygame.time.Clock()
FPS = 60
#define player action variables
moving_left = False
moving_right = False
#define colours
BG = (144, 201, 120)
def draw_bg():
screen.fill(BG)
class Soldier(pygame.sprite.Sprite):
def __init__(self, char_type, x, y, scale, speed):
pygame.sprite.Sprite.__init__(self)
self.char_type = char_type
self.speed = speed
self.direction = 1
self.flip = False
img = pygame.image.load(f'img/{self.char_type}/Idle/0.png')
self.image = pygame.transform.scale(img, (int(img.get_width() * scale), int(img.get_height() * scale)))
self.rect = self.image.get_rect()
self.rect.center = (x, y)
def move(self, moving_left, moving_right):
#reset movement variables
dx = 0
dy = 0
#assign movement variables if moving left or right
if moving_left:
dx = -self.speed
self.flip = True
self.direction = -1
if moving_right:
dx = self.speed
self.flip = False
self.direction = 1
#update rectangle position
self.rect.x += dx
self.rect.y += dy
def draw(self):
screen.blit(pygame.transform.flip(self.image, self.flip, False), self.rect)
player = Soldier('player', 200, 200, 3, 5)
enemy = Soldier('enemy', 400, 200, 3, 5)
run = True
while run:
clock.tick(FPS)
draw_bg()
player.draw()
enemy.draw()
player.move(moving_left, moving_right)
for event in pygame.event.get():
#quit game
if event.type == pygame.QUIT:
run = False
#keyboard presses
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_a:
moving_left = True
if event.key == pygame.K_d:
moving_right = True
if event.key == pygame.K_ESCAPE:
run = False
#keyboard button released
if event.type == pygame.KEYUP:
if event.key == pygame.K_a:
moving_left = False
if event.key == pygame.K_d:
moving_right = False
pygame.display.update()
pygame.quit()
``` |
{
"source": "joakimnordling/shylock",
"score": 2
} |
#### File: shylock/backends/pythonarango.py
```python
from time import sleep
from typing import Optional
try:
from arango.database import StandardDatabase
from arango.collection import StandardCollection
from arango.exceptions import ArangoServerError
except ImportError:
StandardDatabase = None
StandardCollection = None
ArangoServerError = None
from shylock.backends import ShylockSyncBackend
from shylock.exceptions import ShylockException
DOCUMENT_TTL = 60 * 5 # 5min seems like a reasonable TTL
POLL_DELAY = 1 / 16 # Some balance between high polling and high delay
ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED = 1210
class ShylockPythonArangoBackend(ShylockSyncBackend):
@staticmethod
def create(
db: StandardDatabase, collection_name: str = "shylock"
) -> "ShylockPythonArangoBackend":
""""
Create and initialize the backend
:param db: An instance of arango.database.StandardDatabase connected to the desired database
:param collection_name: The name of the collection reserved for shylock
"""
inst = ShylockPythonArangoBackend(db, collection_name)
inst._init_collection()
return inst
def acquire(self, name: str, block: bool = True) -> bool:
"""
Try to acquire a lock, potentially wait until it's available
:param name: Name of the lock
:param block: Wait for lock
:return: If lock was successfully acquired - always True if block is True
"""
while True:
try:
self._db.aql.execute(
"""
INSERT {
"name": @name,
"expiresAt": DATE_NOW() / 1000 + @ttl
} IN @@collection
""",
bind_vars={
"name": name,
"ttl": DOCUMENT_TTL,
"@collection": self._collection_name,
},
)
return True
except ArangoServerError as err:
if err.error_code == ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED:
if not block:
return False
sleep(POLL_DELAY)
else:
raise
def release(self, name: str):
"""
Release a given lock
:param name: Name of the lock
"""
self._db.aql.execute(
"""
FOR l IN @@collection
FILTER l.name == @name
REMOVE l IN @@collection
""",
bind_vars={"name": name, "@collection": self._collection_name},
)
@staticmethod
def _check():
if StandardDatabase is None:
raise ShylockException(
"No python-arango driver available. Cannot use Shylock with PythonArango backend without it."
)
def __init__(self, db: StandardDatabase, collection_name: str = "shylock"):
self._check()
self._db: StandardDatabase = db
self._coll: Optional[StandardCollection] = None
self._collection_name: str = collection_name
def _init_collection(self):
"""
Ensure the collection is ready for our use
"""
if self._db.has_collection(self._collection_name):
self._coll = self._db.collection(self._collection_name)
else:
self._coll = self._db.create_collection(self._collection_name)
self._coll.add_persistent_index(fields=["name"], unique=True)
self._coll.add_ttl_index(fields=["expiresAt"], expiry_time=0)
``` |
{
"source": "joakimnordling/syrupy",
"score": 2
} |
#### File: src/syrupy/__init__.py
```python
import argparse
import sys
from gettext import gettext
from typing import (
Any,
ContextManager,
List,
Optional,
)
import pytest
from .assertion import SnapshotAssertion
from .constants import DISABLE_COLOR_ENV_VAR
from .exceptions import FailedToLoadModuleMember
from .extensions import DEFAULT_EXTENSION
from .location import PyTestLocation
from .session import SnapshotSession
from .terminal import (
received_style,
reset,
snapshot_style,
)
from .utils import (
env_context,
import_module_member,
)
# Global to have access to the session in `pytest_runtest_logfinish` hook
_syrupy: Optional["SnapshotSession"] = None
def __default_extension_option(value: str) -> Any:
try:
return import_module_member(value)
except FailedToLoadModuleMember as e:
raise argparse.ArgumentTypeError(e)
def pytest_addoption(parser: Any) -> None:
"""
Exposes snapshot plugin configuration to pytest.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_addoption
"""
group = parser.getgroup("syrupy")
group.addoption(
"--snapshot-update",
action="store_true",
default=False,
dest="update_snapshots",
help="Update snapshots",
)
group.addoption(
"--snapshot-warn-unused",
action="store_true",
default=False,
dest="warn_unused_snapshots",
help="Do not fail on unused snapshots",
)
group.addoption(
"--snapshot-details",
action="store_true",
default=False,
dest="include_snapshot_details",
help="Include details of unused snapshots in the final report",
)
group.addoption(
"--snapshot-default-extension",
type=__default_extension_option,
default=DEFAULT_EXTENSION,
dest="default_extension",
help="Specify the default snapshot extension",
)
group.addoption(
"--snapshot-no-colors",
action="store_true",
default=not sys.stdout.isatty(),
dest="no_colors",
help="Disable test results output highlighting",
)
def __terminal_color(config: Any) -> "ContextManager[None]":
env = {}
if config.option.no_colors:
env[DISABLE_COLOR_ENV_VAR] = "true"
return env_context(**env)
def pytest_assertrepr_compare(
config: Any, op: str, left: Any, right: Any
) -> Optional[List[str]]:
"""
Return explanation for comparisons in failing assert expressions.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_assertrepr_compare
"""
with __terminal_color(config):
received_name = received_style("[+ received]")
def snapshot_name(name: str) -> str:
return snapshot_style(f"[- {name}]")
if isinstance(left, SnapshotAssertion):
assert_msg = reset(f"{snapshot_name(left.name)} {op} {received_name}")
return [assert_msg] + left.get_assert_diff()
elif isinstance(right, SnapshotAssertion):
assert_msg = reset(f"{received_name} {op} {snapshot_name(right.name)}")
return [assert_msg] + right.get_assert_diff()
return None
def pytest_sessionstart(session: Any) -> None:
"""
Initialize snapshot session before tests are collected and ran.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_sessionstart
"""
session.config._syrupy = SnapshotSession(pytest_session=session)
global _syrupy
_syrupy = session.config._syrupy
session.config._syrupy.start()
def pytest_collection_modifyitems(
session: Any, config: Any, items: List["pytest.Item"]
) -> None:
"""
After tests are collected and before any modification is performed.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_collection_modifyitems
"""
config._syrupy.collect_items(items)
def pytest_collection_finish(session: Any) -> None:
"""
After collection has been performed and modified.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_collection_finish
"""
session.config._syrupy.select_items(session.items)
def pytest_runtest_logfinish(nodeid: str) -> None:
"""
At the end of running the runtest protocol for a single item.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_runtest_logfinish
"""
global _syrupy
if _syrupy:
_syrupy.ran_item(nodeid)
def pytest_sessionfinish(session: Any, exitstatus: int) -> None:
"""
Finish session run and set exit status.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_sessionfinish
"""
session.exitstatus |= exitstatus | session.config._syrupy.finish()
def pytest_terminal_summary(
terminalreporter: Any, exitstatus: int, config: Any
) -> None:
"""
Add syrupy report to pytest.
https://docs.pytest.org/en/latest/reference.html#_pytest.hookspec.pytest_terminal_summary
"""
with __terminal_color(config):
terminalreporter.write_sep("-", gettext("snapshot report summary"))
for line in terminalreporter.config._syrupy.report.lines:
terminalreporter.write_line(line)
@pytest.fixture
def snapshot(request: Any) -> "SnapshotAssertion":
return SnapshotAssertion(
update_snapshots=request.config.option.update_snapshots,
extension_class=request.config.option.default_extension,
test_location=PyTestLocation(request.node),
session=request.session.config._syrupy,
)
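# Example of the fixture in use in a test module (illustrative):
#
#   def test_example(snapshot):
#       assert {"answer": 42} == snapshot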
```
#### File: tests/integration/test_snapshot_use_extension.py
```python
import pytest
@pytest.fixture
def testcases_initial(testdir):
testdir.makeconftest(
"""
import pytest
from syrupy.extensions.amber import AmberSnapshotExtension
from syrupy.extensions.image import (
PNGImageSnapshotExtension,
SVGImageSnapshotExtension,
)
from syrupy.extensions.single_file import SingleFileSnapshotExtension
class CustomSnapshotExtension(AmberSnapshotExtension):
@property
def _file_extension(self):
return ""
def serialize(self, data, **kwargs):
return str(data)
def get_snapshot_name(self, *, index = 0):
testname = self._test_location.testname[::-1]
return f"{testname}.{index}"
def _get_file_basename(self, *, index = 0):
return self.test_location.filename[::-1]
@pytest.fixture
def snapshot_custom(snapshot):
return snapshot.use_extension(CustomSnapshotExtension)
@pytest.fixture
def snapshot_single(snapshot):
return snapshot.use_extension(SingleFileSnapshotExtension)
@pytest.fixture
def snapshot_png(snapshot):
return snapshot.use_extension(PNGImageSnapshotExtension)
@pytest.fixture
def snapshot_svg(snapshot):
return snapshot.use_extension(SVGImageSnapshotExtension)
"""
)
return {
"passed": (
"""
def test_passed_custom(snapshot_custom):
assert snapshot_custom == 'passed1'
assert snapshot_custom == 'passed2'
def test_passed_single(snapshot_single):
assert snapshot_single == b'passed1'
assert snapshot_single == b'passed2'
"""
),
"failed": (
"""
def test_failed_single(snapshot_single):
assert snapshot_single == 'failed'
def test_failed_image(snapshot_png):
assert "not a byte string" == snapshot_png
"""
),
}
@pytest.fixture
def testcases_updated(testcases_initial):
updated_testcases = {
"passed": (
"""
def test_passed_single(snapshot_single):
assert snapshot_single == b'passed'
"""
)
}
return {**testcases_initial, **updated_testcases}
@pytest.fixture
def generate_snapshots(testdir, testcases_initial):
testdir.makepyfile(test_file=testcases_initial["passed"])
result = testdir.runpytest("-v", "--snapshot-update")
return result, testdir, testcases_initial
def test_unsaved_snapshots(testdir, testcases_initial):
testdir.makepyfile(test_file=testcases_initial["passed"])
result = testdir.runpytest("-v")
result.stdout.re_match_lines(
(r".*Snapshot 'test_passed_single' does not exist!", r".*\+ b'passed1'")
)
assert result.ret == 1
def test_failed_snapshots(testdir, testcases_initial):
testdir.makepyfile(test_file=testcases_initial["failed"])
result = testdir.runpytest("-v", "--snapshot-update")
result.stdout.re_match_lines((r"2 snapshots failed\."))
assert result.ret == 1
def test_generated_snapshots(generate_snapshots):
result = generate_snapshots[0]
result.stdout.re_match_lines((r"4 snapshots generated\."))
assert "snapshots unused" not in result.stdout.str()
assert result.ret == 0
def test_unmatched_snapshots(generate_snapshots, testcases_updated):
testdir = generate_snapshots[1]
testdir.makepyfile(test_file=testcases_updated["passed"])
result = testdir.runpytest("-v")
result.stdout.re_match_lines((r"1 snapshot failed\. 2 snapshots unused\."))
assert result.ret == 1
def test_updated_snapshots(generate_snapshots, testcases_updated):
testdir = generate_snapshots[1]
testdir.makepyfile(test_file=testcases_updated["passed"])
result = testdir.runpytest("-v", "--snapshot-update")
result.stdout.re_match_lines((r"1 snapshot updated\. 2 unused snapshots deleted\."))
assert result.ret == 0
def test_warns_on_snapshot_name(generate_snapshots):
result = generate_snapshots[0]
result.stdout.re_match_lines(
(
r".*Warning:\s+",
r"\s+Can not relate snapshot location",
r"\s+Can not relate snapshot name",
r"4 snapshots generated\.",
)
)
assert result.ret == 0
``` |
{
"source": "joakimnyden/ciscodnacnautobot",
"score": 2
} |
#### File: ciscodnacnautobot/ciscodnacnautobot/models.py
```python
from django.db import models
from django.urls import reverse
from nautobot.utilities.querysets import RestrictedQuerySet
from nautobot.core.models import BaseModel
class Settings(BaseModel):
hostname = models.CharField(max_length=2000, unique=True, blank=True, null=True)
username = models.CharField(max_length=100)
password = models.CharField(max_length=100)
version = models.CharField(max_length=10)
verify = models.BooleanField(default=False)
status = models.BooleanField(default=True)
objects = RestrictedQuerySet.as_manager()
class Meta:
app_label = "ciscodnacnautobot"
ordering = ["hostname"]
def __str__(self):
return self.hostname
def get_absolute_url(self):
return reverse("plugins:ciscodnacnautobot:settings")
``` |
{
"source": "JoakimSjo/DockerBuildManagement",
"score": 2
} |
#### File: DockerBuildManagement/DockerBuildManagement/BuildSelections.py
```python
from DockerBuildSystem import DockerComposeTools
from SwarmManagement import SwarmTools
from DockerBuildManagement import BuildTools
import sys
import os
BUILD_KEY = 'build'
SAVE_IMAGES_KEY = 'saveImages'
def GetInfoMsg():
infoMsg = "Build selections is configured by adding a 'build' property to the .yaml file.\r\n"
infoMsg += "The 'build' property is a dictionary of build selections.\r\n"
infoMsg += "Add '-build' to the arguments to build all selections in sequence, \r\n"
infoMsg += "or add specific selection names to build those only.\r\n"
infoMsg += "Example: 'dbm -build myBuildSelection'.\r\n"
return infoMsg
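# An illustrative YAML shape for the 'build' property (key names inferred from
# the BuildTools constants used below -- a sketch, not the authoritative schema):
#
#   build:
#     selections:
#       myBuildSelection:
#         files:
#           - docker-compose.build.yml
#         saveImages: output/images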
def GetBuildSelections(arguments):
yamlData = SwarmTools.LoadYamlDataFromFiles(
arguments, [BuildTools.DEFAULT_BUILD_MANAGEMENT_YAML_FILE])
buildProperty = SwarmTools.GetProperties(arguments, BUILD_KEY, GetInfoMsg(), yamlData)
if BuildTools.SELECTIONS_KEY in buildProperty:
return buildProperty[BuildTools.SELECTIONS_KEY]
return {}
def BuildSelections(selectionsToBuild, buildSelections):
if len(selectionsToBuild) == 0:
for buildSelection in buildSelections:
BuildSelection(buildSelections[buildSelection], buildSelection)
else:
for selectionToBuild in selectionsToBuild:
if selectionToBuild in buildSelections:
BuildSelection(buildSelections[selectionToBuild], selectionToBuild)
def BuildSelection(buildSelection, selectionToBuild):
cwd = BuildTools.TryChangeToDirectoryAndGetCwd(buildSelection)
BuildTools.HandleTerminalCommandsSelection(buildSelection)
if BuildTools.FILES_KEY in buildSelection:
composeFiles = buildSelection[BuildTools.FILES_KEY]
buildComposeFile = 'docker-compose.build.' + selectionToBuild + '.yml'
DockerComposeTools.MergeComposeFiles(composeFiles, buildComposeFile)
DockerComposeTools.DockerComposeBuild([buildComposeFile])
if BuildTools.ADDITIONAL_TAG_KEY in buildSelection:
DockerComposeTools.TagImages(buildComposeFile, buildSelection[BuildTools.ADDITIONAL_TAG_KEY])
if BuildTools.ADDITIONAL_TAGS_KEY in buildSelection:
for tag in buildSelection[BuildTools.ADDITIONAL_TAGS_KEY]:
DockerComposeTools.TagImages(buildComposeFile, tag)
if SAVE_IMAGES_KEY in buildSelection:
outputFolder = buildSelection[SAVE_IMAGES_KEY]
DockerComposeTools.SaveImages(buildComposeFile, outputFolder)
os.chdir(cwd)
def HandleBuildSelections(arguments):
if len(arguments) == 0:
return
if not('-build' in arguments):
return
if '-help' in arguments:
print(GetInfoMsg())
return
selectionsToBuild = SwarmTools.GetArgumentValues(arguments, '-build')
selectionsToBuild += SwarmTools.GetArgumentValues(arguments, '-b')
buildSelections = GetBuildSelections(arguments)
BuildSelections(selectionsToBuild, buildSelections)
if __name__ == "__main__":
arguments = sys.argv[1:]
HandleBuildSelections(arguments)
```
#### File: DockerBuildManagement/DockerBuildManagement/PublishSelections.py
```python
from DockerBuildSystem import DockerComposeTools
from SwarmManagement import SwarmTools
from DockerBuildManagement import BuildTools
import sys
import os
PUBLISH_KEY = 'publish'
CONTAINER_ARTIFACT_KEY = 'containerArtifact'
def GetInfoMsg():
infoMsg = "Publish selections is configured by adding a 'publish' property to the .yaml file.\r\n"
infoMsg += "The 'publish' property is a dictionary of publish selections.\r\n"
infoMsg += "Add '-publish' to the arguments to publish all selections in sequence, \r\n"
infoMsg += "or add specific selection names to publish those only.\r\n"
infoMsg += "Example: 'dbm -publish myPublishSelection'.\r\n"
return infoMsg
def GetPublishSelections(arguments):
yamlData = SwarmTools.LoadYamlDataFromFiles(
arguments, [BuildTools.DEFAULT_BUILD_MANAGEMENT_YAML_FILE])
publishProperty = SwarmTools.GetProperties(arguments, PUBLISH_KEY, GetInfoMsg(), yamlData)
if BuildTools.SELECTIONS_KEY in publishProperty:
return publishProperty[BuildTools.SELECTIONS_KEY]
return {}
def PublishSelections(selectionsToPublish, publishSelections):
if len(selectionsToPublish) == 0:
for publishSelection in publishSelections:
PublishSelection(publishSelections[publishSelection], publishSelection)
else:
for selectionToPublish in selectionsToPublish:
if selectionToPublish in publishSelections:
PublishSelection(publishSelections[selectionToPublish], selectionToPublish)
def PublishSelection(publishSelection, publishSelectionKey):
cwd = BuildTools.TryChangeToDirectoryAndGetCwd(publishSelection)
BuildTools.HandleTerminalCommandsSelection(publishSelection)
if BuildTools.FILES_KEY in publishSelection:
if BuildTools.TryGetFromDictionary(publishSelection, CONTAINER_ARTIFACT_KEY, True):
PublishContainerSelection(publishSelection, publishSelectionKey)
else:
PublishArtifactSelection(publishSelection)
BuildTools.HandleCopyFromContainer(publishSelection)
os.chdir(cwd)
def PublishContainerSelection(publishSelection, publishSelectionKey):
composeFiles = publishSelection[BuildTools.FILES_KEY]
publishComposeFile = 'docker-compose.publish.' + publishSelectionKey + '.yml'
DockerComposeTools.MergeComposeFiles(composeFiles, publishComposeFile)
DockerComposeTools.PublishDockerImages(publishComposeFile)
if BuildTools.ADDITIONAL_TAG_KEY in publishSelection:
DockerComposeTools.PublishDockerImagesWithNewTag(publishComposeFile, publishSelection[BuildTools.ADDITIONAL_TAG_KEY])
if BuildTools.ADDITIONAL_TAGS_KEY in publishSelection:
for tag in publishSelection[BuildTools.ADDITIONAL_TAGS_KEY]:
DockerComposeTools.PublishDockerImagesWithNewTag(publishComposeFile, tag)
def PublishArtifactSelection(publishSelection):
DockerComposeTools.DockerComposeBuild(
publishSelection[BuildTools.FILES_KEY])
DockerComposeTools.DockerComposeUp(
publishSelection[BuildTools.FILES_KEY], False)
def HandlePublishSelections(arguments):
if len(arguments) == 0:
return
if not('-publish' in arguments):
return
if '-help' in arguments:
print(GetInfoMsg())
return
selectionsToPublish = SwarmTools.GetArgumentValues(arguments, '-publish')
selectionsToPublish += SwarmTools.GetArgumentValues(arguments, '-p')
publishSelections = GetPublishSelections(arguments)
PublishSelections(selectionsToPublish, publishSelections)
if __name__ == "__main__":
arguments = sys.argv[1:]
HandlePublishSelections(arguments)
``` |
{
"source": "JoakimSundling/py-apx",
"score": 2
} |
#### File: py-apx/tests/apx_file_test.py
```python
import os, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import apx
import unittest
import remotefile
import time
import re
from collections import namedtuple
FileWrite = namedtuple('FileWrite', "file offset length")
@apx.NodeDataHandler.register
class MockNodeDataHandler:
def __init__(self):
self.calls=[]
def inPortDataWriteNotify(self, file, offset: int, length: int):
"""
Called by FileManager when it receives a remote write in the node's inPortData file
"""
self.calls.append(FileWrite(file, offset, length))
class MockFileManager:
def __init__(self):
self.calls=[]
def outPortDataWriteNotify(self, file, offset : int, length : int):
self.calls.append(FileWrite(file, offset, length))
class TestApxFile(unittest.TestCase):
def test_input_file(self):
mockDataHandler = MockNodeDataHandler()
inFile = apx.InputFile('test1.in', 10)
inFile.nodeDataHandler=mockDataHandler
self.assertIsInstance(inFile.data, bytearray)
self.assertEqual(len(inFile.data), 10)
retval = inFile.write(5, b"\x01\x02\x03")
self.assertEqual(retval, 3)
self.assertEqual(len(mockDataHandler.calls), 1)
self.assertEqual(mockDataHandler.calls[-1].offset,5)
self.assertEqual(mockDataHandler.calls[-1].length,3)
self.assertEqual(inFile.data[5:8], b"\x01\x02\x03")
data = inFile.read(mockDataHandler.calls[-1].offset, mockDataHandler.calls[-1].length)
self.assertIsInstance(data, bytes)
self.assertEqual(len(data), 3)
self.assertEqual(data, b"\x01\x02\x03")
def test_output_file(self):
outFile = apx.OutputFile('test1.out', 5)
self.assertIsInstance(outFile.data, bytearray)
self.assertEqual(len(outFile.data), 5)
retval = outFile.write(2, b"\x01\x02\x03")
self.assertEqual(retval, 3)
self.assertEqual(outFile.data[2:5], b"\x01\x02\x03")
if __name__ == '__main__':
unittest.main()
```
#### File: py-apx/tests/generator_callback_first_test.py
```python
import os, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import apx
import unittest
import time
import shutil
class TestApxGenerator(unittest.TestCase):
def test_code_generator(self):
node = apx.Node("TestCallbackFirst")
node.append(apx.RequirePort('RS32Port','l','=-2147483648'))
node.append(apx.RequirePort('RU8Port','C','=255'))
callback_map = { 'RS32Port': 'RS32Port_cb_func' }
output_dir = 'derived'
output_dir_full = os.path.join(os.path.dirname(__file__),output_dir)
if not os.path.exists(output_dir_full):
os.makedirs(output_dir_full)
time.sleep(0.1)
apx.NodeGenerator().generate(output_dir_full, node, callbacks=callback_map)
with open (os.path.join(os.path.dirname(__file__), output_dir, 'ApxNode_{0.name}.h'.format(node)), "r") as fp:
generated=fp.read()
with open (os.path.join(os.path.dirname(__file__), 'expected_gen', 'ApxNode_{0.name}.h'.format(node)), "r") as fp:
expected=fp.read()
self.assertEqual(expected, generated)
with open (os.path.join(os.path.dirname(__file__), output_dir, 'ApxNode_{0.name}.c'.format(node)), "r") as fp:
generated=fp.read()
with open (os.path.join(os.path.dirname(__file__), 'expected_gen', 'ApxNode_{0.name}.c'.format(node)), "r") as fp:
expected=fp.read()
self.assertEqual(expected, generated)
shutil.rmtree(output_dir_full)
if __name__ == '__main__':
unittest.main()
```
#### File: py-apx/tests/node_data_test.py
```python
import os, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import apx
import unittest
import struct
def create_node_and_data():
node = apx.Node('TestNode')
node.add_type(apx.DataType('InactiveActive_T','C(0,3)'))
node.append(apx.ProvidePort('VehicleSpeed','S','=65535'))
node.append(apx.ProvidePort('MainBeam','T[0]','=3'))
node.append(apx.ProvidePort('TotalDistance','L', '=0xFFFFFFFF'))
node.append(apx.ProvidePort('ComplexRecordSignal','{"SensorData"{"x"S"y"S"z"S}"TimeStamp"L}', '={{65535,65535,65535},0xFFFFFFFF}'))
node.append(apx.RequirePort('RheostatLevelRqst','C','=255'))
node.append(apx.RequirePort('StrSignal','a[8]','=""'))
node.append(apx.RequirePort('RecordSignal','{"Name"a[8]"Id"L"Data"S[3]}','={"",0xFFFFFFFF,{0,0,0}}'))
return node
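# Require (input) port layout implied by the definition above, matching the
# offsets asserted in the tests below:
#   RheostatLevelRqst 'C'    -> 1 byte  at offset 0
#   StrSignal         'a[8]' -> 8 bytes at offset 1
#   RecordSignal      record -> 8 + 4 + 3*2 = 18 bytes at offset 9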
class TestNodeDataCompile(unittest.TestCase):
def test_port_map_and_compiled_programs(self):
node = create_node_and_data()
self.assertEqual(node.find('VehicleSpeed').id, 0)
self.assertEqual(node.find('RheostatLevelRqst').id, 0)
node_data = apx.NodeData(node)
self.assertEqual(len(node_data.inPortByteMap), 27)
self.assertEqual(node_data.inPortByteMap[0].name, 'RheostatLevelRqst')
self.assertEqual(node_data.inPortByteMap[0].id, 0)
for i in range(1, 9):
self.assertEqual(node_data.inPortByteMap[i].name, 'StrSignal')
self.assertEqual(node_data.inPortByteMap[i].id, 1)
for i in range(9, 27):
self.assertEqual(node_data.inPortByteMap[i].name, 'RecordSignal')
self.assertEqual(node_data.inPortByteMap[i].id, 2)
self.assertEqual(len(node_data.outPortDataMap), 4)
elem = node_data.outPortDataMap[0]
self.assertEqual(elem.data_offset, 0)
self.assertEqual(elem.data_len, 2)
self.assertIs(elem.port, node.find('VehicleSpeed'))
elem = node_data.outPortDataMap[1]
self.assertEqual(elem.data_offset, 2)
self.assertEqual(elem.data_len, 1)
self.assertIs(elem.port, node.find('MainBeam'))
elem = node_data.outPortDataMap[2]
self.assertEqual(elem.data_offset, 3)
self.assertEqual(elem.data_len, 4)
self.assertIs(elem.port, node.find('TotalDistance'))
elem = node_data.outPortDataMap[3]
self.assertEqual(elem.data_offset, 7)
self.assertEqual(elem.data_len, 10)
self.assertIs(elem.port, node.find('ComplexRecordSignal'))
expected = bytes([apx.OPCODE_PACK_PROG, apx.UINT16_LEN,0,0,0,
apx.OPCODE_PACK_U16])
self.assertEqual(node_data.outPortPrograms[0], expected)
expected = bytes([apx.OPCODE_PACK_PROG, apx.UINT8_LEN,0,0,0,
apx.OPCODE_PACK_U8])
self.assertEqual(node_data.outPortPrograms[1], expected)
expected = bytes([apx.OPCODE_PACK_PROG, apx.UINT32_LEN,0,0,0,
apx.OPCODE_PACK_U32])
self.assertEqual(node_data.outPortPrograms[2], expected)
expected = bytes([apx.OPCODE_PACK_PROG, (3*apx.UINT16_LEN+apx.UINT32_LEN),0,0,0,
apx.OPCODE_RECORD_ENTER,
apx.OPCODE_RECORD_SELECT])+"SensorData\0".encode('ascii')+bytes([
apx.OPCODE_RECORD_ENTER,
apx.OPCODE_RECORD_SELECT])+"x\0".encode('ascii')+bytes([
apx.OPCODE_PACK_U16,
apx.OPCODE_RECORD_SELECT])+"y\0".encode('ascii')+bytes([
apx.OPCODE_PACK_U16,
apx.OPCODE_RECORD_SELECT])+"z\0".encode('ascii')+bytes([
apx.OPCODE_PACK_U16,
apx.OPCODE_RECORD_LEAVE,
apx.OPCODE_RECORD_SELECT])+"TimeStamp\0".encode('ascii')+bytes([
apx.OPCODE_PACK_U32,
apx.OPCODE_RECORD_LEAVE
])
self.assertEqual(node_data.outPortPrograms[3], expected)
expected = bytes([apx.OPCODE_UNPACK_PROG, apx.UINT8_LEN,0,0,0,
apx.OPCODE_UNPACK_U8])
self.assertEqual(node_data.inPortPrograms[0], expected)
expected = bytes([apx.OPCODE_UNPACK_PROG, 8,0,0,0,
apx.OPCODE_UNPACK_STR, 8,0])
self.assertEqual(node_data.inPortPrograms[1], expected)
expected = bytes([apx.OPCODE_UNPACK_PROG, (8+apx.UINT32_LEN+apx.UINT16_LEN*3),0,0,0,
apx.OPCODE_RECORD_ENTER,
apx.OPCODE_RECORD_SELECT])+"Name\0".encode('ascii')+bytes([
apx.OPCODE_UNPACK_STR, 8,0,
apx.OPCODE_RECORD_SELECT])+"Id\0".encode('ascii')+bytes([
apx.OPCODE_UNPACK_U32,
apx.OPCODE_RECORD_SELECT])+"Data\0".encode('ascii')+bytes([
apx.OPCODE_UNPACK_U16AR, 3,0,
apx.OPCODE_RECORD_LEAVE
])
self.assertEqual(node_data.inPortPrograms[2], expected)
class TestNodeDataRead(unittest.TestCase):
def test_read_port_RheostatLevelRqst(self):
node = create_node_and_data()
port_RheostatLevelRqst = node.find('RheostatLevelRqst')
node_data = apx.NodeData(node)
input_file = node_data.inPortDataFile
#verify init value
self.assertEqual(node_data.read_require_port(port_RheostatLevelRqst), 255)
#write to input file
input_file.write(0, bytes([0]))
self.assertEqual(node_data.read_require_port(port_RheostatLevelRqst), 0)
input_file.write(0, bytes([10]))
self.assertEqual(node_data.read_require_port(port_RheostatLevelRqst), 10)
input_file.write(0, bytes([255]))
self.assertEqual(node_data.read_require_port(port_RheostatLevelRqst), 255)
def test_read_port_StrSignal(self):
node = create_node_and_data()
port_StrSignal = node.find('StrSignal')
node_data = apx.NodeData(node)
input_file = node_data.inPortDataFile
#verify init value
self.assertEqual(node_data.read_require_port(port_StrSignal), "")
#write to input file
input_file.write(1, 'Hello\0\0\0'.encode('utf-8'))
self.assertEqual(node_data.read_require_port(port_StrSignal), "Hello")
input_file.write(1, 'Selected'.encode('utf-8'))
self.assertEqual(node_data.read_require_port(port_StrSignal), "Selected")
input_file.write(1, 'a\0\0\0\0\0\0'.encode('utf-8'))
self.assertEqual(node_data.read_require_port(port_StrSignal), "a")
input_file.write(1, bytes(8))
self.assertEqual(node_data.read_require_port(port_StrSignal), "")
def test_read_RecordSignal(self):
node = create_node_and_data()
port_RecordSignal = node.find('RecordSignal')
node_data = apx.NodeData(node)
input_file = node_data.inPortDataFile
#verify init value
self.assertEqual(node_data.read_require_port(port_RecordSignal), {'Name': "", 'Id':0xFFFFFFFF, 'Data': [0,0,0]})
name_offset = 9
id_offset = 17
data_offset = 21
input_file.write(name_offset, "abcdefgh".encode('utf-8'))
self.assertEqual(node_data.read_require_port(port_RecordSignal), {'Name': "abcdefgh", 'Id':0xFFFFFFFF, 'Data': [0,0,0]})
input_file.write(id_offset, struct.pack("<L",0x12345678))
self.assertEqual(node_data.read_require_port(port_RecordSignal), {'Name': "abcdefgh", 'Id':0x12345678, 'Data': [0,0,0]})
input_file.write(data_offset, struct.pack("<HHH",0,0,1))
self.assertEqual(node_data.read_require_port(port_RecordSignal), {'Name': "abcdefgh", 'Id':0x12345678, 'Data': [0,0,1]})
input_file.write(data_offset, struct.pack("<HHH",18000,2,10))
self.assertEqual(node_data.read_require_port(port_RecordSignal), {'Name': "abcdefgh", 'Id':0x12345678, 'Data': [18000,2,10]})
def test_byte_to_port_all(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
input_file = node_data.inPortDataFile
RheostatLevelRqst_data_offset = 0
RheostatLevelRqst_data_len = 1
StrSignal_data_offset = 1
StrSignal_data_len = 8
RecordSignal_data_offset = 9
RecordSignal_data_len = 18
total_len = RheostatLevelRqst_data_len+StrSignal_data_len+RecordSignal_data_len
self.assertEqual(len(input_file.data), total_len)
self.assertEqual(len(node_data.inPortByteMap), total_len)
result = list(node_data.byte_to_port(0,total_len))
self.assertEqual(len(result), 3)
self.assertIs(result[0][0], node.find('RheostatLevelRqst'))
self.assertIs(result[1][0], node.find('StrSignal'))
self.assertIs(result[2][0], node.find('RecordSignal'))
self.assertEqual(result[0][1], RheostatLevelRqst_data_offset)
self.assertEqual(result[1][1], StrSignal_data_offset)
self.assertEqual(result[2][1], RecordSignal_data_offset)
self.assertEqual(result[0][2], RheostatLevelRqst_data_len)
self.assertEqual(result[1][2], StrSignal_data_len)
self.assertEqual(result[2][2], RecordSignal_data_len)
def test_byte_to_port_RheostatLevelRqst(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
RheostatLevelRqst_data_offset = 0
RheostatLevelRqst_data_len = 1
result = list(node_data.byte_to_port(RheostatLevelRqst_data_offset, RheostatLevelRqst_data_len))
self.assertEqual(len(result), 1)
port, offset, length = result[0]
self.assertIs(port, node.find('RheostatLevelRqst'))
self.assertEqual(offset, RheostatLevelRqst_data_offset)
self.assertEqual(length, RheostatLevelRqst_data_len)
def test_byte_to_port_StrSignal(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
StrSignal_data_offset = 1
StrSignal_data_len = 8
for offset in range(StrSignal_data_offset, StrSignal_data_offset+StrSignal_data_len):
result = list(node_data.byte_to_port(offset, 1))
self.assertEqual(len(result), 1)
port, offset, length = result[0]
self.assertIs(port, node.find('StrSignal'))
self.assertEqual(offset, StrSignal_data_offset)
self.assertEqual(length, StrSignal_data_len)
def test_byte_to_port_RecordSignal(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
RecordSignal_data_offset = 9
RecordSignal_data_len = 18
for offset in range(RecordSignal_data_offset, RecordSignal_data_offset+RecordSignal_data_len):
result = list(node_data.byte_to_port(offset, 1))
self.assertEqual(len(result), 1)
port, offset, length = result[0]
self.assertIs(port, node.find('RecordSignal'))
self.assertEqual(offset, RecordSignal_data_offset)
self.assertEqual(length, RecordSignal_data_len)
def test_byte_to_port_invalid_args(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
self.assertEqual(len(node_data.inPortByteMap), 27)
with self.assertRaises(ValueError) as context:
result = list(node_data.byte_to_port(28,1))
self.assertEqual(str(context.exception), "start_offset (28) is beyond length of file (27)")
with self.assertRaises(ValueError) as context:
result = list(node_data.byte_to_port(25,5))
self.assertEqual(str(context.exception), "end_offset (30) is beyond length of file (27)")
RecordSignal_data_offset = 9
RecordSignal_data_len = 18
result = list(node_data.byte_to_port(25,2))
port, offset, length = result[0]
self.assertIs(port, node.find('RecordSignal'))
self.assertEqual(offset, RecordSignal_data_offset)
self.assertEqual(length, RecordSignal_data_len)
def test_callback(self):
call_history = []
@apx.NodeDataClient.register
class Listener:
def on_require_port_data(self, port, value):
call_history.append((port, value))
listener_obj = Listener()
node = create_node_and_data()
node_data = apx.NodeData(node)
node_data.nodeDataClient = listener_obj
input_file = node_data.inPortDataFile
RheostatLevelRqst_data_offset = 0
RheostatLevelRqst_data_len = 1
StrSignal_data_offset = 1
StrSignal_data_len = 8
RecordSignal_data_offset = 9
RecordSignal_data_len = 18
#test write RheostatLevelRqst
self.assertEqual(len(call_history), 0)
input_file.write(0, bytes([0]))
self.assertEqual(len(call_history), 1)
self.assertEqual(call_history[-1][0], node.find('RheostatLevelRqst'))
self.assertEqual(call_history[-1][1], 0)
input_file.write(0, bytes([255]))
self.assertEqual(len(call_history), 2)
self.assertEqual(call_history[-1][0], node.find('RheostatLevelRqst'))
self.assertEqual(call_history[-1][1], 255)
#test write RecordSignal
input_file.write(RecordSignal_data_offset, "Test".encode('utf-8'))
self.assertEqual(len(call_history), 3)
self.assertEqual(call_history[-1][0], node.find('RecordSignal'))
self.assertEqual(call_history[-1][1], {'Name': "Test", 'Id': 0xFFFFFFFF, 'Data':[0,0,0]})
input_file.write(RecordSignal_data_offset, "Abc\0\0\0\0\0".encode('utf-8')+struct.pack('<L',918)+struct.pack('<HHH', 1000, 2000, 4000))
self.assertEqual(len(call_history), 4)
self.assertEqual(call_history[-1][0], node.find('RecordSignal'))
self.assertEqual(call_history[-1][1], {'Name': "Abc", 'Id': 918, 'Data':[1000,2000,4000]})
class TestNodeDataWrite(unittest.TestCase):
def test_write_VehicleSpeed(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
VehicleSpeed_port = node.find('VehicleSpeed')
VehicleSpeed_offset = 0
VehicleSpeed_length = 2
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(VehicleSpeed_offset, VehicleSpeed_length), bytes([0xFF, 0xFF]))
node_data.write_provide_port(VehicleSpeed_port, 0x1234)
self.assertEqual(output_file.read(VehicleSpeed_offset, VehicleSpeed_length), bytes([0x34, 0x12]))
def test_write_MainBeam(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
MainBeam_port = node.find('MainBeam')
MainBeam_offset = 2
MainBeam_length = 1
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(MainBeam_offset, MainBeam_length), bytes([3]))
node_data.write_provide_port(MainBeam_port, 0)
self.assertEqual(output_file.read(MainBeam_offset, MainBeam_length), bytes([0]))
node_data.write_provide_port(MainBeam_port, 3)
self.assertEqual(output_file.read(MainBeam_offset, MainBeam_length), bytes([3]))
def test_write_TotalDistance(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
TotalDistance_port = node.find('TotalDistance')
TotalDistance_offset = 3
TotalDistance_length = 4
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(TotalDistance_offset, TotalDistance_length), bytes([0xFF,0xFF,0xFF,0xFF]))
node_data.write_provide_port(TotalDistance_port, 0)
self.assertEqual(output_file.read(TotalDistance_offset, TotalDistance_length), bytes([0,0,0,0]))
node_data.write_provide_port(TotalDistance_port, 0x12345678)
self.assertEqual(output_file.read(TotalDistance_offset, TotalDistance_length), bytes([0x78,0x56,0x34,0x12]))
def test_write_ComplexRecordSignal(self):
node = create_node_and_data()
node_data = apx.NodeData(node)
ComplexRecordSignal_port = node.find('ComplexRecordSignal')
ComplexRecordSignal_offset = 7
ComplexRecordSignal_length = 10
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(ComplexRecordSignal_offset, ComplexRecordSignal_length), struct.pack("<HHHL", 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFFFFFF))
#write some values
node_data.write_provide_port(ComplexRecordSignal_port, {"SensorData": dict(x = 1, y =2, z= 3), 'TimeStamp':0})
self.assertEqual(output_file.read(ComplexRecordSignal_offset, ComplexRecordSignal_length), struct.pack("<HHHL", 1, 2, 3, 0))
def test_write_string(self):
node = apx.Node('TestNode')
port = node.append(apx.ProvidePort('StrSignal', 'a[6]', '=""'))
node_data = apx.NodeData(node)
signal_offset=0
signal_length=6
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(signal_offset, signal_length), bytes(6))
#write value
node_data.write_provide_port(port, "Hello")
self.assertEqual(output_file.read(signal_offset, signal_length), "Hello\0".encode('utf-8'))
node_data.write_provide_port(port, "Abc")
self.assertEqual(output_file.read(signal_offset, signal_length), "Abc\0\0\0".encode('utf-8'))
node_data.write_provide_port(port, "")
self.assertEqual(output_file.read(signal_offset, signal_length), bytes(6))
def test_write_s8(self):
node = apx.Node('TestNode')
port = node.append(apx.ProvidePort('S8Signal', 'c', '=0'))
node_data = apx.NodeData(node)
signal_offset=0
signal_length=1
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(signal_offset, signal_length), bytes(signal_length))
#write value
node_data.write_provide_port(port, -1)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<b', -1))
node_data.write_provide_port(port, -128)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<b', -128))
node_data.write_provide_port(port, 127)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<b', 127))
node_data.write_provide_port(port, 0)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<b', 0))
def test_write_s16(self):
node = apx.Node('TestNode')
port = node.append(apx.ProvidePort('S16Signal', 's', '=0'))
node_data = apx.NodeData(node)
signal_offset=0
signal_length=2
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(signal_offset, signal_length), bytes(signal_length))
#write value
node_data.write_provide_port(port, -1)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<h', -1))
node_data.write_provide_port(port, -32768)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<h', -32768))
node_data.write_provide_port(port, 32767)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<h',32767))
node_data.write_provide_port(port, 0)
        self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<h', 0))
def test_write_s32(self):
node = apx.Node('TestNode')
port = node.append(apx.ProvidePort('S32Signal', 'l', '=0'))
node_data = apx.NodeData(node)
signal_offset=0
signal_length=4
output_file = node_data.outPortDataFile
#verify init value
self.assertEqual(output_file.read(signal_offset, signal_length), bytes(signal_length))
#write value
node_data.write_provide_port(port, -1)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<i', -1))
node_data.write_provide_port(port, -2147483648)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<i', -2147483648))
node_data.write_provide_port(port, 2147483647)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<i',2147483647))
node_data.write_provide_port(port, 0)
self.assertEqual(output_file.read(signal_offset, signal_length), struct.pack('<i', 0))
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joakimvonanka/NEA",
"score": 3
} |
#### File: Code/tkinter/image-viewer-for-extractor.py
```python
from tkinter import *
from PIL import ImageTk, Image
import os, os.path
def forward(img_no):
global label
global button_forward
global button_back
global button_exit
label.grid_forget()
label = Label(image=List_images[img_no-1])
label.grid(row=1, column=0, columnspan=3)
    button_forward = Button(root, text="Forward",
                            command=lambda: forward(img_no+1))
    # disable the forward button once the last image is reached
    if img_no == len(List_images):
        button_forward = Button(root, text="Forward",
                                state=DISABLED)
    button_back = Button(root, text="Back",
                         command=lambda: back(img_no-1))
    button_back.grid(row=5, column=0)
    button_exit.grid(row=5, column=1)
    button_forward.grid(row=5, column=2)
def back(img_no):
global label
global button_forward
global button_back
global button_exit
label.grid_forget()
label = Label(image=List_images[img_no - 1])
label.grid(row=1, column=0, columnspan=3)
button_forward = Button(root, text="forward",
command=lambda: forward(img_no + 1))
button_back = Button(root, text="Back",
command=lambda: back(img_no - 1))
print(img_no)
if img_no == 1:
button_back = Button(root, Text="Back", state=DISABLED)
label.grid(row=1, column=0, columnspan=3)
button_back.grid(row=5, column=0)
button_exit.grid(row=5, column=1)
button_forward.grid(row=5, column=2)
root = Tk()
root.title("Image Viewer")
root.geometry("1000x900")
List_images = []
folder_names = os.listdir("./extracted_frames/video_frames/")
print(folder_names)
for file_name in folder_names:
    # keep a reference to every PhotoImage so it is not garbage collected
    photo = ImageTk.PhotoImage(Image.open("./extracted_frames/video_frames/" + file_name))
    List_images.append(photo)
label = Label(image=List_images[0])
label.grid(row=1, column=0, columnspan=3)
button_back = Button(root, text="Back", command=back,
state=DISABLED)
button_exit = Button(root, text="Exit",
command=root.quit)
button_forward = Button(root, text="Forward",
command=lambda: forward(1))
button_back.grid(row=5, column=0)
button_exit.grid(row=5, column=1)
button_forward.grid(row=5, column=2)
root.mainloop()
``` |
{
"source": "joakimwinum/python-snake",
"score": 2
} |
#### File: joakimwinum/python-snake/python-snake.py
```python
import os
import copy
import random
import select
import subprocess
import sys
import time
# functions
# create functions
def create_player():
player_sprite = '&'
return [[40, 12, player_sprite], [39, 12, player_sprite], [38, 12, player_sprite]]
def create_frame_wall():
frame_wall_array = []
wall_sprite = '#'
i = 0
while i < board_x:
j = 0
while j < board_y:
if i == 0 or i == board_x - 1 or j == 0 or j == board_y - 1:
# create the frame wall
frame_wall_array.append([i, j, wall_sprite])
j += 1
i += 1
return frame_wall_array
def create_background():
background_array = []
background_sprite = ' '
i = 0
while i < board_x:
j = 0
while j < board_y:
# create the background
background_array.append([i, j, background_sprite])
j += 1
i += 1
return background_array
def draw(entities):
global blank_board
global cache_draw
board = ''
# create a blank board array if it is not already done
if '0,0' not in blank_board:
j = 0
while j < board_y:
i = 0
while i < board_x:
blank_board[''+str(i)+','+str(j)+''] = '%'
i += 1
j += 1
board_array = copy.copy(blank_board)
# draw all the entities onto the board array
for entity in entities:
entity_is_multidimensional = True
try:
entity[0][0]
except (NameError, KeyError, TypeError):
entity_is_multidimensional = False
if entity_is_multidimensional:
for coo in entity:
board_array[''+str(coo[0])+','+str(coo[1])+''] = coo[2]
else:
board_array[''+str(entity[0])+','+str(entity[1])+''] = entity[2]
# store the current entities in the draw cache
if cache_draw:
blank_board = board_array
cache_draw = False
# convert the board array to string
j = 0
while j < board_y:
i = 0
while i < board_x:
# add margin on the left side of the board
if i == 0:
board += left_margin
# draw the board array
board += board_array[''+str(i)+','+str(j)+'']
# add a line break on end of each line
if i == board_x - 1:
board += '\n'
i += 1
j += 1
board = board.rstrip()
# return the board string
return board
# other functions
def player_function(player):
global snake_len
snake_len = len(player)
head_direction = None
north = 'north'
south = 'south'
west = 'west'
east = 'east'
    # determine the direction of the player's head
if player[0][0] > player[1][0]:
head_direction = east
elif player[0][0] < player[1][0]:
head_direction = west
elif player[0][1] < player[1][1]:
head_direction = north
elif player[0][1] > player[1][1]:
head_direction = south
# move player with or without input
if key is not None:
if key == 'w' and (head_direction == west or head_direction == east):
player = move_player(player, north)
elif key == 'a' and (head_direction == north or head_direction == south):
player = move_player(player, west)
elif key == 's' and (head_direction == west or head_direction == east):
player = move_player(player, south)
elif key == 'd' and (head_direction == north or head_direction == south):
player = move_player(player, east)
else:
player = move_player(player, head_direction)
return player
def move_player(player, direction):
north = 'north'
south = 'south'
west = 'west'
east = 'east'
# take off the tail
if not increase_player():
player.pop()
# create the new head
new_head = copy.copy(player[0])
# move the new head
if direction == north:
new_head[1] -= 1
engine.fps = engine.fps_vertical
elif direction == west:
new_head[0] -= 1
engine.fps = engine.fps_horizontal
elif direction == south:
new_head[1] += 1
engine.fps = engine.fps_vertical
elif direction == east:
new_head[0] += 1
engine.fps = engine.fps_horizontal
# add the new head on
player = [new_head] + player
return player
def increase_player(set_variable=False, int_variable=None):
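    # grows the snake by suppressing tail removal until its length has
    # increased by increase_interval segments; also refreshes the score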
global snake_old_len
global do_increase_player
global increase_interval
global score
score = snake_len - 3
if int_variable is not None:
increase_interval = int_variable
if set_variable:
snake_old_len = snake_len
if snake_len >= snake_old_len + increase_interval:
do_increase_player = False
else:
do_increase_player = True
return do_increase_player
def collision_testing(player, point_dot):
global update_point_dot
# players head
player_head = player[0]
# check for collision with wall
for wall in frame_wall:
if wall[0] == player_head[0] and wall[1] == player_head[1]:
game_over()
# player eats point dot
if player_head[0] == point_dot[0] and player_head[1] == point_dot[1]:
increase_player(True)
update_point_dot = True
# check if player head touches its own tail
for key, part in enumerate(player, start=0):
if key == 0:
# skip head
continue
if player_head[0] == part[0] and player_head[1] == part[1]:
game_over()
def game_over():
screen = left_margin
screen += global_game_title
screen += ' Game Over '
pad_score = str(score).rjust(4, '0')
right_pointing_triangle_sprite = '>'
screen += right_pointing_triangle_sprite
screen += ' Score: '+pad_score
if dev_mode:
screen += ' [DevMode]'
screen += '\n'
screen += board
# clear the screen
engine.clear_screen()
# print the screen
print(screen)
engine.reset_tty()
exit()
def generate_new_coordinates(point_dot, player):
while True:
# get random coordinates
rand_x = random.randint(1, (board_x-2))
rand_y = random.randint(1, (board_y-2))
# check if the player already is on the new coordinates
do_continue = False
for part in player:
if part[0] == rand_x and part[1] == rand_y:
do_continue = True
break
if do_continue:
continue
# check if the new coordinates are in the old place of the point dot
if point_dot is not None and point_dot[0] == rand_x and point_dot[1] == rand_y:
continue
break
return [rand_x, rand_y]
def point_dot_function(player, point_dot=None):
global update_point_dot
point_dot_sprite = '*'
# generate the first dot
if point_dot is None:
coordinates = generate_new_coordinates(None, player)
point_dot = [coordinates[0], coordinates[1], point_dot_sprite]
# update the dot
if update_point_dot:
coordinates = generate_new_coordinates(point_dot, player)
point_dot = [coordinates[0], coordinates[1], point_dot_sprite]
update_point_dot = False
return point_dot
def print_stats():
# add left margin
string = left_margin
# display game name
string += global_game_title
# display score
pad_score = str(score).rjust(4, '0')
string += ' points: '+pad_score
# display extra stats in dev mode
if dev_mode:
# display snake length
pad_snake_len = str(snake_len).rjust(4, '0')
string += ', length: '+pad_snake_len
# display total number of frames
pad_frames = str(total_number_of_frames).rjust(4, '0')
string += ', total frames: '+pad_frames
# display frames per second
pad_fps = str(engine.fps).rjust(4, '0')
string += ', FPS: '+pad_fps
# add new line
string += '\n'
return string
def key_actions():
global dev_mode
global update_point_dot
# do actions upon certain key presses
if key is not None:
if key == 'q':
# exit the game
engine.reset_tty()
exit()
elif key == 'i':
# increase length
if dev_mode:
increase_player(True, 40)
elif key == 'u':
# increase length
if dev_mode:
increase_player(True, 140)
elif key == 'r':
# reset length increase
if dev_mode:
increase_player(False, 1)
elif key == 'e':
# increase fps
if dev_mode:
engine.fps_horizontal = 25
engine.fps_vertical = int(engine.fps_horizontal*engine.fps_factor)
elif key == 'y':
# increase fps by 1 fps
if dev_mode:
engine.fps_horizontal = engine.fps_horizontal + 1
engine.fps_vertical = int(engine.fps_horizontal*engine.fps_factor)
elif key == 'n':
# replace point dot
if dev_mode:
update_point_dot = True
elif key == 't':
# activate dev mode
if not dev_mode:
dev_mode = True
class PythonGameEngine:
"""Class PythonGameEngine
The game engine takes care of mainly three things:
* clearing the screen
* syncing the game loop
* detecting key presses
Remember to call the TTY reset method before exit if the built in key
detection function have been used.
Author: <NAME> <<EMAIL>>
License: https://opensource.org/licenses/mit-license.html MIT License
Version: 1.0.0
"""
def __init__(self):
self._game_time_beginning = None
self._game_time_end = None
self._fps = None
self._fps_horizontal = None
self._fps_vertical = None
self._fps_factor = None
self._os_variable = None
self._key_read_timeout = None
self._tty_settings = None
@property
def game_time_beginning(self):
return self._game_time_beginning
@game_time_beginning.setter
def game_time_beginning(self, value):
self._game_time_beginning = value
@property
def game_time_end(self):
return self._game_time_end
@game_time_end.setter
def game_time_end(self, value):
self._game_time_end = value
@property
def fps(self):
return self._fps
@fps.setter
def fps(self, value):
self._fps = value
@property
def fps_horizontal(self):
return self._fps_horizontal
@fps_horizontal.setter
def fps_horizontal(self, value):
self._fps_horizontal = value
@property
def fps_vertical(self):
return self._fps_vertical
@fps_vertical.setter
def fps_vertical(self, value):
self._fps_vertical = value
@property
def fps_factor(self):
return self._fps_factor
@fps_factor.setter
def fps_factor(self, value):
self._fps_factor = value
@property
def os_variable(self):
return self._os_variable
@os_variable.setter
def os_variable(self, value):
self._os_variable = value
@property
def key_read_timeout(self):
return self._key_read_timeout
@key_read_timeout.setter
def key_read_timeout(self, value):
self._key_read_timeout = value
@property
def tty_settings(self):
return self._tty_settings
@tty_settings.setter
def tty_settings(self, value):
self._tty_settings = value
@staticmethod
def microtime_now():
microtime = time.time()
time_variable = str(microtime).split('.')
timestamp = int(time_variable[0])
microseconds = int(int(time_variable[1])/100)
return [microseconds, timestamp]
def fps_sync(self):
"""This method sets a sleep depending on chosen fps
Put this at the end of a game loop to sync with the fps you have chosen.
"""
# get the time from the bottom of the code
self.game_time_end = self.microtime_now()
if self.game_time_beginning is not None:
time_beginning = self.game_time_beginning[0]
else:
time_beginning = None
time_end = self.game_time_end[0]
if time_beginning is None:
self.key_read_timeout = 100
self.game_time_beginning = self.microtime_now()
return False
# the loop is taking longer than 1 second
if self.game_time_end[1] - self.game_time_beginning[1] > 1:
self.key_read_timeout = 100
self.game_time_beginning = self.microtime_now()
return False
fps = self.fps # frames per second
microsecond = 10**6 # 1 second = 1*10^6 microseconds
if time_end > time_beginning:
time_variable = time_end - time_beginning
else:
time_variable = microsecond + time_end - time_beginning
if time_variable > microsecond:
# the code is going too slow, no wait
self.key_read_timeout = 100
self.game_time_beginning = self.microtime_now()
return False
frames_per_microsecond = int(microsecond/fps)
pause = frames_per_microsecond - time_variable
if pause < 0:
# the code is going too slow, no wait
self.key_read_timeout = 100
self.game_time_beginning = self.microtime_now()
return False
# actively adjust the key reading timeout
self.key_read_timeout = int(pause/10)
# sleep
time.sleep(pause/microsecond)
# get the time from the beginning of the code
self.game_time_beginning = self.microtime_now()
return True
def clear_screen(self):
"""Clears the screen
It will detect the current operation system and choose which system
screen clear function to use.
"""
os_variable = self.os_variable
# check which os the host is running
if os_variable is None:
if os.name == 'nt':
# windows
self.os_variable = 'windows'
else:
# other (linux)
self.os_variable = 'other'
os_variable = self.os_variable
# clear the screen
if os_variable == 'windows':
# windows
os.system('cls')
else:
# other (linux)
os.system('clear')
def read_key_press(self):
"""Returns the key character typed
Can cause high CPU usage.
Timeout variable will be auto updated by the fps_sync function.
"""
self.modify_tty()
timeout = self.key_read_timeout # microseconds
microsecond = 10**6 # 1 second = 1*10^6 microseconds
# set the timeout variable if it has not already been set
if timeout is None:
timeout = 200*10**3 # recommended value
self.key_read_timeout = timeout
stdin = sys.stdin
read = [stdin]
read_timeout = timeout/microsecond # timeout variable in seconds
# check if any key is pressed within the timeout period
rlist, wlist, xlist = select.select(read, [], [], read_timeout)
if len(rlist) == 0:
return None
# return the key pressed
return stdin.read(1)
def modify_tty(self):
tty_settings = self.tty_settings
if tty_settings is not None:
return False
# save current tty config
command = ['stty', '-g']
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, error = process.communicate()
tty_settings = output.decode('ascii')
self.tty_settings = tty_settings
# change tty to be able to read in characters
os.system('stty -icanon')
return True
def reset_tty(self):
tty_settings = self.tty_settings
if tty_settings is None:
return False
# reset tty back to its original state
tty_settings = tty_settings.rstrip()
os.system("stty '"+tty_settings+"'")
return True
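# Minimal usage sketch for PythonGameEngine (illustrative only; the real
# game loop further down follows the same pattern):
#   engine = PythonGameEngine()
#   engine.fps = 16
#   while running:
#       key = engine.read_key_press()   # returns None if no key was pressed
#       ...update state and draw...
#       engine.fps_sync()               # sleep to hold the chosen frame rate
#   engine.reset_tty()                  # always restore the terminal on exit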
# init
engine = PythonGameEngine()
# settings
frames_per_second_horizontal = 16
diff_constant = .65
engine.fps_horizontal = frames_per_second_horizontal
engine.fps_factor = diff_constant
engine.fps_vertical = int(engine.fps_horizontal*engine.fps_factor)
engine.fps = engine.fps_horizontal
point_dot = None
snake_sprite = '&'
right_pointing_triangle_sprite = '>'
# global variables
board_x = 80
board_y = 24
score = 0
snake_len = 0
snake_old_len = 0
total_number_of_frames = 0
increase_interval = 1
global_game_title = snake_sprite+' Python Snake '+right_pointing_triangle_sprite
key = None
left_margin = ' '
screen = None
board = ''
blank_board = {}
do_increase_player = False
update_point_dot = False
dev_mode = False
# game setup (to be run once)
# create the background and frame wall
background = create_background()
frame_wall = create_frame_wall()
# draw the background and frame onto the board and store it in the draw cache
cache_draw = True
draw([
background,
frame_wall
])
# create the player
player = create_player()
# game loop
while True:
# add stats to the screen
screen = print_stats()
# update the player
player = player_function(player)
# update the point dot
point_dot = point_dot_function(player, point_dot)
# collision testing
collision_testing(player, point_dot)
# draw the board with all the entities on it and add it to the screen
board = draw([
point_dot,
player
])
screen += board
# clear the screen
engine.clear_screen()
# print the screen
print(screen)
# take key input
print(left_margin)
key = engine.read_key_press()
# perform key actions
key_actions()
# count frames
total_number_of_frames += 1
# sync game loop to the saved fps value
engine.fps_sync()
``` |
{
"source": "joakimzhang/python-electron",
"score": 3
} |
#### File: lib/ANUGA/redfearn.py
```python
from geo_reference import Geo_reference, DEFAULT_ZONE
def degminsec2decimal_degrees(dd,mm,ss):
assert abs(mm) == mm
assert abs(ss) == ss
if dd < 0:
sign = -1
else:
sign = 1
return sign * (abs(dd) + mm/60. + ss/3600.)
def decimal_degrees2degminsec(dec):
if dec < 0:
sign = -1
else:
sign = 1
dec = abs(dec)
dd = int(dec)
f = dec-dd
mm = int(f*60)
ss = (f*60-mm)*60
return sign*dd, mm, ss
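# Worked example (plain arithmetic; values rounded):
#   degminsec2decimal_degrees(-37, 39, 10.156) == -(37 + 39/60. + 10.156/3600)
#                                              ~= -37.652821
#   decimal_degrees2degminsec(-37.652821)      ~= (-37, 39, 10.156)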
def redfearn(lat, lon, false_easting=None, false_northing=None,
zone=None, central_meridian=None, scale_factor=None):
"""Compute UTM projection using Redfearn's formula
lat, lon is latitude and longitude in decimal degrees
    If false easting and northing are specified they will override
    the standard values.
    If zone is specified, reproject lat and lon to the specified zone
    instead of the standard zone.
    If central_meridian is specified, reproject lat and lon to that
    meridian instead of a zone; in this case zone will be set to -1 to
    indicate a non-UTM projection.
    Note that zone and central_meridian cannot both be specified.
"""
from math import pi, sqrt, sin, cos, tan
#GDA Specifications
a = 6378137.0 #Semi major axis
inverse_flattening = 298.257222101 #1/f
if scale_factor is None:
K0 = 0.9996 #Central scale factor
else:
K0 = scale_factor
#print 'scale', K0
zone_width = 6 #Degrees
longitude_of_central_meridian_zone0 = -183
longitude_of_western_edge_zone0 = -186
if false_easting is None:
false_easting = 500000
if false_northing is None:
if lat < 0:
false_northing = 10000000 #Southern hemisphere
else:
            false_northing = 0 #Northern hemisphere
#Derived constants
f = 1.0/inverse_flattening
b = a*(1-f) #Semi minor axis
    e2 = 2*f - f*f   # = f*(2-f) = (a^2-b^2)/a^2, eccentricity squared
    e = sqrt(e2)
    e2_ = e2/(1-e2)  # = (a^2-b^2)/b^2, second eccentricity squared
    e_ = sqrt(e2_)
e4 = e2*e2
e6 = e2*e4
#Foot point latitude
    n = (a-b)/(a+b)  # third flattening
n2 = n*n
n3 = n*n2
n4 = n2*n2
G = a*(1-n)*(1-n2)*(1+9*n2/4+225*n4/64)*pi/180
phi = lat*pi/180 #Convert latitude to radians
sinphi = sin(phi)
sin2phi = sin(2*phi)
sin4phi = sin(4*phi)
sin6phi = sin(6*phi)
cosphi = cos(phi)
cosphi2 = cosphi*cosphi
cosphi3 = cosphi*cosphi2
cosphi4 = cosphi2*cosphi2
cosphi5 = cosphi*cosphi4
cosphi6 = cosphi2*cosphi4
cosphi7 = cosphi*cosphi6
cosphi8 = cosphi4*cosphi4
t = tan(phi)
t2 = t*t
t4 = t2*t2
t6 = t2*t4
#Radius of Curvature
rho = a*(1-e2)/(1-e2*sinphi*sinphi)**1.5
nu = a/(1-e2*sinphi*sinphi)**0.5
psi = nu/rho
psi2 = psi*psi
psi3 = psi*psi2
psi4 = psi2*psi2
#Meridian distance
A0 = 1 - e2/4 - 3*e4/64 - 5*e6/256
A2 = 3.0/8*(e2+e4/4+15*e6/128)
A4 = 15.0/256*(e4+3*e6/4)
A6 = 35*e6/3072
term1 = a*A0*phi
term2 = -a*A2*sin2phi
term3 = a*A4*sin4phi
term4 = -a*A6*sin6phi
m = term1 + term2 + term3 + term4 #OK
if zone is not None and central_meridian is not None:
msg = 'You specified both zone and central_meridian. Provide only one of them'
        raise Exception(msg)
# Zone
if zone is None:
zone = int((lon - longitude_of_western_edge_zone0)/zone_width)
# Central meridian
if central_meridian is None:
central_meridian = zone*zone_width+longitude_of_central_meridian_zone0
else:
zone = -1
omega = (lon-central_meridian)*pi/180 #Relative longitude (radians)
omega2 = omega*omega
omega3 = omega*omega2
omega4 = omega2*omega2
omega5 = omega*omega4
omega6 = omega3*omega3
omega7 = omega*omega6
omega8 = omega4*omega4
#Northing
term1 = nu*sinphi*cosphi*omega2/2
term2 = nu*sinphi*cosphi3*(4*psi2+psi-t2)*omega4/24
term3 = nu*sinphi*cosphi5*\
(8*psi4*(11-24*t2)-28*psi3*(1-6*t2)+\
psi2*(1-32*t2)-psi*2*t2+t4-t2)*omega6/720
term4 = nu*sinphi*cosphi7*(1385-3111*t2+543*t4-t6)*omega8/40320
northing = false_northing + K0*(m + term1 + term2 + term3 + term4)
#Easting
term1 = nu*omega*cosphi
term2 = nu*cosphi3*(psi-t2)*omega3/6
term3 = nu*cosphi5*(4*psi3*(1-6*t2)+psi2*(1+8*t2)-2*psi*t2+t4)*omega5/120
term4 = nu*cosphi7*(61-479*t2+179*t4-t6)*omega7/5040
easting = false_easting + K0*(term1 + term2 + term3 + term4)
return zone, easting, northing
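# Illustrative call (easting/northing in metres depend on the GDA constants
# above; the zone follows directly from the 6-degree zone formula):
#   zone, easting, northing = redfearn(-37.65, 143.93)
#   # zone == 54 for longitude 143.93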
def convert_from_latlon_to_utm(points=None,
latitudes=None,
longitudes=None,
false_easting=None,
false_northing=None):
"""Convert latitude and longitude data to UTM as a list of coordinates.
Input
points: list of points given in decimal degrees (latitude, longitude) or
latitudes: list of latitudes and
longitudes: list of longitudes
false_easting (optional)
false_northing (optional)
Output
points: List of converted points
zone: Common UTM zone for converted points
Notes
Assume the false_easting and false_northing are the same for each list.
If points end up in different UTM zones, an ANUGAerror is thrown.
"""
old_geo = Geo_reference()
utm_points = []
    if points is None:
        assert len(latitudes) == len(longitudes)
        points = zip(latitudes, longitudes)
for point in points:
zone, easting, northing = redfearn(float(point[0]),
float(point[1]),
false_easting=false_easting,
false_northing=false_northing)
new_geo = Geo_reference(zone)
old_geo.reconcile_zones(new_geo)
utm_points.append([easting, northing])
return utm_points, old_geo.get_zone()
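# Illustrative usage; all points must reconcile to a single UTM zone or
# Geo_reference.reconcile_zones() raises an ANUGA error (see docstring):
#   utm_points, zone = convert_from_latlon_to_utm(
#       points=[(-37.65, 143.93), (-37.66, 143.94)])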
```
#### File: modules/lib/dumpstacks.py
```python
import threading, sys, traceback
# Import dumpstacks to install a SIGQUIT handler that shows a stack dump for all stacks
# From http://stackoverflow.com/questions/132058/showing-the-stack-trace-from-a-running-python-application
def dumpstacks(signal, frame):
id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
code = []
for threadId, stack in sys._current_frames().items():
code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId))
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
code.append(" %s" % (line.strip()))
print("\n".join(code))
try:
import signal
signal.signal(signal.SIGQUIT, dumpstacks)
except Exception as e:
# Silently ignore failures installing this handler (probably won't work on Windows)
pass
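# Typical use: import this module early in the program, then press Ctrl-\
# (which sends SIGQUIT on POSIX terminals) to print every thread's stack.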
```
#### File: modules/lib/graphdefinition.py
```python
class GraphDefinition(object):
'''a pre-defined graph'''
def __init__(self, name, expression, description, expressions, filename):
self.name = name
self.expression = expression
self.description = description
self.expressions = expressions
self.filename = filename
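# Example construction (argument values are illustrative):
#   g = GraphDefinition('Roll', 'ATT.Roll', 'vehicle roll angle',
#                       ['ATT.Roll'], 'mavgraphs.xml')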
```
#### File: modules/lib/mp_image.py
```python
import time
from wx_loader import wx
try:
import cv2.cv as cv
except ImportError:
import cv
from MAVProxy.modules.lib import mp_util
from MAVProxy.modules.lib import mp_widgets
from MAVProxy.modules.lib.mp_menu import *
class MPImageData:
'''image data to display'''
def __init__(self, img):
self.width = img.width
self.height = img.height
self.data = img.tostring()
class MPImageTitle:
'''window title to use'''
def __init__(self, title):
self.title = title
class MPImageBrightness:
'''image brightness to use'''
def __init__(self, brightness):
self.brightness = brightness
class MPImageFitToWindow:
'''fit image to window'''
def __init__(self):
pass
class MPImageFullSize:
'''show full image resolution'''
def __init__(self):
pass
class MPImageMenu:
'''window menu to add'''
def __init__(self, menu):
self.menu = menu
class MPImagePopupMenu:
'''popup menu to add'''
def __init__(self, menu):
self.menu = menu
class MPImageNewSize:
'''reported to parent when window size changes'''
def __init__(self, size):
self.size = size
class MPImage():
'''
a generic image viewer widget for use in MP tools
'''
def __init__(self,
title='MPImage',
width=512,
height=512,
can_zoom = False,
can_drag = False,
mouse_events = False,
key_events = False,
auto_size = False,
report_size_changes = False,
daemon = False):
import multiprocessing
self.title = title
self.width = width
self.height = height
self.can_zoom = can_zoom
self.can_drag = can_drag
self.mouse_events = mouse_events
self.key_events = key_events
self.auto_size = auto_size
self.report_size_changes = report_size_changes
self.menu = None
self.popup_menu = None
from multiprocessing_queue import makeIPCQueue
self.in_queue = makeIPCQueue()
self.out_queue = makeIPCQueue()
self.default_menu = MPMenuSubMenu('View',
items=[MPMenuItem('Fit Window', 'Fit Window', 'fitWindow'),
MPMenuItem('Full Zoom', 'Full Zoom', 'fullSize')])
self.child = multiprocessing.Process(target=self.child_task)
self.child.daemon = daemon
self.child.start()
self.set_popup_menu(self.default_menu)
def child_task(self):
'''child process - this holds all the GUI elements'''
mp_util.child_close_fds()
from wx_loader import wx
state = self
self.app = wx.App(False)
self.app.frame = MPImageFrame(state=self)
self.app.frame.Show()
self.app.MainLoop()
def is_alive(self):
'''check if child is still going'''
return self.child.is_alive()
def set_image(self, img, bgr=False):
'''set the currently displayed image'''
if not self.is_alive():
return
if bgr:
img = cv.CloneImage(img)
cv.CvtColor(img, img, cv.CV_BGR2RGB)
self.in_queue.put(MPImageData(img))
def set_title(self, title):
'''set the frame title'''
self.in_queue.put(MPImageTitle(title))
def set_brightness(self, brightness):
'''set the image brightness'''
self.in_queue.put(MPImageBrightness(brightness))
def fit_to_window(self):
'''fit the image to the window'''
self.in_queue.put(MPImageFitToWindow())
def full_size(self):
'''show the full image resolution'''
self.in_queue.put(MPImageFullSize())
def set_menu(self, menu):
'''set a MPTopMenu on the frame'''
self.menu = menu
self.in_queue.put(MPImageMenu(menu))
def set_popup_menu(self, menu):
'''set a popup menu on the frame'''
self.popup_menu = menu
self.in_queue.put(MPImagePopupMenu(menu))
def get_menu(self):
'''get the current frame menu'''
return self.menu
def get_popup_menu(self):
'''get the current popup menu'''
return self.popup_menu
def poll(self):
'''check for events, returning one event'''
if self.out_queue.qsize():
return self.out_queue.get()
return None
def events(self):
'''check for events a list of events'''
ret = []
while self.out_queue.qsize():
ret.append(self.out_queue.get())
return ret
def terminate(self):
'''terminate child process'''
self.child.terminate()
self.child.join()
class MPImageFrame(wx.Frame):
""" The main frame of the viewer
"""
def __init__(self, state):
wx.Frame.__init__(self, None, wx.ID_ANY, state.title)
self.state = state
state.frame = self
self.sizer = wx.BoxSizer(wx.VERTICAL)
state.panel = MPImagePanel(self, state)
self.sizer.Add(state.panel, 1, wx.EXPAND)
self.SetSizer(self.sizer)
self.Bind(wx.EVT_IDLE, self.on_idle)
self.Bind(wx.EVT_SIZE, state.panel.on_size)
def on_idle(self, event):
'''prevent the main loop spinning too fast'''
state = self.state
time.sleep(0.1)
class MPImagePanel(wx.Panel):
""" The image panel
"""
def __init__(self, parent, state):
wx.Panel.__init__(self, parent)
self.frame = parent
self.state = state
self.img = None
self.redraw_timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.on_redraw_timer, self.redraw_timer)
self.Bind(wx.EVT_SET_FOCUS, self.on_focus)
self.redraw_timer.Start(100)
self.mouse_down = None
self.drag_step = 10
self.zoom = 1.0
self.menu = None
self.popup_menu = None
self.wx_popup_menu = None
self.popup_pos = None
self.last_size = None
self.done_PIL_warning = False
state.brightness = 1.0
# dragpos is the top left position in image coordinates
self.dragpos = wx.Point(0,0)
self.need_redraw = True
self.mainSizer = wx.BoxSizer(wx.VERTICAL)
self.SetSizer(self.mainSizer)
# panel for the main image
self.imagePanel = mp_widgets.ImagePanel(self, wx.EmptyImage(state.width,state.height))
self.mainSizer.Add(self.imagePanel, flag=wx.TOP|wx.LEFT|wx.GROW, border=0)
if state.mouse_events:
self.imagePanel.Bind(wx.EVT_MOUSE_EVENTS, self.on_event)
else:
self.imagePanel.Bind(wx.EVT_MOUSE_EVENTS, self.on_mouse_event)
if state.key_events:
self.imagePanel.Bind(wx.EVT_KEY_DOWN, self.on_event)
else:
self.imagePanel.Bind(wx.EVT_KEY_DOWN, self.on_key_event)
self.imagePanel.Bind(wx.EVT_MOUSEWHEEL, self.on_mouse_wheel)
self.redraw()
state.frame.Fit()
    def on_focus(self, event):
        '''called when the panel gets focus'''
        self.imagePanel.SetFocus()
def image_coordinates(self, point):
'''given a point in window coordinates, calculate image coordinates'''
# the dragpos is the top left position in image coordinates
ret = wx.Point(int(self.dragpos.x + point.x/self.zoom),
int(self.dragpos.y + point.y/self.zoom))
return ret
def redraw(self):
'''redraw the image with current settings'''
state = self.state
if self.img is None:
self.mainSizer.Fit(self)
self.Refresh()
state.frame.Refresh()
self.SetFocus()
return
# get the current size of the containing window frame
size = self.frame.GetSize()
(width, height) = (self.img.GetWidth(), self.img.GetHeight())
rect = wx.Rect(self.dragpos.x, self.dragpos.y, int(size.x/self.zoom), int(size.y/self.zoom))
#print("redraw", self.zoom, self.dragpos, size, rect);
if rect.x > width-1:
rect.x = width-1
if rect.y > height-1:
rect.y = height-1
if rect.width > width - rect.x:
rect.width = width - rect.x
if rect.height > height - rect.y:
rect.height = height - rect.y
scaled_image = self.img.Copy()
        scaled_image = scaled_image.GetSubImage(rect)
scaled_image = scaled_image.Rescale(int(rect.width*self.zoom), int(rect.height*self.zoom))
if state.brightness != 1.0:
try:
from PIL import Image
pimg = mp_util.wxToPIL(scaled_image)
pimg = Image.eval(pimg, lambda x: int(x * state.brightness))
scaled_image = mp_util.PILTowx(pimg)
except Exception:
if not self.done_PIL_warning:
print("Please install PIL for brightness control")
self.done_PIL_warning = True
# ignore lack of PIL library
pass
self.imagePanel.set_image(scaled_image)
self.need_redraw = False
self.mainSizer.Fit(self)
self.Refresh()
state.frame.Refresh()
self.SetFocus()
'''
from guppy import hpy
h = hpy()
print h.heap()
'''
def on_redraw_timer(self, event):
'''the redraw timer ensures we show new map tiles as they
are downloaded'''
state = self.state
while state.in_queue.qsize():
obj = state.in_queue.get()
if isinstance(obj, MPImageData):
img = wx.EmptyImage(obj.width, obj.height)
img.SetData(obj.data)
self.img = img
self.need_redraw = True
if state.auto_size:
client_area = state.frame.GetClientSize()
total_area = state.frame.GetSize()
bx = max(total_area.x - client_area.x,0)
by = max(total_area.y - client_area.y,0)
state.frame.SetSize(wx.Size(obj.width+bx, obj.height+by))
if isinstance(obj, MPImageTitle):
state.frame.SetTitle(obj.title)
if isinstance(obj, MPImageMenu):
self.set_menu(obj.menu)
if isinstance(obj, MPImagePopupMenu):
self.set_popup_menu(obj.menu)
if isinstance(obj, MPImageBrightness):
state.brightness = obj.brightness
self.need_redraw = True
if isinstance(obj, MPImageFullSize):
self.full_size()
if isinstance(obj, MPImageFitToWindow):
self.fit_to_window()
if self.need_redraw:
self.redraw()
def on_size(self, event):
'''handle window size changes'''
state = self.state
self.need_redraw = True
if state.report_size_changes:
# tell owner the new size
size = self.frame.GetSize()
if size != self.last_size:
self.last_size = size
state.out_queue.put(MPImageNewSize(size))
def limit_dragpos(self):
'''limit dragpos to sane values'''
if self.dragpos.x < 0:
self.dragpos.x = 0
if self.dragpos.y < 0:
self.dragpos.y = 0
if self.img is None:
return
if self.dragpos.x >= self.img.GetWidth():
self.dragpos.x = self.img.GetWidth()-1
if self.dragpos.y >= self.img.GetHeight():
self.dragpos.y = self.img.GetHeight()-1
def on_mouse_wheel(self, event):
'''handle mouse wheel zoom changes'''
state = self.state
if not state.can_zoom:
return
mousepos = self.image_coordinates(event.GetPosition())
rotation = event.GetWheelRotation() / event.GetWheelDelta()
oldzoom = self.zoom
if rotation > 0:
self.zoom /= 1.0/(1.1 * rotation)
elif rotation < 0:
self.zoom /= 1.1 * (-rotation)
if self.zoom > 10:
self.zoom = 10
elif self.zoom < 0.1:
self.zoom = 0.1
if oldzoom < 1 and self.zoom > 1:
self.zoom = 1
if oldzoom > 1 and self.zoom < 1:
self.zoom = 1
self.need_redraw = True
new = self.image_coordinates(event.GetPosition())
# adjust dragpos so the zoom doesn't change what pixel is under the mouse
self.dragpos = wx.Point(self.dragpos.x - (new.x-mousepos.x), self.dragpos.y - (new.y-mousepos.y))
self.limit_dragpos()
def on_drag_event(self, event):
'''handle mouse drags'''
state = self.state
if not state.can_drag:
return
newpos = self.image_coordinates(event.GetPosition())
dx = -(newpos.x - self.mouse_down.x)
dy = -(newpos.y - self.mouse_down.y)
self.dragpos = wx.Point(self.dragpos.x+dx,self.dragpos.y+dy)
self.limit_dragpos()
self.mouse_down = newpos
self.need_redraw = True
self.redraw()
def show_popup_menu(self, pos):
'''show a popup menu'''
self.popup_pos = self.image_coordinates(pos)
self.frame.PopupMenu(self.wx_popup_menu, pos)
def on_mouse_event(self, event):
'''handle mouse events'''
pos = event.GetPosition()
if event.RightDown() and self.popup_menu is not None:
self.show_popup_menu(pos)
return
if event.Leaving():
self.mouse_pos = None
else:
self.mouse_pos = pos
if event.LeftDown():
self.mouse_down = self.image_coordinates(pos)
if event.Dragging() and event.ButtonIsDown(wx.MOUSE_BTN_LEFT):
self.on_drag_event(event)
def on_key_event(self, event):
'''handle key events'''
keycode = event.GetKeyCode()
if keycode == wx.WXK_HOME:
self.zoom = 1.0
self.dragpos = wx.Point(0, 0)
self.need_redraw = True
def on_event(self, event):
'''pass events to the parent'''
state = self.state
if isinstance(event, wx.MouseEvent):
self.on_mouse_event(event)
if isinstance(event, wx.KeyEvent):
self.on_key_event(event)
if (isinstance(event, wx.MouseEvent) and
not event.ButtonIsDown(wx.MOUSE_BTN_ANY) and
event.GetWheelRotation() == 0):
# don't flood the queue with mouse movement
return
evt = mp_util.object_container(event)
pt = self.image_coordinates(wx.Point(evt.X,evt.Y))
evt.X = pt.x
evt.Y = pt.y
state.out_queue.put(evt)
def on_menu(self, event):
'''called on menu event'''
state = self.state
if self.popup_menu is not None:
ret = self.popup_menu.find_selected(event)
if ret is not None:
ret.popup_pos = self.popup_pos
if ret.returnkey == 'fitWindow':
self.fit_to_window()
elif ret.returnkey == 'fullSize':
self.full_size()
else:
state.out_queue.put(ret)
return
if self.menu is not None:
ret = self.menu.find_selected(event)
if ret is not None:
state.out_queue.put(ret)
return
def set_menu(self, menu):
'''add a menu from the parent'''
self.menu = menu
wx_menu = menu.wx_menu()
self.frame.SetMenuBar(wx_menu)
self.frame.Bind(wx.EVT_MENU, self.on_menu)
def set_popup_menu(self, menu):
'''add a popup menu from the parent'''
self.popup_menu = menu
if menu is None:
self.wx_popup_menu = None
else:
self.wx_popup_menu = menu.wx_menu()
self.frame.Bind(wx.EVT_MENU, self.on_menu)
def fit_to_window(self):
'''fit image to window'''
state = self.state
self.dragpos = wx.Point(0, 0)
client_area = state.frame.GetClientSize()
self.zoom = min(float(client_area.x) / self.img.GetWidth(),
float(client_area.y) / self.img.GetHeight())
self.need_redraw = True
def full_size(self):
'''show image at full size'''
self.dragpos = wx.Point(0, 0)
self.zoom = 1.0
self.need_redraw = True
if __name__ == "__main__":
from optparse import OptionParser
parser = OptionParser("mp_image.py <file>")
parser.add_option("--zoom", action='store_true', default=False, help="allow zoom")
parser.add_option("--drag", action='store_true', default=False, help="allow drag")
parser.add_option("--autosize", action='store_true', default=False, help="auto size window")
(opts, args) = parser.parse_args()
im = MPImage(mouse_events=True,
key_events=True,
can_drag = opts.drag,
can_zoom = opts.zoom,
auto_size = opts.autosize)
img = cv.LoadImage(args[0])
im.set_image(img, bgr=True)
while im.is_alive():
for event in im.events():
if isinstance(event, MPMenuItem):
print(event)
continue
            print(event.ClassName)
            if event.ClassName == 'wxMouseEvent':
                print('mouse %u %u' % (event.X, event.Y))
            if event.ClassName == 'wxKeyEvent':
                print('key %u' % event.KeyCode)
time.sleep(0.1)
```
#### File: modules/lib/mp_util.py
```python
import math
import os
import platform
# Some platforms (CYGWIN and others) many not have the wx library
# use imp to see if wx is on the path
has_wxpython = False
if platform.system() == 'Windows':
# auto-detection is failing on windows, for an unknown reason
has_wxpython = True
else:
import imp
try:
imp.find_module('wx')
has_wxpython = True
    except ImportError:
        pass
radius_of_earth = 6378100.0 # in meters
def gps_distance(lat1, lon1, lat2, lon2):
'''return distance between two points in meters,
coordinates are in degrees
thanks to http://www.movable-type.co.uk/scripts/latlong.html'''
lat1 = math.radians(lat1)
lat2 = math.radians(lat2)
lon1 = math.radians(lon1)
lon2 = math.radians(lon2)
dLat = lat2 - lat1
dLon = lon2 - lon1
a = math.sin(0.5*dLat)**2 + math.sin(0.5*dLon)**2 * math.cos(lat1) * math.cos(lat2)
c = 2.0 * math.atan2(math.sqrt(a), math.sqrt(1.0-a))
return radius_of_earth * c
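# Sanity check: one degree of latitude on this sphere is
# radius_of_earth * pi/180 ~= 111319.5 m, so gps_distance(0, 0, 1, 0)
# should return approximately 111319.5.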
def gps_bearing(lat1, lon1, lat2, lon2):
'''return bearing between two points in degrees, in range 0-360
thanks to http://www.movable-type.co.uk/scripts/latlong.html'''
lat1 = math.radians(lat1)
lat2 = math.radians(lat2)
lon1 = math.radians(lon1)
lon2 = math.radians(lon2)
dLat = lat2 - lat1
dLon = lon2 - lon1
y = math.sin(dLon) * math.cos(lat2)
x = math.cos(lat1)*math.sin(lat2) - math.sin(lat1)*math.cos(lat2)*math.cos(dLon)
bearing = math.degrees(math.atan2(y, x))
if bearing < 0:
bearing += 360.0
return bearing
def wrap_valid_longitude(lon):
''' wrap a longitude value around to always have a value in the range
[-180, +180) i.e 0 => 0, 1 => 1, -1 => -1, 181 => -179, -181 => 179
'''
return (((lon + 180.0) % 360.0) - 180.0)
def gps_newpos(lat, lon, bearing, distance):
'''extrapolate latitude/longitude given a heading and distance
thanks to http://www.movable-type.co.uk/scripts/latlong.html
'''
lat1 = math.radians(lat)
lon1 = math.radians(lon)
brng = math.radians(bearing)
dr = distance/radius_of_earth
lat2 = math.asin(math.sin(lat1)*math.cos(dr) +
math.cos(lat1)*math.sin(dr)*math.cos(brng))
lon2 = lon1 + math.atan2(math.sin(brng)*math.sin(dr)*math.cos(lat1),
math.cos(dr)-math.sin(lat1)*math.sin(lat2))
return (math.degrees(lat2), wrap_valid_longitude(math.degrees(lon2)))
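# Example: moving 1000 m due north from the origin shifts latitude by
# 1000/radius_of_earth radians ~= 0.00898 degrees:
#   gps_newpos(0, 0, 0, 1000) ~= (0.00898, 0.0)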
def gps_offset(lat, lon, east, north):
'''return new lat/lon after moving east/north
by the given number of meters'''
bearing = math.degrees(math.atan2(east, north))
distance = math.sqrt(east**2 + north**2)
return gps_newpos(lat, lon, bearing, distance)
def mkdir_p(dir):
'''like mkdir -p'''
if not dir:
return
if dir.endswith("/") or dir.endswith("\\"):
mkdir_p(dir[:-1])
return
if os.path.isdir(dir):
return
mkdir_p(os.path.dirname(dir))
try:
os.mkdir(dir)
except Exception:
pass
def polygon_load(filename):
'''load a polygon from a file'''
ret = []
f = open(filename)
for line in f:
if line.startswith('#'):
continue
line = line.strip()
if not line:
continue
a = line.split()
if len(a) != 2:
raise RuntimeError("invalid polygon line: %s" % line)
ret.append((float(a[0]), float(a[1])))
f.close()
return ret
def polygon_bounds(points):
'''return bounding box of a polygon in (x,y,width,height) form'''
(minx, miny) = (points[0][0], points[0][1])
(maxx, maxy) = (minx, miny)
for p in points:
minx = min(minx, p[0])
maxx = max(maxx, p[0])
miny = min(miny, p[1])
maxy = max(maxy, p[1])
return (minx, miny, maxx-minx, maxy-miny)
def bounds_overlap(bound1, bound2):
'''return true if two bounding boxes overlap'''
(x1,y1,w1,h1) = bound1
(x2,y2,w2,h2) = bound2
if x1+w1 < x2:
return False
if x2+w2 < x1:
return False
if y1+h1 < y2:
return False
if y2+h2 < y1:
return False
return True
class object_container:
'''return a picklable object from an existing object,
containing all of the normal attributes of the original'''
def __init__(self, object):
for v in dir(object):
if not v.startswith('__') and v not in ['this']:
try:
a = getattr(object, v)
if (hasattr(a, '__call__') or
hasattr(a, '__swig_destroy__') or
str(a).find('Swig Object') != -1):
continue
setattr(self, v, a)
except Exception:
pass
def degrees_to_dms(degrees):
    '''return a degrees:minutes:seconds string'''
    deg = int(degrees)
    mins = int((degrees - deg)*60)   # avoid shadowing the min() builtin
    secs = ((degrees - deg) - (mins/60.0))*60*60
    return u'%d\u00b0%02u\'%05.2f"' % (deg, abs(mins), abs(secs))
class UTMGrid:
'''class to hold UTM grid position'''
def __init__(self, zone, easting, northing, hemisphere='S'):
self.zone = zone
self.easting = easting
self.northing = northing
self.hemisphere = hemisphere
def __str__(self):
return "%s %u %u %u" % (self.hemisphere, self.zone, self.easting, self.northing)
def latlon(self):
'''return (lat,lon) for the grid coordinates'''
from MAVProxy.modules.lib.ANUGA import lat_long_UTM_conversion
(lat, lon) = lat_long_UTM_conversion.UTMtoLL(self.northing, self.easting, self.zone, isSouthernHemisphere=(self.hemisphere=='S'))
return (lat, lon)
def latlon_to_grid(latlon):
'''convert to grid reference'''
from MAVProxy.modules.lib.ANUGA import redfearn
(zone, easting, northing) = redfearn.redfearn(latlon[0], latlon[1])
if latlon[0] < 0:
hemisphere = 'S'
else:
hemisphere = 'N'
return UTMGrid(zone, easting, northing, hemisphere=hemisphere)
def latlon_round(latlon, spacing=1000):
'''round to nearest grid corner'''
g = latlon_to_grid(latlon)
g.easting = (g.easting // spacing) * spacing
g.northing = (g.northing // spacing) * spacing
return g.latlon()
def wxToPIL(wimg):
'''convert a wxImage to a PIL Image'''
from PIL import Image
(w,h) = wimg.GetSize()
d = wimg.GetData()
pimg = Image.new("RGB", (w,h), color=1)
pimg.fromstring(d)
return pimg
def PILTowx(pimg):
'''convert a PIL Image to a wx image'''
from wx_loader import wx
wimg = wx.EmptyImage(pimg.size[0], pimg.size[1])
wimg.SetData(pimg.convert('RGB').tostring())
return wimg
def dot_mavproxy(name):
'''return a path to store mavproxy data'''
dir = os.path.join(os.environ['HOME'], '.mavproxy')
mkdir_p(dir)
return os.path.join(dir, name)
def download_url(url):
'''download a URL and return the content'''
import urllib2
try:
resp = urllib2.urlopen(url)
headers = resp.info()
except urllib2.URLError as e:
        print('Error downloading %s : %s' % (url, e))
return None
return resp.read()
def download_files(files):
'''download an array of files'''
for (url, file) in files:
print("Downloading %s as %s" % (url, file))
data = download_url(url)
if data is None:
continue
try:
open(file, mode='w').write(data)
except Exception as e:
print("Failed to save to %s : %s" % (file, e))
child_fd_list = []
def child_close_fds():
'''close file descriptors that a child process should not inherit.
Should be called from child processes.'''
global child_fd_list
import os
while len(child_fd_list) > 0:
fd = child_fd_list.pop(0)
try:
os.close(fd)
except Exception as msg:
pass
def child_fd_list_add(fd):
'''add a file descriptor to list to be closed in child processes'''
global child_fd_list
child_fd_list.append(fd)
def child_fd_list_remove(fd):
'''remove a file descriptor to list to be closed in child processes'''
global child_fd_list
try:
child_fd_list.remove(fd)
except Exception:
pass
```
#### File: modules/lib/wxconsole_util.py
```python
class Text():
'''text to write to console'''
def __init__(self, text, fg='black', bg='white'):
self.text = text
self.fg = fg
self.bg = bg
class Value():
'''a value for the status bar'''
def __init__(self, name, text, row=0, fg='black', bg='white'):
self.name = name
self.text = text
self.row = row
self.fg = fg
self.bg = bg
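# Example messages (illustrative; these objects are normally queued to the
# console process by the wxconsole module):
#   Text('link 1 down', fg='red')
#   Value('GPS', 'GPS: 3D fix', fg='white', bg='green')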
```
#### File: MAVProxy/modules/mavproxy_dataflash_logger.py
```python
import logging
import os
import os.path
import threading
import types
import sys
from pymavlink import mavutil
import errno
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
import time
from MAVProxy.modules.lib import mp_settings
class dataflash_logger(mp_module.MPModule):
def __init__(self, mpstate):
"""Initialise module. We start poking the UAV for messages after this is called"""
super(dataflash_logger, self).__init__(mpstate, "dataflash_logger", "logging of mavlink dataflash messages")
self.sender = None
self.stopped = False
self.time_last_start_packet_sent = 0
self.time_last_stop_packet_sent = 0
self.dataflash_dir = self._dataflash_dir(mpstate)
self.log_settings = mp_settings.MPSettings(
[ ('verbose', bool, False),
('df_target_system', int, 0),
('df_target_component', int, mavutil.mavlink.MAV_COMP_ID_LOG)
])
self.add_command('dataflash_logger', self.cmd_dataflash_logger, "dataflash logging control", ['status','start','stop','set (LOGSETTING)'])
self.add_completion_function('(LOGSETTING)', self.log_settings.completion)
def usage(self):
        '''show help on command line options'''
return "Usage: dataflash_logger <status|start|stop|set>"
def cmd_dataflash_logger(self, args):
'''control behaviour of the module'''
        if len(args) == 0:
            print(self.usage())
        elif args[0] == "status":
            print(self.status())
        elif args[0] == "stop":
            self.sender = None
            self.stopped = True
        elif args[0] == "start":
            self.stopped = False
        elif args[0] == "set":
            self.log_settings.command(args[1:])
        else:
            print(self.usage())
def _dataflash_dir(self, mpstate):
'''returns directory path to store DF logs in. May be relative'''
if mpstate.settings.state_basedir is None:
ret = 'dataflash'
else:
ret = os.path.join(mpstate.settings.state_basedir,'dataflash')
try:
os.makedirs(ret)
except OSError as e:
if e.errno != errno.EEXIST:
print("DFLogger: OSError making (%s): %s" % (ret, str(e)))
except Exception as e:
print("DFLogger: Unknown exception making (%s): %s" % (ret, str(e)))
return ret
def new_log_filepath(self):
'''returns a filepath to a log which does not currently exist and is suitable for DF logging'''
lastlog_filename = os.path.join(self.dataflash_dir,'LASTLOG.TXT')
if os.path.exists(lastlog_filename) and os.stat(lastlog_filename).st_size != 0:
fh = open(lastlog_filename,'rb')
log_cnt = int(fh.read()) + 1
fh.close()
else:
log_cnt = 1
self.lastlog_file = open(lastlog_filename,'w+b')
self.lastlog_file.write(log_cnt.__str__())
self.lastlog_file.close()
        return os.path.join(self.dataflash_dir, '%u.BIN' % (log_cnt,))
def start_new_log(self):
'''open a new dataflash log, reset state'''
filename = self.new_log_filepath()
self.last_seqno = 0
self.logfile = open(filename, 'w+b')
print("DFLogger: logging started (%s)" % (filename))
self.prev_cnt = 0
self.download = 0
self.prev_download = 0
self.last_idle_status_printed_time = time.time()
self.last_status_time = time.time()
self.missing_blocks = {}
self.acking_blocks = {}
self.blocks_to_ack_and_nack = []
self.missing_found = 0
self.abandoned = 0
self.dropped = 0
def status(self):
'''returns information about module'''
        transferred = self.download - self.prev_download
now = time.time()
interval = now - self.last_status_time
self.last_status_time = now
return("DFLogger: %(state)s Rate(%(interval)ds):%(rate).3fkB/s Block:%(block_cnt)d Missing:%(missing)d Fixed:%(fixed)d Abandoned:%(abandoned)d" %
{"interval": interval,
"rate": transfered/(interval*1000),
"block_cnt": self.last_seqno,
"missing": len(self.missing_blocks),
"fixed": self.missing_found,
"abandoned": self.abandoned,
"state": "Inactive" if self.stopped else "Active"
})
def idle_print_status(self):
'''print out statistics every 10 seconds from idle loop'''
now = time.time()
if (now - self.last_idle_status_printed_time) >= 10:
            print(self.status())
self.last_idle_status_printed_time = now
self.prev_download = self.download
def idle_send_acks_and_nacks(self):
'''Send packets to UAV in idle loop'''
max_blocks_to_send = 10
blocks_sent = 0
i=0
now = time.time()
while i < len(self.blocks_to_ack_and_nack) and blocks_sent < max_blocks_to_send:
# print("ACKLIST: %s" % ([x[1] for x in self.blocks_to_ack_and_nack],))
stuff = self.blocks_to_ack_and_nack[i]
[master, block, status, first_sent, last_sent] = stuff
if status == 1:
# print("DFLogger: ACKing block (%d)" % (block,))
mavstatus = mavutil.mavlink.MAV_REMOTE_LOG_DATA_BLOCK_ACK
(target_sys,target_comp) = self.sender
self.master.mav.remote_log_block_status_send(target_sys,
target_comp,
block,
mavstatus)
blocks_sent += 1
del self.acking_blocks[block]
del self.blocks_to_ack_and_nack[i]
continue
if block not in self.missing_blocks:
# we've received this block now
del self.blocks_to_ack_and_nack[i]
continue
# give up on packet if we have seen one with a much higher
# number (or after 60 seconds):
if (self.last_seqno - block > 200) or (now - first_sent > 60):
if self.log_settings.verbose:
print("DFLogger: Abandoning block (%d)" % (block,))
del self.blocks_to_ack_and_nack[i]
del self.missing_blocks[block]
self.abandoned += 1
continue
i += 1
# only send each nack every-so-often:
if last_sent is not None:
if now - last_sent < 0.1:
continue
if self.log_settings.verbose:
print("DFLogger: Asking for block (%d)" % (block,))
mavstatus = mavutil.mavlink.MAV_REMOTE_LOG_DATA_BLOCK_NACK
(target_sys,target_comp) = self.sender
self.master.mav.remote_log_block_status_send(target_sys,
target_comp,
block,
mavstatus)
blocks_sent += 1
stuff[4] = now
def idle_task_started(self):
'''called in idle task only when logging is started'''
if self.log_settings.verbose:
self.idle_print_status()
self.idle_send_acks_and_nacks()
def idle_task_not_started(self):
'''called in idle task only when logging is not running'''
if not self.stopped:
self.tell_sender_to_start()
def idle_task(self):
'''called rapidly by mavproxy'''
if self.sender is not None:
self.idle_task_started()
else:
self.idle_task_not_started()
def tell_sender_to_stop(self, m):
'''send a stop packet (if we haven't sent one in the last second)'''
now = time.time()
if now - self.time_last_stop_packet_sent < 1:
return
if self.log_settings.verbose:
print("DFLogger: Sending stop packet")
self.time_last_stop_packet_sent = now
self.master.mav.remote_log_block_status_send(m.get_srcSystem(),
m.get_srcComponent(),
mavutil.mavlink.MAV_REMOTE_LOG_DATA_BLOCK_STOP,
1)
def tell_sender_to_start(self):
'''send a start packet (if we haven't sent one in the last second)'''
now = time.time()
if now - self.time_last_start_packet_sent < 1:
return
self.time_last_start_packet_sent = now
if self.log_settings.verbose:
print("DFLogger: Sending start packet")
target_sys = self.log_settings.df_target_system
target_comp = self.log_settings.df_target_component
self.master.mav.remote_log_block_status_send(target_sys,
target_comp,
mavutil.mavlink.MAV_REMOTE_LOG_DATA_BLOCK_START,
1)
def packet_is_for_me(self, m):
'''returns true if this packet is appropriately addressed'''
if m.target_system != self.master.mav.srcSystem:
return False
if m.target_component != self.master.mav.srcComponent:
return False
# if have a sender we can also check the source address:
if self.sender is not None:
if (m.get_srcSystem(),m.get_srcComponent()) != self.sender:
return False;
return True
def mavlink_packet(self, m):
'''handle mavlink packets'''
if m.get_type() == 'REMOTE_LOG_DATA_BLOCK':
now = time.time()
if not self.packet_is_for_me(m):
                self.dropped += 1
return
if self.sender is None and m.seqno == 0:
if self.log_settings.verbose:
print("DFLogger: Received data packet - starting new log")
self.start_new_log()
self.sender = (m.get_srcSystem(), m.get_srcComponent())
if self.sender is None:
# No connection at the moment, and this packet did not start one
return
if self.stopped:
# send a stop packet every second until the other end gets the idea:
self.tell_sender_to_stop(m)
return
if self.sender is not None:
size = len(m.data)
data = ''.join(str(chr(x)) for x in m.data[:size])
ofs = size*(m.seqno)
self.logfile.seek(ofs)
self.logfile.write(data)
if m.seqno in self.missing_blocks:
if self.log_settings.verbose:
print("DFLogger: Received missing block: %d" % (m.seqno,))
del self.missing_blocks[m.seqno]
self.missing_found += 1
self.blocks_to_ack_and_nack.append([self.master,m.seqno,1,now,None])
self.acking_blocks[m.seqno] = 1
# print("DFLogger: missing blocks: %s" % (str(self.missing_blocks),))
else:
# ACK the block we just got:
if m.seqno in self.acking_blocks:
# already acking this one; we probably sent
# multiple nacks and received this one
# multiple times
pass
else:
self.blocks_to_ack_and_nack.append([self.master,m.seqno,1,now,None])
self.acking_blocks[m.seqno] = 1
# NACK any blocks we haven't seen and should have:
if(m.seqno - self.last_seqno > 1):
for block in range(self.last_seqno+1, m.seqno):
if block not in self.missing_blocks and \
block not in self.acking_blocks:
self.missing_blocks[block] = 1
if self.log_settings.verbose:
print "DFLogger: setting %d for nacking" % (block,)
self.blocks_to_ack_and_nack.append([self.master,block,0,now,None])
#print "\nmissed blocks: ",self.missing_blocks
if self.last_seqno < m.seqno:
self.last_seqno = m.seqno
self.download += size
def init(mpstate):
'''initialise module'''
return dataflash_logger(mpstate)
```
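The heart of the transfer above is the per-seqno bookkeeping in mavlink_packet(): a gap between the last seen sequence number and a new one queues the skipped blocks for NACKing, and a late arrival clears its entry. A stripped-down sketch of just that logic, with no MAVLink dependency:
```python
# Standalone sketch of the gap-detection bookkeeping in mavlink_packet().
last_seqno = 0
missing = {}

def on_block(seqno):
    '''record a received block, marking any skipped blocks as missing'''
    global last_seqno
    missing.pop(seqno, None)           # a previously missing block arrived
    if seqno - last_seqno > 1:         # gap: queue the skipped blocks
        for block in range(last_seqno + 1, seqno):
            missing[block] = 1
    if seqno > last_seqno:
        last_seqno = seqno

for s in [1, 2, 5, 3, 6]:              # block 3 arrives late, block 4 never
    on_block(s)
print(sorted(missing))                 # -> [4]
```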
#### File: MAVProxy/modules/mavproxy_gasheli.py
```python
import os, sys, math, time
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_util
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_settings
class GasHeliModule(mp_module.MPModule):
def __init__(self, mpstate):
super(GasHeliModule, self).__init__(mpstate, "gas_heli", "Gas Heli", public=False)
self.console.set_status('IGN', 'IGN', row=4)
self.console.set_status('THR', 'THR', row=4)
self.console.set_status('RPM', 'RPM: 0', row=4)
self.add_command('gasheli', self.cmd_gasheli,
'gas helicopter control',
['<start|stop>',
'set (GASHELISETTINGS)'])
self.gasheli_settings = mp_settings.MPSettings(
[ ('ignition_chan', int, 0),
('ignition_disable_time', float, 0.5),
('ignition_stop_time', float, 3),
('starter_chan', int, 0),
('starter_time', float, 3.0),
('starter_pwm_on', int, 2000),
('starter_pwm_off', int, 1000),
]
)
self.add_completion_function('(GASHELISETTINGS)', self.gasheli_settings.completion)
self.starting_motor = False
self.stopping_motor = False
self.motor_t1 = None
self.old_override = 0
def mavlink_packet(self, msg):
'''handle an incoming mavlink packet'''
type = msg.get_type()
master = self.master
# add some status fields
if type in [ 'RC_CHANNELS_RAW' ]:
rc6 = msg.chan6_raw
if rc6 > 1500:
ign_colour = 'green'
else:
ign_colour = 'red'
self.console.set_status('IGN', 'IGN', fg=ign_colour, row=4)
if type in [ 'SERVO_OUTPUT_RAW' ]:
rc8 = msg.servo8_raw
if rc8 < 1200:
thr_colour = 'red'
elif rc8 < 1300:
thr_colour = 'orange'
else:
thr_colour = 'green'
self.console.set_status('THR', 'THR', fg=thr_colour, row=4)
if type in [ 'RPM' ]:
rpm = msg.rpm1
if rpm < 3000:
rpm_colour = 'red'
elif rpm < 10000:
rpm_colour = 'orange'
else:
rpm_colour = 'green'
self.console.set_status('RPM', 'RPM: %u' % rpm, fg=rpm_colour, row=4)
def valid_starter_settings(self):
'''check starter settings'''
if self.gasheli_settings.ignition_chan <= 0 or self.gasheli_settings.ignition_chan > 8:
print("Invalid ignition channel %d" % self.gasheli_settings.ignition_chan)
return False
if self.gasheli_settings.starter_chan <= 0 or self.gasheli_settings.starter_chan > 14:
print("Invalid starter channel %d" % self.gasheli_settings.starter_chan)
return False
return True
def idle_task(self):
'''run periodic tasks'''
if self.starting_motor:
if self.gasheli_settings.ignition_disable_time > 0:
elapsed = time.time() - self.motor_t1
if elapsed >= self.gasheli_settings.ignition_disable_time:
self.module('rc').set_override_chan(self.gasheli_settings.ignition_chan-1, self.old_override)
self.starting_motor = False
if self.stopping_motor:
elapsed = time.time() - self.motor_t1
if elapsed >= self.gasheli_settings.ignition_stop_time:
# hand back control to RC
self.module('rc').set_override_chan(self.gasheli_settings.ignition_chan-1, self.old_override)
self.stopping_motor = False
def start_motor(self):
'''start motor'''
if not self.valid_starter_settings():
return
self.motor_t1 = time.time()
self.stopping_motor = False
if self.gasheli_settings.ignition_disable_time > 0:
self.old_override = self.module('rc').get_override_chan(self.gasheli_settings.ignition_chan-1)
self.module('rc').set_override_chan(self.gasheli_settings.ignition_chan-1, 1000)
self.starting_motor = True
else:
# nothing more to do
self.starting_motor = False
# setup starter run
self.master.mav.command_long_send(self.target_system,
self.target_component,
mavutil.mavlink.MAV_CMD_DO_REPEAT_SERVO, 0,
self.gasheli_settings.starter_chan,
self.gasheli_settings.starter_pwm_on,
1,
self.gasheli_settings.starter_time*2,
0, 0, 0)
print("Starting motor")
def stop_motor(self):
'''stop motor'''
if not self.valid_starter_settings():
return
self.motor_t1 = time.time()
self.starting_motor = False
self.stopping_motor = True
self.old_override = self.module('rc').get_override_chan(self.gasheli_settings.ignition_chan-1)
self.module('rc').set_override_chan(self.gasheli_settings.ignition_chan-1, 1000)
print("Stopping motor")
def cmd_gasheli(self, args):
        '''gas heli commands'''
usage = "Usage: gasheli <start|stop|set>"
if len(args) < 1:
print(usage)
return
if args[0] == "start":
self.start_motor()
elif args[0] == "stop":
self.stop_motor()
elif args[0] == "set":
self.gasheli_settings.command(args[1:])
else:
print(usage)
def init(mpstate):
'''initialise module'''
return GasHeliModule(mpstate)
```
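start_motor()/idle_task() above follow a save/force/restore pattern on the ignition channel: stash the current RC override, force the channel low, and restore the stash once the timer expires. A timing sketch of that pattern in isolation (set_override is a hypothetical stand-in for the rc module's set_override_chan):
```python
# Standalone timing sketch of the save/force/restore pattern above.
import time

state = {'override': 0}
def set_override(value):
    state['override'] = value

old_override = state['override']     # save whatever was there
set_override(1000)                   # force ignition low
t1 = time.time()
disable_time = 0.5                   # cf. ignition_disable_time above
while time.time() - t1 < disable_time:
    time.sleep(0.05)                 # idle_task() polls until expiry
set_override(old_override)           # hand control back to RC
print("ignition held low for %.2fs" % (time.time() - t1))
```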
#### File: MAVProxy/modules/mavproxy_gopro.py
```python
import time, os
from MAVProxy.modules.lib import mp_module
from pymavlink import mavutil
class GoProModule(mp_module.MPModule):
def __init__(self, mpstate):
super(GoProModule, self).__init__(mpstate, "gopro", "gopro handling")
self.add_command('gopro', self.cmd_gopro, 'gopro control', [
'status',
'shutter <start|stop>',
'mode <video|camera>',
'power <on|off>'])
def cmd_gopro(self, args):
'''gopro commands'''
usage = "status, shutter <start|stop>, mode <video|camera>, power <on|off>"
        mav = self.master.mav
        if len(args) < 1:
            print(usage)
            return
        if args[0] == "status":
self.cmd_gopro_status(args[1:])
return
if args[0] == "shutter":
name = args[1].lower()
if name == 'start':
mav.gopro_set_request_send(0, mavutil.mavlink.MAV_COMP_ID_GIMBAL,
mavutil.mavlink.GOPRO_COMMAND_SHUTTER, 1)
return
elif name == 'stop':
mav.gopro_set_request_send(0, mavutil.mavlink.MAV_COMP_ID_GIMBAL,
mavutil.mavlink.GOPRO_COMMAND_SHUTTER, 0)
return
else:
print("unrecognized")
return
if args[0] == "mode":
name = args[1].lower()
if name == 'video':
mav.gopro_set_request_send(0, mavutil.mavlink.MAV_COMP_ID_GIMBAL,
mavutil.mavlink.GOPRO_COMMAND_CAPTURE_MODE, 0)
return
elif name == 'camera':
mav.gopro_set_request_send(0, mavutil.mavlink.MAV_COMP_ID_GIMBAL,
mavutil.mavlink.GOPRO_COMMAND_CAPTURE_MODE, 1)
return
else:
print("unrecognized")
return
if args[0] == "power":
name = args[1].lower()
if name == 'on':
mav.gopro_set_request_send(0, mavutil.mavlink.MAV_COMP_ID_GIMBAL,
mavutil.mavlink.GOPRO_COMMAND_POWER, 1)
return
elif name == 'off':
mav.gopro_set_request_send(0, mavutil.mavlink.MAV_COMP_ID_GIMBAL,
mavutil.mavlink.GOPRO_COMMAND_POWER, 0)
return
else:
print("unrecognized")
return
print(usage)
def cmd_gopro_status(self, args):
'''show gopro status'''
master = self.master
if 'GOPRO_HEARTBEAT' in master.messages:
print(master.messages['GOPRO_HEARTBEAT'])
else:
print("No GOPRO_HEARTBEAT messages")
def init(mpstate):
'''initialise module'''
return GoProModule(mpstate)
```
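cmd_gopro() maps each (subcommand, argument) pair to a GOPRO_COMMAND id and a value. As a design note, the same mapping fits in a lookup table; a sketch of that alternative (not what the module does, with command ids given as attribute names rather than mavutil constants):
```python
# Hypothetical table-driven equivalent of the if/elif chains in cmd_gopro().
GOPRO_ACTIONS = {
    ('shutter', 'start'): ('GOPRO_COMMAND_SHUTTER', 1),
    ('shutter', 'stop'):  ('GOPRO_COMMAND_SHUTTER', 0),
    ('mode', 'video'):    ('GOPRO_COMMAND_CAPTURE_MODE', 0),
    ('mode', 'camera'):   ('GOPRO_COMMAND_CAPTURE_MODE', 1),
    ('power', 'on'):      ('GOPRO_COMMAND_POWER', 1),
    ('power', 'off'):     ('GOPRO_COMMAND_POWER', 0),
}

def lookup_gopro_action(args):
    '''return (command name, value) for a gopro command line, or None'''
    if len(args) < 2:
        return None
    return GOPRO_ACTIONS.get((args[0], args[1].lower()))

print(lookup_gopro_action(['mode', 'video']))  # -> ('GOPRO_COMMAND_CAPTURE_MODE', 0)
```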
#### File: MAVProxy/modules/mavproxy_graph.py
```python
from pymavlink import mavutil
import re, os, sys
from MAVProxy.modules.lib import live_graph
from MAVProxy.modules.lib import mp_module
class GraphModule(mp_module.MPModule):
def __init__(self, mpstate):
super(GraphModule, self).__init__(mpstate, "graph", "graph control")
self.timespan = 20
self.tickresolution = 0.2
self.graphs = []
self.add_command('graph', self.cmd_graph, "[expression...] add a live graph",
['(VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE)'])
def cmd_graph(self, args):
'''graph command'''
if len(args) == 0:
# list current graphs
for i in range(len(self.graphs)):
print("Graph %u: %s" % (i, self.graphs[i].fields))
return
elif args[0] == "help":
print("graph <timespan|tickresolution|expression>")
elif args[0] == "timespan":
if len(args) == 1:
print("timespan: %.1f" % self.timespan)
return
self.timespan = float(args[1])
elif args[0] == "tickresolution":
if len(args) == 1:
print("tickresolution: %.1f" % self.tickresolution)
return
self.tickresolution = float(args[1])
else:
# start a new graph
self.graphs.append(Graph(self, args[:]))
def unload(self):
'''unload module'''
for g in self.graphs:
g.close()
self.graphs = []
def mavlink_packet(self, msg):
'''handle an incoming mavlink packet'''
# check for any closed graphs
for i in range(len(self.graphs) - 1, -1, -1):
if not self.graphs[i].is_alive():
self.graphs[i].close()
self.graphs.pop(i)
# add data to the rest
for g in self.graphs:
g.add_mavlink_packet(msg)
def init(mpstate):
'''initialise module'''
return GraphModule(mpstate)
class Graph():
'''a graph instance'''
def __init__(self, state, fields):
self.fields = fields[:]
self.field_types = []
self.msg_types = set()
self.state = state
re_caps = re.compile('[A-Z_][A-Z0-9_]+')
for f in self.fields:
caps = set(re.findall(re_caps, f))
self.msg_types = self.msg_types.union(caps)
self.field_types.append(caps)
print("Adding graph: %s" % self.fields)
self.values = [None] * len(self.fields)
self.livegraph = live_graph.LiveGraph(self.fields,
timespan=state.timespan,
tickresolution=state.tickresolution,
title=self.fields[0])
def is_alive(self):
'''check if this graph is still alive'''
if self.livegraph:
return self.livegraph.is_alive()
return False
def close(self):
'''close this graph'''
if self.livegraph:
self.livegraph.close()
self.livegraph = None
def add_mavlink_packet(self, msg):
'''add data to the graph'''
mtype = msg.get_type()
if mtype not in self.msg_types:
return
for i in range(len(self.fields)):
if mtype not in self.field_types[i]:
continue
f = self.fields[i]
self.values[i] = mavutil.evaluate_expression(f, self.state.master.messages)
if self.livegraph is not None:
self.livegraph.add_values(self.values)
```
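A graph expression carries its message types inline: the re_caps pattern in Graph.__init__ pulls every ALL_CAPS token out of the expression text so add_mavlink_packet() can filter cheaply. A standalone check of that extraction:
```python
# Standalone demonstration of the message-type extraction used by Graph.
import re

re_caps = re.compile('[A-Z_][A-Z0-9_]+')
fields = ['VFR_HUD.alt', 'GPS_RAW_INT.fix_type*1.0', 'ATTITUDE.roll+ATTITUDE.pitch']

msg_types = set()
for f in fields:
    msg_types = msg_types.union(set(re.findall(re_caps, f)))
print(sorted(msg_types))   # -> ['ATTITUDE', 'GPS_RAW_INT', 'VFR_HUD']
```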
#### File: MAVProxy/modules/mavproxy_link.py
```python
''' TO USE:
link add 10.11.12.13:14550
link list
link remove 3 # to remove the 3rd link
'''
from pymavlink import mavutil
import time, struct, math, sys, fnmatch, traceback
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
if mp_util.has_wxpython:
from MAVProxy.modules.lib.mp_menu import *
dataPackets = frozenset(['BAD_DATA','LOG_DATA'])
delayedPackets = frozenset([ 'MISSION_CURRENT', 'SYS_STATUS', 'VFR_HUD',
'GPS_RAW_INT', 'SCALED_PRESSURE', 'GLOBAL_POSITION_INT',
'NAV_CONTROLLER_OUTPUT' ])
activityPackets = frozenset([ 'HEARTBEAT', 'GPS_RAW_INT', 'GPS_RAW', 'GLOBAL_POSITION_INT', 'SYS_STATUS' ])
class LinkModule(mp_module.MPModule):
def __init__(self, mpstate):
super(LinkModule, self).__init__(mpstate, "link", "link control", public=True)
self.add_command('link', self.cmd_link, "link control",
["<list|ports>",
'add (SERIALPORT)',
'remove (LINKS)'])
self.no_fwd_types = set()
self.no_fwd_types.add("BAD_DATA")
self.add_completion_function('(SERIALPORT)', self.complete_serial_ports)
self.add_completion_function('(LINKS)', self.complete_links)
self.menu_added_console = False
if mp_util.has_wxpython:
self.menu_add = MPMenuSubMenu('Add', items=[])
self.menu_rm = MPMenuSubMenu('Remove', items=[])
self.menu = MPMenuSubMenu('Link',
items=[self.menu_add,
self.menu_rm,
MPMenuItem('Ports', 'Ports', '# link ports'),
MPMenuItem('List', 'List', '# link list'),
MPMenuItem('Status', 'Status', '# link')])
self.last_menu_update = 0
def idle_task(self):
'''called on idle'''
if mp_util.has_wxpython and (not self.menu_added_console and self.module('console') is not None):
self.menu_added_console = True
# we don't dynamically update these yet due to a wx bug
self.menu_add.items = [ MPMenuItem(p, p, '# link add %s' % p) for p in self.complete_serial_ports('') ]
self.menu_rm.items = [ MPMenuItem(p, p, '# link remove %s' % p) for p in self.complete_links('') ]
self.module('console').add_menu(self.menu)
for m in self.mpstate.mav_master:
m.source_system = self.settings.source_system
m.mav.srcSystem = m.source_system
m.mav.srcComponent = self.settings.source_component
def complete_serial_ports(self, text):
'''return list of serial ports'''
ports = mavutil.auto_detect_serial(preferred_list=['*FTDI*',"*Arduino_Mega_2560*", "*3D_Robotics*", "*USB_to_UART*", '*PX4*', '*FMU*'])
return [ p.device for p in ports ]
def complete_links(self, text):
'''return list of links'''
return [ m.address for m in self.mpstate.mav_master ]
def cmd_link(self, args):
'''handle link commands'''
if len(args) < 1:
self.show_link()
elif args[0] == "list":
self.cmd_link_list()
elif args[0] == "add":
if len(args) != 2:
print("Usage: link add LINK")
return
self.cmd_link_add(args[1:])
elif args[0] == "ports":
self.cmd_link_ports()
elif args[0] == "remove":
if len(args) != 2:
print("Usage: link remove LINK")
return
self.cmd_link_remove(args[1:])
else:
print("usage: link <list|add|remove>")
def show_link(self):
'''show link information'''
for master in self.mpstate.mav_master:
linkdelay = (self.status.highest_msec - master.highest_msec)*1.0e-3
if master.linkerror:
print("link %u down" % (master.linknum+1))
else:
print("link %u OK (%u packets, %.2fs delay, %u lost, %.1f%% loss)" % (master.linknum+1,
self.status.counters['MasterIn'][master.linknum],
linkdelay,
master.mav_loss,
master.packet_loss()))
def cmd_link_list(self):
'''list links'''
print("%u links" % len(self.mpstate.mav_master))
for i in range(len(self.mpstate.mav_master)):
conn = self.mpstate.mav_master[i]
print("%u: %s" % (i, conn.address))
def link_add(self, device):
'''add new link'''
try:
print("Connect %s source_system=%d" % (device, self.settings.source_system))
conn = mavutil.mavlink_connection(device, autoreconnect=True,
source_system=self.settings.source_system,
baud=self.settings.baudrate)
conn.mav.srcComponent = self.settings.source_component
except Exception as msg:
print("Failed to connect to %s : %s" % (device, msg))
return False
if self.settings.rtscts:
conn.set_rtscts(True)
conn.linknum = len(self.mpstate.mav_master)
conn.mav.set_callback(self.master_callback, conn)
if hasattr(conn.mav, 'set_send_callback'):
conn.mav.set_send_callback(self.master_send_callback, conn)
conn.linknum = len(self.mpstate.mav_master)
conn.linkerror = False
conn.link_delayed = False
conn.last_heartbeat = 0
conn.last_message = 0
conn.highest_msec = 0
self.mpstate.mav_master.append(conn)
self.status.counters['MasterIn'].append(0)
try:
mp_util.child_fd_list_add(conn.port.fileno())
except Exception:
pass
return True
def cmd_link_add(self, args):
'''add new link'''
device = args[0]
print("Adding link %s" % device)
self.link_add(device)
def cmd_link_ports(self):
'''show available ports'''
ports = mavutil.auto_detect_serial(preferred_list=['*FTDI*',"*Arduino_Mega_2560*", "*3D_Robotics*", "*USB_to_UART*", '*PX4*', '*FMU*'])
for p in ports:
print("%s : %s : %s" % (p.device, p.description, p.hwid))
def cmd_link_remove(self, args):
        '''remove a link'''
device = args[0]
if len(self.mpstate.mav_master) <= 1:
print("Not removing last link")
#return
for i in range(len(self.mpstate.mav_master)):
conn = self.mpstate.mav_master[i]
if str(i) == device or conn.address == device:
print("Removing link %s" % conn.address)
try:
try:
mp_util.child_fd_list_remove(conn.port.fileno())
except Exception:
pass
self.mpstate.mav_master[i].close()
except Exception as msg:
print(msg)
pass
self.mpstate.mav_master.pop(i)
self.status.counters['MasterIn'].pop(i)
# renumber the links
for j in range(len(self.mpstate.mav_master)):
conn = self.mpstate.mav_master[j]
conn.linknum = j
return
def get_usec(self):
'''time since 1970 in microseconds'''
return int(time.time() * 1.0e6)
def master_send_callback(self, m, master):
'''called on sending a message'''
if self.status.watch is not None:
if fnmatch.fnmatch(m.get_type().upper(), self.status.watch.upper()):
self.mpstate.console.writeln('> '+ str(m))
mtype = m.get_type()
if mtype != 'BAD_DATA' and self.mpstate.logqueue:
usec = self.get_usec()
usec = (usec & ~3) | 3 # linknum 3
self.mpstate.logqueue.put(str(struct.pack('>Q', usec) + m.get_msgbuf()))
def handle_msec_timestamp(self, m, master):
'''special handling for MAVLink packets with a time_boot_ms field'''
if m.get_type() == 'GLOBAL_POSITION_INT':
# this is fix time, not boot time
return
msec = m.time_boot_ms
if msec + 30000 < master.highest_msec:
self.say('Time has wrapped')
print('Time has wrapped', msec, master.highest_msec)
self.status.highest_msec = msec
for mm in self.mpstate.mav_master:
mm.link_delayed = False
mm.highest_msec = msec
return
# we want to detect when a link is delayed
master.highest_msec = msec
if msec > self.status.highest_msec:
self.status.highest_msec = msec
if msec < self.status.highest_msec and len(self.mpstate.mav_master) > 1:
master.link_delayed = True
else:
master.link_delayed = False
def report_altitude(self, altitude):
'''possibly report a new altitude'''
master = self.master
if getattr(self.console, 'ElevationMap', None) is not None and self.mpstate.settings.basealt != 0:
lat = master.field('GLOBAL_POSITION_INT', 'lat', 0)*1.0e-7
lon = master.field('GLOBAL_POSITION_INT', 'lon', 0)*1.0e-7
alt1 = self.console.ElevationMap.GetElevation(lat, lon)
if alt1 is not None:
alt2 = self.mpstate.settings.basealt
altitude += alt2 - alt1
self.status.altitude = altitude
if (int(self.mpstate.settings.altreadout) > 0 and
math.fabs(self.status.altitude - self.status.last_altitude_announce) >=
int(self.settings.altreadout)):
self.status.last_altitude_announce = self.status.altitude
rounded_alt = int(self.settings.altreadout) * ((self.settings.altreadout/2 + int(self.status.altitude)) / int(self.settings.altreadout))
self.say("height %u" % rounded_alt, priority='notification')
def master_callback(self, m, master):
'''process mavlink message m on master, sending any messages to recipients'''
# see if it is handled by a specialised sysid connection
sysid = m.get_srcSystem()
if sysid in self.mpstate.sysid_outputs:
self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf())
return
if getattr(m, '_timestamp', None) is None:
master.post_message(m)
self.status.counters['MasterIn'][master.linknum] += 1
mtype = m.get_type()
# and log them
if mtype not in dataPackets and self.mpstate.logqueue:
# put link number in bottom 2 bits, so we can analyse packet
# delay in saved logs
usec = self.get_usec()
usec = (usec & ~3) | master.linknum
self.mpstate.logqueue.put(str(struct.pack('>Q', usec) + m.get_msgbuf()))
# keep the last message of each type around
self.status.msgs[m.get_type()] = m
        if m.get_type() not in self.status.msg_count:
self.status.msg_count[m.get_type()] = 0
self.status.msg_count[m.get_type()] += 1
if m.get_srcComponent() == mavutil.mavlink.MAV_COMP_ID_GIMBAL and m.get_type() == 'HEARTBEAT':
# silence gimbal heartbeat packets for now
return
if getattr(m, 'time_boot_ms', None) is not None:
# update link_delayed attribute
self.handle_msec_timestamp(m, master)
if mtype in activityPackets:
if master.linkerror:
master.linkerror = False
self.say("link %u OK" % (master.linknum+1))
self.status.last_message = time.time()
master.last_message = self.status.last_message
if master.link_delayed:
# don't process delayed packets that cause double reporting
if mtype in delayedPackets:
return
if mtype == 'HEARTBEAT' and m.get_srcSystem() != 255:
if self.settings.target_system == 0 and self.settings.target_system != m.get_srcSystem():
self.settings.target_system = m.get_srcSystem()
self.say("online system %u" % self.settings.target_system,'message')
if self.status.heartbeat_error:
self.status.heartbeat_error = False
self.say("heartbeat OK")
if master.linkerror:
master.linkerror = False
self.say("link %u OK" % (master.linknum+1))
self.status.last_heartbeat = time.time()
master.last_heartbeat = self.status.last_heartbeat
armed = self.master.motors_armed()
if armed != self.status.armed:
self.status.armed = armed
if armed:
self.say("ARMED")
else:
self.say("DISARMED")
if master.flightmode != self.status.flightmode and time.time() > self.status.last_mode_announce + 2:
self.status.flightmode = master.flightmode
self.status.last_mode_announce = time.time()
if self.mpstate.functions.input_handler is None:
self.mpstate.rl.set_prompt(self.status.flightmode + "> ")
self.say("Mode " + self.status.flightmode)
if m.type == mavutil.mavlink.MAV_TYPE_FIXED_WING:
self.mpstate.vehicle_type = 'plane'
self.mpstate.vehicle_name = 'ArduPlane'
elif m.type in [mavutil.mavlink.MAV_TYPE_GROUND_ROVER,
mavutil.mavlink.MAV_TYPE_SURFACE_BOAT,
mavutil.mavlink.MAV_TYPE_SUBMARINE]:
self.mpstate.vehicle_type = 'rover'
self.mpstate.vehicle_name = 'APMrover2'
elif m.type in [mavutil.mavlink.MAV_TYPE_QUADROTOR,
mavutil.mavlink.MAV_TYPE_COAXIAL,
mavutil.mavlink.MAV_TYPE_HEXAROTOR,
mavutil.mavlink.MAV_TYPE_OCTOROTOR,
mavutil.mavlink.MAV_TYPE_TRICOPTER,
mavutil.mavlink.MAV_TYPE_HELICOPTER]:
self.mpstate.vehicle_type = 'copter'
self.mpstate.vehicle_name = 'ArduCopter'
elif m.type in [mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER]:
self.mpstate.vehicle_type = 'antenna'
self.mpstate.vehicle_name = 'AntennaTracker'
elif mtype == 'STATUSTEXT':
if m.text != self.status.last_apm_msg or time.time() > self.status.last_apm_msg_time+2:
self.mpstate.console.writeln("APM: %s" % m.text, bg='red')
self.status.last_apm_msg = m.text
self.status.last_apm_msg_time = time.time()
elif mtype == "VFR_HUD":
have_gps_lock = False
if 'GPS_RAW' in self.status.msgs and self.status.msgs['GPS_RAW'].fix_type == 2:
have_gps_lock = True
elif 'GPS_RAW_INT' in self.status.msgs and self.status.msgs['GPS_RAW_INT'].fix_type == 3:
have_gps_lock = True
if have_gps_lock and not self.status.have_gps_lock and m.alt != 0:
self.say("GPS lock at %u meters" % m.alt, priority='notification')
self.status.have_gps_lock = True
elif mtype == "GPS_RAW":
if self.status.have_gps_lock:
if m.fix_type != 2 and not self.status.lost_gps_lock and (time.time() - self.status.last_gps_lock) > 3:
self.say("GPS fix lost")
self.status.lost_gps_lock = True
if m.fix_type == 2 and self.status.lost_gps_lock:
self.say("GPS OK")
self.status.lost_gps_lock = False
if m.fix_type == 2:
self.status.last_gps_lock = time.time()
elif mtype == "GPS_RAW_INT":
if self.status.have_gps_lock:
if m.fix_type < 3 and not self.status.lost_gps_lock and (time.time() - self.status.last_gps_lock) > 3:
self.say("GPS fix lost")
self.status.lost_gps_lock = True
if m.fix_type >= 3 and self.status.lost_gps_lock:
self.say("GPS OK")
self.status.lost_gps_lock = False
if m.fix_type >= 3:
self.status.last_gps_lock = time.time()
elif mtype == "NAV_CONTROLLER_OUTPUT" and self.status.flightmode == "AUTO" and self.mpstate.settings.distreadout:
rounded_dist = int(m.wp_dist/self.mpstate.settings.distreadout)*self.mpstate.settings.distreadout
if math.fabs(rounded_dist - self.status.last_distance_announce) >= self.mpstate.settings.distreadout:
if rounded_dist != 0:
self.say("%u" % rounded_dist, priority="progress")
self.status.last_distance_announce = rounded_dist
elif mtype == "GLOBAL_POSITION_INT":
self.report_altitude(m.relative_alt*0.001)
elif mtype == "COMPASSMOT_STATUS":
print(m)
elif mtype == "BAD_DATA":
if self.mpstate.settings.shownoise and mavutil.all_printable(m.data):
self.mpstate.console.write(str(m.data), bg='red')
elif mtype in [ "COMMAND_ACK", "MISSION_ACK" ]:
self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
if mtype == "COMMAND_ACK" and m.command == mavutil.mavlink.MAV_CMD_PREFLIGHT_CALIBRATION:
if m.result == mavutil.mavlink.MAV_RESULT_ACCEPTED:
self.say("Calibrated")
else:
#self.mpstate.console.writeln("Got MAVLink msg: %s" % m)
pass
if self.status.watch is not None:
if fnmatch.fnmatch(m.get_type().upper(), self.status.watch.upper()):
self.mpstate.console.writeln('< '+str(m))
# don't pass along bad data
if mtype != 'BAD_DATA':
# pass messages along to listeners, except for REQUEST_DATA_STREAM, which
            # would lead to a conflict in stream rate setting between mavproxy and the other
# GCS
if self.mpstate.settings.mavfwd_rate or mtype != 'REQUEST_DATA_STREAM':
                if mtype not in self.no_fwd_types:
for r in self.mpstate.mav_outputs:
r.write(m.get_msgbuf())
# pass to modules
for (mod,pm) in self.mpstate.modules:
if not hasattr(mod, 'mavlink_packet'):
continue
try:
mod.mavlink_packet(m)
except Exception as msg:
if self.mpstate.settings.moddebug == 1:
print(msg)
elif self.mpstate.settings.moddebug > 1:
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback,
limit=2, file=sys.stdout)
def init(mpstate):
'''initialise module'''
return LinkModule(mpstate)
```
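Logged messages are prefixed with a big-endian 64-bit microsecond timestamp whose bottom two bits carry the link number (see get_usec() and the logqueue writes above), so saved logs can be analysed for per-link delay. A round trip of that packing:
```python
# Round trip of the timestamp/linknum packing used in master_callback().
import struct, time

linknum = 2
usec = int(time.time() * 1.0e6)
stamped = (usec & ~3) | linknum        # bottom 2 bits carry the link number

header = struct.pack('>Q', stamped)    # prefix prepended to each message
decoded = struct.unpack('>Q', header)[0]
print("link %u, usec %u" % (decoded & 3, decoded & ~3))
```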
#### File: MAVProxy/modules/mavproxy_log.py
```python
import time, os
from MAVProxy.modules.lib import mp_module
class LogModule(mp_module.MPModule):
def __init__(self, mpstate):
super(LogModule, self).__init__(mpstate, "log", "log transfer")
self.add_command('log', self.cmd_log, "log file handling", ['<download|status|erase|resume|cancel|list>'])
self.reset()
def reset(self):
self.download_set = set()
self.download_file = None
self.download_lognum = None
self.download_filename = None
self.download_start = None
self.download_last_timestamp = None
self.download_ofs = 0
self.retries = 0
self.entries = {}
self.download_queue = []
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
if m.get_type() == 'LOG_ENTRY':
self.handle_log_entry(m)
elif m.get_type() == 'LOG_DATA':
self.handle_log_data(m)
def handle_log_entry(self, m):
'''handling incoming log entry'''
if m.time_utc == 0:
tstring = ''
else:
tstring = time.ctime(m.time_utc)
self.entries[m.id] = m
print("Log %u numLogs %u lastLog %u size %u %s" % (m.id, m.num_logs, m.last_log_num, m.size, tstring))
def handle_log_data(self, m):
'''handling incoming log data'''
if self.download_file is None:
return
# lose some data
# import random
# if random.uniform(0,1) < 0.05:
# print('dropping ', str(m))
# return
if m.ofs != self.download_ofs:
self.download_file.seek(m.ofs)
self.download_ofs = m.ofs
if m.count != 0:
data = m.data[:m.count]
s = ''.join(str(chr(x)) for x in data)
self.download_file.write(s)
self.download_set.add(m.ofs // 90)
self.download_ofs += m.count
self.download_last_timestamp = time.time()
if m.count == 0 or (m.count < 90 and len(self.download_set) == 1 + (m.ofs // 90)):
dt = time.time() - self.download_start
self.download_file.close()
size = os.path.getsize(self.download_filename)
speed = size / (1000.0 * dt)
print("Finished downloading %s (%u bytes %u seconds, %.1f kbyte/sec %u retries)" % (
self.download_filename,
size,
dt, speed,
self.retries))
self.download_file = None
self.download_filename = None
self.download_set = set()
if len(self.download_queue):
self.log_download_next()
def handle_log_data_missing(self):
'''handling missing incoming log data'''
if len(self.download_set) == 0:
return
highest = max(self.download_set)
diff = set(range(highest)).difference(self.download_set)
if len(diff) == 0:
self.master.mav.log_request_data_send(self.target_system,
self.target_component,
self.download_lognum, (1 + highest) * 90, 0xffffffff)
self.retries += 1
else:
num_requests = 0
while num_requests < 20:
start = min(diff)
diff.remove(start)
end = start
while end + 1 in diff:
end += 1
diff.remove(end)
self.master.mav.log_request_data_send(self.target_system,
self.target_component,
self.download_lognum, start * 90, (end + 1 - start) * 90)
num_requests += 1
self.retries += 1
if len(diff) == 0:
break
def log_status(self):
'''show download status'''
if self.download_filename is None:
print("No download")
return
dt = time.time() - self.download_start
speed = os.path.getsize(self.download_filename) / (1000.0 * dt)
m = self.entries.get(self.download_lognum, None)
if m is None:
size = 0
else:
size = m.size
highest = max(self.download_set)
diff = set(range(highest)).difference(self.download_set)
print("Downloading %s - %u/%u bytes %.1f kbyte/s (%u retries %u missing)" % (self.download_filename,
os.path.getsize(self.download_filename),
size,
speed,
self.retries,
len(diff)))
def log_download_next(self):
latest = self.download_queue.pop()
filename = self.default_log_filename(latest)
self.log_download(latest, filename)
def log_download_all(self):
if len(self.entries.keys()) == 0:
print("Please use log list first")
return
self.download_queue = sorted(self.entries, key=lambda id: self.entries[id].time_utc)
self.log_download_next()
def log_download(self, log_num, filename):
'''download a log file'''
print("Downloading log %u as %s" % (log_num, filename))
self.download_lognum = log_num
self.download_file = open(filename, "wb")
self.master.mav.log_request_data_send(self.target_system,
self.target_component,
log_num, 0, 0xFFFFFFFF)
self.download_filename = filename
self.download_set = set()
self.download_start = time.time()
self.download_last_timestamp = time.time()
self.download_ofs = 0
self.retries = 0
def default_log_filename(self, log_num):
return "log%u.bin" % log_num
def cmd_log(self, args):
'''log commands'''
if len(args) < 1:
print("usage: log <list|download|erase|resume|status|cancel>")
return
if args[0] == "status":
self.log_status()
if args[0] == "list":
print("Requesting log list")
self.download_set = set()
self.master.mav.log_request_list_send(self.target_system,
self.target_component,
0, 0xffff)
elif args[0] == "erase":
self.master.mav.log_erase_send(self.target_system,
self.target_component)
elif args[0] == "resume":
self.master.mav.log_request_end_send(self.target_system,
self.target_component)
elif args[0] == "cancel":
if self.download_file is not None:
self.download_file.close()
self.reset()
elif args[0] == "download":
if len(args) < 2:
print("usage: log download <lognumber> #")
return
if args[1] == 'all':
self.log_download_all()
return
if args[1] == 'latest':
if len(self.entries.keys()) == 0:
print("Please use log list first")
return
log_num = sorted(self.entries, key=lambda id: self.entries[id].time_utc)[-1]
else:
log_num = int(args[1])
if len(args) > 2:
filename = args[2]
else:
filename = self.default_log_filename(log_num)
self.log_download(log_num, filename)
def idle_task(self):
'''handle missing log data'''
if self.download_last_timestamp is not None and time.time() - self.download_last_timestamp > 0.7:
self.download_last_timestamp = time.time()
self.handle_log_data_missing()
def init(mpstate):
'''initialise module'''
return LogModule(mpstate)
```
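handle_log_data_missing() re-requests gaps by coalescing missing 90-byte block numbers into contiguous runs, issuing one LOG_REQUEST_DATA per run. The coalescing step on its own:
```python
# Standalone sketch of the run-coalescing in handle_log_data_missing();
# each run becomes one (ofs, count) request over 90-byte blocks.
def coalesce_missing(download_set, highest):
    diff = set(range(highest)).difference(download_set)
    requests = []
    while diff:
        start = min(diff)
        diff.remove(start)
        end = start
        while end + 1 in diff:
            end += 1
            diff.remove(end)
        requests.append((start * 90, (end + 1 - start) * 90))
    return requests

received = {0, 1, 2, 5, 6, 9}
print(coalesce_missing(received, highest=10))  # -> [(270, 180), (630, 180)]
```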
#### File: modules/mavproxy_map/__init__.py
```python
import sys, os, math
import functools
import time
from MAVProxy.modules.mavproxy_map import mp_elevation
from MAVProxy.modules.lib import mp_util
from MAVProxy.modules.lib import mp_settings
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib.mp_menu import *
from pymavlink import mavutil
class MapModule(mp_module.MPModule):
def __init__(self, mpstate):
super(MapModule, self).__init__(mpstate, "map", "map display", public = True)
self.lat = None
self.lon = None
self.heading = 0
self.wp_change_time = 0
self.fence_change_time = 0
self.rally_change_time = 0
self.have_simstate = False
self.have_vehicle = {}
self.move_wp = -1
self.moving_wp = None
self.moving_fencepoint = None
self.moving_rally = None
self.mission_list = None
self.icon_counter = 0
self.click_position = None
self.click_time = 0
self.draw_line = None
self.draw_callback = None
self.have_global_position = False
self.vehicle_type_name = 'plane'
self.ElevationMap = mp_elevation.ElevationModel()
self.last_unload_check_time = time.time()
self.unload_check_interval = 0.1 # seconds
self.map_settings = mp_settings.MPSettings(
[ ('showgpspos', int, 0),
('showgps2pos', int, 1),
('showsimpos', int, 0),
('showahrs2pos', int, 0),
('showahrs3pos', int, 0),
('brightness', float, 1),
('rallycircle', bool, False),
('loitercircle',bool, False)])
service='OviHybrid'
if 'MAP_SERVICE' in os.environ:
service = os.environ['MAP_SERVICE']
import platform
from MAVProxy.modules.mavproxy_map import mp_slipmap
mpstate.map = mp_slipmap.MPSlipMap(service=service, elevation=True, title='Map')
mpstate.map_functions = { 'draw_lines' : self.draw_lines }
mpstate.map.add_callback(functools.partial(self.map_callback))
self.add_command('map', self.cmd_map, "map control", ['icon',
'set (MAPSETTING)'])
self.add_completion_function('(MAPSETTING)', self.map_settings.completion)
self.default_popup = MPMenuSubMenu('Popup', items=[])
self.add_menu(MPMenuItem('Fly To', 'Fly To', '# guided ',
handler=MPMenuCallTextDialog(title='Altitude (m)', default=100)))
self.add_menu(MPMenuItem('Set Home', 'Set Home', '# map sethome '))
self.add_menu(MPMenuItem('Terrain Check', 'Terrain Check', '# terrain check'))
self.add_menu(MPMenuItem('Show Position', 'Show Position', 'showPosition'))
def add_menu(self, menu):
'''add to the default popup menu'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
self.default_popup.add(menu)
self.mpstate.map.add_object(mp_slipmap.SlipDefaultPopup(self.default_popup, combine=True))
def show_position(self):
'''show map position click information'''
pos = self.click_position
dms = (mp_util.degrees_to_dms(pos[0]), mp_util.degrees_to_dms(pos[1]))
msg = "Coordinates in WGS84\n"
msg += "Decimal: %.6f %.6f\n" % (pos[0], pos[1])
msg += "DMS: %s %s\n" % (dms[0], dms[1])
msg += "Grid: %s\n" % mp_util.latlon_to_grid(pos)
if self.logdir:
logf = open(os.path.join(self.logdir, "positions.txt"), "a")
logf.write("Position: %.6f %.6f at %s\n" % (pos[0], pos[1], time.ctime()))
logf.close()
posbox = MPMenuChildMessageDialog('Position', msg, font_size=32)
posbox.show()
def cmd_map(self, args):
'''map commands'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if args[0] == "icon":
if len(args) < 3:
print("Usage: map icon <lat> <lon> <icon>")
else:
lat = args[1]
lon = args[2]
flag = 'flag.png'
if len(args) > 3:
flag = args[3] + '.png'
icon = self.mpstate.map.icon(flag)
self.mpstate.map.add_object(mp_slipmap.SlipIcon('icon - %s [%u]' % (str(flag),self.icon_counter),
(float(lat),float(lon)),
icon, layer=3, rotation=0, follow=False))
self.icon_counter += 1
elif args[0] == "set":
self.map_settings.command(args[1:])
self.mpstate.map.add_object(mp_slipmap.SlipBrightness(self.map_settings.brightness))
elif args[0] == "sethome":
self.cmd_set_home(args)
else:
print("usage: map <icon|set>")
def display_waypoints(self):
'''display the waypoints'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
self.mission_list = self.module('wp').wploader.view_list()
polygons = self.module('wp').wploader.polygon_list()
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('Mission'))
for i in range(len(polygons)):
p = polygons[i]
if len(p) > 1:
popup = MPMenuSubMenu('Popup',
items=[MPMenuItem('Set', returnkey='popupMissionSet'),
MPMenuItem('WP Remove', returnkey='popupMissionRemove'),
MPMenuItem('WP Move', returnkey='popupMissionMove')])
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('mission %u' % i, p,
layer='Mission', linewidth=2, colour=(255,255,255),
popup_menu=popup))
loiter_rad = self.get_mav_param('WP_LOITER_RAD')
labeled_wps = {}
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('LoiterCircles'))
for i in range(len(self.mission_list)):
next_list = self.mission_list[i]
for j in range(len(next_list)):
#label already printed for this wp?
if (next_list[j] not in labeled_wps):
self.mpstate.map.add_object(mp_slipmap.SlipLabel(
'miss_cmd %u/%u' % (i,j), polygons[i][j], str(next_list[j]), 'Mission', colour=(0,255,255)))
if (self.map_settings.loitercircle and
self.module('wp').wploader.wp_is_loiter(next_list[j])):
self.mpstate.map.add_object(mp_slipmap.SlipCircle('Loiter Circle %u' % (next_list[j] + 1), 'LoiterCircles', polygons[i][j], abs(loiter_rad), (255, 255, 255), 2))
labeled_wps[next_list[j]] = (i,j)
def display_fence(self):
'''display the fence'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
self.fence_change_time = self.module('fence').fenceloader.last_change
points = self.module('fence').fenceloader.polygon()
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('Fence'))
if len(points) > 1:
popup = MPMenuSubMenu('Popup',
items=[MPMenuItem('FencePoint Remove', returnkey='popupFenceRemove'),
MPMenuItem('FencePoint Move', returnkey='popupFenceMove')])
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('Fence', points, layer=1,
linewidth=2, colour=(0,255,0), popup_menu=popup))
def closest_waypoint(self, latlon):
'''find closest waypoint to a position'''
(lat, lon) = latlon
best_distance = -1
closest = -1
for i in range(self.module('wp').wploader.count()):
w = self.module('wp').wploader.wp(i)
distance = mp_util.gps_distance(lat, lon, w.x, w.y)
if best_distance == -1 or distance < best_distance:
best_distance = distance
closest = i
if best_distance < 20:
return closest
else:
return -1
def remove_rally(self, key):
'''remove a rally point'''
a = key.split(' ')
if a[0] != 'Rally' or len(a) != 2:
print("Bad rally object %s" % key)
return
i = int(a[1])
self.mpstate.functions.process_stdin('rally remove %u' % i)
def move_rally(self, key):
'''move a rally point'''
a = key.split(' ')
if a[0] != 'Rally' or len(a) != 2:
print("Bad rally object %s" % key)
return
i = int(a[1])
self.moving_rally = i
def selection_index_to_idx(self, key, selection_index):
'''return a mission idx from a selection_index'''
a = key.split(' ')
if a[0] != 'mission' or len(a) != 2:
print("Bad mission object %s" % key)
return None
midx = int(a[1])
if midx < 0 or midx >= len(self.mission_list):
print("Bad mission index %s" % key)
return None
mlist = self.mission_list[midx]
if selection_index < 0 or selection_index >= len(mlist):
print("Bad mission polygon %s" % selection_index)
return None
idx = mlist[selection_index]
return idx
def move_mission(self, key, selection_index):
'''move a mission point'''
idx = self.selection_index_to_idx(key, selection_index)
self.moving_wp = idx
print("Moving wp %u" % idx)
def remove_mission(self, key, selection_index):
'''remove a mission point'''
idx = self.selection_index_to_idx(key, selection_index)
self.mpstate.functions.process_stdin('wp remove %u' % idx)
def remove_fencepoint(self, key, selection_index):
'''remove a fence point'''
self.mpstate.functions.process_stdin('fence remove %u' % (selection_index+1))
def move_fencepoint(self, key, selection_index):
'''move a fence point'''
self.moving_fencepoint = selection_index
print("Moving fence point %u" % selection_index)
def set_mission(self, key, selection_index):
'''set a mission point'''
idx = self.selection_index_to_idx(key, selection_index)
self.mpstate.functions.process_stdin('wp set %u' % idx)
def handle_menu_event(self, obj):
'''handle a popup menu event from the map'''
menuitem = obj.menuitem
if menuitem.returnkey.startswith('# '):
cmd = menuitem.returnkey[2:]
if menuitem.handler is not None:
if menuitem.handler_result is None:
return
cmd += menuitem.handler_result
self.mpstate.functions.process_stdin(cmd)
elif menuitem.returnkey == 'popupRallyRemove':
self.remove_rally(obj.selected[0].objkey)
elif menuitem.returnkey == 'popupRallyMove':
self.move_rally(obj.selected[0].objkey)
elif menuitem.returnkey == 'popupMissionSet':
self.set_mission(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupMissionRemove':
self.remove_mission(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupMissionMove':
self.move_mission(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupFenceRemove':
self.remove_fencepoint(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupFenceMove':
self.move_fencepoint(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'showPosition':
self.show_position()
def map_callback(self, obj):
'''called when an event happens on the slipmap'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if isinstance(obj, mp_slipmap.SlipMenuEvent):
self.handle_menu_event(obj)
return
if not isinstance(obj, mp_slipmap.SlipMouseEvent):
return
if obj.event.m_leftDown and self.moving_rally is not None:
self.click_position = obj.latlon
self.click_time = time.time()
self.mpstate.functions.process_stdin("rally move %u" % self.moving_rally)
self.moving_rally = None
return
if obj.event.m_rightDown and self.moving_rally is not None:
print("Cancelled rally move")
self.moving_rally = None
return
if obj.event.m_leftDown and self.moving_wp is not None:
self.click_position = obj.latlon
self.click_time = time.time()
self.mpstate.functions.process_stdin("wp move %u" % self.moving_wp)
self.moving_wp = None
return
if obj.event.m_leftDown and self.moving_fencepoint is not None:
self.click_position = obj.latlon
self.click_time = time.time()
self.mpstate.functions.process_stdin("fence move %u" % (self.moving_fencepoint+1))
self.moving_fencepoint = None
return
if obj.event.m_rightDown and self.moving_wp is not None:
print("Cancelled wp move")
self.moving_wp = None
return
if obj.event.m_rightDown and self.moving_fencepoint is not None:
print("Cancelled fence move")
self.moving_fencepoint = None
return
elif obj.event.m_leftDown:
if time.time() - self.click_time > 0.1:
self.click_position = obj.latlon
self.click_time = time.time()
self.drawing_update()
if self.module('misseditor') is not None:
self.module('misseditor').update_map_click_position(self.click_position)
if obj.event.m_rightDown:
if self.draw_callback is not None:
self.drawing_end()
return
if time.time() - self.click_time > 0.1:
self.click_position = obj.latlon
self.click_time = time.time()
def unload(self):
'''unload module'''
self.mpstate.map.close()
self.mpstate.map = None
self.mpstate.map_functions = {}
def idle_task(self):
now = time.time()
if self.last_unload_check_time + self.unload_check_interval < now:
self.last_unload_check_time = now
if not self.mpstate.map.is_alive():
self.needs_unloading = True
def create_vehicle_icon(self, name, colour, follow=False, vehicle_type=None):
'''add a vehicle to the map'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if vehicle_type is None:
vehicle_type = self.vehicle_type_name
if name in self.have_vehicle and self.have_vehicle[name] == vehicle_type:
return
self.have_vehicle[name] = vehicle_type
icon = self.mpstate.map.icon(colour + vehicle_type + '.png')
self.mpstate.map.add_object(mp_slipmap.SlipIcon(name, (0,0), icon, layer=3, rotation=0, follow=follow,
trail=mp_slipmap.SlipTrail()))
def drawing_update(self):
'''update line drawing'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if self.draw_callback is None:
return
self.draw_line.append(self.click_position)
if len(self.draw_line) > 1:
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('drawing', self.draw_line,
layer='Drawing', linewidth=2, colour=(128,128,255)))
def drawing_end(self):
'''end line drawing'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if self.draw_callback is None:
return
self.draw_callback(self.draw_line)
self.draw_callback = None
self.mpstate.map.add_object(mp_slipmap.SlipDefaultPopup(self.default_popup, combine=True))
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('Drawing'))
def draw_lines(self, callback):
'''draw a series of connected lines on the map, calling callback when done'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
self.draw_callback = callback
self.draw_line = []
self.mpstate.map.add_object(mp_slipmap.SlipDefaultPopup(None))
def cmd_set_home(self, args):
'''called when user selects "Set Home" on map'''
(lat, lon) = (self.click_position[0], self.click_position[1])
alt = self.ElevationMap.GetElevation(lat, lon)
print("Setting home to: ", lat, lon, alt)
self.master.mav.command_long_send(
self.settings.target_system, self.settings.target_component,
mavutil.mavlink.MAV_CMD_DO_SET_HOME,
1, # set position
0, # param1
0, # param2
0, # param3
0, # param4
lat, # lat
lon, # lon
alt) # param7
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if m.get_type() == "HEARTBEAT":
if m.type in [mavutil.mavlink.MAV_TYPE_FIXED_WING]:
self.vehicle_type_name = 'plane'
elif m.type in [mavutil.mavlink.MAV_TYPE_GROUND_ROVER,
mavutil.mavlink.MAV_TYPE_SURFACE_BOAT,
mavutil.mavlink.MAV_TYPE_SUBMARINE]:
self.vehicle_type_name = 'rover'
elif m.type in [mavutil.mavlink.MAV_TYPE_QUADROTOR,
mavutil.mavlink.MAV_TYPE_COAXIAL,
mavutil.mavlink.MAV_TYPE_HEXAROTOR,
mavutil.mavlink.MAV_TYPE_OCTOROTOR,
mavutil.mavlink.MAV_TYPE_TRICOPTER]:
self.vehicle_type_name = 'copter'
elif m.type in [mavutil.mavlink.MAV_TYPE_HELICOPTER]:
self.vehicle_type_name = 'heli'
elif m.type in [mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER]:
self.vehicle_type_name = 'antenna'
# this is the beginnings of allowing support for multiple vehicles
# in the air at the same time
vehicle = 'Vehicle%u' % m.get_srcSystem()
if m.get_type() == "SIMSTATE" and self.map_settings.showsimpos:
self.create_vehicle_icon('Sim' + vehicle, 'green')
self.mpstate.map.set_position('Sim' + vehicle, (m.lat*1.0e-7, m.lng*1.0e-7), rotation=math.degrees(m.yaw))
if m.get_type() == "AHRS2" and self.map_settings.showahrs2pos:
self.create_vehicle_icon('AHRS2' + vehicle, 'blue')
self.mpstate.map.set_position('AHRS2' + vehicle, (m.lat*1.0e-7, m.lng*1.0e-7), rotation=math.degrees(m.yaw))
if m.get_type() == "AHRS3" and self.map_settings.showahrs3pos:
self.create_vehicle_icon('AHRS3' + vehicle, 'orange')
self.mpstate.map.set_position('AHRS3' + vehicle, (m.lat*1.0e-7, m.lng*1.0e-7), rotation=math.degrees(m.yaw))
if m.get_type() == "GPS_RAW_INT" and self.map_settings.showgpspos:
(lat, lon) = (m.lat*1.0e-7, m.lon*1.0e-7)
if lat != 0 or lon != 0:
self.create_vehicle_icon('GPS' + vehicle, 'blue')
self.mpstate.map.set_position('GPS' + vehicle, (lat, lon), rotation=m.cog*0.01)
if m.get_type() == "GPS2_RAW" and self.map_settings.showgps2pos:
(lat, lon) = (m.lat*1.0e-7, m.lon*1.0e-7)
if lat != 0 or lon != 0:
self.create_vehicle_icon('GPS2' + vehicle, 'green')
self.mpstate.map.set_position('GPS2' + vehicle, (lat, lon), rotation=m.cog*0.01)
if m.get_type() == 'GLOBAL_POSITION_INT':
(self.lat, self.lon, self.heading) = (m.lat*1.0e-7, m.lon*1.0e-7, m.hdg*0.01)
if abs(self.lat) > 1.0e-3 or abs(self.lon) > 1.0e-3:
self.have_global_position = True
self.create_vehicle_icon('Pos' + vehicle, 'red', follow=True)
self.mpstate.map.set_position('Pos' + vehicle, (self.lat, self.lon), rotation=self.heading)
if m.get_type() == 'LOCAL_POSITION_NED' and not self.have_global_position:
(self.lat, self.lon) = mp_util.gps_offset(0, 0, m.x, m.y)
self.heading = math.degrees(math.atan2(m.vy, m.vx))
self.create_vehicle_icon('Pos' + vehicle, 'red', follow=True)
self.mpstate.map.set_position('Pos' + vehicle, (self.lat, self.lon), rotation=self.heading)
if m.get_type() == "NAV_CONTROLLER_OUTPUT":
if (self.master.flightmode in [ "AUTO", "GUIDED", "LOITER", "RTL" ] and
self.lat is not None and self.lon is not None):
trajectory = [ (self.lat, self.lon),
mp_util.gps_newpos(self.lat, self.lon, m.target_bearing, m.wp_dist) ]
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('trajectory', trajectory, layer='Trajectory',
linewidth=2, colour=(255,0,180)))
else:
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('Trajectory'))
# if the waypoints have changed, redisplay
last_wp_change = self.module('wp').wploader.last_change
if self.wp_change_time != last_wp_change and abs(time.time() - last_wp_change) > 1:
self.wp_change_time = last_wp_change
self.display_waypoints()
#this may have affected the landing lines from the rally points:
self.rally_change_time = time.time()
# if the fence has changed, redisplay
if self.fence_change_time != self.module('fence').fenceloader.last_change:
self.display_fence()
# if the rallypoints have changed, redisplay
if self.rally_change_time != self.module('rally').rallyloader.last_change:
self.rally_change_time = self.module('rally').rallyloader.last_change
icon = self.mpstate.map.icon('rallypoint.png')
self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('RallyPoints'))
for i in range(self.module('rally').rallyloader.rally_count()):
rp = self.module('rally').rallyloader.rally_point(i)
popup = MPMenuSubMenu('Popup',
items=[MPMenuItem('Rally Remove', returnkey='popupRallyRemove'),
MPMenuItem('Rally Move', returnkey='popupRallyMove')])
self.mpstate.map.add_object(mp_slipmap.SlipIcon('Rally %u' % (i+1), (rp.lat*1.0e-7, rp.lng*1.0e-7), icon,
layer='RallyPoints', rotation=0, follow=False,
popup_menu=popup))
loiter_rad = self.get_mav_param('WP_LOITER_RAD')
if self.map_settings.rallycircle:
self.mpstate.map.add_object(mp_slipmap.SlipCircle('Rally Circ %u' % (i+1), 'RallyPoints', (rp.lat*1.0e-7, rp.lng*1.0e-7), abs(loiter_rad), (255,255,0), 2))
#draw a line between rally point and nearest landing point
nearest_land_wp = None
nearest_distance = 10000000.0
for j in range(self.module('wp').wploader.count()):
w = self.module('wp').wploader.wp(j)
if (w.command == 21): #if landing waypoint
#get distance between rally point and this waypoint
dis = mp_util.gps_distance(w.x, w.y, rp.lat*1.0e-7, rp.lng*1.0e-7)
if (dis < nearest_distance):
nearest_land_wp = w
nearest_distance = dis
                if nearest_land_wp is not None:
points = []
#tangential approach?
if self.get_mav_param('LAND_BREAK_PATH') == 0:
theta = math.degrees(math.atan(loiter_rad / nearest_distance))
tan_dis = math.sqrt(nearest_distance * nearest_distance - (loiter_rad * loiter_rad))
ral_bearing = mp_util.gps_bearing(nearest_land_wp.x, nearest_land_wp.y,rp.lat*1.0e-7, rp.lng*1.0e-7)
points.append(mp_util.gps_newpos(nearest_land_wp.x,nearest_land_wp.y, ral_bearing + theta, tan_dis))
else: #not tangential approach
points.append((rp.lat*1.0e-7, rp.lng*1.0e-7))
points.append((nearest_land_wp.x, nearest_land_wp.y))
self.mpstate.map.add_object(mp_slipmap.SlipPolygon('Rally Land %u' % (i+1), points, 'RallyPoints', (255,255,0), 2))
# check for any events from the map
self.mpstate.map.check_events()
def init(mpstate):
'''initialise module'''
return MapModule(mpstate)
```
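The landing-line drawing above hinges on a small piece of tangent geometry: `theta` is the angular offset from the direct rally-to-land bearing and `tan_dis` the run length along the tangent. A standalone sketch of that calculation (the radius and distance values are made up; note the module approximates the tangent angle with atan(r/d), where the exact value to a circle of radius r from distance d would be asin(r/d)):

```python
import math

def tangent_offset(loiter_rad, distance):
    # Offset angle from the direct bearing and run length along the tangent,
    # mirroring the rally landing-line code above.
    theta = math.degrees(math.atan(loiter_rad / distance))
    tan_dis = math.sqrt(distance * distance - loiter_rad * loiter_rad)
    return theta, tan_dis

print(tangent_offset(80.0, 400.0))  # -> roughly (11.3 degrees, 391.9 m)
```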
#### File: modules/mavproxy_misseditor/me_event.py
```python
MEE_READ_WPS = 0
MEE_WRITE_WPS = 1
MEE_TIME_TO_QUIT = 2
MEE_GET_WP_RAD = 3
MEE_GET_LOIT_RAD = 4
MEE_GET_WP_DEFAULT_ALT = 5
MEE_WRITE_WP_NUM = 6
MEE_LOAD_WP_FILE = 7
MEE_SAVE_WP_FILE = 8
MEE_SET_WP_RAD = 9
MEE_SET_LOIT_RAD = 10
MEE_SET_WP_DEFAULT_ALT = 11
#enum of MissionEditorGUIEvent types
MEGE_CLEAR_MISS_TABLE = 0
MEGE_ADD_MISS_TABLE_ROWS = 1
MEGE_SET_MISS_ITEM = 2
MEGE_SET_WP_RAD = 3
MEGE_SET_LOIT_RAD = 4
MEGE_SET_WP_DEFAULT_ALT = 5
MEGE_SET_LAST_MAP_CLICK_POS = 6
class MissionEditorEvent:
def __init__(self, type, **kwargs):
self.type = type
self.arg_dict = kwargs
        if self.type not in [MEE_READ_WPS, MEE_WRITE_WPS, MEGE_CLEAR_MISS_TABLE,
MEGE_ADD_MISS_TABLE_ROWS, MEGE_SET_MISS_ITEM, MEE_TIME_TO_QUIT,
MEE_GET_WP_RAD, MEE_GET_LOIT_RAD, MEGE_SET_WP_RAD, MEGE_SET_LOIT_RAD,
MEE_GET_WP_DEFAULT_ALT, MEGE_SET_WP_DEFAULT_ALT, MEE_WRITE_WP_NUM,
MEE_LOAD_WP_FILE, MEE_SAVE_WP_FILE, MEE_SET_WP_RAD, MEE_SET_LOIT_RAD,
MEE_SET_WP_DEFAULT_ALT]:
raise TypeError("Unrecongized MissionEditorEvent type:" + str(self.type))
def get_type(self):
return self.type
def get_arg(self, key):
if not key in self.arg_dict:
print("No key %s in %s" % (key, str(self.type)))
return None
return self.arg_dict[key]
```
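A minimal usage sketch for the event class above; the `num` keyword is illustrative, standing in for whatever payload a caller would attach:

```python
ev = MissionEditorEvent(MEE_WRITE_WP_NUM, num=3)
if ev.get_type() == MEE_WRITE_WP_NUM:
    print("write waypoint %u" % ev.get_arg('num'))  # -> write waypoint 3
print(ev.get_arg('missing'))  # prints "No key missing in ..." and returns None
```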
#### File: modules/mavproxy_mmap/__init__.py
```python
import os
import sys
import webbrowser
import mmap_server
g_module_context = None
from MAVProxy.modules.lib import mp_module
class MMapModule(mp_module.MPModule):
def __init__(self, mpstate):
super(MMapModule, self).__init__(mpstate, 'mmap', 'modest map display')
self.lat = None
self.lon = None
self.alt = None
self.speed = None
self.airspeed = None
self.groundspeed = None
self.heading = 0
self.wp_change_time = 0
self.fence_change_time = 0
self.server = None
self.server = mmap_server.start_server('127.0.0.1', port=9999, module_state=self)
webbrowser.open('http://127.0.0.1:9999/', autoraise=True)
def unload(self):
"""unload module"""
self.server.terminate()
def mavlink_packet(self, m):
"""handle an incoming mavlink packet"""
if m.get_type() == 'GPS_RAW':
(self.lat, self.lon) = (m.lat, m.lon)
elif m.get_type() == 'GPS_RAW_INT':
(self.lat, self.lon) = (m.lat / 1.0e7, m.lon / 1.0e7)
elif m.get_type() == "VFR_HUD":
self.heading = m.heading
self.alt = m.alt
self.airspeed = m.airspeed
self.groundspeed = m.groundspeed
def init(mpstate):
'''initialise module'''
return MMapModule(mpstate)
```
#### File: MAVProxy/modules/mavproxy_mode.py
```python
import time, os
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
class ModeModule(mp_module.MPModule):
def __init__(self, mpstate):
super(ModeModule, self).__init__(mpstate, "mode")
self.add_command('mode', self.cmd_mode, "mode change", self.available_modes())
self.add_command('guided', self.cmd_guided, "fly to a clicked location on map")
def cmd_mode(self, args):
'''set arbitrary mode'''
mode_mapping = self.master.mode_mapping()
if mode_mapping is None:
print('No mode mapping available')
return
if len(args) != 1:
            print('Available modes: ', mode_mapping.keys())
return
if args[0].isdigit():
modenum = int(args[0])
else:
mode = args[0].upper()
if mode not in mode_mapping:
                print('Unknown mode: %s' % mode)
return
modenum = mode_mapping[mode]
self.master.set_mode(modenum)
def available_modes(self):
mode_mapping = self.master.mode_mapping()
if mode_mapping is None:
print('No mode mapping available')
return []
return mode_mapping.keys()
def unknown_command(self, args):
'''handle mode switch by mode name as command'''
mode_mapping = self.master.mode_mapping()
mode = args[0].upper()
if mode in mode_mapping:
self.master.set_mode(mode_mapping[mode])
return True
return False
def cmd_guided(self, args):
'''set GUIDED target'''
if len(args) != 1 and len(args) != 3:
print("Usage: guided ALTITUDE | guided LAT LON ALTITUDE")
return
if len(args) == 3:
latitude = float(args[0])
longitude = float(args[1])
altitude = int(args[2])
latlon = (latitude, longitude)
else:
try:
latlon = self.module('map').click_position
except Exception:
print("No map available")
return
if latlon is None:
print("No map click position available")
return
altitude = int(args[0])
print("Guided %s %d" % (str(latlon), altitude))
        self.master.mav.mission_item_send(self.settings.target_system,
self.settings.target_component,
0,
self.module('wp').get_default_frame(),
mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
2, 0, 0, 0, 0, 0,
latlon[0], latlon[1], altitude)
def init(mpstate):
'''initialise module'''
return ModeModule(mpstate)
```
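The key detail in `cmd_guided` is `current=2` in the MISSION_ITEM: ArduPilot treats that as a guided-mode "fly to" target rather than a stored mission item. A minimal sketch of the same call outside the module (the connection string and coordinates below are placeholders):

```python
from pymavlink import mavutil

master = mavutil.mavlink_connection('udpin:127.0.0.1:14550')  # placeholder endpoint
master.wait_heartbeat()
# seq=0, frame=relative-alt, current=2 marks a guided goto, autocontinue=0
master.mav.mission_item_send(master.target_system, master.target_component,
                             0, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT,
                             mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
                             2, 0, 0, 0, 0, 0,
                             -35.363261, 149.165230, 100)  # example lat/lon/alt
```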
#### File: MAVProxy/modules/mavproxy_nsh.py
```python
import time, os, fnmatch, sys, time
from pymavlink import mavutil, mavwp
from MAVProxy.modules.lib import mp_settings
from MAVProxy.modules.lib import mp_module
class NSHModule(mp_module.MPModule):
def __init__(self, mpstate):
super(NSHModule, self).__init__(mpstate, "nsh", "remote nsh shell")
self.add_command('nsh', self.cmd_nsh,
'nsh shell control',
['<start|stop>',
'set (SERIALSETTING)'])
self.serial_settings = mp_settings.MPSettings(
[ ('port', int, mavutil.mavlink.SERIAL_CONTROL_DEV_SHELL),
('baudrate', int, 57600)
]
)
self.add_completion_function('(SERIALSETTING)', self.serial_settings.completion)
self.last_packet = time.time()
self.last_check = time.time()
self.started = False
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
if m.get_type() == 'SERIAL_CONTROL':
data = m.data[:m.count]
if m.count > 0:
                s = ''.join(chr(x) for x in data)
if self.mpstate.system == 'Windows':
# strip nsh ansi codes
s = s.replace("\033[K","")
sys.stdout.write(s)
self.last_packet = time.time()
def stop(self):
'''stop nsh input'''
self.mpstate.rl.set_prompt(self.status.flightmode + "> ")
self.mpstate.functions.input_handler = None
self.started = False
# unlock the port
mav = self.master.mav
mav.serial_control_send(self.serial_settings.port,
0,
0, self.serial_settings.baudrate,
0, [0]*70)
def send(self, line):
'''send some bytes'''
line = line.strip()
if line == ".":
self.stop()
return
mav = self.master.mav
if line != '+++':
line += "\r\n"
buf = [ord(x) for x in line]
buf.extend([0]*(70-len(buf)))
flags = mavutil.mavlink.SERIAL_CONTROL_FLAG_RESPOND
flags |= mavutil.mavlink.SERIAL_CONTROL_FLAG_MULTI
flags |= mavutil.mavlink.SERIAL_CONTROL_FLAG_EXCLUSIVE
mav.serial_control_send(self.serial_settings.port,
flags,
0, self.serial_settings.baudrate,
len(line), buf)
def idle_task(self):
'''handle mavlink packets'''
if not self.started:
return
now = time.time()
if now - self.last_packet < 1:
timeout = 0.05
else:
timeout = 0.2
if now - self.last_check > timeout:
self.last_check = now
mav = self.master.mav
flags = mavutil.mavlink.SERIAL_CONTROL_FLAG_RESPOND
flags |= mavutil.mavlink.SERIAL_CONTROL_FLAG_MULTI
flags |= mavutil.mavlink.SERIAL_CONTROL_FLAG_EXCLUSIVE
mav.serial_control_send(self.serial_settings.port,
flags,
0, self.serial_settings.baudrate,
0, [0]*70)
def cmd_nsh(self, args):
'''nsh shell commands'''
usage = "Usage: nsh <start|stop|set>"
if len(args) < 1:
print(usage)
return
if args[0] == "start":
self.mpstate.functions.input_handler = self.send
self.started = True
self.mpstate.rl.set_prompt("")
elif args[0] == "stop":
self.stop()
elif args[0] == "set":
self.serial_settings.command(args[1:])
else:
print(usage)
def init(mpstate):
'''initialise module'''
return NSHModule(mpstate)
```
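SERIAL_CONTROL payloads are a fixed 70 bytes, which is why `send` zero-pads every line; the packing can be exercised on its own without a MAVLink link (a sketch):

```python
def pack_serial_control(line):
    # Mirror the padding in NSHModule.send: newline-terminate, convert to
    # byte values, then zero-pad to the fixed 70-byte payload.
    line = line.strip() + "\r\n"
    buf = [ord(x) for x in line]
    buf.extend([0] * (70 - len(buf)))
    return len(line), buf

count, buf = pack_serial_control("ls /fs/microsd")
print("%d %d" % (count, len(buf)))  # -> 16 70
```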
#### File: modules/mavproxy_smartcamera/__init__.py
```python
import time, math, sched
# Module Dependent Headers
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib.mp_settings import MPSetting
# Own Headers
from sc_webcam import SmartCameraWebCam
from sc_SonyQX1 import SmartCamera_SonyQX
import sc_config
#****************************************************************************
# LOCAL DEFINES
#****************************************************************************
#****************************************************************************
# Class name : SmartCameraModule
#
# Public Methods : init
# mavlink_packet
#
# Private Methods : __vRegisterCameras
# __vCmdCamTrigger
#
#****************************************************************************
class SmartCameraModule(mp_module.MPModule):
#****************************************************************************
# Method Name : __init__ Class Initializer
#
# Description : Initializes the class
#
# Parameters : mpstate
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __init__(self, mpstate):
super(SmartCameraModule, self).__init__(mpstate, "SmartCamera", "SmartCamera commands")
self.add_command('camtrigger', self.__vCmdCamTrigger, "Trigger camera")
self.add_command('connectcams', self.__vCmdConnectCameras, "Connect to Cameras")
self.add_command('setCamISO', self.__vCmdSetCamISO, "Set Camera ISO")
self.add_command('setCamAperture', self.__vCmdSetCamAperture, "Set Camera Aperture")
self.add_command('setCamShutterSpeed', self.__vCmdSetCamShutterSpeed, "Set Camera Shutter Speed")
self.add_command('setCamExposureMode', self.__vCmdSetCamExposureMode, "Set Camera Exposure Mode")
self.CamRetryScheduler = sched.scheduler(time.time, time.sleep)
self.ProgramAuto = 1
self.Aperture = 2
self.Shutter = 3
self.Manual = 4
self.IntelligentAuto = 5
self.SuperiorAuto = 6
self.WirelessPort = "wlan0"
self.u8RetryTimeout = 0
self.u8MaxRetries = 5
self.__vRegisterCameras()
#****************************************************************************
# Method Name : __vRegisterQXCamera
#
# Description : Tries to connect to a QX camera on the specified Wireless
# port. If no camera is found it will retry every 5 seconds
# until u8MaxRetries is reached.
#
# Parameters : None
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vRegisterQXCamera(self,u8CamNumber):
if (self.u8RetryTimeout < self.u8MaxRetries):
new_camera = SmartCamera_SonyQX(u8CamNumber, self.WirelessPort)
if new_camera.boValidCameraFound() is True:
self.camera_list = self.camera_list + [new_camera]
print("Found QX Camera")
else:
print("No Valid Camera Found, retry in 5 sec")
self.u8RetryTimeout = self.u8RetryTimeout + 1
self.CamRetryScheduler.enter(5, 1, self.__vRegisterQXCamera, [u8CamNumber])
self.CamRetryScheduler.run()
else:
print("Max retries reached, No QX Camera Found")
self.u8RetryTimeout = 0
#****************************************************************************
# Method Name : __vRegisterCameras
#
# Description : Creates camera objects based on camera-type configuration
#
# Parameters : None
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vRegisterCameras(self):
# initialise list
self.camera_list = []
#look for up to 2 cameras
for i in range(0,2):
config_group = "camera%d" % i
camera_type = sc_config.config.get_integer(config_group, 'type', 0)
# webcam
if camera_type == 1:
new_camera = SmartCameraWebCam(i)
self.camera_list = self.camera_list + [new_camera]
# Sony QX1
if camera_type == 2:
self.__vRegisterQXCamera(i)
# display number of cameras found
print ("cameras found: %d" % len(self.camera_list))
#****************************************************************************
# Method Name : __vCmdCamTrigger
#
# Description : Triggers all the cameras and stores Geotag information
#
# Parameters : None
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vCmdCamTrigger(self, args):
'''Trigger Camera'''
#print(self.camera_list)
for cam in self.camera_list:
cam.take_picture()
print("Trigger Cam %s" % cam)
#****************************************************************************
# Method Name : __vCmdConnectCameras
#
# Description : Initiates connection to cameras
#
# Parameters : None
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vCmdConnectCameras(self, args):
'''ToDo: Validate the argument as a valid port'''
if len(args) >= 1:
self.WirelessPort = args[0]
print ("Connecting to Cameras on %s" % self.WirelessPort)
self.__vRegisterCameras()
#****************************************************************************
# Method Name : __vCmdSetCamExposureMode
#
# Description : Sets the camera exposure mode
#
# Parameters : Exposure Mode, Cam number
# Valid values are Program Auto, Aperture, Shutter, Manual
# Intelligent Auto, Superior Auto
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vCmdSetCamExposureMode(self, args):
'''ToDo: Validate CAM number and Valid Mode Values'''
if len(args) == 1:
for cam in self.camera_list:
cam.boSetExposureMode(args[0])
elif len(args) == 2:
cam = self.camera_list[int(args[1])]
cam.boSetExposureMode(args[0])
else:
print ("Usage: setCamExposureMode MODE [CAMNUMBER], Valid values for MODE: Program Auto, Aperture, Shutter, Manual, Intelligent Auto, Superior Auto")
#****************************************************************************
# Method Name : __vCmdSetCamAperture
#
# Description : Sets the camera aperture
#
# Parameters : Aperture Value, Cam number
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vCmdSetCamAperture(self, args):
'''ToDo: Validate CAM number and Valid Aperture Value'''
if len(args) == 1:
for cam in self.camera_list:
cam.boSetAperture(int(args[0]))
elif len(args) == 2:
cam = self.camera_list[int(args[1])]
cam.boSetAperture(int(args[0]))
else:
print ("Usage: setCamAperture APERTURE [CAMNUMBER], APERTURE is value x10")
#****************************************************************************
# Method Name : __vCmdSetCamShutterSpeed
#
# Description : Sets the shutter speed for the camera
#
# Parameters : Shutter speed, Cam Number
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vCmdSetCamShutterSpeed(self, args):
'''ToDo: Validate CAM number and Valid Shutter Speed'''
if len(args) == 1:
for cam in self.camera_list:
cam.boSetShutterSpeed(int(args[0]))
elif len(args) == 2:
cam = self.camera_list[int(args[1])]
cam.boSetShutterSpeed(int(args[0]))
else:
print ("Usage: setCamShutterSpeed SHUTTERVALUE [CAMNUMBER], Shutter value is the devisor in 1/x (only works for values smaller than 1)")
#****************************************************************************
# Method Name : __vCmdSetCamISO
#
# Description : Sets the ISO value for the camera
#
# Parameters : ISO Value, Cam Number
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vCmdSetCamISO(self, args):
'''ToDo: Validate CAM number and Valid ISO Value'''
if len(args) == 1:
for cam in self.camera_list:
cam.boSetISO(args[0])
elif len(args) == 2:
cam = self.camera_list[int(args[1])]
cam.boSetISO(args[0])
else:
print ("Usage: setCamISO ISOVALUE [CAMNUMBER]")
#****************************************************************************
# Method Name : __vCmdCamZoomIn
#
# Description : Commands the Camera to Zoom In
#
# Parameters : None
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vCmdCamZoomIn(self):
for cam in self.camera_list:
cam.boZoomIn()
#****************************************************************************
# Method Name : __vCmdCamZoomOut
#
    # Description : Commands the Camera to Zoom Out
    #
    # Parameters : None
    #
    # Return Value : None
    #
    # Author : <NAME>
#
#****************************************************************************
def __vCmdCamZoomOut(self):
for cam in self.camera_list:
cam.boZoomOut()
#****************************************************************************
# Method Name : __vDecodeDIGICAMConfigure
#
# Description : Decode and process the camera configuration Messages
#
# Parameters : CommandLong Message
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vDecodeDIGICAMConfigure(self, mCommand_Long):
if mCommand_Long.param1 != 0:
print ("Exposure Mode = %d" % mCommand_Long.param1)
if mCommand_Long.param1 == self.ProgramAuto:
self.__vCmdSetCamExposureMode(["Program Auto"])
elif mCommand_Long.param1 == self.Aperture:
self.__vCmdSetCamExposureMode(["Aperture"])
elif mCommand_Long.param1 == self.Shutter:
self.__vCmdSetCamExposureMode(["Shutter"])
'''Shutter Speed'''
if mCommand_Long.param2 != 0:
print ("Shutter Speed= %d" % mCommand_Long.param2)
self.__vCmdSetCamShutterSpeed([mCommand_Long.param2])
'''Aperture'''
if mCommand_Long.param3 != 0:
print ("Aperture = %d" % mCommand_Long.param3)
self.__vCmdSetCamAperture([mCommand_Long.param3])
'''ISO'''
if mCommand_Long.param4 != 0:
print ("ISO = %d" % mCommand_Long.param4)
self.__vCmdSetCamISO([mCommand_Long.param4])
'''Exposure Type'''
if mCommand_Long.param5 != 0:
print ("Exposure type= %d" % mCommand_Long.param5)
#****************************************************************************
# Method Name : __vDecodeDIGICAMControl
#
# Description : Decode and process the camera control Messages
#
# Parameters : CommandLong Message
#
# Return Value : None
#
    # Author : <NAME>
#
#****************************************************************************
def __vDecodeDIGICAMControl(self, mCommand_Long):
'''Session'''
if mCommand_Long.param1 != 0:
print ("Session = %d" % mCommand_Long.param1)
'''Zooming Step Value'''
if mCommand_Long.param2 != 0:
print ("Zooming Step = %d" % mCommand_Long.param2)
'''Zooming Step Value'''
if mCommand_Long.param3 != 0:
print ("Zooming Value = %d" % mCommand_Long.param3)
if (mCommand_Long.param3 == 1):
self.__vCmdCamZoomIn()
elif (mCommand_Long.param3 == -1):
self.__vCmdCamZoomOut()
else:
print ("Invalid Zoom Value")
'''Focus 0=Unlock/1=Lock/2=relock'''
if mCommand_Long.param4 != 0:
print ("Focus = %d" % mCommand_Long.param4)
'''Trigger'''
if mCommand_Long.param5 != 0:
print ("Trigger = %d" % mCommand_Long.param5)
self.__vCmdCamTrigger(mCommand_Long)
#****************************************************************************
# Method Name : mavlink_packet
#
    # Description : MAVProxy required callback function used to receive MAVLink
    #               packets
    #
    # Parameters : MAVLink Message
    #
    # Return Value : None
    #
    # Author : <NAME>
#
#****************************************************************************
def mavlink_packet(self, m):
'''handle a mavlink packet'''
mtype = m.get_type()
if mtype == "GLOBAL_POSITION_INT":
for cam in self.camera_list:
cam.boSet_GPS(m)
if mtype == "ATTITUDE":
for cam in self.camera_list:
cam.boSet_Attitude(m)
if mtype == "CAMERA_STATUS":
print ("Got Message camera_status")
if mtype == "CAMERA_FEEDBACK":
print ("Got Message camera_feedback")
'''self.__vCmdCamTrigger(m)'''
if mtype == "COMMAND_LONG":
if m.command == mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONFIGURE:
print ("Got Message Digicam_configure")
self.__vDecodeDIGICAMConfigure(m)
elif m.command == mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONTROL:
print ("Got Message Digicam_control")
self.__vDecodeDIGICAMControl(m)
#****************************************************************************
# Method Name : init
#
# Description :
#
# Parameters : mpstate
#
# Return Value : SmartCameraModule Instance
#
    # Author : <NAME>
#
#****************************************************************************
def init(mpstate):
'''initialise module'''
return SmartCameraModule(mpstate)
```
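The DIGICAM decoders above only read `command` and `param1..param5`, so they can be exercised with a simple stand-in object; `FakeCommandLong` below is a hypothetical stub, not a real MAVLink class:

```python
class FakeCommandLong(object):
    # Hypothetical stand-in carrying only the fields the decoders read.
    def __init__(self, param1=0, param2=0, param3=0, param4=0, param5=0):
        self.param1, self.param2, self.param3 = param1, param2, param3
        self.param4, self.param5 = param4, param5

msg = FakeCommandLong(param3=1, param5=1)  # param3=1 zooms in, param5 triggers
# The decode methods are name-mangled (double underscore), so from outside the
# class they would be reached as:
# module._SmartCameraModule__vDecodeDIGICAMControl(msg)
```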
#### File: MAVProxy/modules/mavproxy_test.py
```python
import time, math
def enum(**enums):
return type('Enum', (), enums)
TestState = enum(INIT=1, FBWA=2, AUTO=3)
from MAVProxy.modules.lib import mp_module
class TestModule(mp_module.MPModule):
def __init__(self, mpstate):
super(TestModule, self).__init__(mpstate, "test", "test flight")
self.state = TestState.INIT
print("Module test loaded")
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
if self.state == TestState.INIT:
if self.status.flightmode == "MANUAL":
self.mpstate.functions.process_stdin("switch 4")
self.mpstate.functions.process_stdin("rc 2 1300")
self.mpstate.functions.process_stdin("rc 3 2000")
self.mpstate.functions.process_stdin("module load sensors")
self.mpstate.functions.process_stdin("watch sensors")
self.mpstate.functions.process_stdin("wp list")
self.state = TestState.FBWA
if self.state == TestState.FBWA:
if self.status.altitude > 60:
self.mpstate.functions.process_stdin("rc 2 1500")
self.mpstate.functions.process_stdin("auto")
self.state = TestState.AUTO
def init(mpstate):
'''initialise module'''
return TestModule(mpstate)
```
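The `enum` helper above simply manufactures a throwaway class whose attributes are the named values, so members compare as plain ints:

```python
Color = enum(RED=0, GREEN=1, BLUE=2)
print(Color.GREEN)       # -> 1
print(Color.GREEN == 1)  # -> True
```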
#### File: MAVProxy/modules/mavproxy_tracker.py
```python
import sys, os, time
from MAVProxy.modules.lib import mp_settings
from MAVProxy.modules import mavproxy_map
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.mavproxy_param import ParamState
# this should be in mavutil.py
mode_mapping_antenna = {
'MANUAL' : 0,
'AUTO' : 10,
'INITIALISING' : 16
}
class TrackerModule(mp_module.MPModule):
def __init__(self, mpstate):
from pymavlink import mavparm
super(TrackerModule, self).__init__(mpstate, "tracker", "antenna tracker control module")
self.connection = None
self.tracker_param = mavparm.MAVParmDict()
self.pstate = ParamState(self.tracker_param, self.logdir, self.vehicle_name, 'tracker.parm')
self.tracker_settings = mp_settings.MPSettings(
[ ('port', str, "/dev/ttyUSB0"),
('baudrate', int, 57600),
('debug', int, 0)
]
)
self.add_command('tracker', self.cmd_tracker,
"antenna tracker control module",
                         ['<start|arm|disarm|level|mode|position|calpress>',
'set (TRACKERSETTING)',
'param <set|show|fetch|help> (TRACKERPARAMETER)',
'param (TRACKERSETTING)'])
self.add_completion_function('(TRACKERSETTING)', self.tracker_settings.completion)
self.add_completion_function('(TRACKERPARAMETER)', self.complete_parameter)
def complete_parameter(self, text):
'''complete a tracker parameter'''
return self.tracker_param.keys()
def find_connection(self):
'''find an antenna tracker connection if possible'''
if self.connection is not None:
return self.connection
for m in self.mpstate.mav_master:
if 'HEARTBEAT' in m.messages:
if m.messages['HEARTBEAT'].type == mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER:
return m
return None
def cmd_tracker(self, args):
'''tracker command parser'''
usage = "usage: tracker <start|set|arm|disarm|level|param|mode|position> [options]"
if len(args) == 0:
print(usage)
return
if args[0] == "start":
self.cmd_tracker_start()
elif args[0] == "set":
self.tracker_settings.command(args[1:])
elif args[0] == 'arm':
self.cmd_tracker_arm()
elif args[0] == 'disarm':
self.cmd_tracker_disarm()
elif args[0] == 'level':
self.cmd_tracker_level()
elif args[0] == 'param':
self.cmd_tracker_param(args[1:])
elif args[0] == 'mode':
self.cmd_tracker_mode(args[1:])
elif args[0] == 'position':
self.cmd_tracker_position(args[1:])
elif args[0] == 'calpress':
self.cmd_tracker_calpress(args[1:])
else:
print(usage)
def cmd_tracker_position(self, args):
'''tracker manual positioning commands'''
connection = self.find_connection()
if not connection:
print("No antenna tracker found")
return
positions = [0, 0, 0, 0, 0] # x, y, z, r, buttons. only position[0] (yaw) and position[1] (pitch) are currently used
for i in range(0, 4):
if len(args) > i:
positions[i] = int(args[i]) # default values are 0
connection.mav.manual_control_send(connection.target_system,
positions[0], positions[1],
positions[2], positions[3],
positions[4])
def cmd_tracker_calpress(self, args):
'''calibrate barometer on tracker'''
connection = self.find_connection()
if not connection:
print("No antenna tracker found")
return
connection.calibrate_pressure()
def cmd_tracker_mode(self, args):
'''set arbitrary mode'''
connection = self.find_connection()
if not connection:
print("No antenna tracker found")
return
mode_mapping = connection.mode_mapping()
if mode_mapping is None:
print('No mode mapping available')
return
if len(args) != 1:
print('Available modes: ', mode_mapping.keys())
return
mode = args[0].upper()
if mode not in mode_mapping:
            print('Unknown mode: %s' % mode)
return
connection.set_mode(mode_mapping[mode])
def mavlink_packet(self, m):
'''handle an incoming mavlink packet from the master vehicle. Relay it to the tracker
if it is a GLOBAL_POSITION_INT'''
if m.get_type() in ['GLOBAL_POSITION_INT', 'SCALED_PRESSURE']:
connection = self.find_connection()
if not connection:
return
if m.get_srcSystem() != connection.target_system:
connection.mav.send(m)
def idle_task(self):
'''called in idle time'''
if not self.connection:
return
# check for a mavlink message from the tracker
m = self.connection.recv_msg()
if m is None:
return
if self.tracker_settings.debug:
print(m)
self.pstate.handle_mavlink_packet(self.connection, m)
self.pstate.fetch_check(self.connection)
if self.module('map') is None:
return
if m.get_type() == 'GLOBAL_POSITION_INT':
(self.lat, self.lon, self.heading) = (m.lat*1.0e-7, m.lon*1.0e-7, m.hdg*0.01)
if self.lat != 0 or self.lon != 0:
self.module('map').create_vehicle_icon('AntennaTracker', 'red', follow=False, vehicle_type='antenna')
self.mpstate.map.set_position('AntennaTracker', (self.lat, self.lon), rotation=self.heading)
def cmd_tracker_start(self):
        if self.tracker_settings.port is None:
print("tracker port not set")
return
if self.connection is not None:
self.connection.close()
self.connection = None
print("Closed old connection")
print("connecting to tracker %s at %d" % (self.tracker_settings.port,
self.tracker_settings.baudrate))
m = mavutil.mavlink_connection(self.tracker_settings.port,
autoreconnect=True,
source_system=self.settings.source_system,
baud=self.tracker_settings.baudrate)
m.mav.srcComponent = self.settings.source_component
if self.logdir:
m.setup_logfile(os.path.join(self.logdir, 'tracker.tlog'))
self.connection = m
def cmd_tracker_arm(self):
'''Enable the servos in the tracker so the antenna will move'''
if not self.connection:
print("tracker not connected")
return
self.connection.arducopter_arm()
def cmd_tracker_disarm(self):
'''Disable the servos in the tracker so the antenna will not move'''
if not self.connection:
print("tracker not connected")
return
self.connection.arducopter_disarm()
def cmd_tracker_level(self):
'''Calibrate the accelerometers. Disarm and move the antenna level first'''
if not self.connection:
print("tracker not connected")
return
self.connection.calibrate_level()
def cmd_tracker_param(self, args):
'''Parameter commands'''
if not self.connection:
print("tracker not connected")
return
self.pstate.handle_command(self.connection, self.mpstate, args)
def init(mpstate):
'''initialise module'''
return TrackerModule(mpstate)
```
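`find_connection` identifies the tracker purely by the MAV_TYPE in the last HEARTBEAT seen on each link; the same filter written against any list of mavutil connections (`links` is a placeholder):

```python
from pymavlink import mavutil

def find_tracker(links):
    # Return the first link whose last heartbeat came from an antenna tracker.
    for m in links:
        hb = m.messages.get('HEARTBEAT')
        if hb is not None and hb.type == mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER:
            return m
    return None
```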
#### File: MAVProxy/modules/mavproxy_wp.py
```python
import time, os, fnmatch, copy, platform
from pymavlink import mavutil, mavwp
from MAVProxy.modules.lib import mp_module
from MAVProxy.modules.lib import mp_util
if mp_util.has_wxpython:
from MAVProxy.modules.lib.mp_menu import *
class WPModule(mp_module.MPModule):
def __init__(self, mpstate):
super(WPModule, self).__init__(mpstate, "wp", "waypoint handling", public = True)
self.wp_op = None
self.wp_save_filename = None
self.wploader = mavwp.MAVWPLoader()
self.loading_waypoints = False
self.loading_waypoint_lasttime = time.time()
self.last_waypoint = 0
self.wp_period = mavutil.periodic_event(0.5)
self.undo_wp = None
self.undo_type = None
self.undo_wp_idx = -1
self.add_command('wp', self.cmd_wp, 'waypoint management',
["<list|clear|move|remove|loop|set|undo|movemulti|param>",
"<load|update|save|show> (FILENAME)"])
        if self.continue_mode and self.logdir is not None:
waytxt = os.path.join(mpstate.status.logdir, 'way.txt')
if os.path.exists(waytxt):
self.wploader.load(waytxt)
print("Loaded waypoints from %s" % waytxt)
self.menu_added_console = False
self.menu_added_map = False
if mp_util.has_wxpython:
self.menu = MPMenuSubMenu('Mission',
items=[MPMenuItem('Clear', 'Clear', '# wp clear'),
MPMenuItem('List', 'List', '# wp list'),
MPMenuItem('Load', 'Load', '# wp load ',
handler=MPMenuCallFileDialog(flags=('open',),
title='Mission Load',
wildcard='*.txt')),
MPMenuItem('Save', 'Save', '# wp save ',
handler=MPMenuCallFileDialog(flags=('save', 'overwrite_prompt'),
title='Mission Save',
wildcard='*.txt')),
MPMenuItem('Draw', 'Draw', '# wp draw ',
handler=MPMenuCallTextDialog(title='Mission Altitude (m)',
default=100)),
MPMenuItem('Undo', 'Undo', '# wp undo'),
MPMenuItem('Loop', 'Loop', '# wp loop')])
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
mtype = m.get_type()
if mtype in ['WAYPOINT_COUNT','MISSION_COUNT']:
if self.wp_op is None:
self.console.error("No waypoint load started")
else:
self.wploader.clear()
self.wploader.expected_count = m.count
self.console.writeln("Requesting %u waypoints t=%s now=%s" % (m.count,
time.asctime(time.localtime(m._timestamp)),
time.asctime()))
self.master.waypoint_request_send(0)
elif mtype in ['WAYPOINT', 'MISSION_ITEM'] and self.wp_op != None:
if m.seq > self.wploader.count():
self.console.writeln("Unexpected waypoint number %u - expected %u" % (m.seq, self.wploader.count()))
elif m.seq < self.wploader.count():
# a duplicate
pass
else:
self.wploader.add(m)
if m.seq+1 < self.wploader.expected_count:
self.master.waypoint_request_send(m.seq+1)
else:
if self.wp_op == 'list':
for i in range(self.wploader.count()):
w = self.wploader.wp(i)
print("%u %u %.10f %.10f %f p1=%.1f p2=%.1f p3=%.1f p4=%.1f cur=%u auto=%u" % (
w.command, w.frame, w.x, w.y, w.z,
w.param1, w.param2, w.param3, w.param4,
w.current, w.autocontinue))
if self.logdir != None:
waytxt = os.path.join(self.logdir, 'way.txt')
self.save_waypoints(waytxt)
print("Saved waypoints to %s" % waytxt)
elif self.wp_op == "save":
self.save_waypoints(self.wp_save_filename)
self.wp_op = None
elif mtype in ["WAYPOINT_REQUEST", "MISSION_REQUEST"]:
self.process_waypoint_request(m, self.master)
elif mtype in ["WAYPOINT_CURRENT", "MISSION_CURRENT"]:
if m.seq != self.last_waypoint:
self.last_waypoint = m.seq
if self.settings.wpupdates:
self.say("waypoint %u" % m.seq,priority='message')
def idle_task(self):
'''handle missing waypoints'''
if self.wp_period.trigger():
# cope with packet loss fetching mission
if self.master is not None and self.master.time_since('MISSION_ITEM') >= 2 and self.wploader.count() < getattr(self.wploader,'expected_count',0):
seq = self.wploader.count()
print("re-requesting WP %u" % seq)
self.master.waypoint_request_send(seq)
if self.module('console') is not None and not self.menu_added_console:
self.menu_added_console = True
self.module('console').add_menu(self.menu)
if self.module('map') is not None and not self.menu_added_map:
self.menu_added_map = True
self.module('map').add_menu(self.menu)
def process_waypoint_request(self, m, master):
'''process a waypoint request from the master'''
if (not self.loading_waypoints or
time.time() > self.loading_waypoint_lasttime + 10.0):
self.loading_waypoints = False
self.console.error("not loading waypoints")
return
if m.seq >= self.wploader.count():
self.console.error("Request for bad waypoint %u (max %u)" % (m.seq, self.wploader.count()))
return
wp = self.wploader.wp(m.seq)
wp.target_system = self.target_system
wp.target_component = self.target_component
self.master.mav.send(self.wploader.wp(m.seq))
self.loading_waypoint_lasttime = time.time()
self.console.writeln("Sent waypoint %u : %s" % (m.seq, self.wploader.wp(m.seq)))
if m.seq == self.wploader.count() - 1:
self.loading_waypoints = False
self.console.writeln("Sent all %u waypoints" % self.wploader.count())
def send_all_waypoints(self):
'''send all waypoints to vehicle'''
self.master.waypoint_clear_all_send()
if self.wploader.count() == 0:
return
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
self.master.waypoint_count_send(self.wploader.count())
def load_waypoints(self, filename):
'''load waypoints from a file'''
self.wploader.target_system = self.target_system
self.wploader.target_component = self.target_component
try:
self.wploader.load(filename)
except Exception as msg:
print("Unable to load %s - %s" % (filename, msg))
return
print("Loaded %u waypoints from %s" % (self.wploader.count(), filename))
self.send_all_waypoints()
def update_waypoints(self, filename, wpnum):
'''update waypoints from a file'''
self.wploader.target_system = self.target_system
self.wploader.target_component = self.target_component
try:
self.wploader.load(filename)
except Exception as msg:
print("Unable to load %s - %s" % (filename, msg))
return
if self.wploader.count() == 0:
print("No waypoints found in %s" % filename)
return
if wpnum == -1:
print("Loaded %u updated waypoints from %s" % (self.wploader.count(), filename))
elif wpnum >= self.wploader.count():
print("Invalid waypoint number %u" % wpnum)
return
else:
print("Loaded updated waypoint %u from %s" % (wpnum, filename))
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
if wpnum == -1:
start = 0
end = self.wploader.count()-1
else:
start = wpnum
end = wpnum
self.master.mav.mission_write_partial_list_send(self.target_system,
self.target_component,
start, end)
def save_waypoints(self, filename):
'''save waypoints to a file'''
try:
self.wploader.save(filename)
except Exception as msg:
print("Failed to save %s - %s" % (filename, msg))
return
print("Saved %u waypoints to %s" % (self.wploader.count(), filename))
def get_default_frame(self):
'''default frame for waypoints'''
if self.settings.terrainalt == 'Auto':
if self.get_mav_param('TERRAIN_FOLLOW',0) == 1:
return mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT
return mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT
if self.settings.terrainalt == 'True':
return mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT
return mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT
def wp_draw_callback(self, points):
'''callback from drawing waypoints'''
if len(points) < 3:
return
from MAVProxy.modules.lib import mp_util
home = self.wploader.wp(0)
self.wploader.clear()
self.wploader.target_system = self.target_system
self.wploader.target_component = self.target_component
self.wploader.add(home)
if self.get_default_frame() == mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT:
use_terrain = True
else:
use_terrain = False
for p in points:
self.wploader.add_latlonalt(p[0], p[1], self.settings.wpalt, terrain_alt=use_terrain)
self.send_all_waypoints()
def wp_loop(self):
'''close the loop on a mission'''
loader = self.wploader
if loader.count() < 2:
print("Not enough waypoints (%u)" % loader.count())
return
wp = loader.wp(loader.count()-2)
if wp.command == mavutil.mavlink.MAV_CMD_DO_JUMP:
print("Mission is already looped")
return
wp = mavutil.mavlink.MAVLink_mission_item_message(0, 0, 0, 0, mavutil.mavlink.MAV_CMD_DO_JUMP,
0, 1, 1, -1, 0, 0, 0, 0, 0)
loader.add(wp)
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
self.master.waypoint_count_send(self.wploader.count())
print("Closed loop on mission")
def set_home_location(self):
'''set home location from last map click'''
try:
latlon = self.module('map').click_position
except Exception:
print("No map available")
return
lat = float(latlon[0])
lon = float(latlon[1])
if self.wploader.count() == 0:
self.wploader.add_latlonalt(lat, lon, 0)
w = self.wploader.wp(0)
w.x = lat
w.y = lon
self.wploader.set(w, 0)
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
self.master.mav.mission_write_partial_list_send(self.target_system,
self.target_component,
0, 0)
def cmd_wp_move(self, args):
'''handle wp move'''
if len(args) != 1:
print("usage: wp move WPNUM")
return
idx = int(args[0])
if idx < 1 or idx > self.wploader.count():
print("Invalid wp number %u" % idx)
return
try:
latlon = self.module('map').click_position
except Exception:
print("No map available")
return
if latlon is None:
print("No map click position available")
return
wp = self.wploader.wp(idx)
# setup for undo
self.undo_wp = copy.copy(wp)
self.undo_wp_idx = idx
self.undo_type = "move"
(lat, lon) = latlon
if getattr(self.console, 'ElevationMap', None) is not None and wp.frame != mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT:
alt1 = self.console.ElevationMap.GetElevation(lat, lon)
alt2 = self.console.ElevationMap.GetElevation(wp.x, wp.y)
if alt1 is not None and alt2 is not None:
wp.z += alt1 - alt2
wp.x = lat
wp.y = lon
wp.target_system = self.target_system
wp.target_component = self.target_component
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
self.master.mav.mission_write_partial_list_send(self.target_system,
self.target_component,
idx, idx)
self.wploader.set(wp, idx)
print("Moved WP %u to %f, %f at %.1fm" % (idx, lat, lon, wp.z))
def cmd_wp_movemulti(self, args):
'''handle wp move of multiple waypoints'''
if len(args) < 3:
print("usage: wp movemulti WPNUM WPSTART WPEND <rotation>")
return
idx = int(args[0])
if idx < 1 or idx > self.wploader.count():
print("Invalid wp number %u" % idx)
return
wpstart = int(args[1])
if wpstart < 1 or wpstart > self.wploader.count():
print("Invalid wp number %u" % wpstart)
return
wpend = int(args[2])
if wpend < 1 or wpend > self.wploader.count():
print("Invalid wp number %u" % wpend)
return
if idx < wpstart or idx > wpend:
print("WPNUM must be between WPSTART and WPEND")
return
# optional rotation about center point
if len(args) > 3:
rotation = float(args[3])
else:
rotation = 0
try:
latlon = self.module('map').click_position
except Exception:
print("No map available")
return
if latlon is None:
print("No map click position available")
return
wp = self.wploader.wp(idx)
if not self.wploader.is_location_command(wp.command):
print("WP must be a location command")
return
(lat, lon) = latlon
distance = mp_util.gps_distance(wp.x, wp.y, lat, lon)
bearing = mp_util.gps_bearing(wp.x, wp.y, lat, lon)
for wpnum in range(wpstart, wpend+1):
wp = self.wploader.wp(wpnum)
if not self.wploader.is_location_command(wp.command):
continue
(newlat, newlon) = mp_util.gps_newpos(wp.x, wp.y, bearing, distance)
if wpnum != idx and rotation != 0:
# add in rotation
d2 = mp_util.gps_distance(lat, lon, newlat, newlon)
b2 = mp_util.gps_bearing(lat, lon, newlat, newlon)
(newlat, newlon) = mp_util.gps_newpos(lat, lon, b2+rotation, d2)
if getattr(self.console, 'ElevationMap', None) is not None and wp.frame != mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT:
alt1 = self.console.ElevationMap.GetElevation(newlat, newlon)
alt2 = self.console.ElevationMap.GetElevation(wp.x, wp.y)
if alt1 is not None and alt2 is not None:
wp.z += alt1 - alt2
wp.x = newlat
wp.y = newlon
wp.target_system = self.target_system
wp.target_component = self.target_component
self.wploader.set(wp, wpnum)
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
self.master.mav.mission_write_partial_list_send(self.target_system,
self.target_component,
wpstart, wpend+1)
print("Moved WPs %u:%u to %f, %f rotation=%.1f" % (wpstart, wpend, lat, lon, rotation))
def cmd_wp_remove(self, args):
'''handle wp remove'''
if len(args) != 1:
print("usage: wp remove WPNUM")
return
idx = int(args[0])
if idx < 0 or idx >= self.wploader.count():
print("Invalid wp number %u" % idx)
return
wp = self.wploader.wp(idx)
# setup for undo
self.undo_wp = copy.copy(wp)
self.undo_wp_idx = idx
self.undo_type = "remove"
self.wploader.remove(wp)
self.send_all_waypoints()
print("Removed WP %u" % idx)
def cmd_wp_undo(self):
'''handle wp undo'''
if self.undo_wp_idx == -1 or self.undo_wp is None:
print("No undo information")
return
wp = self.undo_wp
if self.undo_type == 'move':
wp.target_system = self.target_system
wp.target_component = self.target_component
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
self.master.mav.mission_write_partial_list_send(self.target_system,
self.target_component,
self.undo_wp_idx, self.undo_wp_idx)
self.wploader.set(wp, self.undo_wp_idx)
print("Undid WP move")
elif self.undo_type == 'remove':
self.wploader.insert(self.undo_wp_idx, wp)
self.send_all_waypoints()
print("Undid WP remove")
else:
print("bad undo type")
self.undo_wp = None
self.undo_wp_idx = -1
def cmd_wp_param(self, args):
'''handle wp parameter change'''
if len(args) < 2:
print("usage: wp param WPNUM PNUM <VALUE>")
return
idx = int(args[0])
if idx < 1 or idx > self.wploader.count():
print("Invalid wp number %u" % idx)
return
wp = self.wploader.wp(idx)
param = [wp.param1, wp.param2, wp.param3, wp.param4]
pnum = int(args[1])
if pnum < 1 or pnum > 4:
print("Invalid param number %u" % pnum)
return
if len(args) == 2:
print("Param %u: %f" % (pnum, param[pnum-1]))
return
param[pnum-1] = float(args[2])
wp.param1 = param[0]
wp.param2 = param[1]
wp.param3 = param[2]
wp.param4 = param[3]
wp.target_system = self.target_system
wp.target_component = self.target_component
self.loading_waypoints = True
self.loading_waypoint_lasttime = time.time()
self.master.mav.mission_write_partial_list_send(self.target_system,
self.target_component,
idx, idx)
self.wploader.set(wp, idx)
print("Set param %u for %u to %f" % (pnum, idx, param[pnum-1]))
def cmd_wp(self, args):
'''waypoint commands'''
usage = "usage: wp <list|load|update|save|set|clear|loop|remove|move>"
if len(args) < 1:
print(usage)
return
if args[0] == "load":
if len(args) != 2:
print("usage: wp load #")
return
self.load_waypoints(args[1])
elif args[0] == "update":
if len(args) < 2:
print("usage: wp update # <wpnum>")
return
if len(args) == 3:
wpnum = int(args[2])
else:
wpnum = -1
self.update_waypoints(args[1], wpnum)
elif args[0] == "list":
self.wp_op = "list"
self.master.waypoint_request_list_send()
elif args[0] == "save":
if len(args) != 2:
print("usage: wp save #")
return
self.wp_save_filename = args[1]
self.wp_op = "save"
self.master.waypoint_request_list_send()
elif args[0] == "savelocal":
if len(args) != 2:
print("usage: wp savelocal #")
return
self.wploader.save(args[1])
elif args[0] == "show":
if len(args) != 2:
print("usage: wp show #")
return
self.wploader.load(args[1])
elif args[0] == "move":
self.cmd_wp_move(args[1:])
elif args[0] == "movemulti":
self.cmd_wp_movemulti(args[1:])
elif args[0] == "param":
self.cmd_wp_param(args[1:])
elif args[0] == "remove":
self.cmd_wp_remove(args[1:])
elif args[0] == "undo":
self.cmd_wp_undo()
elif args[0] == "set":
if len(args) != 2:
print("usage: wp set <wpindex>")
return
self.master.waypoint_set_current_send(int(args[1]))
elif args[0] == "clear":
self.master.waypoint_clear_all_send()
self.wploader.clear()
elif args[0] == "draw":
if not 'draw_lines' in self.mpstate.map_functions:
print("No map drawing available")
return
if self.wploader.count() == 0:
print("Need home location - refresh waypoints")
return
if len(args) > 1:
self.settings.wpalt = int(args[1])
self.mpstate.map_functions['draw_lines'](self.wp_draw_callback)
print("Drawing waypoints on map at altitude %d" % self.settings.wpalt)
elif args[0] == "sethome":
self.set_home_location()
elif args[0] == "loop":
self.wp_loop()
else:
print(usage)
def fetch(self):
"""Download wpts from vehicle (this operation is public to support other modules)"""
if self.wp_op is None: # If we were already doing a list or save, just restart the fetch without changing the operation
self.wp_op = "fetch"
self.master.waypoint_request_list_send()
def init(mpstate):
'''initialise module'''
return WPModule(mpstate)
```
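`get_default_frame` encodes a small decision table: the `terrainalt` setting can be 'Auto', 'True' or 'False', and in Auto mode the vehicle's TERRAIN_FOLLOW parameter decides. Stripped of module state it reduces to (a sketch):

```python
from pymavlink import mavutil

def default_frame(terrainalt_setting, terrain_follow_param):
    # terrainalt_setting: 'Auto', 'True' or 'False'; terrain_follow_param: 0 or 1
    if terrainalt_setting == 'Auto':
        use_terrain = (terrain_follow_param == 1)
    else:
        use_terrain = (terrainalt_setting == 'True')
    if use_terrain:
        return mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT
    return mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT
```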
#### File: MAVProxy/tools/MAVExplorer.py
```python
import sys, struct, time, os, datetime
import math, re
import Queue
import fnmatch
import threading, multiprocessing
from math import *
from MAVProxy.modules.lib import rline
from MAVProxy.modules.lib import wxconsole
from MAVProxy.modules.lib import grapher
from MAVProxy.modules.lib import mavmemlog
from pymavlink.mavextra import *
from MAVProxy.modules.lib.mp_menu import *
import MAVProxy.modules.lib.mp_util as mp_util
from pymavlink import mavutil
from MAVProxy.modules.lib.mp_settings import MPSettings, MPSetting
from MAVProxy.modules.lib import wxsettings
from MAVProxy.modules.lib.graphdefinition import GraphDefinition
from lxml import objectify
import pkg_resources
#Global var to hold the GUI menu element
TopMenu = None
class MEStatus(object):
'''status object to conform with mavproxy structure for modules'''
def __init__(self):
self.msgs = {}
class MEState(object):
'''holds state of MAVExplorer'''
def __init__(self):
self.input_queue = Queue.Queue()
self.rl = None
self.console = wxconsole.MessageConsole(title='MAVExplorer')
self.exit = False
self.status = MEStatus()
self.settings = MPSettings(
[ MPSetting('marker', str, '+', 'data marker', tab='Graph'),
MPSetting('condition', str, None, 'condition'),
MPSetting('xaxis', str, None, 'xaxis'),
MPSetting('linestyle', str, None, 'linestyle'),
MPSetting('show_flightmode', bool, True, 'show flightmode'),
MPSetting('legend', str, 'upper left', 'legend position'),
MPSetting('legend2', str, 'upper right', 'legend2 position')
]
)
self.mlog = None
self.command_map = command_map
self.completions = {
"set" : ["(SETTING)"],
"condition" : ["(VARIABLE)"],
"graph" : ['(VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE)'],
"map" : ['(VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE) (VARIABLE)']
}
self.aliases = {}
self.graphs = []
self.flightmode_selections = []
self.last_graph = GraphDefinition('Untitled', '', '', [], None)
def have_graph(name):
'''return true if we have a graph of the given name'''
for g in mestate.graphs:
if g.name == name:
return True
return False
def menu_callback(m):
'''called on menu selection'''
if m.returnkey.startswith('# '):
cmd = m.returnkey[2:]
if m.handler is not None:
if m.handler_result is None:
return
cmd += m.handler_result
process_stdin(cmd)
elif m.returnkey == 'menuSettings':
wxsettings.WXSettings(mestate.settings)
elif m.returnkey.startswith("mode-"):
idx = int(m.returnkey[5:])
mestate.flightmode_selections[idx] = m.IsChecked()
elif m.returnkey.startswith("loadLog"):
print "File: " + m.returnkey[8:]
elif m.returnkey == 'quit':
mestate.console.close()
mestate.exit = True
print "Exited. Press Enter to continue."
sys.exit(0)
else:
print('Unknown menu selection: %s' % m.returnkey)
def flightmode_menu():
'''construct flightmode menu'''
modes = mestate.mlog.flightmode_list()
ret = []
idx = 0
for (mode,t1,t2) in modes:
modestr = "%s %us" % (mode, (t2-t1))
ret.append(MPMenuCheckbox(modestr, modestr, 'mode-%u' % idx))
idx += 1
mestate.flightmode_selections.append(False)
return ret
def graph_menus():
'''return menu tree for graphs (recursive)'''
ret = MPMenuSubMenu('Graphs', [])
for i in range(len(mestate.graphs)):
g = mestate.graphs[i]
path = g.name.split('/')
name = path[-1]
path = path[:-1]
ret.add_to_submenu(path, MPMenuItem(name, name, '# graph :%u' % i))
return ret
def setup_file_menu():
global TopMenu
TopMenu = MPMenuTop([])
TopMenu.add(MPMenuSubMenu('MAVExplorer',
items=[MPMenuItem('Settings', 'Settings', 'menuSettings'),
MPMenuItem('&Open\tCtrl+O', 'Open Log', '# loadLog ',
handler=MPMenuCallFileDialog(
flags=('open',),
title='Logfile Load',
wildcard='*.tlog;*.log;*.BIN;*.bin')),
MPMenuItem('&Quit\tCtrl+Q', 'Quit', 'quit')]))
mestate.console.set_menu(TopMenu, menu_callback)
def setup_menus():
'''setup console menus'''
global TopMenu
TopMenu.add(MPMenuSubMenu('Display',
items=[MPMenuItem('Map', 'Map', '# map'),
MPMenuItem('Save Graph', 'Save', '# save'),
MPMenuItem('Reload Graphs', 'Reload', '# reload')]))
TopMenu.add(graph_menus())
TopMenu.add(MPMenuSubMenu('FlightMode', items=flightmode_menu()))
mestate.console.set_menu(TopMenu, menu_callback)
def expression_ok(expression):
'''return True if an expression is OK with current messages'''
expression_ok = True
fields = expression.split()
for f in fields:
try:
if f.endswith(':2'):
f = f[:-2]
if mavutil.evaluate_expression(f, mestate.status.msgs) is None:
expression_ok = False
except Exception:
expression_ok = False
break
return expression_ok
def load_graph_xml(xml, filename, load_all=False):
'''load a graph from one xml string'''
ret = []
try:
root = objectify.fromstring(xml)
except Exception:
return []
if root.tag != 'graphs':
return []
if not hasattr(root, 'graph'):
return []
for g in root.graph:
name = g.attrib['name']
expressions = [e.text for e in g.expression]
if load_all:
            ret.append(GraphDefinition(name, expressions[0], g.description.text, expressions, filename))  # first expression, not the leaked loop variable
continue
if have_graph(name):
continue
for e in expressions:
if expression_ok(e):
ret.append(GraphDefinition(name, e, g.description.text, expressions, filename))
break
return ret
def load_graphs():
'''load graphs from mavgraphs.xml'''
mestate.graphs = []
gfiles = ['mavgraphs.xml']
if 'HOME' in os.environ:
for dirname, dirnames, filenames in os.walk(os.path.join(os.environ['HOME'], ".mavproxy")):
for filename in filenames:
if filename.lower().endswith('.xml'):
gfiles.append(os.path.join(dirname, filename))
elif 'LOCALAPPDATA' in os.environ:
for dirname, dirnames, filenames in os.walk(os.path.join(os.environ['LOCALAPPDATA'], "MAVProxy")):
for filename in filenames:
if filename.lower().endswith('.xml'):
gfiles.append(os.path.join(dirname, filename))
for file in gfiles:
if not os.path.exists(file):
continue
graphs = load_graph_xml(open(file).read(), file)
if graphs:
mestate.graphs.extend(graphs)
mestate.console.writeln("Loaded %s" % file)
# also load the built in graphs
dlist = pkg_resources.resource_listdir("MAVProxy", "tools/graphs")
for f in dlist:
raw = pkg_resources.resource_stream("MAVProxy", "tools/graphs/%s" % f).read()
graphs = load_graph_xml(raw, None)
if graphs:
mestate.graphs.extend(graphs)
mestate.console.writeln("Loaded %s" % f)
mestate.graphs = sorted(mestate.graphs, key=lambda g: g.name)
def graph_process(fields, mavExpLog, mavExpFlightModeSel, mavExpSettings):
'''process for a graph'''
mavExpLog.reduce_by_flightmodes(mavExpFlightModeSel)
mg = grapher.MavGraph()
mg.set_marker(mavExpSettings.marker)
mg.set_condition(mavExpSettings.condition)
mg.set_xaxis(mavExpSettings.xaxis)
mg.set_linestyle(mavExpSettings.linestyle)
mg.set_show_flightmode(mavExpSettings.show_flightmode)
mg.set_legend(mavExpSettings.legend)
mg.add_mav(mavExpLog)
for f in fields:
mg.add_field(f)
mg.process()
mg.show()
def display_graph(graphdef):
'''display a graph'''
mestate.console.write("Expression: %s\n" % ' '.join(graphdef.expression.split()))
child = multiprocessing.Process(target=graph_process, args=[graphdef.expression.split(), mestate.mlog, mestate.flightmode_selections, mestate.settings])
child.start()
def cmd_graph(args):
'''graph command'''
usage = "usage: graph <FIELD...>"
if len(args) < 1:
print(usage)
return
if args[0][0] == ':':
i = int(args[0][1:])
g = mestate.graphs[i]
expression = g.expression
args = expression.split()
mestate.console.write("Added graph: %s\n" % g.name)
if g.description:
mestate.console.write("%s\n" % g.description, fg='blue')
mestate.rl.add_history("graph %s" % ' '.join(expression.split()))
mestate.last_graph = g
else:
expression = ' '.join(args)
mestate.last_graph = GraphDefinition('Untitled', expression, '', [expression], None)
display_graph(mestate.last_graph)
def map_process(args, MAVExpLog, MAVExpFlightModes, MAVExpSettings):
    '''process for a map view'''
from mavflightview import mavflightview_mav, mavflightview_options
MAVExpLog.reduce_by_flightmodes(MAVExpFlightModes)
options = mavflightview_options()
options.condition = MAVExpSettings.condition
if len(args) > 0:
options.types = ','.join(args)
mavflightview_mav(MAVExpLog, options)
def cmd_map(args):
'''map command'''
child = multiprocessing.Process(target=map_process, args=[args, mestate.mlog, mestate.flightmode_selections, mestate.settings])
child.start()
def cmd_set(args):
    '''control MAVExplorer options'''
mestate.settings.command(args)
def cmd_condition(args):
    '''control MAVExplorer conditions'''
if len(args) == 0:
print("condition is: %s" % mestate.settings.condition)
return
mestate.settings.condition = ' '.join(args)
if len(mestate.settings.condition) == 0 or mestate.settings.condition == 'clear':
mestate.settings.condition = None
def cmd_reload(args):
'''reload graphs'''
mestate.console.writeln('Reloading graphs', fg='blue')
load_graphs()
setup_menus()
mestate.console.write("Loaded %u graphs\n" % len(mestate.graphs))
def save_graph(graphdef, mestate):
'''save a graph as XML'''
if graphdef.filename is None:
if 'HOME' in os.environ:
dname = os.path.join(os.environ['HOME'], '.mavproxy')
            if not os.path.exists(dname):
                mp_util.mkdir_p(dname)
graphdef.filename = os.path.join(dname, 'mavgraphs.xml')
elif 'LOCALAPPDATA' in os.environ:
dname = os.path.join(os.environ['LOCALAPPDATA'], 'MAVProxy')
            if not os.path.exists(dname):
                mp_util.mkdir_p(dname)
graphdef.filename = os.path.join(dname, 'mavgraphs.xml')
else:
graphdef.filename = 'mavgraphs.xml'
if graphdef.filename is None:
mestate.console.writeln("No file to save graph to", fg='red')
return
try:
graphs = load_graph_xml(open(graphdef.filename).read(), graphdef.filename, load_all=True)
except Exception:
graphs = []
found_name = False
for i in range(len(graphs)):
if graphs[i].name == graphdef.name:
graphs[i] = graphdef
found_name = True
break
if not found_name:
graphs.append(graphdef)
mestate.console.writeln("Saving %u graphs to %s" % (len(graphs), graphdef.filename))
f = open(graphdef.filename, "w")
f.write("<graphs>\n\n")
for g in graphs:
f.write(" <graph name='%s'>\n" % g.name.strip())
if g.description is None:
g.description = ''
f.write(" <description>%s</description>\n" % g.description.strip())
for e in g.expressions:
f.write(" <expression>%s</expression>\n" % e.strip())
f.write(" </graph>\n\n")
f.write("</graphs>\n")
f.close()
def save_callback(operation, graphdef):
'''callback from save thread'''
if operation == 'test':
for e in graphdef.expressions:
if expression_ok(e):
graphdef.expression = e
display_graph(graphdef)
return
mestate.console.writeln('Invalid graph expressions', fg='red')
return
if operation == 'save':
save_graph(graphdef, mestate)
def save_process(MAVExpLastGraph):
'''process for saving a graph'''
from MAVProxy.modules.lib import wx_processguard
from MAVProxy.modules.lib.wx_loader import wx
from MAVProxy.modules.lib.wxgrapheditor import GraphDialog
app = wx.App(False)
frame = GraphDialog('Graph Editor',
MAVExpLastGraph,
save_callback)
frame.ShowModal()
frame.Destroy()
def cmd_save(args):
'''save a graph'''
child = multiprocessing.Process(target=save_process, args=[mestate.last_graph])
child.start()
def cmd_param(args):
'''show parameters'''
if len(args) > 0:
wildcard = args[0]
else:
wildcard = '*'
k = sorted(mestate.mlog.params.keys())
for p in k:
if fnmatch.fnmatch(str(p).upper(), wildcard.upper()):
print("%-16.16s %f" % (str(p), mestate.mlog.params[p]))
def cmd_loadfile(args):
'''callback from menu to load a log file'''
if len(args) != 1:
print "Error loading file"
return
loadfile(args[0])
def loadfile(args):
'''load a log file (path given by arg)'''
mestate.console.write("Loading %s...\n" % args)
t0 = time.time()
mlog = mavutil.mavlink_connection(args, notimestamps=False,
zero_time_base=False)
mestate.mlog = mavmemlog.mavmemlog(mlog, progress_bar)
mestate.status.msgs = mlog.messages
t1 = time.time()
mestate.console.write("\ndone (%u messages in %.1fs)\n" % (mestate.mlog._count, t1-t0))
load_graphs()
setup_menus()
def process_stdin(line):
'''handle commands from user'''
if line is None:
sys.exit(0)
line = line.strip()
if not line:
return
args = line.split()
cmd = args[0]
if cmd == 'help':
k = command_map.keys()
k.sort()
for cmd in k:
(fn, help) = command_map[cmd]
print("%-15s : %s" % (cmd, help))
return
if cmd == 'exit':
mestate.exit = True
return
if not cmd in command_map:
print("Unknown command '%s'" % line)
return
(fn, help) = command_map[cmd]
try:
fn(args[1:])
except Exception as e:
print("ERROR in command %s: %s" % (args[1:], str(e)))
def input_loop():
'''wait for user input'''
while mestate.exit != True:
try:
if mestate.exit != True:
line = raw_input(mestate.rl.prompt)
except EOFError:
mestate.exit = True
sys.exit(1)
mestate.input_queue.put(line)
def main_loop():
'''main processing loop, display graphs and maps'''
while True:
if mestate is None or mestate.exit:
return
while not mestate.input_queue.empty():
line = mestate.input_queue.get()
cmds = line.split(';')
for c in cmds:
process_stdin(c)
time.sleep(0.1)
command_map = {
'graph' : (cmd_graph, 'display a graph'),
'set' : (cmd_set, 'control settings'),
'reload' : (cmd_reload, 'reload graphs'),
'save' : (cmd_save, 'save a graph'),
'condition' : (cmd_condition, 'set graph conditions'),
'param' : (cmd_param, 'show parameters'),
'map' : (cmd_map, 'show map view'),
'loadLog' : (cmd_loadfile, 'load a log file'),
}
def progress_bar(pct):
if pct % 2 == 0:
mestate.console.write('#')
if __name__ == "__main__":
multiprocessing.freeze_support()
mestate = MEState()
setup_file_menu()
mestate.rl = rline.rline("MAV> ", mestate)
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("files", metavar="<FILE>", nargs="?")
args = parser.parse_args()
#If specified, open the log file
if args.files != None and len(args.files) != 0:
loadfile(args.files)
# run main loop as a thread
mestate.thread = threading.Thread(target=main_loop, name='main_loop')
mestate.thread.daemon = True
mestate.thread.start()
# input loop
while mestate.rl != None and mestate.exit != True:
try:
try:
line = raw_input(mestate.rl.prompt)
except EOFError:
mestate.exit = True
break
mestate.input_queue.put(line)
except KeyboardInterrupt:
mestate.exit = True
break
``` |
{
"source": "joakimzhang/qa_study",
"score": 2
} |
#### File: autotest/workspace/test.py
```python
import paramiko
import os
def get_file(remote, usb_dir):
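    """Fetch one file over SFTP, creating the local directory tree on demand.

    The first sftp.get() fails when the local parent directory is missing;
    the except branch creates it with os.makedirs() and retries once.
    """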
t = paramiko.Transport(("10.209.156.46",22))
t.connect(username = "zhangq", password = "<PASSWORD>")
sftp = paramiko.SFTPClient.from_transport(t)
remotepath=r'/media/streams/AudioTestFile/%s'%(remote.replace("\\", "/"))
localpath='%s%s'%(usb_dir, remote)
localpath_dir = os.path.split(localpath)
try:
sftp.get(remotepath, localpath)
except Exception, e:
os.makedirs(localpath_dir[0])
sftp.get(remotepath, localpath)
t.close()
get_file("FromSunplus\\AC3\\384kBps_44.1kHz\\ac3_0006-2002n_ac3_16bit.ac3", "f:\\")
```
#### File: workspace/test_tool_2016_05_10/strip_pcm.py
```python
import time
import os
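# Strips consecutive duplicate 16-bit samples, per channel, from an
# interleaved stereo PCM file: a left or right sample is written only when
# it differs from the previous sample on the same channel. The output is
# written next to the input as "<name>_new.<ext>".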
def strip_pcm(_file_name):
print _file_name
    if not os.path.exists(_file_name):
        print "cannot find file: %s" % _file_name
        return 0
resource_obj = open(r"%s"%_file_name,'rb')
new_obj = open("%s"%_file_name.replace(".","_new."),'wb')
L_16BIT = resource_obj.read(2)
R_16BIT = resource_obj.read(2)
new_obj.write(L_16BIT)
new_obj.write(R_16BIT)
i = 0
while 1:
L_16BIT_NEW = resource_obj.read(2)
R_16BIT_NEW = resource_obj.read(2)
if L_16BIT_NEW == "":
print "strip complete!!!!!"
break
else:
if L_16BIT_NEW != L_16BIT:
new_obj.write(L_16BIT_NEW)
#print "R:%s\n"% L_16BIT_NEW
L_16BIT = L_16BIT_NEW
i = i+1
if R_16BIT_NEW != R_16BIT:
new_obj.write(R_16BIT_NEW)
#print "R:%s\n"%R_16BIT_NEW
R_16BIT = R_16BIT_NEW
i = i+1
#time.sleep(0.01)
        if i%512 == 0:
            # i counts 2-byte samples written, so i/512 is kilobytes
            print "%sKB written"%(i/512)
resource_obj.close()
new_obj.close()
def user_input():
while 1:
print "please input the file name:"
file_name = raw_input()
if file_name == "exit":
break
else:
strip_pcm(file_name)
print "press exit to quit,file name to continue"
time.sleep(0.1)
user_input()
#strip_pcm("AUDIO_IO_HDMI_48K_FS.pcm")
```
#### File: workspace/test_tool_2016_05_10/sub_process.py
```python
import subprocess
import time
import sys
import threading
import re
class sub_process():
#def __init__(self):
#self.p = subprocess.Popen(r"D:\similator_test\Debug\test_audio.exe",stdin=subprocess.PIPE)
def connect_command_sim(self,_exe_path):
#self.handle = open(r'stdout.txt','w',0)
#self.handle2 = open(r'stderr.txt','w',0)
#self.p = subprocess.Popen(str(_exe_path),bufsize=0,stdin=subprocess.PIPE,stdout=self.handle,stderr=self.handle2)
self.p = subprocess.Popen(str(_exe_path),bufsize=0,stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
self.pid = self.p.pid
print self.pid
self.thread_exit_tag = 0
self.check_result = 0
self.threading_obj = threading.Thread(target=self.print_all_out)
self.threading_obj.start()
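        # the reader thread drains stdout continuously; without it a chatty
        # child process could fill the OS pipe buffer and block on write,
        # since results are only polled from the parsed output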
def clear_out(self):
i = 1
returncode = self.p.poll()
while 1:
#sys.stdout.flush()
get_read = self.p.stdout.readline()
returncode = self.p.poll()
i = i+1
print i,returncode
print get_read
time.sleep(0.01)
if get_read == "":
break
print "complete"
def get_checksum(self,_reference_sum):
for i in range(3000):
#print "repeat:",i,"!!!!!!!!!!!!"
time.sleep(1)
if i%10 == 0:
self.sent_command_sim("help")
#self.handle.close()
#time.sleep(1)
#self.handle = open(r'stdout.txt','r',0)
#real_sum = self.print_all_out()
#self.handle.close()
if self._check_num == _reference_sum:
print "checksum:%s==%s"%(self._check_num,_reference_sum)
#return 1
return self._check_num
elif self._check_num == "":
#print "can not find check sum:^_^"
#return 0
pass
else:
print "checksum is:%s!=reference:%s"%(self._check_num,_reference_sum)
#return 0
return self._check_num
print "50 minute time out,can not find checksum"
return 0
def get_result(self):
for i in range(3000):
if self.check_result == 1:
print "QA_PASS"
return 1
else:
time.sleep(1)
print "time out 50minute not find qa pass tag"
return 0
def print_all_out(self):
self._check_num = ""
while 1:
if self.thread_exit_tag == 1:
return 0
#final_txt = self.handle.read()
final_txt = self.p.stdout.readline()
print final_txt
check_obj = re.search("Final Dec checksum (\w+)",final_txt)
_check_result = re.search("QA_PASS",final_txt)
if check_obj != None:
self._check_num = check_obj.group(1)
if _check_result != None:
self.check_result = 1
time.sleep(0.01)
def sent_command_sim(self,_command):
self.p.stdin.write("%s\n"%_command)
print _command
#self.p.stdout.flush()
self.p.stdin.flush()
def print_checksum(self):
for i in range(1200):
#print "repeat:",i,"!!!!!!!!!!!!"
time.sleep(0.01)
self.sent_command_sim("help")
#self.handle.close()
time.sleep(1)
#self.handle = open(r'stdout.txt','r',0)
self.print_all_out()
def kill_subprocess(self):
#for i in range(100):
# #print "repeat:",i,"!!!!!!!!!!!!"
# time.sleep(0.01)
# self.sent_command_sim("help")
#self.handle.close()
#time.sleep(1)
#self.p.kill()
#self.print_checksum()
self.thread_exit_tag = 1
time.sleep(2)
#self.p.terminate()
self.p.kill()
print "kill the process"
#self.handle.close()
#self.handle = open(r'stdout.txt','w+',0)
#self.print_all_out()
#self.handle.close()
if __name__ == "__main__":
new_sub_process = sub_process()
new_sub_process.connect_command_sim(r"D:\Debug\test_audio.exe")
#new_sub_process.print_stdout(1000)
new_sub_process.clear_out()
print "b"
new_sub_process.sent_command_sim(r"configfile test.mp3")
#new_sub_process.sent_command_sim(r"play 8 0 1")
new_sub_process.sent_command_sim(r"source")
new_sub_process.sent_command_sim(r"stop 1")
new_sub_process.sent_command_sim(r"source")
new_sub_process.sent_command_sim(r"source")
    #new_sub_process.print_stdout(2)   # no print_stdout method exists; disabled like the call above
#new_sub_process.sent_command_sim(r"play 8 1 1")
time.sleep(1)
#new_sub_process.sent_command_sim(r"stop 1")
    new_sub_process.kill_subprocess()
#time.sleep(5)
#handle = open(r'1.txt','r')
#_file = handle.read()
#print _file
#print re.search("Final Dec checksum (\w+)",_file).group(1)
```
#### File: qa_study/card/bzk_daily_practice.py
```python
import unittest
import pdb
import os
import binascii
import array
class SimplisticTest(unittest.TestCase):
def test(self):
self.failUnless(True)
class OutcomesTest(unittest.TestCase):
def testPass(self):
return
'''
def testFail(self):
self.failIf(True)
def testError(self):
raise RuntimeError('Test error!')
'''
class TruthTest(unittest.TestCase):
def testFailUnless(self):
self.failUnless(True)
def testAssertTrue(self):
self.assertTrue(True)
def testFailIf(self):
self.failIf(False)
def testAssertFalse(self):
self.assertFalse(False, "the assert false")
class FixturesTest(unittest.TestCase):
def setUp(self):
print 'In setUp()'
self.fixture = range(1, 10)
def tearDown(self):
print 'in teardown()'
del self.fixture
def test(self):
print 'In test()'
self.failUnlessEqual(self.fixture, range(1, 10))
class MyObj(object):
def __init__(self, num_loops):
self.num_loops = num_loops
print "init"
def go(self):
for i in range(self.num_loops):
print i
return
class GetWalk(object):
def __init__(self, _root_path):
self.root_path = _root_path
def get_walk(self):
contents = os.walk(self.root_path)
for _dir, sub_dir, _file in contents:
print _dir+"\\"
for i in sub_dir:
print i+"\\"
for j in _file:
print j
print
class BinArray(object):
def testbin(self):
a = "a"
a = array.array("c", a)
print a
print binascii.hexlify(a)
for i, j in enumerate(a):
print i, j
import tempfile
class FileToArray(object):
def read_file(self):
a = array.array('i', xrange(11))
#a.byteswap()
print "A1:", a
with open("test.txt", "rb") as _read:
print binascii.hexlify(_read.read())
output = open("test.txt", "wb")
a.tofile(output)
print output.name
output.close()
with open("test.txt", "rb") as _read:
print binascii.hexlify(_read.read())
print "aaaaaaaa"
with open(output.name, 'rb') as input:
raw_data = input.read()
print len(raw_data)
print type(raw_data)
print 'raw contents:%s' % binascii.b2a_hex(raw_data)
input.seek(0)
a2 = array.array('i')
a2.fromfile(input, len(a))
print "len(a):", len(a)
print "A2:", a2
import struct
class StructTest(object):
def test_struct(self):
values = (1, 'ab', 2.7)
s = struct.Struct('I 2s f')
packed_data = s.pack(*values)
print 'Original values:', values
print 'Format string:', s.format
print 'Uses:', s.size, 'bytes'
print 'packed value:', binascii.hexlify(packed_data)
print binascii.unhexlify("61")
def test_struct_endianness(self):
values = (1, 'ab', 2.7)
endianness = [('@', 'native,native'),
('=', 'native, standard'),
('<', 'little-endian'),
('>', 'big-endian'),
('!', 'network'),
]
for code, name in endianness:
s = struct.Struct(code + ' I 2s f')
packed_data = s.pack(*values)
print
print 'format string:', s.format, 'for', name
print 'uses:', s.size, 'bytes'
print 'packed value:', binascii.hexlify(packed_data)
print 'unpacked value:', s.unpack(packed_data)
def struct_buffer(self):
s = struct.Struct('I 2s f')
values = (1, 'ab', 2.7)
print 'original:', values
print
print 'ctypes string buffer'
import ctypes
b = ctypes.create_string_buffer(s.size)
print 'before:', binascii.hexlify(b.raw)
s.pack_into(b, 0, *values)
print 'After :', binascii.hexlify(b.raw)
print 'unpacked:', s.unpack_from(b, 0)
print
print 'array'
import array
a = array.array('c', '\0' * s.size)
print 'before:', binascii.hexlify(a)
s.pack_into(a, 0, *values)
print 'after:', binascii.hexlify(a)
print 'unpacked:', s.unpack_from(a, 0)
import datetime
class DatetimeTest(object):
def time_test(self):
c_time = datetime.datetime.now()
print c_time.strftime('%Y-%m-%d %H:%M:%S')
if __name__ == '__main__':
#pdb.set_trace()
# unittest.main()
#MyObj(5).go()
#GetWalk("D:\djtest").get_walk()
#BinArray().testbin()
#FileToArray().read_file()
#with open("test.txt", "rb") as a:
# print a.read()
#StructTest().test_struct_endianness()
#StructTest().struct_buffer()
#DatetimeTest().time_test()
StructTest().test_struct()
```
#### File: card/study/cap_moto2.py
```python
import urllib
import urllib2
import Queue
import threading
import re
import HTMLParser
import chardet
from BeautifulSoup import BeautifulSoup
import shelve
from contextlib import closing
#import sys
#question_word = "����"
html_parser = HTMLParser.HTMLParser()
#seed_url = r'http://www.dmoz.org'
#root_url = r'http://www.wikipedia.org'
root_url = r'http://www.cheapcycleparts.com/'
root_url_key = root_url.split('.')[-2]
print root_url_key
UserAgent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36'
#root_url = "http://www.news.baidu.com/ns?cl=2&rn=20&tn=news&word=" + urllib.quote(question_word.decode(sys.stdin.encoding).encode('gbk'))
q = Queue.Queue()
q2 = Queue.Queue()
all_list = [root_url]
def write_to_txt(str_text):
tmp_file = open('cap_moto.txt','a')
tmp_file.write(str_text)
    tmp_file.close()
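# Crawl pipeline: get_url() seeds the queue from the root page, and each
# worker then dispatches on a depth tag (1..5); get_url_2..get_url_5 parse
# successive levels of the parts catalogue and push the next level's links.
# Every page's outgoing links are also persisted to test_shelf.db.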
def pre_url(seed_url):
#test = urllib.urlopen(seed_url)
#html = test.read()
request = urllib2.Request(seed_url)
request.add_header('Accept', "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp")
request.add_header('Accept-Encoding', "*")
request.add_header('User-Agent', UserAgent)
try:
html_1 = urllib2.urlopen(request).read()
except Exception,ex:
print Exception,':',ex
return
encoding_dict = chardet.detect(html_1)
#print encoding
web_encoding = encoding_dict['encoding']
print "encoding ~~~~~~~~",web_encoding
if web_encoding == 'utf-8' or web_encoding == 'UTF-8':
html = html_1
else :
html = html_1.decode('gbk','ignore').encode('utf-8')
return html
#test.close()
def get_url(seed_url):
global all_list
url_list = []
html = pre_url(seed_url)
soup = BeautifulSoup(html)
[x.extract() for x in soup.findAll('script')]
#soup_text = BeautifulSoup(html,"lxml")
#print soup
tags1 = soup.findAll('ul',id="slider")
print tags1
#return
#_text = soup.findAll('div', id="bodyContent")
#_text = soup.findAll('div', attrs = {'class':'field-items'})
#_text = soup.findAll('td', attrs = {'class':'t_f'})
_text = soup.findAll('p')
#print soup_text.get_text()
#print len(_text)
"""
if _text != []:
#text_len = 0
#for i in _text:
# print i.text.encode('utf8')
text_all = "\n\r".join([i.text for i in _text])
text_all = html_parser.unescape(text_all)
pattern_no_html = re.compile(r'<[^>]+>')
chinese_txt2 = ur".*?发表于.*\d:\d\d|.*编辑|http.*html"
pattern_no_some = re.compile(chinese_txt2)
text_all = pattern_no_html.sub('',text_all)
text_all = pattern_no_some.sub('\n',text_all)
print "current text_all size is:", len(text_all)
if len(text_all)>1000:
#print text_all
write_to_txt(text_all)
#print "~~~~~~~~~~~~~~~~~~~~~~~the len of text_all is ",len(text_all)
"""
#tags2 = soup.findAll('a', {'target': '_blank'})
#tags3 = soup.findAll('tr', {'class': 'bg'})
pattern = re.compile(r'href="([^"]*)"')
pattern_2 = re.compile(root_url)
#pattern_2 = re.compile(r'^http')
pattern_3 = re.compile(r'^/[^/]')
pattern_4 = re.compile(r'^//')
pattern_5 = re.compile(r'http.*%s'%root_url_key)
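    # The patterns classify each href so it can be normalised to an absolute
    # URL: pattern_2 catches same-site absolute links, pattern_3 root-relative
    # "/x" paths, pattern_4 protocol-relative "//x" links, and pattern_5 any
    # http URL containing the site keyword; anything else is discarded.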
for i in tags1:
#print i
result_all = pattern.findall(str(i))
if result_all !=[]:
for j in result_all:
#print "the j is~~~~~~~~~",j
if pattern_2.search(j):
#print "!!!!!!!!!!!!!!"
#print j
#print "~!!!!!!!!!!!!!!!!"
whole_url = j
elif pattern_3.search(j):
                    whole_url = root_url.rstrip('/') + j  # root_url already ends with '/'
#print whole_url
elif pattern_4.search(j):
whole_url = "http:" +j
#print whole_url
elif pattern_5.search(j):
#print "~~~~~~~~~~~~~~~~"
#print j
whole_url = j
#print "~~~~~~~~~~~~~~~~"
#whole_url = root_url + j
else:
continue
url_list.append((whole_url,1))
url_list = list(set(url_list))
#url_list1 = list({(i,1) for i in url_list})
print url_list
creat_shelf(seed_url,url_list)
#return
#print "current urllist size is:", len(url_list)
push_queue(url_list)
all_list.extend(url_list)
all_list = list(set(all_list))
#print "all list size is:", len(all_list)
#return url_list
def get_url_2(seed_url):
global all_list
url_list = []
html = pre_url(seed_url)
#print html
soup = BeautifulSoup(html)
tags1 = soup.findAll('div', attrs = {'class':'fiche_link'})
print tags1
pattern = re.compile(r'href="([^"]*)"')
pattern_2 = re.compile(root_url)
#pattern_2 = re.compile(r'^http')
pattern_3 = re.compile(r'^/[^/]')
pattern_4 = re.compile(r'^//')
pattern_5 = re.compile(r'http.*%s'%root_url_key)
for i in tags1:
#print i
result_all = pattern.findall(str(i))
if result_all !=[]:
for j in result_all:
#print "the j is~~~~~~~~~",j
if pattern_2.search(j):
#print "!!!!!!!!!!!!!!"
#print j
#print "~!!!!!!!!!!!!!!!!"
whole_url = j
elif pattern_3.search(j):
                    whole_url = root_url.rstrip('/') + j  # root_url already ends with '/'
#print whole_url
elif pattern_4.search(j):
whole_url = "http:" +j
#print whole_url
elif pattern_5.search(j):
#print "~~~~~~~~~~~~~~~~"
#print j
whole_url = j
#print "~~~~~~~~~~~~~~~~"
#whole_url = root_url + j
else:
continue
url_list.append((whole_url,2))
url_list = list(set(url_list))
#url_list1 = list({(i,1) for i in url_list})
print url_list
creat_shelf(seed_url,url_list)
#print "current urllist size is:", len(url_list)
push_queue(url_list)
all_list.extend(url_list)
all_list = list(set(all_list))
def get_url_3(seed_url):
global all_list
url_list = []
html = pre_url(seed_url)
#print html
soup = BeautifulSoup(html)
print "oooooooooooo0000000000000000000000000000000000oooooo",html
tags1 = soup.findAll('ul', attrs = {'class':'partsubselect'})
print tags1
pattern = re.compile(r'href="([^"]*)"')
pattern_2 = re.compile(root_url)
#pattern_2 = re.compile(r'^http')
pattern_3 = re.compile(r'^/[^/]')
pattern_4 = re.compile(r'^//')
pattern_5 = re.compile(r'http.*%s'%root_url_key)
for i in tags1:
#print i
result_all = pattern.findall(str(i))
if result_all !=[]:
for j in result_all:
#print "the j is~~~~~~~~~",j
if pattern_2.search(j):
#print "!!!!!!!!!!!!!!"
#print j
#print "~!!!!!!!!!!!!!!!!"
whole_url = j
elif pattern_3.search(j):
                    whole_url = root_url.rstrip('/') + j  # root_url already ends with '/'
#print whole_url
elif pattern_4.search(j):
whole_url = "http:" +j
#print whole_url
elif pattern_5.search(j):
#print "~~~~~~~~~~~~~~~~"
#print j
whole_url = j
#print "~~~~~~~~~~~~~~~~"
#whole_url = root_url + j
else:
continue
url_list.append((whole_url,3))
url_list = list(set(url_list))
#url_list1 = list({(i,1) for i in url_list})
print url_list
creat_shelf(seed_url,url_list)
#print "current urllist size is:", len(url_list)
push_queue(url_list)
all_list.extend(url_list)
all_list = list(set(all_list))
def get_url_4(seed_url):
global all_list
url_list = []
html = pre_url(seed_url)
soup = BeautifulSoup(html)
tags1 = soup.findAll('ul', attrs = {'class':'partsubselect'})
print tags1
pattern = re.compile(r'href="([^"]*)"')
pattern_2 = re.compile(root_url)
#pattern_2 = re.compile(r'^http')
pattern_3 = re.compile(r'^/[^/]')
pattern_4 = re.compile(r'^//')
pattern_5 = re.compile(r'http.*%s'%root_url_key)
for i in tags1:
#print i
result_all = pattern.findall(str(i))
if result_all !=[]:
for j in result_all:
#print "the j is~~~~~~~~~",j
if pattern_2.search(j):
#print "!!!!!!!!!!!!!!"
#print j
#print "~!!!!!!!!!!!!!!!!"
whole_url = j
elif pattern_3.search(j):
                    whole_url = root_url.rstrip('/') + j  # root_url already ends with '/'
#print whole_url
elif pattern_4.search(j):
whole_url = "http:" +j
#print whole_url
elif pattern_5.search(j):
#print "~~~~~~~~~~~~~~~~"
#print j
whole_url = j
#print "~~~~~~~~~~~~~~~~"
#whole_url = root_url + j
else:
continue
url_list.append((whole_url,4))
url_list = list(set(url_list))
#url_list1 = list({(i,1) for i in url_list})
print url_list
creat_shelf(seed_url,url_list)
#print "current urllist size is:", len(url_list)
push_queue(url_list)
all_list.extend(url_list)
all_list = list(set(all_list))
def get_url_5(seed_url):
global all_list
url_list = []
html = pre_url(seed_url)
soup = BeautifulSoup(html)
print "5555555555555555555555555555555555555555555555"
#print html
tags1 = soup.findAll('ul', attrs = {'class':'partlistrow'})
print tags1
tag_pic = soup.findAll('div',id = 'diagram')
print tag_pic
pattern = re.compile(r'src="([^"]*)"')
pattern_2 = re.compile(root_url)
pattern_3 = re.compile(r'^/[^/]')
pattern_4 = re.compile(r'^//')
pattern_5 = re.compile(r'http.*%s'%root_url_key)
for i in tags1:
result_all = pattern.findall(str(i))
if result_all !=[]:
for j in result_all:
#print "the j is~~~~~~~~~",j
if pattern_2.search(j):
#print "!!!!!!!!!!!!!!"
#print j
#print "~!!!!!!!!!!!!!!!!"
whole_url = j
elif pattern_3.search(j):
                    whole_url = root_url.rstrip('/') + j  # root_url already ends with '/'
#print whole_url
elif pattern_4.search(j):
whole_url = "http:" +j
#print whole_url
elif pattern_5.search(j):
#print "~~~~~~~~~~~~~~~~"
#print j
whole_url = j
#print "~~~~~~~~~~~~~~~~"
#whole_url = root_url + j
else:
continue
url_list.append((whole_url,5))
url_list = list(set(url_list))
#url_list1 = list({(i,1) for i in url_list})
print url_list
creat_shelf(seed_url,url_list)
#print "current urllist size is:", len(url_list)
#push_queue(url_list)
#all_list.extend(url_list)
#all_list = list(set(all_list))
def creat_shelf(key,value):
with closing(shelve.open('test_shelf.db')) as s:
s[key]=value
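# Note: shelve is not safe for concurrent writers; with nine worker threads
# calling creat_shelf() this relies on the GIL and luck, and wrapping the
# open/write in a lock would be the more robust choice.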
def print_shelf():
with closing(shelve.open('test_shelf.db')) as s:
print [a for a in s]
print [s[a] for a in s]
def push_queue(url_list):
for i in url_list:
if i not in all_list:
q.put(i)
def work_process():
    while 1:
        next_url = q.get()
        print "the q next url is: ", next_url[0]
        print "the q next url id: ", next_url[1]
        print "queue number is:", q.qsize()
        if next_url[1] == 1:
            get_url_2(next_url[0])
            print "1111111111111111111"
        elif next_url[1] == 2:
            get_url_3(next_url[0])
            print "222222222222222222222"
        elif next_url[1] == 3:
            get_url_4(next_url[0])
            print "3333333333333333"
        elif next_url[1] == 4:
            get_url_5(next_url[0])
            print "44444444444444444"
        elif next_url[1] == 5:
            #get_url_5(next_url[0])
            print "the last~~~~~~~~"
        # mark the item done only after its children have been queued,
        # otherwise q.join() in the main thread can return while work
        # from this item is still being generated
        q.task_done()
#print_shelf()
get_url(root_url)
thread_list = [threading.Thread(target = work_process),
threading.Thread(target = work_process),
threading.Thread(target = work_process),
threading.Thread(target = work_process),
threading.Thread(target = work_process),
threading.Thread(target = work_process),
threading.Thread(target = work_process),
threading.Thread(target = work_process),
threading.Thread(target = work_process)]
#new_thread = threading.Thread(target = get_url, args=(next_url,))
for i in thread_list:
i.setDaemon(True)
i.start()
# i.join()
q.join()
```
#### File: daily_practice/tools/GetMem.py
```python
from serial import Serial
import re
from threading import Thread
import time
import datetime
import pygal
import os
class FilterMem(object):
def __init__(self, port, baudrate):
self.serial_obj = Serial()
        self.serial_obj.port = port-1  # old pyserial takes a zero-based device index, so COM9 -> 8
self.serial_obj.baudrate = baudrate
self.connect_uart()
def connect_uart(self):
try:
self.serial_obj.open()
except Exception, e:
if self.serial_obj.isOpen():
self.serial_obj.close()
print e
return 0
def send_thread(self, _command, _period):
self.sent_thread = Thread(target=self.sendfunc,args=(_command, _period))
self.sent_thread.setDaemon(True)
self.sent_thread.start()
#self.getmem()
def getmem(self, keyword, file_name):
today = datetime.date.today()
self.file_name = r"%s_%s" % (file_name, today)
x_list = []
y_list = []
with open("%s.log"%self.file_name, "w") as f:
while 1:
self.info = self.serial_obj.readline()
print self.info
current = datetime.datetime.now()
f_time = "%s-%s-%s %s:%s:%s" % (current.year, current.month, current.day, current.hour, current.minute, current.second)
f.write("%s:%s" % (f_time, self.info))
match_info = re.search("%s.+?(\d+).+bytes" % keyword, self.info)
if match_info:
mem_val = match_info.group(1)
y_list.append(int(mem_val))
x_list.append(current)
print mem_val
if len(y_list)%10 == 0:
self.make_pic(x_list, y_list)
#print match_info.group(0)
#print "bbb"
#time.sleep(1)
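    # The regex above expects the device's stats output to contain a line
    # like "Used mem: 123456 bytes" (hypothetical format); the first integer
    # after the keyword is what gets charted.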
def sendfunc(self, _char, _period):
self.serial_obj.write("mon\n")
while 1:
self.serial_obj.write("%s\n" % _char)
time.sleep(_period)
#print _char
    # render the collected memory samples as a pygal line chart
def make_pic(self, x_list, y_list):
line_chart = pygal.Line()
line_chart.title = 'Mem usage evolution (in %)'
line_chart.x_labels = x_list
line_chart.add('Mem', y_list)
        f = open('%s.html' % self.file_name, 'w')
        f.write(line_chart.render())
        f.close()
if __name__ == "__main__":
my_obj = FilterMem(9, 115200)
my_obj.send_thread("mid", 10)
#my_obj.connect_uart()
my_obj.getmem("Used")
# my_obj.sent_thread.join()
```
#### File: qa_study/hdcp_app/hdcp.py
```python
import os
import sys
import hashlib
import re
import redis
import MySQLdb
import chardet
import ConfigParser
import subprocess
import shlex
import binascii
import xlrd
#import datetime
#from dateutil.parser import parse
from PyQt4 import QtGui, QtCore
from hashlib import sha256
from hmac import HMAC
import time
import threading
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
def encrypt_password(password, salt=None):
"""Hash password on the fly."""
if salt is None:
salt = os.urandom(8) # 64 bits.
assert 8 == len(salt)
assert isinstance(salt, str)
if isinstance(password, unicode):
        password = password.encode('utf-8')  # the HMAC below needs a byte string
assert isinstance(password, str)
result = password
for _ in xrange(10):
result = HMAC(result, salt, sha256).digest()
return salt + result
def validate_password(hashed, input_password):
return hashed == encrypt_password(input_password, salt=hashed[:8])
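# The stored value is the 8-byte random salt followed by the 32-byte
# HMAC-SHA256 digest (10 chained iterations); validate_password() re-derives
# the digest from the salt recovered in the first 8 bytes.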
def execCLI(cmd_line, shell=True):
cmd_args = shlex.split(cmd_line, posix=False)
cmd_exec = subprocess.Popen(cmd_args,bufsize=0,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=shell)
output,strrout= cmd_exec.communicate()
cmd_exec.wait()
return (cmd_exec.returncode, output, strrout)
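# Note: shlex.split() plus shell=True is an unusual pairing; on Windows the
# argument list is joined back into a single command line, which is
# presumably why this works here, but shell=False with the list would be
# the more conventional call.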
class Ui_Register(QtGui.QDialog):
def __init__(self, db, parent=None):
self.db = db
QtGui.QDialog.__init__(self, parent)
self.resize(429, 253)
self.label = QtGui.QLabel(self)
self.label.setGeometry(QtCore.QRect(60, 60, 261, 16))
self.label.setObjectName(_fromUtf8("label"))
self.lineEdit = QtGui.QLineEdit(self)
self.lineEdit.setGeometry(QtCore.QRect(60, 90, 231, 20))
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.label_2 = QtGui.QLabel(self)
self.label_2.setGeometry(QtCore.QRect(60, 120, 231, 16))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.lineEdit_2 = QtGui.QLineEdit(self)
self.lineEdit_2.setGeometry(QtCore.QRect(60, 150, 231, 20))
self.lineEdit_2.setObjectName(_fromUtf8("lineEdit_2"))
self.pushButton = QtGui.QPushButton(self)
self.pushButton.setGeometry(QtCore.QRect(140, 200, 75, 23))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.pushButton_2 = QtGui.QPushButton(self)
self.pushButton_2.setGeometry(QtCore.QRect(220, 200, 75, 23))
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.pushButton.clicked.connect(self.login)
self.pushButton_2.clicked.connect(self.reject)
self.retranslateUi()
# QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self):
self.setWindowTitle(_translate("Dialog", u"注册管理员密码", None))
self.label.setText(_translate("Dialog", u"初次使用,请注册管理员(admin)密码:", None))
self.label_2.setText(_translate("Dialog", u"请再次输入:", None))
self.pushButton.setText(_translate("Dialog", u"确定", None))
self.pushButton_2.setText(_translate("Dialog", u"取消", None))
def login(self):
password_1 = str(self.lineEdit.text())
password_2 = str(self.lineEdit_2.text())
if not password_1 and not password_2:
QtGui.QMessageBox.critical(self, 'Error', u'密码不能为空!')
return
if password_1 != password_2:
QtGui.QMessageBox.critical(self, 'Error', u'两次输入密码不一致, 请重新输入!')
self.lineEdit.clear()
self.lineEdit_2.clear()
else:
print 'write into db'
            # encr_pwd = encrypt_password(password_1)
self.db.add_user('admin', password_1)
self.accept()
class Ui_Login(QtGui.QDialog):
def __init__(self, db, parent=None):
self.db = db
QtGui.QDialog.__init__(self, parent)
self.resize(432, 257)
self.label = QtGui.QLabel(self)
self.label.setGeometry(QtCore.QRect(60, 80, 61, 21))
self.label.setObjectName(_fromUtf8("label"))
self.groupBox = QtGui.QGroupBox(self)
self.groupBox.setGeometry(QtCore.QRect(30, 40, 361, 181))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.pushButton = QtGui.QPushButton(self.groupBox)
self.pushButton.setGeometry(QtCore.QRect(130, 130, 75, 23))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.pushButton_2 = QtGui.QPushButton(self.groupBox)
self.pushButton_2.setGeometry(QtCore.QRect(210, 130, 75, 23))
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.pushButton.clicked.connect(self.login)
self.pushButton_2.clicked.connect(self.reject)
self.lineEdit = QtGui.QLineEdit(self.groupBox)
self.lineEdit.setGeometry(QtCore.QRect(90, 40, 191, 21))
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.lineEdit_2 = QtGui.QLineEdit(self.groupBox)
self.lineEdit_2.setGeometry(QtCore.QRect(90, 80, 191, 21))
self.lineEdit_2.setObjectName(_fromUtf8("lineEdit_2"))
self.lineEdit_2.setEchoMode(QtGui.QLineEdit.Password)
self.lineEdit.setPlaceholderText('username')
self.lineEdit_2.setPlaceholderText('password')
self.label_3 = QtGui.QLabel(self)
self.label_3.setGeometry(QtCore.QRect(60, 120, 61, 21))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.retranslateUi()
# QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self):
self.setWindowTitle(_translate("Dialog", u"HDCP Key 管理工具", None))
self.label.setText(_translate("Dialog", u"用户名:", None))
self.groupBox.setTitle(_translate("Dialog", u"登录", None))
self.label_3.setText(_translate("Dialog", u"密码:", None))
self.pushButton.setText(_translate("Dialog", u"确定", None))
self.pushButton_2.setText(_translate("Dialog", u"取消", None))
def login(self):
try:
password = self.db.get_ps(self.lineEdit.text())
except redis.exceptions.ConnectionError:
QtGui.QMessageBox.critical(self, 'Error', u'Redis server 没有运行!')
return
if password == None:
QtGui.QMessageBox.critical(self, 'Error', u'用户名: "%s" 不存在!'%self.lineEdit.text())
elif password == self.lineEdit_2.text():
self.current_user = self.lineEdit.text()
self.accept()
else:
QtGui.QMessageBox.critical(self, 'Error', u'密码不正确!')
#class WorkThread(QtCore.QThread):
# def __init__(self, parent = None, _func):
# super(WorkThread, self).__init__(parent)
# self.func = _func(
#def run(self):
# _func()
class Ui_MainWindow(QtGui.QMainWindow):
sinOut_err = QtCore.pyqtSignal(str)
sinOut_progress_bar = QtCore.pyqtSignal(int)
sinOut_info = QtCore.pyqtSignal(str, str)
sinOut_enable = QtCore.pyqtSignal(bool)
sinOut_status = QtCore.pyqtSignal()
def __init__(self, redis, db, user, parent = None):
self.redis_inst = redis
self.db = db
self.user = user
QtGui.QMainWindow.__init__(self, parent)
self.setObjectName(_fromUtf8("Dialog"))
self.resize(942, 712)
self.tabWidget = QtGui.QTabWidget(self)
self.tabWidget.setGeometry(QtCore.QRect(0, 30, 931, 661))
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.mac_pre = QtGui.QLineEdit(self.tab)
self.mac_pre.setGeometry(QtCore.QRect(250, 110, 311, 23))
self.mac_pre.setObjectName(_fromUtf8("mac_pre"))
self.mac_button = QtGui.QPushButton(self.tab)
self.mac_button.setGeometry(QtCore.QRect(630, 110, 75, 23))
self.mac_button.setObjectName(_fromUtf8("mac_button"))
self.mac_button.clicked.connect(self.mac_import)
self.mac_label = QtGui.QLabel(self.tab)
self.mac_label.setGeometry(QtCore.QRect(160, 110, 85, 23))
self.mac_label.setObjectName(_fromUtf8("mac_label"))
self.file_edit = QtGui.QLineEdit(self.tab)
self.file_edit.setGeometry(QtCore.QRect(250, 150, 311, 23))
self.file_edit.setObjectName(_fromUtf8("file_edit"))
self.import_button = QtGui.QPushButton(self.tab)
self.import_button.setGeometry(QtCore.QRect(630, 150, 75, 23))
self.import_button.setObjectName(_fromUtf8("import_button"))
self.import_button.clicked.connect(self.import_)
#self.sinOut.connect(self.outText)
self.sinOut_err.connect(self.warning)
self.sinOut_progress_bar.connect(self.progress_bar)
self.sinOut_info.connect(self.info)
self.sinOut_enable.connect(self.enable)
self.sinOut_status.connect(self.display_status)
self.file_import = QtGui.QPushButton(self.tab)
self.file_import.setGeometry(QtCore.QRect(160, 150, 90, 23))
self.file_import.setObjectName(_fromUtf8("file_import"))
self.file_import.clicked.connect(lambda: self.chose_file(1))
self.key_version = QtGui.QLabel(self.tab)
self.key_version.setGeometry(QtCore.QRect(160, 200, 71, 16))
self.key_version.setObjectName(_fromUtf8("key_version"))
self.x1 = QtGui.QRadioButton(self.tab)
self.x1.setGeometry(QtCore.QRect(260, 200, 89, 16))
self.x1.setObjectName(_fromUtf8("x1"))
self.x2 = QtGui.QRadioButton(self.tab)
self.x2.setGeometry(QtCore.QRect(400, 200, 89, 16))
self.x2.setObjectName(_fromUtf8("x2"))
self.key_version_group = QtGui.QButtonGroup()
self.key_version_group.addButton(self.x1)
self.key_version_group.addButton(self.x2)
self.x1.setChecked(True)
self.title_1x_tx_total = QtGui.QLabel(self.tab)
self.title_1x_tx_total.setGeometry(QtCore.QRect(160, 260, 124, 16))
self.title_1x_tx_total.setObjectName(_fromUtf8("title_1x_tx_total"))
self.num_1x_tx_left = QtGui.QLabel(self.tab)
self.num_1x_tx_left.setGeometry(QtCore.QRect(560, 260, 54, 16))
self.num_1x_tx_left.setObjectName(_fromUtf8("num_1x_tx_left"))
self.num_1x_rx_left = QtGui.QLabel(self.tab)
self.num_1x_rx_left.setGeometry(QtCore.QRect(560, 300, 80, 16))
self.num_1x_rx_left.setObjectName(_fromUtf8("num_1x_rx_left"))
self.title_1x_tx_left = QtGui.QLabel(self.tab)
self.title_1x_tx_left.setGeometry(QtCore.QRect(500, 260, 80, 16))
self.title_1x_tx_left.setObjectName(_fromUtf8("title_1x_tx_left"))
self.num_1x_tx_total = QtGui.QLabel(self.tab)
self.num_1x_tx_total.setGeometry(QtCore.QRect(330, 260, 100, 16))
self.num_1x_tx_total.setObjectName(_fromUtf8("num_1x_tx_total"))
self.num_1x_rx_total = QtGui.QLabel(self.tab)
self.num_1x_rx_total.setGeometry(QtCore.QRect(330, 300, 54, 16))
self.num_1x_rx_total.setObjectName(_fromUtf8("num_1x_rx_total"))
self.title_1x_rx_left = QtGui.QLabel(self.tab)
self.title_1x_rx_left.setGeometry(QtCore.QRect(500, 300, 51, 16))
self.title_1x_rx_left.setObjectName(_fromUtf8("title_1x_rx_left"))
self.title_1x_rx_total = QtGui.QLabel(self.tab)
self.title_1x_rx_total.setGeometry(QtCore.QRect(160, 300, 124, 16))
self.title_1x_rx_total.setObjectName(_fromUtf8("title_1x_rx_total"))
self.title_2x_tx_total = QtGui.QLabel(self.tab)
self.title_2x_tx_total.setGeometry(QtCore.QRect(160, 340, 124, 16))
self.title_2x_tx_total.setObjectName(_fromUtf8("title_2x_tx_total"))
self.num_2x_tx_left = QtGui.QLabel(self.tab)
self.num_2x_tx_left.setGeometry(QtCore.QRect(560, 340, 80, 16))
self.num_2x_tx_left.setObjectName(_fromUtf8("num_2x_tx_left"))
self.num_2x_rx_left = QtGui.QLabel(self.tab)
self.num_2x_rx_left.setGeometry(QtCore.QRect(560, 380, 80, 16))
self.num_2x_rx_left.setObjectName(_fromUtf8("num_2x_rx_left"))
self.title_2x_tx_left = QtGui.QLabel(self.tab)
self.title_2x_tx_left.setGeometry(QtCore.QRect(500, 340, 51, 16))
self.title_2x_tx_left.setObjectName(_fromUtf8("title_2x_tx_left"))
self.num_2x_tx_total = QtGui.QLabel(self.tab)
self.num_2x_tx_total.setGeometry(QtCore.QRect(330, 340, 54, 16))
self.num_2x_tx_total.setObjectName(_fromUtf8("num_2x_tx_total"))
self.num_2x_rx_total = QtGui.QLabel(self.tab)
self.num_2x_rx_total.setGeometry(QtCore.QRect(330, 380, 54, 16))
self.num_2x_rx_total.setObjectName(_fromUtf8("num_2x_rx_total"))
self.title_2x_rx_left = QtGui.QLabel(self.tab)
self.title_2x_rx_left.setGeometry(QtCore.QRect(500, 380, 51, 16))
self.title_2x_rx_left.setObjectName(_fromUtf8("title_2x_rx_left"))
self.title_2x_rx_total = QtGui.QLabel(self.tab)
self.title_2x_rx_total.setGeometry(QtCore.QRect(160, 380, 124, 16))
self.title_2x_rx_total.setObjectName(_fromUtf8("title_2x_rx_total"))
self.title_mac_total = QtGui.QLabel(self.tab)
self.title_mac_total.setGeometry(QtCore.QRect(160, 420, 124, 16))
self.title_mac_total.setObjectName(_fromUtf8("title_mac_total"))
self.num_mac_left = QtGui.QLabel(self.tab)
self.num_mac_left.setGeometry(QtCore.QRect(560, 420, 80, 16))
self.num_mac_left.setObjectName(_fromUtf8("num_mac_left"))
self.title_mac_left = QtGui.QLabel(self.tab)
self.title_mac_left.setGeometry(QtCore.QRect(500, 420, 51, 16))
self.title_mac_left.setObjectName(_fromUtf8("title_mac_left"))
self.num_mac_total = QtGui.QLabel(self.tab)
self.num_mac_total.setGeometry(QtCore.QRect(330, 420, 54, 16))
self.num_mac_total.setObjectName(_fromUtf8("num_mac_total"))
self.tabWidget.addTab(self.tab, _fromUtf8(""))
'''tab 2'''
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.tableWidget = QtGui.QTableWidget(self.tab_2)
self.tableWidget.setGeometry(QtCore.QRect(20, 20, 870, 60))
self.tableWidget.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget.setRowCount(2)
self.tableWidget.setColumnCount(4)
self.tableWidget.setObjectName(_fromUtf8("tableWidget"))
# self.tableWidget.itemClicked.connect(self.get_item_text)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget.setItem(0, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget.setItem(0, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget.setItem(0, 2, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget.setItem(0, 3, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget.setItem(1, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget.setItem(1, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget.setItem(1, 2, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget.setItem(1, 3, item)
self.tableWidget.horizontalHeader().setVisible(False)
self.tableWidget.horizontalHeader().setDefaultSectionSize(180)
self.tableWidget.verticalHeader().setVisible(False)
self.tableWidget.setColumnWidth(0,175)
self.tableWidget.setColumnWidth(1,250)
self.tableWidget.setColumnWidth(2,175)
self.tableWidget.setColumnWidth(3,268)
self.tableWidget.setRowHeight(1, 28)
'''tableWidget_2'''
self.tableWidget_2 = QtGui.QTableWidget(self.tab_2)
self.tableWidget_2.setGeometry(QtCore.QRect(20, 80, 870, 390))
self.tableWidget_2.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget_2.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget_2.setRowCount(13)
self.tableWidget_2.setColumnCount(5)
self.tableWidget_2.setObjectName(_fromUtf8("tableWidget_2"))
# self.tableWidget_2.itemClicked.connect(self.get_item_text)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(0, 0, item)
self.radio = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(0, 1, self.radio)
self.radio_1 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(0, 2, self.radio_1)
self.radio_2 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(0, 3, self.radio_2)
self.radio_1.setChecked(True)
self.buttonGroup = QtGui.QButtonGroup()
self.buttonGroup.addButton(self.radio)
self.buttonGroup.addButton(self.radio_1)
self.buttonGroup.addButton(self.radio_2)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(1, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(2, 0, item)
self.radio_3 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(1, 1, self.radio_3)
self.radio_4 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(1, 2, self.radio_4)
self.radio_3.setChecked(True)
self.buttonGroup_1 = QtGui.QButtonGroup()
self.buttonGroup_1.addButton(self.radio_3)
self.buttonGroup_1.addButton(self.radio_4)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(3, 0, item)
self.radio_5 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(3, 2, self.radio_5)
self.radio_6 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(4, 2, self.radio_6)
self.radio_20 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(5, 2, self.radio_20)
self.radio_21 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(6, 2, self.radio_21)
self.radio_5.setChecked(True)
self.buttonGroup_2 = QtGui.QButtonGroup()
self.buttonGroup_2.addButton(self.radio_5)
self.buttonGroup_2.addButton(self.radio_6)
self.buttonGroup_2.addButton(self.radio_20)
self.buttonGroup_2.addButton(self.radio_21)
self.radio_22 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(3, 4, self.radio_22)
self.radio_23 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(4, 4, self.radio_23)
self.radio_24 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(5, 4, self.radio_24)
self.radio_25 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(6, 4, self.radio_25)
self.radio_22.setChecked(True)
self.buttonGroup_6 = QtGui.QButtonGroup()
self.buttonGroup_6.addButton(self.radio_22)
self.buttonGroup_6.addButton(self.radio_23)
self.buttonGroup_6.addButton(self.radio_24)
self.buttonGroup_6.addButton(self.radio_25)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(4, 0, item)
self.radio_7 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(2, 1, self.radio_7)
self.radio_8 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(2, 2, self.radio_8)
self.radio_9 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(2, 3, self.radio_9)
self.radio_7.setChecked(True)
self.buttonGroup_3 = QtGui.QButtonGroup()
self.buttonGroup_3.addButton(self.radio_7)
self.buttonGroup_3.addButton(self.radio_8)
self.buttonGroup_3.addButton(self.radio_9)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(5, 0, item)
# self.radio_10 = QtGui.QRadioButton()
# self.radio_11 = QtGui.QRadioButton()
# self.radio_10.setChecked(True)
#
#
# self.buttonGroup_4 = QtGui.QButtonGroup()
# self.buttonGroup_4.addButton(self.radio_10)
# self.buttonGroup_4.addButton(self.radio_11)
# self.buttonGroup_2.addButton(self.radio_15)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(6, 0, item)
self.radio_12 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(7, 1, self.radio_12)
self.radio_12.setChecked(True)
self.radio_13 = QtGui.QRadioButton()
self.tableWidget_2.setCellWidget(7, 2, self.radio_13)
self.buttonGroup_5 = QtGui.QButtonGroup()
self.buttonGroup_5.addButton(self.radio_12)
self.buttonGroup_5.addButton(self.radio_13)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(7, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(8, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget_2.setItem(8, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(9, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget_2.setItem(9, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(10, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget_2.setItem(10, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(11, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget_2.setItem(11, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(12, 0, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget_2.setItem(12, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(3, 1, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_2.setItem(3, 3, item)
self.comboBox_3 = QtGui.QComboBox()
# # self.comboBox.setGeometry(QtCore.QRect(130, 60, 101, 22))
self.comboBox_3.setObjectName(_fromUtf8("comboBox"))
self.comboBox_3.setEditable(True)
self.tableWidget_2.setCellWidget(8, 1, self.comboBox_3)
self.tableWidget_2.horizontalHeader().setVisible(False)
self.tableWidget_2.horizontalHeader().setDefaultSectionSize(170)
self.tableWidget_2.verticalHeader().setVisible(False)
self.tableWidget_2.setWordWrap(True)
self.tableWidget_2.setSpan(0,3,1,2)
self.tableWidget_2.setSpan(1,2,1,3)
self.tableWidget_2.setSpan(2,3,1,2)
# self.tableWidget_2.setSpan(7,2,1,2)
self.tableWidget_2.setSpan(3,3,4,1)
self.tableWidget_2.setSpan(3,1,4,1)
self.tableWidget_2.setSpan(3,0,4,1)
self.tableWidget_2.setSpan(7,0,2,1)
self.tableWidget_2.setSpan(7,2,1,3)
self.tableWidget_2.setSpan(8,2,1,3)
for i in range(9, 13):
self.tableWidget_2.setSpan(i,1,1,4)
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.tableWidget_2.setColumnWidth(0,188)
self.tableWidget_2.setRowHeight(11, 28)
self.tableWidget_3 = QtGui.QTableWidget(self.tab_2)
self.tableWidget_3.setGeometry(QtCore.QRect(20, 468, 870, 60))
self.tableWidget_3.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget_3.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.tableWidget_3.setRowCount(2)
self.tableWidget_3.setColumnCount(4)
self.tableWidget_3.horizontalHeader().setVisible(False)
self.tableWidget_3.horizontalHeader().setDefaultSectionSize(223)
self.tableWidget_3.verticalHeader().setVisible(False)
self.tableWidget_3.setWordWrap(True)
self.tableWidget_3.setColumnWidth(0,50)
self.tableWidget_3.setColumnWidth(1,110)
self.tableWidget_3.setColumnWidth(2,110)
self.tableWidget_3.setColumnWidth(3,598)
self.tableWidget_3.setRowHeight(1,28)
for i in range(0, 4):
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.NoItemFlags)
self.tableWidget_3.setItem(0, i, item)
item = QtGui.QTableWidgetItem()
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
self.tableWidget_3.setItem(1, i, item)
# item = QtGui.QTableWidgetItem()
# item.setFlags(QtCore.Qt.NoItemFlags)
# self.tableWidget_3.setItem(0, 1, item)
# item = QtGui.QTableWidgetItem()
# item.setFlags(QtCore.Qt.NoItemFlags)
# self.tableWidget_3.setItem(0, 2, item)
# item = QtGui.QTableWidgetItem()
# item.setFlags(QtCore.Qt.NoItemFlags)
# self.tableWidget_3.setItem(0, 3, item)
# item = QtGui.QTableWidgetItem()
# item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
# self.tableWidget_3.setItem(1, 0, item)
# item = QtGui.QTableWidgetItem()
# item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
# self.tableWidget_3.setItem(1, 1, item)
# item = QtGui.QTableWidgetItem()
# item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
# self.tableWidget_3.setItem(1, 2, item)
# item = QtGui.QTableWidgetItem()
# item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsEditable|QtCore.Qt.ItemIsEnabled)
# self.tableWidget_3.setItem(1, 3, item)
self.file_edit_2 = QtGui.QLineEdit(self.tab_2)
self.file_edit_2.setGeometry(QtCore.QRect(100, 555, 400, 25))
self.file_edit_2.setObjectName(_fromUtf8("file_edit_2"))
self.load_table = QtGui.QPushButton(self.tab_2)
self.load_table.setGeometry(QtCore.QRect(17, 555, 75, 23))
self.load_table.setObjectName(_fromUtf8("load_table"))
self.load_table.clicked.connect(lambda: self.chose_file(2))
self.file_edit_3 = QtGui.QLineEdit(self.tab_2)
self.file_edit_3.setGeometry(QtCore.QRect(100, 585, 400, 25))
self.file_edit_3.setObjectName(_fromUtf8("file_edit_3"))
self.export_dir = QtGui.QPushButton(self.tab_2)
self.export_dir.setGeometry(QtCore.QRect(17, 585, 75, 23))
self.export_dir.setObjectName(_fromUtf8("export_dir"))
self.export_dir.clicked.connect(lambda: self.chose_file(3))
self.start_export = QtGui.QPushButton(self.tab_2)
self.start_export.setGeometry(QtCore.QRect(780, 560, 75, 41))
self.start_export.setObjectName(_fromUtf8("start_export"))
self.start_export.clicked.connect(self.export)
self.tab_3 = QtGui.QWidget()
self.tab_3.setObjectName(_fromUtf8("tab_3"))
self.tabWidget.addTab(self.tab_3, _fromUtf8(""))
self.lot_id_label = QtGui.QLabel(self.tab_3)
self.lot_id_label.setGeometry(QtCore.QRect(40, 60, 81, 21))
self.lot_id_label.setObjectName(_fromUtf8("lot_id_label"))
self.comboBox = QtGui.QComboBox(self.tab_3)
self.comboBox.setGeometry(QtCore.QRect(130, 60, 101, 22))
self.comboBox.setEditable(True)
# self.display_lot()
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.wafer_id = QtGui.QLabel(self.tab_3)
self.wafer_id.setGeometry(QtCore.QRect(40, 100, 81, 21))
self.wafer_id.setObjectName(_fromUtf8("wafer_id"))
self.comboBox_2 = QtGui.QComboBox(self.tab_3)
self.comboBox_2.setGeometry(QtCore.QRect(130, 100, 101, 22))
self.comboBox_2.setEditable(True)
self.comboBox_2.setObjectName(_fromUtf8("comboBox_2"))
self.x_cor = QtGui.QLabel(self.tab_3)
self.x_cor.setGeometry(QtCore.QRect(40, 140, 81, 21))
self.x_cor.setObjectName(_fromUtf8("x_cor"))
self.y_cor = QtGui.QLabel(self.tab_3)
self.y_cor.setGeometry(QtCore.QRect(40, 180, 81, 21))
self.y_cor.setObjectName(_fromUtf8("y_cor"))
self.lineEdit_4 = QtGui.QLineEdit(self.tab_3)
self.lineEdit_4.setGeometry(QtCore.QRect(130, 140, 51, 20))
self.lineEdit_4.setObjectName(_fromUtf8("lineEdit_4"))
self.search_result = QtGui.QLabel(self.tab_3)
self.search_result.setGeometry(QtCore.QRect(40, 235, 91, 16))
self.search_result.setObjectName(_fromUtf8("search_result"))
self.textBrowser = QtGui.QTextBrowser(self.tab_3)
self.textBrowser.setGeometry(QtCore.QRect(40, 265, 390, 320))
self.textBrowser.setObjectName(_fromUtf8("textBrowser"))
self.search_key = QtGui.QPushButton(self.tab_3)
self.search_key.setGeometry(QtCore.QRect(320, 180, 75, 23))
self.search_key.setObjectName(_fromUtf8("search_key"))
self.search_key.clicked.connect(self.key_search)
self.label_18 = QtGui.QLabel(self.tab_3)
self.label_18.setGeometry(QtCore.QRect(190, 140, 16, 16))
self.label_18.setObjectName(_fromUtf8("label_18"))
self.lineEdit_6 = QtGui.QLineEdit(self.tab_3)
self.lineEdit_6.setGeometry(QtCore.QRect(210, 140, 51, 20))
self.lineEdit_6.setObjectName(_fromUtf8("lineEdit_6"))
self.lineEdit_5 = QtGui.QLineEdit(self.tab_3)
self.lineEdit_5.setGeometry(QtCore.QRect(130, 180, 51, 20))
self.lineEdit_5.setObjectName(_fromUtf8("lineEdit_5"))
self.lineEdit_7 = QtGui.QLineEdit(self.tab_3)
self.lineEdit_7.setGeometry(QtCore.QRect(210, 180, 51, 20))
self.lineEdit_7.setObjectName(_fromUtf8("lineEdit_7"))
self.label_19 = QtGui.QLabel(self.tab_3)
self.label_19.setGeometry(QtCore.QRect(190, 180, 16, 16))
self.label_19.setObjectName(_fromUtf8("label_19"))
self.search_lot = QtGui.QPushButton(self.tab_3)
self.search_lot.setGeometry(QtCore.QRect(320, 60, 75, 23))
self.search_lot.setObjectName(_fromUtf8("search_lot"))
self.search_lot.clicked.connect(self.lot_search)
self.line_2 = QtGui.QFrame(self.tab_3)
self.line_2.setGeometry(QtCore.QRect(450, 0, 20, 650))
self.line_2.setFrameShape(QtGui.QFrame.VLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName(_fromUtf8("line_2"))
        # recycling
self.op_type = QtGui.QLabel(self.tab_3)
self.op_type.setGeometry(QtCore.QRect(500, 90, 81, 20))
self.op_type.setObjectName(_fromUtf8("op_type"))
self.statis_search = QtGui.QPushButton(self.tab_3)
self.statis_search.setGeometry(QtCore.QRect(780, 230, 75, 23))
self.statis_search.setObjectName(_fromUtf8("statis_search"))
self.statis_search.clicked.connect(self.op_record_search)
self.user_ = QtGui.QLabel(self.tab_3)
self.user_.setGeometry(QtCore.QRect(500, 50, 71, 20))
self.user_.setObjectName(_fromUtf8("user_"))
self.lineEdit_2 = QtGui.QLineEdit(self.tab_3)
self.lineEdit_2.setGeometry(QtCore.QRect(600, 50, 121, 20))
self.lineEdit_2.setObjectName(_fromUtf8("lineEdit_2"))
self.comboBox_4 = QtGui.QComboBox(self.tab_3)
self.comboBox_4.setGeometry(QtCore.QRect(600, 90, 121, 22))
self.comboBox_4.setObjectName(_fromUtf8("comboBox_4"))
self.comboBox_5 = QtGui.QComboBox(self.tab_3)
self.comboBox_5.setGeometry(QtCore.QRect(600, 130, 121, 22))
self.comboBox_5.setObjectName(_fromUtf8("comboBox_5"))
self.comboBox_5.setEditable(True)
self.inner_type = QtGui.QLabel(self.tab_3)
self.inner_type.setGeometry(QtCore.QRect(500, 130, 91, 20))
self.inner_type.setObjectName(_fromUtf8("inner_type"))
self.date_ = QtGui.QLabel(self.tab_3)
self.date_.setGeometry(QtCore.QRect(500, 170, 91, 20))
self.date_.setObjectName(_fromUtf8("date_"))
self.dateEdit = QtGui.QDateEdit(self.tab_3)
self.dateEdit.setGeometry(QtCore.QRect(600, 170, 121, 22))
self.dateEdit.setObjectName(_fromUtf8("dateEdit"))
self.dateEdit.setDateTime(self.dateEdit.dateTimeFromText('2015/3/1'))
self.dateEdit_2 = QtGui.QDateEdit(self.tab_3)
self.dateEdit_2.setGeometry(QtCore.QRect(740, 170, 121, 22))
self.dateEdit_2.setObjectName(_fromUtf8("dateEdit_2"))
self.dateEdit_2.setDateTime(self.dateEdit.dateTimeFromText(time.strftime('%Y/%m/%d',time.localtime())))
self.label_49 = QtGui.QLabel(self.tab_3)
self.label_49.setGeometry(QtCore.QRect(725, 170, 16, 20))
self.label_49.setObjectName(_fromUtf8("label_49"))
self.textBrowser_2 = QtGui.QTextBrowser(self.tab_3)
self.textBrowser_2.setGeometry(QtCore.QRect(495, 265, 390, 320))
self.textBrowser_2.setObjectName(_fromUtf8("textBrowser_2"))
self.search_result_2 = QtGui.QLabel(self.tab_3)
self.search_result_2.setGeometry(QtCore.QRect(500, 235, 91, 16))
self.search_result_2.setObjectName(_fromUtf8("search_result_2"))
#tab_4
self.tab_4 = QtGui.QWidget()
self.tab_4.setObjectName(_fromUtf8("tab_4"))
self.tabWidget.addTab(self.tab_4, _fromUtf8(""))
self.line_3 = QtGui.QFrame(self.tab_4)
self.line_3.setGeometry(QtCore.QRect(450, 0, 20, 650))
self.line_3.setFrameShape(QtGui.QFrame.VLine)
self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
self.line_3.setObjectName(_fromUtf8("line_3"))
self.op_history = QtGui.QLabel(self.tab_4)
self.op_history.setGeometry(QtCore.QRect(40, 30, 91, 16))
self.op_history.setObjectName(_fromUtf8("op_history"))
self.textBrowser_3 = QtGui.QTextBrowser(self.tab_4)
self.textBrowser_3.setGeometry(QtCore.QRect(40, 50, 390, 580))
self.textBrowser_3.setObjectName(_fromUtf8("textBrowser_3"))
self.admin_password = QtGui.QLabel(self.tab_4)
self.admin_password.setGeometry(QtCore.QRect(520, 140, 65, 16))
self.admin_password.setObjectName(_fromUtf8("admin_password"))
self.lineEdit_10 = QtGui.QLineEdit(self.tab_4)
self.lineEdit_10.setGeometry(QtCore.QRect(600, 140, 111, 20))
self.lineEdit_10.setObjectName(_fromUtf8("lineEdit_10"))
self.lineEdit_10.setEchoMode(QtGui.QLineEdit.Password)
self.lot_id_2 = QtGui.QLabel(self.tab_4)
self.lot_id_2.setGeometry(QtCore.QRect(520, 180, 61, 16))
self.lot_id_2.setObjectName(_fromUtf8("label"))
self.lineEdit = QtGui.QLineEdit(self.tab_4)
self.lineEdit.setGeometry(QtCore.QRect(600, 180, 111, 20))
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.input_again = QtGui.QLabel(self.tab_4)
self.input_again.setGeometry(QtCore.QRect(520, 220, 65, 16))
self.input_again.setObjectName(_fromUtf8("input_again"))
self.lineEdit_11 = QtGui.QLineEdit(self.tab_4)
self.lineEdit_11.setGeometry(QtCore.QRect(600, 220, 111, 20))
self.lineEdit_11.setObjectName(_fromUtf8("lineEdit_11"))
self.retrieve = QtGui.QPushButton(self.tab_4)
self.retrieve.setGeometry(QtCore.QRect(770, 220, 75, 23))
self.retrieve.setObjectName(_fromUtf8("retrieve"))
self.retrieve.clicked.connect(self.retrieve_key)
self.menuBar = QtGui.QMenuBar(self)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 887, 23))
self.menuBar.setObjectName(_fromUtf8("menuBar"))
User = self.menuBar.addMenu(u'&用户管理')
add_delete = User.addAction(u'添加/删除用户')
password = User.addAction(u'修改密码')
if self.user != 'admin':
add_delete.setDisabled(True)
self.menuBar.connect(add_delete, QtCore.SIGNAL('triggered()'), self.add_delete)
self.menuBar.connect(password, QtCore.SIGNAL('triggered()'), self.change_password)
self.setMenuBar(self.menuBar)
self.progressBar = QtGui.QProgressBar(self)
self.progressBar.setGeometry(QtCore.QRect(0, 690, 940, 23))
self.progressBar.setProperty("value", 0)
self.progressBar.setObjectName(_fromUtf8("progressBar"))
self.retranslateUi()
self.display_status()
self.combox_init()
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(self)
def retranslateUi(self):
self.setWindowTitle(_translate("Dialog", u"HDCP Key 管理工具", None))
self.mac_label.setText(_translate("Dialog", u" 输入mac前缀:", None))
self.file_import.setText(_translate("Dialog", u"导入bin文件:", None))
self.mac_button.setText(_translate("Dialog", u"开始导入", None))
self.import_button.setText(_translate("Dialog", u"开始导入", None))
self.title_1x_tx_total.setText(_translate("Dialog", u"1.x Transmitter 总数:", None))
self.num_1x_tx_left.setText(_translate("Dialog", "0", None))
self.num_1x_rx_left.setText(_translate("Dialog", "0", None))
self.title_1x_tx_left.setText(_translate("Dialog", u"剩余:", None))
self.num_1x_tx_total.setText(_translate("Dialog", "0", None))
self.num_1x_rx_total.setText(_translate("Dialog", "0", None))
self.title_1x_rx_left.setText(_translate("Dialog", u"剩余:", None))
self.title_1x_rx_total.setText(_translate("Dialog", u"1.x Receiver 总数:", None))
self.key_version.setText(_translate("Dialog", u"key 版本:", None))
self.title_2x_tx_total.setText(_translate("Dialog", u"2.x Transmitter 总数:", None))
self.num_2x_tx_left.setText(_translate("Dialog", u"0", None))
self.num_2x_rx_left.setText(_translate("Dialog", u"0", None))
self.title_2x_tx_left.setText(_translate("Dialog", u"剩余", None))
self.num_2x_tx_total.setText(_translate("Dialog", u"0", None))
self.num_2x_rx_total.setText(_translate("Dialog", u"0", None))
self.title_2x_rx_left.setText(_translate("Dialog", u"剩余", None))
self.title_2x_rx_total.setText(_translate("Dialog", u"2.x Receiver 总数:", None))
self.title_mac_total.setText(_translate("Dialog", u"Mac 总数:", None))
self.title_mac_left.setText(_translate("Dialog", u"剩余:", None))
self.num_mac_total.setText(_translate("Dialog", u"0", None))
self.num_mac_left.setText(_translate("Dialog", u"0", None))
self.x1.setText(_translate("Dialog", u"1.X", None))
self.x2.setText(_translate("Dialog", u"2.X", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("Dialog", u"导入Key", None))
__sortingEnabled = self.tableWidget.isSortingEnabled()
self.tableWidget.setSortingEnabled(False)
item = self.tableWidget.item(0, 0)
item.setText(_translate("Dialog", u"申请日期", None))
item = self.tableWidget.item(0, 2)
item.setText(_translate("Dialog", u"所属部门", None))
item = self.tableWidget.item(1, 0)
item.setText(_translate("Dialog", u"申请人员", None))
item = self.tableWidget.item(1, 2)
item.setText(_translate("Dialog", u"此处待定", None))
item = self.tableWidget_2.item(0, 0)
item.setText(_translate("Dialog", u"领用类型", None))
item = self.tableWidget_2.item(1, 0)
item.setText(_translate("Dialog", u"KEY来源", None))
item = self.tableWidget_2.item(2, 0)
item.setText(_translate("Dialog", u"KEY内容", None))
# item = self.tableWidget_2.item(4, 0)
# item.setText(_translate("Dialog", u"KEY版本", None))
item = self.tableWidget_2.item(3, 0)
item.setText(_translate("Dialog", u"KEY版本/类型", None))
item = self.tableWidget_2.item(6, 0)
item.setText(_translate("Dialog", u"", None))
item = self.tableWidget_2.item(7, 0)
item.setText(_translate("Dialog", u"有无内部型号", None))
item = self.tableWidget_2.item(9, 0)
item.setText(_translate("Dialog", u"需求片数", None))
item = self.tableWidget_2.item(10, 0)
item.setText(_translate("Dialog", u"需求key数", None))
item = self.tableWidget_2.item(11, 0)
item.setText(_translate("Dialog", u"单位", None))
item = self.tableWidget_2.item(12, 0)
item.setText(_translate("Dialog", u"测试工程师", None))
item = self.tableWidget_3.item(0, 0)
item.setText(_translate("Dialog", u"序号", None))
item = self.tableWidget_3.item(0, 1)
item.setText(_translate("Dialog", u"外包商", None))
item = self.tableWidget_3.item(0, 2)
item.setText(_translate("Dialog", u"批号", None))
item = self.tableWidget_3.item(0, 3)
item.setText(_translate("Dialog", u"片号", None))
item = self.tableWidget_2.item(3, 1)
item.setText(_translate("Dialog", u"TX", None))
item = self.tableWidget_2.item(3, 3)
item.setText(_translate("Dialog", u"RX", None))
self.load_table.setText(_translate("Dialog", u"载入表格:", None))
self.export_dir.setText(_translate("Dialog", u"导出目录:", None))
self.start_export.setText(_translate("Dialog", u"开始导出", None))
self.radio.setText(u'工程')
self.radio_1.setText(u'量产')
self.radio_2.setText(u'pilot run')
self.radio_3.setText(u'Availink')
self.radio_4.setText(u'Customer')
self.radio_5.setText(u'1.X')
self.radio_6.setText(u'2.X')
self.radio_7.setText(u'16进制')
self.radio_8.setText(u'8进制')
self.radio_9.setText(u'2进制')
self.radio_12.setText(u'有内部型号')
self.radio_13.setText(u'无内部型号')
self.radio_20.setText(u'Both')
self.radio_21.setText(u'None')
self.radio_22.setText(u'1.X')
self.radio_23.setText(u'2.X')
self.radio_24.setText(u'Both')
self.radio_25.setText(u'None')
self.tableWidget.setSortingEnabled(__sortingEnabled)
__sortingEnabled = self.tableWidget_2.isSortingEnabled()
self.tableWidget_2.setSortingEnabled(False)
self.tableWidget_2.setSortingEnabled(__sortingEnabled)
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("Dialog", u"导出Key", None))
self.lot_id_label.setText(_translate("Dialog", "Lot ID:", None))
self.wafer_id.setText(_translate("Dialog", "Wafer ID:", None))
self.x_cor.setText(_translate("Dialog", u"X 坐标:", None))
self.y_cor.setText(_translate("Dialog", u"Y 坐标:", None))
self.search_result.setText(_translate("Dialog", u"查询结果:", None))
self.label_18.setText(_translate("Dialog", "-", None))
self.label_19.setText(_translate("Dialog", "-", None))
self.search_key.setText(_translate("Dialog", "查询key", None))
self.search_lot.setText(_translate("Dialog", "查询lot", None))
self.op_type.setText(_translate("Dialog", "操作类型:", None))
self.user_.setText(_translate("Dialog", "用户:", None))
self.inner_type.setText(_translate("Dialog", "内部型号:", None))
self.date_.setText(_translate("Dialog", "日期:", None))
self.statis_search.setText(_translate("Dialog", "查询", None))
self.label_49.setText(_translate("Dialog", "-", None))
self.search_result_2.setText(_translate("Dialog", "查询结果:", None))
self.lot_id_2.setText(_translate("Dialog", "LotID:", None))
self.input_again.setText(_translate("Dialog", u"再输入一次:", None))
self.retrieve.setText(_translate("Dialog", "回收", None))
self.admin_password.setText(_translate("Dialog", u"管理员密码:", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _translate("Dialog", u"查询", None))
self.op_history.setText(_translate("Dialog", "操作历史:", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), _translate("Dialog", u"历史/回收", None))
def add_delete(self):
ui = UserDialog(self.db, self)
ui.exec_()
def change_password(self):
'''change password'''
dialog = PasswordDialog(self.db, self.user, parent = self)
if dialog.exec_():
# new_password = QtGui.QStandardItem(dialog.newPassword())
new_password = dialog.new_password()
# encr_pwd = encrypt_password(str(new_password))
self.db.update_ps(str(self.user), new_password)
dialog.destroy()
def import_(self):
self.file_path = self.file_edit.text()
if not self.file_path:
QtGui.QMessageBox.critical(self.tab, 'Error', u'You have not chosen any file!')
return
if not os.path.exists(self.file_path):
QtGui.QMessageBox.critical(self.tab, 'Error', u'This file does not exist!')
return
self.tabWidget.setEnabled(False)
thread_obj = threading.Thread(target=self.parse_key_file)
thread_obj.start()
#self.display_status()
def md5_calc(self, data):
m = hashlib.md5()
m.update(data)
return m.hexdigest()
def sha1_calc(self, data):
sha1obj = hashlib.sha1()
sha1obj.update(data)
return sha1obj.hexdigest()
def mac_import(self):
mac_pre = str(self.mac_pre.text()).strip()
if not mac_pre:
QtGui.QMessageBox.critical(self.tab, 'Error', u'Mac 前缀不能为空!')
return
if self.redis_inst.val_is_exist(mac_pre, 'mac'):
QtGui.QMessageBox.critical(self.tab, 'Error', u'Mac 前缀已经被导入过!')
return
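# 256^3 MAC values are generated below: each one is "0000" + the
# user-supplied prefix + a zero-padded 6-digit hex counter (16 characters
# total, assuming a 6-hex-digit prefix -- the mac_value column is char(16)).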
mac_total = 256*256*256
for i in range(0, mac_total):
val = int((i + 1)*100/mac_total)
self.progressBar.setValue(val)
self.db.insert_mac( "0000" + mac_pre + "{0:0>6}".format(hex(i)[2:]))
if i % 100 == 0:
print i
self.db.update_status('mac', 'import', total = mac_total, left = mac_total)
self.redis_inst.add_val(mac_pre)
self.redis_inst.add_history('%s import %s mac address at %s' % (self.user, mac_total, time.strftime('%Y/%m/%d',time.localtime())))
QtGui.QMessageBox.information(self, u'提示', u'Mac导入已完成,共导入%s个Mac!'%mac_total)
self.progressBar.setValue(0)
def warning(self, msg):
QtGui.QMessageBox.critical(self.tab, 'Error', msg)
def progress_bar(self, val):
self.progressBar.setValue(val)
def info(self,str1, str2):
QtGui.QMessageBox.information(self, str1, str2)
def enable(self, flag):
self.tabWidget.setEnabled(flag)
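# parse_key_file below infers the bin layout from its size/offset checks;
# the layout summarized here is an assumption reconstructed from that code,
# not a documented spec:
#   1.x bins: 4-byte header, then N records of 308 bytes each
#             (288-byte key + 20-byte SHA-1 of the key); header byte 0 == 1
#             marks TX, anything else RX.
#   2.x bins: 40-byte header, then N records of 862 bytes each; header
#             byte 3 == 1 marks TX, anything else RX.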
def parse_key_file(self):
start_time = time.time()
bin_size = os.path.getsize(self.file_path)
with open(self.file_path, 'rb') as f:
data = f.read()
md5_val = self.md5_calc(data)
if self.redis_inst.val_is_exist(md5_val):
#QtGui.QMessageBox.critical(self.tab, 'Error', u'文件数据已经被导入过!')
self.sinOut_err.emit(u'文件数据已经被导入过!')
self.sinOut_enable.emit(True)
return
order_format = bytearray(data)
f.seek(0)
key_type = self.checked_radio(self.key_version_group).text()
if key_type == '1.X':
table_pre = '1X'
if (bin_size - 4)%308 != 0:
#QtGui.QMessageBox.critical(self, 'Error', u'bin文件大小不对,请确认是否导入了正确的文件!')
self.sinOut_err.emit(u'bin文件大小不对,请确认是否导入了正确的文件!')
self.sinOut_enable.emit(True)
return
if order_format[0] == 1:
table_suff = 'TX'
else:
table_suff = 'RX'
elif key_type == '2.X':
table_pre = '2X'
if (bin_size - 40)%862 != 0:
#QtGui.QMessageBox.critical(self, 'Error', u'bin文件大小不对,请确认是否导入了正确的文件!')
self.sinOut_err.emit(u'bin文件大小不对,请确认是否导入了正确的文件!')
self.sinOut_enable.emit(True)
return
if order_format[3] == 1:
table_suff = 'TX'
else:
table_suff = 'RX'
db_table = table_pre + '_' + table_suff
print db_table
#self.tabWidget.setEnabled(False)
#self.sinOut_enable.emit(False)
if table_pre == '1X':
with open(self.file_path, 'rb') as f:
f.read(4)
key_total = (bin_size - 4)/308
key_imported = 0
for i in range(0, key_total):
key_bin = f.read(288)
sha = f.read(20)
if self.sha1_calc(key_bin) == binascii.hexlify(sha):
key_hex = binascii.hexlify(key_bin)
if i % ((key_total/1000)+1) == 0:
print i
val = int((i + 1)*100/key_total)
self.sinOut_progress_bar.emit(val)
self.db.insert_key(db_table, key_hex)
key_imported += 1
else:
print "error occured!"
self.sinOut_err.emit(u'bin文件checksum不对,请确认是否导入了正确的文件!')
self.sinOut_enable.emit(True)
return
elif db_table == '2X_RX':
with open(self.file_path, 'rb') as f:
f.read(40)
data = f.read(862)
key_total = (bin_size - 40)/862
key_imported = 0
while data:
key_hex = binascii.hexlify(data)
if key_imported % ((key_total/1000)+1) == 0:
print key_imported
val = int((key_imported + 1)*100/key_total)
self.sinOut_progress_bar.emit(val)
self.db.insert_key(db_table, key_hex)
key_imported += 1
data = f.read(862)
elif db_table == '2X_TX':
self.sinOut_err.emit(u'暂时不支持2X_TX类型!')
self.sinOut_enable.emit(True)
return 0
self.db.update_status(db_table.lower(), 'import', total = key_imported, left = key_imported)
self.redis_inst.add_val(md5_val)
today = time.strftime('%Y-%m-%d',time.localtime())
sql = "insert into op_record(user,operate_type,%s,day) values('%s','storage',%s,'%s')" % (db_table.lower(), self.user, key_imported, today)
self.db.set_op_record(sql)
self.redis_inst.add_history('%s import %s keys: %s at %s' % (self.user, db_table, key_imported, today))
# print 'history: ', '%s import %s keys: %s at %s' % (self.user, db_table, key_imported, time.strftime('%Y/%m/%d',time.localtime()))
#QtGui.QMessageBox.information(self, u'提示', u'导入已完成,共导入%s个key!'%key_imported)
self.sinOut_info.emit(u'提示',u'导入已完成,共导入%s个key!'%key_imported)
self.sinOut_progress_bar.emit(0)
end_time = time.time()
print "spend time:%d second"%(end_time-start_time)
#self.progressBar.setValue(0)
#self.tabWidget.setEnabled(True)
self.sinOut_enable.emit(True)
self.sinOut_status.emit()
def chose_file(self, n):
if n == 1:
filename = QtGui.QFileDialog.getOpenFileName(self,'Open file','./')
self.file_edit.setText(filename)
elif n == 2:
filename = str(QtGui.QFileDialog.getOpenFileName(self,'Open file','./'))
self.load_excel_form(filename)
self.file_edit_2.setText(filename)
elif n == 3:
dir_name = QtGui.QFileDialog.getExistingDirectory(self,'Open directory','./')
self.file_edit_3.setText(dir_name)
def load_excel_form(self, file_name):
try:
data = xlrd.open_workbook(file_name)
except:
QtGui.QMessageBox.critical(self, 'Error', u'打开文件格式不对!')
return
self.get_form_detail()
table = data.sheets()[0]
apply_date = (xlrd.xldate.xldate_as_datetime(table.cell(1,2).value, 0)).strftime( '%Y-%m-%d')
print apply_date
self.apply_date_p.setText(apply_date)
department = table.cell(1,7).value
self.department_p.setText(department)
apply_person = table.cell(2,2).value
self.apply_person_p.setText(apply_person)
review_person = table.cell(2,7).value
self.review_person_p.setText(review_person)
usage = table.cell(3,4).value
self.chose_radio(self.buttonGroup, usage)
key_source = table.cell(5,4).value
self.chose_radio(self.buttonGroup_1, key_source)
key_content = table.cell(7,4).value
self.chose_radio(self.buttonGroup_3, key_content)
tx = table.cell(9,5).value
self.chose_radio(self.buttonGroup_2, tx)
rx = table.cell(9,8).value
self.chose_radio(self.buttonGroup_6, rx)
inner_model = table.cell(11,4).value
self.inner_model_p.setEditText(inner_model)
chip_num = table.cell(12,4).value
self.chip_num_p.setText(str(int(chip_num)))
test_engineer = table.cell(14,4).value
self.test_engineer_p.setText(test_engineer)
No = table.cell(16,0).value
self.No_p.setText(str(int(No)))
contractor = table.cell(16,1).value
self.contractor_p.setText(contractor)
lot_id = table.cell(16,3).value
self.lot_id_p.setText(lot_id)
wafers = table.cell(16,5).value
if type(wafers)==float:
wafers = str(int(wafers))
self.wafers_p.setText(wafers)
def get_form_detail(self):
self.dir_path = self.file_edit_3.text()
self.apply_date_p = self.tableWidget.item(0, 1)
self.apply_date = self.apply_date_p.text()
self.department_p = self.tableWidget.item(0, 3)
self.department = self.department_p.text()
self.apply_person_p = self.tableWidget.item(1, 1)
self.apply_person = self.apply_person_p.text()
self.review_person_p = self.tableWidget.item(1, 3)
self.review_person = self.review_person_p.text()
self.usage = self.checked_radio(self.buttonGroup)
self.key_source = self.checked_radio(self.buttonGroup_1)
self.tx = self.checked_radio(self.buttonGroup_2).text()
self.rx = self.checked_radio(self.buttonGroup_6).text()
self.key_content = self.checked_radio(self.buttonGroup_3)
# self.inner_model_radio = self.checked_radio(self.buttonGroup_5).text()
self.inner_model_p = self.comboBox_3
self.inner_model = self.inner_model_p.currentText()
self.chip_num_p = self.tableWidget_2.item(9, 1)
self.chip_num = self.chip_num_p.text()
self.key_num_p = self.tableWidget_2.item(10, 1)
self.key_num = self.key_num_p.text()
self.unit_p = self.tableWidget_2.item(11, 1)
self.unit = self.unit_p.text()
self.test_engineer_p = self.tableWidget_2.item(12, 1)
self.test_engineer = self.test_engineer_p.text()
self.No_p = self.tableWidget_3.item(1,0)
self.No = self.No_p.text()
self.contractor_p = self.tableWidget_3.item(1,1)
self.contractor = self.contractor_p.text()
self.lot_id_p = self.tableWidget_3.item(1, 2)
self.lot_id = self.lot_id_p.text()
self.wafers_p = self.tableWidget_3.item(1, 3)
self.wafers = self.wafers_p.text()
def chose_radio(self, group, name):
for radio in group.buttons():
if radio.text() == name:
radio.setChecked(True)
def checked_radio(self, group_button):
for radio in group_button.buttons():
if radio.isChecked():
return radio
def get_statis(self):
statis = self.db.get_statistics()
X1_TX_total, X1_TX_left = statis['1X_TX'][0], statis['1X_TX'][1]
X1_RX_total, X1_RX_left = statis['1X_RX'][0], statis['1X_RX'][1]
X2_TX_total, X2_TX_left = statis['2X_TX'][0], statis['2X_TX'][1]
X2_RX_total, X2_RX_left = statis['2X_RX'][0], statis['2X_RX'][1]
mac_total, mac_left = statis['mac'][0], statis['mac'][1]
return X1_TX_total, X1_TX_left, X1_RX_total, X1_RX_left, X2_TX_total, X2_TX_left, X2_RX_total, X2_RX_left, mac_total, mac_left
def display_status(self):
X1_TX_total, X1_TX_left, X1_RX_total, X1_RX_left, X2_TX_total, X2_TX_left, X2_RX_total, X2_RX_left, mac_total, mac_left = self.get_statis()
self.num_1x_tx_total.setText(str(X1_TX_total))
self.num_1x_rx_total.setText(str(X1_RX_total))
self.num_1x_tx_left.setText(str(X1_TX_left))
self.num_1x_rx_left.setText(str(X1_RX_left))
self.num_2x_tx_left.setText(str(X2_TX_left))
self.num_2x_rx_left.setText(str(X2_RX_left))
self.num_2x_tx_total.setText(str(X2_TX_total))
self.num_2x_rx_total.setText(str(X2_RX_total))
self.num_mac_total.setText(str(mac_total))
self.num_mac_left.setText(str(mac_left))
history = self.redis_inst.get_history()
for item in history:
self.textBrowser_3.append(item)
def get_item_text(self):
# debug helper: round-trips the selected cell text through the user table
# to check character-set handling
# item = self.tableWidget_2.currentItem()
test_zh = self.tableWidget.currentItem().text()
print 'out of db:'
print test_zh, type(test_zh)
self.db.add_user(test_zh, '123')
users = self.db.get_users()
print 'in db:'
print users[-1], type(users[-1])
print chardet.detect(users[-1])
# print new.decode('gbk'), type(new)
item = self.tableWidget.item(1,0)
item.setText(users[-1].decode('utf8'))
def export(self):
self.get_form_detail()
key_table = []
if self.tx == "1.X":
key_table.append("1X_TX")
elif self.tx == "2.X":
key_table.append("2X_TX")
elif self.tx == "Both":
key_table.extend(["1X_TX", "2X_TX"])
if self.rx == "1.X":
key_table.append("1X_RX")
elif self.rx == "2.X":
key_table.append("2X_RX")
elif self.rx == "Both":
key_table.extend(["1X_RX", "2X_RX"])
if not self.lot_id:
QtGui.QMessageBox.critical(self.tab, 'Error', u'批号不能为空!')
return
elif self.redis_inst.val_is_exist(str(self.lot_id), 'lot_id'):
QtGui.QMessageBox.critical(self.tab, 'Error', u'批号:%s 已经被导出过!' % str(self.lot_id))
return
if not self.inner_model:
QtGui.QMessageBox.critical(self.tab, 'Error', u'内部型号不能为空!')
return
elif str(self.chip_num).strip() != str(len(self.wafers.split(','))):
QtGui.QMessageBox.critical(self.tab, 'Error', u'wafer片号与总数不匹配!')
return
elif not str(self.dir_path).strip():
QtGui.QMessageBox.critical(self.tab, 'Error', u'你还没有选择导出目录!')
return
elif not os.path.isdir(str(self.dir_path)):
QtGui.QMessageBox.critical(self.tab, 'Error', u'输入的不是目录名!')
return
self.parse_cfg()  # parse the .cfg config file for this inner model
self.create_target_dir()
export_thread_obj = threading.Thread(target = self._export)
export_thread_obj.start()
def _export(self):
#self.tabWidget.setEnabled(False)
self.sinOut_enable.emit(False)
# parse the map txt file to get the (x, y) position of every die on the wafer
keys_in_one_wafer = len(self.wafer_map)
self.lot_id_str = str(self.lot_id).replace('.', '_')
# create a table named after the lot id, used for later queries by lot id
self.db.create_table_by_lot(self.lot_id_str)
self.mac_start_id = self.mac_end_id = self.db.get_start_id('mac')
exported_keys = 0
if len(self.key_type) == 1:
total_keys = keys_in_one_wafer*(int(str(self.chip_num)))
for tb in self.key_type:
# start_id is where the previous key fetch ended; end_id is where this fetch ends
key_start_id = key_end_id = self.db.get_start_id(tb)
# self.wafers holds the comma-separated wafer numbers
for wafer_id in self.wafers.split(','):
i = 0
while i < keys_in_one_wafer:
x, y = self.wafer_map[i]
print "lot_id: %s, wafer_id: %s, key_type: %s, x: %s, y: %s, id: %s"%(self.lot_id_str, wafer_id, tb, x, y, key_end_id)
# fetch the key value from table tb by key id
key = self.db.get_key(tb, key_end_id)
# write the key out to a per-die key file
self.gen_key_file(tb, wafer_id, x, y, key)
# record the mapping in the lot table so exported keys can be queried by lot id
self.db.insert_value_by_lot(self.lot_id_str, wafer_id, tb, x, y, key_end_id)
i += 1
key_end_id += 1
exported_keys += 1
val = int((exported_keys + 1)*100/total_keys)
#self.progressBar.setValue(val)
self.sinOut_progress_bar.emit(val)
#print exported_keys
wafer_dir = os.path.join(self.output_dir, str("%02d" % int(wafer_id)))
self.clean_file(wafer_dir)
today = time.strftime('%Y-%m-%d',time.localtime())
self.db.update_status(tb.lower(), 'export', left = key_end_id - key_start_id, start_id = key_end_id)
sql = "insert into op_record(user,inner_model,operate_type,%s,day) values('%s','%s','fetch',%s,'%s')" % (tb.lower(), self.user, str(self.inner_model), key_end_id - key_start_id, today)
self.db.set_op_record(sql)
elif len(self.key_type) == 2:
assert '1X_TX' == self.key_type[0] and '2X_RX' == self.key_type[1]
x1 = '1X_TX'
x2 = '2X_RX'
x1_tx_start_id = x1_tx_end_id = self.db.get_start_id(x1)
x2_rx_start_id = x2_rx_end_id = self.db.get_start_id(x2)
total_keys = keys_in_one_wafer*(int(str(self.chip_num)))
for wafer_id in self.wafers.split(','):
i = 0
while i < keys_in_one_wafer:
x, y = self.wafer_map[i]
x1_key = self.db.get_key(x1, x1_tx_end_id)
x2_key = self.db.get_key(x2, x2_rx_end_id)
self.gen_key_file(x1, wafer_id, x, y, x1_key, x2_key)
self.db.insert_value_by_lot(self.lot_id_str, wafer_id, x1, x, y, x1_tx_end_id)
self.db.insert_value_by_lot(self.lot_id_str, wafer_id, x2, x, y, x2_rx_end_id)
i += 1
x1_tx_end_id += 1
x2_rx_end_id += 1
exported_keys += 1
val = int((exported_keys + 1)*100/total_keys)
#self.progressBar.setValue(val)
self.sinOut_progress_bar.emit(val)
# print exported_keys
wafer_dir = os.path.join(self.output_dir, str("%02d" % int(wafer_id)))
self.clean_file(wafer_dir)
today = time.strftime('%Y-%m-%d',time.localtime())
self.db.update_status(x1.lower(), 'export', left = x1_tx_end_id - x1_tx_start_id, start_id = x1_tx_end_id)
self.db.update_status(x2.lower(), 'export', left = x2_rx_end_id - x2_rx_start_id, start_id = x2_rx_end_id)
sql = "insert into op_record(user,inner_model,operate_type,%s,day) values('%s','%s','fetch',%s,'%s')" % (x1, self.user, str(self.inner_model), x1_tx_end_id - x1_tx_start_id, today)
self.db.set_op_record(sql)
sql = "insert into op_record(user,inner_model,operate_type,%s,day) values('%s','%s','fetch',%s,'%s')" % (x2, self.user, str(self.inner_model), x1_tx_end_id - x1_tx_start_id, today)
self.db.set_op_record(sql)
# print "lot_id: %s, wafer_id: %s, key_type: %s, x: %s, y: %s, id: %s"%(self.lot_id_str, wafer_id, self.key_type, x, y, x1_tx_end_id)
self.db.update_status('mac', 'export', left = self.mac_end_id - self.mac_start_id, start_id = self.mac_end_id)
self.redis_inst.add_val(str(self.lot_id), 'lot_id')
for type_ in self.key_type:
self.redis_inst.add_history('%s export %s keys: %s at %s' % (self.user, type_, total_keys, time.strftime('%Y/%m/%d',time.localtime())))
#QtGui.QMessageBox.information(self, u'提示', u'导出已完成,共导出%s个key!' % total_keys)
self.sinOut_info.emit(u'提示', u'导出已完成,共导出%s个key!' % total_keys)
#self.display_status()
#self.progressBar.setValue(0)
self.sinOut_progress_bar.emit(0)
#self.tabWidget.setEnabled(True)
self.sinOut_enable.emit(True)
self.sinOut_status.emit()
def create_target_dir(self):
main_dir = os.getcwd()
self.output_dir = os.path.join(main_dir, "Output", str(self.lot_id))
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)  # makedirs also creates the Output parent if it is missing
def gen_key_file(self, tb, wafer_id, x, y, key_1, key_2 = None):
wafer_dir = os.path.join(self.output_dir, str("%02d" % int(wafer_id)))
print wafer_dir
if not os.path.exists(wafer_dir):
os.mkdir(wafer_dir)
work_dir = os.getcwd()
os.chdir(wafer_dir)
name_base = "%s_%s_%s_%s" % (str(self.lot_id), str("%02d" % int(wafer_id)), x, y)
print name_base
key_name = "%s.key_source" % name_base
key2_name = "%s.txt" % name_base
key_name_abs = os.path.join(wafer_dir, key_name)
key2_name_abs = os.path.join(wafer_dir, key2_name)
if "TX" in tb:
type_ = '1'
else:
type_ = '2'
if "1X" in tb:
bytes_ = '288'
with open(key_name_abs, 'a+') as f:
f.write("==PAR_START==" + "\n")
f.write(str(self.lot_id) + "\n")
f.write(str(wafer_id) + '\n')
f.write(str(x) + '\n')
f.write(str(y) + '\n')
f.write(type_ + '\n')
f.write(bytes_ + '\n')
f.write("==KEY_START==" + '\n')
length = len(zip(key_1[0::2], key_1[1::2]))
for index, item in enumerate(zip(key_1[0::2], key_1[1::2])):
if index != length - 1:
f.write(''.join(item) + '\n')
else:
f.write(''.join(item))
if self.bind_mac == 'yes':
mac_value = self.db.get_mac(self.mac_end_id)
self.mac_end_id += 1
else:
mac_value = 1234567890123456
if len(self.key_type) == 1:
cmd_str = '%s %s %d' % (self.rom_gen_exe, key_name_abs, 1)
execCLI(cmd_str)
with open("%s.rom"%key_name_abs, 'a+') as f:
f.write('\n')
cmd_final = '%s %s %s %s' % (self.final_exe[0], "%s.rom"%key_name_abs, "%s.key"%name_base, mac_value)
execCLI(cmd_final)
print 'key: %s, mac: %s' % (name_base, mac_value)
elif len(self.key_type) == 2:
with open(key2_name_abs, 'a+') as f:
for item in zip(key_2[0::2], key_2[1::2]):
f.write(''.join(item) + '\n')
cmd_str = '%s %s %d' % (self.rom_gen_exe, key_name_abs, 1)
execCLI(cmd_str)
with open("%s.rom"%key_name_abs, 'a+') as f:
f.write('\n')
cmd_final_1 = '%s %s %s %s' % (self.final_exe[0], "%s.rom"%key_name_abs, "%s.out"%key_name_abs, mac_value)
execCLI(cmd_final_1)
cmd_final_2 = '%s %s %s %s' % (self.final_exe[1], "%s.out"%key_name_abs, key2_name_abs, "%s.key"%name_base)
execCLI(cmd_final_2)
print 'key: %s, mac: %s' % (name_base, mac_value)
os.chdir(work_dir)
def clean_file(self, clean_dir, reserved = 'key'):
work_dir = os.getcwd()
os.chdir(clean_dir)
for f in os.listdir('.'):
if not f.endswith(reserved):
os.remove(f)
os.chdir(work_dir)
def parse_cfg(self):
target_dir = os.path.join(os.getcwd(),'Input', str(self.inner_model))
if not os.path.exists(target_dir):
QtGui.QMessageBox.critical(self.tab, 'Error', u'目标目录不存在!')
return
cfg_file = None
for f in os.listdir(target_dir):
if f.endswith("cfg"):
cfg_file = f
break
if cfg_file is None:
QtGui.QMessageBox.critical(self.tab, 'Error', u'No .cfg file found under the target directory!')
return
cfg_file_abs = os.path.join(target_dir, cfg_file)
config = ConfigParser.ConfigParser()
try:
cfg_fp = open(cfg_file_abs,"r")
config.readfp(cfg_fp)
except Exception,e:
print e
self.chip_name = config.get('Chip name', 'chip_name').strip()
self.map_file = os.path.join(target_dir, config.get('Map file', 'file').strip())
self.rom_gen_exe = os.path.join(target_dir, config.get('HDCP_ROM_GEN', 'rom_gen_exe').strip())
print self.rom_gen_exe
# self.final_exe = os.path.join(target_dir, config.get('FINAL_ROM_GEN', 'final_exe').strip())
self.final_exe = [os.path.join(target_dir, config.get('FINAL_ROM_GEN', item).strip()) for item in config.options('FINAL_ROM_GEN')]
print self.final_exe
self.bind_mac = config.get('MAC', 'bind_mac').strip()
self.key_type = [config.get('Key type', item).strip() for item in config.options('Key type')]
# print self.chip_name
# print self.map_file
# print self.rom_gen_exe
# print self.final_exe
# print self.key_type
self.wafer_map = self.parse_map(self.map_file)
# print self.wafer_map
def parse_map(self, file_name):
with open(file_name, 'r') as f:
data = f.readlines()
start_x = int(data[7].split('=')[1].split(',')[0])
start_y = int(data[7].split('=')[1].split(',')[1])
length = len(data)
# find the first row of the wafer circle
for i in range(0, length):
if '=' not in data[i] and data[i].strip():
circle_start = i
break
# find the last row of the wafer circle; default to the last line in case
# the file does not end with a blank line, and stop at the first blank line
circle_end = length - 1
for i in range(circle_start, length):
if not data[i].strip():
circle_end = i - 1
break
a = re.compile(r"\s*[MSms]*([^MmSs\s]+?).*")
for i in range(circle_start, circle_end + 1):
match = a.search(data[i])
if match:
# x_base is the x coordinate of the leftmost character of the first row;
# match.start(1) is the offset of group(1) (the bracketed part of the
# pattern) within the whole line
x_base = start_x - match.start(1)
break
wafer_map = []
for i in range(circle_start, circle_end + 1):
match = a.search(data[i])
if match:
first_index = match.start(1)
incr = 0
for j in data[i][first_index:].strip():
if j not in "MmSs":
x = x_base + first_index + incr
y = start_y
wafer_map.append((x, y))
incr += 1
start_y -= 1
return wafer_map
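# Hedged sketch of the map-file layout parse_map assumes, reconstructed from
# the parsing above rather than from any spec: line 8 carries
# "<name>=<start_x>,<start_y>"; the wafer circle is a block of non-blank rows
# in which characters from "MmSs" mark skipped positions and every other
# non-space character is a die. Dies get coordinates left to right, with y
# decreasing by one per row.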
def key_search(self):
lot_id = str(self.comboBox.currentText()).replace('.', '_')
wafer_id = self.comboBox_2.currentText()
x_start = str(self.lineEdit_4.text()).strip()
y_start = str(self.lineEdit_5.text()).strip()
x_end = str(self.lineEdit_6.text()).strip()
y_end = str(self.lineEdit_7.text()).strip()
self.textBrowser.clear()
if not lot_id:
QtGui.QMessageBox.critical(self.tab_3, 'Error', u'Lot ID 不能为空!')
return
sql = "select id, x_coordinate, y_coordinate from %s where 1=1 " % lot_id
if not wafer_id:
QtGui.QMessageBox.critical(self.tab_3, 'Error', u'Wafer ID 不能为空!')
return
sql += "and waferID=%s " % wafer_id
if not x_start and not x_end:
QtGui.QMessageBox.critical(self.tab_3, 'Error', u'至少应输入一个 X坐标!')
return
elif (x_start and not x_start.isdigit()) or (x_end and not x_end.isdigit()):
QtGui.QMessageBox.critical(self.tab_3, 'Error', u'坐标应该输入数字!')
return
elif x_start and not x_end:
sql += "and x_coordinate=%s " % x_start
elif x_end and not x_start:
sql += "and x_coordinate=%s " % x_end
elif x_start and x_end:
sql += "and x_coordinate>=%s and x_coordinate<=%s " % (x_start, x_end)
if not y_start and not y_end:
QtGui.QMessageBox.critical(self.tab_3, 'Error', u'至少应输入一个 Y坐标!')
return
elif (y_start and not y_start.isdigit()) or (y_end and not y_end.isdigit()):
QtGui.QMessageBox.critical(self.tab_3, 'Error', u'坐标应该输入数字!')
return
elif y_start and not y_end:
sql += "and y_coordinate=%s " % y_start
elif y_end and not y_start:
sql += "and y_coordinate=%s " % y_end
elif y_start and y_end:
sql += "and y_coordinate>=%s and y_coordinate<=%s " % (y_start, y_end)
key_types = self.db.get_key_types(lot_id)
for item in key_types:
cur_sql = sql + "and key_type='%s' " % item
print cur_sql
cors = self.db.get_key_id(cur_sql)
if cors:
self.textBrowser.append("[key_type: %s]" % item)
for cor in cors:
self.textBrowser.append("X Coordinate: %s, Y Coordinate: %s\n" % (cor[1], cor[2]))
self.textBrowser.append(self.db.get_key_info(item, cor[0]) + '\n')
def lot_search(self):
# placeholder: lot search is not implemented yet
self.textBrowser.setText("hello sky")
def display_lot(self):
if self.redis_inst.is_exist('lot_id'):
for item in self.redis_inst.r.smembers('lot_id'):
self.comboBox.addItem(QtCore.QString(item))
def combox_init(self):
if self.redis_inst.is_exist('lot_id'):
for item in self.redis_inst.r.smembers('lot_id'):
self.comboBox.addItem(QtCore.QString(item))
for i in range(1,26):
self.comboBox_2.addItem(QtCore.QString(str(i)))
self.comboBox_4.addItem(QtCore.QString(u'storage'))
self.comboBox_4.addItem(QtCore.QString(u'fetch'))
def op_record_search(self):
sql = "select sum(1x_tx), sum(1x_rx), sum(2x_tx), sum(2x_rx) from op_record where 1=1 "
html = "<p><b>Date:</b> %s -- %s</p><p><b>Operate type:</b> %s</p>"
from_date = self.dateEdit.textFromDateTime(self.dateEdit.dateTime())
if from_date:
sql += "and day >= '%s' " % from_date
real_para = [from_date,]
end_date = self.dateEdit_2.textFromDateTime(self.dateEdit_2.dateTime())
if end_date:
sql += "and day <= '%s' " % end_date
real_para.append(end_date)
operate_type = self.comboBox_4.currentText()
if operate_type:
sql += "and operate_type='%s' " % operate_type
real_para.append(operate_type)
user = self.lineEdit_2.text()
if user:
sql += "and user='%s' " % user
html += "<p><b>User:</b> %s</p>"
real_para.append(user)
inner_model = self.comboBox_5.currentText()
if inner_model:
sql += "and inner_model='%s' " % inner_model
html += "<p><b>Inner model:</b> %s</p>"
real_para.append(inner_model)
x1_tx, x1_rx, x2_tx, x2_rx = self.db.get_op_record(sql)
html += "<p><b>1X_TX:</b> %s</p><p><b>1X_RX:</b> %s</p><p><b>2X_TX:</b> %s</p><p><b>2X_RX:</b> %s</p>"
x1_tx = 0 if not x1_tx else x1_tx
x1_rx = 0 if not x1_rx else x1_rx
x2_tx = 0 if not x2_tx else x2_tx
x2_rx = 0 if not x2_rx else x2_rx
real_para.extend([x1_tx, x1_rx, x2_tx, x2_rx])
self.textBrowser_2.setText(html%tuple(real_para))
def retrieve_key(self):
lot_id_1 = str(self.lineEdit.text()).replace('.', '_')
lot_id_2 = str(self.lineEdit_11.text()).replace('.', '_')
if not self.lineEdit_10.text() == self.db.get_ps('admin'):
QtGui.QMessageBox.critical(self.tab, 'Error', u'输入的管理员密码错误!')
return
if lot_id_1 != lot_id_2:
QtGui.QMessageBox.critical(self.tab, 'Error', u'两次输入的lot id 不一致!')
return
if not self.redis_inst.val_is_exist(lot_id_1.replace('_', '.'),'lot_id'):
QtGui.QMessageBox.critical(self.tab, 'Error', u'输入的lot id在数据库中不存在!')
return
key_types = self.db.get_key_types(lot_id_1)
if not key_types:
QtGui.QMessageBox.critical(self.tab, 'Error', u'lot id 对应的表类型为空!')
return
for type_ in key_types:
new_id = int(self.db.get_new_id(type_)) + 1
del_ids = self.db.get_del_ids(lot_id_1, type_)
id_num = len(del_ids)
# re-number the recycled ids past the current maximum so the keys return to the pool
for id_ in del_ids:
self.db.update_key_id(type_, id_, str(new_id))
new_id += 1
print new_id
#update statistics table
self.db.update_status(type_, 'retrieve', left = id_num)
# TODO: the op_record table should also be updated here
self.redis_inst.add_history('%s retrieve %s keys: %s at %s' % (self.user, type_, id_num, time.strftime('%Y/%m/%d',time.localtime())))
#drop table
self.db.drop_table(lot_id_1)
self.redis_inst.del_val(lot_id_1.replace('_', '.'), 'lot_id')
self.display_status()
QtGui.QMessageBox.information(self, u'提示', u'回收操作已完成!')
class UserDialog(QtGui.QDialog):
def __init__(self, db, parent=None):
QtGui.QDialog.__init__(self, parent)
self.db = db
self.resize(400, 300)
self.pushButton = QtGui.QPushButton(self)
self.pushButton.setGeometry(QtCore.QRect(300, 80, 75, 23))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.pushButton.clicked.connect(self.delete)
self.pushButton_4 = QtGui.QPushButton(self)
self.pushButton_4.setGeometry(QtCore.QRect(300, 40, 75, 23))
self.pushButton_4.setObjectName(_fromUtf8("pushButton_4"))
self.pushButton_4.clicked.connect(self.add)
self.listWidget = QtGui.QListWidget(self)
self.listWidget.setGeometry(QtCore.QRect(20, 30, 256, 201))
self.listWidget.setObjectName(_fromUtf8("listWidget"))
items = [QtGui.QListWidgetItem(item) for item in self.db.get_users()]
for i in range(len(items)):
if items[i].text() != 'admin':
self.listWidget.insertItem(i+1,items[i])
self.listWidget.itemClicked.connect(self.cur_item)
self.retranslateUi(self)
QtCore.QMetaObject.connectSlotsByName(self)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", u"添加/删除用户", None))
self.pushButton.setText(_translate("Dialog", u"删除", None))
self.pushButton_4.setText(_translate("Dialog", u"添加", None))
def cur_item(self):
self.current_item = self.listWidget.currentItem().text()
def add(self):
ui = AddDialog(self.db, self)
if ui.exec_():
self.listWidget.addItem(QtGui.QListWidgetItem(ui.addUser))
def delete(self):
if not hasattr(self, 'current_item'):
QtGui.QMessageBox.critical(self, 'Error', u'请选择要删除的用户!')
return
reply = QtGui.QMessageBox.question(self, 'Warning', u'你确定要删除用户: %s 吗?'%self.current_item, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
self.listWidget.setItemHidden(self.listWidget.currentItem(), True)
self.db.del_user(self.current_item)
class AddDialog(QtGui.QDialog):
def __init__(self, db, parent = None):
QtGui.QDialog.__init__(self, parent)
self.db = db
self.setObjectName(_fromUtf8("Dialog"))
self.resize(345, 208)
self.buttonBox = QtGui.QDialogButtonBox(self)
self.buttonBox.setGeometry(QtCore.QRect(40, 160, 291, 32))
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.label = QtGui.QLabel(self)
self.label.setGeometry(QtCore.QRect(40, 40, 141, 16))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(self)
self.label_2.setGeometry(QtCore.QRect(40, 90, 141, 16))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.lineEdit = QtGui.QLineEdit(self)
self.lineEdit.setGeometry(QtCore.QRect(40, 60, 221, 20))
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.lineEdit_2 = QtGui.QLineEdit(self)
self.lineEdit_2.setGeometry(QtCore.QRect(40, 110, 221, 20))
self.lineEdit_2.setObjectName(_fromUtf8("lineEdit_2"))
self.retranslateUi()
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), self.add_user)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), self.reject)
QtCore.QMetaObject.connectSlotsByName(self)
def retranslateUi(self):
self.setWindowTitle(_translate("Dialog", u"添加用户", None))
self.label.setText(_translate("Dialog", u"请输入用户名:", None))
self.label_2.setText(_translate("Dialog", u"请输入密码:", None))
def add_user(self):
username = self.lineEdit.text()
password = str(self.lineEdit_2.text())
if not username:
QtGui.QMessageBox.warning(self, 'Warning', u'用户名不能为空!', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
return
if not password:
QtGui.QMessageBox.warning(self, 'Warning', u'密码不能为空!', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
return
if username in self.db.get_users():
QtGui.QMessageBox.warning(self, 'Warning', u'用户: %s 已经存在,请更换其他的用户名!'%username, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
else:
self.db.add_user(str(username), password)
if username in self.db.get_users():
self.addUser = username
self.accept()
class PasswordDialog(QtGui.QDialog):
def __init__(self, db, user, parent=None):
self.db = db
self.user = user
QtGui.QDialog.__init__(self, parent)
self.resize(240, 200)
self.setWindowTitle(u'修改密码')
grid = QtGui.QGridLayout()
grid.addWidget(QtGui.QLabel(u'旧密码:', parent=self), 0, 0, 1, 1)
self.oldPassword = QtGui.QLineEdit(parent=self)
grid.addWidget(self.oldPassword, 0, 1, 1, 1)
grid.addWidget(QtGui.QLabel(u'新密码:', parent=self), 1, 0, 1, 1)
self.newPassword = QtGui.QLineEdit(parent=self)
grid.addWidget(self.newPassword, 1, 1, 1, 1)
buttonBox = QtGui.QDialogButtonBox(parent=self)
buttonBox.setOrientation(QtCore.Qt.Horizontal)
buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
buttonBox.accepted.connect(self.judge)
buttonBox.rejected.connect(self.reject)
layout = QtGui.QVBoxLayout()
layout.addLayout(grid)
spacerItem = QtGui.QSpacerItem(20, 48, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
layout.addItem(spacerItem)
layout.addWidget(buttonBox)
self.setLayout(layout)
def judge(self):
if self.db.get_ps(self.user) == self.old_password():
self.accept()
else:
QtGui.QMessageBox.critical(self, 'Error', u'旧密码不正确!')
def old_password(self):
return self.oldPassword.text()
def new_password(self):
return self.newPassword.text()
class RedisOperate(object):
def __init__(self):
pool = redis.ConnectionPool(host='127.0.0.1', port = 6379, password = '<PASSWORD>')
self.r = redis.StrictRedis(connection_pool=pool)
def get_ps(self, name):
return self.r.hget('user_management', name)
def set_ps(self, name, value):
self.r.hset('user_management', name, value)
def is_hexist(self, name):
return self.r.hexists('user_management', name)
def is_exist(self, name):
return self.r.exists(name)
def add_val(self, val, set_type = 'file_md5'):
self.r.sadd(set_type, val)
def del_val(self, val, set_type):
self.r.srem(set_type, val)
def val_is_exist(self, val, set_type = 'file_md5'):
return self.r.sismember(set_type, val)
def add_history(self, action):
self.r.lpush('history', action)
def get_history(self):
return self.r.lrange('history', 0, -1)
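# add_history uses lpush, so get_history() (lrange 0..-1) returns entries
# newest-first.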
def set(self, name, value):
self.r.set(name, value)
def get(self, name):
return self.r.get(name)
def delete_user(self, name):
self.r.hdel('user_management', name)
def dump_users(self):
return self.r.hkeys('user_management')
class MysqlOperate(object):
def __init__(self, host, user, passwd, db):
self.conn = MySQLdb.connect(host, user, passwd, db)
self.cur = self.conn.cursor()
self.conn.set_character_set('utf8')
self.cur.execute('SET NAMES utf8;')
self.cur.execute('SET CHARACTER SET utf8;')
self.cur.execute('SET character_set_connection=utf8;')
if not self.table_is_exist():
self.create_tables()
def create_tables(self):
#username management table
self.cur.execute("create table user(name varchar(20), password varchar(50))")
#key table
self.cur.execute("create table 1X_TX(id int not null auto_increment, value varchar(2000), is_use enum('yes', 'no'), primary key(id))")
self.cur.execute("create table 1X_RX(id int not null auto_increment, value varchar(2000), is_use enum('yes', 'no'), primary key(id))")
self.cur.execute("create table 2X_TX(id int not null auto_increment, value varchar(2000), is_use enum('yes', 'no'), primary key(id))")
self.cur.execute("create table 2X_RX(id int not null auto_increment, value varchar(2000), is_use enum('yes', 'no'), primary key(id))")
#lot table, to be finish
self.cur.execute("create table lot(lotID varchar(20), department varchar(20), applyPerson varchar(20), \
applyType enum('project', 'batch', 'pilot'), keySource enum('availink', 'customer'), \
keyContent enum('16', '8', '2'), TX enum('1X','2X','Both','None'), RX enum('1X','2X','Both','None'), \
innerType varchar(30), sliceNumber tinyint, keyNumber int, testEngineer varchar(20), slices varchar(80))")
#key record
self.cur.execute("create table op_record(user char(20), inner_model char(30), operate_type enum('fetch','storage'), 1x_tx int, 1x_rx int, 2x_tx int, 2x_rx int, day date)")
#Statistics table
self.cur.execute("create table Statistics(type enum('1X_TX','1X_RX','2X_TX','2X_RX','mac'), total int, left_num int, start_id int)")
sql = "insert into Statistics values(%s,%s,%s,%s)"
self.cur.executemany(sql,[('1X_TX', '0', '0', '1'), ('1X_RX', '0', '0', '1'), ('2X_TX', '0', '0', '1'), ('2X_RX', '0', '0', '1'), ('mac', '0', '0', '1')])
#mac info
self.cur.execute("create table mac_addr(id int not null auto_increment,mac_value char(16),primary key(id))")
self.conn.commit()
def insert_key(self, table_name, value):
# print "insert into %s(value, is_use) values('%s', 'no')"%(table_name, value)
self.cur.execute("insert into %s(value, is_use) values('%s', 'no')"%(table_name, value))
#self.conn.commit()
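# Commit is deferred here (see the commented line above): parse_key_file
# inserts keys in a tight loop and its final update_status() call commits
# the whole batch in one transaction, presumably to keep bulk imports fast.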
def insert_mac(self, value):
self.cur.execute("insert into mac_addr(mac_value) values('%s')" % value)
self.conn.commit()
def update_status(self, key_type, op_type, total = None, left = None, start_id = None):
if op_type == 'import':
sql = "update statistics set total=total+%s,left_num=left_num+%s where type='%s'"%(total, left, key_type)
elif op_type == 'export':
sql = "update statistics set left_num=left_num-%s,start_id=%s where type='%s'"%(left, start_id, key_type)
elif op_type == 'retrieve':
sql = "update statistics set left_num=left_num+%s where type='%s'"%(left, key_type)
self.cur.execute(sql)
self.conn.commit()
def update_start_id(self, tb, start_id):
sql = "update statistics set start_id=%s where type='%s'" %(start_id, tb)
self.cur.execute(sql)
self.conn.commit()
def table_is_exist(self):
self.cur.execute("SELECT count(*) FROM information_schema.tables WHERE table_schema = 'hdcp' AND table_name ='user'")
return self.cur.fetchone()[0] == 1
def create_table_by_lot(self, lotID):
self.cur.execute("create table %s(waferID tinyint, key_type enum('1X_RX', '1X_TX', '2X_RX', '2X_TX'), x_coordinate smallint, y_coordinate smallint, id int)"%lotID)
def insert_value_by_lot(self, lotID, waferID, key_type, x, y, id_):
self.cur.execute("insert into %s values(%s,'%s',%s,%s,%s)"%(lotID, waferID, key_type, x, y, id_))
self.conn.commit()
def get_start_id(self, tb):
sql = "select start_id from statistics where type='%s'" % tb
self.cur.execute(sql)
return self.cur.fetchone()[0]
def get_new_id(self, tb):
sql = "select id from %s order by id desc limit 1" % tb
self.cur.execute(sql)
return self.cur.fetchone()[0]
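# Despite the name, this returns the current largest id; retrieve_key() adds
# 1 to obtain the next free id when re-numbering recycled keys.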
def get_key_id(self, sql):
self.cur.execute(sql)
return self.cur.fetchall()
def get_key(self, tb, id_):
self.cur.execute("select value from %s where id='%s'" % (tb, id_))
key_value = self.cur.fetchone()[0]
# self.cur.execute("update %s set is_use='yes' where id='%s'" % (tb, id))
return key_value
def get_mac(self, id_):
self.cur.execute("select mac_value from mac_addr where id='%s'" % id_)
mac_value = self.cur.fetchone()[0]
return mac_value
def get_key_info(self, target_table, id_):
self.cur.execute("select value from %s where id=%s" % (target_table, id_))
return self.cur.fetchone()[0]
def get_key_types(self, tb):
self.cur.execute("select distinct key_type from %s" % tb)
types_ = [item[0] for item in self.cur.fetchall()]
return types_
def get_del_ids(self, tb, key_type):
self.cur.execute("select id from %s where key_type='%s'" % (tb, key_type))
ids = [item[0] for item in self.cur.fetchall()]
return ids
def update_key_id(self, tb, cur_id, new_id):
self.cur.execute("update %s set id=%s where id=%s" % (tb, new_id, cur_id))
self.conn.commit()
def drop_table(self,tb):
self.cur.execute("drop table %s" % tb)
self.conn.commit()
def set_op_record(self, sql):
self.cur.execute(sql)
self.conn.commit()
def get_op_record(self, sql):
self.cur.execute(sql)
x1_tx, x1_rx, x2_tx, x2_rx = self.cur.fetchall()[0]
return x1_tx, x1_rx, x2_tx, x2_rx
def get_lot_info(self, lotID):
self.cur.execute("select * from lot where lotID='%s'" % lotID)
# fetchall() consumes the result set, so fetch once and reuse it
rows = self.cur.fetchall()
print rows
return rows[0]
def add_user(self, name, passwd):
self.cur.execute("insert into user values('%s','%s')"%(name, passwd))
self.conn.commit()
def get_ps(self, name):
self.cur.execute("select password from user where name='%s'"%name)
return self.cur.fetchone()[0]
def update_ps(self, name, ps):
self.cur.execute("update user set password='%s' where name='%s'"%(ps, name))
self.conn.commit()
def del_user(self, name):
self.cur.execute("delete from user where name='%s'"%name)
self.conn.commit()
def get_users(self):
self.cur.execute("select name from user")
users = []
for item in self.cur.fetchall():
users.extend(item)
return users
def get_statistics(self):
self.cur.execute("select type, total, left_num from statistics")
statis = {}
for item in self.cur.fetchall():
statis[item[0]] = (item[1], item[2])
return statis
def close(self):
self.cur.close()
self.conn.close()
if __name__ == '__main__':
redis_inst = RedisOperate()
mysql_inst = MysqlOperate(host='127.0.0.1', user='root', passwd='<PASSWORD>', db ='hdcp')
app = QtGui.QApplication(sys.argv)
# force first-run registration when no admin account exists yet, then log in
proceed = True
if 'admin' not in mysql_inst.get_users():
reg = Ui_Register(mysql_inst)
proceed = reg.exec_()
if proceed:
login = Ui_Login(mysql_inst)
if login.exec_():
ui = Ui_MainWindow(redis_inst, mysql_inst, login.current_user)
ui.show()
sys.exit(app.exec_())
# ui = Ui_MainWindow(redis_inst, mysql_inst, 'admin')
# ui.show()
# sys.exit(app.exec_())
mysql_inst.close()
redis_inst.r.save()
```
#### File: qa_study/hdcp_app/views.py
```python
from django.shortcuts import render
from hdcp_app.models import test_table
# Create your views here.
def test(request):
test_obj = test_table.objects.all()
return render(request,'hdcp_app/index.html',{'test_obj':test_obj})
#return HttpResponse("hello world")
def login(request):
pass
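# Hedged sketch: login above is an empty stub. One minimal implementation,
# assuming Django's built-in auth is used (the project may instead check its
# own user table, so treat this as illustrative only):
#
#   from django.contrib.auth import authenticate, login as auth_login
#
#   def login(request):
#       if request.method == 'POST':
#           user = authenticate(username=request.POST.get('username'),
#                               password=request.POST.get('password'))
#           if user is not None:
#               auth_login(request, user)
#               return render(request, 'hdcp_app/index.html', {})
#       return render(request, 'hdcp_app/login.html', {})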
```
#### File: IM_test/app_lib/caseReport.py
```python
import jinja2
import os
import socket
import globalVariable
import sendMail
template = jinja2.Template("""
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html lang='en'>
<head xmlns="http://www.w3.org/1999/xhtml">
<meta charset='utf-8' />
<title>Test results</title>
<style>
.title
{
text-align:center;
}
.report
{
font-family:"Trebuchet MS", Arial, Helvetica, sans-serif;
width:100%;
border-collapse:collapse;
}
.report td, .report th
{
font-size:1em;
border:1px solid #98bf21;
padding:3px 7px 2px 7px;
}
.report th
{
font-size:1.1em;
text-align:left;
padding-top:5px;
padding-bottom:4px;
background-color:#97CBFF;
color:#ffffff;
}
.report tr.alt td
{
color:#000000;
background-color:#EAF2D3;
}
caption
{
font-size:25px;
font-weight: bold
}
</style>
</head>
<body>
<h3 class='title'>
App Automation Report
</h3>
<table id="testruns" class="report">
<thead>
<tr>
<th style="width:10em">Timestamp</th>
<th style="width:30%">Test</th>
<th>Exit status</th>
<th>Notes</th>
<th style="width:5em">Duration</th>
</tr>
</thead>
<tbody>
{% for run in runs %}
<tr bgcolor="{{run.bg_color()}}">
<td>{{run.timestamp}}</td>
<td>{{run.test_name}}</td>
<td>
{% if run.exit_status == 'PASS(memory check)' %}
<a href = {{run.img_url()}}>{{run.exit_status}}</a>
{% else %}
{{run.exit_status}}
{% if run.exit_status not in ("", "PASS", "PASS(memory check)") %}
- <span>{{run.failure_reason|e}}</span>
{% endif %}
{% endif %}
</td>
<td>{{run.notes|e}}</td>
<td>{{run.duration}}</td>
</tr>
{% endfor %}
</tbody>
</table>
</body>
</html>
""")
class CaseInfo():
def __init__(self, *args, **kwargs):
self.__dict__.update(kwargs)
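# CaseInfo is a plain kwargs bag: every keyword becomes an attribute, which
# is exactly what the template above reads (timestamp, test_name,
# exit_status, failure_reason, notes, duration).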
def bg_color(self):
if self.exit_status == 'PASS' or self.exit_status == 'PASS(memory check)':
return "#edffe6"  # green: pass
elif self.exit_status == 'FAIL':
return "#ff9797"  # red: possible system-under-test failure
return ""  # empty string (not None) so the bgcolor attribute stays valid
def img_url(self):
if socket.gethostbyname(socket.gethostname()) != globalVariable.APACHE_SERVER:
img_url = 'http://%s/not_exist.jpg'%globalVariable.APACHE_SERVER
else:
img_url = os.path.join('http://%s'%globalVariable.APACHE_SERVER, globalVariable.IMAGE_DICT[self.test_name])
img_url = img_url.replace('\\', '/')
return img_url
class CaseFactory(object):
cases = []
def __init__(self, *args, **kwargs):
self.cases.append(CaseInfo(*args, **kwargs))
@classmethod
def create_html(cls, html = 'report.html'):
with open(html, "w+") as f:
f.write(template.render(name = 'app', runs = cls.cases))
@classmethod
def send_report(cls, report_time):
cls.create_html()
with open('report.html', 'r') as f:
content = f.read()
inst = sendMail.SendMail()
inst.send_html_mail('App Automation Report at %s'%report_time, content)
if __name__ == '__main__':
CaseFactory(timestamp = 1, test_name = 'test1', exit_status = 'PASS', failure_reason = 'success', duration = '30', notes = '')
CaseFactory(timestamp = 2, test_name = 'test2', exit_status = 'FAIL', failure_reason = 'not match', duration = '28', notes = '')
CaseFactory.create_html()
```
#### File: IM_test/app_lib/utils.py
```python
import os
import time
import shlex
import logging
import functools
import subprocess
import datetime
import psutil
def timeit(func):
# decorator: the wrapped call returns (elapsed timedelta, original result)
@functools.wraps(func)
def wrapper(*args, **kwargs):
start = datetime.datetime.now()
ret = func(*args, **kwargs)
end = datetime.datetime.now()
return end - start, ret
return wrapper
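# Usage sketch for timeit:
#
#   @timeit
#   def job():
#       ...
#
#   elapsed, result = job()   # elapsed is a datetime.timedelta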
def execCLI_wait(cmd_line, shell=True):
cmd_args = shlex.split(cmd_line, posix=False)
cmd_exec = subprocess.Popen(cmd_args,bufsize=0,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=shell)
output,strrout= cmd_exec.communicate()
cmd_exec.wait()
return (cmd_exec.returncode, output, strrout)
def execCLI(cmd_line, shell=True):
cmd_args = shlex.split(cmd_line, posix=False)
subprocess.Popen(cmd_args,bufsize=0,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=shell)
# output,strrout= cmd_exec.communicate()
# return output, strrout
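# execCLI is fire-and-forget: it never calls communicate()/wait(), so use
# execCLI_wait() whenever the caller needs the exit code or output.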
# def pKill(name = 'VideoCapture.exe'):
# cmd_line = 'taskkill /F /IM %s' % name
# cmd_args = shlex.split(cmd_line, posix = False)
# subprocess.call(cmd_args)
def pKill(log_obj, process = "VideoCapture.exe"):
# kill every running instance of `process`; a process may disappear between
# listing and inspection, so failures on individual pids are skipped
pidList = psutil.get_pid_list()
for eachPid in pidList:
try:
eachProcess = psutil.Process(eachPid)
processName = eachProcess.name()
except Exception:
continue
if processName == process:
try:
log_obj.info('kill process: <%s>' % process)
eachProcess.terminate()
except Exception:
pass
def Time2ISOString( s ):
return time.strftime("%H:%M:%S", time.localtime( float(s) ) )
if __name__ == "__main__":
cmd_str = os.path.join(os.path.dirname(os.getcwd()),'videoCapture','VideoCapture.exe')
for i in range(100):
try:
output, strrout = execCLI(cmd_str)
print 'output:',output
print 'strrout:',strrout
except Exception,e:
print e
time.sleep(2)
pKill()
print i
```
#### File: qa_study/IM_test/start.py
```python
import os
import app_lib
def main():
main_dir = os.getcwd()
logging_ins = app_lib.CollectSysLoging()
logging_ins.addSysLog(os.path.join(os.getcwd(), 'SYS_LOG', 'APP_sys_log.txt'))
logger_obj = logging_ins.getLogger()
config_ins = app_lib.ParseConfigFile(main_dir)
config_ins.readConfigFile()
statis_ins = app_lib.StatisticsCaseCount()
app_lib.ParseIrkey()
parse_tc_ins = app_lib.parseCase.ParseCase(main_dir, logger_obj)
case_info = parse_tc_ins.get_group()
app_lib.pKill(logger_obj)
app_lib.pKill(logger_obj, process = "ttermpro.exe")
while case_info is not None:
if case_info[0] == 'NonExist':
case_name = case_info[1]
logger_obj.info("run case: <%s>" % case_name)
log_folder = statis_ins.log_folder
case_inst = app_lib.CaseLog(case_name, log_folder)
ret = ('FAIL', 'case file does not exist')
statis_ins.add_case_result(case_name, ret)
app_lib.CaseFactory(timestamp = case_inst.curr_time, test_name = case_name,
exit_status = ret[0], failure_reason = ret[1], duration = 0, notes = '')
else:
case_name = case_info[0]
case_items = case_info[1]
case_content = case_info[2]
logger_obj.info("run case: <%s>" % case_name)
log_folder = statis_ins.log_folder
case_inst = app_lib.CaseLog(case_name, log_folder)
case_exec = app_lib.ExecCase(main_dir, logger_obj, case_inst, log_folder)
duration, ret = case_exec.exec_case_group(case_items, case_content)
statis_ins.add_case_result(case_name, ret)
app_lib.CaseFactory(timestamp = case_inst.curr_time, test_name = case_name,
exit_status = ret[0], failure_reason = ret[1], duration = duration, notes = '')
case_info = parse_tc_ins.get_group()
statis_ins.add_statistics_result()
app_lib.CaseFactory.send_report(statis_ins.curr_time)
logger_obj.info('all case done!')
logging_ins.closeHnadler()
if __name__ == '__main__':
main()
```
#### File: qa_study/IM_test/UI2.py
```python
import wx
import sent_key
import parseIrkey
#import format_case
import globalVariable
import os
import re
class ExamplePanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
self.main_path=os.getcwd()
self.IP="10.209.157.84"
self.port="7777"
self.card="BKS"
self.delay="1000"
self.temtext=""
self.name=""
self.Stream_Std="DTMB"
self.Stream_File="\\10.209.157.77"
self.Stream_Freq="584"
self.Stream_Bandwidth="4"
self.Stream_Modulation="4"
self.filename=[]
self.filepath=[]
self.is_import=1
self.tag=[]
self.key_index=1
self.TT_TAG=0
self.B1=[]
self.B2=[]
self.B3=[]
self.B4=[]
self.B5=[]
self.B6=[]
self.B7=[]
self.case_list=[]
# create some sizers
self.mainSizer = wx.BoxSizer(wx.VERTICAL)
self.hSizer = wx.BoxSizer(wx.HORIZONTAL)
self.grid = wx.GridBagSizer(hgap=5, vgap=5)
self.grid_stream = wx.GridBagSizer(hgap=5, vgap=5)
self.grid_box = wx.GridBagSizer(hgap=5, vgap=5)
self.grid_delay = wx.GridBagSizer(hgap=5, vgap=5)
self.grid_button = wx.GridBagSizer(hgap=5, vgap=5)
self.case = wx.GridSizer(20,4,0,0)
self.gs = wx.GridBagSizer(hgap=1, vgap=1)
self.gs_case = wx.GridBagSizer(hgap=1, vgap=1)
#self.gs = wx.GridSizer(20,3,0,0)
#input case box
self.logger = wx.TextCtrl(self, size=(320,580), style=wx.TE_MULTILINE )
self.Bind(wx.EVT_TEXT, self.BoxText, self.logger)
#video button
self.button_video_cap =wx.Button(self, label="video_cap",size=(70, 25))
self.button_video_cap.SetBackgroundColour("Navy")
self.button_video_cap.SetForegroundColour("white")
self.Bind(wx.EVT_BUTTON, self.video_cap,self.button_video_cap)
self.grid_button.Add(self.button_video_cap, pos=(3,1), span=(1,1))
#start button
self.button_start =wx.Button(self, label="start",size=(70, 25))
self.Bind(wx.EVT_BUTTON, self.start_case,self.button_start)
self.grid_button.Add(self.button_start, pos=(1,1), span=(1,1))
#SAVE button
self.button =wx.Button(self, label="Save",size=(70, 25))
self.Bind(wx.EVT_BUTTON, self.OnClick_SAVE,self.button)
self.grid_button.Add(self.button, pos=(1,0), span=(1,1))
#format case button
self.button =wx.Button(self, label="format",size=(70, 25))
self.Bind(wx.EVT_BUTTON, self.format_file,self.button)
self.grid_button.Add(self.button, pos=(1,2), span=(1,1))
"""
#Delete button
self.button =wx.Button(self, label="Delete")
self.Bind(wx.EVT_BUTTON, self.OnClick_Del,self.button)
self.grid_button.Add(self.button, pos=(1,2), span=(1,1))
"""
#Open case button
self.button =wx.Button(self, label="Open case",size=(70, 25))
self.Bind(wx.EVT_BUTTON, self.OnClick_Open_case,self.button)
self.grid_button.Add(self.button, pos=(2,1), span=(1,1))
#Open caselist button
self.button =wx.Button(self, label="caselist",size=(70, 25))
self.Bind(wx.EVT_BUTTON, self.OnClick_Open_case_list,self.button)
self.grid_button.Add(self.button, pos=(2,2), span=(1,1))
#scan case button
self.button =wx.Button(self, label="scan_case",size=(70, 25))
self.Bind(wx.EVT_BUTTON, self.OnClick_Scan_case,self.button)
self.grid_button.Add(self.button, pos=(2,0), span=(1,1))
# Radio Boxes
radioList = ['libra2', 'librasd', 'Sunplus']
self.rb = wx.RadioBox(self, label="Choose the IR_KEY MAP ", pos=(20, 210), choices=radioList, majorDimension=3,style=wx.RA_SPECIFY_COLS)
self.grid.Add(self.rb, pos=(0,0), span=(1,2))
self.Bind(wx.EVT_RADIOBOX, self.Evt_Change_IR_MAP, self.rb)
print "~~~~~~~~"
print self.rb.GetItemLabel(self.rb.GetSelection())
print "~~~~~~~~"
"""
#test type box
test_type = ['normal_test', 'mem_test']
self.tt = wx.RadioBox(self, label="Choose the test type ", pos=(20, 210), choices=test_type, majorDimension=2,style=wx.RA_SPECIFY_COLS)
self.grid.Add(self.tt, pos=(1,0), span=(1,2))
self.Bind(wx.EVT_RADIOBOX, self.Evt_test_type, self.tt)
"""
# Case name.
self.lblname = wx.StaticText(self, label="Case name :")
self.grid_box.Add(self.lblname, pos=(0,0))
self.editname = wx.TextCtrl(self, value="APP-", size=(140,-1))
self.grid_box.Add(self.editname, pos=(0,1))
self.Bind(wx.EVT_TEXT, self.EvtName, self.editname)
#input stream button
self.button_stream =wx.Button(self, label="-->",size=(70,25))
self.Bind(wx.EVT_BUTTON, self.OnClick_stream,self.button_stream)
self.grid_stream.Add(self.button_stream, pos=(0,0), span=(1,1))
#IP drop down box
self.sampleList = ['10.209.157.84', '10.209.157.83', '10.209.157.77']
self.lblhear = wx.StaticText(self, label=" ip")
self.grid_stream.Add(self.lblhear, pos=(1,0))
self.edithear = wx.ComboBox(self, value="10.209.156.203", size=(95, -1), choices=self.sampleList, style=wx.CB_DROPDOWN)
self.grid_stream.Add(self.edithear, pos=(1,1))
self.Bind(wx.EVT_COMBOBOX, self.EvtIP, self.edithear)
self.Bind(wx.EVT_TEXT, self.EvtIP,self.edithear)
#port drop down box
self.sampleList1 = ['7777', '8080', '21', '777']
self.lblhear = wx.StaticText(self, label=" port")
self.grid_stream.Add(self.lblhear, pos=(2,0))
self.edithear = wx.ComboBox(self, value="7777",size=(95, -1), choices=self.sampleList1, style=wx.CB_DROPDOWN)
self.grid_stream.Add(self.edithear, pos=(2,1))
self.Bind(wx.EVT_COMBOBOX, self.EvtPORT, self.edithear)
self.Bind(wx.EVT_TEXT, self.EvtPORT,self.edithear)
#card drop down box
self.sampleList2 = ['BKS', 'DTK']
self.lblhear = wx.StaticText(self, label=" card")
self.grid_stream.Add(self.lblhear, pos=(3,0))
self.edithear = wx.ComboBox(self,value="BKS", size=(95, -1), choices=self.sampleList2, style=wx.CB_DROPDOWN)
self.grid_stream.Add(self.edithear, pos=(3,1))
self.Bind(wx.EVT_COMBOBOX, self.EvtCARD, self.edithear)
self.Bind(wx.EVT_TEXT, self.EvtCARD,self.edithear)
#Stream_Bandwidth drop down box
self.sampleList3 = ['4', '8', '16', '32']
self.lblhear = wx.StaticText(self, label="Bandwidth")
self.grid_stream.Add(self.lblhear, pos=(4,0))
self.edithear = wx.ComboBox(self,value="4", size=(95, -1), choices=self.sampleList3, style=wx.CB_DROPDOWN)
self.grid_stream.Add(self.edithear, pos=(4,1))
self.Bind(wx.EVT_COMBOBOX, self.Evt_Stream_Bandwidth, self.edithear)
self.Bind(wx.EVT_TEXT, self.Evt_Stream_Bandwidth,self.edithear)
#Stream_Modulation drop down box
self.sampleList4 = ['4','8','16', '32', '64']
self.lblhear = wx.StaticText(self, label="Modulation")
self.grid_stream.Add(self.lblhear, pos=(5,0))
self.edithear = wx.ComboBox(self,value="4", size=(95, -1), choices=self.sampleList4, style=wx.CB_DROPDOWN)
self.grid_stream.Add(self.edithear, pos=(5,1))
self.Bind(wx.EVT_COMBOBOX, self.Evt_Stream_Modulation, self.edithear)
self.Bind(wx.EVT_TEXT, self.Evt_Stream_Modulation,self.edithear)
#Stream_Std drop down box
self.sampleList5 = ['DTMB','dvb']
self.lblhear = wx.StaticText(self, label=" Stream_Std")
self.grid_stream.Add(self.lblhear, pos=(6,0))
self.edithear = wx.ComboBox(self, value="DTMB",size=(95, -1), choices=self.sampleList5, style=wx.CB_DROPDOWN)
self.grid_stream.Add(self.edithear, pos=(6,1))
self.Bind(wx.EVT_COMBOBOX, self.Evt_Stream_Std, self.edithear)
self.Bind(wx.EVT_TEXT, self.Evt_Stream_Std,self.edithear)
#Stream_File chooser (button + text field)
self.sampleList6 = ['F:\\zhangq\\0.3.0_loader_stream.ts','\\\\10.209.156.121\\111.ts']
self.lblhear =wx.Button(self, label="Stream_File",size=(70, 25))
self.Bind(wx.EVT_BUTTON, self.Choose_ts_file,self.lblhear)
self.grid_stream.Add(self.lblhear, pos=(7,0),span=(1,1))
self.tsfile = wx.TextCtrl(self, value="input stream file path", size=(140,-1))
self.grid_stream.Add(self.tsfile, pos=(7,1))
#self.Bind(wx.EVT_COMBOBOX, self.Evt_Stream_File, self.tsfile)
self.Bind(wx.EVT_TEXT, self.Evt_Stream_File,self.tsfile)
#Stream_Freq drop down box
self.sampleList7 = ['474','714','666']
self.lblhear = wx.StaticText(self, label=" Stream_Freq")
self.grid_stream.Add(self.lblhear, pos=(8,0))
self.edithear = wx.ComboBox(self, value="474",size=(95, -1), choices=self.sampleList7, style=wx.CB_DROPDOWN)
self.grid_stream.Add(self.edithear, pos=(8,1))
self.Bind(wx.EVT_COMBOBOX, self.Evt_Stream_Freq, self.edithear)
self.Bind(wx.EVT_TEXT, self.Evt_Stream_Freq,self.edithear)
#hSizer.Add(self.button, 0, wx.CENTER)
self.grid.Add(self.grid_box, pos=(2,0))
self.grid.Add(self.grid_stream, pos=(4,0))
self.grid.Add(self.grid_button, pos=(5,0))
self.hSizer.Add(self.grid, 0, wx.ALL, 5)
"""
#add case_group_button
self.case = wx.GridSizer(1000,3,0,0)
self.case_list = self.scan_case((os.getcwd()+'\caseInfo'+'\Case'),postfix='.conf')
self.gs_case = wx.GridBagSizer()
self.Add_case(self.case_list)
self.gs_case.Add(self.case,pos=(0,0))
self.hSizer.Add(self.gs_case)
"""
#add logger window
self.hSizer.Add(self.logger)
self.Evt_Change_IR_MAP('libra2')
#self.hSizer.Add(self.gs)
#mainSizer.Add(self.gs, 0, wx.CENTER)
self.mainSizer.Add(self.hSizer, 0, wx.CENTER, 5)
self.s_key=sent_key.send_key()
self.SetSizerAndFit(self.mainSizer)
def scan_case(self,directory,prefix=None,postfix=None):
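# Walk `directory` recursively and collect (filename, full path) pairs,
# keeping only names that match the given postfix or prefix when one is set.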
files_list=[]
for root, sub_dirs, files in os.walk(directory):
for special_file in files:
if postfix:
if special_file.endswith(postfix):
files_list.append(((special_file),(os.path.join(root,special_file))))
elif prefix:
if special_file.startswith(prefix):
files_list.append(((special_file),(os.path.join(root,special_file))))
else:
files_list.append(((special_file),(os.path.join(root,special_file))))
return files_list
def EvtIP(self, event):
#self.logger.AppendText('EvtIP: %s\n' % event.GetString())
self.IP='%s' % event.GetString()
#self.logger.AppendText('%s\n' % self.IP)
def EvtPORT(self, event):
#self.logger.AppendText('EvtIP: %s\n' % event.GetString())
self.port='%s' % event.GetString()
#self.logger.AppendText('%s\n' % self.port)
def EvtCARD(self, event):
#self.logger.AppendText('EvtIP: %s\n' % event.GetString())
self.card='%s' % event.GetString()
#self.logger.AppendText('%s\n' % self.card)
def Evt_Stream_Std(self, event):
self.Stream_Std='%s' % event.GetString()
def Evt_Stream_File(self, event):
self.Stream_File='%s' % event.GetString()
def Evt_Stream_Freq(self, event):
self.Stream_Freq='%s' % event.GetString()
def Evt_Stream_Bandwidth(self, event):
self.Stream_Bandwidth='%s' % event.GetString()
def Evt_Stream_Modulation(self, event):
self.Stream_Modulation='%s' % event.GetString()
def EvtRadioBox(self, event):
self.logger.AppendText('%s' % event.GetString())
def EvtComboBox(self, event):
self.logger.AppendText('%s' % event.GetString())
def OnClick(self,event,mark):
if self.temtext:
index_list=re.findall(r"Input_(.)",self.temtext)
if index_list:
self.position=self.logger.GetInsertionPoint()
self.current_text=self.logger.GetRange(0,self.position)
self.tag=self.current_text.split("\n")
if self.tag[-2]:
print "~~~~~~~~~~~~~~~~~%s" % self.tag[-2]
if re.search(r',', self.tag[-2]): # previous line is already a comma-separated key line
self.is_import=0
self.logger.WriteText("%s , %s , F , 1\n" % (mark, self.delay))
#if self.is_import:
# self.logger.AppendText("[Input_%s]\nIRList=" % (str(self.key_index)))
if re.search("Input_(.)",self.tag[-2])or re.search('duration',self.tag[-2]):
self.logger.WriteText("IRList=%s , %s , F , 1\n" % (mark, self.delay))
print self.delay
self.is_import=1
self.s_key.send_one_key(mark)
def OnClick_SAVE(self,event):
#self.s_key.send_one_key('MENU')
#self.logger.AppendText(" Stream_ServerIp= %s\nStream_ServerPORT=%s\nCard_Type=%s\n" % (self.IP, self.port, self.card))
print self.name
main_dir=os.path.join(os.getcwd(),'caseInfo')
if self.name=="caselist.txt":
tmp_case=open(os.path.join(main_dir,'caselist.txt'),"wb")
tmp_case.write(self.temtext)
tmp_case.close()
else:
case=open(os.path.join(main_dir,'Case',self.name),"wb")
case.write(self.temtext)
case.close()
def OnClick_Scan_case(self,event):
self.case_list = self.scan_case((os.getcwd()+'\caseInfo'+'\Case'),postfix='.conf')
"""
#refresh case button
self.gs_case.Hide(self.case)
self.gs_case.Remove(self.case)
self.gs_case.Layout()
self.case = wx.GridSizer(20,1,0,0)
self.case_list = self.scan_case((os.getcwd()+'\caseInfo'+'\Case'),postfix='.conf')
self.Add_case(self.case_list)
self.gs_case.Add(self.case,pos=(0,0))
self.hSizer.Layout()
"""
#add cast list to caselist.txt
#f = open(os.path.join(main_dir,'caselist.txt'),"a")
#f.truncate()
self.editname.Clear()
self.editname.AppendText('%s' % 'caselist.txt')
self.logger.Clear()
for i in self.case_list:
self.logger.AppendText(i[0]+"\n")
#f.close()
"""
def OnClick_Del(self,event):
print self.name
file_dir=os.path.join(os.getcwd(),'caseInfo','Case',self.name)
print file_dir
os.remove (file_dir)
#refresh case button
self.gs_case.Hide(self.case)
self.gs_case.Remove(self.case)
self.case = wx.GridSizer(20,1,0,0)
self.case_list = self.scan_case((os.getcwd()+'\caseInfo'+'\Case'),postfix='.conf')
self.Add_case(self.case_list)
self.gs_case.Add(self.case,pos=(0,0))
self.hSizer.Layout()
"""
def Choose_file(self,event):
dialog = wx.FileDialog(self,"Open file...",os.getcwd(),style=wx.OPEN,wildcard="*.bmp")
if dialog.ShowModal() == wx.ID_OK:
self.filepath.append(dialog.GetPath())
self.filename.append(dialog.GetPath().split('\\')[-1])
print self.filename[-1]
self.logger.AppendText("bmpFile=%s\n" % (self.filename[-1]))
self.is_import=1
dialog.Destroy()
os.system ("copy %s %s" % (self.filepath[-1], os.path.join(self.main_path,'caseInfo', 'Case_pic')))
def Choose_ircase_file(self,event):
dialog = wx.FileDialog(self,"Open file...",os.getcwd(),style=wx.OPEN,wildcard="*.ircase")
if dialog.ShowModal() == wx.ID_OK:
filename=(dialog.GetPath().split('\\')[-1])
print self.filename
self.logger.AppendText("[Input_%s]\nIRFile=%s\n" % (str(self.key_index-1),filename))
self.is_import=1
dialog.Destroy()
def Choose_ts_file(self,event):
#dialog = wx.FileDialog(self,"Open file...",os.getcwd(),style=wx.OPEN,wildcard="TS files (*.ts)|*.ts|TRP files (*.trp)|*.trp")
#dialog = wx.FileDialog(self,"Open file...",defaultDir="//bjfile02/BJShare/Public/TS",style=wx.OPEN,wildcard="TS files (*.ts)|*.ts|TRP files (*.trp)|*.trp")
dialog = wx.FileDialog(self,"Open file...",defaultDir="//bjfile02/BJShare/Department/FAE/Soc/AVL8332/Stream/DTMB",style=wx.OPEN,wildcard="TS files (*.ts)|*.ts|TRP files (*.trp)|*.trp")
if dialog.ShowModal() == wx.ID_OK:
filename=(dialog.GetPath())
print self.filename
#self.logger.AppendText("[Input_%s]\nIRFile=%s\n" % (str(self.key_index-1),filename))
self.tsfile.Clear()
self.tsfile.AppendText('%s' % filename)
dialog.Destroy()
def OnClick_Open_case(self,event):
default_pwd="<PASSWORD>"
dialog = wx.FileDialog(self,"Open file...",defaultDir=default_pwd,style=wx.OPEN,wildcard="conf files (*.conf)|*.conf|all files (*.*)|*.*")
if dialog.ShowModal() == wx.ID_OK:
filepath=(dialog.GetPath())
filename=(dialog.GetPath().split('\\')[-1])
#filename.SetValue(filepath)
print filepath
f = open(filepath,"rb")
print f
content=f.read()
self.editname.Clear()
self.editname.AppendText('%s' % filename)
self.logger.Clear()
self.logger.AppendText("%s" % content)
print '~~~~~~~~~~~~'
print content
f.close()
dialog.Destroy()
def OnClick_Open_case_list(self,event):
dialog = wx.FileDialog(self,"Open file...",defaultDir="caseInfo",style=wx.OPEN,wildcard="*.txt")
if dialog.ShowModal() == wx.ID_OK:
filepath=(dialog.GetPath())
filename=(dialog.GetPath().split('\\')[-1])
#filename.SetValue(filepath)
print filepath
f = open(filepath,"rb")
print f
content=f.read()
self.editname.Clear()
self.editname.AppendText('%s' % filename)
self.logger.Clear()
self.logger.AppendText("%s" % content)
print '~~~~~~~~~~~~'
print content
f.close()
dialog.Destroy()
#fopen = open(self.filepath)
#fcontent = fopen.read()
#self.logger.AppendText("%s" % fcontent)
#contents.SetValue(fcontent)
#fopen.close()
def mem_file(self,event):
self.logger.AppendText("cmdfile=meminfo.txt\n");
def EvtText(self, event):
self.delay='%s' % event.GetString()
#self.logger.AppendText('EvtText: %s\n' % event.GetString())
def EvtChar(self, event):
self.logger.AppendText('EvtChar: %d\n' % event.GetKeyCode())
event.Skip()
"""
def Evt_test_type(self,event):
if self.TT_TAG==1:
self.Evt_Change_IR_MAP(self.rb.GetItemLabel(self.rb.GetSelection()))
if event.GetString()=='run_case':
self.case = wx.GridSizer(20,1,0,0)
#self.hSizer.Hide(self.gs)
#self.hSizer.Remove(self.gs)
#self.hSizer.Layout()
#self.gs_case = wx.GridBagSizer()
#self.Add_case(self.case_list)
#self.gs_case.Add(self.case,pos=(0,0))
#self.hSizer.Add(self.gs_case)
self.hSizer.Layout()
self.TT_TAG=1
elif event.GetString()=='mem_test':
self.logger.Clear()
self.logger.AppendText('[Input_1]\ncommand=meminfo\nduration=600\n')
elif event.GetString()=='normal_test':
self.logger.Clear()
#self.Evt_Change_IR_MAP(self.rb.GetItemLabel(self.rb.GetSelection()))
"""
def Evt_Change_IR_MAP(self, event):
self.TT_TAG=0
globalVariable.IRK_MAP = {}
#parseIrkey.ParseIrkey.insertIrk2Map()
self.B1=[]
self.B2=[]
self.B3=[]
self.B4=[]
self.B5=[]
self.B6=[]
self.B7=[]
if isinstance(event, wx._core.CommandEvent):
globalVariable.serial_config['target_type'] = ('%s' % event.GetString())
#print event.GetString()
else:
globalVariable.serial_config['target_type'] = ('%s' % event)
#print event
#ADD GS
self.hSizer.Hide(self.gs)
self.hSizer.Remove(self.gs)
self.hSizer.Layout()
globalVariable.IRK_MAP = {}
parseIrkey.ParseIrkey()
N=globalVariable.IRK_MAP
M=self.sort_IRK_MAP(N)
self.gs = wx.GridBagSizer()
self.gs1 = wx.GridBagSizer(1,3)
self.gs2 = wx.GridBagSizer(5,3)
self.gs3 = wx.GridBagSizer(5,3)
self.gs4 = wx.GridBagSizer(4,3)
self.gs5 = wx.GridSizer(1,3,0,3)
self.gs6 = wx.GridSizer(2,3,0,3)
self.gs7 = wx.GridSizer(20,3,0,3)
self.grid_delay = wx.GridBagSizer(hgap=5, vgap=5)
self.grid_tool = wx.GridBagSizer(hgap=5, vgap=5)
self.Add_button(self.B1)
self.Add_button2(self.B2)
self.Add_button3(self.B3)
self.Add_button4(self.B4)
self.Add_button5(self.B5)
self.Add_button6(self.B6)
self.Add_button7(self.B7)
self.Add_tool_button()
self.Add_grid_delay()
self.gs.Add(self.grid_tool,pos=(0,0))
self.gs.Add(self.grid_delay,pos=(1,0))
self.gs.Add(self.gs1,pos=(2,0))
self.gs.Add(self.gs2,pos=(3,0))
self.gs.Add(self.gs3,pos=(5,0))
self.gs.Add(self.gs4,pos=(4,0))
self.gs.Add(self.gs5,pos=(6,0))
self.gs.Add(self.gs6,pos=(7,0))
self.gs.Add(self.gs7,pos=(8,0))
self.hSizer.Add(self.gs)
self.hSizer.Layout()
def Add_case(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(50, 20))
#self.btn.SetBackgroundColour("gray")
#self.btn.SetForegroundColour("white")
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i : self.Run_Case(evt,mark) ,self.btn )
self.case.Add(self.btn, 0, wx.EXPAND)
def Add_grid_delay(self):
#delay time
self.lblname = wx.StaticText(self, label="delay time :")
self.grid_delay.Add(self.lblname, pos=(0,0))
self.delay_box = wx.TextCtrl(self, value="1000", size=(140,-1))
self.grid_delay.Add(self.delay_box, pos=(0,1))
self.Bind(wx.EVT_TEXT, self.EvtText, self.delay_box)
self.Bind(wx.EVT_CHAR, self.EvtChar, self.delay_box)
def Add_tool_button(self):
self.btn=wx.Button(self, label='input',size=(70, 22))
self.Bind(wx.EVT_BUTTON, lambda evt,mark='[Input_1]\n':self.OnClick_tool(evt,mark) ,self.btn )
self.grid_tool.Add(self.btn, pos=(0,0))
self.btn=wx.Button(self, label='output',size=(70, 22))
self.Bind(wx.EVT_BUTTON, lambda evt,mark='[Output_1]\n':self.OnClick_tool(evt,mark) ,self.btn )
self.grid_tool.Add(self.btn, pos=(1,0))
self.btn=wx.Button(self, label='meminfo',size=(70, 22))
self.Bind(wx.EVT_BUTTON, lambda evt,mark='command=meminfo\nduration=600\n':self.OnClick_tool(evt,mark) ,self.btn )
self.grid_tool.Add(self.btn, pos=(0,1))
#file button
self.button_file =wx.Button(self, label="BMP FILE",size=(70, 22))
self.Bind(wx.EVT_BUTTON, self.Choose_file,self.button_file)
self.grid_tool.Add(self.button_file, pos=(1,2), span=(1,1))
#ircase file button
self.button_file =wx.Button(self, label="ircase FILE",size=(70, 22))
self.Bind(wx.EVT_BUTTON, self.Choose_ircase_file,self.button_file)
self.grid_tool.Add(self.button_file, pos=(0,2), span=(1,1))
#mem file button
self.button_mem =wx.Button(self, label="mem_file",size=(70, 22))
self.Bind(wx.EVT_BUTTON, self.mem_file,self.button_mem)
self.grid_tool.Add(self.button_mem, pos=(1,1), span=(1,1))
def Add_button(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(70, 22))
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i[0] : self.OnClick(evt,mark) ,self.btn )
if i[0]=="POWER":
self.gs1.Add(self.btn, pos=(0,0))
if i[0]=="MUTE":
self.gs1.Add(self.btn, pos=(0,2))
#self.btn=wx.Button(self, label="")
self.btn=(wx.StaticText(self))
self.gs1.Add(self.btn, pos=(0,1),flag=wx.RIGHT, border=70)
#self.btn.SetBackgroundColour("Navy")
def Add_button2(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(70, 22))
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i[0] : self.OnClick(evt,mark) ,self.btn )
if re.match(r'[1-9]',i[0]):
self.gs2.Add(self.btn, pos=((int(i[0])-1)/3,(int(i[0])-1)%3))
#self.btn.SetBackgroundColour("Black")
if re.match(r'0',i[0]):
self.gs2.Add(self.btn, pos=(3,1))
if re.match(r'[pP][gG]\+',i[0]) or re.match(r'PgUp\+',i[0]):
self.gs2.Add(self.btn, pos=(3,0))
if re.match(r'[pP][gG]-',i[0]) or re.match(r'PgDn-',i[0]):
self.gs2.Add(self.btn, pos=(3,2))
self.btn.SetForegroundColour("orange")
def Add_button3(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(70, 22))
#self.btn.SetBackgroundColour("Blue")
#self.btn.SetForegroundColour("white")
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i[0] : self.OnClick(evt,mark) ,self.btn )
if i[0]=="RED":
self.gs3.Add(self.btn, pos=(0,1))
self.btn.SetBackgroundColour("RED")
if i[0]=="GREEN":
self.gs3.Add(self.btn, pos=(1,1))
self.btn.SetBackgroundColour("GREEN")
if i[0]=="YELLOW":
self.gs3.Add(self.btn, pos=(2,1))
self.btn.SetBackgroundColour("YELLOW")
if i[0]=="BLUE":
self.gs3.Add(self.btn, pos=(3,1))
self.btn.SetBackgroundColour("BLUE")
self.btn.SetForegroundColour("white")
if i[0]=="VOL+":
self.gs3.Add(self.btn, pos=(0,0))
if i[0]=="VOL-":
self.gs3.Add(self.btn, pos=(0,2))
if i[0]=="CH+":
self.gs3.Add(self.btn, pos=(1,0))
if i[0]=="CH-":
self.gs3.Add(self.btn, pos=(1,2))
if re.match(r'[tT][oO][Ee][nN][Dd]',i[0]) or re.match(r'>>\|',i[0]):
self.gs3.Add(self.btn, pos=(2,0))
if re.match(r'[Bb][sS][tT][Aa][Rr][tT]',i[0]) or re.match(r'\|<<',i[0]):
self.gs3.Add(self.btn, pos=(2,2))
if re.match(r'QPLAY',i[0]) or i[0]=='>>':
self.gs3.Add(self.btn, pos=(3,0))
if re.match(r'QBACK',i[0]) or i[0]=='<<':
self.gs3.Add(self.btn, pos=(3,2))
self.btn=(wx.StaticText(self))
self.gs3.Add(self.btn, pos=(4,0),flag=wx.RIGHT, border=70)
def Add_button4(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(70, 22))
self.btn.SetBackgroundColour("Gray")
self.btn.SetForegroundColour("white")
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i[0] : self.OnClick(evt,mark) ,self.btn )
if i[0]=="UP":
self.gs4.Add(self.btn,pos=(0,1))
self.btn.SetBackgroundColour("Black")
if i[0]=="DOWN":
self.gs4.Add(self.btn,pos=(2,1))
self.btn.SetBackgroundColour("Black")
if i[0]=="LEFT":
self.gs4.Add(self.btn,pos=(1,0))
self.btn.SetBackgroundColour("Black")
if i[0]=="RIGHT":
self.gs4.Add(self.btn,pos=(1,2))
self.btn.SetBackgroundColour("Black")
if i[0]=="OK":
self.gs4.Add(self.btn,pos=(1,1))
self.btn.SetBackgroundColour("Black")
if i[0]=="MENU":
self.gs4.Add(self.btn,pos=(0,0))
if i[0]=="EXIT":
self.gs4.Add(self.btn,pos=(0,2))
if re.match(r'[iI][nN][fF][oO]',i[0]):
self.gs4.Add(self.btn,pos=(2,0))
if re.match(r'DVR',i[0]) or re.match(r'[sS][aA][Tt]',i[0]):
self.gs4.Add(self.btn,pos=(2,2))
def Add_button5(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(70, 22))
self.btn.SetBackgroundColour("gray")
self.btn.SetForegroundColour("black")
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i[0] : self.OnClick(evt,mark) ,self.btn )
self.gs5.Add(self.btn, 0, wx.EXPAND)
def Add_button6(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(70, 22))
self.btn.SetBackgroundColour("gray")
if re.search(r'^[rR][Ee][cC]$',i[0]):
self.btn.SetForegroundColour("orange")
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i[0] : self.OnClick(evt,mark) ,self.btn )
self.gs6.Add(self.btn, 0, wx.EXPAND)
def Add_button7(self,M):
for i in M:
self.btn=wx.Button(self, label=i[0].lstrip(),size=(70, 22))
#self.btn.SetBackgroundColour("Gray")
#self.btn.SetForegroundColour("white")
self.Bind(wx.EVT_BUTTON, lambda evt, mark=i[0] : self.OnClick(evt,mark) ,self.btn )
self.gs7.Add(self.btn, 0, wx.EXPAND)
def sort_IRK_MAP(self,N):
#SORT THE DIR IR KEY
M = sorted(N.iteritems(), key=lambda item: item[0])
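# Bucket the keys into the remote's visual groups consumed by Add_button*:
# B1 = POWER/MUTE row, B2 = digits and PG+/PG-, B3 = colour keys plus
# VOL/CH and transport (|<<, >>|, <<, >>), B4 = navigation pad with
# MENU/EXIT/INFO/DVR, B5 = STOP/PLAY/PAUSE/START, B6 = AUDIO/EPG/LIST/
# REC/FIND, B7 = everything else.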
for i in M:
if re.match(r'[pP][gG]',i[0]):
#print i[0]
self.B2.append(i)
elif re.match(r'[tT][oO][Ee][nN][Dd]',i[0]) or re.match(r'>>\|',i[0]):
print i[0]
self.B3.append(i)
elif re.match(r'[Bb][sS][tT][Aa][Rr][tT]',i[0]) or re.match(r'\|<<',i[0]):
#print i[0]
self.B3.append(i)
elif re.match(r'QPLAY',i[0]) or i[0]=='>>' or re.match(r'QBACK',i[0]) or i[0]=='<<':
#print i[0]
self.B3.append(i)
elif re.match(r'VOL+',i[0]) or re.match(r'VOL-',i[0]):
#print i[0]
self.B3.append(i)
elif re.match(r'CH-',i[0]) or re.match(r'CH+',i[0]):
self.B3.append(i)
elif i[0]=="MENU" or i[0]=="EXIT" or re.match(r'[iI][nN][fF][oO]',i[0]) or i[0]=="DVR" or re.match(r'[Ss][aA][tT]',i[0]):
self.B4.append(i)
elif i[0]=="UP" or i[0]=="DOWN" or i[0]=="LEFT" or i[0]=="RIGHT" or i[0]=="OK" :
self.B4.append(i)
elif i[0]=="RED" or i[0]=="GREEN" or i[0]=="BLUE" or i[0]=="YELLOW":
self.B3.append(i)
elif re.match(r'POWER',i[0]) or re.match(r'MUTE',i[0]):
self.B1.append(i)
elif re.match(r'[sS][tT][oO][pP]',i[0]) or re.match(r'[pP][lL][aA][yY]',i[0]) or re.match(r'[pP][aA][uU][sS][eE]',i[0]) or re.match(r'[Ss][tT][Aa][rR][tT]',i[0]):
self.B5.append(i)
elif re.match(r'[aA][Uu][dD][Ii][oO]',i[0]) or re.match(r'[Tt][vV]/[rR]',i[0]) or re.match(r'[eE][Pp][gG]',i[0]) or re.search(r'[lL][Ii][sS][Tt]',i[0])or re.search(r'[Rr][Ee][Cc]',i[0])or re.search(r'[fF][Ii][nN][dD]',i[0]):
self.B6.append(i)
elif re.match(r'[0-9]',i[0]):
self.B2.append(i)
else:
self.B7.append(i)
for i in self.B2:
print i
return M
def BoxText(self, event):
self.temtext='%s' % event.GetString()
def OnClick_stream(self,event):
self.logger.AppendText("[Stream_1]\nStream_ServerIp= %s\nStream_ServerPORT=%s\nCard_Type=%s\nStream_Std=%s\nStream_File=%s\nStream_Freq=%s\nStream_Bandwidth=%s\nStream_Modulation=%s\n" % (self.IP, self.port, self.card,self.Stream_Std,self.Stream_File,self.Stream_Freq,self.Stream_Bandwidth,self.Stream_Modulation))
self.is_import=1
def OnClick_tool(self,event,mark):
self.logger.AppendText("%s" % mark)
#self.logger.AppendText("aaaa")
def EvtName(self, event):
self.name='%s' % event.GetString()
def start_case(self, event):
sent_key.main().main()
def video_cap(self, event):
wx.Execute('VideoCapture.exe')
def format_file(self,event):
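# Check that the [STREAM]/[SOUTPUT]/[INPUT]/[OUTPUT] section headers of the
# case text alternate legally (the pattern checks below), then rewrite the
# headers with sequential numbering and reload the result into the logger.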
#all_lines = self.temtext.readlines()
all_lines = self.temtext.split("\n")
print all_lines
def findkeyline(strline):
regex=re.compile(r'\[.*\]')
if re.search(regex, strline):
return True
else:
return False
all_list = filter(findkeyline, all_lines)
#print all_list
n=0
for ll in all_list:
n += 1
if(n>=len(all_list)):
break
#print ll[0].upper(),all_list[n][0].upper()
if re.search(r"STREAM", ll.upper()):
pattern=re.compile(r'STREAM|INPUT|SOUTPUT')
if pattern.search(all_list[n].upper()):
pass
else:
print 'line: ', all_list[n], ' with errors!'
return False
elif re.search(r"SOUTPUT", ll.upper()):
pattern=re.compile(r'STREAM|INPUT')
if pattern.search(all_list[n].upper()):
pass
else:
print 'line: ', all_list[n], ' with errors!'
return False
elif re.search(r"INPUT", ll.upper()):
pattern=re.compile(r'OUTPUT')
if pattern.search(all_list[n].upper()):
pass
else:
print 'line: ', all_list[n], ' with errors!'
return False
elif re.search(r"OUTPUT", ll.upper()):
pattern=re.compile(r'STREAM|INPUT')
if pattern.search(all_list[n].upper()):
pass
else:
print 'line: ', all_list[n], ' with errors!'
return False
print "check case file success!"
stream_num=0
input_num=0
new_lines=''
for line in all_lines:
new='';
if re.search(r"\[STREAM", line.upper()):
stream_num += 1
if re.search(r'_', line):
new = "%s_%d]\n"%(line.split('_')[0],stream_num)
else:
new = "%s_%d]\n"%(line.split(']')[0],stream_num)
elif re.search(r"\[SOUTPUT", line.upper()):
if re.search(r'_', line):
new = "%s_%d]\n"%(line.split('_')[0],stream_num)
else:
new = "%s_%d]\n"%(line.split(']')[0],stream_num)
elif re.search(r"\[INPUT", line.upper()):
input_num += 1
if re.search(r'_', line):
new = "%s_%d]\n"%(line.split('_')[0],input_num)
else:
new = "%s_%d]\n"%(line.split(']')[0],input_num)
elif re.search(r"\[OUTPUT", line.upper()):
if re.search(r'_', line):
new = "%s_%d]\n"%(line.split('_')[0],input_num)
else:
new = "%s_%d]\n"%(line.split(']')[0],input_num)
else:
new = line+'\n'
new_lines += new
print "format case file success!"
print new_lines
#return new_lines
self.logger.Clear()
self.logger.AppendText("%s" % new_lines)
"""
def format_case(self, event):
format_file=os.path.join(os.getcwd(),'caseInfo','Case',self.name)
#wx.Execute('python %s %s' % (format_tool,self.name))
format_fun=format_case.format_case()
format_fun.format_file(format_file)
f = open(format_file,"rb")
print f
content=f.read()
self.logger.Clear()
self.logger.AppendText("%s" % content)
print '~~~~~~~~~~~~'
print content
f.close()
#print format_tool
"""
def Run_Case(self,event,mark):
print mark[1]
self.editname.Clear()
self.editname.AppendText('%s' % mark[0])
case=open(mark[1],"rb")
case_content=case.read()
case.close()
self.logger.Clear()
self.logger.AppendText(case_content)
app = wx.App()
frame = wx.Frame(None,size=(800,630),pos=(0,0))
panel = ExamplePanel(frame)
frame.Show()
app.MainLoop()
``` |
{
"source": "joakimzhang/qtest",
"score": 3
} |
#### File: qtest/Ts_app/switch_stream.py
```python
import socket
import sys
import re
from optparse import OptionParser
import time
class ConnectSocket(object):
def __init__(self, ip_addr, port):
self.host = ip_addr
self.port = port
self.socket_fd = None
try:
self.socket_fd = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket_fd.connect((self.host, self.port))
except socket.error, msg:
print 'Failed to create socket. Error code: ' + str(msg[0])\
+ ' , Error message : ' + msg[1]
sys.exit()
def recevieResult(self):
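# Read the reply one byte at a time until the 5-second timeout fires;
# the remote end speaks UTF-16LE (see sendCmd below).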
result = ''
self.socket_fd.settimeout(5)
while 1:
try:
result = result + self.socket_fd.recv(1)
except socket.timeout:
# print 'timeout exception'
break
# print 'sss',unicode(result, 'utf-16le')
return unicode(result, 'utf-16le')
def sendCmd(self, cmd):
cmd_str = cmd.encode('utf-16le')
self.socket_fd.send(cmd_str)
def closeSocket(self):
self.socket_fd.close()
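# Illustrative helper (an assumption, not used by the code below) showing
# the send/expect pattern exec_switch() repeats for every TSRunner command:
# send, read the reply, and grep it for an acknowledgement substring such
# as 'Success' or 'Set Frequence' (ack strings read off the checks below).
def send_and_expect(socket_con, cmd, ack):
socket_con.sendCmd(cmd)
res = socket_con.recevieResult()
return re.search(ack, res) is not None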
def arguParse(argus):
parser = OptionParser(usage=("usage:%prog [OPTIONS]"
" -H hostname "
" -P port "
" -f test stream location"
" --std test stream standard"
" -F frequency number"
" -S symbol number"))
parser.add_option("-H", "--HOSTNAME",
action="store",
dest="hostname",
default=None,
help="Specify host name"
)
parser.add_option("-P", "--PORT",
action="store",
type='int',
dest="port",
default=None,
help="Specify port number"
)
parser.add_option("-f", "--TS",
action="store",
dest="ts",
default=None,
help="Specify test stream location"
)
parser.add_option("--std", "--STD",
action="store",
dest="std",
default=None,
help="Specify test stream standard"
)
parser.add_option("-F", "--FREQ",
action="store",
type='int',
dest="freq",
default=None,
help="Specify frequence number"
)
'''DVB-S mode'''
parser.add_option("-S", "--syml",
action="store",
type='int',
dest="syml",
default=27500,
help="Specify symbol number"
)
'''DTMB mode'''
parser.add_option("--BW", "--BandWidth",
action="store",
type='int',
dest="bandwidth",
default=None,
help="Specify bandwidth number"
)
parser.add_option("-M", "--Modulation",
action="store",
type='int',
dest="modulation",
default=None,
help="Specify Modulation number"
)
parser.add_option("--FM", "--FrameMode",
action="store",
type='int',
dest="framemode",
default=None,
help="Specify framemode number"
)
parser.add_option("--CR", "--CodeRate",
action="store",
type='int',
dest="coderate",
default=None,
help="Specify coderate number"
)
parser.add_option("--CM", "--CarrierMode",
action="store",
type='int',
dest="carriermode",
default=None,
help="Specify carriermode number"
)
'''
parser.add_option("-u", "--url",
action = "store_true",
dest = "url",
default = False,
help = "Specify if the target is an URL"
)
'''
options, _ = parser.parse_args(argus)
if options.hostname is None:
parser.description
print 'Hostname is incorrect'
print parser.usage
sys.exit()
elif options.port is None:
parser.description
print 'Port is incorrect'
print parser.usage
sys.exit()
elif options.ts is None:
parser.description
print 'Test stream location is incorrect'
print parser.usage
# sys.exit()
elif options.std is None:
parser.description
print 'Test stream standard is incorrect'
print parser.usage
sys.exit()
if options.std == 'DVB-S':
if options.freq is None:
parser.description
print 'Frequency number is incorrect'
print parser.usage
sys.exit()
elif options.syml is None:
parser.description
print 'Symbol number is incorrect'
print parser.usage
sys.exit()
elif options.std == 'DTMB':
if options.freq is None:
parser.description
print 'Frequency number is incorrect'
print parser.usage
sys.exit()
# elif options.modulation is None:
# parser.description
# print 'Modulation number is incorrect'
# print parser.usage
# sys.exit()
'''
print options.hostname
print options.port
print options.ts
print options.freq
print options.syml
'''
return options
def exec_switch(argv_stream):
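# Negative return codes used below: -1 play failed, -2 chstd failed,
# -3 loadfile failed (DVB-S path), -4 freq failed, -5 BandWidth, -6
# Modulation, -7 CodeRate, -8 FrameMode, -9 CarrierMode (DTMB) or
# SymbolRate (DVB-S), -10 Coderate (DVB-S). The DTMB branch wraps codes
# in lists and otherwise returns the collected replies in result_message.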
options = arguParse(argv_stream)
result_message = []
# Connect remote server
# socket_con = ConnectSocket(options.hostname,options.port)
# socket_con.recevieResult()
try:
socket_con = ConnectSocket(options.hostname, options.port)
except:
# return -99
return ["can not connect to: %s" % options.hostname]
res = socket_con.recevieResult()
if re.search('Welcome', res):
pass
else:
# return -99
return res
# Stop ts
# socket_con.sendCmd('stop')
# socket_con.recevieResult()
if options.std in ('DVB-S', 'DVBS'): # arguParse validates 'DVB-S'; accept both spellings
# Set std value
std_name = 'dvbs'
print 'std %s' % std_name
socket_con.sendCmd('chstd %s' % std_name)
res = socket_con.recevieResult()
if re.search('Change STD', res):
pass
else:
socket_con.closeSocket()
return -2
# Load ts to TSRunner
print 'loadfile %s' % options.ts
socket_con.sendCmd('loadfile %s' % options.ts)
res = socket_con.recevieResult()
if re.search('Success', res):
pass
else:
socket_con.closeSocket()
return -3
# Set frequency value
print 'freq %d' % options.freq
socket_con.sendCmd('freq %d' % options.freq)
res = socket_con.recevieResult()
if re.search('Set Frequence', res):
pass
else:
socket_con.closeSocket()
return -4
# Set symbol rate value
print 'Syml %d' % options.syml
socket_con.sendCmd('chpara SymbolRate %d' % options.syml)
res = socket_con.recevieResult()
if re.search('Set', res):
pass
else:
socket_con.closeSocket()
return -9
# Set code rate value
socket_con.sendCmd('chpara Coderate %d' % (options.coderate-1))
res = socket_con.recevieResult()
if re.search('Para Name Error', res):
print "Code Rate is failed for setting 2/3,\
the code rate will use default value"
socket_con.closeSocket()
return -10
elif options.std == 'DTMB':
# Set std value
std_name = 'dtmb'
print 'std %s' % options.std
socket_con.sendCmd('chstd %s' % std_name)
res = socket_con.recevieResult()
result_message.append(res)
if re.search('Change STD', res):
pass
else:
socket_con.closeSocket()
return [-2]
# Load ts to TSRunner
print 'loadfile %s' % options.ts
socket_con.sendCmd('loadfile %s' % options.ts)
for i in range(5):
res = socket_con.recevieResult()
if re.search('Success', res):
# pass
break
else:
# socket_con.closeSocket()
time.sleep(1)
if i == 4:
socket_con.closeSocket()
# return -3
return ['load file fail:', options.ts]
# result_message = result_message + res + '<br/>'
result_message.append(res)
result_message.append(options.ts)
# Set frequency value
print 'freq %d' % options.freq
socket_con.sendCmd('freq %d' % options.freq)
res = socket_con.recevieResult()
# result_message = result_message + res + '\n'
result_message.append(res)
if re.search('Set Frequence', res):
pass
else:
socket_con.closeSocket()
return [-4]
# Set bandwith value
if options.bandwidth:
print 'bandwidth %d' % options.bandwidth
cmd_bw = ''
if options.bandwidth == 1:
cmd_bw = 'chpara BandWidth 3'
elif options.bandwidth == 2:
cmd_bw = 'chpara BandWidth 2'
elif options.bandwidth == 3:
cmd_bw = 'chpara BandWidth 1'
elif options.bandwidth == 4:
cmd_bw = 'chpara BandWidth 0'
socket_con.sendCmd(cmd_bw)
res = socket_con.recevieResult()
# result_message = result_message + res + '\n'
result_message.append(res)
if re.search('Set BandWidth', res):
pass
else:
socket_con.closeSocket()
return [-5]
# Set modulation value
if options.modulation:
print 'modulation %d' % options.modulation
socket_con.sendCmd('chpara Modulation %d' % (options.modulation-1))
res = socket_con.recevieResult()
result_message.append(res)
# result_message = result_message + res + '\n'
if re.search('Set Modulation', res):
pass
else:
socket_con.closeSocket()
return [-6]
# Set modulation value
if options.framemode:
print 'framemode %d' % options.framemode
socket_con.sendCmd('chpara FrameMode %d' % (options.framemode-1))
res = socket_con.recevieResult()
# result_message = result_message + res + '\n'
result_message.append(res)
if re.search('Set FrameMode', res):
pass
else:
socket_con.closeSocket()
return [-8]
# Set coderate value
if options.coderate:
print 'coderate %d' % options.coderate
socket_con.sendCmd('chpara CodeRate %d' % (options.coderate-1))
res = socket_con.recevieResult()
# result_message = result_message + res + '\n'
result_message.append(res)
if re.search('Set CodeRate', res):
pass
else:
socket_con.closeSocket()
return [-7]
# Set carriermode value
if options.carriermode:
print 'carriermode %d' % options.carriermode
socket_con.sendCmd(
'chpara CarrierMode %d' % (options.carriermode-1))
res = socket_con.recevieResult()
result_message.append(res)
if re.search('Set CarrierMode', res):
pass
else:
socket_con.closeSocket()
return [-9]
elif options.std == "custom":
socket_con.sendCmd('loadfile %s' % options.ts)
for i in range(5):
res = socket_con.recevieResult()
if re.search('Success', res):
# pass
break
else:
# socket_con.closeSocket()
time.sleep(1)
if i == 4:
socket_con.closeSocket()
# return -3
return ['load file fail:', options.ts]
# result_message = result_message + res + '<br/>'
result_message.append(res)
result_message.append(options.ts)
elif options.std == "mode":
if options.modulation:
print 'modulation %d' % options.modulation
socket_con.sendCmd('chpara Modulation %d' % (options.modulation-1))
res = socket_con.recevieResult()
result_message.append(res)
# result_message = result_message + res + '\n'
if re.search('Set Modulation', res):
pass
else:
socket_con.closeSocket()
return [-6]
# Set modulation value
if options.framemode:
print 'framemode %d' % options.framemode
socket_con.sendCmd(
'chpara FrameMode %d' % (options.framemode-1))
res = socket_con.recevieResult()
# result_message = result_message + res + '\n'
result_message.append(res)
if re.search('Set FrameMode', res):
pass
else:
socket_con.closeSocket()
return [-8]
# Set coderate value
if options.coderate:
print 'coderate %d' % options.coderate
socket_con.sendCmd(
'chpara CodeRate %d' % (options.coderate-1))
res = socket_con.recevieResult()
# result_message = result_message + res + '\n'
result_message.append(res)
if re.search('Set CodeRate', res):
pass
else:
socket_con.closeSocket()
return [-7]
if options.carriermode:
print 'carriermode %d' % options.carriermode
socket_con.sendCmd(
'chpara CarrierMode %d' % (options.carriermode-1))
res = socket_con.recevieResult()
result_message.append(res)
if re.search('Set CarrierMode', res):
pass
else:
socket_con.closeSocket()
return [-9]
# Play ts
socket_con.sendCmd('play')
res = socket_con.recevieResult()
if re.search('Success', res) or re.search('Already Playing!', res):
pass
else:
socket_con.closeSocket()
return [-1]
socket_con.closeSocket()
return result_message
# return 0
if __name__ == '__main__':
r = exec_switch()
print r
``` |
{
"source": "joakiti/Benchmark-SubsetSums",
"score": 4
} |
#### File: Benchmark-SubsetSums/FFT/FastFourierTransform.py
```python
import math
import random
import numpy as np
from numpy.fft import fft, ifft
from numpy import multiply
def dft(x):
"""
Naive O(N^2) DFT; works fine. Taken from https://jakevdp.github.io/blog/2013/08/28/understanding-the-fft/
:param x:
:return:
"""
x = np.array(x, dtype=float)
N = len(x)
n = np.arange(N)
k = n.reshape((N, 1))
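# M is the N x N DFT matrix with entries e^(-2*pi*i*k*n/N); the dot
# product below evaluates the input at all N-th roots of unity.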
M = np.exp(-2j * np.pi * k * n / N)
return np.dot(M, x)
def idft(x):
"""
Inverse DFT; the input spectrum is complex, so it must not be cast to float.
:param x:
:return:
"""
array = np.asarray(x, dtype=complex)
# array length
N = array.shape[0]
# new array of length N [0, N-1]
n = np.arange(N)
k = n.reshape((N, 1))
# Calculate the exponential of all elements in the input array.
M = np.exp(2j * np.pi * k * n / N)
return 1 / N * np.dot(M, array)
def FFT(P):
"""
It works, after adding - to the root of unity.
:param P: Coefficient representation of P
:return: point value representation of P evaluated at roots of unity of order len(P)
"""
n = len(P) # perhaps pad to power of 2
if n % 2 > 0:
raise ValueError("must be a power of 2")
if n <= 2:
return dft(P)
Peven, Podd = P[::2], P[1::2]
valsEven = FFT(Peven)
valsOdd = FFT(Podd)
vals = [0] * n
ω = pow(math.e, -2j * math.pi / n)
return mergeSolutions(n, vals, valsEven, valsOdd, ω)
def mergeSolutions(n, vals, valsEven, valsOdd, ω):
for i in range(int(n / 2)):
vals[i] = valsEven[i] + pow(ω, i) * valsOdd[i]
vals[i + int(n / 2)] = valsEven[i] - pow(ω, i) * valsOdd[i]
return vals
def IFFT(P):
"""
Inverse of FFT: maps point values at the roots of unity back to coefficients.
:param P: point value representation of P evaluated at roots of unity
:return: P in coefficient representation
"""
return np.multiply(private_IFFT(P), np.full((len(P)), 1 / len(P)))
def private_IFFT(P):
"""Unscaled inverse FFT: the FFT butterfly with the conjugate root of
unity e^(+2*pi*i/n); IFFT divides by n once at the top level.
"""
n = len(P) # perhaps pad to power of 2
if n % 2 > 0:
raise ValueError("must be a power of 2")
if n <= 2:
# for n <= 2 the unscaled inverse kernel equals the forward one, but
# the input is complex here, so do not route through dft()
P = np.asarray(P, dtype=complex)
return P if n == 1 else np.array([P[0] + P[1], P[0] - P[1]])
Peven, Podd = P[::2], P[1::2]
valsEven = private_IFFT(Peven)
valsOdd = private_IFFT(Podd)
vals = [0] * n
ω = pow(math.e, 2j * math.pi / n)
return mergeSolutions(n, vals, valsEven, valsOdd, ω)
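# Quick self-check (a sketch, assuming the inverse transform behaves as
# described above): FFT should agree with numpy's fft on power-of-two
# input, and IFFT should invert it.
if __name__ == '__main__':
P = [random.randint(0, 9) for _ in range(8)]
assert np.allclose(FFT(P), fft(P))
assert np.allclose(IFFT(FFT(P)), P)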
```
#### File: Benchmark-SubsetSums/Implementations/DivideAndConquerRunner.py
```python
import math
from Implementations.Interfaces.IDeterministicAlgorithm import IDeterministicAlgorithm
from Implementations.helpers import Helper
from Implementations.helpers.Helper import ListToPolynomial, toNumbers, sumSet
class DivideAndConquerRunner(IDeterministicAlgorithm):
def __init__(self, label, benchmarkMode=False):
self.label = label
self.benchmarkMode = benchmarkMode
def run(cls, values, target):
return Helper.divideAndConquerSumSet(values, target)
```
#### File: Implementations/FasterSubsetSum/RandomizedAdaptiveFunction.py
```python
from Implementations.helpers.ComputeLayersByMass import ComputeLayersByMass
from Implementations.helpers.Helper import padWithZero, ListToPolynomial
from Implementations.Interfaces.IRandomizedWithPartitionFunction import IRandomizedWithPartitionFunction
import numpy as np
class RandomizedAdaptiveFunction(IRandomizedWithPartitionFunction):
def __init__(self, debug, repetitions):
self.analyzer = None
super().__init__(debug,
None,
None,
None,
repetitions)
def fasterSubsetSum(self, Z, t, delta):
self.analyzer = ComputeLayersByMass(Z, t)
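# Re-run the parent initializer now that the analyzer exists, so the
# layer/interval/solution-size functions come from the observed mass.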
super().__init__(self.debug,
self.analyzer.layerFunction(),
self.analyzer.intervalFunction(),
self.analyzer.retrieveSolutionSizeFunction(),
self.repetitions)
return super().fasterSubsetSum(Z, t, delta)
def partitionIntoLayers(self, Z, n, t):
Zi = [self.layerFunction(Z, n, t, sample) for sample in self.intervalFunction(Z, n, t)]
if self.debug:
self.layerInformation = list()
sampleKey = 0
for sample in self.intervalFunction(Z, n, t):
self.layerInformation.append((len(Zi[sampleKey]), sample[1]))
sampleKey += 1
self.layerInformation.append((0, 0))
for i in range(len(Zi)):
if len(Zi[i]) < 1:
Zi[i] = padWithZero([])
Zi = np.array(list(map(ListToPolynomial, Zi)))
return Zi
```
#### File: Implementations/FasterSubsetSum/RandomizedBaseLessRepetitions.py
```python
import math
import numpy as np
from Implementations.helpers.Helper import ListToPolynomial, toNumbers
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
class RandomizedBaseLessRepetitions(NearLinearBase):
def color_coding(self, Z, t, k, delta):
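# Color coding, roughly in the spirit of Bringmann's near-linear subset
# sum: scatter the elements over k^3 random buckets (this variant uses
# extra buckets so fewer repetitions suffice) so that, with good
# probability, no bucket holds two members of any one size-<=k solution;
# folding the buckets together with sumSet then preserves every such
# solution, and the repetitions below push the failure rate under delta.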
if len(Z) == 1:
return [1]
if self.repetitions == 0:
# if math.log(t, 1.05) >= self.n:
# repetitions = 5
# else:
# repetitions = 1
repetitions = math.log(1.0 / delta, 17.0 / 7.0)
else:
repetitions = self.repetitions
S = [[] for _ in range(math.ceil(repetitions))]
for j in range(0, math.ceil(repetitions)):
partition = self.partitionSetIntoK(Z, k * k * k) # max(int(k*k//2), 2))
sumset = partition[0]
for i in range(1, len(partition)):
sumset = self.sumSet(sumset, partition[i], t)
S[j] = sumset
union = np.array(S[0])
for j in range(1, len(S)):
if len(S[j]) > len(union):
S[j][np.nonzero(union)[0]] = 1
union = S[j]
else:
union[np.nonzero(S[j])[0]] = 1
return list(union)
```
#### File: Implementations/FasterSubsetSum/RandomizedDoesLowerBoundMakeDifference.py
```python
import math
import numpy as np
from Implementations.helpers.Helper import ListToPolynomial, toNumbers, padWithZero
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
class RandomizedLowerBoundDifference(NearLinearBase):
def __init__(self, debug):
super().__init__(debug)
def fasterSubsetSum(self, Z, t, delta):
n = len(Z)
Z = np.array(Z)
Z = Z[Z <= t]
Zi, minimals = self.partitionIntoLayers(Z, n, t)
S = [1]
for i in range(0, len(Zi)):
z = np.array(Zi[i])
if len(z) > 1:
z = ListToPolynomial(z)
Si = self.ColorCodingLayer(z, t, int(t // max(minimals[i], 1)), delta / len(Zi))
S = self.sumSet(Si, S, t)
return toNumbers(S)
def partitionIntoLayers(self, Z, n, t):
Zi = [Z[(t / pow(2, i) <= Z) & (Z < t / pow(2, i - 1))] for i in
range(1, math.ceil(math.log2(n)))]
Zi.append(Z[(0 <= Z) & (Z < t / pow(2, math.ceil(math.log2(n)) - 1))])
if self.debug:
self.layerInformation = list()
for i in range(len(Zi)):
self.layerInformation.append((len(Zi[i]), t / pow(2, i)))
self.layerInformation.append((len(Zi[len(Zi) - 1]), 0))
for i in range(len(Zi)):
if len(Zi[i]) < 1:
Zi[i] = padWithZero([])
minimals = [min(z) for z in Zi]
return Zi, minimals
```
#### File: Implementations/FasterSubsetSum/RandomizedMultiThreadedVer3.py
```python
import math
import os
from collections import defaultdict
import threading
import concurrent.futures
import time
from concurrent.futures._base import as_completed
from multiprocessing import Process
from parfor import parfor
import numpy as np
import shutil
from joblib import Parallel, delayed, dump, load, parallel_backend
from joblib.externals.loky import set_loky_pickler
from scipy.signal import fftconvolve
from Implementations.helpers.Helper import toNumbers, ListToPolynomial
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
class RandomizedMultiThreadedVer3(NearLinearBase):
def __init__(self, debug, repetitions, threads):
super().__init__(debug, repetitions)
self.threads = threads
def sumSet(self, A, B, threshold):
Apoly = ListToPolynomial(A)
Bpoly = ListToPolynomial(B)
eps = 0.0001 # account for floating error
AsumsetB = fftconvolve(Apoly, Bpoly)
return toNumbers(np.select([AsumsetB[:int(threshold + 1)] > eps], [1]))
def color_coding(self, Z, t, k, delta):
if len(Z) == 1:
return [0, Z[0]]
if self.repetitions == 0:
# if math.log(t, 1.05) >= self.n:
# repetitions = 5
# else:
# repetitions = 1
repetitions = math.log(1.0 / delta, 4.0 / 3.0)
else:
repetitions = self.repetitions
S = [[] for _ in range(math.ceil(repetitions))]
for j in range(0, math.ceil(repetitions)):
partitions = self.partitionSetIntoK(Z, k * k)
if len(partitions) == 1:
return partitions[0]
sumset = partitions[0]
for i in range(1, len(partitions)):
sumset = self.sumSet(sumset, partitions[i], t)
S[j] = sumset
# partitionPerThread = divmod(len(partitions), self.threads)
# index = 0
# threadListWork = list()
# for i in range(self.threads):
# if i == self.threads - 1:
# threadListWork.append((index, index + partitionPerThread[0] + partitionPerThread[1]))
# continue
# else:
# threadListWork.append((index, index + partitionPerThread[0]))
# index = index + partitionPerThread[0]
#
# #(list for pair in threadListWork for list in partitions[pair[0], pair[1])
# @parfor(threadListWork, nP=self.threads, rP=1, serial=1)
# def combinePartitions(x):
# start = partitions[x[0]]
# for o in range(x[0], x[1]):
# start = self.sumSet(start, partitions[o], t)
# return start
# partialSumsets = combinePartitions
# sumset = partialSumsets[0]
# for x in range(1, len(partialSumsets)):
# sumset = self.sumSet(partialSumsets[x], sumset, t)
# S[j] = sumset
union = set(S[0])
for j in range(1, len(S)):
for s in S[j]:
union.add(s)
# if len(S[j]) > len(union):
# S[j][np.nonzero(union)[0]] = 1
# union = S[j]
# else:
# union[np.nonzero(S[j])[0]] = 1
return list(union)
def partitionSetIntoK(self, Z, k):
k = math.ceil(k)
partition = defaultdict(list)
listUsed = set()
for i in Z: # Ignore 0 component with 1:
goesTo = np.random.randint(0, k)
partition[goesTo].append(i)
listUsed.add(goesTo)
return [partition[x] for x in listUsed]
def ColorCodingLayer(self, Z, t, l, delta, high=(1, 0)):
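# Layer routine: split Z into m ~ l/log2(l/delta) random groups, solve
# each group with color_coding for target size ~gamma (fanned out over
# self.threads workers via @parfor), then merge the partial sumsets
# pairwise in the binary-tree loop below.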
if len(Z) == 1:
return [0, Z[0]]
divisor = math.log2(l / delta)
if l < divisor:
return self.color_coding(Z, t, l, delta)
m = self.roundToPowerOf2(l / divisor)
Z = self.partitionSetIntoK(Z, m)
m = self.roundToPowerOf2(len(Z))
while len(Z) < m:
Z.append([1])
gamma = 6 * divisor
if gamma > l:
gamma = l
@parfor(range(m), nP=self.threads, rP=1, serial=1)
def combinePartitions(i):
return self.color_coding(Z[i], 2 * t * gamma / l, round(gamma), delta / l)
S = combinePartitions
for h in range(1, int(math.log2(m)) + 1):
threshold = min(pow(2, h) * 2 * gamma * t / l, t)
for j in range(1, int(m / pow(2, h)) + 1):
S[j - 1] = self.sumSet(S[2 * j - 1 - 1], S[2 * j - 1], threshold)
S[0] = np.array(S[0])
return S[0]
def partitionIntoLayers(self, Z, n, t):
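# Layer i collects the elements in [t/2^i, t/2^(i-1)), so a subset
# staying below t can use at most about 2^i of them; the final layer
# catches everything below t/2^ceil(log2 n).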
Zi = [Z[(t / pow(2, i) <= Z) & (Z < t / pow(2, i - 1))] for i in
range(1, math.ceil(math.log2(n)))]
Zi.append(Z[(0 <= Z) & (Z < t / pow(2, math.ceil(math.log2(n)) - 1))])
if self.debug:
self.layerInformation = list()
for i in range(len(Zi)):
self.layerInformation.append((len(Zi[i]), t / pow(2, i)))
self.layerInformation.append((len(Zi[len(Zi) - 1]), 0))
return Zi
def fasterSubsetSum(self, Z, t, delta):
n = len(Z)
self.n = n
Z = np.array(Z)
Zi = self.partitionIntoLayers(Z, n, t)
S = [1]
if len(Zi[0]) > 1:
S = Zi[0]
if len(Zi) == 1:
S = self.ColorCodingLayer(S, t, len(Z), delta / (math.ceil(math.log2(n))))
for i in range(1, len(Zi)):
z = np.array(Zi[i])
if len(z) > 0:
start = time.time()
Si = self.ColorCodingLayer(z, t, pow(2, i + 1) - 1, delta / (math.ceil(math.log2(n))),
high=pow(2, i) if i != len(Zi) - 1 else (2 ** i, "Last is zero"))
S = self.sumSet(Si, S, t)
end = time.time()
print('solved layer ', i, 'in ', end - start)
return toNumbers(S)
```
#### File: Implementations/helpers/ComputeLayersByMass.py
```python
import math
import numpy as np
import scipy as scipy
from scipy import stats
class ComputeLayersByMass:
def __init__(self, distribution, T):
self.input = list(filter(lambda x: x < T // 2, distribution))
if (len(self.input) < 1):
self.input.append(0)
self.layerValues = []
n = len(self.input)
hist = np.histogram(self.input, bins=T)
self.hist_dist = scipy.stats.rv_histogram(hist)
samples = max(math.ceil(math.log2(n)), 1)
massPerSample = (n / samples) / n
self.lowerBoundOfPartition = [massPerSample * i for i in range(samples)]
self.UpperBoundOfPartition = [massPerSample * i for i in range(1, samples + 1)]
self.layerValues = self.hist_dist.ppf(self.lowerBoundOfPartition)
self.layerValues = self.layerValues.tolist()
self.layerValues = np.append(self.layerValues, (T // 2, T))
def retrieveSolutionSizeFunction(self):
def solutionSizeFunction(Z, n, t, i):
return math.ceil(t / max(self.layerValues[i], 1))
return solutionSizeFunction
def layerFunction(self):
def layering(Z, n, t, sample):
return Z[(sample[0] <= Z) & (Z <= sample[1])]
return layering
def intervalFunction(self):
# getting data of the histogram
def intervalFunction(Z, n, t):
intervalsLowThenUpperTuples = list(zip(
self.hist_dist.ppf(self.lowerBoundOfPartition),
self.hist_dist.ppf(self.UpperBoundOfPartition)))
intervalsLowThenUpperTuples.append((t // 2, t))
return intervalsLowThenUpperTuples
return intervalFunction
# return range(math.ceil(math.log2(n) + 1))
```
#### File: Implementations/Interfaces/IRandomizedWithPartitionFunction.py
```python
from abc import ABC, abstractmethod
import numpy as np
from Implementations.helpers.Helper import toNumbers, padWithZero, ListToPolynomial
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
class IRandomizedWithPartitionFunction(NearLinearBase, ABC):
def __init__(self, debug, layerFunction, intervalFunction, solutionSizeFunction, repetitions):
super().__init__(debug, 'adaptive', repetitions)
self.layerFunction = layerFunction
self.intervalFunction = intervalFunction
self.solutionSizeFunction = solutionSizeFunction
@abstractmethod
def fasterSubsetSum(self, Z, t, delta):
n = len(Z)
Z = np.array(Z)
Zi = self.partitionIntoLayers(Z, n, t)
dontDoLast = 0
S = [1]
if len(Zi[len(Zi) - 1]) > t // 2:
S = Zi[len(Zi) - 1]
dontDoLast = 1
for i in range(0, len(Zi) - dontDoLast):
z = np.array(Zi[i])
if len(z) > 1:
Si = self.ColorCodingLayer(z, t, self.solutionSizeFunction(Z, n, t, i), delta / len(Zi),
high=pow(2, i) if i != len(Zi) - 1 else (
2 ** i, "Last is zero")) # Just adding some garbage, does not matter.
S = self.sumSet(Si, S, t)
return toNumbers(S)
@abstractmethod
def partitionIntoLayers(self, Z, n, t):
Zi = [self.layerFunction(Z, n, t, sample) for sample in self.intervalFunction(Z, n, t)]
if self.debug:
self.layerInformation = list()
sampleKey = 0
for sample in self.intervalFunction(Z, n, t):
self.layerInformation.append((len(Zi[sampleKey]), sample[1]))
sampleKey += 1
self.layerInformation.append((0, 0))
for i in range(len(Zi)):
if len(Zi[i]) < 1:
Zi[i] = padWithZero([])
Zi = np.array(list(map(ListToPolynomial, Zi)))
return Zi
```
#### File: tests/FasterSubsetSumTests/test_randomizedBase.py
```python
import unittest
from unittest import TestCase
from Implementations.FastIntegersFromGit import FastIntegersFromGit
from Implementations.helpers.Helper import ListToPolynomial, toNumbers
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
from benchmarks.test_distributions import Distributions as dist
class RandomizedBaseTester(TestCase):
@classmethod
def setUp(cls):
cls.fasterSubset = NearLinearBase(False, 1)
def test_faster_sumset_base_returns_correct_sumset(self):
vals = [1, 15, 3, 8, 120, 290, 530, 420, 152, 320, 150, 190]
T = 11
sums = self.fasterSubset.fasterSubsetSum(vals, T, 0.2)
self.assertListEqual(sums, [0, 1, 3, 4, 8, 9, 11])
def test_color_coding_base_returns_correct_sumset(self):
vals = [1, 15, 3, 8, 120, 290, 530, 420, 152, 320, 150, 190]
T = 11
characteristic = ListToPolynomial(vals)
sums = self.fasterSubset.color_coding(characteristic, T, len(vals), 0.2)
self.assertListEqual(toNumbers(sums), [0, 1, 3, 4, 8, 9, 11])
@unittest.skip("Not currently working.")
def test_faster_sumset_returns_correct_sumset_multiples(self):
vals = [1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
T = 11
sums = self.fasterSubset.fasterSubsetSum(vals, T, 0.2)
self.assertListEqual(sums, [0, 1, 3, 4])
@unittest.skip("Not currently working. I.e some of the speed ups we done means this does not work properly")
def test_faster_simple(self):
vals = [8, 10]
T = 18
a = list(set(vals))
delta = 0.0001
fast = self.fasterSubset.fasterSubsetSum(a, T, delta)
self.assertListEqual(fast, [0, 8, 10, 18])
@unittest.skip("comment in for benchmark.")
def test_me(self):
delta = 0.0001
i = 500
a, T = dist.evenDistribution(i)
fast = self.fasterSubset.fasterSubsetSum(a, T, delta)
# expertSolution = FastIntegersFromGit().run(a, T)
# self.assertListEqual(fast, expertSolution)
```
#### File: Benchmark-SubsetSums/tests/ILPTest.py
```python
from unittest import TestCase
import numpy as np
from Implementations.FastIntegersPersonal import FastIntegersPersonal
from Implementations.ILPSubsetSum import ILPSubsetSum
from Implementations.PowerSeriesSubsetSum import PowerSeriesSubsetSum
from benchmarks.test_distributions import Distributions
class Test(TestCase):
@classmethod
def setUp(self):
self.ilpSolver = ILPSubsetSum()
def test_fast_integer_returns_correct_sumset(self):
vals = [1, 15, 3, 8, 120, 290, 530, 420, 152, 320, 150, 190]
T = 11
sums = self.ilpSolver.run(vals, T)
self.assertEqual(sum(sums), 11)
def test_fast_integer_returns_not_found(self):
vals = [1, 2, 4, 15, 17]
T = 26
sums = self.ilpSolver.run(vals, T)
self.assertEqual(sum(sums), 24)
```
#### File: Benchmark-SubsetSums/tests/test_fastIntegersFromJournal.py
```python
from unittest import TestCase
import numpy as np
from Implementations.FastIntegersFromGit import FastIntegersFromGit
from Implementations.FastIntegersFromJournal import FastIntegersFromJournal
from Implementations.FastIntegersPersonal import FastIntegersPersonal
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
from benchmarks.test_distributions import Distributions
class Test(TestCase):
@classmethod
def setUp(self):
self.fastIntegers = FastIntegersFromJournal('does not matter')
self.fastIntegersFromGit = FastIntegersFromGit()
self.RandomizedFasterSubsetSum = NearLinearBase(False, 'near_linear', 1)
def test_find_bug(self):
i = 5
delta = 0.0001
a, t = Distributions().HighEndDistribution(i)
tested = self.fastIntegers.run(a, t)
tested.sort()
expected = self.fastIntegersFromGit.run(a, t)
self.assertListEqual(tested, expected)
```
#### File: Benchmark-SubsetSums/tests/test_helper.py
```python
from unittest import TestCase
from numpy.fft import fft, ifft
import numpy as np
from Implementations.DPRegularWithCount import DynamicProgrammingWithCount
from Implementations.helpers.Helper import reduceToMultisetWithCardinalityAtMostTwo, divideAndConquerSumSet, sumSetNotComplex, \
ListToPolynomial, toNumbers
class Test(TestCase):
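# reduceToMultisetWithCardinalityAtMostTwo is the standard multiplicity
# reduction for subset sum: values occurring more than twice are traded
# for copies of their doubles, keeping the attainable sums up to T while
# leaving at most two copies of each value (see the expected lists below).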
def test_cardinality_at_most_two_function(self):
vals = [1, 1, 1, 10, 10, 25, 25, 25, 31]
expected = [1, 2, 10, 10, 25]
T = 30
filtered = reduceToMultisetWithCardinalityAtMostTwo(vals, T)
self.assertListEqual(filtered, expected)
def test_DP_solver(self):
alreadyKnown = [7]
vals = [1,2,3]
runner = DynamicProgrammingWithCount()
proposedSolution = runner.subsetSumDP(vals, 12)
print()
def test_cardinality_at_most_two_function_with_mult_4(self):
vals = [1, 1, 1, 10, 10, 25, 25, 25, 25, 25, 25, 31]
expected = [1, 2, 10, 10, 25, 25, 31, 50, 50]
T = 100
filtered = reduceToMultisetWithCardinalityAtMostTwo(vals, T)
self.assertListEqual(filtered, expected)
def test_cardinality_at_most_two_handles_cardinality_overlap(self):
vals = [1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 4, 4, 8]
expected = [1, 2, 2, 4, 8, 16]
T = 100
filtered = reduceToMultisetWithCardinalityAtMostTwo(vals, T)
self.assertListEqual(filtered, expected)
def test_all_sumsets_in_S(self):
vals = [1, 2, 3, 4]
sums = divideAndConquerSumSet(vals, 11)
self.assertListEqual(sums, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
def test_sum_set_not_complex(self):
A, B = [3, 2, 1, 0], [0, 10, 20]
sumset = sumSetNotComplex(A, B)
self.assertListEqual(sumset, [0, 1, 2, 3, 10, 11, 12, 13, 20, 21, 22, 23])
def test_sum_set_withThreshold(self):
A, B = [3, 2, 1, 0], [0, 10, 20]
sumset = sumSetNotComplex(A, B, 11)
self.assertListEqual(sumset, [0, 1, 2, 3, 10, 11])
def test_convertIFFTSumsToRealValues(self):
eps = 0.00001
A = [3, 2, 1]
polyA = ListToPolynomial(A)
ptWiseA = fft(polyA)
ShouldBeA = ifft(ptWiseA)
threshold = 10
sums = toNumbers(np.select([ShouldBeA[:int(threshold + 1)] > eps], [1]))
self.assertListEqual(sums, toNumbers(polyA))
```
#### File: Benchmark-SubsetSums/tests/test_KMeans.py
```python
from unittest import TestCase
from Implementations.helpers.ComputeKMeansGrouping import ComputeKMeansGrouping
class Test(TestCase):
def test_kMeans(self):
xs = [1, 2, 3, 10, 11, 12, 20, 23]
kmeans = ComputeKMeansGrouping(xs)
kmeans.computeClusters(3)
solution = kmeans.clusters(3)
self.assertListEqual(solution[0], [1, 2, 3])
self.assertListEqual(solution[1], [10, 11, 12])
self.assertListEqual(solution[2], [20, 23])
def test_kMeans_NClusters(self):
xs = [1000, 100, 203, 2031, 539, 439, 120392, 2939]
kmeans = ComputeKMeansGrouping(xs)
kmeans.computeClusters(8)
solution = kmeans.clusters(len(xs))
expected = [[100], [203], [439], [539], [1000], [2031], [2939], [120392]]
self.assertListEqual(solution, expected)
``` |
{
"source": "joaks1/bio-180-intro-bio",
"score": 2
} |
#### File: lecture-materials/bin/trait-histograms.py
```python
import os
import sys
import random
import math
import matplotlib
from pymsbayes import plotting
random.seed(48726114)
# matplotlib.rc('text',**{'usetex': True})
def get_normal_samples(mu=0, sigma=1, n=1000):
samples = []
for i in range(n):
samples.append(random.normalvariate(mu, sigma))
return samples
def get_beta_samples(alpha=1, beta=1, n=1000):
samples = []
for i in range(n):
samples.append(random.betavariate(alpha, beta))
return samples
patterns = {
'base': get_normal_samples(20, 2, 10000),
'directional': get_normal_samples(23, 1, 10000),
'stabilizing': get_normal_samples(20, 1, 10000),
'disruptive': get_normal_samples(18, 1, 5000) + get_normal_samples(22, 1, 5000),
}
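# The four samples mimic selection regimes acting on the base population:
# directional shifts the mean, stabilizing narrows the variance, and
# disruptive mixes two shifted modes into a bimodal distribution.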
all_values = []
for v in patterns.itervalues():
all_values.extend(v)
maximum=int(math.ceil(max(all_values)))
minimum=int(math.floor(min(all_values)))
bins = range(minimum, maximum)
for p, draws in patterns.iteritems():
hd = plotting.HistData(
draws,
normed = True,
bins = bins,
range = None,
cumulative = False,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
rwidth = None,
log = False,
color = None,
edgecolor = '0.5',
facecolor = '0.5',
fill = True,
hatch = None,
label = None,
linestyle = None,
linewidth = None)
sp = plotting.ScatterPlot(
hist_data_list = [hd],
y_label = 'Frequency',
x_label = 'Trait value',
xlim = (minimum, maximum),
ylim = (0.0, 0.4),
)
pg = plotting.PlotGrid(subplots = [sp],
num_columns = 1,
label_schema = None,
share_y = True,
height = 3,
width = 3,
auto_height = False)
pg.savefig('../images/generic-trait-histogram-{0}.pdf'.format(p))
beaks = get_normal_samples(9.5, 1.5, 2000)
maximum=int(math.ceil(max(beaks)))
minimum=int(math.floor(min(beaks)))
bins = range(minimum, maximum)
hd = plotting.HistData(
beaks,
normed = True,
bins = 20,
range = None,
cumulative = False,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
rwidth = None,
log = False,
color = None,
edgecolor = '#FF9900',
facecolor = '#FF9900',
fill = True,
hatch = None,
label = None,
linestyle = None,
linewidth = None,
zorder=0)
sp = plotting.ScatterPlot(
hist_data_list = [hd],
y_label = 'Frequency of finches',
x_label = 'Beak depth (mm)',
# xlim = (minimum, maximum),
# ylim = (0.0, 0.4),
)
pg = plotting.PlotGrid(subplots = [sp],
num_columns = 1,
label_schema = None,
# share_y = True,
height = 2.2,
width = 3.5,
auto_height = False)
pg.savefig('../images/my-beak-depth-histogram.pdf')
beaks = get_normal_samples(9.5, 1.5, 2000)
maximum=int(math.ceil(max(beaks)))
minimum=int(math.floor(min(beaks)))-1
hd = plotting.HistData(
beaks,
normed = True,
bins = 20,
range = None,
cumulative = False,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
rwidth = None,
log = False,
color = None,
edgecolor = '0.0',
facecolor = '0.0',
fill = True,
hatch = None,
label = None,
linestyle = None,
linewidth = None,
zorder=0)
sp = plotting.ScatterPlot(
hist_data_list = [hd],
y_label = 'Frequency of lizards',
x_label = 'Melanin concentration (ng/cell)',
xlim = (minimum, maximum),
# ylim = (0.0, 0.4),
)
pg = plotting.PlotGrid(subplots = [sp],
num_columns = 1,
label_schema = None,
# share_y = True,
height = 2.2,
width = 3.5,
auto_height = False)
pg.savefig('../images/lizard-pigment-histogram-stabilizing.eps')
beaks = get_normal_samples(8, 0.8, 2000) + get_normal_samples(11, 0.8, 2000)
hd = plotting.HistData(
beaks,
normed = True,
bins = 20,
range = None,
cumulative = False,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
rwidth = None,
log = False,
color = None,
edgecolor = '0.0',
facecolor = '0.0',
fill = True,
hatch = None,
label = None,
linestyle = None,
linewidth = None,
zorder=0)
sp = plotting.ScatterPlot(
hist_data_list = [hd],
y_label = 'Frequency of lizards',
x_label = 'Melanin concentration (ng/cell)',
xlim = (minimum, maximum),
# ylim = (0.0, 0.4),
)
pg = plotting.PlotGrid(subplots = [sp],
num_columns = 1,
label_schema = None,
# share_y = True,
height = 2.2,
width = 3.5,
auto_height = False)
pg.savefig('../images/lizard-pigment-histogram-disruptive.eps')
beaks = get_normal_samples(7.5, 0.8, 2000)
hd = plotting.HistData(
beaks,
normed = True,
bins = 20,
range = None,
cumulative = False,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
rwidth = None,
log = False,
color = None,
edgecolor = '0.0',
facecolor = '0.0',
fill = True,
hatch = None,
label = None,
linestyle = None,
linewidth = None,
zorder=0)
sp = plotting.ScatterPlot(
hist_data_list = [hd],
y_label = 'Frequency of lizards',
x_label = 'Melanin concentration (ng/cell)',
xlim = (minimum, maximum),
# ylim = (0.0, 0.4),
)
pg = plotting.PlotGrid(subplots = [sp],
num_columns = 1,
label_schema = None,
# share_y = True,
height = 2.2,
width = 3.5,
auto_height = False)
pg.savefig('../images/lizard-pigment-histogram-directional.eps')
freqs = [
(12, get_beta_samples(0.1, 0.1, 100)),
(12, get_beta_samples(2.5, 2.5, 100)),
(4, get_beta_samples(20, 1, 100)),
(4, get_beta_samples(1, 20, 100)),
]
maximum = 1.0
minimum = 0.0
splots = []
for nbins, f in freqs:
hd = plotting.HistData(
f,
normed = False,
bins = nbins,
range = None,
cumulative = False,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
rwidth = None,
log = False,
color = None,
edgecolor = '#FF9900',
facecolor = '#FF9900',
fill = True,
hatch = None,
label = None,
linestyle = None,
linewidth = None,
zorder=0)
sp = plotting.ScatterPlot(
hist_data_list = [hd],
x_label = r'Frequency of $A_1$',
y_label = '# of populations',
xlim = (minimum, maximum),
# ylim = (0.0, 0.4),
)
splots.append(sp)
pg = plotting.PlotGrid(subplots = splots,
num_columns = 2,
label_schema = 'numbers',
# share_y = True,
height = 4.5,
width = 6,
auto_height = False)
pg.savefig('../images/allele-freq-clicker-histograms.pdf')
``` |
{
"source": "joaks1/msbayes-experiments",
"score": 2
} |
#### File: empirical-analyses/bin/plot-results.py
```python
import os
import sys
import matplotlib
from pymsbayes.utils.parsing import DMCSimulationResults, spreadsheet_iter
from pymsbayes.config import MsBayesConfig
from pymsbayes.plotting import (Ticks, HistData, ScatterPlot, PlotGrid)
from pymsbayes.utils.stats import Partition
from pymsbayes.utils.probability import GammaDistribution
from pymsbayes.utils.messaging import get_logger
import project_util
_LOG = get_logger(__name__)
def get_dpp_psi_values(num_elements, shape, scale, num_sims = 100000):
conc = GammaDistribution(shape, scale)
p = Partition([1] * num_elements)
psis = []
for i in range(num_sims):
a = conc.draw()
x = p.dirichlet_process_draw(a)
psis.append(len(set(x.partition)))
return psis
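# get_dpp_psi_values approximates, by Monte Carlo, the prior on the number of
# divergence events: each replicate draws a concentration parameter from a
# gamma distribution, draws a partition of the taxon pairs from a Dirichlet
# process with that concentration, and records the number of subsets.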
def create_plots(dpp_info_path, old_info_path, out_dir):
# matplotlib.rc('text',**{'usetex': True})
# old = ([1] * 992) + ([2] * 8)
if not os.path.exists(out_dir):
os.mkdir(out_dir)
dmc_sim = DMCSimulationResults(dpp_info_path)
dmc_sim_old = DMCSimulationResults(old_info_path)
psi_path = dmc_sim.get_result_path_prefix(1, 1, 1) + '99-psi-results.txt'
psi_path_old = dmc_sim_old.get_result_path_prefix(1, 1, 1) + '99-psi-results.txt'
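    # Rebuild pseudo-samples of 10000 draws per analysis by replicating each
    # number-of-divergence-events value in proportion to its estimated
    # posterior probability, so the posteriors can be plotted as histograms.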
psis = []
for d in spreadsheet_iter([psi_path]):
n = int(round(10000 * float(d['estimated_prob'])))
psis.extend([int(d['num_of_div_events'])] * n)
psis_old = []
for d in spreadsheet_iter([psi_path_old]):
n = int(round(10000 * float(d['estimated_prob'])))
psis_old.extend([int(d['num_of_div_events'])] * n)
bins = range(1, dmc_sim.num_taxon_pairs + 2)
hd = HistData(x = psis,
normed = True,
bins = bins,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
zorder = 0)
# hd_old= HistData(x = old,
hd_old= HistData(x = psis_old,
normed = True,
bins = bins,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
zorder = 0)
tick_labels = []
for x in bins[0:-1]:
if x % 2:
tick_labels.append(str(x))
else:
tick_labels.append('')
xticks_obj = Ticks(ticks = bins,
labels = tick_labels,
horizontalalignment = 'left')
hist = ScatterPlot(hist_data_list = [hd],
x_label = 'Number of divergence events',
y_label = 'Posterior probability',
xticks_obj = xticks_obj)
hist_old = ScatterPlot(hist_data_list = [hd_old],
x_label = 'Number of divergence events',
y_label = 'Posterior probability',
xticks_obj = xticks_obj)
hist.set_xlim(left = bins[0], right = bins[-1])
hist_old.set_xlim(left = bins[0], right = bins[-1])
hist.set_ylim(bottom = 0.0, top = 0.1)
pg = PlotGrid(subplots = [hist],
num_columns = 1,
height = 4.0,
width = 6.5,
label_schema = None,
auto_height = False)
pg.auto_adjust_margins = False
pg.margin_top = 1
pg.reset_figure()
pg.savefig(os.path.join(out_dir, 'philippines-dpp-psi-posterior.pdf'))
# hist.set_ylim(bottom = 0.0, top = 1.0)
hist.set_ylim(bottom = 0.0, top = 0.5)
hist.set_ylabel('')
# hist_old.set_ylim(bottom = 0.0, top = 1.0)
hist_old.set_ylim(bottom = 0.0, top = 0.5)
pg = PlotGrid(subplots = [hist_old, hist],
num_columns = 2,
height = 3.5,
width = 8.0,
share_x = True,
share_y = True,
label_schema = None,
auto_height = False,
# column_labels = [r'\texttt{msBayes}', r'\texttt{dpp-msbayes}'],
column_labels = [r'msBayes', r'dpp-msbayes'],
column_label_size = 18.0)
pg.auto_adjust_margins = False
pg.margin_top = 0.92
pg.padding_between_horizontal = 1.0
pg.reset_figure()
pg.savefig(os.path.join(out_dir, 'philippines-dpp-psi-posterior-old-vs-dpp.pdf'))
pg.label_schema = 'uppercase'
pg.reset_figure()
pg.savefig(os.path.join(out_dir, 'philippines-dpp-psi-posterior-old-vs-dpp-labels.pdf'))
prior_psis = get_dpp_psi_values(dmc_sim.num_taxon_pairs, 1.5, 18.099702, num_sims = 100000)
prior_hd = HistData(x = prior_psis,
normed = True,
bins = bins,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
zorder = 0)
prior_hist = ScatterPlot(hist_data_list = [prior_hd],
x_label = 'Number of divergence events',
y_label = 'Probability',
xticks_obj = xticks_obj)
prior_hist.set_xlim(left = bins[0], right = bins[-1])
prior_hist.set_ylim(bottom = 0.0, top = 0.12)
hist.set_ylim(bottom = 0.0, top = 0.12)
pg = PlotGrid(subplots = [prior_hist, hist],
num_columns = 2,
height = 3.5,
width = 8.0,
share_x = True,
share_y = True,
label_schema = None,
auto_height = False,
# column_labels = [r'\texttt{msBayes}', r'\texttt{dpp-msbayes}'],
column_labels = [r'Prior', r'Posterior'],
column_label_size = 18.0)
pg.auto_adjust_margins = False
pg.margin_top = 0.92
pg.padding_between_horizontal = 1.0
pg.reset_figure()
pg.savefig(os.path.join(out_dir, 'philippines-dpp-psi-posterior-prior.pdf'))
pg.label_schema = 'uppercase'
pg.reset_figure()
    pg.savefig(os.path.join(out_dir, 'philippines-dpp-psi-posterior-prior-labels.pdf'))
prior_psis_old = []
for i in range(22):
prior_psis_old.extend([i + 1] * 100)
prior_hd_old = HistData(x = prior_psis_old,
normed = True,
bins = bins,
histtype = 'bar',
align = 'mid',
orientation = 'vertical',
zorder = 0)
prior_hist_old = ScatterPlot(hist_data_list = [prior_hd_old],
x_label = 'Number of divergence events',
y_label = 'Prior probability',
xticks_obj = xticks_obj)
prior_hist.set_xlim(left = bins[0], right = bins[-1])
prior_hist.set_ylim(bottom = 0.0, top = 0.5)
hist.set_ylim(bottom = 0.0, top = 0.5)
prior_hist.set_ylim(bottom = 0.0, top = 0.5)
for h in [hist_old, hist, prior_hist_old, prior_hist]:
h.set_ylabel(ylabel = '')
h.set_xlabel(xlabel = '')
h.set_title_text('')
h.set_extra_y_label('')
pg = PlotGrid(subplots = [hist_old, hist, prior_hist_old, prior_hist],
num_columns = 2,
height = 6.0,
width = 8.0,
share_x = True,
share_y = False,
label_schema = None,
auto_height = False,
title = r'Number of divergence events',
title_top = False,
title_size = 16.0,
y_title = 'Probability',
y_title_size = 16.0,
column_labels = [r'msBayes', r'dpp-msbayes'],
row_labels = ['Posterior', 'Prior'],
column_label_offset = 0.07,
column_label_size = 22.0,
row_label_offset = 0.04,
row_label_size = 20.0)
pg.auto_adjust_margins = False
pg.margin_top = 0.94
pg.margin_bottom = 0.045
pg.margin_right = 0.95
pg.margin_left = 0.045
pg.padding_between_vertical = 0.5
pg.padding_between_horizontal = 1.0
pg.reset_figure()
pg.set_shared_x_limits()
pg.set_shared_y_limits(by_row = True)
pg.reset_figure()
pg.savefig(os.path.join(out_dir, 'philippines-dpp-psi-posterior-old-vs-dpp-with-prior.pdf'))
def main_cli():
create_plots(project_util.PHILIPPINES_DPP_INFO,
project_util.PHILIPPINES_OLD_INFO,
project_util.PLOT_DIR)
if __name__ == '__main__':
main_cli()
```
#### File: d1/m1/sumtimes.py
```python
import os
import sys
import stat
import argparse
import subprocess
import logging
import pycoevolity
def main(argv = sys.argv):
pycoevolity.write_splash(sys.stderr)
parser = argparse.ArgumentParser()
parser.add_argument('posterior_path',
metavar = 'POSTERIOR-PATH',
type = pycoevolity.argparse_utils.arg_is_file,
help = ('Path to posterior file.'))
parser.add_argument('-p', '--prefix',
action = 'store',
type = str,
default = "",
help = ('A prefix to prepend to all output files.'))
parser.add_argument('-f', '--force',
action = 'store_true',
help = ('Overwrite any existing output files. By default, an error '
'is thrown if an output path exists.'))
parser.add_argument('-l', '--labels',
type = str,
nargs = '+',
default = [],
help = ('The labels to use for the plot.'))
parser.add_argument('-z', '--include-zero',
action = 'store_true',
help = ('By default, ggplot2 auto-magically determines the limits '
'of the time axis, which often excludes zero (present). '
'This option ensures that the time axis starts from zero.'))
parser.add_argument('-x', '--x-label',
action = 'store',
type = str,
default = "Time",
help = ('Label for the X-axis. Default: \'Time\'.'))
parser.add_argument('-y', '--y-label',
action = 'store',
type = str,
default = "Comparison",
help = ('Label for the Y-axis. Default: \'Comparison\'.'))
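    # Example invocation (hypothetical file names):
    #   sumtimes.py posterior-sample.txt -l "Pair 1" "Pair 2" -z -p out-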
if argv == sys.argv:
args = parser.parse_args()
else:
args = parser.parse_args(argv)
prefix = args.prefix
if len(prefix.split(os.path.sep)) < 2:
prefix = os.path.join(os.curdir, prefix)
r_path = prefix + "pycoevolity-plot-times.R"
pdf_path = prefix + "pycoevolity-times.pdf"
png_path = prefix + "pycoevolity-times.png"
svg_path = prefix + "pycoevolity-times.svg"
output_dir = os.path.dirname(r_path)
if not output_dir:
output_dir = os.curdir
if not args.force:
for p in [r_path, pdf_path, png_path, svg_path]:
if os.path.exists(p):
raise Exception(
"\nERROR: File {0!r} already exists.\n"
"Use \'-p/--prefix\' option to specify a different prefix,\n"
"or the \'-f/--force\' option to overwrite existing "
"files.".format(p))
sys.stderr.write("Parsing posterior file...\n")
posterior = pycoevolity.parsing.get_dict_from_spreadsheets([args.posterior_path])
number_of_pairs = 0
for k in posterior.keys():
if k.startswith("PRI.t."):
number_of_pairs += 1
if not args.labels:
args.labels = ["Pair {0}".format(i + 1) for i in range(number_of_pairs)]
assert len(args.labels) == number_of_pairs
labels = []
heights = []
nsamples = None
for i in range(number_of_pairs):
hts = posterior["PRI.t.{0}".format(i + 1)]
if not nsamples:
nsamples = len(hts)
else:
assert len(hts) == nsamples
labels.extend([args.labels[i]] * nsamples)
heights.extend(hts)
plot_width = 7.0
plot_height = plot_width / 1.618034
plot_units = "in"
plot_scale = 8
plot_base_size = 14
scale_x_continuous_args = ["expand = c(0.05, 0)"]
if args.include_zero:
scale_x_continuous_args.append("limits = c(0, NA)")
rscript = """#! /usr/bin/env Rscript
library(ggplot2)
library(ggridges)
time = c({heights})
comparison = c(\"{labels}\")
data <- data.frame(time = time, comparison = comparison)
data$comparison = factor(data$comparison, levels = rev(unique(as.character(data$comparison))))
ggplot(data, aes(x = time, y = comparison, height = ..density..)) +
geom_density_ridges(stat = \"density\", scale = {plot_scale}, rel_min_height = 0.001) +
theme_minimal(base_size = {plot_base_size}) +
theme(axis.text.y = element_text(vjust = 0)) +
scale_x_continuous({scale_x_continuous_args}) +
scale_y_discrete(expand = c(0.01, 0)) +
labs(x = \"{x_label}\") +
labs(y = \"{y_label}\")
ggsave(\"{pdf_path}\", width = {plot_width}, height = {plot_height}, units = \"{plot_units}\")
ggsave(\"{png_path}\", width = {plot_width}, height = {plot_height}, units = \"{plot_units}\")
r <- tryCatch(
{{
ggsave(\"{svg_path}\", width = {plot_width}, height = {plot_height}, units = \"{plot_units}\")
}},
error = function(cond) {{
message(\"An error occurred while trying to save plot as SVG.\")
message(\"The plot has been saved in PDF and PNG format.\")
message(\"If you want the SVG file, you may need to install additional R packages.\")
message(\"Here's the original error message for details:\")
message(cond)
}},
warning = function(cond) {{
message(\"A warning occurred while trying to save the plot in SVG format.\")
message(\"The plot has been saved in PDF and PNG format.\")
message(\"If you want the SVG file, you may need to install additional R packages.\")
message(\"Here's the original warning message for details:\")
message(cond)
}},
finally = {{}})
""".format(
heights = ", ".join(str(h) for h in heights),
labels = "\", \"".join(labels),
plot_scale = plot_scale,
plot_base_size= plot_base_size,
scale_x_continuous_args = ", ".join(scale_x_continuous_args),
plot_width = plot_width,
plot_height = plot_height,
plot_units = plot_units,
x_label = args.x_label,
y_label = args.y_label,
pdf_path = os.path.basename(pdf_path),
png_path = os.path.basename(png_path),
svg_path = os.path.basename(svg_path))
with open(r_path, "w") as out:
out.write("{0}".format(rscript))
file_stat = os.stat(r_path)
os.chmod(r_path, file_stat.st_mode | stat.S_IEXEC)
sys.stderr.write("Running R script to generate plots...\n")
sout = subprocess.PIPE
serr = subprocess.PIPE
process = subprocess.Popen([r_path],
cwd = output_dir,
stdout = sout,
stderr = serr,
shell = False,
universal_newlines = True)
stdout, stderr = process.communicate()
exit_code = process.wait()
if exit_code != 0:
sys.stderr.write(
"The R plotting script exited with an error code.\n"
"However, the script is available at\n"
"{r_script_path!r}.\n"
"You may need to install the R packages ggplot2 and ggridges and "
"re-run the R script.\n"
"Here is the stderr from R:\n{stderr}\n".format(
r_script_path = r_path,
stderr = stderr))
else:
if stderr:
sys.stderr.write("Here is the stderr returned by R:\n")
sys.stderr.write("{0}\n".format("-" * 72))
sys.stderr.write("{0}\n".format(stderr))
sys.stderr.write("{0}\n".format("-" * 72))
if os.path.exists(r_path):
sys.stderr.write("Here are the outputs:\n")
sys.stderr.write(" R script: {0!r}\n".format(r_path))
if os.path.exists(pdf_path):
sys.stderr.write(" PDF plot: {0!r}\n".format(pdf_path))
if os.path.exists(png_path):
sys.stderr.write(" PNG plot: {0!r}\n".format(png_path))
if os.path.exists(svg_path):
sys.stderr.write(" SVG plot: {0!r}\n".format(svg_path))
if __name__ == "__main__":
main()
```
#### File: exchangeability/bin/parse-results.py
```python
import os
import sys
from pymsbayes.utils.parsing import DMCSimulationResults
from pymsbayes.utils.messaging import get_logger
_LOG = get_logger(__name__)
def main_cli():
bin_dir = os.path.abspath(os.path.dirname(__file__))
project_dir = os.path.abspath(os.path.dirname(bin_dir))
result_dir = os.path.abspath(os.path.join(project_dir, 'results'))
info_path = os.path.join(result_dir, 'sort', 'pymsbayes-results',
'pymsbayes-info.txt')
_LOG.info('Parsing and writing results...')
results = DMCSimulationResults(info_path)
prior_indices = results.prior_index_to_config.keys()
results.write_result_summaries(
prior_indices = prior_indices,
include_tau_exclusion_info = False)
if __name__ == '__main__':
main_cli()
```
#### File: power-comparison/bin/generate_configs.py
```python
import os
import sys
import math
import project_util
from project_util import SETTINGS
def write_observed_configs(output_dir):
for upper_tau, cfg in SETTINGS.observed_cfg_iter():
out_path = os.path.join(output_dir,
'observed-{0}.cfg'.format(upper_tau))
with open(out_path, 'w') as out:
out.write(cfg)
def write_exp_observed_configs(output_dir):
for upper_tau, cfg in SETTINGS.observed_cfg_iter(uniform_tau=False):
out_path = os.path.join(output_dir,
'exp-observed-{0}.cfg'.format(upper_tau))
with open(out_path, 'w') as out:
out.write(cfg)
def write_prior_configs(output_dir):
for div_model, cfg in SETTINGS.prior_cfg_iter():
out_path = os.path.join(output_dir,
'prior-{0}.cfg'.format(div_model))
with open(out_path, 'w') as out:
out.write(cfg)
out_path = os.path.join(output_dir, 'prior-old.cfg')
with open(out_path, 'w') as out:
out.write(SETTINGS.get_old_prior())
def write_old_observed_configs(output_dir):
for upper_tau, cfg in SETTINGS.old_observed_cfg_iter():
out_path = os.path.join(output_dir,
'old-observed-{0}.cfg'.format(upper_tau))
with open(out_path, 'w') as out:
out.write(cfg)
def main():
write_observed_configs(project_util.OBSERVED_CFG_DIR)
write_exp_observed_configs(project_util.OBSERVED_CFG_DIR)
write_old_observed_configs(project_util.OBSERVED_CFG_DIR)
write_prior_configs(project_util.PRIOR_CFG_DIR)
if __name__ == '__main__':
main()
```
#### File: response-redux/bin/plot-results.py
```python
import os
import sys
from pymsbayes import plotting
from pymsbayes.utils.parsing import spreadsheet_iter
from pymsbayes.utils.stats import get_freqs, freq_less_than, median, mode_list
import project_util
from rescale_posteriors import get_posterior_plot, summarize_results
def plot_posterior(post_path):
model_indices = range(1, 9)
scaled_indices = [5, 6]
sp, xlim, ylim = get_posterior_plot(post_path, model_indices,
scaled_indices)
rect = [0, 0, 1, 1]
sp.fig.tight_layout(pad = 0.25, rect = rect)
sp.reset_plot()
sp.savefig(os.path.join(os.path.dirname(post_path),
'mean_by_dispersion.pdf'))
def main_cli():
prior_prob_omega_less_than = 0.0887
for d in ['hickerson', 'alt']:
result_dir = os.path.join(project_util.RESULT_DIR, d,
'pymsbayes-results')
post_path = os.path.join(result_dir, 'pymsbayes-output',
'd1', 'm12345678-combined',
'd1-m12345678-combined-s1-25-posterior-sample.txt.gz')
plot_posterior(post_path)
summarize_results(post_path)
result_dir = os.path.join(project_util.PROJECT_DIR,
'hickerson-et-al-posterior', 'hickerson-posterior-1k')
post_path = os.path.join(result_dir, 'posterior-from-mike-1k.txt.gz')
plot_posterior(post_path)
summarize_results(post_path)
result_dir = os.path.join(project_util.PROJECT_DIR,
'hickerson-et-al-posterior', 'hickerson-posterior-10k')
post_path = os.path.join(result_dir, 'posterior-from-mike-10k.txt.gz')
plot_posterior(post_path)
summarize_results(post_path)
result_dir = os.path.join(project_util.PROJECT_DIR,
'hickerson-et-al-posterior', 'eureject-results')
post_path = os.path.join(result_dir, 'posterior-sample.txt.gz')
plot_posterior(post_path)
summarize_results(post_path)
if __name__ == '__main__':
main_cli()
```
#### File: response-redux/bin/rescale_posteriors.py
```python
import os
import sys
from pymsbayes import plotting
from pymsbayes.utils.parsing import spreadsheet_iter
from pymsbayes.fileio import process_file_arg
from pymsbayes.utils.stats import get_freqs, freq_less_than, median, mode_list
import project_util
def parse_results(posterior_path):
omegas = []
psis = []
models = []
for i, d in enumerate(spreadsheet_iter([posterior_path])):
models.append(int(d['PRI.model']))
omegas.append(float(d['PRI.omega']))
psis.append(int(d['PRI.Psi']))
return models, psis, omegas
def summarize_results(posterior_path):
sum_path = posterior_path + '.summary.txt'
models, psis, omegas = parse_results(posterior_path)
psi_mode = mode_list(psis)
omega_mode = mode_list(omegas)
omega_median = median(omegas)
psi_probs = get_freqs(psis)
omega_prob = freq_less_than(omegas, 0.01)
model_probs = get_freqs(models)
with open(sum_path, 'w') as out:
out.write('\nResults for {0!r}:\n'.format(posterior_path))
out.write('psi_mode = {0}\n'.format(psi_mode))
out.write('psi_probs = {0}\n'.format(psi_probs))
out.write('omega_mode = {0}\n'.format(omega_mode))
out.write('omega_median = {0}\n'.format(omega_median))
out.write('omega_prob_less_than = {0}\n'.format(omega_prob))
out.write('model_probs = {0}\n'.format(model_probs))
def rescale_posterior(in_path, out_path, scale_factor, model_indices):
header = None
out, close = process_file_arg(out_path, 'w', compresslevel=9)
omegas = []
psis = []
for i, d in enumerate(spreadsheet_iter([in_path])):
if i == 0:
header = d.keys()
out.write('{0}\n'.format('\t'.join(header)))
model_index = int(d['PRI.model'])
if model_index in model_indices:
            d['PRI.E.t'] = float(d['PRI.E.t']) * scale_factor
            # Var(s * tau) = s^2 * Var(tau), so the variance scales with the
            # square of the factor (keeping omega = Var/E consistent below).
            d['PRI.var.t'] = float(d['PRI.var.t']) * (scale_factor ** 2)
            d['PRI.omega'] = float(d['PRI.omega']) * scale_factor
omegas.append(d['PRI.omega'])
psis.append(int(d['PRI.Psi']))
out.write('{0}\n'.format('\t'.join([
            str(d[k]) for k in d])))
out.close()
return omegas, psis
def get_omega_and_mean_tau(post_path, model_indices):
    mean_tau = {i: [] for i in model_indices}
    omega = {i: [] for i in model_indices}
for d in spreadsheet_iter([post_path]):
model_index = (int(d['PRI.model']))
mean_tau[model_index].append(float(d['PRI.E.t']))
omega[model_index].append(float(d['PRI.omega']))
return omega, mean_tau
def get_posterior_plot(post_path, model_indices, scaled_model_indices,
xlim = None, ylim = None,
x_label = r'$Var(\tau)/E(\tau)$ ($\Omega$)',
y_label = r'$E(\tau)$'):
omega, mean_tau = get_omega_and_mean_tau(post_path, model_indices)
scatter_data = {}
xmin, xmax = 0., 0.
ymin, ymax = 0., 0.
for i in model_indices:
markeredgecolor = '0.5'
if i in scaled_model_indices:
markeredgecolor = '0.05'
x = omega[i]
y = mean_tau[i]
sd = plotting.ScatterData(x = x, y = y,
markeredgecolor = markeredgecolor)
scatter_data[i] = sd
xmin = min([xmin] + x)
ymin = min([ymin] + y)
xmax = max([xmax] + x)
ymax = max([ymax] + y)
xbuff = (xmax - xmin) * 0.04
ybuff = (ymax - ymin) * 0.04
if not xlim:
xlim = (xmin - xbuff, xmax + xbuff)
if not ylim:
ylim = (ymin - ybuff, ymax + ybuff)
sp = plotting.ScatterPlot(
scatter_data_list = scatter_data.values(),
x_label = x_label,
y_label = y_label,
xlim = xlim,
ylim = ylim)
return sp, xlim, ylim
def plot_posteriors(post_path, scaled_post_path, model_indices = range(1, 9),
scaled_model_indices = [5, 6]):
sp, xlim, ylim = get_posterior_plot(post_path, model_indices, scaled_model_indices,
xlim = None,
ylim = None,
x_label = None)
sp_scaled, xlim, ylim = get_posterior_plot(scaled_post_path, model_indices,
scaled_model_indices,
xlim = xlim,
ylim = ylim,
x_label = None,
y_label = None)
pg = plotting.PlotGrid(
subplots = [sp, sp_scaled],
num_columns = 2,
share_x = True,
share_y = True,
title = r'$Var(\tau)/E(\tau)$ ($\Omega$)',
title_top = False,
width = 8.0,
height = 3.5,
auto_height = False)
pg.auto_adjust_margins = False
pg.margin_left = 0.0
pg.margin_top = 0.96
pg.margin_bottom = 0.06
pg.reset_figure()
return pg
def process_posterior(post_path, scaled_post_path):
prior_prob_omega_less_than = 0.0887
scaled_indices = [5, 6]
omegas, psis = rescale_posterior(in_path = post_path,
out_path = scaled_post_path,
scale_factor = 0.4,
model_indices = scaled_indices)
summarize_results(scaled_post_path)
model_indices = range(1, 9)
pg = plot_posteriors(post_path, scaled_post_path, model_indices,
scaled_indices)
pg.savefig(os.path.join(os.path.dirname(post_path),
'mean_by_dispersion_rescaled.pdf'))
def main_cli():
# rescale posterior from Hickerson et al.
result_dir = os.path.join(project_util.PROJECT_DIR,
'hickerson-et-al-posterior', 'hickerson-posterior-1k')
post_sample_path = os.path.join(result_dir, 'posterior-from-mike-1k.txt.gz')
scaled_post_path = os.path.join(result_dir,
'rescaled-posterior-from-mike.txt.gz')
process_posterior(post_sample_path, scaled_post_path)
result_dir = os.path.join(project_util.PROJECT_DIR,
'hickerson-et-al-posterior', 'hickerson-posterior-10k')
post_sample_path = os.path.join(result_dir, 'posterior-from-mike-10k.txt.gz')
scaled_post_path = os.path.join(result_dir,
'rescaled-posterior-from-mike.txt.gz')
process_posterior(post_sample_path, scaled_post_path)
# rescale posterior calculated via EuReject from Hickerson et al.
result_dir = os.path.join(project_util.PROJECT_DIR,
'hickerson-et-al-posterior', 'eureject-results')
post_sample_path = os.path.join(result_dir, 'posterior-sample.txt.gz')
scaled_post_path = os.path.join(result_dir,
'rescaled-posterior-sample.txt.gz')
process_posterior(post_sample_path, scaled_post_path)
# rescale re-analysis posteriors
for d in ['hickerson', 'alt']:
result_dir = os.path.join(project_util.RESULT_DIR, d,
'pymsbayes-results')
post_sample_path = os.path.join(result_dir, 'pymsbayes-output',
'd1', 'm12345678-combined',
'd1-m12345678-combined-s1-25-posterior-sample.txt.gz')
scaled_post_path = os.path.join(result_dir,
'rescaled-posterior.txt.gz')
process_posterior(post_sample_path, scaled_post_path)
if __name__ == '__main__':
main_cli()
```
#### File: slides/bin/partition-number-plots.py
```python
import os
import sys
import matplotlib
from pymsbayes import plotting
from pymsbayes.utils import stats
from pymsbayes.utils.messaging import get_logger
import project_util
_LOG = get_logger(__name__)
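# The "ordered" counts below are assumed to be Stirling numbers of the second
# kind, S(n, k): the number of ways to partition n labeled taxon pairs into k
# non-empty divergence-time classes.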
def get_ordered_divergence_model_numbers(num_pairs):
part = stats.Partition([0] * num_pairs)
prob_div_models = []
num_div_models = [part.number_of_partitions_into_k_subsets(i+1) for i in range(num_pairs)]
prob_div_models = [((1.0 / num_pairs) / x) for x in num_div_models]
return num_div_models, prob_div_models
def get_unordered_divergence_model_numbers(num_pairs):
part = stats.IntegerPartition([0] * num_pairs)
prob_div_models = []
num_div_models = part.number_of_int_partitions_by_k(num_pairs)
prob_div_models = [((1.0 / num_pairs) / x) for x in num_div_models]
return num_div_models, prob_div_models
def create_plots(n = 22,
ordered = True,
x_label_size = 24.0,
y_label_size = 24.0,
xtick_label_size = 16.0,
ytick_label_size = 14.0,
height = 6.0,
width = 8.0,
margin_bottom = 0.0,
margin_left = 0.0,
margin_top = 1.0,
margin_right = 1.0,
padding_between_vertical = 1.0):
if ordered:
num_div_models, prob_div_models = get_ordered_divergence_model_numbers(n)
else:
num_div_models, prob_div_models = get_unordered_divergence_model_numbers(n)
keys = [(i + 1) for i in range(n)]
num_bar_data = plotting.BarData(
values = num_div_models,
labels = keys,
width = 1.0,
orientation = 'vertical',
color = '0.5',
edgecolor = '0.5',
label_size = xtick_label_size,
measure_tick_label_size = ytick_label_size,
zorder = 0)
prob_bar_data = plotting.BarData(
values = prob_div_models,
labels = keys,
width = 1.0,
orientation = 'vertical',
color = '0.5',
edgecolor = '0.5',
label_size = xtick_label_size,
measure_tick_label_size = ytick_label_size,
zorder = 0)
ymax = 1.05 * max(num_div_models)
num_plot = plotting.ScatterPlot(
bar_data_list = [num_bar_data],
x_label = '# of divergence events',
y_label = '# of divergence models',
x_label_size = x_label_size,
y_label_size = y_label_size,
ylim = (0, ymax),
)
ymax = 1.05 * max(prob_div_models)
prob_plot = plotting.ScatterPlot(
bar_data_list = [prob_bar_data],
x_label = '# of divergence events',
y_label = 'Prior probability',
x_label_size = x_label_size,
y_label_size = y_label_size,
ylim = (0, ymax),
)
for p in [num_plot, prob_plot]:
yticks = [i for i in p.ax.get_yticks()]
ytick_labels = [i for i in yticks]
if len(ytick_labels) > 5:
for i in range(1, len(ytick_labels), 2):
ytick_labels[i] = ''
yticks_obj = plotting.Ticks(ticks = yticks,
labels = ytick_labels,
size = ytick_label_size)
p.yticks_obj = yticks_obj
# return num_plot, prob_plot
num_grid = plotting.PlotGrid(subplots = [num_plot],
label_schema = None,
num_columns = 1,
height = height,
width = width,
auto_height = False)
prob_grid = plotting.PlotGrid(subplots = [prob_plot],
label_schema = None,
num_columns = 1,
height = height,
width = width,
auto_height = False)
for p in [num_grid, prob_grid]:
p.auto_adjust_margins = False
p.margin_top = margin_top
p.margin_bottom = margin_bottom
p.margin_right = margin_right
p.margin_left = margin_left
p.padding_between_vertical = padding_between_vertical
p.reset_figure()
p.reset_figure()
return num_grid, prob_grid
if __name__ == '__main__':
num_plot, prob_plot = create_plots(n = 22)
num_plot.savefig(os.path.join(project_util.IMAGE_DIR, 'number-of-div-models-22.pdf'))
prob_plot.savefig(os.path.join(project_util.IMAGE_DIR, 'prob-of-div-models-22.pdf'))
num_plot, prob_plot = create_plots(n = 22, ordered = False)
num_plot.savefig(os.path.join(project_util.IMAGE_DIR, 'number-of-div-models-22-unordered.pdf'))
prob_plot.savefig(os.path.join(project_util.IMAGE_DIR, 'prob-of-div-models-22-unordered.pdf'))
num_plot, prob_plot = create_plots(n = 10,
ordered = True)
num_plot.savefig(os.path.join(project_util.IMAGE_DIR, 'number-of-div-models-10.pdf'))
prob_plot.savefig(os.path.join(project_util.IMAGE_DIR, 'prob-of-div-models-10.pdf'))
```
#### File: validation/bin/parse-dpp-results.py
```python
import os
import sys
from pymsbayes.utils.parsing import DMCSimulationResults
from pymsbayes.utils.messaging import get_logger
import project_util
_LOG = get_logger(__name__)
def main_cli():
info_path = os.path.join(project_util.RESULT_DIR, 'dpp',
'pymsbayes-results', 'pymsbayes-info.txt')
_LOG.info('Parsing and writing results...')
results = DMCSimulationResults(info_path)
prior_indices = results.prior_index_to_config.keys()
results.write_result_summaries(
prior_indices = prior_indices,
include_tau_exclusion_info = False)
if __name__ == '__main__':
main_cli()
``` |
{
"source": "joalder/billstation",
"score": 3
} |
#### File: billstation/station/tests.py
```python
from decimal import Decimal
from django.test import TestCase
# Create your tests here.
from station.models import Dude, Bill, Payment, TWOPLACES
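# TWOPLACES is assumed to be Decimal('0.01'); quantize(TWOPLACES) rounds
# currency amounts to two decimal places.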
class BillAndPaymentTests(TestCase):
def setUp(self):
self.dude_one = Dude.objects.create(name="<NAME>")
self.dude_two = Dude.objects.create(name="<NAME>")
self.dude_three = Dude.objects.create(name="<NAME>")
self.bill_even = Bill.objects.create(description="Migros", amount=12.22, owner=self.dude_one)
self.bill_even.affected_dudes = [self.dude_one, self.dude_two]
self.bill_even.save()
self.bill_odd = Bill.objects.create(description="Coop", amount=27.20, owner=self.dude_two, )
self.bill_odd.affected_dudes = [self.dude_one, self.dude_two, self.dude_three]
self.bill_odd.save()
def test_bill_basic_split(self):
self.assertEqual(self.bill_even.total_remaining(), Decimal(self.bill_even.amount / 2).quantize(TWOPLACES))
self.assertEqual(self.bill_even.remaining(self.dude_one), Decimal(0))
self.assertEqual(self.bill_even.remaining(self.dude_two),
Decimal(self.bill_even.amount / 2).quantize(TWOPLACES))
self.assertFalse(self.bill_even.is_paid())
Payment.objects.create(bill=self.bill_even, by=self.dude_two, amount=self.bill_even.remaining(self.dude_two))
self.assertEqual(self.bill_even.total_remaining(), Decimal(0))
self.assertEqual(self.bill_even.remaining(self.dude_one), Decimal(0))
self.assertEqual(self.bill_even.remaining(self.dude_two), Decimal(0))
self.assertTrue(self.bill_even.is_paid())
def test_bill_three_way_split(self):
self.assertEqual(self.bill_odd.total_remaining(),
(Decimal(self.bill_odd.amount) - Decimal(self.bill_odd.amount / 3)).quantize(TWOPLACES))
self.assertEqual(self.bill_odd.remaining(self.dude_two), Decimal(0))
self.assertEqual(self.bill_odd.remaining(self.dude_one), Decimal(self.bill_odd.amount / 3).quantize(TWOPLACES))
self.assertEqual(self.bill_odd.remaining(self.dude_three),
Decimal(self.bill_odd.amount / 3).quantize(TWOPLACES))
self.assertFalse(self.bill_odd.is_paid())
Payment.objects.create(bill=self.bill_odd, by=self.dude_one, amount=self.bill_odd.remaining(self.dude_one))
self.assertAlmostEqual(self.bill_odd.total_remaining(), Decimal(self.bill_odd.amount / 3).quantize(TWOPLACES),
places=1)
self.assertEqual(self.bill_odd.remaining(self.dude_one), Decimal(0))
self.assertEqual(self.bill_odd.remaining(self.dude_two), Decimal(0))
self.assertFalse(self.bill_odd.is_paid())
Payment.objects.create(bill=self.bill_odd, by=self.dude_three, amount=self.bill_odd.remaining(self.dude_three))
self.assertAlmostEqual(self.bill_odd.total_remaining(), Decimal(0), places=1)
self.assertEqual(self.bill_odd.remaining(self.dude_one), Decimal(0))
self.assertEqual(self.bill_odd.remaining(self.dude_two), Decimal(0))
self.assertEqual(self.bill_odd.remaining(self.dude_three), Decimal(0))
self.assertTrue(self.bill_odd.is_paid())
``` |
{
"source": "Joald/pyrec",
"score": 3
} |
#### File: Joald/pyrec/pyrec.py
```python
from functools import wraps, partial
# using convoluted name to really avoid namespace pollution
__rec_depth_kwargs_names = {
'depth': '__rec_depth_current_depth',
'default_return': '__rec_depth_default_return'
}
def rec_depth_limit(depth, default_return=0):
"""Decorator factory that limits the depth of the recursion for a function
@:param depth - how many levels of recursive calls are allowed. Setting it to 0 means that none are allowed
Note that this doesn't limit the number of calls, so if a function always does two recursive calls,
then setting depth to n will produce 2^(n+1) calls to the function
    @:param default_return - the value returned by any disallowed recursive call
"""
def decorate(f, **kwargs):
@wraps(f)
def _decorator(*args, **kwargs):
fkwargs = kwargs.copy()
for i in __rec_depth_kwargs_names.values():
fkwargs.pop(i)
cur_depth = kwargs[__rec_depth_kwargs_names['depth']]
if cur_depth[0] < 0:
return kwargs[__rec_depth_kwargs_names['default_return']]
else:
cur_depth[0] -= 1
rv = f(*args, **fkwargs)
cur_depth[0] += 1
return rv
return partial(_decorator, **kwargs)
cur_depth = [depth] # using a list to make the int mutable
values = [cur_depth, default_return]
kwargs = dict(zip(__rec_depth_kwargs_names.values(), values))
return partial(decorate, **kwargs)
__rec_call_limit_kwargs_names = {
'number': '__rec_call_limit_number',
'default_return': '__rec_call_limit_default_return'
}
def rec_call_limit(limit, default_return=0):
"""Decorator factory that limits the number of recursive calls for a function
@:param limit - number of calls that will be allowed
    @:param default_return - the value returned by any disallowed recursive call
"""
def decorate(f, **kwargs):
@wraps(f)
def _decorator(*args, **kwargs):
fkwargs = kwargs.copy()
for i in __rec_call_limit_kwargs_names.values():
fkwargs.pop(i)
count = kwargs[__rec_call_limit_kwargs_names['number']]
if count[0] < 0:
return kwargs[__rec_call_limit_kwargs_names['default_return']]
else:
count[0] -= 1
rv = f(*args, **fkwargs)
return rv
return partial(_decorator, **kwargs)
count = [limit]
values = [count, default_return]
kwargs = dict(zip(__rec_call_limit_kwargs_names.values(), values))
return partial(decorate, **kwargs)
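
if __name__ == '__main__':
    # Minimal usage sketch (illustration only; fib is a hypothetical example
    # function, not part of the original module). Recursive calls past the
    # depth limit return default_return, so the result only approximates
    # fib(10) from below.
    @rec_depth_limit(3, default_return=0)
    def fib(n):
        if n < 2:
            return n
        return fib(n - 1) + fib(n - 2)
    print(fib(10))

    # rec_call_limit works the same way but caps the total number of calls.
    @rec_call_limit(20, default_return=0)
    def fib_capped(n):
        if n < 2:
            return n
        return fib_capped(n - 1) + fib_capped(n - 2)
    print(fib_capped(10))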
``` |
{
"source": "Joalland/orgextended",
"score": 2
} |
#### File: Joalland/orgextended/orgcheckbox.py
```python
import sublime
import sublime_plugin
import datetime
import re
import os
import fnmatch
import OrgExtended.orgparse.node as node
import OrgExtended.orgutil.util as util
import OrgExtended.orgutil.navigation as nav
import OrgExtended.orgutil.template as templateEngine
import logging
import sys
import traceback
import OrgExtended.orgdb as db
import OrgExtended.asettings as sets
import OrgExtended.orgcapture as capture
import sys
import os.path
import fnmatch
log = logging.getLogger(__name__)
# Stolen from the original orgmode
class CheckState:
Unchecked, Checked, Indeterminate, Error = range(1, 5)
indent_regex = re.compile(r'^(\s*).*$')
summary_regex = re.compile(r'(\[\d*[/%]\d*\])')
checkbox_regex = re.compile(r'(\[[xX\- ]\])')
checkbox_line_regex = re.compile(r'\s*[-+]?\s*(\[[xX\- ]\])\s+')
# Extract the indent of this checkbox.
# RETURNS: a string with the indent of this line.
def get_indent(view, content):
if isinstance(content, sublime.Region):
content = view.substr(content)
match = indent_regex.match(content)
if(match):
return match.group(1)
else:
log.debug("Could not match indent: " + content)
return ""
RE_HEADING = re.compile('^[*]+ ')
# Try to find the parent of a region (by indent)
def find_parent(view, region):
row, col = view.rowcol(region.begin())
content = view.substr(view.line(region))
indent = len(get_indent(view, content))
row -= 1
found = False
# Look upward
while row >= 0:
point = view.text_point(row, 0)
content = view.substr(view.line(point))
if len(content.strip()):
if(RE_HEADING.search(content)):
break
cur_indent = len(get_indent(view, content))
if cur_indent < indent:
found = True
break
row -= 1
if found:
# return the parent we found.
return view.line(view.text_point(row,0))
def find_children(view, region, cre = checkbox_regex, includeSiblings=False, recursiveChildFind=False):
row, col = view.rowcol(region.begin())
line = view.line(region)
content = view.substr(line)
# print content
indent = get_indent(view, content)
if(not indent):
log.debug("Unable to locate indent for line: " + str(row))
indent = len(indent)
# print repr(indent)
row += 1
child_indent = None
children = []
last_row, _ = view.rowcol(view.size())
while row <= last_row:
point = view.text_point(row, 0)
line = view.line(point)
content = view.substr(line)
summary = get_summary(view, line)
lc = content.lstrip()
        if (summary and lc.startswith("*")) or lc.startswith('#'):
break
if cre.search(content):
cur_indent = len(get_indent(view, content))
# check for end of descendants
if includeSiblings and cur_indent < indent:
break
elif not includeSiblings and cur_indent <= indent:
break
# only immediate children (and siblings)
if(not recursiveChildFind):
if child_indent is None:
child_indent = cur_indent
if cur_indent == child_indent:
children.append(line)
if(includeSiblings and cur_indent < child_indent):
children.append(line)
else:
children.append(line)
row += 1
return children
def find_siblings(view, child, parent):
row, col = view.rowcol(parent.begin())
parent_indent = get_indent(view, parent)
child_indent = get_indent(view, child)
siblings = []
row += 1
last_row, _ = view.rowcol(view.size())
while row <= last_row: # Don't go past end of document.
line = view.text_point(row, 0)
line = view.line(line)
content = view.substr(line)
# print content
if len(content.strip()):
cur_indent = get_indent(view, content)
if len(cur_indent) <= len(parent_indent):
break # Indent same as parent found!
if len(cur_indent) == len(child_indent):
siblings.append((line, content))
row += 1
return siblings
def get_summary(view, line):
row, _ = view.rowcol(line.begin())
content = view.substr(line)
match = summary_regex.search(content)
if not match:
return None
col_start, col_stop = match.span()
return sublime.Region(
view.text_point(row, col_start),
view.text_point(row, col_stop),
)
def get_checkbox(view, line):
row, _ = view.rowcol(line.begin())
content = view.substr(line)
# print content
match = checkbox_regex.search(content)
if not match:
return None
# checkbox = match.group(1)
# print repr(checkbox)
# print dir(match), match.start(), match.span()
col_start, col_stop = match.span()
return sublime.Region(
view.text_point(row, col_start),
view.text_point(row, col_stop),
)
def get_check_state(view, line):
if '[-]' in view.substr(line):
return CheckState.Indeterminate
if '[ ]' in view.substr(line):
return CheckState.Unchecked
if '[X]' in view.substr(line) or '[x]' in view.substr(line):
return CheckState.Checked
return CheckState.Error
def get_check_char(view, check_state):
if check_state == CheckState.Unchecked:
return ' '
elif check_state == CheckState.Checked:
return 'x'
elif check_state == CheckState.Indeterminate:
return '-'
else:
return 'E'
def recalc_summary(view, region):
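    # COOKIE_DATA is a standard org-mode heading property; a drawer such as
    #   :PROPERTIES:
    #   :COOKIE_DATA: recursive
    #   :END:
    # forces the [/] summary to count checkboxes at every nested level.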
recursive = sets.Get("checkboxSummaryRecursive",False)
at = db.Get().AtInView(view)
if(at):
props = at.properties
if(props and 'COOKIE_DATA' in props):
cook = props['COOKIE_DATA']
if(cook and 'notrecursive' in cook):
recursive = False
elif(cook and 'recursive' in cook):
recursive = True
    children = find_children(view, region, checkbox_regex, False, recursive)
if not len(children) > 0:
return (0, 0)
num_children = len(children)
checked_children = len(
[child for child in children if (get_check_state(view,child) == CheckState.Checked)])
# print ('checked_children: ' + str(checked_children) + ', num_children: ' + str(num_children))
return (num_children, checked_children)
def update_line(view, edit, region, parent_update=True):
#print ('update_line', self.view.rowcol(region.begin())[0]+1)
(num_children, checked_children) = recalc_summary(view, region)
# No children we don't have to update anything else.
if num_children <= 0:
return False
# update region checkbox
if checked_children == num_children:
newstate = CheckState.Checked
else:
if checked_children != 0:
newstate = CheckState.Indeterminate
else:
newstate = CheckState.Unchecked
toggle_checkbox(view, edit, region, newstate)
# update region summary
update_summary(view, edit, region, checked_children, num_children)
children = find_children(view, region)
for child in children:
line = view.line(child)
summary = get_summary(view, view.line(child))
if summary:
return update_line(view, edit, line, parent_update=False)
if parent_update:
parent = find_parent(view, region)
if parent:
update_line(view, edit, parent)
return True
def update_summary(view, edit, region, checked_children, num_children):
# print('update_summary', self.view.rowcol(region.begin())[0]+1)
summary = get_summary(view, region)
if not summary:
return False
# print('checked_children: ' + str(checked_children) + ', num_children: ' + str(num_children))
line = view.substr(summary)
if("%" in line):
view.replace(edit, summary, '[{0}%]'.format(int(checked_children/num_children*100)))
else:
view.replace(edit, summary, '[%d/%d]' % (checked_children, num_children))
def toggle_checkbox(view, edit, region, checked=None, recurse_up=False, recurse_down=False):
# print 'toggle_checkbox', self.view.rowcol(region.begin())[0]+1
checkbox = get_checkbox(view, region)
if not checkbox:
return False
if checked is None:
check_state = get_check_state(view, region)
        if (check_state == CheckState.Unchecked) or (check_state == CheckState.Indeterminate):
            check_state = CheckState.Checked
        elif check_state == CheckState.Checked:
check_state = CheckState.Unchecked
else:
check_state = checked
view.replace(edit, checkbox, '[%s]' % ( get_check_char(view, check_state)))
if recurse_down:
# all children should follow
children = find_children(view, region)
for child in children:
toggle_checkbox(view, edit, child, check_state, recurse_down=True)
if recurse_up:
# update parent
parent = find_parent(view, region)
if parent:
update_line(view, edit, parent)
def is_checkbox(view, sel):
names = view.scope_name(sel.end())
return 'orgmode.checkbox' in names or 'orgmode.checkbox.checked' in names or 'orgmode.checkbox.blocked' in names
def is_checkbox_line(view,sel=None):
point = None
if(sel == None):
row = view.curRow()
point = view.text_point(row, 0)
else:
point = sel.end()
line = view.line(point)
content = view.substr(line)
return checkbox_line_regex.search(content)
def find_all_summaries(view):
return view.find_by_selector("orgmode.checkbox.summary")
def recalculate_checkbox_summary(view, sel, edit):
line = view.line(sel.begin())
update_line(view, edit, line)
def recalculate_all_checkbox_summaries(view, edit):
sums = find_all_summaries(view)
for sel in sums:
recalculate_checkbox_summary(view, sel, edit)
cline_info_regex = re.compile(r'^(\s*)([-+0-9](\.)?)?.*$')
class OrgInsertCheckboxCommand(sublime_plugin.TextCommand):
def run(self, edit,insertHere=True):
row = self.view.curRow()
line = self.view.getLine(row)
match = cline_info_regex.match(line)
indent = match.group(1)
start = match.group(2)
if(start):
indent = indent + start + " [ ] "
reg = self.view.curLine()
list_regex = re.compile(r'\s*(([-+]\s\[)|[^#*|+-])')
children = find_children(self.view, reg, list_regex, not insertHere)
if(children and len(children) > 0):
reg = children[len(children) - 1]
row,_ =self.view.rowcol(reg.begin())
self.view.insert(edit,reg.end(),"\n" + indent)
# Move to end of line
row = row + 1
pt = self.view.text_point(row,0)
ln = self.view.line(pt)
self.view.sel().clear()
self.view.sel().add(ln.end())
uline_info_regex = re.compile(r'^(\s*)([-+]) .*$')
def isUnorderedList(line):
return uline_info_regex.match(line)
RE_THING = re.compile(r'^\s*[+-](\s\[[ xX-]\])?\s(?P<data>.*)$')
RE_NOTHEADERS = re.compile(r'^\s*[\#|0-9]')
def getListAtPointForSorting(view):
parent = view.findParentByIndent(view.curLine(),RE_NOTHEADERS, RE_THING)
if(None != parent):
prow, _ = view.rowcol(parent.begin())
list_regex = re.compile(r'\s*(([-+]\s\[)|[^#*|+-])')
children = find_children(view, parent, list_regex, True)
sortby = view.getLine(prow)
m = RE_THING.search(sortby)
if(m):
sortby = m.group('data')
things = [[[prow,0],sortby]]
for c in children:
srow, _ = view.rowcol(c.begin())
if(len(things) > 0):
things[len(things)-1][0][1] = srow
sortby = view.getLine(srow)
m = RE_THING.search(sortby)
if(m):
sortby = m.group('data')
things.append([[srow,0],sortby])
if(len(things) > 0):
srow, _ = view.rowcol(children[len(children)-1].end())
things[len(things)-1][0][1] = srow+1
return things
return None
def getListAtPoint(view,pt=None):
if(pt):
line = view.line(pt)
else:
line = view.curLine()
parent = view.findParentByIndent(line,RE_NOTHEADERS, RE_THING)
if(None != parent):
prow, _ = view.rowcol(parent.begin())
list_regex = re.compile(r'\s*(([-+]\s\[)|[^#*|+-])')
children = find_children(view, parent, list_regex, True)
sortby = view.getLine(prow)
m = RE_THING.search(sortby)
if(m):
sortby = m.group('data')
things = []
lastAppend = False
for c in children:
srow, _ = view.rowcol(c.begin())
if(lastAppend and len(things) > 0):
things[len(things)-1][0][1] = srow
lastAppend = False
sortby = view.getLine(srow)
m = RE_THING.search(sortby)
if(m):
sortby = m.group('data')
things.append([[srow,0],sortby])
lastAppend = True
if(len(things) > 0):
srow, _ = view.rowcol(children[len(children)-1].end())
things[len(things)-1][0][1] = srow+1
return things
return None
class OrgInsertUnorderedListCommand(sublime_plugin.TextCommand):
def run(self, edit,insertHere=True):
row = self.view.curRow()
line = self.view.getLine(row)
match = uline_info_regex.match(line)
indent = match.group(1)
start = match.group(2)
if(start):
indent = indent + start + " "
reg = self.view.curLine()
list_regex = re.compile(r'\s*([-+]|[^#*|])')
children = find_children(self.view, reg, list_regex, not insertHere)
if(children and len(children) > 0):
reg = children[len(children) - 1]
row,_ =self.view.rowcol(reg.begin())
self.view.insert(edit,reg.end(),"\n" + indent)
# Move to end of line
row = row + 1
pt = self.view.text_point(row,0)
ln = self.view.line(pt)
self.view.sel().clear()
self.view.sel().add(ln.end())
cbsline_info_regex = re.compile(r'^(\s*)(.*)\[\s*[0-9]*/[0-9]\s*\]\s*$')
class OrgInsertCheckboxSummaryCommand(sublime_plugin.TextCommand):
def run(self, edit):
row = self.view.curRow()
line = self.view.getLine(row)
match = cbsline_info_regex.match(line)
if(not match):
reg = self.view.curLine()
self.view.insert(edit,reg.end()," [/] ")
recalculate_all_checkbox_summaries(self.view, edit)
class OrgToggleCheckboxCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
for sel in view.sel():
if(not is_checkbox_line(view, sel)):
continue
line = view.line(sel.end())
toggle_checkbox(view, edit, line, recurse_up=True, recurse_down=True)
recalculate_all_checkbox_summaries(self.view, edit)
class OrgRecalcCheckboxSummaryCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
backup = []
for sel in view.sel():
if 'orgmode.checkbox.summary' not in view.scope_name(sel.end()):
continue
backup.append(sel)
#summary = view.extract_scope(sel.end())
line = view.line(sel.end())
update_line(view, edit, line)
view.sel().clear()
for region in backup:
view.sel().add(region)
class OrgRecalcAllCheckboxSummariesCommand(sublime_plugin.TextCommand):
def run(self, edit):
recalculate_all_checkbox_summaries(self.view, edit)
``` |
{
"source": "joalmeid/AzureTRE",
"score": 2
} |
#### File: db/repositories/operations.py
```python
from datetime import datetime
import uuid
from typing import List
from azure.cosmos import CosmosClient
from pydantic import parse_obj_as
from models.domain.authentication import User
from core import config
from db.repositories.base import BaseRepository
from db.errors import EntityDoesNotExist
from models.domain.operation import Operation, Status
class OperationRepository(BaseRepository):
def __init__(self, client: CosmosClient):
super().__init__(client, config.STATE_STORE_OPERATIONS_CONTAINER)
@staticmethod
def operations_query():
return 'SELECT * FROM c WHERE'
def create_operation_item(self, resource_id: str, status: Status, action: str, message: str, resource_path: str, user: User) -> Operation:
operation_id = str(uuid.uuid4())
timestamp = datetime.utcnow().timestamp()
operation = Operation(
id=operation_id,
resourceId=resource_id,
resourcePath=resource_path,
status=status,
resourceVersion=0, # Resource versioning coming in future
createdWhen=timestamp,
updatedWhen=timestamp,
action=action,
message=message,
user=user
)
self.save_item(operation)
return operation
def update_operation_status(self, operation_id: str, status: Status, message: str) -> Operation:
operation = self.get_operation_by_id(operation_id)
operation.status = status
operation.message = message
operation.updatedWhen = datetime.utcnow().timestamp()
self.update_item(operation)
return operation
def get_operation_by_id(self, operation_id: str) -> Operation:
"""
returns a single operation doc
"""
query = self.operations_query() + f' c.id = "{operation_id}"'
operation = self.query(query=query)
if not operation:
raise EntityDoesNotExist
return parse_obj_as(Operation, operation[0])
def get_operations_by_resource_id(self, resource_id: str) -> List[Operation]:
"""
returns a list of operations for this resource
"""
query = self.operations_query() + f' c.resourceId = "{resource_id}"'
operations = self.query(query=query)
return parse_obj_as(List[Operation], operations)
def resource_has_deployed_operation(self, resource_id: str) -> bool:
"""
checks whether this resource has a successful "deployed" operation
"""
query = self.operations_query() + f' c.resourceId = "{resource_id}" AND c.status = "{Status.Deployed}"'
operations = self.query(query=query)
return len(operations) > 0
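
# Illustrative usage (hypothetical wiring; assumes a configured CosmosClient
# and a User parsed from the request context):
#   repo = OperationRepository(cosmos_client)
#   op = repo.create_operation_item(
#       resource_id=workspace_id, status=Status.Deployed, action="install",
#       message="installed", resource_path=f"/workspaces/{workspace_id}",
#       user=current_user)
#   repo.update_operation_status(op.id, Status.Deployed, "deployment succeeded")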
```
#### File: test_db/test_repositories/test_resource_repository.py
```python
import copy
import uuid
import pytest
from mock import patch, MagicMock
from jsonschema.exceptions import ValidationError
from tests_ma.test_api.test_routes.test_resource_helpers import FAKE_CREATE_TIMESTAMP, FAKE_UPDATE_TIMESTAMP
from tests_ma.test_api.conftest import create_test_user
from db.errors import EntityDoesNotExist
from db.repositories.resources import ResourceRepository
from models.domain.resource import Resource, ResourceHistoryItem
from models.domain.resource_template import ResourceTemplate
from models.domain.user_resource_template import UserResourceTemplate
from models.domain.workspace import ResourceType
from models.schemas.resource import ResourcePatch
from models.schemas.workspace import WorkspaceInCreate
RESOURCE_ID = str(uuid.uuid4())
@pytest.fixture
def resource_repo():
with patch('azure.cosmos.CosmosClient') as cosmos_client_mock:
yield ResourceRepository(cosmos_client_mock)
@pytest.fixture
def workspace_input():
return WorkspaceInCreate(templateName="base-tre", properties={"display_name": "test", "description": "test", "app_id": "123"})
def sample_resource() -> Resource:
return Resource(
id=RESOURCE_ID,
isActive=True,
isEnabled=True,
resourcePath="/resource/path",
templateName="template_name",
templateVersion="template_version",
properties={
'display_name': 'initial display name',
'description': 'initial description',
'computed_prop': 'computed_val'
},
resourceType=ResourceType.Workspace,
etag="some-etag-value",
resourceVersion=0,
updatedWhen=FAKE_CREATE_TIMESTAMP,
user=create_test_user()
)
def sample_resource_template() -> ResourceTemplate:
return ResourceTemplate(id="123",
name="tre-user-resource",
description="description",
version="0.1.0",
resourceType=ResourceType.UserResource,
current=True,
required=['os_image', 'title'],
properties={
'title': {
'type': 'string',
'title': 'Title of the resource'
},
'os_image': {
'type': 'string',
'title': 'Windows image',
'description': 'Select Windows image to use for VM',
'enum': [
'Windows 10',
'Server 2019 Data Science VM'
],
'updateable': False
},
'vm_size': {
'type': 'string',
'title': 'Windows image',
'description': 'Select Windows image to use for VM',
'enum': [
'small',
'large'
],
'updateable': True
}
},
actions=[]).dict(exclude_none=True)
@patch("db.repositories.resources.ResourceRepository._get_enriched_template")
@patch("db.repositories.resources.ResourceRepository._validate_resource_parameters", return_value=None)
def test_validate_input_against_template_returns_template_version_if_template_is_valid(_, enriched_template_mock, resource_repo, workspace_input):
enriched_template_mock.return_value = ResourceTemplate(id="123",
name="template1",
description="description",
version="0.1.0",
resourceType=ResourceType.Workspace,
current=True,
required=[],
properties={},
customActions=[]).dict()
template_version = resource_repo.validate_input_against_template("template1", workspace_input, ResourceType.Workspace)
assert template_version == "0.1.0"
@patch("db.repositories.resources.ResourceRepository._get_enriched_template")
def test_validate_input_against_template_raises_value_error_if_template_does_not_exist(enriched_template_mock, resource_repo, workspace_input):
enriched_template_mock.side_effect = EntityDoesNotExist
with pytest.raises(ValueError):
resource_repo.validate_input_against_template("template_name", workspace_input, ResourceType.Workspace)
@patch("db.repositories.resources.ResourceRepository._get_enriched_template")
def test_validate_input_against_template_raises_value_error_if_the_user_resource_template_does_not_exist_for_the_given_workspace_service(enriched_template_mock, resource_repo, workspace_input):
enriched_template_mock.side_effect = EntityDoesNotExist
with pytest.raises(ValueError):
resource_repo.validate_input_against_template("template_name", workspace_input, ResourceType.UserResource, "parent_template_name")
@patch("db.repositories.resources.ResourceRepository._get_enriched_template")
def test_validate_input_against_template_raises_value_error_if_payload_is_invalid(enriched_template_mock, resource_repo):
enriched_template_mock.return_value = ResourceTemplate(id="123",
name="template1",
description="description",
version="0.1.0",
resourceType=ResourceType.Workspace,
current=True,
required=["display_name"],
properties={},
customActions=[]).dict()
# missing display name
workspace_input = WorkspaceInCreate(templateName="template1")
with pytest.raises(ValidationError):
resource_repo.validate_input_against_template("template1", workspace_input, ResourceType.Workspace)
@patch("db.repositories.resources.ResourceTemplateRepository.get_current_template")
def test_get_enriched_template_returns_the_enriched_template(get_current_mock, resource_repo):
workspace_template = ResourceTemplate(id="abc", name="template1", description="", version="", resourceType=ResourceType.Workspace, current=True, required=[], properties={}, customActions=[])
get_current_mock.return_value = workspace_template
template = resource_repo._get_enriched_template("template1", ResourceType.Workspace)
get_current_mock.assert_called_once_with('template1', ResourceType.Workspace, '')
assert "display_name" in template["properties"]
@patch("db.repositories.resources.ResourceTemplateRepository.get_current_template")
def test_get_enriched_template_returns_the_enriched_template_for_user_resources(get_current_mock, resource_repo):
user_resource_template = UserResourceTemplate(id="abc", name="template1", description="", version="", resourceType=ResourceType.Workspace, current=True, required=[], properties={}, customActions=[], parentWorkspaceService="parent-template1")
get_current_mock.return_value = user_resource_template
template = resource_repo._get_enriched_template("template1", ResourceType.UserResource, "parent-template1")
get_current_mock.assert_called_once_with('template1', ResourceType.UserResource, 'parent-template1')
assert "display_name" in template["properties"]
def test_get_resource_dict_by_id_queries_db(resource_repo):
item_id = "123"
resource_repo.query = MagicMock(return_value=[{"id": item_id}])
resource_repo.get_resource_dict_by_id(item_id)
resource_repo.query.assert_called_once_with(query='SELECT * FROM c WHERE c.isActive != false AND c.id = "123"')
def test_get_resource_dict_by_id_raises_entity_does_not_exist_if_no_resources_come_back(resource_repo):
item_id = "123"
resource_repo.query = MagicMock(return_value=[])
with pytest.raises(EntityDoesNotExist):
resource_repo.get_resource_dict_by_id(item_id)
@patch('db.repositories.resources.ResourceRepository.validate_patch')
@patch('db.repositories.resources.ResourceRepository.get_timestamp', return_value=FAKE_UPDATE_TIMESTAMP)
def test_patch_resource_preserves_property_history(_, __, resource_repo):
"""
Tests that properties are copied into a history array and only certain values in the root are updated
"""
resource_repo.update_item_with_etag = MagicMock(return_value=None)
resource_patch = ResourcePatch(isEnabled=True, properties={'display_name': 'updated name'})
etag = "some-etag-value"
user = create_test_user()
resource = sample_resource()
expected_resource = sample_resource()
expected_resource.history = [
ResourceHistoryItem(
isEnabled=True,
resourceVersion=0,
updatedWhen=FAKE_CREATE_TIMESTAMP,
properties={'display_name': 'initial display name', 'description': 'initial description', 'computed_prop': 'computed_val'},
user=user)]
expected_resource.properties['display_name'] = 'updated name'
expected_resource.resourceVersion = 1
expected_resource.user = user
expected_resource.updatedWhen = FAKE_UPDATE_TIMESTAMP
resource_repo.patch_resource(resource, resource_patch, None, etag, None, user)
resource_repo.update_item_with_etag.assert_called_once_with(expected_resource, etag)
# now patch again
new_resource = copy.deepcopy(expected_resource) # new_resource is after the first patch
new_patch = ResourcePatch(isEnabled=False, properties={'display_name': 'updated name 2'})
expected_resource.history.append(
ResourceHistoryItem(
isEnabled=True,
resourceVersion=1,
updatedWhen=FAKE_UPDATE_TIMESTAMP,
properties={'display_name': 'updated name', 'description': 'initial description', 'computed_prop': 'computed_val'},
user=user
)
)
expected_resource.resourceVersion = 2
expected_resource.properties['display_name'] = "updated name 2"
expected_resource.isEnabled = False
expected_resource.user = user
resource_repo.patch_resource(new_resource, new_patch, None, etag, None, user)
resource_repo.update_item_with_etag.assert_called_with(expected_resource, etag)
@patch('db.repositories.resources.ResourceTemplateRepository.enrich_template')
def test_validate_patch_with_good_fields_passes(template_repo, resource_repo):
"""
Make sure that patch is NOT valid when non-updateable fields are included
"""
template_repo.enrich_template = MagicMock(return_value=sample_resource_template())
template = sample_resource_template()
# check it's valid when updating a single updateable prop
patch = ResourcePatch(isEnabled=True, properties={'vm_size': 'large'})
resource_repo.validate_patch(patch, template_repo, template)
@patch('db.repositories.resources.ResourceTemplateRepository.enrich_template')
def test_validate_patch_with_bad_fields_fails(template_repo, resource_repo):
"""
Make sure that patch is NOT valid when non-updateable fields are included
"""
template_repo.enrich_template = MagicMock(return_value=sample_resource_template())
template = sample_resource_template()
# check it's invalid when sending an unexpected field
patch = ResourcePatch(isEnabled=True, properties={'vm_size': 'large', 'unexpected_field': 'surprise!'})
with pytest.raises(ValidationError):
resource_repo.validate_patch(patch, template_repo, template)
# check it's invalid when sending a bad value
patch = ResourcePatch(isEnabled=True, properties={'vm_size': 'huge'})
with pytest.raises(ValidationError):
resource_repo.validate_patch(patch, template_repo, template)
# check it's invalid when trying to update a non-updateable field
patch = ResourcePatch(isEnabled=True, properties={'vm_size': 'large', 'os_image': 'linux'})
with pytest.raises(ValidationError):
resource_repo.validate_patch(patch, template_repo, template)
``` |
{
"source": "joalmeid/openhack-devops-proctor",
"score": 2
} |
#### File: openhack-devops-proctor/pyOHDeploy/deploy.py
```python
from flask import Flask, render_template, request, redirect, url_for, stream_with_context, Response, session, flash
from config import c_password, c_userid, c_script, c_tmpdir
import os
import subprocess
app = Flask(__name__)
app.secret_key = os.urandom(24)
@app.route('/')
def upload_a_file():
if not session.get('logged_in'):
return render_template('login.html')
else:
return render_template('upload.html')
@app.route('/login', methods=['POST'])
def do_admin_login():
#
###
###### Ugh!!! Hard coded userid/password
###
#
    if request.form['password'] == c_password and request.form['username'] == c_userid:
session['logged_in'] = True
return upload_a_file()
else:
session['logged_in'] = False
return render_template('login.html')
@app.route("/logout")
def logout():
session['logged_in'] = False
return upload_a_file()
@app.route('/deploy', methods = ['GET', 'POST'])
def deploy():
return render_template('deploy.html', value = session['filename'])
@app.route('/deployment', methods = ['GET', 'POST'])
def deployment():
if request.method == 'POST':
def generate():
#
# script execution assumes no parameters need to be passed to the bash script.
# Modify subprocess.Popen(..) if necessary
# Each parameter is a separate entry inside []
#
script = c_tmpdir + c_script
p=subprocess.Popen([script],
stdout=subprocess.PIPE)
for line in p.stdout:
yield line.decode('utf-8') + '<br>'
return Response(stream_with_context(generate()))
@app.route('/uploader', methods = ['GET', 'POST'])
def upload_file():
if request.method == 'POST':
try:
f = request.files['file']
# need to add an appropriate temporary location/path
fname = c_tmpdir + f.filename
f.save(fname)
session['filename']= fname
return deploy()
except:
# user pressed upload w/o selecting a file first
return upload_a_file()
return upload_a_file()
if __name__ == '__main__':
app.run(host='0.0.0.0')
``` |
{
"source": "joalon/leetcode-python",
"score": 4
} |
#### File: leetcode-python/add_two_numbers/add_two_numbers.py
```python
import unittest
# Definition for singly-linked list.
class ListNode:
def __init__(self, x, next_in=None):
self.val = x
self.next = next_in
def __eq__(self, other):
        if other is None:
            return False
return self.val == other.val and self.next == other.next
def reverseLinkedList(root: ListNode) -> ListNode:
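    # Reverse the list in place by walking it once and pointing each node at its predecessor.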
    lastNode = None
    currentNode = root
    while currentNode.next is not None:
        nextNode = currentNode.next
        # Reverse
        currentNode.next = lastNode
        # Save current nodes
        lastNode = currentNode
        # Step to nextNode
        currentNode = nextNode
    # The loop stops on the last node, so relink it to complete the reversal
    currentNode.next = lastNode
    return currentNode
#def _reverseLinkedList(currentNode: ListNode, lastNode: ListNode) -> ListNode:
# if currentNode.next == None:
# currentNode.next = lastNode
# return currentNode
#
# newHead = _reverseLinkedList(currentNode.next, currentNode)
# currentNode.next = lastNode
# return currentNode
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
        # Digits are stored least-significant first, so add column by column,
        # carrying into the next node just like written addition.
        dummy = ListNode(0)
        tail = dummy
        carry = 0
        while l1 is not None or l2 is not None or carry:
            total = carry
            if l1 is not None:
                total += l1.val
                l1 = l1.next
            if l2 is not None:
                total += l2.val
                l2 = l2.next
            carry, digit = divmod(total, 10)
            tail.next = ListNode(digit)
            tail = tail.next
        return dummy.next
class TestSolution(unittest.TestCase):
def testAddTwoNumbers(self):
solution = Solution()
list1 = ListNode(2, ListNode(4, ListNode(3)))
list2 = ListNode(5, ListNode(6, ListNode(4)))
result = solution.addTwoNumbers(l1=list1, l2=list2)
expected = ListNode(7, ListNode(0, ListNode(8)))
self.assertEqual(result, expected)
def testEqual(self):
list1 = ListNode(2, ListNode(4, ListNode(3)))
list2 = ListNode(2, ListNode(4, ListNode(3)))
self.assertEqual(list1, list2)
def testReverse(self):
list1 = ListNode(2, ListNode(4, ListNode(3)))
reversedll = reverseLinkedList(list1)
self.assertEqual(True, list1 == reverseLinkedList(reversedll))
if __name__ == "__main__":
unittest.main()
```
#### File: leetcode-python/valid-parens/valid_parens.py
```python
import unittest
class Solution:
def isValid(self, s: str) -> bool:
stack = []
left_parens = ['[', '(', '{']
depth = 0
for c in s:
if c in left_parens:
stack.append(c)
depth += 1
else:
if len(stack) == 0 or c != getOtherParen(stack.pop()):
return False
else:
depth -= 1
return depth == 0
def getOtherParen(char):
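    # Map an opening bracket to its matching closing bracket.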
left_parens = ['[', '(', '{']
right_parens = [']', ')', '}']
for i in range(len(right_parens)):
if char == left_parens[i]:
return right_parens[i]
class Tester(unittest.TestCase):
def test(self):
string = "()()"
self.assertEqual(True, Solution().isValid(string))
string = ""
self.assertEqual(True, Solution().isValid(string))
string = "]"
self.assertEqual(False, Solution().isValid(string))
string = "("
self.assertEqual(False, Solution().isValid(string))
string = "([])"
self.assertEqual(True, Solution().isValid(string))
string = "([]"
self.assertEqual(False, Solution().isValid(string))
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joalon/rboard",
"score": 2
} |
#### File: rboard/rboard/__init__.py
```python
import os
from flask import Flask, Blueprint, render_template
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_migrate import Migrate
from flask_wtf import CSRFProtect
db = SQLAlchemy()
migrate = Migrate()
login = LoginManager()
csrf = CSRFProtect()
def make_app():
app = Flask(__name__)
if app.config['ENV'] == 'dev':
print("Starting in dev")
app.config.from_pyfile('config/dev.cfg')
elif app.config['ENV'] == 'prod':
print("Starting in prod")
app.config.from_pyfile('config/prod.cfg')
else:
print("Expected FLASK_ENV to be either 'prod' or 'dev'")
exit(1)
db.init_app(app)
migrate.init_app(app, db)
login.init_app(app)
csrf.init_app(app)
base_bp = Blueprint("base", __name__)
app.register_blueprint(base_bp)
from rboard.main import blueprint as main_bp
app.register_blueprint(main_bp)
from rboard.user import blueprint as user_bp
app.register_blueprint(user_bp)
from rboard.board import blueprint as board_bp
app.register_blueprint(board_bp)
from rboard.post import blueprint as post_bp
app.register_blueprint(post_bp)
return app
@login.user_loader
def load_user(user_id):
return User.query.get(user_id)
from rboard.models import User
```
#### File: rboard/main/routes.py
```python
from rboard import db
from rboard.models import Board
from rboard.main import blueprint
from flask import request, redirect, url_for, render_template, flash
from flask_login import current_user
@blueprint.route("/", methods=["GET"])
def index():
boards = Board.query.all()
return render_template("main.html", boards=boards)
```
#### File: rboard/rboard/models.py
```python
from rboard import db
from flask_login import UserMixin
from werkzeug.security import check_password_hash
from datetime import datetime
class User(UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(40), unique=True)
passwordHash = db.Column(db.String(100))
joined_at = db.Column(db.DateTime, default=datetime.utcnow)
posts = db.relationship("Post", backref="user")
comments = db.relationship("Comment", backref="user")
def check_password(self, password):
return check_password_hash(self.passwordHash, password)
def __repr__(self):
return f"<User '{self.username}'>"
def __eq__(self, other):
if isinstance(self, other.__class__):
return self.id == other.id
return False
moderators = db.Table(
"moderators",
db.Column("moderator_id", db.Integer, db.ForeignKey("user.id")),
db.Column("board_id", db.Integer, db.ForeignKey("board.id")),
)
class Board(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(40), unique=True)
description = db.Column(db.String(100))
    created_at = db.Column(db.DateTime, default=datetime.utcnow)
posts = db.relationship(
"Post", backref="board", order_by="Post.posted_at.desc()"
)
moderators = db.relationship("User", secondary=moderators)
def __repr__(self):
return f"<Board '{self.name}'>"
class Post(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(40))
text = db.Column(db.String(140))
posted_at = db.Column(db.DateTime, default=datetime.utcnow)
author_id = db.Column(db.Integer, db.ForeignKey("user.id"))
board_id = db.Column(db.Integer, db.ForeignKey("board.id"))
comments = db.relationship(
"Comment", backref="post"
)
def __repr__(self):
return f"<Post '{self.title}'>"
class Comment(db.Model):
id = db.Column(db.Integer, primary_key=True)
text = db.Column(db.String(140))
posted_at = db.Column(db.DateTime, default=datetime.utcnow)
post_id = db.Column(db.Integer, db.ForeignKey("post.id"))
author_id = db.Column(db.Integer, db.ForeignKey("user.id"))
parent_comment_id = db.Column(db.Integer, db.ForeignKey("comment.id"))
parent = db.relationship("Comment", backref="comments", remote_side=[id])
def __repr__(self):
return f"<Comment '{self.id}'>"
``` |
{
"source": "joalricha869/PyPrompt",
"score": 2
} |
#### File: PyPrompt/BetaPrompt/main.py
```python
from __future__ import division
import os
import string
import random
import socket
from random import choice
from random import randint
import platform
import fnmatch
import subprocess
import time
import shutil
import sys
import argparse
import re
from time import sleep
import urllib.request
from urllib.request import urlopen
import json
import webbrowser
import uuid
try:
import speedtest
import geocoder
import paramiko
import wget
import urlopen
import requests
except:
print("Requests Package Not Found")
print("Installing Now")
os.system("pip install requests")
os.system("cls||clear")
print("Urlopen Package Not Found")
print("Installing Now")
os.system("pip install urlopen")
os.system("cls||clear")
print("Wget Package Not Found")
print("Installing Now")
os.system("pip install wget")
os.system("cls||clear")
print("Speedtest Package Not Found")
print("Installing Now...")
os.system("pip install speedtest-cli")
os.system('cls||clear')
print("Geocoder Package Not Found")
print("Installing Now...")
os.system("pip install geocoder")
os.system("cls||clear")
print("Paramiko Package Not Found")
print("Installing Now...")
os.system("pip install paramiko")
os.system("cls||clear")
print("If error thrown, update your Python or reinstall.")
time.sleep(3)
os.system('cls||clear')
print("PyPrompt Closing in 5 Seconds")
time.sleep(1)
os.system('cls||clear')
print("PyPrompt Closing in 4 Seconds")
time.sleep(1)
os.system('cls||clear')
print("PyPrompt Closing in 3 Seconds")
time.sleep(1)
os.system('cls||clear')
print("PyPrompt Closing in 2 Seconds")
time.sleep(1)
os.system('cls||clear')
print("PyPrompt Closing in 1 Second")
time.sleep(1)
exit()
print("="*40, "PyPrompt", "="*40)
joalricha = '''
_ _ _ _ ___ __ ___
(_) | | (_) | | / _ \ / // _ \
_ ___ __ _| |_ __ _ ___| |__ __ _| (_) |/ /| (_) |
| |/ _ \ / _` | | '__| |/ __| '_ \ / _` |> _ <| '_ \__, |
| | (_) | (_| | | | | | (__| | | | (_| | (_) | (_) |/ /
| |\___/ \__,_|_|_| |_|\___|_| |_|\__,_|\___/ \___//_/
_/ |
|__/
'''
taco = '''
____ _ ____ _______
| _ \(_) | _ \ |__ __|
| |_) |_ __ _| |_) | ___ _ _| | __ _ ___ ___
| _ <| |/ _` | _ < / _ \| | | | |/ _` |/ __/ _ \
| |_) | | (_| | |_) | (_) | |_| | | (_| | (_| (_) |
|____/|_|\__, |____/ \___/ \__, |_|\__,_|\___\___/
__/ | __/ |
|___/ |___/
'''
dwij = '''
_ _ _ _____ _ _
(_) | | | | __ \ (_|_)
_ __| | | _| | | |_ ___ _
| |/ _` | |/ / | | \ \ /\ / / | |
| | (_| | <| |__| |\ V V /| | |
|_|\__,_|_|\_\_____/ \_/\_/ |_| |
_/ |
|__/
'''
print('Made by:' + joalricha + 'it says joalricha869 https://github.com/joalricha869')
print(" ")
print('Thanks to ' + taco + 'for help https://github.com/BigBoyTaco')
print(" ")
print('Based on Termithon by' + dwij + 'https://github.com/IdkDwij/Termithon')
print(" ")
print("The source is at my GitHub page! 'https://github.com/joalricha869/PyPrompt'")
print("Type in 'help' for the command list.")
print("")
hostnamecomputer = socket.gethostname()
current_dir = os.getcwd()
def listToString(s):
str1 = ""
for ele in s:
str1 += ele
return str1
commands = '''
_____ _ _ _
|_ _| | | | | | |
| | _ __ | |_ ___ __ _ _ __ __ _| |_ ___ __| |
| | | '_ \| __/ _ \/ _` | '__/ _` | __/ _ \/ _` |
_| |_| | | | || __/ (_| | | | (_| | || __/ (_| |
|_____|_| |_|\__\___|\__, |_| \__,_|\__\___|\__,_|
__/ |
|___/
1. ip (Gives you your IP)
2. hostname (Gives you your Computer's ID)
3. mac (Retrieves the Physical MAC Address of The Device)
4. ping (lets you ping a website)
5. calc (A simple calculator)
6. passgen (A very efficient password generator)
7. sysinfo (Gets relevant system info)
8. test (Tests PyPrompt Sample Command)
9. mp3search (Searches your File System for mp3 files)
10. mp4search (Searches your File System for mp4 files)
11. pysearch (Searches your File System for py files)
12. docxsearch (Searches your File System for docx files)
13. mailgen (Generates dummy E-Mail Addresses)
14. ver (Reports PyPrompt Version)
15. clear (Clears screen)
16. loadbarTest (Tests the loadbar)
17. intro (Displays initial text)
18. sqrt (Enter a number and it will calculate the square root)
19. date (Displays date)
20. cd (Navigate through folders)
21. iplocation (Find the physical location of your IP address)
22. speedtest (Speedtest.net but built into PyPrompt!)
23. encryptdecrypt (Uses the RSA Algorithm to encrypt and decrypt a message!)
24. troubleshoot (Troubleshoots extra modules neccessary for PyPrompt to run)
25. ssh (An SSH Client made in Python) DO NOT USE THIS TOOL FOR ILLEGAL PURPOSES!
26. macosdownloader (A simple macOS downloader) no longer based on gibMacOS
27. filesearch (Searches files via their extension)
28. filedownloader (Download any file via their url)
29. locateme (Obtains info about your location) This can't work under restricted proxy (ex: school wifi)
30. unblockedgames (A collection of unblocked games and sites for school) something that no one asked for but happened anyway...
31. unhelp (i'm not sure what this is. it just exists.)
The PyPrompt can be used as an alternative terminal shell. It can run every shell command from Windows and UNIX
'''
def whatiscommand(current_dir):
args = cmd.split()
if cmd == 'help':
print(commands)
main(current_dir)
elif cmd == 'dir':
print(os.listdir(current_dir))
main(current_dir)
elif cmd == 'exit':
exit()
elif cmd == 'ip':
print("Your IP Address is " + getip())
main(current_dir)
elif cmd == 'hostname':
uname = platform.uname()
print(hostnamecomputer)
main(current_dir)
elif cmd == "mac":
getmac()
main(current_dir)
elif "calc" in cmd:
calc()
main(current_dir)
elif cmd == "passgen":
passGen()
elif cmd == "sysinfo":
getSystemInfo()
main(current_dir)
elif cmd == "ver":
ver()
main(current_dir)
elif cmd == "test":
testFunc()
main(current_dir)
elif cmd == "mp3search":
mp3search()
main(current_dir)
elif cmd == "mp4search":
        mp4search()
main(current_dir)
elif cmd == "pysearch":
pysearch()
main(current_dir)
elif cmd == "docxsearch":
docxsearch()
main(current_dir)
elif cmd == "mailgen":
mailGen()
main(current_dir)
elif cmd == "clear":
clear()
elif "loadbarTest" in cmd:
progressbar()
main(current_dir)
elif "intro" in cmd:
intro()
main(current_dir)
elif "sqrt" in cmd:
sqrt()
main(current_dir)
elif "date" in cmd:
date()
main(current_dir)
elif "ignore" in cmd:
easterEgg()
main(current_dir)
elif cmd == "speedtest":
speedtestapp()
main(current_dir)
elif cmd == "iplocation":
iplocation()
main(current_dir)
elif "encryptdecrypt" in cmd:
encryptdecrypt()
main(current_dir)
elif cmd == "unhelp":
print("The command is 'ignore'")
main(current_dir)
elif cmd == "troubleshoot":
troubleshoot()
main(current_dir)
elif "cd" in cmd:
args.remove('cd')
args = ' '.join(args)
if cmd == "cd":
main(current_dir)
old_dir = current_dir
if os.path.isdir(args) == True:
current_dir = args
main(args)
elif os.path.isdir(old_dir + '\\' + args):
new_dir = old_dir + '\\' + args
current_dir = new_dir
main(new_dir)
else:
print('The system cannot find the path specified. \n')
main(current_dir)
elif cmd == "ssh":
sshclient()
elif cmd == "macosdownloader":
macOSDownloader()
main(current_dir)
elif cmd == "filesearch":
fileSearch()
main(current_dir)
elif cmd == "filedownloader":
fileDownloader()
main(current_dir)
elif cmd == "locateme":
locateMe()
main(current_dir)
elif cmd == "unblockedgames":
unblockedGames()
main(current_dir)
elif "locator" in cmd:
locator()
main(current_dir)
elif str(cmd) in cmd:
print("This MUST be a shell command in the OS else your command won't work!")
os.system(cmd)
main(current_dir)
else:
error()
def main(current_dir):
global old_dir
old_dir = current_dir
global cmd
cmd = input(current_dir + '>')
whatiscommand(current_dir)
def ver():
print("PyPrompt Version: " + y)
print("(C) 2022 joalricha869, All Rights Reserved.")
def getSystemInfo():
print("="*40, "System Information", "="*40)
uname = platform.uname()
print(f"System: {uname.system}")
print(f"Node Name: {uname.node}")
print(f"Release: {uname.release}")
print(f"Version: {uname.version}")
print(f"Machine: {uname.machine}")
print(f"Processor: {uname.processor}")
print("System Info Retrieved!")
def calc():
#addition
if "+" in cmd:
numbers = cmd.split()
first_number = float(numbers[1])
second_number = float(numbers[3])
print(first_number + second_number)
#subtraction
elif "-" in cmd:
numbers = cmd.split()
first_number = float(numbers[1])
second_number = float(numbers[3])
print(first_number - second_number)
#division
elif "/" in cmd:
numbers = cmd.split()
first_number = float(numbers[1])
second_number = float(numbers[3])
print(first_number / second_number)
#multiplication
elif "*" in cmd:
numbers = cmd.split()
first_number = int(numbers[1])
second_number = int(numbers[3])
print(first_number * second_number)
elif cmd == "calc help":
print("proper use of calculator: 1 + 2")
print("only two numbers are allowed")
print('''supports:
1. addition
2. subtraction
3. division
4. multiplication''')
else:
print('error... use "calc help" for more help')
def passGen():
characters = string.ascii_letters + string.punctuation + string.digits
password = "".join(choice(characters) for x in range(randint(8, 16)))
print("Is your Generated Password: ",password)
repeatGen = input("Generate another one? ")
if repeatGen == "yes":
passGen()
else:
main(current_dir)
def getmac():
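    # uuid.getnode() gives the MAC address as a 48-bit integer; shift out one byte at a time and render each as two hex digits.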
print ("The MAC address of this Device is : ", end="")
print (':'.join(['{:02x}'.format((uuid.getnode() >> ele) & 0xff)
for ele in range(0,8*6,8)][::-1]))
def getip():
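    # Connecting a UDP socket sends no packets, but it makes the OS pick the outbound interface, whose address getsockname() then reports.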
st = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
st.connect(('10.255.255.255', 1))
IP = st.getsockname()[0]
except Exception:
IP = '127.0.0.1'
finally:
st.close()
return IP
def clear():
os.system('cls||clear')
main(current_dir)
def error():
if(cmd == ""):
main(current_dir)
else:
print("'" + str(cmd) + "'" + ''' is not recognized as an internal or external command''')
print("For more help go to: https://github.com/joalricha869/PyPrompt or https://github.com/IdkDwij/Termithon")
main(current_dir)
def testFunc():
print("If this command works, then your PyPrompt is fine... maybe")
def mp3search():
rootPath = '/'
pattern = '*.mp3'
for root, dirs, files in os.walk(rootPath):
for filename in fnmatch.filter(files, pattern):
print( os.path.join(root, filename))
def mp4search():
rootPath = '/'
pattern = '*.mp4'
for root, dirs, files in os.walk(rootPath):
for filename in fnmatch.filter(files, pattern):
print( os.path.join(root, filename))
def pysearch():
rootPath = '/'
pattern = '*.py'
for root, dirs, files in os.walk(rootPath):
for filename in fnmatch.filter(files, pattern):
print( os.path.join(root, filename))
def docxsearch():
rootPath = '/'
pattern = '*.docx'
for root, dirs, files in os.walk(rootPath):
for filename in fnmatch.filter(files, pattern):
print( os.path.join(root, filename))
def mailGen():
extensions = ['com']
domains = ['gmail','yahoo','comcast','verizon','charter','hotmail','outlook','frontier','icloud','yandex']
characters = string.ascii_letters + string.digits
winext = extensions[random.randint(0,len(extensions)-1)]
windom = domains[random.randint(0,len(domains)-1)]
acclen = random.randint(1,20)
winacc = ''.join(choice(characters) for _ in range(acclen))
finale = winacc + "@" + windom + "." + winext
progressbar()
print("Your Generated E-Mail Address is: ",finale)
again = input("Generate another address? ")
if again == "yes":
progressbar()
mailGen()
else:
main(current_dir)
def progressbar():
def loadbar(iteration, total, prefix='', suffix='', decimals=1, length=100, fill='█'):
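        # Render a textual progress bar in place: filled blocks proportional to iteration/total, redrawn with a carriage return.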
percent = ('{0:.' + str(decimals) + 'f}').format(100 * (iteration/float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '▒' * (length - filledLength)
print(f'\r{prefix} |{bar}| {percent}% {suffix}',end='\r')
if iteration == total:
print()
items = list(range(0, 50))
l = len(items)
loadbar(0, l, prefix='Generating...', suffix='Done!', length=l)
for i, item in enumerate(items):
sleep(0.1)
loadbar(i + 1, l, prefix='Generating...', suffix='Done!', length=l)
def intro():
print("=" * 40, "PyPrompt", "=" * 40)
print('Made by:' + joalricha + 'it says joalricha869 https://github.com/joalricha869')
print(" ")
print('Thanks to ' + taco + 'for help https://github.com/BigBoyTaco')
print(" ")
print('Based on Termithon Shell by' + dwij + 'https://github.com/IdkDwij/Termithon')
print(" ")
print("Type in 'help' for the command list.")
print("")
def sqrt():
num = float(input('Enter a number: '))
num_sqrt = num ** 0.5
print('The square root of %0.3f is %0.3f'%(num ,num_sqrt))
def date():
from datetime import date
today = date.today()
d2 = today.strftime("%B %d, %Y")
print("Today's date is:", d2)
main(current_dir)
def easterEgg():
print("This terminal was made by Jose a.k.a joalricha869")
print("Base code that this is powered by made by Dwij a.k.a idkDwij")
print("Some help by Nathan a.k.a BigBoyTaco")
print("This used to be Termithron 3.0 once.")
print("Search up BigBoyTaco Studios on YouTube for a tutorial on Termithon")
print("Thanks to theopensour for his unblocked games site")
print("Check him out in Codeberg")
print("btw he was banned from github")
def speedtestapp():
speed=speedtest.Speedtest()
option=int(input('''
What do you want to know:
1) Download speed
2) Upload speed
3) Both Download and Upload
4) Ping
Your choice: '''))
if option<1 or option>4:
sleep(2)
        print('You have entered an invalid choice, please enter a value from 1 to 4')
else:
sleep(1)
print()
print('Pls wait, test in progress...')
print()
down_speed=round(speed.download()/1000000,3)
up_speed=round(speed.upload()/1000000,3)
print('One more sec please...')
sleep(2.5)
print()
if option == 1:
print('Your Download speed is: ',down_speed,'Mbps')
elif option == 2:
print('Your Upload speed is: ',up_speed,'Mbps')
elif option == 3:
print('Your Download speed is: ',down_speed,'Mbps',end=" ")
print(',and your Upload speed is: ',up_speed,'Mbps')
elif option == 4:
s=[]
speed.get_servers(s)
print(speed.results.ping,'ms')
else:
print('Sorry, something went wrong, pls try again...')
def iplocation():
g = geocoder.ipinfo('me')
print(g.latlng)
def encryptdecrypt():
def isPrime(n):
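        # Sieve of Eratosthenes up to n; n is prime if it survives the sieve.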
prime = [True for i in range(n+1)]
p = 2
while p*p<=n:
if prime[p]==True:
for i in range(p*p,n+1,p):
prime[i]=False
p+=1
return prime[n]
def gcd(a,b):
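        # Euclidean algorithm for the greatest common divisor.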
while b!=0:
r = a%b
a=b
b=r
return a
def Multiplicative_inverse(a,b):
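        # Extended Euclidean algorithm: returns the multiplicative inverse of a modulo b (assumes gcd(a, b) == 1).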
s1 = 1
s2 = 0
m = b
while b!=0:
q=a//b
r=a%b
a=b
b=r
s=s1-q*s2
s1=s2
s2=s
if s1<0:
s1+=m
return s1
def powermod(x,y,p):
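        # Square-and-multiply modular exponentiation: computes (x**y) % p in O(log y) multiplications.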
res = 1
x = x%p
while (y>0):
if (y%2) == 1:
res = (res*x)%p
y = y//2
x = (x*x)%p
return res
if __name__ == '__main__':
while (True):
res = input('Do you want to enter prime numbers (y) or let the algorithm do it for you (n) or exit (e)? (y/n/e): ')
if res == 'y':
while True:
p = 13
p = int(input('Enter a prime number: '))
if isPrime(p):
break
else:
print(p,'is not a prime number')
continue
while True:
q = 17
q = int(input('Enter a different prime number: '))
if isPrime(q) and (p*q>26):
break
else:
                        print('Both prime numbers are the same, or their product is less than 26!')
continue
n = p*q
phi_n = (p-1)*(q-1)
a = 19
while True:
a = int(input('Enter a number such that Greatest Common Divisor of that number with '+ str(phi_n) + ' is 1: '))
if gcd(a,phi_n)!=1:
continue
else:
break
b = Multiplicative_inverse(a,phi_n)
message = input('Enter the message to be encrypted (lower case): ')
message = message.lower()
encrypted_string = ""
encrypted_num = []
for i in range(len(message)):
ch = message[i]
if ch!=' ':
m = ord(ch) - 97
e = powermod(m,a,n)
encrypted_num.append(e)
encrypted_string += chr(e%26 + 97)
else:
encrypted_string +=' '
print('Encrypted message is:', encrypted_string)
print(encrypted_num)
res = input("Do you want to decrypt it too? (y/n): ")
if res == 'y':
decrypted = ''
j=0
for i in range(len(encrypted_string)):
                        ch = encrypted_string[i]
if ch != ' ':
e = encrypted_num[j]
m = powermod(e,b,n)
ch = chr(m+97)
decrypted+=ch
j+=1
else:
decrypted+=' '
print("Decrypted message is:",decrypted)
else:
ans = input("Do you want to continue? (y/n): ")
if ans == 'y':
continue
else:
break
elif res == 'n':
p = 13
q = 17
n = p*q
a = 5
b = 77
message = input('Enter the message to be encrypted (lower case): ')
message = message.lower()
encrypted_string = ""
encrypted_num = []
for i in range(len(message)):
ch = message[i]
if ch!=' ':
m = ord(ch) - 97
e = powermod(m,a,n)
encrypted_num.append(e)
encrypted_string += chr(e%26 + 97)
else:
encrypted_string +=' '
print('Encrypted message is:', encrypted_string)
res = input("Do you want to decrypt it too? (y/n): ")
if res == 'y':
decrypted = ''
j=0
for i in range(len(encrypted_string)):
ch = encrypted_string[i]
if ch != ' ':
e = encrypted_num[j]
m = powermod(e,b,n)
ch = chr(m+97)
decrypted+=ch
j+=1
else:
decrypted+=' '
print("Decrypted message is:",decrypted)
else:
ans = input("Do you want to continue? (y/n): ")
if ans == 'y':
continue
else:
break
elif res == 'e':
break
else:
print('Invalid command!')
continue
def troubleshoot():
confirmation = input("Troubleshoot Modules? ")
if confirmation == "yes":
print("Uninstalling wget")
os.system("pip uninstall wget")
os.system("cls||clear")
print("Uninstalling Speedtest")
os.system("pip uninstall speedtest-cli")
os.system("cls||clear")
print("Uninstalling geocoder")
os.system("pip uninstall geocoder")
os.system("cls||clear")
print("Uninstalling paramiko")
os.system("pip uninstall paramiko")
os.system("cls||clear")
print("Unins")
print("Now Reinstalling Modules")
print("Installing wget")
os.system("pip install wget")
os.system("cls||clear")
print("Installing Speedtest")
os.system("pip install speedtest-cli")
os.system("cls||clear")
print("Installing geocoder")
os.system("pip install geocoder")
os.system("cls||clear")
print("Installing paramiko")
os.system("pip install paramiko")
os.system("cls||clear")
print("Installing Scripts")
os.system("pip install Scripts")
os.system("cls||clear")
os.system("cls||clear")
print("PyPrompt Closing in 3 seconds")
os.system("cls||clear")
print("PyPrompt Closing in 2 seconds")
os.system("cls||clear")
print("PyPrompt Closing in 1 second")
exit()
def sshclient():
print("This may have compatability issues with earlier versions of Python.")
print("Make sure you have Python 3.9 or later!")
print("DISCLAIMER: This software can't be used for any type of illegal activity.")
print("What you do here is now your OWN RESPONSIBILITY!!!")
hostname = input("Enter hostname: ")
port = input("Enter Port: ")
username = input("Enter Username: ")
password = input("Enter Password: ")
paramiko.util.log_to_file('paramiko.log')
s = paramiko.SSHClient()
s.load_system_host_keys()
s.connect(hostname, port, username, password)
stdin, stdout, stderr = s.exec_command('ifconfig')
print(stdout.read())
s.close()
    main(current_dir)
def macOSDownloader():
print("macOS Downloader")
print("This downloader retrieves the files from Apple's official server and will download them using this command")
print("1) macOS Monterey 12.3.1 (IPSW Version M1) (Requires a Real Mac)")
print("2) macOS Monterey 12.2 (Requires a Real Mac)")
print("3) macOS Monterey 12.1 (IPSW Version) (Requires a Real Mac)")
print("4) macOS Big Sur 11.6.5 (Requires a Real Mac)")
print("5) macOS Big Sur 11.6 (IPSW Version) (Requires a Real Mac)")
print("6) macOS Catalina 10.15 (Requires a Real Mac)")
print("7) macOS Catalina Patcher (Not from Apple)")
versionSelecter = input("Which version of macOS do you want to download?: ")
if versionSelecter == "1":
wget.download("https://updates.cdn-apple.com/2022SpringFCS/fullrestores/002-79219/851BEDF0-19DB-4040-B765-0F4089D1530D/UniversalMac_12.3.1_21E258_Restore.ipsw")
main(current_dir)
elif versionSelecter == "2":
wget.download("https://swcdn.apple.com/content/downloads/41/34/002-57041-A_P59UQKRDXZ/h73bziwp3o4m5kuk3ool1g55vgplpmkwqv/InstallAssistant.pkg")
main(current_dir)
elif versionSelecter == "3":
wget.download("https://updates.cdn-apple.com/2022WinterFCS/fullrestores/002-66272/FB0B40F5-49EB-421B-81EC-8B56B8468D3C/UniversalMac_12.2.1_21D62_Restore.ipsw")
main(current_dir)
elif versionSelecter == "4":
wget.download("https://swcdn.apple.com/content/downloads/15/10/002-77154-A_LAKRVPO4Y6/dbmkv9538dfpvqaqdygjciw8775qjuytbh/InstallAssistant.pkg")
main(current_dir)
elif versionSelecter == "5":
wget.download("https://updates.cdn-apple.com/2021FallFCS/fullrestores/071-97388/C361BF5E-0E01-47E5-8D30-5990BC3C9E29/UniversalMac_11.6_20G165_Restore.ipsw")
main(current_dir)
elif versionSelecter == "6":
wget.download("http://swcdn.apple.com/content/downloads/61/56/041-83630-A_8RCIBB415Y/7jqh3nh97ood2mjej7hdgpx7fgh5c3fi9g/InstallESDDmg.pkg")
main(current_dir)
elif versionSelecter == "7":
print("Go to this website!")
print("http://dosdude1.com/catalina/")
main(current_dir)
def fileSearch():
rootPath = '/'
print("Note that the file extension format must be '*.extension' without the apostrophe obv")
print("Depending on the speed of your HDD/SSD this may take a while (depending on the extension asw)")
pattern = input("Specify File Extension Here: ")
for root, dirs, files in os.walk(rootPath):
for filename in fnmatch.filter(files, pattern):
print( os.path.join(root, filename))
def fileDownloader():
    url = input("Enter URL for file download: ")
    wget.download(url)
main(current_dir)
def locateMe():
def getPublicIP():
data = requests.get('http://checkip.dyndns.com/').content
return re.compile(rb'Address: (\d+.\d+.\d+.\d+)').search(data).group(1)
    IP = getPublicIP().decode('utf-8')
url = 'http://ipinfo.io/' + IP + '/json'
response = urllib.request.urlopen(url)
data = json.load(response)
city = data['city']
region = data['region']
country = data['country']
location = data['loc']
org = data['org']
print("Your City : " + city)
print("Your Region : " + region)
print("Your Country : " + country)
print("Your Location : " + location)
print("Your ISP : " + org)
def unblockedGames():
print("uNbLoCkEd GaMeS fOr ScHoOl")
print("1) A collection of games by theopensour (virgin site poor css really made by idkdwij might not work)")
print("(he really just copied the code so please report him asap)")
print("2) An unblocker that actually WORKS")
print("3) Incognito (chad website)")
print("4) The actual website made by idkdwij")
openwhatwebsite = input("Where to next?: ")
if openwhatwebsite == "1":
webbrowser.open('https://theopensour.codeberg.page/gamesunblocked/@main/')
elif openwhatwebsite == "2":
webbrowser.open('https://ijustateacorndog.gq')
elif openwhatwebsite == "3":
webbrowser.open("https://contextsearch.org")
elif openwhatwebsite == "4":
webbrowser.open("https://idkdwij.github.io/Site")
def locator():
t=input("Enter the location: ")
g = geocoder.arcgis(t)
print(g.latlng)
y = "1.4.6.beta1"
main(current_dir)
``` |
{
"source": "joamatab/angler",
"score": 2
} |
#### File: angler/angler/linalg.py
```python
import numpy as np
import scipy.sparse as sp
import scipy.sparse.linalg as spl
try:
from pyMKL import pardisoSolver
SOLVER = 'pardiso'
except:
SOLVER = 'scipy'
from time import time
from angler.constants import DEFAULT_MATRIX_FORMAT, DEFAULT_SOLVER
from angler.constants import EPSILON_0, MU_0
from angler.pml import S_create
from angler.derivatives import createDws
def grid_average(center_array, w):
# computes values at cell edges
xy = {'x': 0, 'y': 1}
center_shifted = np.roll(center_array, 1, axis=xy[w])
avg_array = (center_shifted+center_array)/2
return avg_array
def dL(N, xrange, yrange=None):
# solves for the grid spacing
if yrange is None:
L = np.array([np.diff(xrange)[0]]) # Simulation domain lengths
else:
L = np.array([np.diff(xrange)[0],
np.diff(yrange)[0]]) # Simulation domain lengths
return L/N
def is_equal(matrix1, matrix2):
# checks if two sparse matrices are equal
return (matrix1 != matrix2).nnz == 0
def construct_A(omega, xrange, yrange, eps_r, NPML, pol, L0,
averaging=True,
timing=False,
matrix_format=DEFAULT_MATRIX_FORMAT):
# makes the A matrix
N = np.asarray(eps_r.shape) # Number of mesh cells
M = np.prod(N) # Number of unknowns
EPSILON_0_ = EPSILON_0*L0
MU_0_ = MU_0*L0
if pol == 'Ez':
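        # Ez polarization: build the curl-curl operator Dxf(1/mu)Dxb + Dyf(1/mu)Dyb plus omega^2*eps, with SC-PML stretching baked into the derivative matrices.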
vector_eps_z = EPSILON_0_*eps_r.reshape((-1,))
T_eps_z = sp.spdiags(vector_eps_z, 0, M, M, format=matrix_format)
(Sxf, Sxb, Syf, Syb) = S_create(omega, L0, N, NPML, xrange, yrange, matrix_format=matrix_format)
# Construct derivate matrices
Dyb = Syb.dot(createDws('y', 'b', dL(N, xrange, yrange), N, matrix_format=matrix_format))
Dxb = Sxb.dot(createDws('x', 'b', dL(N, xrange, yrange), N, matrix_format=matrix_format))
Dxf = Sxf.dot(createDws('x', 'f', dL(N, xrange, yrange), N, matrix_format=matrix_format))
Dyf = Syf.dot(createDws('y', 'f', dL(N, xrange, yrange), N, matrix_format=matrix_format))
A = (Dxf*1/MU_0_).dot(Dxb) \
+ (Dyf*1/MU_0_).dot(Dyb) \
+ omega**2*T_eps_z
elif pol == 'Hz':
if averaging:
vector_eps_x = grid_average(EPSILON_0_*eps_r, 'x').reshape((-1,))
vector_eps_y = grid_average(EPSILON_0_*eps_r, 'y').reshape((-1,))
else:
vector_eps_x = EPSILON_0_*eps_r.reshape((-1,))
vector_eps_y = EPSILON_0_*eps_r.reshape((-1,))
# Setup the T_eps_x, T_eps_y, T_eps_x_inv, and T_eps_y_inv matrices
T_eps_x = sp.spdiags(vector_eps_x, 0, M, M, format=matrix_format)
T_eps_y = sp.spdiags(vector_eps_y, 0, M, M, format=matrix_format)
T_eps_x_inv = sp.spdiags(1/vector_eps_x, 0, M, M, format=matrix_format)
T_eps_y_inv = sp.spdiags(1/vector_eps_y, 0, M, M, format=matrix_format)
(Sxf, Sxb, Syf, Syb) = S_create(omega, L0, N, NPML, xrange, yrange, matrix_format=matrix_format)
# Construct derivate matrices
Dyb = Syb.dot(createDws('y', 'b', dL(N, xrange, yrange), N, matrix_format=matrix_format))
Dxb = Sxb.dot(createDws('x', 'b', dL(N, xrange, yrange), N, matrix_format=matrix_format))
Dxf = Sxf.dot(createDws('x', 'f', dL(N, xrange, yrange), N, matrix_format=matrix_format))
Dyf = Syf.dot(createDws('y', 'f', dL(N, xrange, yrange), N, matrix_format=matrix_format))
A = Dxf.dot(T_eps_x_inv).dot(Dxb) \
+ Dyf.dot(T_eps_y_inv).dot(Dyb) \
+ omega**2*MU_0_*sp.eye(M)
else:
raise ValueError("something went wrong and pol is not one of Ez, Hz, instead was given {}".format(pol))
derivs = {
'Dyb' : Dyb,
'Dxb' : Dxb,
'Dxf' : Dxf,
'Dyf' : Dyf
}
return (A, derivs)
def solver_eigs(A, Neigs, guess_value=0, guess_vector=None, timing=False):
# solves for the eigenmodes of A
if timing:
start = time()
(values, vectors) = spl.eigs(A, k=Neigs, sigma=guess_value, v0=guess_vector, which='LM')
if timing:
end = time()
print('Elapsed time for eigs() is %.4f secs' % (end - start))
return (values, vectors)
def solver_direct(A, b, timing=False, solver=SOLVER):
# solves linear system of equations
b = b.astype(np.complex128)
b = b.reshape((-1,))
if not b.any():
return np.zeros(b.shape)
if timing:
t = time()
if solver.lower() == 'pardiso':
pSolve = pardisoSolver(A, mtype=13) # Matrix is complex unsymmetric due to SC-PML
pSolve.factor()
x = pSolve.solve(b)
pSolve.clear()
elif solver.lower() == 'scipy':
x = spl.spsolve(A, b)
else:
raise ValueError('Invalid solver choice: {}, options are pardiso or scipy'.format(str(solver)))
if timing:
print('Linear system solve took {:.2f} seconds'.format(time()-t))
return x
def solver_complex2real(A11, A12, b, timing=False, solver=SOLVER):
    # solves A11*x + A12*conj(x) = b by recasting it as a real-valued block system in (Re(x), Im(x))
b = b.astype(np.complex128)
b = b.reshape((-1,))
N = b.size
if not b.any():
return np.zeros(b.shape)
b_re = np.real(b).astype(np.float64)
b_im = np.imag(b).astype(np.float64)
Areal = sp.vstack((sp.hstack((np.real(A11) + np.real(A12), - np.imag(A11) + np.imag(A12))),
sp.hstack((np.imag(A11) + np.imag(A12), np.real(A11) - np.real(A12)))))
if timing:
t = time()
if solver.lower() == 'pardiso':
pSolve = pardisoSolver(Areal, mtype=11) # Matrix is real unsymmetric
pSolve.factor()
x = pSolve.solve(np.hstack((b_re, b_im)))
pSolve.clear()
elif solver.lower() == 'scipy':
        x = spl.spsolve(Areal, np.hstack((b_re, b_im)))
else:
raise ValueError('Invalid solver choice: {}, options are pardiso or scipy'.format(str(solver)))
if timing:
print('Linear system solve took {:.2f} seconds'.format(time()-t))
return (x[:N] + 1j*x[N:2*N])
```
#### File: angler/angler/nonlinearity.py
```python
import numpy as np
from angler.linalg import *
class Nonlinearity:
def __init__(self, chi, nl_region, nl_type='kerr', eps_scale=False, eps_max=None):
self.chi = chi
self.nl_region = nl_region
self.nl_type = nl_type
self.eps_scale = eps_scale
self.eps_max = eps_max
self.eps_nl = []
self.dnl_de = []
self.dnl_deps = []
if self.nl_type == 'kerr':
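            # Kerr effect: the nonlinear permittivity shift scales with |E|^2; eps_scale optionally weights it by the local linear permittivity relative to eps_max.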
if self.eps_scale:
if self.eps_max is None:
raise AssertionError("Must provide eps_max when eps_scale is True")
else:
kerr_nonlinearity = lambda e, eps_r:3*chi*nl_region*np.square(np.abs(e))*((eps_r-1)/(eps_max - 1))
kerr_nl_de = lambda e, eps_r:3*chi*nl_region*np.conj(e)*((eps_r-1)/(eps_max - 1))
kerr_nl_deps = lambda e, eps_r:3*chi*nl_region*np.square(np.abs(e))*(1/(eps_max - 1))
else:
kerr_nonlinearity = lambda e, eps_r:3*chi*nl_region*np.square(np.abs(e))
kerr_nl_de = lambda e, eps_r:3*chi*nl_region*np.conj(e)
kerr_nl_deps = lambda e, eps_r:0
self.eps_nl = kerr_nonlinearity
self.dnl_de = kerr_nl_de
self.dnl_deps = kerr_nl_deps
else:
raise AssertionError("Only 'kerr' type nonlinearity is currently supported")
```
#### File: angler/data/get_stats.py
```python
import numpy as np
from device_saver import load_device
""" Opens a device and prints its stored stats for the paper"""
def get_stats(fname):
print("\n============================================================")
D = load_device(fname)
print('input power of {:.4f} mW/um'.format(D.W_in*1000))
if hasattr(D, 'index_shift'):
index_shift = D.index_shift
else:
index_shift = D.simulation.compute_index_shift()
print('index shift: {:.2E}'.format(np.max(index_shift)))
print('Q-factor: {:.2E}'.format(D.Q))
print('bandwidth: {:.1f} GHz'.format(D.FWHM / 1e9))
if D.structure_type == 'two_port':
print('linear transmission: {:.4f}'.format(D.T_lin))
print('nonlinear transmission: {:.4f}'.format(D.T_nl))
elif D.structure_type == 'ortho_port':
print('linear transmission (right) = {:.4f} %'.format(100*D.W_right_lin / D.W_in))
print('linear transmission (top) = {:.4f} %'.format(100*D.W_top_lin / D.W_in))
print('nonlinear transmission (right) = {:.4f} %'.format(100*D.W_right_nl / D.W_in))
print('nonlinear transmission (top) = {:.4f} %'.format(100*D.W_top_nl / D.W_in))
print("============================================================\n")
if __name__ == '__main__':
fname2 = 'data/figs/devices/2_port.p'
get_stats(fname2)
fnameT = 'data/figs/devices/T_port.p'
get_stats(fnameT)
```
#### File: angler/data/gif_generator_Tport.py
```python
import numpy as np
import matplotlib.pylab as plt
import copy
# add angler to path (not necessary if pip installed)
import sys
sys.path.append("..")
# import the main simulation and optimization classes
from angler import Simulation, Optimization
from angler.plot import Temp_plt
# import some structure generators
from angler.structures import three_port, two_port, ortho_port
lambda0 = 2e-6 # free space wavelength (m)
c0 = 3e8 # speed of light in vacuum (m/s)
omega = 2*np.pi*c0/lambda0 # angular frequency (2pi/s)
dl = 0.4e-1 # grid size (L0)
NPML = [25, 25] # number of pml grid points on x and y borders
pol = 'Ez' # polarization (either 'Hz' or 'Ez')
source_amp = 6 # amplitude of modal source (A/L0^2?)
# material constants
n_index = 2.44 # refractive index
eps_m = n_index**2 # relative permittivity
chi3 = 4.1*1e-19 # Al2S3 from Boyd (m^2/V^2)
# max_ind_shift = 5.8e-3 # maximum allowed nonlinear refractive index shift (computed from damage threshold)
# geometric parameters
L1 = 6 # length waveguides in design region (L0)
L2 = 6 # width of box (L0)
H1 = 6 # height waveguides in design region (L0)
H2 = 6 # height of box (L0)
w = .3 # width of waveguides (L0)
l = 3 # length of waveguide from PML to box (L0)
spc = 2 # space between box and PML (L0)
# define permittivity of three port system
eps_r, design_region = ortho_port(L1, L2, H1, H2, w, l, dl, NPML, eps_m)
(Nx, Ny) = eps_r.shape
nx, ny = int(Nx/2), int(Ny/2) # halfway grid points
simulation = Simulation(omega,eps_r,dl,NPML,pol)
# set the modal source and probes
simulation = Simulation(omega, eps_r, dl, NPML, 'Ez')
simulation.add_mode(np.sqrt(eps_m), 'x', [NPML[0]+int(l/2/dl), ny], int(H1/2/dl), scale=source_amp)
simulation.setup_modes()
# left modal profile
right = Simulation(omega, eps_r, dl, NPML, 'Ez')
right.add_mode(np.sqrt(eps_m), 'x', [-NPML[0]-int(l/2/dl), ny], int(H1/2/dl))
right.setup_modes()
J_right = np.abs(right.src)
# top modal profile
top = Simulation(omega, eps_r, dl, NPML, 'Ez')
top.add_mode(np.sqrt(eps_m), 'y', [nx, -NPML[1]-int(l/2/dl)], int(L1/2/dl))
top.setup_modes()
J_top = np.abs(top.src)
# compute straight line simulation
eps_r_wg, _ = two_port(L1, H1, w, l, spc, dl, NPML, eps_start=eps_m)
(Nx_wg, Ny_wg) = eps_r_wg.shape
nx_wg, ny_wg = int(Nx_wg/2), int(Ny_wg/2) # halfway grid points
simulation_wg = Simulation(omega, eps_r_wg, dl, NPML, 'Ez')
simulation_wg.add_mode(np.sqrt(eps_m), 'x', [NPML[0]+int(l/2/dl), ny_wg], int(Ny/3), scale=source_amp)
simulation_wg.setup_modes()
# compute normalization
sim_out = Simulation(omega, eps_r_wg, dl, NPML, 'Ez')
sim_out.add_mode(np.sqrt(eps_m), 'x', [-NPML[0]-int(l/2/dl), ny], int(Ny/3))
sim_out.setup_modes()
J_out = np.abs(sim_out.src)
(_, _, Ez_wg) = simulation_wg.solve_fields()
SCALE = np.sum(np.square(np.abs(Ez_wg))*J_out)
J_right = J_right / SCALE
J_top = J_top / SCALE
# changes design region. 'style' can be in {'full', 'empty', 'halfway', 'random'}
np.random.seed(0)
simulation.init_design_region(design_region, eps_m, style='halfway')
# add nonlinearity
nl_region = copy.deepcopy(design_region)
simulation.nonlinearity = [] # This is needed in case you re-run this cell, for example (or you can re-initialize simulation every time)
simulation.add_nl(chi3, nl_region, eps_scale=True, eps_max=eps_m)
# define objective function
import autograd.numpy as npa
def J(e, e_nl):
linear_right = 1*npa.sum(npa.square(npa.abs(e))*J_right)
linear_top = -1*npa.sum(npa.square(npa.abs(e))*J_top)
nonlinear_right = -1*npa.sum(npa.square(npa.abs(e_nl))*J_right)
nonlinear_top = 1*npa.sum(npa.square(npa.abs(e_nl))*J_top)
objfn = (linear_right + linear_top + nonlinear_right + nonlinear_top)/2
return objfn
# make optimization object
R = 5 # filter radius of curvature (pixels) (takes a while to set up as R > 5-10)
beta = 500 # projection strength
eta= 0.50 # projection halfway
temp_plt = Temp_plt(it_plot=1, plot_what=('eps', 'elin', 'enl'), folder='figs/data/temp_im/',
figsize=(14,4), dpi=100)
optimization = Optimization(J=J, simulation=simulation, design_region=design_region, eps_m=eps_m, R=R, beta=beta, eta=eta)
(grad_avm, grad_num) = optimization.check_deriv(Npts=5, d_rho=5e-4)
# print('adjoint gradient = {}\nnumerical gradient = {}'.format(grad_avm, grad_num))
optimization.run(method='lbfgs', Nsteps=400, temp_plt=temp_plt)
```
#### File: angler/tests/test_gradient.py
```python
import unittest
import numpy as np
from numpy.testing import assert_allclose
import copy
import sys
sys.path.append('..')
from angler import Simulation, Optimization
from angler.structures import three_port
import autograd.numpy as npa
class TestGradient(unittest.TestCase):
def setUp(self):
# create a simulation to test just like in notebook
lambda0 = 2e-6 # free space wavelength (m)
c0 = 3e8 # speed of light in vacuum (m/s)
omega = 2*np.pi*c0/lambda0 # angular frequency (2pi/s)
dl = 1.1e-1 # grid size (L0)
NPML = [15, 15] # number of pml grid points on x and y borders
pol = 'Ez' # polarization (either 'Hz' or 'Ez')
source_amp = 100 # amplitude of modal source (A/L0^2?)
# material constants
n_index = 2.44 # refractive index
eps_m = n_index**2 # relative permittivity
max_ind_shift = 5.8e-2 # maximum allowed nonlinear index shift
# geometric parameters
L = 4 # length of box (L0)
H = 4 # height of box (L0)
w = .2 # width of waveguides (L0)
d = H/2.44 # distance between waveguides (L0)
l = 3 # length of waveguide from PML to box (L0)
spc = 2 # space between box and PML (L0)
# define permittivity of three port system
(eps_r, design_region) = three_port(L, H, w, d, dl, l, spc, NPML, eps_start=eps_m)
(Nx, Ny) = eps_r.shape
nx, ny = int(Nx/2), int(Ny/2) # halfway grid points
# set the modal source and probes
self.simulation = Simulation(omega, eps_r, dl, NPML, 'Ez')
self.simulation.add_mode(np.sqrt(eps_m), 'x', [NPML[0]+int(l/2/dl), ny], int(H/2/dl), scale=source_amp)
self.simulation.setup_modes()
self.simulation.init_design_region(design_region, eps_m)
# top modal profile
top = Simulation(omega, eps_r, dl, NPML, 'Ez')
top.add_mode(np.sqrt(eps_m), 'x', [-NPML[0]-int(l/2/dl), ny+int(d/2/dl)], int(H/2/dl))
top.setup_modes()
J_top = np.abs(top.src)
# bottom modal profile
bot = Simulation(omega, eps_r, dl, NPML, 'Ez')
bot.add_mode(np.sqrt(eps_m), 'x', [-NPML[0]-int(l/2/dl), ny-int(d/2/dl)], int(d/dl))
bot.setup_modes()
J_bot = np.abs(bot.src)
        # define linear and nonlinear parts of the objective function
        def J(e, e_nl):
            linear_top = 1*npa.sum(npa.square(npa.abs(e))*J_top)
            linear_bot = -1*npa.sum(npa.square(npa.abs(e))*J_bot)
            nonlinear_top = -1*npa.sum(npa.square(npa.abs(e_nl))*J_top)
            nonlinear_bot = 1*npa.sum(npa.square(npa.abs(e_nl))*J_bot)
            objfn = linear_top + linear_bot + nonlinear_top + nonlinear_bot
            return objfn
self.design_region = design_region
self.optimization = Optimization(J=J, simulation=self.simulation, design_region=self.design_region, eps_m=eps_m)
def test_linear_gradient(self):
avm_grads, num_grads = self.optimization.check_deriv(Npts=5, d_rho=1e-6)
avm_grads = np.array(avm_grads)
num_grads = np.array(num_grads)
print('linear regime: \n\tanalytical: {}\n\tnumerical: {}'.format(avm_grads, num_grads))
assert_allclose(avm_grads, num_grads, rtol=1e-03, atol=.1)
def test_nonlinear_gradient(self):
chi3 = 4.1*1e-19 # Al2S3 from Boyd (m^2/V^2)
# add nonlinearity
nl_region = copy.deepcopy(self.design_region)
self.simulation.nonlinearity = []
self.simulation.add_nl(chi3, nl_region, eps_scale=True, eps_max=self.optimization.eps_m)
avm_grads, num_grads = self.optimization.check_deriv(Npts=5, d_rho=1e-6)
avm_grads = np.array(avm_grads)
num_grads = np.array(num_grads)
print('nonlinear regime: \n\tanalytical: {}\n\tnumerical: {}'.format(avm_grads, num_grads))
assert_allclose(avm_grads, num_grads, rtol=1e-03, atol=.1)
if __name__ == '__main__':
unittest.main()
```
#### File: angler/tests/test_nonlinear_solvers.py
```python
import unittest
import numpy as np
import matplotlib.pylab as plt
from numpy.testing import assert_allclose
from angler import Simulation
class Test_NLSolve(unittest.TestCase):
def test_born_newton(self):
"""Tests whether born and newton methods get the same result"""
n0 = 3.4
omega = 2*np.pi*200e12
dl = 0.01
chi3 = 2.8E-18
width = 1
L = 5
L_chi3 = 4
width_voxels = int(width/dl)
L_chi3_voxels = int(L_chi3/dl)
Nx = int(L/dl)
Ny = int(3.5*width/dl)
eps_r = np.ones((Nx, Ny))
eps_r[:, int(Ny/2-width_voxels/2):int(Ny/2+width_voxels/2)] = np.square(n0)
nl_region = np.zeros(eps_r.shape)
nl_region[int(Nx/2-L_chi3_voxels/2):int(Nx/2+L_chi3_voxels/2), int(Ny/2-width_voxels/2):int(Ny/2+width_voxels/2)] = 1
simulation = Simulation(omega, eps_r, dl, [15, 15], 'Ez')
simulation.add_mode(n0, 'x', [17, int(Ny/2)], width_voxels*3)
simulation.setup_modes()
simulation.add_nl(chi3, nl_region, eps_scale=True, eps_max=np.max(eps_r))
srcval_vec = np.logspace(1, 3, 3)
pwr_vec = np.array([])
T_vec = np.array([])
for srcval in srcval_vec:
simulation.setup_modes()
simulation.src *= srcval
# Newton
simulation.solve_fields_nl(solver_nl='newton')
E_newton = simulation.fields["Ez"]
# Born
simulation.solve_fields_nl(solver_nl='born')
E_born = simulation.fields["Ez"]
# More solvers (if any) should be added here with corresponding calls to assert_allclose() below
assert_allclose(E_newton, E_born, rtol=1e-3)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joamatab/dispersion",
"score": 3
} |
#### File: src/dispersion/material.py
```python
from __future__ import print_function
import codecs
import numpy as np
#from dispersion import _str_to_class
import dispersion.spectral_data as spectral_data
from dispersion import Spectrum
from dispersion import Constant, Interpolation, \
Extrapolation
#from dispersion.spectral_data import _numeric_to_string_table
from dispersion.io import (Reader, _numeric_to_string_table,
_str_table_to_numeric)
def _check_table_shape(table, ncols, name):
"""
check that numpy array shape has the correct number of columns
"""
if (not len(table.shape) == 2 or
not table.shape[1] == ncols):
raise ValueError("tabulated {} data ".format(name) +
"must have shape Nx{}".format(ncols))
class Material():
'''
Class for processing refractive index and permittivity data
Parameters
----------
file_path: str
file path from which to load data
fixed_n: float
fixed real part of refractive index
fixed_nk: complex
fixed complex refractive index
fixed_eps_r: float
fixed real part of permittivity
fixed_eps: complex
fixed complex permittivity
tabulated_n: Nx2 array
table of real part of refractive index to interpolate
tabulated_nk: Nx3 array
table of real and imaginary refractive index values to interpolate
tabulated_eps: Nx3 array
table of real and imaginary permittivity values to interpolate
model_kw: dict
model parameters
spectrum_type: str
sets the default spectrum type
unit: str
sets the default unit
meta_data: dict
contains the meta data for the material
data: dict
holds one or two SpectralData objects to describe the data
options: dict
holds options for the material object
defaults: dict
default values for spectrum data
Warnings
--------
the parameters file_path, fixed_n, fixed_nk, fixed_eps_r, fixed_eps,
tabulated_n, tabulated_nk, tabulated_eps and model_kw are mututally
exclusive.
'''
def __init__(self, **kwargs):
#parsing arguments
parsed_args = self._parse_args(kwargs)
#set inputs and defaults
file_path = parsed_args["file_path"]
#self.meta_data = None
self.meta_data = {}
self.meta_data['Reference'] = ""
self.meta_data['Comment'] = ""
self.meta_data['Name'] = ""
self.meta_data['FullName'] = ""
self.meta_data['Author'] = ""
self.meta_data['Alias'] = ""
self.meta_data['MetaComment'] = ""
self.meta_data['Specification'] = {}
self._file_data = None
self.data = {'name': "",
'real': None,
'imag': None,
'complex':None}
        self.options = {'interp_order':parsed_args["interp_order"]}
self.defaults = {'unit':parsed_args["unit"],
'spectrum_type':parsed_args["spectrum_type"]}
#process input arguments
if file_path is not None:
reader = Reader(file_path)
file_data = reader.read_file()
self._process_file_data(file_data)
elif parsed_args['model_kw'] is not None:
self._process_model_dict(parsed_args['model_kw'])
elif parsed_args['tabulated_nk'] is not None:
self._process_table(parsed_args['tabulated_nk'], 'nk')
elif parsed_args['tabulated_n'] is not None:
self._process_table(parsed_args['tabulated_n'], 'n')
elif parsed_args['tabulated_eps'] is not None:
self._process_table(parsed_args['tabulated_eps'], 'eps')
else:
self._process_fixed_value(parsed_args)
self._complete_partial_data()
def _parse_args(self, args):
"""
validated the dictionary of class inputs
"""
mutually_exclusive = {"file_path", "fixed_n", "fixed_nk",
"fixed_eps_r", "fixed_eps",
"tabulated_nk", "tabulated_n",
"tabulated_eps",
"model_kw"}
inputs = {}
n_mutually_exclusive = 0
for arg in args.keys():
if arg in args and args[arg] is not None:
if arg in mutually_exclusive:
n_mutually_exclusive += 1
inputs[arg] = args[arg]
if n_mutually_exclusive == 0:
raise ValueError("At least one of the following" +
" inputs is required: "+
"{}".format(mutually_exclusive))
elif n_mutually_exclusive > 1:
raise ValueError("Only one of the following" +
"inputs is allowed: "+
"{}".format(mutually_exclusive))
# Check types
str_args = {'file_path', 'spectrum_type', 'unit'}
str_types = {str}
self._check_type(inputs, str_args, str_types)
if inputs['spectrum_type'] is None:
inputs['spectrum_type'] = 'wavelength'
if inputs['unit'] is None:
inputs['unit'] = 'nanometer'
if 'interp_order' not in inputs:
inputs['interp_order'] = 1
# pylint: disable=no-member
# bug in pylint does not recognise numpy data types
        int_args = {'interp_order'}
int_types = {int}
self._check_type(inputs, int_args, int_types)
float_args = {"fixed_n", "fixed_eps_r"}
float_types = {float, np.double}
self._check_type(inputs, float_args, float_types)
complex_args = {"fixed_nk", "fixed_eps"}
complex_types = {complex, np.cdouble}
self._check_type(inputs, complex_args, complex_types)
dict_args = {'model_kw'}
dict_types = {dict}
self._check_type(inputs, dict_args, dict_types)
array_args = {'tabulated_nk', 'tabulated_n', 'tabulated_eps'}
array_types = {np.ndarray}
self._check_type(inputs, array_args, array_types)
if inputs['tabulated_nk'] is not None:
_check_table_shape(inputs['tabulated_nk'], 3, 'nk')
if inputs['tabulated_n'] is not None:
_check_table_shape(inputs['tabulated_n'], 2, 'n')
if inputs['tabulated_eps'] is not None:
_check_table_shape(inputs['tabulated_eps'], 3, 'eps')
return inputs
@staticmethod
def _check_type(args, names, types):
"""
        raises TypeError if the named keys in the args dict do not match one
        of the given types. If a name is not in args, a default value of None
        is set.
"""
for arg in names:
if arg in args and args[arg] is not None:
                valid_type = False
                for _type in types:
                    if isinstance(args[arg], _type):
                        valid_type = True
                if not valid_type:
                    raise TypeError("argument " +
                                    "{} must be".format(arg) +
                                    " of types: {}".format(types))
else:
args[arg] = None
def _complete_partial_data(self):
"""
if only partial data was provided then set remaining parameters
to constant value of 0.
"""
if self.data['real'] is None:
self.data['real'] = Constant(0.0)
if self.data['imag'] is None:
self.data['imag'] = Constant(0.0)
def remove_absorption(self):
"""
sets loss (k or epsi) to constant zero value
Warnings
--------
        has no effect if the material is defined via complex data instead of
        separate real and imaginary parts.
"""
self.data['imag'] = Constant(0.0)
    def extrapolate(self, new_spectrum, spline_order=2):
        """extrapolates the material data
        extrapolates the material data to cover the range defined by the
        spectrum new_spectrum. If new_spectrum has only one element, the data
        will be extrapolated from the relevant end of its valid range up to the
        value given by new_spectrum. spline_order defines the order of the
        spline used for extrapolation. The results of the extrapolation depend
        heavily on the order chosen, so please check the end result to make
        sure it makes physical sense.
Parameters
----------
new_spectrum: Spectrum
            the values to extrapolate to
        spline_order: int
            the order of spline to use for interpolation -> extrapolation
Raises
------
NotImplementedError
            if the material is defined via a complex value
"""
if self.data['complex'] is None:
for data_name in ['real', 'imag']:
if isinstance(self.data[data_name], Constant):
continue
self.data[data_name] = Extrapolation(self.data[data_name],
new_spectrum,
spline_order=spline_order)
else:
raise NotImplementedError("extrapolation not implemented " +
"for materials with real and imaginary "+
"parts not independent from each other")
def _process_fixed_value(self, inputs):
'''use fixed value inputs to set n/k or permittivity
the fixed value is converted to a SpectralData.Constant object and
included in the data dict
Parameters
----------
inputs: dict
the dict holding the fixed value
'''
if inputs['fixed_n'] is not None:
self.data['name'] = 'nk'
self.data['real'] = Constant(inputs['fixed_n'])
#self._k = Constant(0.0)
elif inputs['fixed_nk'] is not None:
self.data['name'] = 'nk'
self.data['real'] = Constant(np.real(inputs['fixed_nk']))
self.data['imag'] = Constant(np.imag(inputs['fixed_nk']))
elif inputs['fixed_eps_r'] is not None:
self.data['name'] = 'eps'
self.data['real'] = Constant(inputs['fixed_eps_r'])
#self._epsi = Constant(0.0)
elif inputs['fixed_eps'] is not None:
self.data['name'] = 'eps'
self.data['real'] = Constant(np.real(inputs['fixed_eps']))
self.data['imag'] = Constant(np.imag(inputs['fixed_eps']))
else:
raise RuntimeError("Failed to set a constant value for n,k or eps")
def _process_model_dict(self, model_dict):
"""use model parameter input to set n/k or permittivity
use model_dict to return a SpectralData.Model object and sets the
relevant n/k or permittivity class attributes
Parameters
----------
model_dict: dict
            contains data for model creation (see notes)
Raises
------
ValueError
if the model output does not yield n/k or permittivity
Notes
-----
model_dict must contain the fields:
name: str
class name of the model (see spectral_data.py)
        spectrum_type: str
            spectrum type (see spectrum.py)
        unit: str
            spectrum unit (see spectrum.py)
valid_range: 2x1 np.array
min and max of the spectral range for which the model is valid
parameters: np.array
            all parameters (i.e. coefficients) needed for the model
"""
model_class = self._str_to_class(model_dict['name'])
#model_class = MODELS[model_dict['name']]
kws = {}
if "spectrum_type" in model_dict:
kws['spectrum_type'] = model_dict['spectrum_type']
self.defaults['spectrum_type'] = model_dict['spectrum_type']
if "unit" in model_dict:
kws['unit'] = model_dict['unit']
self.defaults['unit'] = model_dict['unit']
model = model_class(model_dict['parameters'],
model_dict['valid_range'], **kws)
if model.output == 'n':
self.data['name'] = 'nk'
self.data['real'] = model
elif model.output == 'k':
self.data['name'] = 'nk'
self.data['imag'] = model
elif model.output == 'nk':
self.data['name'] = 'nk'
self.data['complex'] = model
elif model.output == 'epsr':
self.data['name'] = 'eps'
self.data['real'] = model
elif model.output == 'epsi':
self.data['name'] = 'eps'
self.data['imag'] = model
elif model.output == 'eps':
self.data['name'] = 'eps'
self.data['complex'] = model
else:
raise ValueError("model output <{}> invalid".format(model.output))
@staticmethod
def _str_to_class(field):
"""evaluates string as a class.
tries to evaluate the given string as a class from the spectral_data
module.
Parameters
----------
field: str
name to convert to class
Raises
------
NameError
the given field is not an attribute
TypeError
the given field is an attribute but not a class
"""
try:
identifier = getattr(spectral_data, field)
except AttributeError:
raise NameError("%s doesn't exist." % field)
if isinstance(identifier, type):
return identifier
raise TypeError("%s is not a class." % field)
def _process_file_data(self, file_dict):
"""set meta_data and data from dictionary"""
self._file_data = file_dict
self.meta_data = {}
self.meta_data['Reference'] = file_dict['MetaData']['Reference']
self.meta_data['Comment'] = file_dict['MetaData']['Comment']
self.meta_data['Name'] = file_dict['MetaData']['Name']
self.meta_data['FullName'] = file_dict['MetaData']['FullName']
self.meta_data['Author'] = file_dict['MetaData']['Author']
self.meta_data['MetaComment'] = file_dict['MetaData']['MetaComment']
self.meta_data['Specification'] = file_dict['MetaData']['Specification']
datasets = file_dict['Datasets']
#self.dataTypes = []
#self.dataSets = []
for dataset in datasets:
data_type, identifier = dataset['DataType'].split()
#meta_data = dataset['MetaData']
if data_type == 'tabulated':
#data is tabulated
dataset['Data'] = _str_table_to_numeric(dataset.pop('Data'))
self._process_table(dataset['Data'], identifier,
meta_data=dataset)
elif data_type in {'formula', 'model'}:
#data is a formula with coefficients
self._process_formula_data(dataset)
else:
raise ValueError("data type {} not supported".format(data_type))
def _process_table(self, table, identifier, meta_data=None):
"""
Uses a table(np.ndarray) and metadata to set relevant class attributes.
"""
if meta_data is None:
meta_data = {}
        if meta_data.get('SpectrumType'):
            self.defaults['spectrum_type'] = meta_data['SpectrumType']
        if meta_data.get('Unit'):
            self.defaults['unit'] = meta_data['Unit']
if identifier == 'nk':
self.data['name'] = 'nk'
self.data['real'] = self._spec_data_from_table(table[:, [0, 1]])
self.data['imag'] = self._spec_data_from_table(table[:, [0, 2]])
elif identifier == 'n':
self.data['name'] = 'nk'
self.data['real'] = self._spec_data_from_table(table)
elif identifier == 'k':
self.data['name'] = 'nk'
self.data['imag'] = self._spec_data_from_table(table)
elif identifier == 'eps':
self.data['name'] = 'eps'
self.data['real'] = self._spec_data_from_table(table[:, [0, 1]])
self.data['imag'] = self._spec_data_from_table(table[:, [0, 2]])
def _spec_data_from_table(self, data):
'''
Convert table to SpectralData object.
Parameters
----------
data: Nx2 np.array
the tabulated spectral data
Returns
-------
Constant(SpectralData)
if tabulated data has 1 row the data is constant
Interpolation(SpectralData)
interpolation of the tabulated data
'''
n_rows = data.shape[0]
spec_type = self.defaults['spectrum_type']
unit = self.defaults['unit']
if n_rows == 1:
return Constant(data[0, 1],
valid_range=(data[0, 0], data[0, 0]),
spectrum_type=spec_type, unit=unit)
return Interpolation(data, spectrum_type=spec_type,
unit=unit)
def _process_formula_data(self, data_dict):
        '''prepare dictionary of data for processing.
        creates a model_dict from the valid range and coefficients in the
        input dictionary and calls _process_model_dict to set a
        SpectralData.Model
        '''
model_dict = {}
meta_data = data_dict
data_type, identifier = meta_data['DataType'].split()
        if data_type not in {'formula', 'model'}:
            raise ValueError("dataType <{}>".format(data_type) +
                             " not a valid formula or model")
if data_type == 'formula':
identifier = int(identifier)
if meta_data['ValidRange']:
valid_range = meta_data['ValidRange'].split()
for i_valid_range, v_range in enumerate(valid_range):
valid_range[i_valid_range] = float(v_range)
model_dict['valid_range'] = valid_range
coefficients = data_dict['Data'].split()
for iter_coeff, coeff in enumerate(coefficients):
coefficients[iter_coeff] = float(coeff)
model_dict['parameters'] = np.array(coefficients)
if meta_data['SpectrumType']:
model_dict['spectrum_type'] = meta_data['SpectrumType']
else:
model_dict['spectrum_type'] = self.defaults['spectrum_type']
if meta_data['Unit']:
model_dict['unit'] = meta_data['Unit']
else:
model_dict['unit'] = self.defaults['unit']
method_ids = {1: 'Sellmeier', 2: 'Sellmeier2',
3: 'Polynomial', 4: 'RefractiveIndexInfo',
5: 'Cauchy', 6: 'Gases',
7: 'Herzberger', 8: 'Retro',
9: 'Exotic'}
if isinstance(identifier, int):
model_dict['name'] = method_ids[identifier]
else:
model_dict['name'] = identifier
self._process_model_dict(model_dict)
def get_nk_data(self, spectrum,
spectrum_type='wavelength',
unit='meter'):
'''
return complex refractive index for a given input spectrum.
Parameters
----------
spectrum: np.array or Spectrum
the spectral values to evaluate
spectrum_type: str {'wavelength', 'frequency', 'energy'}
type of spectrum
unit: str {'meter', 'nanometer', 'micrometer', 'hertz', 'electronvolt'}
unit of spectrum (must match spectrum type)
Returns
-------
np.complex128
the complex n/k values (if input spectrum has size == 1)
np.array with np.complex128 dtype
the complex n/k values (if input spectrum has size > 1)
'''
if isinstance(spectrum, Spectrum):
spectrum_values = spectrum.values
spectrum_type = spectrum.spectrum_type
unit = spectrum.unit
else:
spectrum_values = spectrum
spectrum = Spectrum(spectrum_values,
spectrum_type=spectrum_type,
unit=unit)
if not (self.data['name'] == 'nk' or self.data['name'] == 'eps'):
raise ValueError("data type {}".format(self.data['name']) +
"cannot be converted to refractive index")
if self.data['complex'] is None:
real = self.data['real'].evaluate(spectrum)
imag = 1j*self.data['imag'].evaluate(spectrum)
complex_val = real+imag
else:
complex_val = self.data['complex'].evaluate(spectrum)
if self.data['name'] == 'eps':
complex_val = np.sqrt(complex_val)
return complex_val
def get_permittivity(self, spectrum_values,
spectrum_type='wavelength',
unit='meter'):
'''
return complex permittivity for a given input spectrum.
Parameters
----------
spectrum: np.array or Spectrum
the spectral values to evaluate
spectrum_type: str {'wavelength', 'frequency', 'energy'}
type of spectrum
unit: str {'meter', 'nanometer', 'micrometer', 'hertz', 'electronvolt'}
unit of spectrum (must match spectrum type)
Returns
-------
np.complex128
the complex permittivity values (if input spectrum has size == 1)
np.array with np.complex128 dtype
the complex permittivity values (if input spectrum has size > 1)
'''
if isinstance(spectrum_values, Spectrum):
spectrum = spectrum_values
else:
spectrum = Spectrum(spectrum_values,
spectrum_type=spectrum_type,
unit=unit)
if not (self.data['name'] == 'nk' or self.data['name'] == 'eps'):
raise ValueError("data type {}".format(self.data['name']) +
"cannot be converted to refractive index")
if self.data['complex'] is None:
real = self.data['real'].evaluate(spectrum)
imag = 1j*self.data['imag'].evaluate(spectrum)
complex_val = real+imag
else:
complex_val = self.data['complex'].evaluate(spectrum)
if self.data['name'] == 'nk':
complex_val = np.power(complex_val, 2)
return complex_val
def get_maximum_valid_range(self):
"""find maximum spectral range that spans real and imaginary data.
Checks both real and imaginary parts of spectral data and finds the
maximum spectral range which is valid for both parts.
Returns
-------
2x1 np.array
the maximum valid range
"""
if not(self.data['name'] == 'nk' or self.data['name'] == 'eps'):
raise RuntimeError("valid_range cannot be defined as "+
"Material does not yet contain "+
" a valid n/k or permittivity spectrum")
if self.data['complex'] is None:
real_range_std = self.data['real'].valid_range.standard_rep
imag_range_std = self.data['imag'].valid_range.standard_rep
real_lower = np.min(real_range_std)
real_upper = np.max(real_range_std)
imag_lower = np.min(imag_range_std)
imag_upper = np.max(imag_range_std)
lower = np.max([real_lower, imag_lower])
upper = np.min([real_upper, imag_upper])
else:
lower = np.min(self.data['complex'].valid_range.values)
upper = np.max(self.data['complex'].valid_range.values)
max_range = np.array([lower, upper])
spec = Spectrum(max_range)
return spec.convert_to(self.defaults['spectrum_type'],
self.defaults['unit'])
@staticmethod
def utf8_to_ascii(string):
"""converts a string from utf8 to ascii"""
uni_str = codecs.encode(string, 'utf-8')
ascii_str = codecs.decode(uni_str, 'ascii', 'ignore')
return ascii_str
def print_reference(self):
"""print material reference"""
print(self.utf8_to_ascii(self.meta_data['Reference']))
def print_comment(self):
"""print material comment"""
print(self.utf8_to_ascii(self.meta_data['Comment']))
def plot_nk_data(self, **kwargs):
"""plots the real and imaginary part of the refractive index"""
self._plot_data('nk', **kwargs)
def plot_permittivity(self, **kwargs):
"""plots the real and imaginary part of the permittivity"""
self._plot_data('permittivity', **kwargs)
def _plot_data(self, data_label, **kwargs):
"""internal function used for plotting spectral data"""
        try:
            import matplotlib.pyplot as plt
        except ModuleNotFoundError:
            raise ModuleNotFoundError("plotting requires the matplotlib" +
                                      " package to be installed")
plot_data = self._prepare_plot_data(**kwargs)
if 'axes' not in kwargs:
plot_data['axes'] = plt.axes()
else:
plot_data['axes'] = kwargs['axes']
if data_label == 'nk':
data = self.get_nk_data(plot_data['spectrum'])
labels = ['n', 'k']
elif data_label == 'permittivity':
data = self.get_permittivity(plot_data['spectrum'])
labels = ['eps_r', 'eps_i']
data_r = np.real(data)
data_i = np.imag(data)
# pylint: disable=protected-access
# this is the only way to access the color cycler
axes = plot_data['axes']
spectrum = plot_data['spectrum']
if spectrum.values.size == 1:
color = next(axes._get_lines.prop_cycler)['color']
plt.axhline(data_r, label=labels[0], color=color)
color = next(axes._get_lines.prop_cycler)['color']
plt.axhline(data_i, label=labels[1], ls='--', color=color)
else:
plt.plot(spectrum.values, data_r, label=labels[0])
plt.plot(spectrum.values, data_i, ls='--', label=labels[1])
plt.legend(loc='best')
plt.ylabel("{}, {}".format(labels[0], labels[1]))
xlabel = spectrum.get_type_unit_string()
plt.xlabel(xlabel)
def _prepare_plot_data(self, **kwargs):
"""internal function to prepare data for plotting"""
plot_data = {}
if 'spectrum_type' not in kwargs:
plot_data['spectrum_type'] = self.defaults['spectrum_type']
else:
plot_data['spectrum_type'] = kwargs['spectrum_type']
if 'unit' not in kwargs:
plot_data['unit'] = self.defaults['unit']
else:
plot_data['unit'] = kwargs['unit']
if 'values' not in kwargs:
spectrum = self.get_sample_spectrum()
values = spectrum.convert_to(plot_data['spectrum_type'],
plot_data['unit'])
else:
values = kwargs['values']
if isinstance(values, (list, tuple)):
values = np.array(values)
spectrum = Spectrum(values,
spectrum_type=plot_data['spectrum_type'],
unit=plot_data['unit'])
plot_data['spectrum'] = spectrum
return plot_data
def get_sample_spectrum(self):
"""spectrum which covers the maximum valid range of the material data"""
max_range = self.get_maximum_valid_range()
if max_range[0] == 0.0 or max_range[1] == np.inf:
values = np.geomspace(100, 2000, 1000)
spectrum = Spectrum(values, spectrum_type='wavelength',
unit='nm')
else:
values = np.geomspace(max_range[0], max_range[1], 1000)
if values[0] < max_range[0]:
values[0] = max_range[0]
if values[-1] > max_range[1]:
values[-1] = max_range[1]
spectrum = Spectrum(values,
spectrum_type=self.defaults['spectrum_type'],
unit=self.defaults['unit'])
return spectrum
def prepare_file_dict(self):
#if self._file_data is not None:
# return self._file_data
file_dict = {}
file_dict['MetaData'] = {}
#file_dict[]
for key in self.meta_data:
if key == "Alias":
continue
file_dict['MetaData'][key] = self.meta_data[key]
file_dict['Datasets'] = self.dataset_to_dict()
return file_dict
def add_dtype_suffix(self, dtype, data_part):
if self.data['name'] == 'nk':
if data_part == 'real':
dtype += " n"
elif data_part == 'imag':
dtype += " k"
elif data_part == 'complex':
dtype += ' nk'
elif self.data['name'] == 'eps':
if data_part == 'real':
dtype += " eps_r"
elif data_part == 'imag':
dtype += ' eps_k'
elif data_part == 'complex':
dtype += ' eps'
else:
raise ValueError("data name could not be parsed")
return dtype
def dataset_to_dict(self):
"""
        generate a file_data type dictionary from this object
        uses self.data (keys: name, real, imag, complex) to build the datasets
        Returns
        -------
        list of dict
            a list of dicts in a format suitable for writing to file
        """
datasets = []
if self.data['complex'] is None:
data_parts = ['real', 'imag']
        else:
            data_parts = ['complex']
for data_part in data_parts:
spec_data = self.data[data_part]
data_dict = spec_data.dict_repr()
if isinstance(spec_data, (Constant, Interpolation)):
dtype = data_dict['DataType']
dtype = self.add_dtype_suffix(dtype, data_part)
data_dict['DataType'] = dtype
datasets.append(data_dict)
datasets = self.collapse_datasets(datasets)
return datasets
def collapse_datasets(self, datasets):
n_collapsable = 0
for dataset in datasets:
if dataset['DataType'] == 'tabulated n':
n_data = _str_table_to_numeric(dataset['Data'])
n_collapsable += 1
if dataset['DataType'] == 'tabulated k':
k_data = _str_table_to_numeric(dataset['Data'])
n_collapsable += 1
if n_collapsable < 2:
return datasets
collapse = True
if not np.all(n_data[:, 0] == k_data[:, 0]):
collapse = False
if datasets[0]['Unit'] != datasets[1]['Unit']:
collapse = False
if datasets[0]['SpectrumType'] != datasets[1]['SpectrumType']:
collapse = False
if not collapse:
return datasets
new_dataset = {}
new_dataset['Unit'] = datasets[0]['Unit']
new_dataset['SpectrumType'] = datasets[0]['SpectrumType']
new_dataset['DataType'] = 'tabulated nk'
k_data = k_data[:, 1].reshape(k_data.shape[0], 1)
new_data = np.concatenate([n_data, k_data], axis=1)
new_dataset['Data'] = _numeric_to_string_table(new_data)
return [new_dataset]
class EffectiveMedium(Material):
def __init__(self, spectrum, material1, material2, filling_fraction):
self.data = {'name': "",
'real': None,
'imag': None,
'complex':None}
        self.options = {'interp_order': 1}
self.defaults = {'unit':'m',
'spectrum_type':'wavelength'}
self.spectrum = spectrum
self.mat1 = material1
self.mat2 = material2
self.frac = filling_fraction
if self.frac < 0. or self.frac > 1.0:
raise ValueError("filling fraction must be between "+
"0. and 1.")
self.create_effective_data()
def create_effective_data(self):
"""
this must be implemented in a subclass
"""
raise NotImplementedError("create_effective_data must be defined" +
" in a subclass")
class MaxwellGarnett(EffectiveMedium):
def create_effective_data(self):
#def get_maxwell_garnet(eps_base, eps_incl, vol_incl):
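        # Maxwell Garnett mixing rule for inclusions (mat2) in a host (mat1):
        #   eps_eff = eps_base * (2*(1-f)*eps_base + (1+2*f)*eps_incl)
        #                      / ((2+f)*eps_base + (1-f)*eps_incl)
        # with f the volume filling fraction; factor_up / factor_down below
        # are the numerator and denominator of this expression.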
small_number_cutoff = 1e-6
eps_base = self.mat1.get_permittivity(self.spectrum)
eps_incl = self.mat2.get_permittivity(self.spectrum)
factor_up = 2*(1-self.frac)*eps_base+(1+2*self.frac)*eps_incl
factor_down = (2+self.frac)*eps_base+(1-self.frac)*eps_incl
        if np.any(np.abs(factor_down) < small_number_cutoff):
            raise ValueError('effective medium is approximately singular')
eps_eff = eps_base*factor_up/factor_down
self.spectrum.convert_to("wavelength", 'm', in_place=True)
table = np.concatenate([[self.spectrum.values],
[np.real(eps_eff)],
[np.imag(eps_eff)]]).T
self._process_table(table, "eps")
class Bruggeman(EffectiveMedium):
def create_effective_data(self):
eps_b = self.mat1.get_permittivity(self.spectrum)
eps_a = self.mat2.get_permittivity(self.spectrum)
solution1 = self._get_bruggeman_solution1(eps_b, eps_a)
solution2 = self._get_bruggeman_solution2(eps_b, eps_a)
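        # solution1/solution2 are the two roots of the Bruggeman quadratic
        #   eps_eff**2 + p*eps_eff + q = 0
        # obtained from f*(eps_a - eps_eff)/(eps_a + 2*eps_eff)
        #             + (1-f)*(eps_b - eps_eff)/(eps_b + 2*eps_eff) = 0;
        # the physical root has a non-negative imaginary part, selected below.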
self.sol1 = solution1
self.sol2 = solution2
indices1 = np.imag(solution1) >= 0.0
indices2 = np.imag(solution2) >= 0.0
eps_eff = np.zeros(eps_b.shape, dtype=np.cdouble)
eps_eff[indices2] = solution2[indices2]
eps_eff[indices1] = solution1[indices1]
self.spectrum.convert_to("wavelength", 'm', in_place=True)
table = np.concatenate([[self.spectrum.values],
[np.real(eps_eff)],
[np.imag(eps_eff)]]).T
self._process_table(table, "eps")
def _get_bruggeman_solution1(self, eps_b, eps_a):
q = -0.5*eps_a*eps_b
p = 0.5*(self.frac*(eps_b-2*eps_a) + (1-self.frac)*(eps_a-2*eps_b) )
return -p*0.5 + np.sqrt((0.5*p)**2 - q)
def _get_bruggeman_solution2(self, eps_b, eps_a):
q = -0.5*eps_a*eps_b
p = 0.5*(self.frac*(eps_b-2*eps_a) + (1-self.frac)*(eps_a-2*eps_b) )
return -p*0.5 - np.sqrt((0.5*p)**2 - q)
if __name__ == "__main__":
pass
```
#### File: dispersion/tests/test_material.py
```python
import pytest
import numpy as np
import os
from dispersion import Material
from dispersion import Spectrum
from dispersion import get_config
spectrum = Spectrum(0.5,unit='um')
root_path = "../data"
def test_mat_init():
md = Material(fixed_n=1.0)
n = md.get_nk_data(spectrum)
assert np.isclose(np.real(n), 1.0)
assert np.isclose(np.imag(n), 0.0)
def test_from_yml_file():
relpath = os.path.join('RefractiveIndexInfo',
'data', 'main', 'Ag',
'Hagemann.yml')
filepath = os.path.join(root_path,relpath)
md = Material(file_path=filepath,
spectrum_type='wavelength',
unit='micrometer')
n = md.get_nk_data(spectrum)
assert np.isclose(np.real(n), 0.23805806451612901)
assert np.isclose(np.imag(n), 3.126040322580645)
def test_from_txt_file():
relpath = os.path.join('UserData','AlSb.txt')
filepath = os.path.join(root_path,relpath)
md = Material(file_path=filepath,
spectrum_type='wavelength',
unit='micrometer')
n = md.get_nk_data(spectrum)
assert np.isclose(np.real(n), 4.574074754901961)
assert np.isclose(np.imag(n), 0.4318627450980393)
def test_from_model():
wp = 8.55 # eV
loss = 18.4e-3 #eV
model_kw = {'name':'Drude','parameters':[wp, loss],
'valid_range':[0.0, np.inf],
'spectrum_type':'energy', 'unit':'ev'}
md = Material(model_kw=model_kw)
n = md.get_nk_data(spectrum)
assert np.isclose(np.real(n), 0.013366748652710245)
assert np.isclose(np.imag(n), 3.2997524521729824)
if __name__ == "__main__":
pass
``` |
{
"source": "joamatab/dotfiles",
"score": 2
} |
#### File: scripts/deprecated/shortcuts.py
```python
import csv
from re import sub
from re import compile
import os
# fishshortcuts = ""
qute_shortcuts = ""
ranger_shortcuts = ""
bash_shortcuts = ""
fish_shortcuts = ""
home = os.getenv("HOME") + "/"
ranger_location = home + "dotfiles/ranger/shortcuts.conf"
zsh_location = home + ".shortcuts"
fish_location = home + ".shortcuts.fish"
qute_location = home + ".config/qutebrowser/config.py"
folders_location = home + ".bmdirs"
configs_location = home + ".bmfiles"
# These are the labels that demarcate where the shortcuts
# go in the config files.
beg = "# DO NOT DELETE LMAO\n"
end = "# DO NOT DELETE LMAO"
# First we open the list of folder shortcuts and go down each line adding each
# in the required syntax to each of the three configs:
with open(folders_location) as fold:
for line in csv.reader(fold, dialect="excel-tab"):
# Adds the ranger go, tab, move and yank commands:
ranger_shortcuts += "map g" + line[0] + " cd " + line[1] + "\n"
ranger_shortcuts += "map t" + line[0] + " tab_new " + line[1] + "\n"
ranger_shortcuts += "map m" + line[0] + " shell mv %s " + line[1] + "\n"
ranger_shortcuts += (
"map Y" + line[0] + " shell cp -r %s " + line[1] + "\n"
)
# Adds the bash_shortcuts shortcuts:
bash_shortcuts += (
"alias " + line[0] + '="cd ' + line[1] + ' && ls "' + "\n"
)
fish_shortcuts += (
"abbr -a " + line[0] + ' "cd ' + line[1] + '; and ls -a"' + "\n"
)
# qutebrowser shortcuts:
qute_shortcuts += (
"config.bind(';"
+ line[0]
+ "', 'set downloads.location.directory "
+ line[1]
+ " ;; hint links download')"
+ "\n"
)
# Goes thru the config file file and adds the shortcuts to both
# bash_shortcuts and ranger.
with open(configs_location) as conf:
for line in csv.reader(conf, dialect="excel-tab"):
# fishshortcuts+=("alias "+line[0]+"=\"vi "+line[1]+"\"\n")
# fishshortcuts+=("abbr --add "+line[0]+" \"vi "+line[1]+"\"\n")
bash_shortcuts += "alias " + line[0] + '="vi ' + line[1] + '"' + "\n"
fish_shortcuts += "abbr -a " + line[0] + ' "vi ' + line[1] + '"' + "\n"
ranger_shortcuts += "map " + line[0] + " shell vi " + line[1] + "\n"
def replaceInMarkers(text, shortcuts):
    markers = compile(beg + r"(.|\s)*" + end)
replacement = beg + shortcuts + end
return sub(markers, replacement, text)
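# Illustrative example (hypothetical input): given
#   text = "PATH=...\n" + beg + "old shortcuts\n" + end
# replaceInMarkers(text, "alias x='...'\n") returns the same text with
# everything between the marker lines swapped for the new shortcuts.
# (Currently unused: writeShortcuts writes the file directly.)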
def writeShortcuts(location, shortcuts):
with open(location, "r+") as input:
input.write(shortcuts)
# final = ""
# final += input.read()
# final = replaceInMarkers(final, shortcuts)
# input.seek(0)
# input.write(final)
# input.truncate()
def main():
writeShortcuts(ranger_location, ranger_shortcuts)
writeShortcuts(zsh_location, bash_shortcuts)
writeShortcuts(fish_location, fish_shortcuts)
writeShortcuts(qute_location, qute_shortcuts)
if __name__ == "__main__":
main()
```
#### File: dotfiles/scripts/spotify-dl.py
```python
import urllib
import urllib2
from bs4 import BeautifulSoup
import argparse
#import spotify
import json
from StringIO import StringIO
import subprocess
import traceback
RED = "\033[31m"
GREEN = "\033[32m"
BLUE = "\033[34m"
YELLOW = "\033[36m"
DEFAULT = "\033[0m"
ACTION = BLUE + "[+] " + DEFAULT
ERROR = RED + "[+] " + DEFAULT
OK = GREEN + "[+] " + DEFAULT
#=======================
# Spotify application
#=======================
CLIENT_ID=""
CALL_BACK_URL=""
# Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps
# tab of
# https://cloud.google.com/console
# Please ensure that you have enabled the YouTube Data API for your project.
DEVELOPER_KEY = "REPLACE_ME"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def youtube_search(options):
    # build comes from Google's API client; imported lazily so the rest of the
    # script still runs without it (assumes google-api-python-client is installed)
    from googleapiclient.discovery import build
    youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
        developerKey=DEVELOPER_KEY)
# Call the search.list method to retrieve results matching the specified
# query term.
search_response = youtube.search().list(
q=options.q,
part="id,snippet",
maxResults=options.max_results
).execute()
videos = []
channels = []
playlists = []
# Add each result to the appropriate list, and then display the lists of
# matching videos, channels, and playlists.
for search_result in search_response.get("items", []):
if search_result["id"]["kind"] == "youtube#video":
videos.append("%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["videoId"]))
elif search_result["id"]["kind"] == "youtube#channel":
channels.append("%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["channelId"]))
elif search_result["id"]["kind"] == "youtube#playlist":
playlists.append("%s (%s)" % (search_result["snippet"]["title"],
search_result["id"]["playlistId"]))
print "Videos:\n", "\n".join(videos), "\n"
print "Channels:\n", "\n".join(channels), "\n"
print "Playlists:\n", "\n".join(playlists), "\n"
def searchYoutube(trackname):
textToSearch = trackname
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
#we return the first result
return "https://youtube.com" + soup.findAll(attrs={'class':'yt-uix-tile-link'})[0]['href']
def getTrackName(id, access_token):
""" get the spotify track name from id """
print ACTION + " getting track name"
proc = subprocess.Popen('curl -sS -X GET "https://api.spotify.com/v1/tracks/'+ id +'?market=ES" -H "Authorization: Bearer '+ access_token +'"', shell=True, stdout=subprocess.PIPE)
tmp = proc.stdout.read()
#convert from json to string
#io = StringIO()
#json.dump(tmp, io)
data = json.loads(tmp)
if 'error' in data:
print ERROR + "can't found song name"
print ERROR + data['error']['message']
return None
else:
print OK + "name is " + data["name"]
return data["name"]
def genUrl():
""" gen url for getting access token """
print ACTION + " generating url for access token"
print OK + "https://accounts.spotify.com/authorize?client_id="+ CLIENT_ID + "&response_type=token&redirect_uri=" + CALL_BACK_URL
def getAccessToken():
""" get access token """
print ACTION + " getting access token"
proc = subprocess.Popen('curl -sS -X GET "https://accounts.spotify.com/authorize?client_id='+ CLIENT_ID +'&response_type=token&redirect_uri='+ CALL_BACK_URL +'" -H "Accept: application/json"', shell=True, stdout=subprocess.PIPE)
tmp = proc.stdout.read()
data = json.loads(tmp)
print data
def downloadYoutube(link):
""" downloading the track """
print ACTION + "downloading song .."
proc = subprocess.Popen('youtube-dl --extract-audio --audio-format mp3 '+ link, shell=True, stdout=subprocess.PIPE)
tmp = proc.stdout.read()
print OK + "Song Downloaded"
def header():
""" header informations """
print RED + "@ spotify-dl.py version 0.0.1"
print YELLOW + "@ author : Naper"
print BLUE + "@ Designed for OSx/linux"
print "" + DEFAULT
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='spotify-dl allows you to download your spotify songs')
parser.add_argument('--verbose',
action='store_true',
help='verbose flag' )
    parser.add_argument('--dl', nargs=1, help="set the download method")
parser.add_argument('--user', nargs=1, help="set the spotify login")
parser.add_argument('--password', nargs=1, help="set the spotify password")
parser.add_argument('--traceback', action='store_true', help="enable traceback")
parser.add_argument('--gen_url', action='store_true', help="generate url for getting access_token")
parser.add_argument('--track', nargs=1, help="spotify track id")
parser.add_argument('--access_token', nargs=1, help="set the access_token")
    parser.add_argument('-m', nargs=1, help="set a method")
args = parser.parse_args()
try:
header();
if args.gen_url:
genUrl()
else:
if args.dl and args.access_token and args.dl[0] == 'youtube':
if args.track:
#genUrl()
#getAccessToken()
name = getTrackName(args.track[0], args.access_token[0])
link = searchYoutube(name)
downloadYoutube(link)
else :
print ERROR + "use --help for help"
except Exception, err:
print ERROR + "An HTTP error occurred\n"
if args.traceback:
traceback.print_exc()
``` |
{
"source": "joamatab/emepy",
"score": 3
} |
#### File: emepy/emepy/fd.py
```python
import numpy as np
import pickle
from matplotlib import pyplot as plt
import EMpy_gpu
from EMpy_gpu.modesolvers.FD import stretchmesh
from typing import Callable
from emepy.mode import Mode, Mode1D, EigenMode
from emepy.tools import interp, interp1d, Si, SiO2, get_epsfunc, rectangle_to_n
class ModeSolver(object):
"""The ModeSolver object is the heart of finding eigenmodes for use in eigenmode expansion or simple examination. This parent class should be inherited and used as a wrapper for certain modules such as EMpy, Lumerical, Pickled data, Neural Networks, etc."""
def __init__(self, **kwargs) -> None:
"""ModeSolver class constructor"""
raise NotImplementedError
def solve(self) -> None:
"""Solves the eigenmode solver for the specific eigenmodes of desire"""
raise NotImplementedError
def clear(self) -> None:
"""Clears the modesolver's eigenmodes to make memory"""
raise NotImplementedError
def get_mode(self, mode_num: int) -> EigenMode:
"""Must extract the mode of choice
Parameters
----------
mode_num : int
index of the mode of choice
"""
raise NotImplementedError
class ModeSolver1D(ModeSolver):
pass
class MSEMpy(ModeSolver):
"""Electromagnetic Python Modesolver. Uses the EMpy library See Modesolver. Parameterizes the cross section as a rectangular waveguide."""
def __init__(
self,
wl: float,
width: float = None,
thickness: float = None,
num_modes: int = 1,
cladding_width: float = 2.5e-6,
cladding_thickness: float = 2.5e-6,
core_index: float = None,
cladding_index: float = None,
x: "np.ndarray" = None,
y: "np.ndarray" = None,
mesh: int = 128,
accuracy: float = 1e-8,
boundary: str = "0000",
epsfunc: Callable[["np.ndarray", "np.ndarray"], "np.ndarray"] = None,
n: "np.ndarray" = None,
PML: bool = False,
subpixel: bool = True,
center: tuple = (0, 0),
**kwargs
) -> None:
"""MSEMpy class constructor
Parameters
----------
wl : number
wavelength of the eigenmodes
width : number
width of the core in the cross section
thickness : number
thickness of the core in the cross section
num_modes : int
number of modes to solve for (default:1)
cladding_width : number
            width of the cladding in the cross section (default:2.5e-6)
        cladding_thickness : number
            thickness of the cladding in the cross section (default:2.5e-6)
core_index : number
refractive index of the core (default:Si)
cladding_index : number
refractive index of the cladding (default:SiO2)
x : numpy array
the cross section grid in the x direction (z propagation) (default:None)
y : numpy array
the cross section grid in the y direction (z propagation) (default:None)
mesh : int
the number of mesh points in each xy direction
accuracy : number
the minimum accuracy of the finite difference solution (default:1e-8)
boundary : string
the boundaries according to the EMpy library (default:"0000")
epsfunc : function
            the function which defines the permittivity based on a grid (see EMpy library) (default:None)
n : numpy array
2D profile of the refractive index
PML : bool
if True, will use PML boundaries. Default : False, PEC
subpixel : bool
if true, will use subpixel smoothing, assuming asking for a waveguide cross section and not providing an index map (recommended)
"""
self.wl = wl
self.width = width
self.thickness = thickness
self.num_modes = num_modes
self.cladding_width = cladding_width
self.cladding_thickness = cladding_thickness
self.core_index = core_index
self.cladding_index = cladding_index
self.x = x
self.y = y
self.mesh = mesh
self.accuracy = accuracy
self.boundary = boundary
self.epsfunc = epsfunc
self.n = n
self.PML = PML
if core_index is None:
self.core_index = Si(wl * 1e6)
if cladding_index is None:
self.cladding_index = SiO2(wl * 1e6)
if x is None:
self.x = np.linspace(-0.5 * cladding_width, 0.5 * cladding_width, mesh)
if y is None:
self.y = np.linspace(-0.5 * cladding_width, 0.5 * cladding_width, mesh)
if self.PML: # Create a PML at least half a wavelength long
dx = np.diff(self.x)
dy = np.diff(self.y)
layer_xp = int(np.abs(0.5 * self.wl / dx[-1]))
layer_xn = int(np.abs(0.5 * self.wl / dx[0]))
layer_yp = int(np.abs(0.5 * self.wl / dy[-1]))
layer_yn = int(np.abs(0.5 * self.wl / dy[0]))
self.nlayers = [layer_yp, layer_yn, layer_xp, layer_xn]
factor = 1 + 2j
self.x, self.y, _, _, _, _ = stretchmesh(self.x, self.y, self.nlayers, factor)
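            # stretchmesh complex-stretches the outermost nlayers grid cells
            # (factor 1 + 2j), i.e. a complex coordinate stretch that acts as
            # the absorbing PML region on each boundary.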
        if epsfunc is None and ((not subpixel) or (self.width is None)):
self.epsfunc = get_epsfunc(
self.width,
self.thickness,
self.cladding_width,
self.cladding_thickness,
self.core_index,
self.cladding_index,
profile=self.n,
nx=self.x,
ny=self.y,
)
elif epsfunc is None and subpixel and (self.width is not None):
n = rectangle_to_n(
center, self.width, self.thickness, self.x, self.y, subpixel, self.core_index, self.cladding_index
)
self.x = ((self.x)[1:] + (self.x)[:-1]) / 2
self.y = ((self.y)[1:] + (self.y)[:-1]) / 2
self.epsfunc = get_epsfunc(
None,
None,
self.cladding_width,
self.cladding_thickness,
self.core_index,
self.cladding_index,
profile=n,
nx=self.x,
ny=self.y,
)
self.after_x = self.x
self.after_y = self.y
self.n = np.sqrt(self.epsfunc(self.x, self.y))
def solve(self) -> ModeSolver:
"""Solves for the eigenmodes"""
self.solver = EMpy_gpu.modesolvers.FD.VFDModeSolver(self.wl, self.x, self.y, self.epsfunc, self.boundary).solve(
self.num_modes, self.accuracy
)
return self
def clear(self) -> ModeSolver:
"""Clears the modesolver's eigenmodes to make memory"""
self.solver = None
return self
def get_mode(self, mode_num: int = 0) -> EigenMode:
"""Get the indexed mode number
Parameters
----------
mode_num : int
index of the mode of choice
Returns
-------
Mode
the eigenmode of index mode_num
"""
x = self.solver.modes[mode_num].get_x()
y = self.solver.modes[mode_num].get_y()
x0, y0 = [np.real(x), np.real(y)]
diffx, diffy = [np.diff(x0), np.diff(y0)]
x0_new, y0_new = [np.ones(len(x) + 1), np.ones(len(y) + 1)]
x0_new[0:-1], y0_new[0:-1] = [x0, y0]
x0_new[-1], y0_new[-1] = [x0[-1] + diffx[-1], y0[-1] + diffy[-1]]
if not self.PML:
self.nlayers = [1, 0, 1, 0]
Ex = interp(x0_new, y0_new, x0, y0, self.solver.modes[mode_num].get_field("Ex"), True)[
self.nlayers[1] : -self.nlayers[0], self.nlayers[3] : -self.nlayers[2]
]
Ey = interp(x0_new, y0_new, x0, y0, self.solver.modes[mode_num].get_field("Ey"), True)[
self.nlayers[1] : -self.nlayers[0], self.nlayers[3] : -self.nlayers[2]
]
Ez = interp(x0_new, y0_new, x0, y0, self.solver.modes[mode_num].get_field("Ez"), True)[
self.nlayers[1] : -self.nlayers[0], self.nlayers[3] : -self.nlayers[2]
]
Hx = interp(x0_new, y0_new, x0, y0, self.solver.modes[mode_num].get_field("Hx"), False)[
self.nlayers[1] : -self.nlayers[0], self.nlayers[3] : -self.nlayers[2]
]
Hy = interp(x0_new, y0_new, x0, y0, self.solver.modes[mode_num].get_field("Hy"), False)[
self.nlayers[1] : -self.nlayers[0], self.nlayers[3] : -self.nlayers[2]
]
Hz = interp(x0_new, y0_new, x0, y0, self.solver.modes[mode_num].get_field("Hz"), False)[
self.nlayers[1] : -self.nlayers[0], self.nlayers[3] : -self.nlayers[2]
]
self.x = x0_new[self.nlayers[1] : -self.nlayers[0]]
self.y = y0_new[self.nlayers[3] : -self.nlayers[2]]
neff = self.solver.modes[mode_num].neff
n = np.sqrt(self.epsfunc(self.x, self.y))
return Mode(x=self.x, y=self.y, wl=self.wl, neff=neff, Hx=Hx, Hy=Hy, Hz=Hz, Ex=Ex, Ey=Ey, Ez=Ez, n=n)
def plot_material(self) -> None:
"""Plots the index of refraction profile"""
plt.imshow(
np.sqrt(np.real(self.n)).T,
extent=[self.x[0] * 1e6, self.x[-1] * 1e6, self.y[0] * 1e6, self.y[-1] * 1e6],
cmap="Greys",
)
plt.colorbar()
plt.title("Index of Refraction")
plt.xlabel("x (µm)")
plt.ylabel("y (µm)")
class MSEMpy1D(ModeSolver):
"""NOTICE: DOES NOT CURRENTLY WORK!! Electromagnetic Python Modesolver. Uses the EMpy library See Modesolver. Parameterizes the cross section as a rectangular waveguide."""
def __init__(
self,
wl: float,
width: float = None,
num_modes: int = 1,
cladding_width: float = 2.5e-6,
core_index: float = None,
cladding_index: float = None,
x: "np.ndarray" = None,
mesh: int = 128,
accuracy: float = 1e-8,
boundary: str = "0000",
epsfunc: Callable[["np.ndarray", "np.ndarray"], "np.ndarray"] = None,
n: "np.ndarray" = None,
PML: bool = False,
**kwargs
):
"""MSEMpy class constructor
Parameters
----------
wl : number
wavelength of the eigenmodes
width : number
width of the core in the cross section
num_modes : int
number of modes to solve for (default:1)
cladding_width : number
            width of the cladding in the cross section (default:2.5e-6)
core_index : number
refractive index of the core (default:Si)
cladding_index : number
refractive index of the cladding (default:SiO2)
x : numpy array
the cross section grid in the x direction (z propagation) (default:None)
mesh : int
the number of mesh points in each xy direction
accuracy : number
the minimum accuracy of the finite difference solution (default:1e-8)
boundary : string
the boundaries according to the EMpy library (default:"0000")
epsfunc : function
            the function which defines the permittivity based on a grid (see EMpy library) (default:None)
n : numpy array
2D profile of the refractive index
PML : boolean
if True, will use PML boundaries. Default : False, PEC
"""
self.wl = wl
self.width = width
self.num_modes = num_modes
self.cladding_width = cladding_width
self.core_index = core_index
self.cladding_index = cladding_index
self.x = x
self.mesh = mesh
self.accuracy = accuracy
self.boundary = boundary
self.epsfunc = epsfunc
self.n = n
self.PML = PML
if core_index is None:
self.core_index = Si(wl * 1e6)
if cladding_index is None:
self.cladding_index = SiO2(wl * 1e6)
if x is None:
self.x = np.linspace(-0.5 * cladding_width, 0.5 * cladding_width, mesh)
if self.PML: # Create a PML at least half a wavelength long
dx = np.diff(self.x)
layer_xp = int(np.abs(0.5 * self.wl / dx[-1]))
layer_xn = int(np.abs(0.5 * self.wl / dx[0]))
self.nlayers = [layer_xp, layer_xn, 0, 0]
factor = 1 + 2j
self.x, _, _, _, _, _ = stretchmesh(self.x, np.zeros(1), self.nlayers, factor)
if epsfunc is None:
self.epsfunc = get_epsfunc(
self.width,
None,
self.cladding_width,
None,
self.core_index,
self.cladding_index,
profile=self.n,
nx=self.x,
)
self.after_x = self.x
self.n = self.epsfunc(self.x, np.zeros(1))
def solve(self) -> ModeSolver:
"""Solves for the eigenmodes"""
self.solver = EMpy_gpu.modesolvers.FD.VFDModeSolver(
self.wl, self.x, np.zeros(1), self.epsfunc, self.boundary
).solve(self.num_modes, self.accuracy)
return self
def clear(self) -> ModeSolver:
"""Clears the modesolver's eigenmodes to make memory"""
self.solver = None
return self
def get_mode(self, mode_num: int = 0) -> EigenMode:
"""Get the indexed mode number
Parameters
----------
mode_num : int
index of the mode of choice
Returns
-------
Mode
the eigenmode of index mode_num
"""
x = self.solver.modes[mode_num].get_x()
x0, y0 = [np.real(x), np.real(x)]
diffx, diffy = [np.diff(x0), np.diff(y0)]
x0_new, y0_new = [np.ones(len(x) + 1), np.ones(len(x) + 1)]
x0_new[0:-1], y0_new[0:-1] = [x0, y0]
x0_new[-1], y0_new[-1] = [x0[-1] + diffx[-1], y0[-1] + diffy[-1]]
if not self.PML:
self.nlayers = [1, 0, 1, 0]
        Ex = interp1d(x0_new, x0, self.solver.modes[mode_num].get_field("Ex"), True)[self.nlayers[1] : -self.nlayers[0]]
        Ey = interp1d(x0_new, x0, self.solver.modes[mode_num].get_field("Ey"), True)[self.nlayers[1] : -self.nlayers[0]]
        Ez = interp1d(x0_new, x0, self.solver.modes[mode_num].get_field("Ez"), True)[self.nlayers[1] : -self.nlayers[0]]
        Hx = interp1d(x0_new, x0, self.solver.modes[mode_num].get_field("Hx"), False)[self.nlayers[1] : -self.nlayers[0]]
        Hy = interp1d(x0_new, x0, self.solver.modes[mode_num].get_field("Hy"), False)[self.nlayers[1] : -self.nlayers[0]]
        Hz = interp1d(x0_new, x0, self.solver.modes[mode_num].get_field("Hz"), False)[self.nlayers[1] : -self.nlayers[0]]
self.x = x0_new[self.nlayers[1] : -self.nlayers[0]]
neff = self.solver.modes[mode_num].neff
        n = np.sqrt(self.epsfunc(self.x, np.zeros(1)))
return Mode1D(x=self.x, wl=self.wl, neff=neff, Hx=Hx, Hy=Hy, Hz=Hz, Ex=Ex, Ey=Ey, Ez=Ez, n=n)
def plot_material(self) -> None:
"""Plots the index of refraction profile"""
plt.plot(self.x, self.n)
plt.title("Index of Refraction")
plt.xlabel("x (µm)")
plt.ylabel("y (µm)")
class MSPickle(object):
"""Pickle Modesolver. See Modesolver. Pickle should serialize a list of Mode objects that can be opened here."""
def __init__(
self, filename: str, index: int = None, width: float = None, thickness: float = None, **kwargs
) -> None:
"""MSPickle class constructor
Parameters
----------
filename : string
the name of the pickled file where the eigenmode is stored
index : int
the index of the mode in the pickle file if the data stored is an array of Modes (default:None)
width : number
width of the core in the cross section, used for drawing (default:None)
thickness : number
thickness of the core in the cross section, used for drawing (default:None)
"""
self.filename = filename
self.index = index
self.width = width
self.thickness = thickness
self.PML = False
def solve(self) -> ModeSolver:
"""Solves for the eigenmodes by loading them from the pickle file"""
with open(self.filename, "rb") as f:
            self.mode = pickle.load(f)[self.index] if self.index is not None else pickle.load(f)
self.x = self.mode.x
self.y = self.mode.y
return self
def clear(self) -> ModeSolver:
"""Clears the modesolver's eigenmodes to make memory"""
self.x = None
self.y = None
self.mode = None
return self
def get_mode(self, mode_num: int = 0) -> EigenMode:
"""Get the stored mode
Returns
-------
Mode
the eigenmode of index mode_num
"""
return self.mode
``` |
{
"source": "joamatab/gcl",
"score": 2
} |
#### File: gcl/gcli/gitlab_python.py
```python
import gitlab
from gcli.config import CONFIG
gl = gitlab.Gitlab(url=CONFIG["git_url"], private_token=CONFIG["private_token"])
def search_issue(regex):
gl.search("issues", regex)
def _print_projects_list(projects_list):
for p in projects_list:
a = p.attributes
print(a["id"], a["path_with_namespace"])
def projects_list(search=None):
""" lists gitlab projects """
projects = gl.projects.list(search=search)
_print_projects_list(projects)
def get_project_attributes(project_id):
""" get project attributes """
p = gl.projects.get(project_id)
return p.attributes
def _demo_get_project_attributes():
assert get_project_attributes(191)
if __name__ == "__main__":
# projects_list(search="pdk")
# projects_list()
print(get_project_attributes(191))
```
#### File: gcl/gcli/pull_repos.py
```python
import os
import git
from gcli.config import CONFIG
def pull_repo(repo_path):
""" pull repo
"""
if os.path.isdir(repo_path):
print("git pull: {}".format(repo_path))
g = git.cmd.Git(repo_path)
g.pull()
def pull_repos():
""" git pull repos installed through the CLI
reads repo paths and url ~/.gitcli.yml
"""
path2url = CONFIG.get("path2url")
if path2url:
for path, url in path2url.items():
pull_repo(path)
if __name__ == "__main__":
pull_repos()
```
#### File: gcli/tests/test_version.py
```python
from click.testing import CliRunner
from gcli.cli import cli
from gcli import __version__
def test_version():
""" checks that the CLI returns the correct version """
runner = CliRunner()
result = runner.invoke(cli, ["config", "version"])
assert result.exit_code == 0
assert result.output.startswith(__version__)
if __name__ == "__main__":
test_version()
```
#### File: joamatab/gcl/setup.py
```python
from setuptools import setup, find_packages
def get_install_requires():
with open("requirements.txt", "r") as f:
return [line.strip() for line in f.readlines() if not line.startswith("-")]
setup(
version="0.0.2",
name="gitcli",
packages=find_packages(),
# package_data={"": ["*.lsf", "*.json"]},
include_package_data=True,
scripts=["gcli/cli.py"],
# use_scm_version=True,
# setup_requires=['setuptools_scm'],
description="Git Command Line Interface",
author="joaquin",
install_requires=get_install_requires(),
tests_require=["pytest", "tox"],
python_requires=">=3",
entry_points="""
[console_scripts]
gcli=gcli.cli:cli
""",
)
``` |
{
"source": "joamatab/gdstk",
"score": 2
} |
#### File: gdstk/benchmarks/flexpath-param.py
```python
import numpy
import gdspy
import gdstk
def bench_gdspy(output=None):
def broken(p0, v0, p1, v1, p2, w):
den = v1[1] * v0[0] - v1[0] * v0[1]
lim = 1e-12 * (v0[0] ** 2 + v0[1] ** 2) * (v1[0] ** 2 + v1[1] ** 2)
if den ** 2 < lim:
u0 = u1 = 0
p = 0.5 * (p0 + p1)
else:
dx = p1[0] - p0[0]
dy = p1[1] - p0[1]
u0 = (v1[1] * dx - v1[0] * dy) / den
u1 = (v0[1] * dx - v0[0] * dy) / den
p = 0.5 * (p0 + v0 * u0 + p1 + v1 * u1)
if u0 <= 0 and u1 >= 0:
return [p]
return [p0, p2, p1]
def pointy(p0, v0, p1, v1):
r = 0.5 * numpy.sqrt(numpy.sum((p0 - p1) ** 2))
v0 /= numpy.sqrt(numpy.sum(v0 ** 2))
v1 /= numpy.sqrt(numpy.sum(v1 ** 2))
return [p0, 0.5 * (p0 + p1) + 0.5 * (v0 - v1) * r, p1]
sp0 = gdspy.FlexPath(
[(0, 0), (0, 1)],
[0.1, 0.3, 0.5],
offset=[-0.2, 0, 0.4],
layer=[0, 1, 2],
corners=broken,
ends=pointy,
datatype=3,
)
sp0.segment((3, 3), offset=[-0.5, -0.1, 0.5])
sp0.segment((4, 1), width=[0.2, 0.2, 0.2], offset=[-0.2, 0, 0.2])
sp0.segment((0, -1), relative=True)
def spiral(u):
r = 2 - u
theta = 5 * u * numpy.pi
return (r * numpy.cos(theta) - 2, r * numpy.sin(theta))
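    # spiral maps u in [0, 1] to 2.5 turns (theta runs up to 5*pi) while the
    # radius shrinks linearly from 2 to 1; used as the parametric path below.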
sp1 = gdspy.FlexPath([(2, 6)], 0.2, layer=2, max_points=8190)
sp1.parametric(spiral)
if output:
cell = gdspy.Cell("MAIN", exclude_from_current=True)
cell.add([sp0, sp1])
cell.write_svg(output, 50)
def bench_gdstk(output=None):
def broken(p0, v0, p1, v1, p2, w):
p0 = numpy.array(p0)
v0 = numpy.array(v0)
p1 = numpy.array(p1)
v1 = numpy.array(v1)
p2 = numpy.array(p2)
den = v1[1] * v0[0] - v1[0] * v0[1]
lim = 1e-12 * (v0[0] ** 2 + v0[1] ** 2) * (v1[0] ** 2 + v1[1] ** 2)
if den ** 2 < lim:
u0 = u1 = 0
p = 0.5 * (p0 + p1)
else:
dx = p1[0] - p0[0]
dy = p1[1] - p0[1]
u0 = (v1[1] * dx - v1[0] * dy) / den
u1 = (v0[1] * dx - v0[0] * dy) / den
p = 0.5 * (p0 + v0 * u0 + p1 + v1 * u1)
if u0 <= 0 and u1 >= 0:
return [p]
return [p0, p2, p1]
def pointy(p0, v0, p1, v1):
p0 = numpy.array(p0)
v0 = numpy.array(v0)
p1 = numpy.array(p1)
v1 = numpy.array(v1)
r = 0.5 * numpy.sqrt(numpy.sum((p0 - p1) ** 2))
return [p0, 0.5 * (p0 + p1) + 0.5 * (v0 - v1) * r, p1]
sp0 = gdstk.FlexPath(
[(0, 0), (0, 1)],
[0.1, 0.3, 0.5],
offset=[-0.2, 0, 0.4],
layer=[0, 1, 2],
joins=broken,
ends=pointy,
datatype=3,
)
sp0.segment((3, 3), offset=[-0.5, -0.1, 0.5])
sp0.segment((4, 1), width=[0.2, 0.2, 0.2], offset=[-0.2, 0, 0.2])
sp0.segment((0, -1), relative=True)
def spiral(u):
r = 2 - u
theta = 5 * u * numpy.pi
return (r * numpy.cos(theta) - 2, r * numpy.sin(theta))
sp1 = gdstk.FlexPath((2, 6), 0.2, layer=2)
sp1.parametric(spiral)
if output:
cell = gdstk.Cell("MAIN")
cell.add(sp0, sp1)
cell.write_svg(output, 50)
if __name__ == "__main__":
bench_gdspy("/tmp/gdspy.svg")
bench_gdstk("/tmp/gdstk.svg")
```
#### File: gdstk/benchmarks/robustpath.py
```python
import numpy
import gdspy
import gdstk
def bench_gdspy(output=None):
rp = gdspy.RobustPath(
(50, 0),
[2, 0.5, 1, 1],
[0, 0, -1, 1],
ends=["extended", "round", "flush", "flush"],
layer=[0, 2, 1, 1],
datatype=[0, 1, 2, 3],
max_points=8190,
)
rp.segment((45, 0))
rp.segment(
(5, 0),
width=[lambda u: 2 + 16 * u * (1 - u), 0.5, 1, 1],
offset=[
0,
lambda u: 8 * u * (1 - u) * numpy.cos(12 * numpy.pi * u),
lambda u: -1 - 8 * u * (1 - u),
lambda u: 1 + 8 * u * (1 - u),
],
)
rp.segment((0, 0))
rp.smooth(
[(5, 10)],
angles=[0.5 * numpy.pi, 0],
width=0.5,
offset=[-0.25, 0.25, -0.75, 0.75],
)
rp.bezier([(0, 10), (10, 10), (10, -10), (20, -10), (20, 0), (30, 0)])
if output:
cell = gdspy.Cell("MAIN", exclude_from_current=True)
cell.add(rp)
cell.write_svg(output, 10)
def bench_gdstk(output=None):
# Offset signs must be inverted to be compatible with gdspy!
rp = gdstk.RobustPath(
(50, 0),
[2, 0.5, 1, 1],
[0, 0, 1, -1],
ends=["extended", "round", "flush", "flush"],
layer=[0, 2, 1, 1],
datatype=[0, 1, 2, 3],
)
rp.segment((45, 0))
rp.segment(
(5, 0),
width=[lambda u: 2 + 16 * u * (1 - u), 0.5, 1, 1],
offset=[
0,
lambda u: -8 * u * (1 - u) * numpy.cos(12 * numpy.pi * u),
lambda u: 1 + 8 * u * (1 - u),
lambda u: -1 - 8 * u * (1 - u),
],
)
rp.segment((0, 0))
rp.interpolation(
[(5, 10)],
angles=[0.5 * numpy.pi, 0],
width=0.5,
offset=[0.25, -0.25, 0.75, -0.75],
)
rp.bezier(
[(0, 10), (10, 10), (10, -10), (20, -10), (20, 0), (30, 0)], relative=True
)
if output:
cell = gdstk.Cell("MAIN")
cell.add(rp)
cell.write_svg(output, 10)
if __name__ == "__main__":
bench_gdspy("/tmp/gdspy.svg")
bench_gdstk("/tmp/gdstk.svg")
``` |
{
"source": "joamatab/grating_coupler_meep",
"score": 2
} |
#### File: grating_coupler_meep/grating_coupler_meep/fiber.py
```python
import sys
from functools import partial
from typing import Optional, Tuple
import hashlib
import time
import pathlib
import omegaconf
import pandas as pd
import meep as mp
import numpy as np
import fire
nm = 1e-3
nSi = 3.48
nSiO2 = 1.44
Floats = Tuple[float, ...]
def dict_to_name(**kwargs) -> str:
"""Returns name from a dict."""
kv = []
for key in sorted(kwargs):
if isinstance(key, str):
value = kwargs[key]
if value is not None:
kv += [f"{key}{to_string(value)}"]
return "_".join(kv)
def to_string(value):
if isinstance(value, list):
settings_string_list = [to_string(i) for i in value]
return "_".join(settings_string_list)
if isinstance(value, dict):
return dict_to_name(**value)
else:
return str(value)
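# Example: to_string(dict(period=0.66, widths=[0.3, 0.3]))
# returns "period0.66_widths0.3_0.3" (keys sorted, list items joined with "_"),
# which keeps the cache filename below deterministic for a given settings dict.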
def fiber(
period: float = 0.66,
fill_factor: float = 0.5,
widths: Optional[Floats] = None,
gaps: Optional[Floats] = None,
fiber_angle_deg: float = 20.0,
fiber_xposition: float = 1.0,
fiber_core_diameter: float = 10.4,
fiber_numerical_aperture: float = 0.14,
fiber_nclad: float = nSiO2,
resolution: int = 64, # pixels/um
ncore: float = nSi,
nclad: float = nSiO2,
nsubstrate: float = nSi,
n_periods: int = 30,
box_thickness: float = 2.0,
clad_thickness: float = 2.0,
core_thickness: float = 220 * nm,
etch_depth: float = 70 * nm,
wavelength_min: float = 1.4,
wavelength_max: float = 1.7,
wavelength_points: int = 50,
run: bool = True,
overwrite: bool = False,
dirpath: Optional[str] = None,
decay_by: float = 1e-3,
dtaper: float = 1,
ncores: int = 1,
) -> pd.DataFrame:
"""Returns simulation results from grating coupler with fiber.
na**2 = ncore**2 - nclad**2
    ncore = sqrt(na**2 + nclad**2)
Args:
period: grating coupler period
        fill_factor: fraction of the period occupied by the grating tooth
        widths: overrides n_periods, period and fill_factor
        gaps: overrides n_periods, period and fill_factor
        fiber_angle_deg: angle of the fiber in degrees
        decay_by: field decay threshold used to stop the simulation (default 1e-3)
"""
wavelengths = np.linspace(wavelength_min, wavelength_max, wavelength_points)
wavelength = np.mean(wavelengths)
freqs = 1 / wavelengths
widths = widths or n_periods * [period * fill_factor]
gaps = gaps or n_periods * [period * (1 - fill_factor)]
settings = dict(
period=period,
fill_factor=fill_factor,
fiber_angle_deg=fiber_angle_deg,
fiber_xposition=fiber_xposition,
fiber_core_diameter=fiber_core_diameter,
        fiber_numerical_aperture=fiber_numerical_aperture,
fiber_nclad=fiber_nclad,
resolution=resolution,
ncore=ncore,
nclad=nclad,
nsubstrate=nsubstrate,
n_periods=n_periods,
box_thickness=box_thickness,
clad_thickness=clad_thickness,
etch_depth=etch_depth,
wavelength_min=wavelength_min,
wavelength_max=wavelength_max,
wavelength_points=wavelength_points,
decay_by=decay_by,
dtaper=dtaper,
widths=widths,
gaps=gaps,
ncores=ncores,
)
settings_string = to_string(settings)
settings_hash = hashlib.md5(settings_string.encode()).hexdigest()[:8]
filename = f"fiber_{settings_hash}.yml"
dirpath = dirpath or pathlib.Path(__file__).parent / "data"
dirpath = pathlib.Path(dirpath)
dirpath.mkdir(exist_ok=True, parents=True)
filepath = dirpath / filename
filepath_csv = filepath.with_suffix(".csv")
length_grating = np.sum(widths) + np.sum(gaps)
substrate_thickness = 1.0
hair = 4
core_material = mp.Medium(index=ncore)
clad_material = mp.Medium(index=nclad)
dbuffer = 0.5
dpml = 1
fiber_clad = 120
fiber_angle = np.radians(fiber_angle_deg)
hfiber_geom = 100 # Some large number to make fiber extend into PML
fiber_ncore = (fiber_numerical_aperture ** 2 + fiber_nclad ** 2) ** 0.5
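    # e.g. with the defaults fiber_numerical_aperture = 0.14 and
    # fiber_nclad = 1.44: fiber_ncore = sqrt(0.14**2 + 1.44**2) ~= 1.4468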
fiber_clad_material = mp.Medium(index=fiber_nclad)
fiber_core_material = mp.Medium(index=fiber_ncore)
# MEEP's computational cell is always centered at (0,0), but code has beginning of grating at (0,0)
sxy = 2 * dpml + dtaper + length_grating + 2 * dbuffer
sz = (
2 * dbuffer
+ box_thickness
+ core_thickness
+ hair
+ substrate_thickness
+ 2 * dpml
)
comp_origin_x = 0
y_offset = 0
offset_vector = mp.Vector3(0, 0, 0)
# We will do x-z plane simulation
cell_size = mp.Vector3(sxy, sz)
geometry = []
# clad
geometry.append(
mp.Block(
material=clad_material,
center=mp.Vector3(0, clad_thickness / 2) - offset_vector,
size=mp.Vector3(mp.inf, clad_thickness),
)
)
# BOX
geometry.append(
mp.Block(
material=clad_material,
center=mp.Vector3(0, -0.5 * box_thickness) - offset_vector,
size=mp.Vector3(mp.inf, box_thickness),
)
)
# Fiber (defined first to be overridden)
geometry.append(
mp.Block(
material=fiber_clad_material,
center=mp.Vector3(x=fiber_xposition) - offset_vector,
size=mp.Vector3(fiber_clad, hfiber_geom),
e1=mp.Vector3(x=1).rotate(mp.Vector3(z=1), -1 * fiber_angle),
e2=mp.Vector3(y=1).rotate(mp.Vector3(z=1), -1 * fiber_angle),
)
)
geometry.append(
mp.Block(
material=fiber_core_material,
center=mp.Vector3(x=fiber_xposition) - offset_vector,
size=mp.Vector3(fiber_core_diameter, hfiber_geom),
e1=mp.Vector3(x=1).rotate(mp.Vector3(z=1), -1 * fiber_angle),
e2=mp.Vector3(y=1).rotate(mp.Vector3(z=1), -1 * fiber_angle),
)
)
# waveguide
geometry.append(
mp.Block(
material=core_material,
center=mp.Vector3(0, core_thickness / 2) - offset_vector,
size=mp.Vector3(mp.inf, core_thickness),
)
)
# grating etch
x = -length_grating / 2
for width, gap in zip(widths, gaps):
geometry.append(
mp.Block(
material=clad_material,
center=mp.Vector3(x + gap / 2, core_thickness - etch_depth / 2)
- offset_vector,
size=mp.Vector3(gap, etch_depth),
)
)
x += width + gap
# Substrate
geometry.append(
mp.Block(
material=mp.Medium(index=nsubstrate),
center=mp.Vector3(
0,
-0.5 * (core_thickness + substrate_thickness + dpml + dbuffer)
- box_thickness,
)
- offset_vector,
size=mp.Vector3(mp.inf, substrate_thickness + dpml + dbuffer),
)
)
# PMLs
boundary_layers = [mp.PML(dpml)]
# mode frequency
fcen = 1 / wavelength
waveguide_port_center = mp.Vector3(-dtaper - length_grating / 2, 0) - offset_vector
waveguide_port_size = mp.Vector3(0, 2 * clad_thickness - 0.2)
fiber_port_center = (
mp.Vector3(
(0.5 * sz - dpml + y_offset - 1) * np.sin(fiber_angle) + fiber_xposition,
0.5 * sz - dpml + y_offset - 1,
)
- offset_vector
)
fiber_port_size = mp.Vector3(sxy * 3 / 5 - 2 * dpml - 2, 0)
# Waveguide source
sources = [
mp.EigenModeSource(
src=mp.GaussianSource(fcen, fwidth=0.1 * fcen),
size=waveguide_port_size,
center=waveguide_port_center,
eig_band=1,
direction=mp.X,
eig_match_freq=True,
eig_parity=mp.ODD_Z,
)
]
# symmetries = [mp.Mirror(mp.Y,-1)]
symmetries = []
sim = mp.Simulation(
resolution=resolution,
cell_size=cell_size,
boundary_layers=boundary_layers,
geometry=geometry,
# geometry_center=mp.Vector3(x_offset, y_offset),
sources=sources,
dimensions=2,
symmetries=symmetries,
eps_averaging=True,
)
# Ports
waveguide_monitor_port = mp.ModeRegion(
center=waveguide_port_center + mp.Vector3(x=0.2), size=waveguide_port_size
)
waveguide_monitor = sim.add_mode_monitor(
freqs, waveguide_monitor_port, yee_grid=True
)
fiber_monitor_port = mp.ModeRegion(
center=fiber_port_center - mp.Vector3(y=0.2),
size=fiber_port_size,
direction=mp.NO_DIRECTION,
)
fiber_monitor = sim.add_mode_monitor(freqs, fiber_monitor_port)
if not run:
sim.plot2D()
filepath.write_text(omegaconf.OmegaConf.to_yaml(settings))
print(f"write {filepath}")
return pd.DataFrame()
if filepath_csv.exists() and not overwrite:
return pd.read_csv(filepath_csv)
else:
start = time.time()
# Run simulation
# sim.run(until=400)
        field_monitor_point = mp.Vector3(-dtaper, 0, 0)  # stop_when_fields_decayed expects a Vector3
sim.run(
until_after_sources=mp.stop_when_fields_decayed(
dt=50, c=mp.Ez, pt=field_monitor_point, decay_by=decay_by
)
)
# Extract mode information
transmission_waveguide = sim.get_eigenmode_coefficients(
waveguide_monitor, [1], eig_parity=mp.ODD_Z, direction=mp.X
).alpha
kpoint = mp.Vector3(y=-1).rotate(mp.Vector3(z=1), -1 * fiber_angle)
reflection_fiber = sim.get_eigenmode_coefficients(
fiber_monitor,
[1],
direction=mp.NO_DIRECTION,
eig_parity=mp.ODD_Z,
kpoint_func=lambda f, n: kpoint,
).alpha
end = time.time()
a1 = transmission_waveguide[:, :, 0].flatten() # forward wave
b1 = transmission_waveguide[:, :, 1].flatten() # backward wave
a2 = reflection_fiber[:, :, 0].flatten() # forward wave
b2 = reflection_fiber[:, :, 1].flatten() # backward wave
s11 = np.squeeze(b1 / a1)
s12 = np.squeeze(a2 / a1)
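        # assume a reciprocal, symmetric device: s22 = s11 and s21 = s12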
s22 = s11.copy()
s21 = s12.copy()
simulation = dict(
settings=settings,
compute_time_seconds=end - start,
)
filepath.write_text(omegaconf.OmegaConf.to_yaml(simulation))
r = dict(s11=s11, s12=s12, s21=s21, s22=s22, wavelengths=wavelengths)
keys = [key for key in r.keys() if key.startswith("s")]
s = {f"{key}a": list(np.unwrap(np.angle(r[key].flatten()))) for key in keys}
s.update({f"{key}m": list(np.abs(r[key].flatten())) for key in keys})
s["wavelength"] = wavelengths
df = pd.DataFrame(s, index=wavelengths)
df.to_csv(filepath_csv, index=False)
return df
# remove silicon to clearly see the fiber (for debugging)
fiber_no_silicon = partial(fiber, ncore=nSiO2, nsubstrate=nSiO2, run=False)
if __name__ == "__main__":
# import matplotlib.pyplot as plt
# fiber_no_silicon()
# fiber(run=False, fiber_xposition=0)
# plt.show()
fire.Fire(fiber)
``` |
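A minimal driving sketch for the grating-coupler function above. It assumes the truncated parameters in the signature all have defaults and that the module is importable as `fiber_simulation`; the module name and sweep values are illustrative, not part of the original code.
```python
# Hedged usage sketch (import path and sweep values are assumptions).
import numpy as np

from fiber_simulation import fiber

if __name__ == "__main__":
    fiber(run=False)  # plot the geometry without running MEEP

    # sweep the fiber angle and report the peak |S12| (fiber transmission)
    for angle in np.linspace(10, 20, 5):
        df = fiber(fiber_angle_deg=angle)
        print(f"{angle:.1f} deg -> max |S12| = {df['s12m'].max():.3f}")
```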
{
"source": "joamatab/install_new_computer",
"score": 3
} |
#### File: python/decorators2020/1_waste_time.py
```python
from decorators import timer
@timer
def waste_time(number):
total = 0
for num in range(number):
total += sum(n for n in range(num))
return total
```
#### File: python/decorators2020/decorators.py
```python
import functools
import time
REGISTERED = {}
def register(func):
REGISTERED[func.__name__] = func
return func
def timer(func):
""" template for decorators """
@functools.wraps(func)
def _timer(*args, **kwargs):
t0 = time.perf_counter()
value = func(*args, **kwargs)
t1 = time.perf_counter()
print(f"elapsed time: {t1-t0} seconds")
return value
return _timer
def repeat_n(num_times=2):
""" repeat n times"""
def decorator_repeat(func):
@functools.wraps(func)
def _wrapper(*args, **kwargs):
for _ in range(num_times):
value = func(*args, **kwargs)
return value
return _wrapper
return decorator_repeat
def repeat(_func=None, *, num_times=2):
    """Repeat decorator that works both bare (@repeat) and with arguments (@repeat(num_times=4))."""
def decorator_repeat(func):
@functools.wraps(func)
def wrapper_repeat(*args, **kwargs):
for _ in range(num_times):
value = func(*args, **kwargs)
return value
return wrapper_repeat
if _func is None:
return decorator_repeat
else:
return decorator_repeat(_func)
def trace(func):
""" trace """
@functools.wraps(func)
def _wrapper(*args, **kwargs):
args_repr = [repr(a) for a in args]
kwargs_repr = [f"{k}={v!r}" for k, v in kwargs.items()]
signature = ", ".join(args_repr + kwargs_repr)
print(f"calling {func.__name__}({signature})")
value = func(*args, **kwargs)
print(f"{func.__name__!r}({signature}) returned {value!r}")
return value
return _wrapper
# def count_calls(func):
# print('called')
# if hasattr(func, 'num_calls'):
# func.num_calls += 1
# else:
# func.num_calls = 1
# return func
def count_calls(func):
""" count the number of calls to a function
shows how to keep state in your decorator
"""
@functools.wraps(func)
def _count_calls(*args, **kwargs):
_count_calls.num_calls += 1
return func(*args, **kwargs)
_count_calls.num_calls = 0
return _count_calls
class Adder:
def __init__(self, number):
self.number = number
def __call__(self, other):
return other + self.number
class CountCalls:
""" count number of calls to a function"""
def __init__(self, func):
self.func = func
self.num_calls = 0
functools.update_wrapper(self, func)
def __call__(self, *args, **kwargs):
self.num_calls += 1
return self.func(*args, **kwargs)
if __name__ == "__main__":
add_3 = Adder(3)
print(add_3(5))
```
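A short usage sketch for the decorators above, assuming the file is importable as `decorators`; the commented outputs are what the code shown would produce.
```python
# Usage sketch for decorators.py (import path assumed).
from decorators import CountCalls, repeat, timer


@timer
@repeat(num_times=3)
def greet(name):
    return f"hello {name}"


@CountCalls
def noop():
    pass


if __name__ == "__main__":
    greet("world")         # timer prints once; the body runs 3 times
    noop()
    noop()
    print(noop.num_calls)  # -> 2
```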
#### File: python/decorators/mydeco.py
```python
def mydeco(func):
def wrapper(*args, **kwargs):
return f"{func(*args, **kwargs)}!!!"
return wrapper
if __name__ == "__main__":
@mydeco
def add(a, b):
return a + b
print(add(2, 2))
print(add(3, 3))
``` |
{
"source": "joamatab/modesolverpy",
"score": 3
} |
#### File: modesolverpy/modes/materials.py
```python
from typing import Union
import opticalmaterialspy as mat
from numpy import float64
def si(wl: Union[float, float64]) -> float64:
return mat.RefractiveIndexWeb(
"https://refractiveindex.info/?shelf=main&book=Si&page=Li-293K"
).n(wl)
def sio2(wl: Union[float, float64]) -> float64:
return mat.SiO2().n(wl)
def air(wl):
return mat.Air().n(wl)
def nitride(wl: float) -> float64:
return mat.RefractiveIndexWeb(
"https://refractiveindex.info/?shelf=main&book=Si3N4&page=Luke"
).n(wl)
if __name__ == "__main__":
print(nitride(1.3))
print(si(1.55))
```
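A small dispersion sketch using the material functions above; the refractiveindex.info lookups need network access, and the import path is assumed.
```python
# Dispersion sketch (import path assumed; requires network access).
import numpy as np

from modes.materials import si, sio2

for wl in np.linspace(1.5, 1.6, 6):  # wavelength in um
    print(f"{wl:.3f} um: n_Si = {si(wl):.4f}, n_SiO2 = {sio2(wl):.4f}")
```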
#### File: modesolverpy/modes/_mode_solver_lib.py
```python
import collections as col
from builtins import range, zip
from typing import List, Optional, Tuple, Union
import numpy
import scipy
from numpy import complex128, float64, ndarray
from scipy.interpolate import interp2d
from scipy.sparse.csr import csr_matrix
from modes._structure import RidgeWaveguide
def trapz2(
f: ndarray,
x: Optional[ndarray] = None,
y: Optional[ndarray] = None,
dx: float = 1.0,
dy: float = 1.0,
) -> complex128:
"""Double integrate."""
return numpy.trapz(numpy.trapz(f, x=y, dx=dy), x=x, dx=dx)
def centered1d(x: ndarray) -> ndarray:
return (x[1:] + x[:-1]) / 2.0
def centered2d(x: ndarray) -> ndarray:
return (x[1:, 1:] + x[1:, :-1] + x[:-1, 1:] + x[:-1, :-1]) / 4.0
class _ModeSolverSemiVectorial:
"""
This function calculates the modes of a dielectric waveguide
using the semivectorial finite difference method.
It is slightly faster than the full-vectorial VFDModeSolver,
    but it does not accept non-isotropic permittivity. For example,
    birefringent materials, which have different refractive indices
    along different axes, cannot be used.
It is adapted from the svmodes.m matlab code of <NAME> and co-workers.
Parameters
----------
wl : float
optical wavelength
units are arbitrary, but must be self-consistent. It's recommended to just work in microns.
x : 1D array of floats
Array of x-values
y : 1D array of floats
Array of y-values
epsfunc : function
This is a function that provides the relative permittivity (square of the refractive index)
as a function of the x and y position. The function must be of the form:
``myRelativePermittivity(x,y)``
        The function can either return a single float, corresponding to an isotropic refractive index,
        or it may return a length-5 tuple. In the tuple case, the relative permittivity is given in the form
(epsxx, epsxy, epsyx, epsyy, epszz).
boundary : str
This is a string that identifies the type of boundary conditions applied.
The following options are available:
'A' - Hx is antisymmetric, Hy is symmetric.
'S' - Hx is symmetric and, Hy is antisymmetric.
'0' - Hx and Hy are zero immediately outside of the boundary.
The string identifies all four boundary conditions, in the order: North, south, east, west.
For example, boundary='000A'
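        (here '000A' sets Hx and Hy to zero on the north, south and east edges,
        and applies antisymmetric Hx / symmetric Hy on the west edge).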
method : str
must be 'Ex', 'Ey', or 'scalar'
this identifies the field that will be calculated.
Returns
-------
self : an instance of the SVFDModeSolver class
Typically self.solve() will be called in order to actually find the modes.
"""
def __init__(
self,
wl: float,
structure: RidgeWaveguide,
boundary: str = "0000",
method: str = "Ex",
) -> None:
# Polarisation bug fix.
assert method in ("Ex", "Ey"), "Invalid polarisation method."
if method == "Ex":
method = "Ey"
elif method == "Ey":
method = "Ex"
self.wl = wl
self.x = structure.y
self.y = structure.x
self.boundary = boundary
self.method = method
self.structure = structure
def build_matrix(self) -> csr_matrix:
from scipy.sparse import coo_matrix
wl = self.wl
x = self.x
y = self.y
structure = self.structure
boundary = self.boundary
method = self.method
dx = numpy.diff(x)
dy = numpy.diff(y)
dx = numpy.r_[dx[0], dx, dx[-1]].reshape(-1, 1)
dy = numpy.r_[dy[0], dy, dy[-1]].reshape(1, -1)
xc = (x[:-1] + x[1:]) / 2
yc = (y[:-1] + y[1:]) / 2
eps = structure.eps_func(yc, xc)
eps = numpy.c_[eps[:, 0:1], eps, eps[:, -1:]]
eps = numpy.r_[eps[0:1, :], eps, eps[-1:, :]]
nx = len(xc)
ny = len(yc)
self.nx = nx
self.ny = ny
k = 2 * numpy.pi / wl
ones_nx = numpy.ones((nx, 1))
ones_ny = numpy.ones((1, ny))
n = numpy.dot(ones_nx, 0.5 * (dy[:, 2:] + dy[:, 1:-1])).flatten()
s = numpy.dot(ones_nx, 0.5 * (dy[:, 0:-2] + dy[:, 1:-1])).flatten()
e = numpy.dot(0.5 * (dx[2:, :] + dx[1:-1, :]), ones_ny).flatten()
w = numpy.dot(0.5 * (dx[0:-2, :] + dx[1:-1, :]), ones_ny).flatten()
p = numpy.dot(dx[1:-1, :], ones_ny).flatten()
q = numpy.dot(ones_nx, dy[:, 1:-1]).flatten()
en = eps[1:-1, 2:].flatten()
es = eps[1:-1, 0:-2].flatten()
ee = eps[2:, 1:-1].flatten()
ew = eps[0:-2, 1:-1].flatten()
ep = eps[1:-1, 1:-1].flatten()
# three methods: Ex, Ey and scalar
if method == "Ex":
# Ex
An = 2 / n / (n + s)
As = 2 / s / (n + s)
Ae = (
8
* (p * (ep - ew) + 2 * w * ew)
* ee
/ (
(p * (ep - ee) + 2 * e * ee)
* (p ** 2 * (ep - ew) + 4 * w ** 2 * ew)
+ (p * (ep - ew) + 2 * w * ew)
* (p ** 2 * (ep - ee) + 4 * e ** 2 * ee)
)
)
Aw = (
8
* (p * (ep - ee) + 2 * e * ee)
* ew
/ (
(p * (ep - ee) + 2 * e * ee)
* (p ** 2 * (ep - ew) + 4 * w ** 2 * ew)
+ (p * (ep - ew) + 2 * w * ew)
* (p ** 2 * (ep - ee) + 4 * e ** 2 * ee)
)
)
Ap = ep * k ** 2 - An - As - Ae * ep / ee - Aw * ep / ew
elif method == "Ey":
# Ey
An = (
8
* (q * (ep - es) + 2 * s * es)
* en
/ (
(q * (ep - en) + 2 * n * en)
* (q ** 2 * (ep - es) + 4 * s ** 2 * es)
+ (q * (ep - es) + 2 * s * es)
* (q ** 2 * (ep - en) + 4 * n ** 2 * en)
)
)
As = (
8
* (q * (ep - en) + 2 * n * en)
* es
/ (
(q * (ep - en) + 2 * n * en)
* (q ** 2 * (ep - es) + 4 * s ** 2 * es)
+ (q * (ep - es) + 2 * s * es)
* (q ** 2 * (ep - en) + 4 * n ** 2 * en)
)
)
Ae = 2 / e / (e + w)
Aw = 2 / w / (e + w)
Ap = ep * k ** 2 - An * ep / en - As * ep / es - Ae - Aw
elif method == "scalar":
# scalar
An = 2 / n / (n + s)
As = 2 / s / (n + s)
Ae = 2 / e / (e + w)
Aw = 2 / w / (e + w)
Ap = ep * k ** 2 - An - As - Ae - Aw
else:
raise ValueError("unknown method")
ii = numpy.arange(nx * ny).reshape(nx, ny)
# north boundary
ib = ii[:, -1]
if boundary[0] == "S":
Ap[ib] += An[ib]
elif boundary[0] == "A":
Ap[ib] -= An[ib]
# else:
# raise ValueError('unknown boundary')
# south
ib = ii[:, 0]
if boundary[1] == "S":
Ap[ib] += As[ib]
elif boundary[1] == "A":
Ap[ib] -= As[ib]
# else:
# raise ValueError('unknown boundary')
# east
ib = ii[-1, :]
if boundary[2] == "S":
Ap[ib] += Ae[ib]
elif boundary[2] == "A":
Ap[ib] -= Ae[ib]
# else:
# raise ValueError('unknown boundary')
# west
ib = ii[0, :]
if boundary[3] == "S":
Ap[ib] += Aw[ib]
elif boundary[3] == "A":
Ap[ib] -= Aw[ib]
# else:
# raise ValueError('unknown boundary')
iall = ii.flatten()
i_n = ii[:, 1:].flatten()
i_s = ii[:, :-1].flatten()
i_e = ii[1:, :].flatten()
i_w = ii[:-1, :].flatten()
I = numpy.r_[iall, i_w, i_e, i_s, i_n]
J = numpy.r_[iall, i_e, i_w, i_n, i_s]
V = numpy.r_[Ap[iall], Ae[i_w], Aw[i_e], An[i_s], As[i_n]]
A = coo_matrix((V, (I, J))).tocsr()
return A
def solve(
self,
neigs: int,
tol: float = 0,
mode_profiles: bool = True,
initial_mode_guess: None = None,
) -> "_ModeSolverSemiVectorial":
from scipy.sparse.linalg import eigen
self.nmodes = neigs
self.tol = tol
A = self.build_matrix()
eigs = eigen.eigs(
A,
k=neigs,
which="LR",
tol=0.001,
ncv=None,
v0=initial_mode_guess,
return_eigenvectors=mode_profiles,
)
if mode_profiles:
eigvals, eigvecs = eigs
else:
eigvals = eigs
eigvecs = None
        neff = self.wl * numpy.sqrt(eigvals) / (2 * numpy.pi)
if mode_profiles:
phi = []
for ieig in range(neigs):
tmp = eigvecs[:, ieig].reshape(self.nx, self.ny)
phi.append(tmp)
# sort and save the modes
idx = numpy.flipud(numpy.argsort(neff))
self.neff = neff[idx]
if mode_profiles:
tmp = []
for i in idx:
tmp.append(phi[i])
if self.method == "scalar":
self.phi = tmp
elif self.method == "Ex":
self.Ex = tmp
elif self.method == "Ey":
self.Ey = tmp
self.modes = tmp
return self
def __str__(self):
descr = (
"Semi-Vectorial Finite Difference Modesolver\n\tmethod: %s\n" % self.method
)
return descr
class _ModeSolverVectorial:
"""
The VFDModeSolver class computes the electric and magnetic fields for modes of a dielectric
waveguide using the "Vector Finite Difference (VFD)" method, as described in
<NAME>, <NAME> and <NAME>, "Vector Finite Difference Modesolver for
Anisotropic Dielectric Waveguides", J. Lightwave Technol. 26(11), 1423-1431, (2008).
Parameters
----------
wl : float
        The wavelength of the optical radiation (units are arbitrary, but must be self-consistent
        between all inputs. Recommendation is to just use micron for everything)
x : 1D array of floats
Array of x-values
y : 1D array of floats
Array of y-values
epsfunc : function
This is a function that provides the relative permittivity (square of the refractive index)
as a function of the x and y position. The function must be of the form:
``myRelativePermittivity(x,y)``
        The function can either return a single float, corresponding to an isotropic refractive index,
        or it may return a length-5 tuple. In the tuple case, the relative permittivity is given in the form
(epsxx, epsxy, epsyx, epsyy, epszz).
The light is `z` propagating.
boundary : str
This is a string that identifies the type of boundary conditions applied.
The following options are available:
'A' - Hx is antisymmetric, Hy is symmetric.
'S' - Hx is symmetric and, Hy is antisymmetric.
'0' - Hx and Hy are zero immediately outside of the boundary.
The string identifies all four boundary conditions, in the order: North, south, east, west.
For example, boundary='000A'
Returns
-------
self : an instance of the VFDModeSolver class
Typically self.solve() will be called in order to actually find the modes.
"""
def __init__(
self, wl: Union[float, float64], structure: RidgeWaveguide, boundary: str
) -> None:
self.wl = wl
self.x = structure.y
self.y = structure.x
self.epsfunc = structure.eps_func
self.boundary = boundary
def build_matrix(self) -> csr_matrix:
from scipy.sparse import coo_matrix
wl = self.wl
x = self.x
y = self.y
epsfunc = self.epsfunc
boundary = self.boundary
dx = numpy.diff(x)
dy = numpy.diff(y)
dx = numpy.r_[dx[0], dx, dx[-1]].reshape(-1, 1)
dy = numpy.r_[dy[0], dy, dy[-1]].reshape(1, -1)
xc = (x[:-1] + x[1:]) / 2
yc = (y[:-1] + y[1:]) / 2
tmp = epsfunc(yc, xc)
if isinstance(tmp, tuple):
tmp = [numpy.c_[t[:, 0:1], t, t[:, -1:]] for t in tmp]
tmp = [numpy.r_[t[0:1, :], t, t[-1:, :]] for t in tmp]
epsyy, epsyx, epsxy, epsxx, epszz = tmp
else:
tmp = numpy.c_[tmp[:, 0:1], tmp, tmp[:, -1:]]
tmp = numpy.r_[tmp[0:1, :], tmp, tmp[-1:, :]]
epsxx = epsyy = epszz = tmp
epsxy = epsyx = numpy.zeros_like(epsxx)
nx = len(x)
ny = len(y)
self.nx = nx
self.ny = ny
k = 2 * numpy.pi / wl
ones_nx = numpy.ones((nx, 1))
ones_ny = numpy.ones((1, ny))
n = numpy.dot(ones_nx, dy[:, 1:]).flatten()
s = numpy.dot(ones_nx, dy[:, :-1]).flatten()
e = numpy.dot(dx[1:, :], ones_ny).flatten()
w = numpy.dot(dx[:-1, :], ones_ny).flatten()
exx1 = epsxx[:-1, 1:].flatten()
exx2 = epsxx[:-1, :-1].flatten()
exx3 = epsxx[1:, :-1].flatten()
exx4 = epsxx[1:, 1:].flatten()
eyy1 = epsyy[:-1, 1:].flatten()
eyy2 = epsyy[:-1, :-1].flatten()
eyy3 = epsyy[1:, :-1].flatten()
eyy4 = epsyy[1:, 1:].flatten()
exy1 = epsxy[:-1, 1:].flatten()
exy2 = epsxy[:-1, :-1].flatten()
exy3 = epsxy[1:, :-1].flatten()
exy4 = epsxy[1:, 1:].flatten()
eyx1 = epsyx[:-1, 1:].flatten()
eyx2 = epsyx[:-1, :-1].flatten()
eyx3 = epsyx[1:, :-1].flatten()
eyx4 = epsyx[1:, 1:].flatten()
ezz1 = epszz[:-1, 1:].flatten()
ezz2 = epszz[:-1, :-1].flatten()
ezz3 = epszz[1:, :-1].flatten()
ezz4 = epszz[1:, 1:].flatten()
ns21 = n * eyy2 + s * eyy1
ns34 = n * eyy3 + s * eyy4
ew14 = e * exx1 + w * exx4
ew23 = e * exx2 + w * exx3
axxn = (
(2 * eyy4 * e - eyx4 * n) * (eyy3 / ezz4) / ns34
+ (2 * eyy1 * w + eyx1 * n) * (eyy2 / ezz1) / ns21
) / (n * (e + w))
axxs = (
(2 * eyy3 * e + eyx3 * s) * (eyy4 / ezz3) / ns34
+ (2 * eyy2 * w - eyx2 * s) * (eyy1 / ezz2) / ns21
) / (s * (e + w))
ayye = (2 * n * exx4 - e * exy4) * exx1 / ezz4 / e / ew14 / (n + s) + (
2 * s * exx3 + e * exy3
) * exx2 / ezz3 / e / ew23 / (n + s)
ayyw = (2 * exx1 * n + exy1 * w) * exx4 / ezz1 / w / ew14 / (n + s) + (
2 * exx2 * s - exy2 * w
) * exx3 / ezz2 / w / ew23 / (n + s)
axxe = (
2 / (e * (e + w))
+ (eyy4 * eyx3 / ezz3 - eyy3 * eyx4 / ezz4) / (e + w) / ns34
)
axxw = (
2 / (w * (e + w))
+ (eyy2 * eyx1 / ezz1 - eyy1 * eyx2 / ezz2) / (e + w) / ns21
)
ayyn = (
2 / (n * (n + s))
+ (exx4 * exy1 / ezz1 - exx1 * exy4 / ezz4) / (n + s) / ew14
)
ayys = (
2 / (s * (n + s))
+ (exx2 * exy3 / ezz3 - exx3 * exy2 / ezz2) / (n + s) / ew23
)
axxne = +eyx4 * eyy3 / ezz4 / (e + w) / ns34
axxse = -eyx3 * eyy4 / ezz3 / (e + w) / ns34
axxnw = -eyx1 * eyy2 / ezz1 / (e + w) / ns21
axxsw = +eyx2 * eyy1 / ezz2 / (e + w) / ns21
ayyne = +exy4 * exx1 / ezz4 / (n + s) / ew14
ayyse = -exy3 * exx2 / ezz3 / (n + s) / ew23
ayynw = -exy1 * exx4 / ezz1 / (n + s) / ew14
ayysw = +exy2 * exx3 / ezz2 / (n + s) / ew23
axxp = (
-axxn
- axxs
- axxe
- axxw
- axxne
- axxse
- axxnw
- axxsw
+ k ** 2
* (n + s)
* (eyy4 * eyy3 * e / ns34 + eyy1 * eyy2 * w / ns21)
/ (e + w)
)
ayyp = (
-ayyn
- ayys
- ayye
- ayyw
- ayyne
- ayyse
- ayynw
- ayysw
+ k ** 2
* (e + w)
* (exx1 * exx4 * n / ew14 + exx2 * exx3 * s / ew23)
/ (n + s)
)
axyn = (
eyy3 * eyy4 / ezz4 / ns34
- eyy2 * eyy1 / ezz1 / ns21
+ s * (eyy2 * eyy4 - eyy1 * eyy3) / ns21 / ns34
) / (e + w)
axys = (
eyy1 * eyy2 / ezz2 / ns21
- eyy4 * eyy3 / ezz3 / ns34
+ n * (eyy2 * eyy4 - eyy1 * eyy3) / ns21 / ns34
) / (e + w)
ayxe = (
exx1 * exx4 / ezz4 / ew14
- exx2 * exx3 / ezz3 / ew23
+ w * (exx2 * exx4 - exx1 * exx3) / ew23 / ew14
) / (n + s)
ayxw = (
exx3 * exx2 / ezz2 / ew23
- exx4 * exx1 / ezz1 / ew14
+ e * (exx4 * exx2 - exx1 * exx3) / ew23 / ew14
) / (n + s)
axye = (eyy4 * (1 + eyy3 / ezz4) - eyy3 * (1 + eyy4 / ezz4)) / ns34 / (
e + w
) - (
2 * eyx1 * eyy2 / ezz1 * n * w / ns21
+ 2 * eyx2 * eyy1 / ezz2 * s * w / ns21
+ 2 * eyx4 * eyy3 / ezz4 * n * e / ns34
+ 2 * eyx3 * eyy4 / ezz3 * s * e / ns34
+ 2 * eyy1 * eyy2 * (1.0 / ezz1 - 1.0 / ezz2) * w ** 2 / ns21
) / e / (
e + w
) ** 2
axyw = (eyy2 * (1 + eyy1 / ezz2) - eyy1 * (1 + eyy2 / ezz2)) / ns21 / (
e + w
) - (
2 * eyx1 * eyy2 / ezz1 * n * e / ns21
+ 2 * eyx2 * eyy1 / ezz2 * s * e / ns21
+ 2 * eyx4 * eyy3 / ezz4 * n * w / ns34
+ 2 * eyx3 * eyy4 / ezz3 * s * w / ns34
+ 2 * eyy3 * eyy4 * (1.0 / ezz3 - 1.0 / ezz4) * e ** 2 / ns34
) / w / (
e + w
) ** 2
ayxn = (exx4 * (1 + exx1 / ezz4) - exx1 * (1 + exx4 / ezz4)) / ew14 / (
n + s
) - (
2 * exy3 * exx2 / ezz3 * e * s / ew23
+ 2 * exy2 * exx3 / ezz2 * w * n / ew23
+ 2 * exy4 * exx1 / ezz4 * e * s / ew14
+ 2 * exy1 * exx4 / ezz1 * w * n / ew14
+ 2 * exx3 * exx2 * (1.0 / ezz3 - 1.0 / ezz2) * s ** 2 / ew23
) / n / (
n + s
) ** 2
ayxs = (exx2 * (1 + exx3 / ezz2) - exx3 * (1 + exx2 / ezz2)) / ew23 / (
n + s
) - (
2 * exy3 * exx2 / ezz3 * e * n / ew23
+ 2 * exy2 * exx3 / ezz2 * w * n / ew23
+ 2 * exy4 * exx1 / ezz4 * e * s / ew14
+ 2 * exy1 * exx4 / ezz1 * w * s / ew14
+ 2 * exx1 * exx4 * (1.0 / ezz1 - 1.0 / ezz4) * n ** 2 / ew14
) / s / (
n + s
) ** 2
axyne = +eyy3 * (1 - eyy4 / ezz4) / (e + w) / ns34
axyse = -eyy4 * (1 - eyy3 / ezz3) / (e + w) / ns34
axynw = -eyy2 * (1 - eyy1 / ezz1) / (e + w) / ns21
axysw = +eyy1 * (1 - eyy2 / ezz2) / (e + w) / ns21
ayxne = +exx1 * (1 - exx4 / ezz4) / (n + s) / ew14
ayxse = -exx2 * (1 - exx3 / ezz3) / (n + s) / ew23
ayxnw = -exx4 * (1 - exx1 / ezz1) / (n + s) / ew14
ayxsw = +exx3 * (1 - exx2 / ezz2) / (n + s) / ew23
axyp = -(axyn + axys + axye + axyw + axyne + axyse + axynw + axysw) - k ** 2 * (
w * (n * eyx1 * eyy2 + s * eyx2 * eyy1) / ns21
+ e * (s * eyx3 * eyy4 + n * eyx4 * eyy3) / ns34
) / (e + w)
ayxp = -(ayxn + ayxs + ayxe + ayxw + ayxne + ayxse + ayxnw + ayxsw) - k ** 2 * (
n * (w * exy1 * exx4 + e * exy4 * exx1) / ew14
+ s * (w * exy2 * exx3 + e * exy3 * exx2) / ew23
) / (n + s)
ii = numpy.arange(nx * ny).reshape(nx, ny)
# NORTH boundary
ib = ii[:, -1]
if boundary[0] == "S":
sign = 1
elif boundary[0] == "A":
sign = -1
elif boundary[0] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
axxs[ib] += sign * axxn[ib]
axxse[ib] += sign * axxne[ib]
axxsw[ib] += sign * axxnw[ib]
ayxs[ib] += sign * ayxn[ib]
ayxse[ib] += sign * ayxne[ib]
ayxsw[ib] += sign * ayxnw[ib]
ayys[ib] -= sign * ayyn[ib]
ayyse[ib] -= sign * ayyne[ib]
ayysw[ib] -= sign * ayynw[ib]
axys[ib] -= sign * axyn[ib]
axyse[ib] -= sign * axyne[ib]
axysw[ib] -= sign * axynw[ib]
# SOUTH boundary
ib = ii[:, 0]
if boundary[1] == "S":
sign = 1
elif boundary[1] == "A":
sign = -1
elif boundary[1] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
axxn[ib] += sign * axxs[ib]
axxne[ib] += sign * axxse[ib]
axxnw[ib] += sign * axxsw[ib]
ayxn[ib] += sign * ayxs[ib]
ayxne[ib] += sign * ayxse[ib]
ayxnw[ib] += sign * ayxsw[ib]
ayyn[ib] -= sign * ayys[ib]
ayyne[ib] -= sign * ayyse[ib]
ayynw[ib] -= sign * ayysw[ib]
axyn[ib] -= sign * axys[ib]
axyne[ib] -= sign * axyse[ib]
axynw[ib] -= sign * axysw[ib]
# EAST boundary
ib = ii[-1, :]
if boundary[2] == "S":
sign = 1
elif boundary[2] == "A":
sign = -1
elif boundary[2] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
axxw[ib] += sign * axxe[ib]
axxnw[ib] += sign * axxne[ib]
axxsw[ib] += sign * axxse[ib]
ayxw[ib] += sign * ayxe[ib]
ayxnw[ib] += sign * ayxne[ib]
ayxsw[ib] += sign * ayxse[ib]
ayyw[ib] -= sign * ayye[ib]
ayynw[ib] -= sign * ayyne[ib]
ayysw[ib] -= sign * ayyse[ib]
axyw[ib] -= sign * axye[ib]
axynw[ib] -= sign * axyne[ib]
axysw[ib] -= sign * axyse[ib]
# WEST boundary
ib = ii[0, :]
if boundary[3] == "S":
sign = 1
elif boundary[3] == "A":
sign = -1
elif boundary[3] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
axxe[ib] += sign * axxw[ib]
axxne[ib] += sign * axxnw[ib]
axxse[ib] += sign * axxsw[ib]
ayxe[ib] += sign * ayxw[ib]
ayxne[ib] += sign * ayxnw[ib]
ayxse[ib] += sign * ayxsw[ib]
ayye[ib] -= sign * ayyw[ib]
ayyne[ib] -= sign * ayynw[ib]
ayyse[ib] -= sign * ayysw[ib]
axye[ib] -= sign * axyw[ib]
axyne[ib] -= sign * axynw[ib]
axyse[ib] -= sign * axysw[ib]
# Assemble sparse matrix
iall = ii.flatten()
i_s = ii[:, :-1].flatten()
i_n = ii[:, 1:].flatten()
i_e = ii[1:, :].flatten()
i_w = ii[:-1, :].flatten()
i_ne = ii[1:, 1:].flatten()
i_se = ii[1:, :-1].flatten()
i_sw = ii[:-1, :-1].flatten()
i_nw = ii[:-1, 1:].flatten()
Ixx = numpy.r_[iall, i_w, i_e, i_s, i_n, i_ne, i_se, i_sw, i_nw]
Jxx = numpy.r_[iall, i_e, i_w, i_n, i_s, i_sw, i_nw, i_ne, i_se]
Vxx = numpy.r_[
axxp[iall],
axxe[i_w],
axxw[i_e],
axxn[i_s],
axxs[i_n],
axxsw[i_ne],
axxnw[i_se],
axxne[i_sw],
axxse[i_nw],
]
Ixy = numpy.r_[iall, i_w, i_e, i_s, i_n, i_ne, i_se, i_sw, i_nw]
Jxy = numpy.r_[iall, i_e, i_w, i_n, i_s, i_sw, i_nw, i_ne, i_se] + nx * ny
Vxy = numpy.r_[
axyp[iall],
axye[i_w],
axyw[i_e],
axyn[i_s],
axys[i_n],
axysw[i_ne],
axynw[i_se],
axyne[i_sw],
axyse[i_nw],
]
Iyx = numpy.r_[iall, i_w, i_e, i_s, i_n, i_ne, i_se, i_sw, i_nw] + nx * ny
Jyx = numpy.r_[iall, i_e, i_w, i_n, i_s, i_sw, i_nw, i_ne, i_se]
Vyx = numpy.r_[
ayxp[iall],
ayxe[i_w],
ayxw[i_e],
ayxn[i_s],
ayxs[i_n],
ayxsw[i_ne],
ayxnw[i_se],
ayxne[i_sw],
ayxse[i_nw],
]
Iyy = numpy.r_[iall, i_w, i_e, i_s, i_n, i_ne, i_se, i_sw, i_nw] + nx * ny
Jyy = numpy.r_[iall, i_e, i_w, i_n, i_s, i_sw, i_nw, i_ne, i_se] + nx * ny
Vyy = numpy.r_[
ayyp[iall],
ayye[i_w],
ayyw[i_e],
ayyn[i_s],
ayys[i_n],
ayysw[i_ne],
ayynw[i_se],
ayyne[i_sw],
ayyse[i_nw],
]
I = numpy.r_[Ixx, Ixy, Iyx, Iyy]
J = numpy.r_[Jxx, Jxy, Jyx, Jyy]
V = numpy.r_[Vxx, Vxy, Vyx, Vyy]
A = coo_matrix((V, (I, J))).tocsr()
return A
def compute_other_fields(
self, neffs: ndarray, Hxs: List[ndarray], Hys: List[ndarray]
) -> Tuple[List[ndarray], List[ndarray], List[ndarray], List[ndarray]]:
from scipy.sparse import coo_matrix
wl = self.wl
x = self.x
y = self.y
epsfunc = self.epsfunc
boundary = self.boundary
Hzs = []
Exs = []
Eys = []
Ezs = []
for neff, Hx, Hy in zip(neffs, Hxs, Hys):
dx = numpy.diff(x)
dy = numpy.diff(y)
dx = numpy.r_[dx[0], dx, dx[-1]].reshape(-1, 1)
dy = numpy.r_[dy[0], dy, dy[-1]].reshape(1, -1)
xc = (x[:-1] + x[1:]) / 2
yc = (y[:-1] + y[1:]) / 2
tmp = epsfunc(yc, xc)
if isinstance(tmp, tuple):
tmp = [numpy.c_[t[:, 0:1], t, t[:, -1:]] for t in tmp]
tmp = [numpy.r_[t[0:1, :], t, t[-1:, :]] for t in tmp]
epsxx, epsxy, epsyx, epsyy, epszz = tmp
else:
tmp = numpy.c_[tmp[:, 0:1], tmp, tmp[:, -1:]]
tmp = numpy.r_[tmp[0:1, :], tmp, tmp[-1:, :]]
epsxx = epsyy = epszz = tmp
epsxy = epsyx = numpy.zeros_like(epsxx)
nx = len(x)
ny = len(y)
k = 2 * numpy.pi / wl
ones_nx = numpy.ones((nx, 1))
ones_ny = numpy.ones((1, ny))
n = numpy.dot(ones_nx, dy[:, 1:]).flatten()
s = numpy.dot(ones_nx, dy[:, :-1]).flatten()
e = numpy.dot(dx[1:, :], ones_ny).flatten()
w = numpy.dot(dx[:-1, :], ones_ny).flatten()
exx1 = epsxx[:-1, 1:].flatten()
exx2 = epsxx[:-1, :-1].flatten()
exx3 = epsxx[1:, :-1].flatten()
exx4 = epsxx[1:, 1:].flatten()
eyy1 = epsyy[:-1, 1:].flatten()
eyy2 = epsyy[:-1, :-1].flatten()
eyy3 = epsyy[1:, :-1].flatten()
eyy4 = epsyy[1:, 1:].flatten()
exy1 = epsxy[:-1, 1:].flatten()
exy2 = epsxy[:-1, :-1].flatten()
exy3 = epsxy[1:, :-1].flatten()
exy4 = epsxy[1:, 1:].flatten()
eyx1 = epsyx[:-1, 1:].flatten()
eyx2 = epsyx[:-1, :-1].flatten()
eyx3 = epsyx[1:, :-1].flatten()
eyx4 = epsyx[1:, 1:].flatten()
ezz1 = epszz[:-1, 1:].flatten()
ezz2 = epszz[:-1, :-1].flatten()
ezz3 = epszz[1:, :-1].flatten()
ezz4 = epszz[1:, 1:].flatten()
b = neff * k
bzxne = (
0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* eyx4
/ ezz4
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy3
* eyy1
* w
* eyy2
+ 0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* (1 - exx4 / ezz4)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* exx1
* s
) / b
bzxse = (
-0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* eyx3
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy1
* w
* eyy2
+ 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* (1 - exx3 / ezz3)
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* n
* exx1
* exx4
) / b
bzxnw = (
-0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* eyx1
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy2
* e
- 0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* (1 - exx1 / ezz1)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* exx4
* s
) / b
bzxsw = (
0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* eyx2
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* e
- 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* (1 - exx2 / ezz2)
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx3
* n
* exx1
* exx4
) / b
bzxn = (
(
0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* n
* ezz1
* ezz2
/ eyy1
* (2 * eyy1 / ezz1 / n ** 2 + eyx1 / ezz1 / n / w)
+ 0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* n
* ezz4
* ezz3
/ eyy4
* (2 * eyy4 / ezz4 / n ** 2 - eyx4 / ezz4 / n / e)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
(ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* (
0.5
* ezz4
* (
(1 - exx1 / ezz1) / n / w
- exy1 / ezz1 * (2.0 / n ** 2 - 2 / n ** 2 * s / (n + s))
)
/ exx1
* ezz1
* w
+ (ezz4 - ezz1) * s / n / (n + s)
+ 0.5
* ezz1
* (
-(1 - exx4 / ezz4) / n / e
- exy4 / ezz4 * (2.0 / n ** 2 - 2 / n ** 2 * s / (n + s))
)
/ exx4
* ezz4
* e
)
- (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* (
-ezz3 * exy2 / n / (n + s) / exx2 * w
+ (ezz3 - ezz2) * s / n / (n + s)
- ezz2 * exy3 / n / (n + s) / exx3 * e
)
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzxs = (
(
0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* s
* ezz2
* ezz1
/ eyy2
* (2 * eyy2 / ezz2 / s ** 2 - eyx2 / ezz2 / s / w)
+ 0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* s
* ezz3
* ezz4
/ eyy3
* (2 * eyy3 / ezz3 / s ** 2 + eyx3 / ezz3 / s / e)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
(ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* (
-ezz4 * exy1 / s / (n + s) / exx1 * w
- (ezz4 - ezz1) * n / s / (n + s)
- ezz1 * exy4 / s / (n + s) / exx4 * e
)
- (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* (
0.5
* ezz3
* (
-(1 - exx2 / ezz2) / s / w
- exy2 / ezz2 * (2.0 / s ** 2 - 2 / s ** 2 * n / (n + s))
)
/ exx2
* ezz2
* w
- (ezz3 - ezz2) * n / s / (n + s)
+ 0.5
* ezz2
* (
(1 - exx3 / ezz3) / s / e
- exy3 / ezz3 * (2.0 / s ** 2 - 2 / s ** 2 * n / (n + s))
)
/ exx3
* ezz3
* e
)
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzxe = (
(n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* (
0.5 * n * ezz4 * ezz3 / eyy4 * (2.0 / e ** 2 - eyx4 / ezz4 / n / e)
+ 0.5
* s
* ezz3
* ezz4
/ eyy3
* (2.0 / e ** 2 + eyx3 / ezz3 / s / e)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
-0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* ezz1
* (1 - exx4 / ezz4)
/ n
/ exx4
* ezz4
- 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* ezz2
* (1 - exx3 / ezz3)
/ s
/ exx3
* ezz3
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzxw = (
(-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* (
0.5 * n * ezz1 * ezz2 / eyy1 * (2.0 / w ** 2 + eyx1 / ezz1 / n / w)
+ 0.5
* s
* ezz2
* ezz1
/ eyy2
* (2.0 / w ** 2 - eyx2 / ezz2 / s / w)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* ezz4
* (1 - exx1 / ezz1)
/ n
/ exx1
* ezz1
+ 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* ezz3
* (1 - exx2 / ezz2)
/ s
/ exx2
* ezz2
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzxp = (
(
(-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* (
0.5
* n
* ezz1
* ezz2
/ eyy1
* (
-2.0 / w ** 2
- 2 * eyy1 / ezz1 / n ** 2
+ k ** 2 * eyy1
- eyx1 / ezz1 / n / w
)
+ 0.5
* s
* ezz2
* ezz1
/ eyy2
* (
-2.0 / w ** 2
- 2 * eyy2 / ezz2 / s ** 2
+ k ** 2 * eyy2
+ eyx2 / ezz2 / s / w
)
)
+ (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* (
0.5
* n
* ezz4
* ezz3
/ eyy4
* (
-2.0 / e ** 2
- 2 * eyy4 / ezz4 / n ** 2
+ k ** 2 * eyy4
+ eyx4 / ezz4 / n / e
)
+ 0.5
* s
* ezz3
* ezz4
/ eyy3
* (
-2.0 / e ** 2
- 2 * eyy3 / ezz3 / s ** 2
+ k ** 2 * eyy3
- eyx3 / ezz3 / s / e
)
)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
(ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* (
0.5
* ezz4
* (
-(k ** 2) * exy1
- (1 - exx1 / ezz1) / n / w
- exy1 / ezz1 * (-2.0 / n ** 2 - 2 / n ** 2 * (n - s) / s)
)
/ exx1
* ezz1
* w
+ (ezz4 - ezz1) * (n - s) / n / s
+ 0.5
* ezz1
* (
-(k ** 2) * exy4
+ (1 - exx4 / ezz4) / n / e
- exy4 / ezz4 * (-2.0 / n ** 2 - 2 / n ** 2 * (n - s) / s)
)
/ exx4
* ezz4
* e
)
- (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* (
0.5
* ezz3
* (
-(k ** 2) * exy2
+ (1 - exx2 / ezz2) / s / w
- exy2 / ezz2 * (-2.0 / s ** 2 + 2 / s ** 2 * (n - s) / n)
)
/ exx2
* ezz2
* w
+ (ezz3 - ezz2) * (n - s) / n / s
+ 0.5
* ezz2
* (
-(k ** 2) * exy3
- (1 - exx3 / ezz3) / s / e
- exy3 / ezz3 * (-2.0 / s ** 2 + 2 / s ** 2 * (n - s) / n)
)
/ exx3
* ezz3
* e
)
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzyne = (
0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* (1 - eyy4 / ezz4)
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy3
* eyy1
* w
* eyy2
+ 0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* exy4
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* exx1
* s
) / b
bzyse = (
-0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* (1 - eyy3 / ezz3)
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy1
* w
* eyy2
+ 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* exy3
/ ezz3
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* n
* exx1
* exx4
) / b
bzynw = (
-0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* (1 - eyy1 / ezz1)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy2
* e
- 0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* exy1
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* exx4
* s
) / b
bzysw = (
0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* (1 - eyy2 / ezz2)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* e
- 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* exy2
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx3
* n
* exx1
* exx4
) / b
bzyn = (
(
0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* ezz1
* ezz2
/ eyy1
* (1 - eyy1 / ezz1)
/ w
- 0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* ezz4
* ezz3
/ eyy4
* (1 - eyy4 / ezz4)
/ e
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* (
0.5 * ezz4 * (2.0 / n ** 2 + exy1 / ezz1 / n / w) / exx1 * ezz1 * w
+ 0.5
* ezz1
* (2.0 / n ** 2 - exy4 / ezz4 / n / e)
/ exx4
* ezz4
* e
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzys = (
(
-0.5
* (-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* ezz2
* ezz1
/ eyy2
* (1 - eyy2 / ezz2)
/ w
+ 0.5
* (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* ezz3
* ezz4
/ eyy3
* (1 - eyy3 / ezz3)
/ e
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
- (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* (
0.5 * ezz3 * (2.0 / s ** 2 - exy2 / ezz2 / s / w) / exx2 * ezz2 * w
+ 0.5
* ezz2
* (2.0 / s ** 2 + exy3 / ezz3 / s / e)
/ exx3
* ezz3
* e
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzye = (
(
(-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* (
-n * ezz2 / eyy1 * eyx1 / e / (e + w)
+ (ezz1 - ezz2) * w / e / (e + w)
- s * ezz1 / eyy2 * eyx2 / e / (e + w)
)
+ (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* (
0.5
* n
* ezz4
* ezz3
/ eyy4
* (
-(1 - eyy4 / ezz4) / n / e
- eyx4 / ezz4 * (2.0 / e ** 2 - 2 / e ** 2 * w / (e + w))
)
+ 0.5
* s
* ezz3
* ezz4
/ eyy3
* (
(1 - eyy3 / ezz3) / s / e
- eyx3 / ezz3 * (2.0 / e ** 2 - 2 / e ** 2 * w / (e + w))
)
+ (ezz4 - ezz3) * w / e / (e + w)
)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* ezz1
* (2 * exx4 / ezz4 / e ** 2 - exy4 / ezz4 / n / e)
/ exx4
* ezz4
* e
- 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* ezz2
* (2 * exx3 / ezz3 / e ** 2 + exy3 / ezz3 / s / e)
/ exx3
* ezz3
* e
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzyw = (
(
(-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* (
0.5
* n
* ezz1
* ezz2
/ eyy1
* (
(1 - eyy1 / ezz1) / n / w
- eyx1 / ezz1 * (2.0 / w ** 2 - 2 / w ** 2 * e / (e + w))
)
- (ezz1 - ezz2) * e / w / (e + w)
+ 0.5
* s
* ezz2
* ezz1
/ eyy2
* (
-(1 - eyy2 / ezz2) / s / w
- eyx2 / ezz2 * (2.0 / w ** 2 - 2 / w ** 2 * e / (e + w))
)
)
+ (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* (
-n * ezz3 / eyy4 * eyx4 / w / (e + w)
- s * ezz4 / eyy3 * eyx3 / w / (e + w)
- (ezz4 - ezz3) * e / w / (e + w)
)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
0.5
* (ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* ezz4
* (2 * exx1 / ezz1 / w ** 2 + exy1 / ezz1 / n / w)
/ exx1
* ezz1
* w
- 0.5
* (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* ezz3
* (2 * exx2 / ezz2 / w ** 2 - exy2 / ezz2 / s / w)
/ exx2
* ezz2
* w
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
bzyp = (
(
(-n * ezz4 * ezz3 / eyy4 - s * ezz3 * ezz4 / eyy3)
* (
0.5
* n
* ezz1
* ezz2
/ eyy1
* (
-(k ** 2) * eyx1
- (1 - eyy1 / ezz1) / n / w
- eyx1 / ezz1 * (-2.0 / w ** 2 + 2 / w ** 2 * (e - w) / e)
)
+ (ezz1 - ezz2) * (e - w) / e / w
+ 0.5
* s
* ezz2
* ezz1
/ eyy2
* (
-(k ** 2) * eyx2
+ (1 - eyy2 / ezz2) / s / w
- eyx2 / ezz2 * (-2.0 / w ** 2 + 2 / w ** 2 * (e - w) / e)
)
)
+ (n * ezz1 * ezz2 / eyy1 + s * ezz2 * ezz1 / eyy2)
* (
0.5
* n
* ezz4
* ezz3
/ eyy4
* (
-(k ** 2) * eyx4
+ (1 - eyy4 / ezz4) / n / e
- eyx4 / ezz4 * (-2.0 / e ** 2 - 2 / e ** 2 * (e - w) / w)
)
+ 0.5
* s
* ezz3
* ezz4
/ eyy3
* (
-(k ** 2) * eyx3
- (1 - eyy3 / ezz3) / s / e
- eyx3 / ezz3 * (-2.0 / e ** 2 - 2 / e ** 2 * (e - w) / w)
)
+ (ezz4 - ezz3) * (e - w) / e / w
)
)
/ ezz4
/ ezz3
/ (n * eyy3 + s * eyy4)
/ ezz2
/ ezz1
/ (n * eyy2 + s * eyy1)
/ (e + w)
* eyy4
* eyy3
* eyy1
* w
* eyy2
* e
+ (
(ezz3 / exx2 * ezz2 * w + ezz2 / exx3 * ezz3 * e)
* (
0.5
* ezz4
* (
-2.0 / n ** 2
- 2 * exx1 / ezz1 / w ** 2
+ k ** 2 * exx1
- exy1 / ezz1 / n / w
)
/ exx1
* ezz1
* w
+ 0.5
* ezz1
* (
-2.0 / n ** 2
- 2 * exx4 / ezz4 / e ** 2
+ k ** 2 * exx4
+ exy4 / ezz4 / n / e
)
/ exx4
* ezz4
* e
)
- (ezz4 / exx1 * ezz1 * w + ezz1 / exx4 * ezz4 * e)
* (
0.5
* ezz3
* (
-2.0 / s ** 2
- 2 * exx2 / ezz2 / w ** 2
+ k ** 2 * exx2
+ exy2 / ezz2 / s / w
)
/ exx2
* ezz2
* w
+ 0.5
* ezz2
* (
-2.0 / s ** 2
- 2 * exx3 / ezz3 / e ** 2
+ k ** 2 * exx3
- exy3 / ezz3 / s / e
)
/ exx3
* ezz3
* e
)
)
/ ezz3
/ ezz2
/ (w * exx3 + e * exx2)
/ ezz4
/ ezz1
/ (w * exx4 + e * exx1)
/ (n + s)
* exx2
* exx3
* n
* exx1
* exx4
* s
) / b
ii = numpy.arange(nx * ny).reshape(nx, ny)
# NORTH boundary
ib = ii[:, -1]
if boundary[0] == "S":
sign = 1
elif boundary[0] == "A":
sign = -1
elif boundary[0] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
bzxs[ib] += sign * bzxn[ib]
bzxse[ib] += sign * bzxne[ib]
bzxsw[ib] += sign * bzxnw[ib]
bzys[ib] -= sign * bzyn[ib]
bzyse[ib] -= sign * bzyne[ib]
bzysw[ib] -= sign * bzynw[ib]
# SOUTH boundary
ib = ii[:, 0]
if boundary[1] == "S":
sign = 1
elif boundary[1] == "A":
sign = -1
elif boundary[1] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
bzxn[ib] += sign * bzxs[ib]
bzxne[ib] += sign * bzxse[ib]
bzxnw[ib] += sign * bzxsw[ib]
bzyn[ib] -= sign * bzys[ib]
bzyne[ib] -= sign * bzyse[ib]
bzynw[ib] -= sign * bzysw[ib]
# EAST boundary
ib = ii[-1, :]
if boundary[2] == "S":
sign = 1
elif boundary[2] == "A":
sign = -1
elif boundary[2] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
bzxw[ib] += sign * bzxe[ib]
bzxnw[ib] += sign * bzxne[ib]
bzxsw[ib] += sign * bzxse[ib]
bzyw[ib] -= sign * bzye[ib]
bzynw[ib] -= sign * bzyne[ib]
bzysw[ib] -= sign * bzyse[ib]
# WEST boundary
ib = ii[0, :]
if boundary[3] == "S":
sign = 1
elif boundary[3] == "A":
sign = -1
elif boundary[3] == "0":
sign = 0
else:
raise ValueError("unknown boundary conditions")
bzxe[ib] += sign * bzxw[ib]
bzxne[ib] += sign * bzxnw[ib]
bzxse[ib] += sign * bzxsw[ib]
bzye[ib] -= sign * bzyw[ib]
bzyne[ib] -= sign * bzynw[ib]
bzyse[ib] -= sign * bzysw[ib]
# Assemble sparse matrix
iall = ii.flatten()
i_s = ii[:, :-1].flatten()
i_n = ii[:, 1:].flatten()
i_e = ii[1:, :].flatten()
i_w = ii[:-1, :].flatten()
i_ne = ii[1:, 1:].flatten()
i_se = ii[1:, :-1].flatten()
i_sw = ii[:-1, :-1].flatten()
i_nw = ii[:-1, 1:].flatten()
Izx = numpy.r_[iall, i_w, i_e, i_s, i_n, i_ne, i_se, i_sw, i_nw]
Jzx = numpy.r_[iall, i_e, i_w, i_n, i_s, i_sw, i_nw, i_ne, i_se]
Vzx = numpy.r_[
bzxp[iall],
bzxe[i_w],
bzxw[i_e],
bzxn[i_s],
bzxs[i_n],
bzxsw[i_ne],
bzxnw[i_se],
bzxne[i_sw],
bzxse[i_nw],
]
Izy = numpy.r_[iall, i_w, i_e, i_s, i_n, i_ne, i_se, i_sw, i_nw]
Jzy = numpy.r_[iall, i_e, i_w, i_n, i_s, i_sw, i_nw, i_ne, i_se] + nx * ny
Vzy = numpy.r_[
bzyp[iall],
bzye[i_w],
bzyw[i_e],
bzyn[i_s],
bzys[i_n],
bzysw[i_ne],
bzynw[i_se],
bzyne[i_sw],
bzyse[i_nw],
]
I = numpy.r_[Izx, Izy]
J = numpy.r_[Jzx, Jzy]
V = numpy.r_[Vzx, Vzy]
B = coo_matrix((V, (I, J))).tocsr()
HxHy = numpy.r_[Hx, Hy]
Hz = B * HxHy.ravel() / 1j
Hz = Hz.reshape(Hx.shape)
            # on the centered (xc, yc) grid
exx = epsxx[1:-1, 1:-1]
exy = epsxy[1:-1, 1:-1]
eyx = epsyx[1:-1, 1:-1]
eyy = epsyy[1:-1, 1:-1]
ezz = epszz[1:-1, 1:-1]
edet = exx * eyy - exy * eyx
h = e.reshape(nx, ny)[:-1, :-1]
v = n.reshape(nx, ny)[:-1, :-1]
            # on the centered (xc, yc) grid
Dx = neff * centered2d(Hy) + (
Hz[:-1, 1:] + Hz[1:, 1:] - Hz[:-1, :-1] - Hz[1:, :-1]
) / (2j * k * v)
Dy = -neff * centered2d(Hx) - (
Hz[1:, :-1] + Hz[1:, 1:] - Hz[:-1, 1:] - Hz[:-1, :-1]
) / (2j * k * h)
Dz = (
(Hy[1:, :-1] + Hy[1:, 1:] - Hy[:-1, 1:] - Hy[:-1, :-1]) / (2 * h)
- (Hx[:-1, 1:] + Hx[1:, 1:] - Hx[:-1, :-1] - Hx[1:, :-1]) / (2 * v)
) / (1j * k)
Ex = (eyy * Dx - exy * Dy) / edet
Ey = (exx * Dy - eyx * Dx) / edet
Ez = Dz / ezz
Hzs.append(Hz)
Exs.append(Ex)
Eys.append(Ey)
Ezs.append(Ez)
return (Hzs, Exs, Eys, Ezs)
def solve(
self,
neigs: int = 4,
tol: float = 0,
guess: None = None,
mode_profiles: bool = True,
initial_mode_guess: None = None,
) -> "_ModeSolverVectorial":
"""
This function finds the eigenmodes.
Parameters
----------
neigs : int
number of eigenmodes to find
tol : float
Relative accuracy for eigenvalues. The default value of 0 implies machine precision.
guess : float
a guess for the refractive index. Only finds eigenvectors with an effective refractive index
higher than this value.
Returns
-------
self : an instance of the VFDModeSolver class
obtain the fields of interest for specific modes using, for example:
solver = EMpy.modesolvers.FD.VFDModeSolver(wavelength, x, y, epsf, boundary).solve()
Ex = solver.modes[0].Ex
Ey = solver.modes[0].Ey
Ez = solver.modes[0].Ez
"""
from scipy.sparse.linalg import eigen
self.nmodes = neigs
self.tol = tol
A = self.build_matrix()
if guess is not None:
# calculate shift for eigs function
k = 2 * numpy.pi / self.wl
shift = (guess * k) ** 2
else:
shift = None
[eigvals, eigvecs] = eigen.eigs(
A,
k=neigs,
which="LR",
tol=0.001,
ncv=None,
v0=initial_mode_guess,
return_eigenvectors=mode_profiles,
sigma=shift,
)
        neffs = self.wl * numpy.sqrt(eigvals) / (2 * numpy.pi)
if mode_profiles:
Hxs = []
Hys = []
nx = self.nx
ny = self.ny
for ieig in range(neigs):
Hxs.append(eigvecs[: nx * ny, ieig].reshape(nx, ny))
Hys.append(eigvecs[nx * ny :, ieig].reshape(nx, ny))
# sort the modes
idx = numpy.flipud(numpy.argsort(neffs))
neffs = neffs[idx]
self.neff = neffs
if mode_profiles:
tmpx = []
tmpy = []
for i in idx:
tmpx.append(Hxs[i])
tmpy.append(Hys[i])
Hxs = tmpx
Hys = tmpy
[Hzs, Exs, Eys, Ezs] = self.compute_other_fields(neffs, Hxs, Hys)
self.modes = []
for (neff, Hx, Hy, Hz, Ex, Ey, Ez) in zip(
neffs, Hxs, Hys, Hzs, Exs, Eys, Ezs
):
self.modes.append(
FDMode(
self.wl, self.x, self.y, neff, Ey, Ex, Ez, Hy, Hx, Hz
).normalize()
)
return self
def __str__(self):
descr = "Vectorial Finite Difference Modesolver\n"
return descr
class FDMode:
def __init__(
self,
wl: Union[float, float64],
x: ndarray,
y: ndarray,
neff: complex128,
Ex: ndarray,
Ey: ndarray,
Ez: ndarray,
Hx: ndarray,
Hy: ndarray,
Hz: ndarray,
) -> None:
self.wl = wl
self.x = x
self.y = y
self.neff = neff
self.Ex = Ex
self.Ey = Ey
self.Ez = Ez
self.Hx = Hx
self.Hy = Hy
self.Hz = Hz
self.fields = col.OrderedDict(
{"Ex": Ex, "Ey": Ey, "Ez": Ez, "Hx": Hx, "Hy": Hy, "Hz": Hz}
)
def norm(self) -> complex128:
x = centered1d(self.x)
y = centered1d(self.y)
        return numpy.sqrt(trapz2(self.intensity(), x=x, y=y))
def normalize(self) -> "FDMode":
n = self.norm()
self.Ex /= n
self.Ey /= n
self.Ez /= n
self.Hx /= n
self.Hy /= n
self.Hz /= n
return self
def intensityTETM(self, x: None = None, y: None = None) -> Tuple[ndarray, ndarray]:
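        # z-directed Poynting flux, split into quasi-TE (Ex*conj(Hy)/2)
        # and quasi-TM (-Ey*conj(Hx)/2) contributions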
I_TE = self.Ex * centered2d(numpy.conj(self.Hy)) / 2.0
I_TM = -self.Ey * centered2d(numpy.conj(self.Hx)) / 2.0
if x is None and y is None:
return (I_TE, I_TM)
else:
x0 = centered1d(self.x)
y0 = centered1d(self.y)
            # build interpolants on the original grid (x0, y0) and evaluate at
            # (x, y); scipy's interp2d wants z shaped (len(y), len(x)), hence .T
            I_TE_ = interp2d(x0, y0, I_TE.T)(x, y)
            I_TM_ = interp2d(x0, y0, I_TM.T)(x, y)
return (I_TE_, I_TM_)
def intensity(self, x: None = None, y: None = None) -> ndarray:
I_TE, I_TM = self.intensityTETM(x, y)
return I_TE + I_TM
```
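A hedged sketch wiring the vectorial solver above to a structure from `modes.waveguide`; it uses only names visible in this file (underscore-prefixed, so treat them as internal API).
```python
# Sketch: drive the vectorial FD solver with a ridge waveguide.
# Internal names used as shown in this repo; illustrative only.
from modes import _mode_solver_lib as ms
from modes.waveguide import waveguide

wg = waveguide(width=0.5, thickness=0.22)
solver = ms._ModeSolverVectorial(wg._wl, wg, boundary="0000").solve(neigs=2)
for mode in solver.modes:
    print(mode.neff)  # complex effective index of each FDMode
```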
#### File: modesolverpy/modes/_mode_solver_semi_vectorial.py
```python
from pathlib import PosixPath
from typing import Dict, List, Optional, Union
import matplotlib.pylab as plt
import numpy as np
from numpy import ndarray
from modes import _analyse as anal
from modes import _mode_solver_lib as ms
from modes._mode_solver import _ModeSolver
class ModeSolverSemiVectorial(_ModeSolver):
"""
A semi-vectorial mode solver object used to
setup and run a mode solving simulation.
Args:
n_eigs (int): The number of eigen-values to solve for.
tol (float): The precision of the eigen-value/eigen-vector
solver. Default is 0.001.
boundary (str): The boundary conditions to use.
This is a string that identifies the type of boundary conditions applied.
The following options are available: 'A' - Hx is antisymmetric, Hy is symmetric,
'S' - Hx is symmetric and, Hy is antisymmetric, and '0' - Hx and Hy are zero
immediately outside of the boundary.
The string identifies all four boundary conditions, in the order:
North, south, east, west. For example, boundary='000A'. Default is '0000'.
        mode_profiles (bool): `True` if the mode profiles should be found, `False`
if only the effective indices should be found.
initial_mode_guess (list): An initial mode guess for the modesolver.
semi_vectorial_method (str): Either 'Ex' or 'Ey'. If 'Ex', the mode solver
will only find TE modes (horizontally polarised to the simulation window),
if 'Ey', the mode solver will find TM modes (vertically polarised to the
simulation window).
"""
def __init__(
self,
n_eigs: int,
tol: float = 0.001,
boundary: str = "0000",
mode_profiles: bool = True,
initial_mode_guess: Optional[float] = None,
semi_vectorial_method: str = "Ex",
wg: None = None,
) -> None:
self._semi_vectorial_method = semi_vectorial_method
_ModeSolver.__init__(
self, n_eigs, tol, boundary, mode_profiles, initial_mode_guess
)
self.name = "mode_solver_semi_vectorial"
self.wg = wg
self.results = None
def solve(self) -> Dict[str, Union[ndarray, List[ndarray]]]:
"""Find the modes of a given structure.
Returns:
dict: The 'n_effs' key gives the effective indices
                of the modes. The 'modes' key exists if mode
profiles were solved for; in this case, it will
return arrays of the mode profiles.
"""
structure = self._structure = self.wg
wavelength = self.wg._wl
self._ms = ms._ModeSolverSemiVectorial(
wavelength, structure, self._boundary, self._semi_vectorial_method
)
self._ms.solve(
self._n_eigs,
self._tol,
self._mode_profiles,
initial_mode_guess=self._initial_mode_guess,
)
self.n_effs = self._ms.neff
r = {"n_effs": self.n_effs}
if self._mode_profiles:
r["modes"] = self._ms.modes
self._ms.modes[0] = np.real(self._ms.modes[0])
self._initial_mode_guess = np.real(self._ms.modes[0])
self.modes = self._ms.modes
return r
def write_modes_to_file(
self,
filename: PosixPath = "mode.dat",
plot: bool = True,
analyse: bool = True,
logscale: bool = False,
) -> List[ndarray]:
"""
Writes the mode fields to a file and optionally plots them.
Args:
filename (str): The nominal filename to use for the saved
                data. The suffix will automatically be changed to
                identify each mode number. Default is 'mode.dat'
            plot (bool): `True` if plots should be generated,
otherwise `False`. Default is `True`.
analyse (bool): `True` if an analysis on the fundamental
mode should be performed. The analysis adds to the
plot of the fundamental mode the power mode-field
diameter (MFD) and marks it on the output, and it
marks with a cross the maximum E-field value.
Default is `True`.
        Returns:
            list: The mode field profiles that were written.
"""
for i, mode in enumerate(self._ms.modes):
filename_mode = self._get_mode_filename(
self._semi_vectorial_method, i, filename
)
self._write_mode_to_file(np.real(mode), filename_mode)
if plot:
self.plot_modes(filename=filename, analyse=analyse, logscale=logscale)
return self.modes
def plot_modes(
self,
filename: PosixPath = "mode.dat",
analyse: bool = True,
logscale: bool = False,
) -> None:
for i, mode in enumerate(self.modes):
filename_mode = self._get_mode_filename(
self._semi_vectorial_method, i, filename
)
if i == 0 and analyse:
A, centre, sigma_2 = anal.fit_gaussian(
self.wg.xc, self.wg.yc, np.abs(mode)
)
subtitle = (
"E_{max} = %.3f, (x_{max}, y_{max}) = (%.3f, %.3f), MFD_{x} = %.3f, "
"MFD_{y} = %.3f"
) % (A, centre[0], centre[1], sigma_2[0], sigma_2[1])
plt.figure()
self._plot_mode(
self._semi_vectorial_method,
i,
filename_mode,
self.n_effs[i],
subtitle,
sigma_2[0],
sigma_2[1],
centre[0],
centre[1],
wavelength=self.wg._wl,
logscale=logscale,
)
else:
plt.figure()
self._plot_mode(
self._semi_vectorial_method,
i,
filename_mode,
self.n_effs[i],
wavelength=self.wg._wl,
logscale=logscale,
)
```
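A matching sketch for the semi-vectorial wrapper above; the constructor arguments follow the signature shown and the import paths are assumed.
```python
# Sketch: first two quasi-TE modes of a strip waveguide (paths assumed).
from modes._mode_solver_semi_vectorial import ModeSolverSemiVectorial
from modes.waveguide import waveguide

wg = waveguide(width=0.5, thickness=0.22)
solver = ModeSolverSemiVectorial(n_eigs=2, semi_vectorial_method="Ex", wg=wg)
result = solver.solve()
print(result["n_effs"])  # effective indices, sorted high to low
```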
#### File: modesolverpy/modes/waveguide.py
```python
from collections.abc import Iterable
from typing import Callable, List, Union
import matplotlib.pylab as plt
import numpy as np
from modes._structure import RidgeWaveguide, WgArray
from modes.autoname import autoname
from modes.config import CONFIG
from modes.materials import nitride, si, sio2
WaveguideType = Union[RidgeWaveguide, WgArray]
@autoname
def waveguide(
x_step: float = 0.02,
y_step: float = 0.02,
thickness: float = 0.22,
width: float = 0.5,
slab_thickness: float = 0,
sub_thickness: float = 0.5,
sub_width: float = 2.0,
clad_thickness: List[float] = [0.5],
n_sub: Union[Callable, float] = sio2,
n_wg: Union[Callable, float] = si,
n_clads: List[Union[Callable, float]] = [sio2],
wavelength: float = 1.55,
angle: float = 90.0,
) -> RidgeWaveguide:
"""Return waveguide structure
Args:
x_step: x grid step (um)
y_step: y grid step (um)
thickness: waveguide thickness (um)
        width: waveguide width (um)
        slab_thickness: slab thickness for rib waveguides (um)
        sub_width: substrate width, which sets the total simulation width (um)
        sub_thickness: substrate thickness, the bottom simulation margin (um)
        clad_thickness: list of cladding thicknesses (top simulation margin) (um)
        n_sub: substrate index material
        n_wg: core waveguide index material
        n_clads: list of cladding refractive indices or index functions
        wavelength: wavelength (um)
        angle: sidewall angle (degrees)
::
_________________________________
clad_thickness
width
<---------->
___________ _ _ _ _ _ _
| |
_____| |____ |
thickness
slab_thickness |
_______________________ _ _ _ _ __
sub_thickness
_________________________________
<------------------------------->
sub_width
To define a waveguide we need to define:
- the material functions or refractive indices of box, waveguide and clad
- thickness of each material
- x and y_steps for structure grid
- sidewall angle
- wavelength that can be used in case the refractive index are a function of the wavelength
Where all units are in um
.. plot::
:include-source:
import modes as ms
wg = ms.waveguide(width=0.5, thickness=0.22, slab_thickness=0.09, angle=80)
ms.write_material_index(wg)
"""
if not isinstance(n_clads, Iterable):
raise ValueError(f"nclads not Iterable, got {n_clads}")
if not isinstance(clad_thickness, Iterable):
raise ValueError(f"clad_thickness not Iterable, got {clad_thickness}")
n_wg = n_wg(wavelength) if callable(n_wg) else n_wg
n_sub = n_sub(wavelength) if callable(n_sub) else n_sub
n_clad = [n_clad(wavelength) if callable(n_clad) else n_clad for n_clad in n_clads]
film_thickness = thickness
thickness = film_thickness - slab_thickness
return RidgeWaveguide(
wavelength=wavelength,
x_step=x_step,
y_step=y_step,
thickness=thickness,
width=width,
sub_thickness=sub_thickness,
sub_width=sub_width,
clad_thickness=clad_thickness,
n_sub=n_sub,
n_wg=n_wg,
angle=angle,
n_clad=n_clad,
film_thickness=film_thickness,
)
@autoname
def waveguide_array(
wg_gaps: List[float],
widths: List[float],
x_step: float = 0.02,
y_step: float = 0.02,
thickness: float = 0.22,
    slab_thickness: float = 0.0,
sub_thickness: float = 0.5,
sub_width: float = 2.0,
clad_thickness: List[float] = [0.5],
n_sub: Callable = sio2,
n_wg: Callable = si,
n_clads: List[Callable] = [sio2],
wavelength: float = 1.55,
angle: float = 90.0,
) -> WgArray:
"""Returns a evanescent coupled waveguides ::
__________________________________________________________
clad_thickness
widths[0] wg_gaps[0] widths[1]
<-----------><----------><-----------> _ _ _ _ _ _
___________ ___________
| | | |
_____| |____________| |____ |
thickness
slab_thickness |
________________________________________________ _ _ _ _ _
sub_thickness
__________________________________________________________
<-------------------------------------------------------->
sub_width
Args:
wg_gaps: between waveguides
widths: of each waveguide (list)
x_step: grid x step (um)
y_step: grid y step(um)
n_sub: substrate refractive index value or function(wavelength)
n_wg: waveguide refractive index value or function(wavelength)
n_clads: waveguide refractive index value or function(wavelength)
slab_thickness: slab thickness (um)
sub_thickness: substrate thickness (um)
clad_thickness: cladding thickness (um)
wavelength: in um
angle: sidewall angle in degrees
Where all units are in um
.. plot::
:include-source:
import modes as ms
wg_array = ms.waveguide_array(wg_gaps=[0.2], widths=[0.5, 0.5], slab_thickness=0.09)
ms.write_material_index(wg_array)
"""
n_wg = n_wg(wavelength) if callable(n_wg) else n_wg
n_sub = n_sub(wavelength) if callable(n_sub) else n_sub
n_clad = [n_clad(wavelength) if callable(n_clad) else n_clad for n_clad in n_clads]
film_thickness = thickness
thickness = film_thickness - slab_thickness
return WgArray(
widths=widths,
wg_gaps=wg_gaps,
wavelength=wavelength,
x_step=x_step,
y_step=y_step,
thickness=thickness,
sub_thickness=sub_thickness,
sub_width=sub_width,
clad_thickness=clad_thickness,
n_sub=n_sub,
n_wg=n_wg,
angle=angle,
n_clad=n_clad,
film_thickness=film_thickness,
)
def get_waveguide_filepath(wg):
return CONFIG.cache / f"{wg.name}.dat"
def write_material_index(wg, filepath=None):
""" writes the waveguide refractive index into filepath"""
filepath = filepath or get_waveguide_filepath(wg)
wg.write_to_file(filepath)
def test_waveguide_name() -> None:
wg1 = waveguide(angle=80, width=0.5)
wg2 = waveguide(width=0.5, angle=80)
assert wg1.name == wg2.name, (
f"{wg1} and {wg2} waveguides have the same settings and should have the same"
" name"
)
def test_waveguide_material_index() -> None:
wg = waveguide()
n = wg.n
sx, sy = np.shape(n)
n_wg = wg.n[sx // 2][sy // 2]
assert n_wg == si(wg._wl)
def test_waveguide_array_material_index() -> None:
wg = waveguide_array(wg_gaps=[0.2], widths=[0.5] * 2)
n = wg.n
sx, sy = np.shape(n)
n_wg = wg.n[sx // 2][sy // 2]
assert n_wg == sio2(wg._wl)
if __name__ == "__main__":
wg = waveguide(
width=0.5,
angle=80,
n_wg=si,
clad_thickness=[50e-3, 50e-3, 0.5],
n_clads=[sio2, nitride, sio2],
)
# wg = waveguide_array(widths=[0.5] * 2, wg_gaps=[0.2], slab_thickness=0.09)
# print(wg)
# test_waveguide_material_index()
# test_waveguide_array_material_index()
write_material_index(wg)
plt.show()
``` |
{
"source": "joamatab/photonic-coupling-drivers",
"score": 3
} |
#### File: plab/lasers/newport_venturi.py
```python
import pygpib as gpib
from plab.lasers.laser import Laser
import time
class NewportVenturi(Laser):
def __init__(self, gpib_num, gpib_dev_num, units="mW"):
self._dev = gpib.dev(gpib_num, gpib_dev_num)
self.set_power_units(units)
self._sleep = 0.1
def _write(self, cmd):
gpib.write(self._dev, cmd + "\r\n")
time.sleep(self._sleep)
def _read(self, num_bytes=100):
data = gpib.read(self._dev, num_bytes)
time.sleep(self._sleep)
return data.decode("ascii")
def _query(self, cmd, num_bytes=100):
self._write(cmd)
data = self._read(num_bytes)
data = [d.strip() for d in data.split()[1:]]
return data
def get_power_W(self):
power = float(self._query(":conf:tls:powe?")[0])
if self._units == "mW":
power_W = power * 1.0e-3
elif self._units == "dBm":
            power_W = 10.0 ** (power / 10.0) * 1.0e-3  # dBm -> W (replaces undefined `las` helper)
return power_W
def set_power_W(self, power_W):
power_mW = power_W * 1.0e3
return float(self._query(":conf:tls:powe %.5f" % power_mW)[0])
def get_on_or_off(self):
on_off = self._query(":conf:tls:outp?")[0]
return True if on_off == "ON" else False
def turn_on(self):
return self._qurey(":conf:tls:outp on")[0]
def turn_off(self):
return self._query(":conf:tls:outp off")[0]
def set_power_units(self, units):
assert units in ("mW", "dBm")
units = self._query(":config:tls:unit %s" % units)[0]
self._units = units
return units
def get_power_units(self):
return self._query(":config:tls:unit?")[0]
def get_wavelength_m(self):
return float(self._query(":conf:tls:wave?")[0])
def set_wavelength_m(self, wavelength_m):
wavelength_nm = wavelength_m * 1.0e9
return float(self._query(":conf:tls:wave %.3f" % wavelength_nm)[0])
def start_sweep(self):
self._query(":init")
def get_sweep_start_wavelength_nm(self):
return float(self._query(":conf:swee:start?")[1])
def set_sweep_start_wavelength_nm(self, wavelength_nm):
return float(
self._query(":conf:swee:start %s %.3f" % (self._mode, wavelength_nm))[1]
)
def get_sweep_stop_wavelength_nm(self):
return float(self._query(":conf:swee:stop?")[1])
def set_sweep_stop_wavelength_nm(self, wavelength_nm):
return float(
self._query(":conf:swee:stop %s %.3f" % (self._mode, wavelength_nm))[1]
)
def get_sweep_speed_nm_s(self):
return float(self._query(":conf:swee:rate?")[0])
def set_sweep_speed_nm_s(self, sweep_speed_nm_s):
return float(self._query(":conf:swee:rate %.1f" % sweep_speed_nm_s)[0])
def get_sweep_mode(self):
return self._query(":conf:swee:mode?")[0]
def set_sweep_mode(self, mode):
mode = mode.lower()
assert mode in ("cont", "continuous", "step", "time")
self._mode = mode
return self._query(":conf:swee:mode %s" % mode)[0]
def wait_command_complete(self):
assert self._query("*opc?")[0] == "1/1"
return True
def set_num_sweeps(self, num_sweeps):
"""
        Number of times to run the sweep when a
        sweep is started. If `0`, the laser will
        sweep continuously.
Args:
num_sweeps(int): Number of times to run the sweep.
0 for infinite repeats.
Returns:
int: Number of times the sweep will be run.
"""
num_sweeps = int(num_sweeps)
assert 0 <= num_sweeps <= 10000
return int(self._query(":conf:swee:coun %i" % num_sweeps)[0])
def get_num_sweeps(self):
return int(self._query(":conf:swee:coun?")[0])
def sweep(
self, start_wavelength_nm, stop_wavelength_nm, sweep_speed_nm_s, power_mW
):
self.set_sweep_start_wavelength_nm(start_wavelength_nm)
self.set_sweep_stop_wavelength_nm(stop_wavelength_nm)
self.set_sweep_speed_nm_s(sweep_speed_nm_s)
        self.set_power_W(power_mW * 1.0e-3)  # class exposes set_power_W; convert mW -> W
self.start_sweep()
return self.wait_command_complete()
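
# Usage sketch (added; hypothetical GPIB bus/device numbers, requires a
# connected Venturi tunable laser):
if __name__ == "__main__":
    laser = NewportVenturi(gpib_num=0, gpib_dev_num=5, units="mW")
    laser.set_wavelength_m(1550e-9)
    laser.turn_on()
    print(laser.get_power_W())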
```
#### File: plab/lasers/sacher_maxon_epos2.py
```python
import ctypes as ct
_char = ct.c_int8
_int8 = ct.c_int8
_byte = ct.c_uint8
_short = ct.c_int16
_long = ct.c_int32
_word = ct.c_uint16
_dword = ct.c_uint32
_bool = ct.c_int32
_p = ct.pointer
def _cast(c_value, c_type):
ptr = ct.cast(ct.byref(c_value), ct.POINTER(c_type))
return ptr.contents.value
def _cast_u32_to_float(value_u32):
return _cast(value_u32, ct.c_float)
def _cast_float_u32(value_float):
    return _cast(value_float, ct.c_uint32)
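
# Quick illustration of the bit-casts above (pure ctypes, no hardware):
# 0x3F800000 is the IEEE-754 single-precision bit pattern for 1.0, so
# _cast_u32_to_float(ct.c_uint32(0x3F800000)) returns 1.0.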
class MaxonEpos2:
def __init__(
self,
wavelength_nm=None,
velocity_nm_s=None,
acceleration_nm_s2=None,
deceleration_nm_s2=None,
):
self._lib = ct.CDLL("libEposCmd.so")
self._handle = ct.c_void_p()
self._error_code = ct.c_uint32(0)
self._error_code_ref = ct.byref(self._error_code)
self._node_id = ct.c_uint32(1)
# Initialisation flowchart from Sacher
self._open_device("EPOS2", "MAXON SERIAL V2", "USB", "USB0")
self._set_protocol_stack_settings(100, 1000000)
self._clear_fault_state()
if self.get_state() == "ST_ENABLED":
self._set_disable_state()
# self._set_encoder_parameters(512, 4) # Error saying sensor type 4 is out of range.
assert self.get_state() == "ST_DISABLED"
operation_mode = "Position Profile Mode"
if self._get_operation_mode() != operation_mode:
self._set_operation_mode(operation_mode)
v, a, d = self.get_position_profile()
if v > 11400 / 180 or a > 20000 / 180 or d > 20000 / 180:
self.set_position_profile(1, 1, 1)
# Motor [step] <-> Wavelength [nm] coefficients
self._A = -6.31908e-12
self._B = 0.000163586
self._C = 1582.65
# Absolute maximum motor range
self._wl_min = 1500.0
self._wl_max = 1640.0
self._bow = self._determine_bow()
self._motor_min = self.get_motor_pos_from_wavelength(self._wl_min)
self._motor_max = self.get_motor_pos_from_wavelength(self._wl_max)
# Wavelength [nm] per motor revolution
self._step_per_rev = 102400
self._nm_per_rev = (
self.get_wavelength_from_motor_pos(self._step_per_rev) - self._C
)
self._nm_per_step = self._nm_per_rev / self._step_per_rev
self._max_step_per_sec = 11400
# Set parameters if specified
if wavelength_nm:
self.set_wavelength_nm(wavelength_nm)
pos_profile = list(self.get_position_profile())
if velocity_nm_s:
pos_profile[0] = velocity_nm_s
if acceleration_nm_s2:
pos_profile[1] = acceleration_nm_s2
if deceleration_nm_s2:
pos_profile[2] = deceleration_nm_s2
self.set_position_profile(*pos_profile)
def _read_stored_position(self):
return self._read_memory(0x2081, 0, 4)
def _write_stored_position(self, value):
return self._write_memory(0x2081, 0, ct.c_int32(value), 4)
def calc_motor_acc(self, nm_per_sec_sec):
acc = nm_per_sec_sec / self._nm_per_step
return acc # [step/s/s]
def _open_device(self, device_name, protocol_stack_name, interface_name, port_name):
dn = ct.c_char_p(device_name.encode())
psn = ct.c_char_p(protocol_stack_name.encode())
inter = ct.c_char_p(interface_name.encode())
pn = ct.c_char_p(port_name.encode())
self._handle = self._lib.VCS_OpenDevice(
dn, psn, inter, pn, self._error_code_ref
)
assert not self._error_code.value, "Cannot connect to motor controller."
def _set_protocol_stack_settings(self, timeout_ms, baud_rate=1000000):
r = self._lib.VCS_SetProtocolStackSettings(
self._handle, _dword(baud_rate), _dword(timeout_ms), self._error_code_ref
)
assert r
def _clear_fault_state(self):
r = self._lib.VCS_ClearFault(self._handle, self._node_id, self._error_code_ref)
assert r
    def _set_encoder_parameters(self, counts, sensor_type):
        r = self._lib.VCS_SetEncoderParameter(
            self._handle,
            self._node_id,
            _word(counts),
            _word(sensor_type),
            self._error_code_ref,
        )
        assert r
def _get_encoder_parameters(self):
counts = _word()
sensor_type = _word()
        r = self._lib.VCS_GetEncoderParameter(
self._handle,
self._node_id,
ct.byref(counts),
ct.byref(sensor_type),
self._error_code_ref,
)
return counts.value, sensor_type.value
def _get_operation_mode(self):
pp = _int8()
r = self._lib.VCS_GetOperationMode(
self._handle, self._node_id, ct.byref(pp), self._error_code_ref
)
assert r
pp_lookup = {
1: "Position Profile Mode",
3: "Position Velocity Mode",
6: "Homing Mode",
7: "Interpolated Position Mode",
-1: "Position Mode",
-2: "Velocity Mode",
-3: "Current Mode",
-5: "Master Encoder Mode",
-6: "Step Direction Mode",
}
return pp_lookup[pp.value]
def _set_operation_mode(self, mode):
pp_lookup = {
"Position Profile Mode": 1,
"Position Velocity Mode": 3,
"Homing Mode": 6,
"Interpolated Position Mode": 7,
"Position Mode": -1,
"Velocity Mode": -2,
"Current Mode": -3,
"Master Encoder Mode": -5,
"Step Direction Mode": -6,
}
mode_num = _int8(pp_lookup[mode])
        r = self._lib.VCS_SetOperationMode(
self._handle, self._node_id, mode_num, self._error_code_ref
)
assert r
def set_position_profile(self, velocity, acceleration, deceleration):
assert 0.008 <= velocity <= 35
assert acceleration <= 25
assert deceleration <= 25
velocity *= 180
acceleration *= 180
deceleration *= 180
v = _dword(round(velocity))
a = _dword(round(acceleration))
d = _dword(round(deceleration))
r = self._lib.VCS_SetPositionProfile(
self._handle, self._node_id, v, a, d, self._error_code_ref
)
assert r
# Set save all parameters (register 4112d)
evas = ct.c_uint32(0x65766173) # 'e' 'v' 'a' 's'
num_bytes_written = _dword()
r = self._lib.VCS_SetObject(
self._handle,
self._node_id,
_word(0x1010), # Address 4112d
_byte(1), # Subindex 1
ct.byref(evas), # Data
_dword(4), # Write all 4 bytes of the data
ct.byref(num_bytes_written), # Number of bytes written
self._error_code_ref,
)
assert r
def get_position_profile(self):
v = _dword()
a = _dword()
d = _dword()
r = self._lib.VCS_GetPositionProfile(
self._handle,
self._node_id,
ct.byref(v),
ct.byref(a),
ct.byref(d),
self._error_code_ref,
)
assert r
v = v.value / 180.0
a = a.value / 180.0
d = d.value / 180.0
return v, a, d
def get_velocity_nm_s(self):
v, _, _ = self.get_position_profile()
return v
def set_velocity_nm_s(self, velocity_nm_s):
_, a, d = self.get_position_profile()
self.set_position_profile(velocity_nm_s, a, d)
def _get_movement_state(self):
status = _bool()
r = self._lib.VCS_GetMovementState(
self._handle, self._node_id, ct.byref(status), self._error_code_ref
)
assert r
return bool(status.value)
def _move_rel(self, rel_move_steps):
delta_wl_nm = self.get_wavelength_from_motor_pos(
rel_move_steps
) - self.get_wavelength_from_motor_pos(0)
delta_wl_nm = abs(round(delta_wl_nm))
sleep_interv_s = 0.2
ab = _bool(False)
im = _bool(True)
rm = _long(rel_move_steps)
self._set_enable_state()
r = self._lib.VCS_MoveToPosition(
self._handle, self._node_id, rm, ab, im, self._error_code_ref
)
assert r
while not self._get_movement_state():
print(self._get_motor_current())
# time.sleep(sleep_interv_s)
print(self._get_motor_current())
self._set_disable_state()
print(self._get_motor_current())
def set_wavelength_m(self, wavelength_m):
"""
        Moves the motor to a target wavelength.

        Process:
            1. get current position from register + offset
            2. get target position from wavelength
            3. 2. - 1. to get relative move to wavelength position
            4. do relative move by 3.
            5. VCS_GetPositionIs to read encoder
            6. update home: 1. + 5. -> 0x2081
            7. get wavelength

        Args:
            wavelength_m (float): Target wavelength [m] to move to.

        Returns:
            (float): Actual wavelength [nm] moved to.
"""
wavelength_nm = wavelength_m * 1.0e9
pos_reg = self._read_stored_position() # 1.
target_pos = self.get_motor_pos_from_wavelength(wavelength_nm) # 2.
pos_encoder_before = self._get_current_position()
assert self._motor_min <= target_pos <= self._motor_max
rel_move_steps = target_pos - pos_reg # 3.
# 4.
self._set_enable_state()
if rel_move_steps <= 0:
self._move_rel(rel_move_steps)
else:
self._move_rel(rel_move_steps - 10000)
self._move_rel(10000)
self._set_disable_state()
pos_encoder_after = self._get_current_position() # 5.
pos_encoder_rel = pos_encoder_after - pos_encoder_before
## 6.
pos_reg_new = pos_reg + pos_encoder_rel
self._write_memory(0x2081, 0, ct.c_int32(pos_reg_new), 4)
wl = self.get_wavelength_from_motor_pos(pos_reg_new) # 7.
return wl
def _get_current_position(self):
pos = _long()
r = self._lib.VCS_GetPositionIs(
self._handle, self._node_id, ct.byref(pos), self._error_code_ref
)
assert r
return pos.value
def _set_enable_state(self):
r = self._lib.VCS_SetEnableState(
self._handle, self._node_id, self._error_code_ref
)
assert r
def _set_disable_state(self):
r = self._lib.VCS_SetDisableState(
self._handle, self._node_id, self._error_code_ref
)
assert r
def get_state(self):
st = ct.pointer(_word())
r = self._lib.VCS_GetState(
self._handle, self._node_id, st, self._error_code_ref
)
assert r
state = st.contents.value
if state == 0x0000:
state = "ST_DISABLED"
elif state == 0x0001:
state = "ST_ENABLED"
elif state == 0x0002:
state = "ST_QUICKSTOP"
elif state == 0x0003:
state = "ST_FAULT"
return state
def get_error_code(self):
return hex(self._error_code.value)
def stop_move(self):
r = self._lib.VCS_HaltPositionMovement(
self._handle, self._node_id, self._error_code_ref
)
assert r
def _read_memory(self, object_index, object_subindex, bytes_to_read):
oi = _word(object_index)
osi = _byte(object_subindex)
btr = _dword(bytes_to_read)
data = ct.c_int32()
bytes_read = ct.pointer(_dword())
r = self._lib.VCS_GetObject(
self._handle,
self._node_id,
oi,
osi,
ct.byref(data),
btr,
bytes_read,
self._error_code_ref,
)
assert r
return data.value
def _write_memory(self, object_index, object_subindex, data, num_bytes_to_write):
oi = _word(object_index)
osi = _byte(object_subindex)
nbtw = _dword(num_bytes_to_write)
num_bytes_written = _dword()
r = self._lib.VCS_SetObject(
self._handle,
self._node_id,
oi,
osi,
ct.byref(data),
nbtw,
ct.byref(num_bytes_written),
self._error_code_ref,
)
assert r
return num_bytes_written
def _get_wavelength_range_nm(self):
value = self._read_memory(0x200C, 4, 4)
wl_min_nm = (value >> 16) / 10
wl_max_nm = (value & 0x0000FFFF) / 10
return wl_min_nm, wl_max_nm
def _get_wavelength_motor_pos_coefs(self):
A = ct.c_uint32(self._read_memory(0x200C, 1, 4))
B = ct.c_uint32(self._read_memory(0x200C, 2, 4))
C = ct.c_uint32(self._read_memory(0x200C, 3, 4))
A = _cast_u32_to_float(A)
B = _cast_u32_to_float(B)
C = _cast_u32_to_float(C)
return A, B, C
def get_wavelength_from_motor_pos(self, position):
assert self._motor_min <= position <= self._motor_max
A = self._A
B = self._B
C = self._C
wl_nm = A * position ** 2 + B * position + C
return wl_nm
def _solve_quadratic(self, wavelength):
A = self._A
B = self._B
C = self._C
k = -B / (2 * A)
j = B ** 2 / (4 * A ** 2) - (C - wavelength) / A
j = j ** 0.5
assert j.imag == 0
return k, j
def get_motor_pos_from_wavelength(self, wavelength):
assert self._wl_min <= wavelength <= self._wl_max
k, j = self._solve_quadratic(wavelength)
if self._bow == "neg":
pos = k - j
elif self._bow == "pos":
pos = k + j
return round(pos)
def get_wavelength_m(self):
pos_reg = self._read_stored_position()
pos = pos_reg
return self.get_wavelength_from_motor_pos(pos) * 1e-9
def _determine_bow(self):
vals = []
for wavelength in [self._wl_min, self._wl_max]:
k, j = self._solve_quadratic(wavelength)
vals.append(k - j)
vals.append(k + j)
if vals[0] < 0 < vals[2]:
bow = "neg"
elif vals[1] < 0 < vals[3]:
bow = "pos"
        else:
            raise RuntimeError("Could not determine motor bow direction.")
return bow
def update_motor_position(self, measured_wavelength_nm):
new_motor_pos = self.get_motor_pos_from_wavelength(measured_wavelength_nm)
self._write_stored_position(new_motor_pos)
def set_default_settings(self):
r = self._lib.VCS_Restore(self._handle, self._node_id, self._error_code_ref)
assert r
def _get_motor_current(self):
curr = _short()
r = self._lib.VCS_GetCurrentIs(
self._handle, self._node_id, ct.byref(curr), self._error_code_ref
)
assert r
return curr.value
def _get_motor_parameters(self):
nom_curr = _word()
max_curr = _word()
therm_tc = _word()
r = self._lib.VCS_GetDcMotorParameter(
self._handle,
self._node_id,
ct.byref(nom_curr),
ct.byref(max_curr),
ct.byref(therm_tc),
self._error_code_ref,
)
assert r
return nom_curr.value, max_curr.value, therm_tc.value
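
# Hardware-free sanity check (added sketch) of the step <-> wavelength
# quadratic wl = A*pos**2 + B*pos + C and the closed-form inverse used by
# MaxonEpos2._solve_quadratic(). Coefficients copied from __init__ above.
if __name__ == "__main__":
    A, B, C = -6.31908e-12, 0.000163586, 1582.65
    wl_nm = 1550.0
    k = -B / (2 * A)
    j = (B ** 2 / (4 * A ** 2) - (C - wl_nm) / A) ** 0.5
    pos = k - j  # "neg" bow branch, as _determine_bow() selects for these coefficients
    assert abs(A * pos ** 2 + B * pos + C - wl_nm) < 1e-3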
```
#### File: photonic-coupling-drivers/plab/measurement.py
```python
from typing import Optional, Dict, Union
import functools
import inspect
import hashlib
import dataclasses
import pathlib
import time
from pydantic import validate_arguments
import pandas as pd
import numpy as np
from omegaconf import OmegaConf
import omegaconf
from plab.config import PATH, logger, CONFIG
MAX_NAME_LENGTH = 64
@dataclasses.dataclass
class Measurement:
data: Optional[pd.DataFrame] = None
metadata: Optional[Union[omegaconf.DictConfig, omegaconf.ListConfig]] = None
def write(
self,
filename: Optional[str] = None,
dirpath: pathlib.Path = PATH.labdata,
overwrite: bool = False,
timestamp: bool = True,
) -> None:
filename = filename or f"{self.metadata.name}.csv"
filename = (
f"{self.metadata.time.timestamp}_{filename}" if timestamp else filename
)
csvpath = dirpath / filename
yamlpath = csvpath.with_suffix(".yml")
if csvpath.exists() and not overwrite:
raise FileExistsError(f"File {csvpath} exists")
logger.info(f"Writing {csvpath}")
self.data.to_csv(csvpath)
logger.info(f"Writing {yamlpath}")
yamlpath.write_text(OmegaConf.to_yaml(self.metadata))
def read(self, filename: str, dirpath: pathlib.Path = PATH.labdata) -> None:
"""
Args:
            filename: name without .csv extension
"""
self.data = pd.read_csv(dirpath / f"{filename}.csv")
self.metadata = OmegaConf.load(dirpath / f"{filename}.yml")
def ls(self, glob: str = "*.csv") -> None:
"""List all measured files"""
for csv in PATH.labdata.glob(glob):
print(csv.stem)
CACHE = {}
_remap = {
" ": "_",
"'": "",
}
def measurement_without_validator(func):
"""measurement decorator.
Adds a measurement name based on input parameters
logs measurement metadata into CONFIG
"""
@functools.wraps(func)
def _measurement(*args, **kwargs):
args_repr = [repr(a) for a in args]
# timestamp = time.strftime("%y%m%d%H%M%S", time.localtime())
kwargs_copy = kwargs.copy()
kwargs_copy.pop("description", "")
kwargs_repr = [f"{k}={v!r}" for k, v in kwargs_copy.items()]
name = f"{func.__name__}_{'_'.join(kwargs_repr)}"
if len(name) > MAX_NAME_LENGTH:
name_hash = hashlib.md5(name.encode()).hexdigest()[:8]
name = f"{func.__name__[:(MAX_NAME_LENGTH - 9)]}_{name_hash}"
for k, v in _remap.items():
name = name.replace(k, v)
arguments = ", ".join(args_repr + kwargs_repr)
if args:
raise ValueError(
f"measurement supports only Keyword args for `{func.__name__}({arguments})`"
)
assert callable(
func
), f"{func} got decorated with @measurement! @measurement decorator is only for functions"
sig = inspect.signature(func)
t0 = time.time()
logger.info(f"Starting {func.__name__}({','.join(kwargs_repr)}))")
data = func(*args, **kwargs)
if not isinstance(data, pd.DataFrame):
logger.warning(f"{func.__name__} needs to return a pandas.DataFrame")
t1 = time.time()
dt = t1 - t0
logger.info(f"Finished {func.__name__}({','.join(kwargs_repr)})), took {dt}")
settings = {}
settings.update(
**{
p.name: p.default
for p in sig.parameters.values()
if not callable(p.default)
}
)
settings.update(**kwargs)
timestamp = time.strftime("%y-%m-%d_%H:%M:%S", time.localtime())
time_dict = dict(t0=t0, t1=t1, dt=dt, timestamp=timestamp)
metadata = OmegaConf.create(
dict(name=name, time=time_dict, settings=settings, config=CONFIG)
)
measurement = Measurement(data=data, metadata=metadata)
CACHE[name] = measurement
return measurement
return _measurement
def measurement(func, *args, **kwargs) -> Measurement:
return measurement_without_validator(validate_arguments(func), *args, **kwargs)
@measurement
def demo(
vmin: float = 0.0, vmax: float = 1.0, vsteps: int = 20, **kwargs
) -> Measurement:
"""
Args:
vmin: min voltage
vmax: max voltage
vsteps: number of steps between min and max voltage
**kwargs: any labstate arguments that we want to log
"""
voltages = np.linspace(vmin, vmax, vsteps)
df = pd.DataFrame(dict(v=voltages))
return df
if __name__ == "__main__":
m = demo(sample="demo2")
print(m.metadata.name)
m.write(overwrite=True, dirpath=PATH.cwd)
```
#### File: plab/photon_counters/Idq801.py
```python
import sys
import numpy as np
import shutil
import time
import itertools as it
import collections.abc
import ctypes as ct
import os
import copy
sys.path.append(os.path.dirname(__file__))
from ThreadStoppable import ThreadStoppable
class Idq801(object):
def __init__(
self,
deviceId=-1,
timestamp_buffer_size=int(1e6),
integration_time_ms=0.5 * 1e3,
coincidence_window_bins=1000,
max_retry=3,
delay_retry_sec=0.01,
clean_data_directory=False,
data_directory="Idq801Data",
processing="external",
):
self._max_retry = max_retry
self._set_check_delay = delay_retry_sec # Delay in seconds between setting and
# checking that a parameter was set.
self._data_directory = data_directory
self._wait_for_settings = 1
self._processing_dict = {"i": "internal", "e": "external"}
processing = processing.lower()
        assert processing in self._processing_dict.values()
self._processing = processing
if not os.path.isdir(data_directory):
os.mkdir(data_directory)
if clean_data_directory:
self.clean_data_directory()
module_path = os.path.dirname(__file__) + "/"
if sys.platform == "linux":
self.idq801Lib = ct.CDLL(module_path + "libtdcbase.so")
elif sys.platform == "win32":
self.idq801Lib = ct.CDLL(module_path + "./tdcbase.dll")
else:
raise OSError("Invalid operating system")
if self.idq801Lib.TDC_init(deviceId):
raise RuntimeError("Could not connect to the ID801 counter.")
# Initial parameters.
self.unset_channel(-1)
self.set_timestamp_buffer_size(timestamp_buffer_size)
self.integration_time_ms = integration_time_ms
if self._processing == self._processing_dict["i"]:
self.set_integration_time(integration_time_ms)
else:
self.set_integration_time(1.0e-3) # 1us integration time.
self.set_coincidence_window_bins(1000)
self._time_last_get_timestamps = time.time()
self.channel_delays = {
"1": 0,
"2": 0,
"3": 0,
"4": 0,
"5": 0,
"6": 0,
"7": 0,
"8": 0,
}
self.set_channel_delays_ns(self.channel_delays)
self.accidental_delay = 0
def __del__(self):
self.idq801Lib.TDC_deInit()
def _set_value(self, set_value, setter, getter):
"""Sets a value and makes sure it was set."""
attempt = 0
is_set = False
while not is_set and attempt < self._max_retry:
attempt += 1
setter(set_value)
time.sleep(self._set_check_delay)
try:
if list(set_value) == list(getter()):
is_set = True
except TypeError:
if set_value == getter():
is_set = True
if not is_set:
raise RuntimeError(
"Unable to set the value using %s to %s after %i attempts."
% (setter.__name__, str(set_value), self._max_retry)
)
def _get_device_params(self):
cm = ct.c_int32()
cw = ct.c_int32()
ew = ct.c_int32()
self.idq801Lib.TDC_getDeviceParams(ct.byref(cm), ct.byref(cw), ct.byref(ew))
return (cm, cw, ew)
def _set_processing(self, processing):
processing = processing.lower()
assert processing in self._processing_dict.values()
self._processing = processing
if processing == self._processing_dict["i"]:
self.set_integration_time(self.integration_time_ms)
return self._processing
def set_processing_internal(self):
return self._set_processing("internal")
def set_processing_external(self):
return self._set_processing("external")
def clean_data_directory(self):
"""
Deletes all data in the `Idq801Data` directory.
"""
shutil.rmtree(self._data_directory, ignore_errors=True)
os.mkdir(self._data_directory)
def get_timebase(self):
self.idq801Lib.TDC_getTimebase.restype = ct.c_double
tb = self.idq801Lib.TDC_getTimebase()
return tb
def get_mask_channels(self):
cm, _, _ = self._get_device_params()
return cm.value
def get_status_channels(self):
cm, cw, ew = self._get_device_params()
channels_enabled = [bool(int(c)) for c in bin(cm.value)[2:]][::-1]
padLength = 8 - len(channels_enabled)
channels_enabled.extend([False] * padLength)
return tuple(channels_enabled)
    def get_enabled_channels(self):
        channels_status = self.get_status_channels()
        channels_enabled = tuple(
            i + 1 for i, v in enumerate(channels_status) if v
        )
        return channels_enabled

    def get_disabled_channels(self):
        channels_status = self.get_status_channels()
        channels_disabled = tuple(
            i + 1 for i, v in enumerate(channels_status) if not v
        )
        return channels_disabled
def is_channel_enabled(self, channel):
assert 1 <= channel <= 8, "Invalid choice channel range."
channel -= 1
channel_status = self.get_status_channels()[channel]
return channel_status
    def _get_channel_mask(self, channel, set_unset):
        def channel_mask_from_channel_list(channels_enabled):
            # One bit per channel, e.g. channels (1, 4, 5) -> 0b00011001 == 25.
            channel_mask = 0
            for b in channels_enabled:
                channel_mask |= 1 << (b - 1)
            return channel_mask

        set_unset = set_unset.lower()
        assert set_unset in ("set", "unset"), (
            "Invalid `set_unset` choice %s." % set_unset
        )
        if isinstance(channel, str):
            channel = channel.lower()
        if channel == "all" or channel == -1:
            channel_mask = 0xFF
        elif channel in range(1, 9):
            channel_mask = 1 << (channel - 1)
        elif isinstance(channel, collections.abc.Iterable):
            channel_mask = channel_mask_from_channel_list(channel)
        else:
            raise TypeError("Invalid `channel` choice.")
        if set_unset == "unset":
            channel_mask ^= 0xFF
        return channel_mask
def _set_unset_channel(self, channel, set_unset):
self._channel_mask = self._get_channel_mask(channel, set_unset)
self._set_value(
self._channel_mask,
self.idq801Lib.TDC_enableChannels,
self.get_mask_channels,
)
return self._channel_mask
def set_channel(self, channel):
"""Choose which channels to enable.
Options include:
* -1 or 'all' for (all channels).
* A single number for channel to be enabled.
* An iterable containing the channels
to be enables. e.g. (1,4,5)
* Default is no channels are enabled.
"""
return self._set_unset_channel(channel, "set")
def unset_channel(self, channel):
"""Choose which channels to disable.
Options include:
* -1 or 'all' for (all channels).
* A single number for channel to be disabled.
* An iterable containing the channels
to be disables. e.g. (1,4,5)
* Default is no channels are disabled.
"""
return self._set_unset_channel(channel, "unset")
def get_coincidence_window_bins(self):
cm, cw, ew = self._get_device_params()
return cw.value
def get_coincidence_window_ns(self):
bin = self.get_timebase()
return bin * self.get_coincidence_window_bins() * 1e9
def set_coincidence_window_bins(self, coincidence_window_bins):
coincidence_window_bins = int(coincidence_window_bins)
if not 0 < coincidence_window_bins <= 65535:
raise ValueError(
"The chosen number of coincidence \
window bins is not in the range (0,65535]."
)
self._set_value(
coincidence_window_bins,
self.idq801Lib.TDC_setCoincidenceWindow,
self.get_coincidence_window_bins,
)
def set_coincidence_window_ns(self, coincidence_window_ns):
bin = self.get_timebase()
coincidence_window_bins = int(coincidence_window_ns * 1e-9 / bin)
return self.set_coincidence_window_bins(coincidence_window_bins)
def get_integration_time(self):
cm, cw, ew = self._get_device_params()
return ew.value
def freeze_buffers(self):
self.idq801Lib.TDC_freezeBuffers(True)
def unfreeze_buffers(self):
self.idq801Lib.TDC_freezeBuffers(False)
def set_integration_time(self, window_time_ms):
window_time_ms = round(window_time_ms)
if self._processing == self._processing_dict["i"]:
if not 0 < window_time_ms <= 65535:
raise ValueError(
"The chosen exposure window is not \
in the range (0,65535]. Can't do more than 65.5s \
integration time internally."
)
self._set_value(
                window_time_ms,
self.idq801Lib.TDC_setExposureTime,
self.get_integration_time,
)
def get_data_lost_status(self):
"""Returns true if data is being lost, and false
if data is not being lost.
"""
# Get the status of the lost latch.
lost = ct.c_int32()
self.idq801Lib.TDC_getDataLost(ct.byref(lost))
latch = lost.value
# Calls the function again to clear the lost latch.
self.idq801Lib.TDC_getDataLost(ct.byref(lost))
return latch
def get_timestamp_buffer_size(self):
size = ct.c_int32()
self.idq801Lib.TDC_getTimestampBufferSize(ct.byref(size))
return size.value
def set_timestamp_buffer_size(self, size):
        """`size` is the number of timestamps that
        the counter will store. Range is 1 -> 1000000.
"""
self._set_value(
size,
self.idq801Lib.TDC_setTimestampBufferSize,
self.get_timestamp_buffer_size,
)
def get_timestamps(self, clear_retrieved_timestamps=True, trim_time_s=None):
"""
Gets all the time stamps in the buffer and returns
a dictionary corresponding to the timestamps in each
channel.
args:
clear_retrieved_timestamps(bool): Clears the timestamp
buffer of the IDQ801 after reading.
            trim_time_s(float, None): The duration of timestamps, in
                seconds, counted from the first timestamp, to keep.
                If `None`, all timestamps are returned. Multiple
                channels are all trimmed starting from the lowest
                timestamp of all the channels combined.
returns:
dict: A dictionary containing numpy arrays with the
timestamps of each channel. The time from the
last calling of this function is also returned
in the dictionary.
"""
if self.get_timestamp_buffer_size() == 0:
raise RuntimeError(
"The timestamp buffer size is 0. \
Can't get timestamps. Need to set the timestamp \
buffer."
)
r = ct.c_int32(clear_retrieved_timestamps)
ts = (ct.c_int64 * self.get_timestamp_buffer_size())()
c = (ct.c_int8 * self.get_timestamp_buffer_size())()
v = ct.c_int32()
self.idq801Lib.TDC_getLastTimestamps(r, ts, c, ct.byref(v))
time_read = time.time()
time_diff = time_read - self._time_last_get_timestamps
self._time_last_get_timestamps = time_read
channel = np.frombuffer(c, dtype=np.int8)
        # Map 0-based hardware channel numbers to 1-based labels; skip channels
        # that are not enabled in the current channel mask (the original
        # enumerate-based labelling drifted when a lower channel was disabled).
        enabled = [i for i in range(4) if self._channel_mask & (1 << i)]
        timestamps = np.frombuffer(ts, dtype=np.int64)
        timestamps_masked = {
            str(i + 1): timestamps[channel == i] for i in enabled
        }
timestamps_masked.update((k, v[v > 0]) for k, v in timestamps_masked.items())
last_counts = []
if trim_time_s:
for timestamps in timestamps_masked.values():
if timestamps.size:
first_count = timestamps[0]
last_counts.append(
first_count + int(trim_time_s / self.get_timebase() + 0.5)
)
if len(last_counts):
last_count = np.min(last_counts)
for channel, timestamps in timestamps_masked.items():
if timestamps.size:
                    last_idx = np.searchsorted(timestamps, last_count, "right")
                    timestamps_masked[channel] = timestamps[:last_idx]
timestamps_masked["time_diff"] = time_diff
return timestamps_masked
def _get_coins(self, timestamps_1, timestamps_2, method="2"):
t2 = np.array(timestamps_2, dtype=np.int64)
assert method in ("1", "2"), "Invalid method chosen."
if method == "1":
t1 = np.empty(len(timestamps_1) + 2, dtype=np.int64)
t1[0] = 0
t1[-1] = np.iinfo(np.int64).max
t1[1:-1] = timestamps_1
t2_pos = np.searchsorted(t1, t2)
t1_pos_forw = t2_pos
t1_pos_back = t2_pos - 1
t1_pos_back[t1_pos_back == -1] = 0
dt_forw = np.abs(t1[t1_pos_forw] - t2) <= self.get_coincidence_window_bins()
dt_back = np.abs(t1[t1_pos_back] - t2) <= self.get_coincidence_window_bins()
coin_forw_args = dt_forw.nonzero()[0]
coin_back_args = dt_back.nonzero()[0]
coins_forw = np.c_[t1_pos_forw[coin_forw_args] - 1, coin_forw_args]
coins_back = np.c_[t1_pos_back[coin_back_args] - 1, coin_back_args]
coins = np.vstack((coins_back, coins_forw))
elif method == "2":
t1 = np.array(timestamps_1, dtype=np.int64)
l = np.searchsorted(t1, t2 - self.get_coincidence_window_bins() / 2)
r = np.searchsorted(t1, t2 + self.get_coincidence_window_bins() / 2)
args = np.where(l != r)[0]
coins = np.c_[r[args], args]
return coins
def get_coin_counts(
self, coin_channels, accidentals_delay_ns=None, trim_time_s=None
):
bin = self.get_timebase()
timestamps = self.get_timestamps(
clear_retrieved_timestamps=True, trim_time_s=trim_time_s
)
time_diff = timestamps["time_diff"]
timestamps.pop("time_diff", None)
coin_counts = {}
acc_counts = {}
# Get singles counts
for c in coin_channels:
if str(c) in timestamps:
coin_counts[str(c)] = len(timestamps[str(c)])
else:
coin_counts[str(c)] = 0
coin_combinations = list(it.combinations(coin_channels, 2))
for c in coin_combinations:
# Get coincidence counts
if str(c[0]) in timestamps and str(c[1]) in timestamps:
coin_counts[str(c[0]) + "/" + str(c[1])] = len(
self._get_coins(timestamps[str(c[0])], timestamps[str(c[1])])
)
else:
coin_counts[str(c[0]) + "/" + str(c[1])] = 0
        if accidentals_delay_ns is not None:
accidentals_delay_bin = int(accidentals_delay_ns * 1e-9 / bin)
for c in coin_combinations:
# Get accidental counts
if str(c[0]) in timestamps and str(c[1]) in timestamps:
acc_counts[str(c[0]) + "/" + str(c[1])] = len(
self._get_coins(
timestamps[str(c[0])],
timestamps[str(c[1])] + accidentals_delay_bin,
)
)
else:
acc_counts[str(c[0]) + "/" + str(c[1])] = 0
return coin_counts, acc_counts, timestamps
def scan_channel_delay(
self, coin_channels, scan_channel, scan_range_ns, integration_time=1.0
):
"""
Scans channel delay electronically - integrates once then applies delays to the timestamps to find coins
Args:
coin_channels: channels to look at coins
scan_channel: channel to scan
scan_range_ns: +/- range of delay in ns
integration_time: initial integration time
Returns: max coin reading, delay in ns of the max, all coin counts, delay range
"""
current_delays_bins = self.get_channel_delays_bins()
self.set_channel_delays_ns({str(coin_channels[0]): 0, str(coin_channels[1]): 0})
bin = self.get_timebase()
self.get_timestamps()
time.sleep(integration_time)
original_timestamps = self.get_timestamps()
delay_range = range(-scan_range_ns, scan_range_ns + 1)
coin_counts = np.zeros(len(delay_range))
timestamps = copy.deepcopy(original_timestamps)
for idd, d in enumerate(delay_range):
timestamps[str(scan_channel)] = copy.deepcopy(
original_timestamps[str(scan_channel)]
) + int(d * 1e-9 / bin)
coin_counts[idd] = len(
self._get_coins(
timestamps[str(coin_channels[0])], timestamps[str(coin_channels[1])]
)
)
print(
"delay channel = %s, delay = %s ns, coin counts = %s"
% (scan_channel, d, int(coin_counts[idd]))
)
max_coin = np.max(coin_counts)
max_coin_delay = delay_range[np.argmax(coin_counts)]
self.set_channel_delays_bins(current_delays_bins)
return max_coin, max_coin_delay, coin_counts, delay_range
def get_timestamps_continuous(self, seconds=-1):
"""Runs `gets_timestamps` continuously in a separate
thread for `seconds` amount of seconds in a loop.
If seconds == -1, it doesn't timeout. Returns a
thread object that can be stopped and started.
"""
time.sleep(self._wait_for_settings)
clear_retrieved_timestamps = True
t = ThreadStoppable(
self.get_timestamps, seconds, True, args=(clear_retrieved_timestamps,)
)
return t
def write_timestamps_to_file(self):
"""Writes the timestamps in the buffer to a
file.
"""
timestamp_dir = "Timestamps"
if not os.path.isdir(self._data_directory + "/" + timestamp_dir):
os.mkdir(self._data_directory + "/" + timestamp_dir)
filename_prefix = (
self._data_directory + "/" + timestamp_dir + "/" + "timestamp_channel_"
)
filenames = [filename_prefix + str(i) + ".dat" for i in range(1, 9)]
for fn in filenames:
if not os.path.exists(fn):
open(fn, "w").close()
ts = self.get_timestamps(clear_retrieved_timestamps=True)
for i, fn in enumerate(filenames):
with open(fn, "a") as fs:
try:
for t in ts[str(i + 1)]:
fs.write(str(t) + "\n")
except KeyError:
pass
def write_timestamps_to_file_continuous(self, seconds=-1):
"""Runs `write_timestamps_to_file` continuously in a separate
thread for `seconds` amount of seconds in a loop. If
seconds == -1, it doesn't timeout. Returns a thread object
that can be stopped and started.
"""
time.sleep(self._wait_for_settings)
t = ThreadStoppable(self.write_timestamps_to_file, seconds)
return t
def get_counters(self):
"""Returns a list of the most recent value of
of the counters.
"""
counters = (ct.c_int32 * 19)()
self.idq801Lib.TDC_getCoincCounters(counters, None)
return list(counters)
def get_counters_continuous(self, seconds=-1):
"""Runs `get_counters` continuously in a separate thread for
`seconds` amount of seconds in a loop. If seconds == -1,
it doesn't timeout. Returns a thread object that can be
stopped and started.
"""
time.sleep(self._wait_for_settings)
t = ThreadStoppable(self.get_counters, seconds, True)
return t
def write_counters_to_file(self, filename="counters.dat"):
"""Writes the most recent values of the internal
counters and coincidence counters to a file
named `filename`.
"""
fn = self._data_directory + "/" + filename
if not os.path.exists(fn):
with open(fn, "w") as fs:
header = (
"1,2,3,4,5,6,7,8,1/2,1/3,1/4,2/3,2/4,3/4,"
"1/2/3,1/2/4,1/3/4,2/3/4,1/2/3/4"
)
fs.write("#" + header + "\n")
counters = self.get_counters()
counters_str = ",".join([str(c) for c in counters])
with open(fn, "a") as fs:
fs.write(counters_str + "\n")
def write_counters_to_file_continuous(self, seconds=-1, filename="counters.dat"):
"""Runs `write_counters_to_file` continuously in a separate
thread for `seconds` amount of seconds in a loop. If
seconds == -1, it doesn't timeout. Returns a thread
object that can be stopped and started.
"""
time.sleep(self._wait_for_settings)
t = ThreadStoppable(
self.write_counters_to_file, seconds, False, args=(filename,)
)
return t
def _get_channel_delays(self):
channels = range(8)
channels = (ct.c_int32 * len(channels))(*channels)
self.idq801Lib.TDC_getChannelDelays(channels)
return channels
def get_channel_delays_bins(self):
return list(self._get_channel_delays())
def get_channel_delays_ns(self):
bin = self.get_timebase()
delays_bins = list(self._get_channel_delays())
return [d * 1e9 * bin for d in delays_bins]
def set_channel_delays_bins(self, delays_bins):
delays = (ct.c_int * len(delays_bins))(*delays_bins)
return self._set_value(
delays, self.idq801Lib.TDC_setChannelDelays, self._get_channel_delays
)
def set_channel_delays_ns(self, delays_ns_dict):
"""
Set channel delays in ns. The delays are in a dictionary.
Args:
delays_ns_dict:
Returns:
"""
delays_ns = self.get_channel_delays_ns()
for channel in delays_ns_dict.keys():
self.channel_delays[str(channel)] = delays_ns[int(channel) - 1]
delays_ns[int(channel) - 1] = delays_ns_dict[str(channel)]
bin = self.get_timebase()
delays_bins = [int(d * 1e-9 / bin) for d in delays_ns]
return self.set_channel_delays_bins(delays_bins)
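
def _demo_coincidence_search():
    """Hardware-free sketch (added, not part of the original driver) of the
    searchsorted test used by Idq801._get_coins(method="2"): each t2 timestamp
    is bracketed by binary searches of t1 within +/- half a coincidence
    window; a non-empty bracket marks a coincidence."""
    t1 = np.array([10, 50, 90], dtype=np.int64)
    t2 = np.array([12, 49, 200], dtype=np.int64)
    window_bins = 10
    left = np.searchsorted(t1, t2 - window_bins / 2)
    right = np.searchsorted(t1, t2 + window_bins / 2)
    print(np.count_nonzero(left != right))  # -> 2 (12~10 and 49~50 coincide)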
def main():
idq801 = Idq801()
idq801.clean_data_directory()
idq801.set_channel((1, 2))
# t1 = idq801.write_counters_to_file_continuous(2)
# t2 = idq801.write_timestamps_to_file_continuous(2)
#
if __name__ == "__main__":
main()
```
#### File: plab/power_meters/agilent_lightwave_connection.py
```python
import pygpib as gpib
import serial as ser
class AgilentLightWaveConnection:
def __init__(self, serial_port=None, gpib_num=None, gpib_dev_num=None):
assert serial_port or (gpib_num and gpib_dev_num)
if gpib_num and gpib_dev_num:
self._dev = gpib.dev(gpib_num, gpib_dev_num)
self._gpib_used = True
elif serial_port:
self._dev = ser.Serial("/dev/" + serial_port, 38400)
self._gpib_used = False
def _write(self, cmd):
if self._gpib_used:
gpib.write(self._dev, cmd)
else:
self._dev.write(cmd.encode())
def _read(self, num_bytes=100):
if self._gpib_used:
data = gpib.read(self._dev, num_bytes)
else:
data = self._dev.readline(num_bytes)
return data.decode("ascii")
def _read_raw(self, num_bytes=100):
if self._gpib_used:
data = gpib.read(self._dev, num_bytes)
else:
data = self._dev.read(num_bytes)
return data
def _query(self, cmd, num_bytes=100):
self._write(cmd)
data = self._read(num_bytes)
return data
def _query_raw(self, cmd, num_bytes=100):
self._write(cmd)
data = self._read_raw(num_bytes)
return data
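
# Usage sketch (added; hypothetical GPIB addresses, requires an instrument).
# "*IDN?" is the standard IEEE-488.2 identification query.
if __name__ == "__main__":
    conn = AgilentLightWaveConnection(gpib_num=0, gpib_dev_num=20)
    print(conn._query("*IDN?"))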
```
#### File: plab/smu/sweep_current.py
```python
from typing import Iterable, Union, Optional
from time import strftime, localtime
import pandas as pd
import numpy as np
from tqdm import tqdm
import qontrol
from plab.config import logger, CONFIG
from plab.measurement import measurement, Measurement
from plab.smu.smu_control import smu_control
@measurement
def sweep_current(
imin: float = 0, imax: float = 50e-3, steps: int = 20, n: int = 1
) -> pd.DataFrame:
"""Sweep current and measure voltage. works only for q8iv
Args:
imin: min current
imax: max current
steps: number of steps
        n: number of channels to sweep, or an iterable of channel indices
"""
    q = smu_qontrol()  # same device handle used by get_current/zero_voltage below
    currents = np.linspace(imin, imax, steps)
    df = pd.DataFrame(dict(i=currents))
    if isinstance(n, int):
        channels = range(n)
    else:
        channels = n
    for channel in channels:
        voltages = np.zeros_like(currents)
        # set all channels to zero
        q.v[:] = 0
        for j, current in enumerate(currents):
            q.i[channel] = float(current)
            voltages[j] = q.v[channel]
        # the original loop overwrote `currents` in place, destroying the
        # sweep points for every channel after the first
        df[f"v_{channel}"] = voltages
    return df
def get_current(channel: int, voltage: float) -> float:
"""Sets voltage for a channel and returns measured current.
Args:
channel:
voltage:
"""
q = smu_qontrol()
q.v[channel] = float(voltage)
return q.i[channel]
def zero_voltage() -> None:
"""Sets all voltage channels to zero."""
q = smu_qontrol()
q.v[:] = 0
return
if __name__ == "__main__":
zero_voltage()
# print(get_current(62, 0.1))
# m = sweep_voltage(vmax=3, channels=(1,))
# m.write()
```
#### File: stages/luminos_stage/tla_constants.py
```python
from .zaber import serial as zs
from collections import OrderedDict
_instruction_fields = (
"Instruction Name",
"Command #",
"Command Data",
"Command Type",
"Reply Data",
)
_instruction_fields_key = OrderedDict(
zip(_instruction_fields, range(len(_instruction_fields)))
)
commands = (
("Reset", "0", "Ignored", "Command", "None"),
("Home", "1", "Ignored", "Command", "Final position (in this case 0)"),
("Renumber", "2", "Ignored", "Command", "Device Id"),
("Move Tracking", "8", "n/a", "Reply", "Tracking Position"),
("Limit Active", "9", "n/a", "Reply", "Final Position"),
("Manual Move Tracking", "10", "n/a", "Reply", "Tracking Position"),
("Store Current Position", "16", "Address", "Command", "Address"),
("Return Stored Position", "17", "Address", "Command", "Stored Position"),
("Move To Stored Position", "18", "Address", "Command", "Final Position"),
("Move Absolute", "20", "Absolute Position", "Command", "Final Position"),
("Move Relative", "21", "Relative Position", "Command", "Final Position"),
("Move At Constant Speed", "22", "Speed", "Command", "Speed"),
("Stop", "23", "Ignored", "Command", "Final Position"),
("Read Or Write Memory", "35", "Data", "Command", "Data"),
("Restore Settings", "36", "Peripheral Id", "Command", "Peripheral Id"),
("Set Microstep Resolution", "37", "Microsteps", "Setting", "Microsteps"),
("Set Running Current", "38", "Value", "Setting", "Value"),
("Set Hold Current", "39", "Value", "Setting", "Value"),
("Set Device Mode", "40", "Mode", "Setting", "Mode"),
("Set Home Speed", "41", "Speed", "Setting", "Speed"),
("Set Target Speed", "42", "Speed", "Setting", "Speed"),
("Set Acceleration", "43", "Acceleration", "Setting", "Acceleration"),
("Set Maximum Position", "44", "Range", "Setting", "Range"),
("Set Current Position", "45", "New Position", "Setting", "New Position"),
("Set Maximum Relative Move", "46", "Range", "Setting", "Range"),
("Set Home Offset", "47", "Offset", "Setting", "Offset"),
("Set Alias Number", "48", "Alias Number", "Setting", "Alias Number"),
("Set Lock State", "49", "Lock Status", "Command", "Lock Status"),
("Return Device Id", "50", "Ignored", "Read-Only Setting", "Device Id"),
("Return Firmware Version", "51", "Ignored", "Read-Only Setting", "Version"),
("Return Power Supply Voltage", "52", "Ignored", "Read-Only Setting", "Voltage"),
("Return Setting", "53", "Setting Number", "Command", "Setting Value"),
("Return Status", "54", "Ignored", "Read-Only Setting", "Status"),
("Echo Data", "55", "Data", "Command", "Data"),
("Return Current Position", "60", "Ignored", "Read-Only Setting", "Position"),
("Error", "255", "n/a", "Reply", "Error Code"),
)
def get_command_names():
command_names = tuple(c[0] for c in commands)
return command_names
def get_command_full(name):
command_names = get_command_names()
idx = command_names.index(name)
return commands[idx]
def get_command_number(name):
command_full = get_command_full(name)
idx = _instruction_fields_key["Command #"]
return command_full[idx]
def get_command_data(name):
command_full = get_command_full(name)
idx = _instruction_fields_key["Command Data"]
return command_full[idx]
def get_command_type(name):
command_full = get_command_full(name)
idx = _instruction_fields_key["Command Type"]
return command_full[idx]
def get_reply_data(name):
command_full = get_command_full(name)
idx = _instruction_fields_key["Reply Data"]
return command_full[idx]
def binary_command(device_index, command_name, command_data=None):
command_number = get_command_number(command_name)
if command_data is None:
cd = -1
assert (
get_command_data(command_name) == "Ignored"
), "No data given for a command that requires data."
else:
cd = command_data
command = zs.BinaryCommand(device_index, int(command_number), int(cd))
return command
def send_command(port, device_index, command_name, command_data=None):
bc = binary_command(device_index, command_name, command_data)
# Clear the buffer before sending command to avoid unexpected responses.
bytes_in_buffer = port._ser.in_waiting
if bytes_in_buffer:
port._ser.read(bytes_in_buffer)
port.write(bc)
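
# Usage sketch (added; assumes the vendored zaber.serial module exposes
# BinarySerial and that the stage sits on /dev/ttyUSB0 as device number 1):
if __name__ == "__main__":
    port = zs.BinarySerial("/dev/ttyUSB0")
    send_command(port, 1, "Home")
    send_command(port, 1, "Move Absolute", 100000)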
```
#### File: stages/newport_stage/newport_picomotor.py
```python
import time
from .. import stage as st
from ... import usb_device as usb_dev
class PicomotorSystem:
def __init__(self, usb):
self._usb = usb
def get_product_id(self):
return self._usb.write_read("*IDN?\n\r")
def wait_scan_done(self, poll_time_ms=50.0, timeout_ms=5000.0):
retries = int(round(timeout_ms / poll_time_ms))
for _ in range(retries):
status = self._usb.write_read("SD?\n\r").strip()
if status == "0":
time.sleep(poll_time_ms / 1000.0)
elif status == "1":
break
else:
raise RuntimeError("Invalid device reply.")
return status
def resolve_address_conflicts(self):
self._usb.write("SC2\n\r")
return self.wait_scan_done()
def get_controllers_scan(self):
self._usb.write("SC0\n\r")
self.wait_scan_done()
scan = self._usb.write_read("SC?\n\r")
scan_bits = bin(int(scan))
return scan_bits
class PicomotorStages(st.Stages3):
def __init__(
self,
axis_controller_dict_input=None,
axis_controller_dict_chip=None,
axis_controller_dict_output=None,
C1=None,
C2=None,
c1_c2_distance_mask_um=None,
update_position_absolute=100.0,
filename=None,
x_axis_motor="x",
y_axis_motor="y",
z_axis_motor="z",
resolve_address_conflicts=False,
):
PicomotorStages._usb = usb_dev.UsbDevice(0x104D, 0x4000)
self.picomotor_system = PicomotorSystem(PicomotorStages._usb)
if resolve_address_conflicts:
self.picomotor_system.resolve_address_conflicts()
# todo check this works!!
stages_dict = {}
self.input = PicomotorStage(
axis_controller_dict_input,
C1=C1,
C2=C2,
c1_c2_distance_mask_um=c1_c2_distance_mask_um,
update_position_absolute=update_position_absolute,
filename=filename,
reverse_axis_x=False,
reverse_axis_y=False,
reverse_axis_z=False,
x_axis_motor=x_axis_motor,
y_axis_motor=y_axis_motor,
z_axis_motor=z_axis_motor,
resolve_address_conflicts=resolve_address_conflicts,
picomotor_system=self.picomotor_system,
PicomotorStage_usb=PicomotorStages._usb,
)
stages_dict["input"] = self.input
self.chip = PicomotorStage(
axis_controller_dict_chip,
C1=C1,
C2=C2,
c1_c2_distance_mask_um=c1_c2_distance_mask_um,
update_position_absolute=update_position_absolute,
filename=filename,
reverse_axis_x=False,
reverse_axis_y=False,
reverse_axis_z=False,
z_axis_motor="x",
resolve_address_conflicts=resolve_address_conflicts,
picomotor_system=self.picomotor_system,
PicomotorStage_usb=PicomotorStages._usb,
)
stages_dict["chip"] = self.chip
self.output = PicomotorStage(
axis_controller_dict_output,
C1=C1,
C2=C2,
c1_c2_distance_mask_um=c1_c2_distance_mask_um,
update_position_absolute=update_position_absolute,
filename=filename,
reverse_axis_x=True,
reverse_axis_y=False,
reverse_axis_z=False,
x_axis_motor=x_axis_motor,
y_axis_motor=y_axis_motor,
z_axis_motor=z_axis_motor,
resolve_address_conflicts=resolve_address_conflicts,
picomotor_system=self.picomotor_system,
PicomotorStage_usb=PicomotorStages._usb,
)
stages_dict["output"] = self.output
super().__init__(stages_dict=stages_dict, filename=filename)
class PicomotorStage(st.Stage):
def __init__(
self,
axis_controller_dict,
C1=None,
C2=None,
c1_c2_distance_mask_um=None,
update_position_absolute=100.0,
filename=None,
reverse_axis_x=False,
reverse_axis_y=False,
reverse_axis_z=False,
x_axis_motor="x",
y_axis_motor="y",
z_axis_motor="z",
resolve_address_conflicts=False,
picomotor_system=None,
PicomotorStage_usb=None,
):
# if not PicomotorStage._usb:
# print('connecting to usb')
# PicomotorStage._usb = usb_dev.UsbDevice(0x104d, 0x4000)
# PicomotorStage.picomotor_system = PicomotorSystem(PicomotorStage._usb)
# if resolve_address_conflicts:
# PicomotorStage.picomotor_system.resolve_address_conflicts()
if not picomotor_system and not PicomotorStage_usb:
try:
PicomotorStage._usb = usb_dev.UsbDevice(0x104D, 0x4000)
self.picomotor_system = PicomotorSystem(PicomotorStage._usb)
if resolve_address_conflicts:
self.picomotor_system.resolve_address_conflicts()
        except Exception:
            print("USB connection failed.")
else:
PicomotorStage._usb = PicomotorStage_usb
self.picomotor_system = picomotor_system
axes_dict = {}
for axis in axis_controller_dict:
try:
axes_dict[axis] = PicomotorAxisABC(
PicomotorStage._usb,
axis_controller_dict[axis][0],
motor_number=axis_controller_dict[axis][1],
update_position_absolute=update_position_absolute,
)
            except Exception:
                print("Could not connect axis %r." % axis)
if ("A" in axes_dict.keys()) and ("B" in axes_dict.keys()):
axes_dict["y"] = PicomotorYAxis(
axes_dict["A"], axes_dict["B"], reverse_axis_y
)
if ("APrime" in axes_dict.keys()) and ("BPrime" in axes_dict.keys()):
axes_dict["z"] = PicomotorZAxis(
axes_dict["APrime"], axes_dict["BPrime"], reverse_axis_z
)
if "C" in axes_dict.keys():
axes_dict["x"] = PicomotorXAxis(axes_dict["C"], reverse_axis_x)
super().__init__(
axes_dict=axes_dict,
C1=C1,
C2=C2,
c1_c2_distance_mask_um=c1_c2_distance_mask_um,
reverse_axis_x=reverse_axis_x,
reverse_axis_y=reverse_axis_y,
reverse_axis_z=reverse_axis_z,
x_axis_motor=x_axis_motor,
y_axis_motor=y_axis_motor,
z_axis_motor=z_axis_motor,
filename=filename,
)
@staticmethod
def _write(usb, data):
return usb.write(data)
@staticmethod
def _read(usb):
return usb.read().strip()
def _write_motor(self, command, motor_number):
controller_number = (
self._controller_number_ab
if motor_number in (1, 2, 3, 4)
else self._controller_number_c
)
data = "%i>%i%s\n\r" % (controller_number, motor_number, command)
self._write(self._usb, data)
def _write_read_motor(self, command, motor_number):
self._write_motor(command, motor_number)
return self._read(self._usb)
class PicomotorAxis(st.Axis):
def __init__(
self,
usb,
controller_number,
motor_number,
reverse_axis=False,
update_position_absolute=100,
):
motor_number = int(motor_number)
assert motor_number in (1, 2, 3, 4, 5), (
"Invalid axis `%i` given." % motor_number
)
self._usb = usb
self._controller_number = controller_number
self._motor_number = motor_number
super().__init__(
reverse_axis=reverse_axis, update_position_absolute=update_position_absolute
)
def _write_motor(self, command):
data = "%i>%i%s\n\r" % (self._controller_number, self._motor_number, command)
return PicomotorStage._write(self._usb, data)
def _write_read_motor(self, command):
self._write_motor(command)
return self._read()
def _read(self):
return PicomotorStage._read(self._usb)[2:]
def wait_motor_moved(self, poll_time_ms=50.0, timeout_ms=60.0e3):
retries = int(round(timeout_ms / poll_time_ms))
for _ in range(retries):
status = self._write_read_motor("MD?")
if status == "0":
time.sleep(poll_time_ms / 1000.0)
elif status == "1":
break
else:
raise RuntimeError("Invalid device reply.")
return status
class PicomotorAxisLinear(PicomotorAxis, st.AxisLinear):
def __init__(
self,
usb,
controller_number,
motor_number,
reverse_axis=False,
update_position_absolute=100,
):
self._step_size_nm = 10.0
super().__init__(
usb,
controller_number,
motor_number=motor_number,
reverse_axis=reverse_axis,
update_position_absolute=update_position_absolute,
)
def _move_abs_nm(self, distance_from_home_nm):
steps = distance_from_home_nm / self._step_size_nm
self._write_motor("PA%i" % steps)
self.wait_motor_moved()
r = self.get_current_position_nm()
return r
def _get_current_position_nm(self):
r = float(self._write_read_motor("PA?")) * self._step_size_nm
return r
def _get_home_position(self):
r = float(self._write_read_motor("DH?"))
return r
def _set_home_position(self):
self._write_motor("DH")
# r = self._get_home_position()
# return r
class PicomotorAxisABC(PicomotorAxisLinear):
def __init__(
self,
usb,
controller_number,
motor_number,
reverse_axis=False,
update_position_absolute=100,
):
super().__init__(
usb,
controller_number,
motor_number=motor_number,
reverse_axis=reverse_axis,
update_position_absolute=update_position_absolute,
)
@property
def _position_absolute_min_nm(self):
return 0.0
@property
def _position_absolute_max_nm(self):
return 1.0e9
class PicomotorYZAxis(st.AxisY):
def __init__(self, a_axis, b_axis, reverse_axis=False):
self._a_axis = a_axis
self._b_axis = b_axis
super().__init__(reverse_axis)
# todo add set home feature - finds hardware limit and sets that to zero
# todo use 2d scan to find two spots
# todo check step size with fibre array
# todo optimise spot size separation with 2d scans
# todo add chip with manual x axis
# todo couple fibre to chip
# todo add all home function
# todo add centre all axes function
@property
def _position_absolute_min_nm(self):
return 0.0
@property
def _position_absolute_max_nm(self):
return 3.0e6
def _move_abs_nm(self, distance_from_home_nm):
step_nm = 10.0e3
curr_pos_nm = self._get_current_position_nm()
total_rel_move_nm = abs(distance_from_home_nm - curr_pos_nm)
while total_rel_move_nm >= step_nm:
self._a_axis.move_rel_nm(step_nm)
self._b_axis.move_rel_nm(step_nm)
total_rel_move_nm -= step_nm
r1 = self._a_axis._move_abs_nm(distance_from_home_nm)
r2 = self._b_axis._move_abs_nm(distance_from_home_nm)
r = 0.5 * (r1 + r2)
return r
def _get_current_position_nm(self):
r1 = self._a_axis.get_current_position_nm()
r2 = self._b_axis.get_current_position_nm()
r = 0.5 * (r1 + r2)
return r
def get_home_position(self):
r1 = self._a_axis._get_home_position()
r2 = self._b_axis._get_home_position()
return r1, r2
def set_home_position(self):
r1 = self._a_axis._set_home_position()
r2 = self._b_axis._set_home_position()
return r1, r2
class PicomotorYAxis(PicomotorYZAxis):
def __init__(self, a_axis, b_axis, reverse_axis=False):
PicomotorYZAxis.__init__(self, a_axis, b_axis, reverse_axis)
class PicomotorZAxis(PicomotorYZAxis):
def __init__(self, a_prime_axis, b_prime_axis, reverse_axis=False):
PicomotorYZAxis.__init__(self, a_prime_axis, b_prime_axis, reverse_axis)
class PicomotorXAxis(st.AxisX):
def __init__(self, c_axis, reverse_axis=False):
self._c_axis = c_axis
super().__init__(reverse_axis)
@property
def _position_absolute_min_nm(self):
return 0.0
@property
def _position_absolute_max_nm(self):
return 3.0e6
@property
def _movement_compensation(self):
return [150 / 127.0, 102 / 127.0]
def _move_abs_nm(self, distance_from_home_nm):
return self._c_axis._move_abs_nm(distance_from_home_nm)
def _get_current_position_nm(self):
return self._c_axis._get_current_position_nm()
def get_home_position(self):
return self._c_axis._get_home_position()
def set_home_position(self):
r = self._c_axis._set_home_position()
return r
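
# Usage sketch (added; hypothetical controller/motor numbering, requires
# hardware). Each axis_controller_dict entry maps an axis name to a
# (controller_number, motor_number) pair; "A"/"B" pair into a virtual y axis
# and "C" becomes x, per the wiring logic in PicomotorStage.__init__ above.
if __name__ == "__main__":
    stage = PicomotorStage({"A": (1, 1), "B": (1, 2), "C": (2, 1)})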
```
#### File: thorpy/comm/port.py
```python
import serial
import select
import threading
import time
import queue
import weakref
class Port:
# List to make "quasi-singletons"
static_port_list = weakref.WeakValueDictionary()
static_port_list_lock = threading.RLock()
def __init__(self, port, sn):
super().__init__()
self._lock = threading.RLock()
self._lock.acquire()
self._buffer = b""
self._unhandled_messages = queue.Queue()
self._serial = serial.Serial(
port,
baudrate=115200,
bytesize=serial.EIGHTBITS,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
rtscts=True,
)
        # The Thorlabs protocol description recommends toggling the RTS pin and resetting
        # the input and output buffers. This makes sense, since the internal controller of
        # the Thorlabs device does not know what data has reached us through the FTDI
        # RS232 converter. Similarly, we do not know the state of the controller's input
        # buffer. By toggling the RTS pin, we let the controller know that it should flush
        # its caches.
self._serial.setRTS(1)
time.sleep(0.05)
self._serial.reset_input_buffer()
self._serial.reset_output_buffer()
time.sleep(0.05)
self._serial.setRTS(0)
self._port = port
self._debug = False
from ..message import (
MGMSG_HW_NO_FLASH_PROGRAMMING,
MGMSG_HW_REQ_INFO,
MGMSG_HW_START_UPDATEMSGS,
MGMSG_HW_STOP_UPDATEMSGS,
)
self.send_message(MGMSG_HW_NO_FLASH_PROGRAMMING(source=0x01, dest=0x50))
# Now that the input buffer of the device is flushed, we can tell it to stop reporting updates and
# then flush away any remaining messages.
self.send_message(MGMSG_HW_STOP_UPDATEMSGS())
time.sleep(0.5)
self._serial.reset_input_buffer()
self._info_message = None
while self._info_message is None:
self.send_message(MGMSG_HW_REQ_INFO())
try:
self._info_message = self._recv_message(blocking=True)
except: # TODO: Be more specific on what we catch here
self._buffer = b""
self._serial.flushInput()
        self._serial_number = int(sn) if sn is not None else None
        if self._serial_number is None:
            self._serial_number = self._info_message["serial_number"]
time.sleep(1)
self.send_message(MGMSG_HW_START_UPDATEMSGS(update_rate=1))
self._stages = weakref.WeakValueDictionary()
self._lock.release()
self.daemon = False
# print("Constructed: {0!r}".format(self))
self._thread_main = threading.current_thread()
self._thread_worker_initialized = threading.Event()
self._thread_worker = threading.Thread(
target=Port.run, args=(weakref.proxy(self),)
)
self._thread_worker.start()
self._thread_worker_initialized.wait()
# def __del__(self):
# #print("Destructed: {0!r}".format(self))
# self._thread_worker.join()
def send_message(self, msg):
with self._lock:
if self._debug:
print("> ", msg)
self._serial.write(bytes(msg))
@staticmethod
def run(self):
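        # Deliberately a staticmethod called with a weakref.proxy to the Port
        # (see __init__), so this worker thread never holds a strong reference
        # that would keep the Port object alive.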
try:
self._continue = True
timeout = 1
self._thread_worker_initialized.set()
while self._thread_main.is_alive():
# Trick to avoid holding lock
r, w, e = select.select([self._serial], [], [], timeout)
msg = self._recv_message(False)
if msg is not None:
message_handled = self._handle_message(msg)
if not message_handled:
print("Unhandled message", msg)
self._unhandled_messages.put(msg)
self._serial.close()
except ReferenceError:
pass # Object deleted
def _recv(self, l=1, blocking=False):
with self._lock:
if not blocking:
r, w, e = select.select([self._serial], [], [], 0)
if len(r) == 0:
return 0
new_data = self._serial.read(l)
self._buffer += new_data
return len(new_data)
def fileno(self):
with self._lock:
return self._serial.fileno()
def recv_message(self, block=True, timeout=None):
try:
return self._unhandled_messages.get(block, timeout)
except queue.Empty:
return None
def _recv_message(self, blocking=False, timeout=None):
with self._lock:
from ..message import Message, IncompleteMessageException
msg = None
start_time = time.time()
while msg is None:
try:
msg = Message.parse(self._buffer)
except IncompleteMessageException:
msg = None
length = self._recv(blocking=blocking)
# We were not able to read data
if length == 0 and not blocking:
return None
# Passed timeout...
if (
blocking
and timeout is not None
and start_time < time.time() - timeout
):
return None
self._buffer = self._buffer[len(msg) :]
if self._debug:
print("< ", msg)
return msg
@property
def serial_number(self):
return self._serial_number
@property
def channel_count(self):
# _info_message is immutable, no worries about lock
return self._info_message["nchs"]
def _handle_message(self, msg):
return False
def __repr__(self):
return "{0}({1!r},{2!r})".format(
self.__class__.__name__, self._port, self._serial_number
)
def get_stages(self, only_chan_idents=None):
return {}
@classmethod
def create(cls, port, sn):
with Port.static_port_list_lock:
try:
return Port.static_port_list[port]
except KeyError:
# Do we have a BSC103 or BBD10x? These are card slot controllers
if sn[:2] in ("70", "73", "94"):
p = CardSlotPort(port, sn)
else:
p = SingleControllerPort(port, sn)
Port.static_port_list[port] = p
return p
class CardSlotPort(Port):
def __init__(self, port, sn=None):
raise NotImplementedError("Card slot ports are not supported yet")
class SingleControllerPort(Port):
def __init__(self, port, sn=None):
super().__init__(port, sn)
if self.channel_count != 1:
raise NotImplementedError("Multiple channel devices are not supported yet")
def send_message(self, msg):
msg.source = 0x01
msg.dest = 0x50
super().send_message(msg)
    def _recv_message(self, blocking=False, timeout=None):
        msg = super()._recv_message(blocking, timeout)
if msg is None:
return msg
# assert msg.source == 0x50
# assert msg.dest == 0x01
return msg
def _handle_message(self, msg):
# Is it a channel message? In that case the stage object has to handle it
if "chan_ident" in msg:
try:
return self._stages[msg["chan_ident"]]._handle_message(msg)
except KeyError:
# Keep messages to stages that don't exist
return False
# This is a system message, handle it ourselves
# Not handled
return False
def get_stages(self, only_chan_idents=None):
from thorpy.stages import stage_name_from_get_hw_info, GenericStage
if only_chan_idents is None:
only_chan_idents = [0x01]
assert len(only_chan_idents) <= 1
assert all(x == 1 for x in only_chan_idents)
ret = dict([(k, self._stages.get(k, None)) for k in only_chan_idents])
for k in only_chan_idents:
if ret[k] is None:
ret[k] = GenericStage(
self, 0x01, stage_name_from_get_hw_info(self._info_message)
)
self._stages[k] = ret[k]
return ret
```
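A minimal usage sketch for the `Port` factory above. The device path and serial number are placeholders, and this only does something useful with a Thorlabs controller attached:
```python
from thorpy.comm.port import Port

# Placeholder path and serial number -- substitute your own device.
p = Port.create("/dev/ttyUSB0", "83000001")
print(p.serial_number, p.channel_count)

# Single-controller ports expose their one stage under chan_ident 0x01.
stage = p.get_stages()[0x01]

# Messages no stage handled end up in the unhandled-message queue.
print(p.recv_message(block=True, timeout=1.0))
```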
#### File: photonic-coupling-drivers/plab/usb_device.py
```python
import usb.core
import usb.util
class UsbDevice:
def __init__(self, id_vendor, id_product):
dev = usb.core.find(idVendor=id_vendor, idProduct=id_product)
self._dev = dev
if dev is None:
raise ValueError("Device not found")
# set the active configuration. With no arguments, the first
# configuration will be the active one
dev.set_configuration()
# get an endpoint instance
cfg = dev.get_active_configuration()
intf = cfg[(0, 0)]
self._ep_out = usb.util.find_descriptor(
intf,
custom_match=lambda e: usb.util.endpoint_direction(e.bEndpointAddress)
== usb.util.ENDPOINT_OUT,
)
self._ep_in = usb.util.find_descriptor(
intf,
custom_match=lambda e: usb.util.endpoint_direction(e.bEndpointAddress)
== usb.util.ENDPOINT_IN,
)
assert self._ep_out is not None
assert self._ep_in is not None
def write(self, data):
return self._dev.write(self._ep_out.bEndpointAddress, data)
def read_raw(self):
data_raw = self._dev.read(
self._ep_in.bEndpointAddress, self._ep_in.wMaxPacketSize
)
return data_raw
def read(self):
data_raw = self.read_raw()
data = "".join([chr(d) for d in data_raw])
return data
def write_read(self, data):
self.write(data)
return self.read()
def write_read_raw(self, data):
self.write(data)
return self.read_raw()
``` |
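A short usage sketch for `UsbDevice` (assuming the `plab` package is importable). The vendor/product IDs are placeholders -- look yours up with `lsusb` -- and the `*IDN?` round-trip assumes a SCPI-speaking instrument:
```python
from plab.usb_device import UsbDevice

# Placeholder IDs -- replace with your instrument's idVendor/idProduct.
dev = UsbDevice(id_vendor=0x1313, id_product=0x8078)
print(dev.write_read(b"*IDN?\n"))
```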
{
"source": "joamatab/rectpack",
"score": 3
} |
#### File: rectpack/tests/test_decimal.py
```python
from unittest import TestCase
import random
import math
import decimal
from rectpack.guillotine import GuillotineBssfSas
from rectpack.maxrects import MaxRectsBssf
from rectpack.skyline import SkylineMwfWm
from rectpack.packer import PackerBFF, float2dec
def random_rectangle(max_side, min_side):
width = decimal.Decimal(str(round(random.uniform(max_side, min_side), 1)))
height = decimal.Decimal(str(round(random.uniform(max_side, min_side), 1)))
return (width, height)
def random_rectangle_generator(num, max_side=30, min_side=8):
"""
Generate a random rectangle list with dimensions within
specified parameters.
Arguments:
max_dim (number): Max rectangle side length
min_side (number): Min rectangle side length
max_ratio (number):
Returns:
Rectangle list
"""
return (random_rectangle(max_side, min_side) for i in range(0, num))
class TestDecimal(TestCase):
"""
Test all work when using decimal instead of integers
"""
def setUp(self):
self.rectangles = [r for r in random_rectangle_generator(500)]
self.bins = [(80, 80, 1), (100, 100, 30)]
def setup_packer(self, packer):
for b in self.bins:
packer.add_bin(b[0], b[1], b[2])
for r in self.rectangles:
packer.add_rect(r[0], r[1])
def test_maxrects(self):
m = PackerBFF(pack_algo=MaxRectsBssf, rotation=True)
self.setup_packer(m)
m.pack()
m.validate_packing()
self.assertTrue(len(m)>1)
def test_guillotine(self):
g = PackerBFF(pack_algo=GuillotineBssfSas, rotation=True)
self.setup_packer(g)
g.pack()
g.validate_packing()
self.assertTrue(len(g)>1)
def test_skyline(self):
s = PackerBFF(pack_algo=SkylineMwfWm, rotation=True)
self.setup_packer(s)
s.pack()
s.validate_packing()
self.assertTrue(len(s)>1)
class TestFloat2Dec(TestCase):
def test_rounding(self):
"""Test rounding is allways up"""
d = float2dec(3.141511, 3)
self.assertEqual(decimal.Decimal('3.142'), d)
d = float2dec(3.444444, 3)
self.assertEqual(decimal.Decimal('3.445'), d)
d = float2dec(3.243234, 0)
self.assertEqual(decimal.Decimal('4'), d)
d = float2dec(7.234234, 0)
self.assertEqual(decimal.Decimal('8'), d)
def test_decimal_places(self):
"""Test rounded to correct decimal place"""
d = float2dec(4.2, 3)
self.assertEqual(decimal.Decimal('4.201'), d)
d = float2dec(5.7, 3)
self.assertEqual(decimal.Decimal('5.701'), d)
d = float2dec(2.2, 4)
self.assertEqual(decimal.Decimal('2.2001'), d)
def test_integer(self):
"""Test integers are also converted, but not rounded"""
d = float2dec(7, 3)
self.assertEqual(decimal.Decimal('7.000'), d)
d = float2dec(2, 3)
self.assertEqual(decimal.Decimal('2.000'), d)
def test_not_rounded(self):
"""Test floats are only rounded when needed"""
d = float2dec(3.0, 3)
self.assertEqual(decimal.Decimal('3.000'), d)
```
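For reference, a self-contained sketch (assuming `rectpack` is installed) of the decimal workflow these tests exercise:
```python
import decimal
from rectpack.packer import PackerBFF, float2dec
from rectpack.maxrects import MaxRectsBssf

packer = PackerBFF(pack_algo=MaxRectsBssf, rotation=True)
packer.add_bin(100, 100, 1)
for w, h in [(30.5, 20.2), (15.1, 45.7), (60.0, 60.0)]:
    # float2dec always rounds *up*, so the real float rectangle is never
    # larger than the decimal-sized slot the packer reserves for it.
    packer.add_rect(float2dec(w, 3), float2dec(h, 3))
packer.pack()
packer.validate_packing()
print(len(packer), "bin(s) used")
```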
#### File: rectpack/tests/test_maxrects.py
```python
from unittest import TestCase
from rectpack.geometry import Rectangle, Point
import rectpack.maxrects as maxrects
class TestMaxRects(TestCase):
def test_init(self):
# Test initial maximal rectangle
m = maxrects.MaxRects(20, 50)
self.assertEqual(m._max_rects[0], Rectangle(0, 0, 20, 50))
self.assertEqual(m.width, 20)
self.assertEqual(m.height, 50)
def test_reset(self):
# Test _max_rects and rectangles is initialized
m = maxrects.MaxRects(100, 200)
self.assertTrue(m.add_rect(30, 30))
self.assertTrue(m.add_rect(50, 50))
self.assertEqual(len(m), 2)
m.reset()
self.assertEqual(len(m), 0)
self.assertEqual(len(m._max_rects), 1)
self.assertEqual(len(m.rectangles), 0)
self.assertEqual(m._max_rects[0], Rectangle(0, 0, 100, 200))
def test_add_rect(self):
# Basic packing test.
m = maxrects.MaxRects(200, 100)
self.assertEqual(m.add_rect(50, 30), Rectangle(0, 0, 50, 30))
self.assertEqual(len(m._max_rects), 2)
self.assertEqual(m.add_rect(70, 200), Rectangle(0, 30, 200, 70))
self.assertEqual(len(m._max_rects), 1)
self.assertEqual(m.add_rect(20, 20), Rectangle(50, 0, 20, 20))
self.assertEqual(len(m._max_rects), 2)
self.assertEqual(m.add_rect(50, 50), None)
self.assertEqual(m.add_rect(30, 100), Rectangle(70, 0, 100, 30))
        # Test with rotation disabled
m = maxrects.MaxRects(200, 50, rot=False)
self.assertEqual(m.add_rect(40, 80), None)
m = maxrects.MaxRects(200, 50, rot=True)
self.assertEqual(m.add_rect(40, 80), Rectangle(0, 0, 80, 40))
def test_remove_duplicates(self):
        # Test max rects fully contained in others are removed as duplicates
m = maxrects.MaxRects(100, 100)
rect1 = Rectangle(0, 0, 60, 40)
rect2 = Rectangle(30, 20, 60, 40)
rect3 = Rectangle(35, 25, 10, 10)
rect4 = Rectangle(90, 90, 10, 10)
m._max_rects = [rect1, rect2, rect3, rect4]
m._remove_duplicates()
self.assertTrue(rect1 in m._max_rects)
self.assertTrue(rect2 in m._max_rects)
self.assertTrue(rect4 in m._max_rects)
self.assertEqual(len(m._max_rects), 3)
# Test with only one max_rect
m = maxrects.MaxRects(100, 100)
m._remove_duplicates()
self.assertEqual(len(m._max_rects), 1)
def test_iter(self):
m = maxrects.MaxRects(100, 100)
self.assertTrue(m.add_rect(10, 15))
self.assertTrue(m.add_rect(40, 40))
rectangles = []
for r in m:
rectangles.append(r)
self.assertTrue(Rectangle(0, 0, 10, 15) in rectangles)
self.assertTrue(Rectangle(10, 0, 40, 40) in rectangles)
self.assertEqual(len(rectangles), 2)
def test_fitness(self):
mr = maxrects.MaxRects(100, 200, rot=True)
m = maxrects.MaxRects(100, 200, rot=False)
self.assertEqual(m.fitness(200, 100), None)
self.assertEqual(mr.fitness(200, 100), 0)
self.assertEqual(m.fitness(100, 100), 0)
def test_split(self):
m = maxrects.MaxRects(100, 100)
m.add_rect(20, 20)
self.assertTrue(Rectangle(20, 0, 80, 100) in m._max_rects)
self.assertTrue(Rectangle(0, 20, 100, 80) in m._max_rects)
self.assertEqual(len(m._max_rects), 2)
m._split(Rectangle(20, 20, 20, 20))
self.assertEqual(len(m._max_rects), 6)
m._remove_duplicates()
self.assertEqual(len(m._max_rects), 4)
def test_generate_splits(self):
m = maxrects.MaxRects(40, 40)
mr = Rectangle(20, 20, 40, 40)
# The same
rects = m._generate_splits(mr, Rectangle(20, 20, 40, 40))
self.assertFalse(rects)
# Contained
rects = m._generate_splits(mr, Rectangle(0, 0, 80, 80))
self.assertFalse(rects)
# Center
rects = m._generate_splits(mr, Rectangle(30, 30, 10, 10))
self.assertTrue(Rectangle(20, 20, 10, 40) in rects) # Left
self.assertTrue(Rectangle(20, 20, 40, 10) in rects) # Bottom
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertTrue(Rectangle(20, 40, 40, 20) in rects) # Top
self.assertEqual(len(rects), 4)
# Top - Center
rects = m._generate_splits(mr, Rectangle(30, 30, 10, 30))
self.assertTrue(Rectangle(20, 20, 40, 10) in rects) # Bottom
self.assertTrue(Rectangle(20, 20, 10, 40) in rects) # Left
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 3)
rects = m._generate_splits(mr, Rectangle(30, 30, 10, 100))
self.assertTrue(Rectangle(20, 20, 40, 10) in rects) # Bottom
self.assertTrue(Rectangle(20, 20, 10, 40) in rects) # Left
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 3)
# Bottom - Center
rects = m._generate_splits(mr, Rectangle(30, 20, 10, 10))
self.assertTrue(Rectangle(20, 30, 40, 30) in rects) # Top
self.assertTrue(Rectangle(20, 20, 10, 40) in rects) # Left
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 3)
rects = m._generate_splits(mr, Rectangle(30, 0, 10, 30))
self.assertTrue(Rectangle(20, 30, 40, 30) in rects) # Top
self.assertTrue(Rectangle(20, 20, 10, 40) in rects) # Left
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 3)
# Left - Center
rects = m._generate_splits(mr, Rectangle(20, 30, 20, 10))
self.assertTrue(Rectangle(20, 40, 40, 20) in rects) # Top
self.assertTrue(Rectangle(20, 20, 40, 10) in rects) # Bottom
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 3)
rects = m._generate_splits(mr, Rectangle(0, 30, 40, 10))
self.assertTrue(Rectangle(20, 40, 40, 20) in rects) # Top
self.assertTrue(Rectangle(20, 20, 40, 10) in rects) # Bottom
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 3)
# Right - Center
rects = m._generate_splits(mr, Rectangle(40, 30, 20, 20))
self.assertTrue(Rectangle(20, 50, 40, 10) in rects) # Top
self.assertTrue(Rectangle(20, 20, 40, 10) in rects) # Bottom
self.assertTrue(Rectangle(20, 20, 20, 40) in rects) # Left
self.assertEqual(len(rects), 3)
rects = m._generate_splits(mr, Rectangle(40, 30, 90, 20))
self.assertTrue(Rectangle(20, 50, 40, 10) in rects) # Top
self.assertTrue(Rectangle(20, 20, 40, 10) in rects) # Bottom
self.assertTrue(Rectangle(20, 20, 20, 40) in rects) # Left
self.assertEqual(len(rects), 3)
# Top - Right
rects = m._generate_splits(mr, Rectangle(40, 40, 20, 20))
self.assertTrue(Rectangle(20, 20, 20, 40) in rects) # Left
self.assertTrue(Rectangle(20, 20, 40, 20) in rects) # Bottom
self.assertEqual(len(rects), 2)
rects = m._generate_splits(mr, Rectangle(40, 40, 30, 30))
self.assertTrue(Rectangle(20, 20, 20, 40) in rects) # Left
self.assertTrue(Rectangle(20, 20, 40, 20) in rects) # Bottom
self.assertEqual(len(rects), 2)
# Bottom - Left
rects = m._generate_splits(mr, Rectangle(20, 20, 20, 20))
self.assertTrue(Rectangle(20, 40, 40, 20) in rects) # Top
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 2)
rects = m._generate_splits(mr, Rectangle(10, 10, 30, 30))
self.assertTrue(Rectangle(20, 40, 40, 20) in rects) # Top
self.assertTrue(Rectangle(40, 20, 20, 40) in rects) # Right
self.assertEqual(len(rects), 2)
# Top - Full
rects = m._generate_splits(mr, Rectangle(20, 40, 40, 20))
self.assertTrue(Rectangle(20, 20, 40, 20) in rects)
self.assertEqual(len(rects), 1)
rects = m._generate_splits(mr, Rectangle(10, 40, 60, 60))
self.assertTrue(Rectangle(20, 20, 40, 20) in rects)
self.assertEqual(len(rects), 1)
# Bottom - Full
rects = m._generate_splits(mr, Rectangle(20, 20, 40, 20))
self.assertTrue(Rectangle(20, 40, 40, 20) in rects)
self.assertEqual(len(rects), 1)
rects = m._generate_splits(mr, Rectangle(10, 10, 50, 30))
self.assertTrue(Rectangle(20, 40, 40, 20) in rects)
self.assertEqual(len(rects), 1)
# Right - Full
rects = m._generate_splits(mr, Rectangle(40, 20, 20, 40))
self.assertTrue(Rectangle(20, 20, 20, 40) in rects)
self.assertEqual(len(rects), 1)
rects = m._generate_splits(mr, Rectangle(40, 10, 30, 60))
self.assertTrue(Rectangle(20, 20, 20, 40) in rects)
self.assertEqual(len(rects), 1)
# Left - Full
rects = m._generate_splits(mr, Rectangle(20, 20, 20, 40))
self.assertTrue(Rectangle(40, 20, 20, 40) in rects)
self.assertEqual(len(rects), 1)
rects = m._generate_splits(mr, Rectangle(10, 10, 30, 60))
self.assertTrue(Rectangle(40, 20, 20, 40) in rects)
self.assertEqual(len(rects), 1)
def test_getitem(self):
m = maxrects.MaxRectsBl(100, 100, rot=False)
m.add_rect(40, 40)
m.add_rect(20, 20)
m.add_rect(60, 40)
self.assertEqual(m[0], Rectangle(0, 0, 40, 40))
self.assertEqual(m[1], Rectangle(40, 0, 20, 20))
self.assertEqual(m[2], Rectangle(40, 20, 60, 40))
self.assertEqual(m[-1], Rectangle(40, 20, 60, 40))
self.assertEqual(m[1:],
[Rectangle(40, 0, 20, 20), Rectangle(40, 20, 60, 40)])
class TestMaxRectBL(TestCase):
def test_select_position(self):
m = maxrects.MaxRectsBl(100, 100, rot=False)
self.assertEqual(m.add_rect(40, 40), Rectangle(0, 0, 40, 40))
self.assertFalse(m.add_rect(100, 100))
self.assertEqual(m.add_rect(20, 20), Rectangle(40, 0, 20, 20))
self.assertEqual(m.add_rect(60, 40), Rectangle(40, 20, 60, 40))
class TestMaxRectBAF(TestCase):
def test_rect_fitness(self):
m = maxrects.MaxRectsBaf(100, 100, rot=False)
self.assertEqual(m.add_rect(60, 10), Rectangle(0, 0, 60, 10))
self.assertTrue(m.fitness(40, 40) < m.fitness(50, 50))
self.assertTrue(m.fitness(40, 40) < m.fitness(35, 35))
self.assertEqual(m.add_rect(40, 40), Rectangle(60, 0, 40, 40))
class TestMaxRectBLSF(TestCase):
def test_rect_fitnesss(self):
m = maxrects.MaxRectsBlsf(100, 100, rot=False)
self.assertEqual(m.add_rect(60, 10), Rectangle(0, 0, 60, 10))
self.assertTrue(m.fitness(30, 90) < m.fitness(40, 89))
self.assertTrue(m.fitness(99, 10) < m.fitness(99, 5))
class TestMaxRectBSSF(TestCase):
def test_rect_fitness(self):
m = maxrects.MaxRectsBssf(100, 100, rot=False)
self.assertEqual(m.add_rect(60, 10), Rectangle(0, 0, 60, 10))
self.assertTrue(m.fitness(30, 91) > m.fitness(30, 92))
self.assertTrue(m.fitness(38, 91) < m.fitness(30, 92))
self.assertTrue(m.fitness(38, 91) > m.fitness(40, 92))
```
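A direct-use sketch of the algorithm classes tested above, without going through a packer:
```python
from rectpack.geometry import Rectangle
from rectpack.maxrects import MaxRectsBssf

m = MaxRectsBssf(100, 100, rot=True)
for w, h in [(60, 10), (40, 40), (30, 30)]:
    placed = m.add_rect(w, h)  # Rectangle(x, y, w, h), or None if it won't fit
    print((w, h), "->", placed)

print(len(m), "rectangles placed")
for rect in m:  # iterate placements, as in test_iter above
    assert isinstance(rect, Rectangle)
```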
#### File: rectpack/tests/test_skyline.py
```python
from unittest import TestCase
from rectpack.geometry import Rectangle
import rectpack.skyline as skyline
class TestSkyline(TestCase):
def test_init(self):
s = skyline.SkylineBl(100, 100, rot=False)
rect1 = s.add_rect(30, 30)
rect2 = s.add_rect(100, 70)
self.assertEqual(rect1, Rectangle(0, 0, 30, 30))
self.assertEqual(rect2, Rectangle(0, 30, 100, 70))
def test_rotation(self):
# Test rotation is enabled by default
s = skyline.SkylineBl(100, 10)
rect1 = s.add_rect(10, 100)
self.assertEqual(rect1, Rectangle(0, 0, 100, 10))
# Test rotation can be disabled
s = skyline.SkylineBl(100, 10, rot=False)
rect1 = s.add_rect(10, 100)
self.assertEqual(rect1, None)
def test_waste_management(self):
# Generate one wasted section
s = skyline.SkylineBlWm(100, 100, rot=False)
rect1 = s.add_rect(30, 30)
rect2 = s.add_rect(100, 70)
self.assertEqual(rect1, Rectangle(0, 0, 30, 30))
self.assertEqual(rect2, Rectangle(0, 30, 100, 70))
self.assertEqual(len(s), 2)
# Add rectangle that only fits into wasted section
self.assertEqual(s.add_rect(71, 30), None)
self.assertEqual(s.add_rect(70, 31), None)
rect3 = s.add_rect(70, 30)
self.assertEqual(rect3, Rectangle(30, 0, 70, 30))
self.assertEqual(len(s), 3)
rect4 = s.add_rect(70, 30)
self.assertEqual(rect4, None)
# Test the same without waste management
s = skyline.SkylineBl(100, 100)
rect1 = s.add_rect(30, 30)
rect2 = s.add_rect(100, 70)
self.assertEqual(rect1, Rectangle(0, 0, 30, 30))
self.assertEqual(rect2, Rectangle(0, 30, 100, 70))
self.assertEqual(s.add_rect(70, 30), None)
self.assertEqual(len(s), 2)
# Test waste supports rectangle rotation
s = skyline.SkylineBlWm(100, 100, rot=False)
sr = skyline.SkylineBlWm(100, 100, rot=True)
self.assertEqual(s.add_rect(30, 30), Rectangle(0, 0, 30, 30))
self.assertEqual(s.add_rect(100, 70), Rectangle(0, 30, 100, 70))
self.assertEqual(sr.add_rect(30, 30), Rectangle(0, 0, 30, 30))
self.assertEqual(sr.add_rect(100, 70), Rectangle(0, 30, 100, 70))
self.assertEqual(s.add_rect(30, 70), None)
self.assertEqual(sr.add_rect(30, 70), Rectangle(30, 0, 70, 30))
# Try with more than one wasted section
s = skyline.SkylineBlWm(100, 100, rot=False)
self.assertEqual(s.add_rect(40, 50), Rectangle(0, 0, 40, 50))
self.assertEqual(s.add_rect(20, 30), Rectangle(40, 0, 20, 30))
self.assertEqual(s.add_rect(20, 10), Rectangle(60, 0, 20, 10))
self.assertEqual(s.add_rect(100, 50), Rectangle(0, 50, 100, 50))
# Next ones only fit if waste is working
self.assertEqual(s.add_rect(20, 20), Rectangle(40, 30, 20, 20))
self.assertEqual(s.add_rect(20, 30), Rectangle(60, 10, 20, 30))
self.assertEqual(s.add_rect(20, 50), Rectangle(80, 0, 20, 50))
self.assertEqual(s.add_rect(20, 5), Rectangle(60, 40, 20, 5))
self.assertEqual(s.add_rect(20, 5), Rectangle(60, 45, 20, 5))
self.assertEqual(s.add_rect(1, 1), None)
def test_iter(self):
# Test correctly calculated when waste is enabled
s = skyline.SkylineBlWm(100, 100)
self.assertTrue(s.add_rect(50, 50))
self.assertTrue(s.add_rect(100, 50))
self.assertEqual(len([r for r in s]), 2)
self.assertTrue(s.add_rect(40, 40))
self.assertEqual(len([r for r in s]), 3)
def test_len(self):
s = skyline.SkylineBlWm(100, 100)
self.assertTrue(s.add_rect(50, 50))
self.assertTrue(s.add_rect(100, 50))
self.assertEqual(len(s), 2)
self.assertTrue(s.add_rect(50, 50))
self.assertEqual(len(s), 3)
def test_skyline1(self):
"""Test skyline for complex positions is generated correctly
+---------------------------+
| |
+---------------------+ |
| 4 | |
+----------------+----+ |
| 3 | |
+----------+-----+ +-----+
| | | |
| | | 5 |
| 1 | | |
| | | |
| +----------+-----+
| | 2 |
+----------+----------------+
"""
s = skyline.SkylineMwf(100, 100, rot=False)
rect1 = s.add_rect(40, 60)
rect2 = s.add_rect(60, 10)
rect3 = s.add_rect(70, 20)
rect4 = s.add_rect(80, 20)
rect5 = s.add_rect(20, 40)
self.assertEqual(rect1, Rectangle(0, 0, 40, 60))
self.assertEqual(rect2, Rectangle(40, 0, 60, 10))
self.assertEqual(rect3, Rectangle(0, 60, 70, 20))
self.assertEqual(rect4, Rectangle(0, 80, 80, 20))
self.assertEqual(rect5, Rectangle(80, 10, 20, 40))
def test_skyline2(self):
"""
+---------------------------+
| |
| |
| +--------------------+
| | 4 |
| | |
+----+ +-----------+--------+
| | | |
| | | |
| | | |
| | | |
| 1 | | 3 |
| | | |
| | | |
| +-------------+ |
| | 2 | |
+----+-------------+--------+
"""
s = skyline.SkylineMwfl(100, 100, rot=False)
rect1 = s.add_rect(20, 60)
rect2 = s.add_rect(50, 10)
rect3 = s.add_rect(30, 60)
rect4 = s.add_rect(70, 20)
self.assertEqual(rect1, Rectangle(0, 0, 20, 60))
self.assertEqual(rect2, Rectangle(20, 0, 50, 10))
self.assertEqual(rect3, Rectangle(70, 0, 30, 60))
self.assertEqual(rect4, Rectangle(30, 60, 70, 20))
def test_skyline3(self):
"""
+-------------------+-------+
| 10 | |
+----+----+---------+ |
| | | | 9 |
| | w2 | 8 | |
| 6 | | | |
| | +---------+-------+
| +----+ |
+----+ | 5 |
| | 7 +---+-------------+
| | |w1 | |
| +--------+ 4 |
| 1 | | |
| | 2 +----------+--+
| | | 3 | |
+----+--------+----------+--+
"""
s = skyline.SkylineMwf(100, 100, rot=False)
rect1 = s.add_rect(20, 50)
rect2 = s.add_rect(30, 30)
rect3 = s.add_rect(40, 10)
rect4 = s.add_rect(50, 40)
rect5 = s.add_rect(70, 20)
rect6 = s.add_rect(20, 40)
rect7 = s.add_rect(10, 30)
rect8 = s.add_rect(40, 20)
rect9 = s.add_rect(30, 30)
rect10 = s.add_rect(70, 10)
w1 = s.add_rect(20, 20)
w2 = s.add_rect(10, 30)
self.assertEqual(rect1, Rectangle(0, 0, 20, 50))
self.assertEqual(rect2, Rectangle(20, 0, 30, 30))
self.assertEqual(rect3, Rectangle(50, 0, 40, 10))
self.assertEqual(rect4, Rectangle(50, 10, 50, 40))
self.assertEqual(rect5, Rectangle(30, 50, 70, 20))
self.assertEqual(rect6, Rectangle(0, 50, 20, 40))
self.assertEqual(rect7, Rectangle(20, 30, 10, 30))
self.assertEqual(rect8, Rectangle(30, 70, 40, 20))
self.assertEqual(rect9, Rectangle(70, 70, 30, 30))
self.assertEqual(rect10, Rectangle(0, 90, 70, 10))
self.assertEqual(w1, None)
self.assertEqual(w2, None)
# With Waste management enabled
s = skyline.SkylineMwfWm(100, 100, rot=False)
rect1 = s.add_rect(20, 50)
rect2 = s.add_rect(30, 30)
rect3 = s.add_rect(40, 10)
rect4 = s.add_rect(50, 40)
rect5 = s.add_rect(70, 20)
rect6 = s.add_rect(20, 40)
rect7 = s.add_rect(10, 30)
rect8 = s.add_rect(40, 20)
rect9 = s.add_rect(30, 30)
rect10 = s.add_rect(70, 10)
w1 = s.add_rect(20, 20)
w2 = s.add_rect(10, 30)
self.assertEqual(rect1, Rectangle(0, 0, 20, 50))
self.assertEqual(rect2, Rectangle(20, 0, 30, 30))
self.assertEqual(rect3, Rectangle(50, 0, 40, 10))
self.assertEqual(rect4, Rectangle(50, 10, 50, 40))
self.assertEqual(rect5, Rectangle(30, 50, 70, 20))
self.assertEqual(rect6, Rectangle(0, 50, 20, 40))
self.assertEqual(rect7, Rectangle(20, 30, 10, 30))
self.assertEqual(rect8, Rectangle(30, 70, 40, 20))
self.assertEqual(rect9, Rectangle(70, 70, 30, 30))
self.assertEqual(rect10, Rectangle(0, 90, 70, 10))
self.assertEqual(w1, Rectangle(30, 30, 20, 20))
self.assertEqual(w2, Rectangle(20, 60, 10, 30))
def test_skyline4(self):
"""
+---------------------+-----+
| 4 | 5 |
| | |
+----+----------------------+
| | | |
| | | |
| | | |
| | w1 | |
| 1 | | 3 |
| | | |
| | | |
| | | |
| +----------------+ |
| | | |
| | 2 | |
| | | |
+----+----------------+-----+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(20, 80)
rect2 = s.add_rect(60, 20)
rect3 = s.add_rect(20, 80)
rect4 = s.add_rect(80, 20)
w1 = s.add_rect(60, 50)
rect5 = s.add_rect(20, 20)
self.assertEqual(rect1, Rectangle(0, 0, 20, 80))
self.assertEqual(rect2, Rectangle(20, 0, 60, 20))
self.assertEqual(rect3, Rectangle(80, 0, 20, 80))
self.assertEqual(rect4, Rectangle(0, 80, 80, 20))
self.assertEqual(rect5, Rectangle(80, 80, 20, 20))
self.assertEqual(w1, Rectangle(20, 20, 60, 50))
def test_skyline5(self):
"""
+------+--------------+-----+
| | | |
| 8 | 5 | |
| | | |
| +--------------------+
+------+ | |
| | | |
| | 4 | 7 |
| | | |
| | +-----+
| 1 +---------+----+ |
| | | w1 | |
| | | | 6 |
| | 2 +----+ |
| | | 3 | |
| | | | |
+------+---------+----+-----+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(20, 70)
rect2 = s.add_rect(30, 40)
rect3 = s.add_rect(20, 20)
rect4 = s.add_rect(50, 40)
rect5 = s.add_rect(50, 20)
rect6 = s.add_rect(30, 50)
rect7 = s.add_rect(20, 30)
rect8 = s.add_rect(20, 30)
w1 = s.add_rect(20, 20)
self.assertEqual(rect1, Rectangle(0, 0, 20, 70))
self.assertEqual(rect2, Rectangle(20, 0, 30, 40))
self.assertEqual(rect3, Rectangle(50, 0, 20, 20))
self.assertEqual(rect4, Rectangle(20, 40, 50, 40))
self.assertEqual(rect5, Rectangle(20, 80, 50, 20))
self.assertEqual(rect6, Rectangle(70, 0, 30, 50))
self.assertEqual(rect7, Rectangle(70, 50, 20, 30))
self.assertEqual(rect8, Rectangle(0, 70, 20, 30))
self.assertEqual(w1, Rectangle(50, 20, 20, 20))
def test_skyline6(self):
"""
+-------------+-------------+
| | |
| 4 | |
| +-------------+
| | |
+-------------* 5 |
| 3 | |
| | |
+-------------+--+----------+
| | |
| 2 | |
| | |
+----------------+----+ |
| | |
| 1 | |
| | |
+---------------------+-----+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(80, 30)
rect2 = s.add_rect(60, 20)
rect3 = s.add_rect(50, 20)
rect4 = s.add_rect(50, 30)
rect5 = s.add_rect(50, 30)
self.assertEqual(rect1, Rectangle(0, 0, 80, 30))
self.assertEqual(rect2, Rectangle(0, 30, 60, 20))
self.assertEqual(rect3, Rectangle(0, 50, 50, 20))
self.assertEqual(rect4, Rectangle(0, 70, 50, 30))
self.assertEqual(rect5, Rectangle(50, 50, 50, 30))
def test_skyline7(self):
"""
+-----------------+---------+
+-----------------+ |
| | |
| 4 | |
| | |
+-----------+-----+ |
| | | 5 |
| | | |
| w1 | | |
| | | |
| | 2 | |
| | | |
+-----------+ +---------+
| | | |
| 1 | | 3 |
| | | |
+-----------+-----+---------+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(40, 20)
rect2 = s.add_rect(20, 60)
rect3 = s.add_rect(40, 20)
rect4 = s.add_rect(60, 20)
rect5 = s.add_rect(40, 80)
w1 = s.add_rect(40, 40)
self.assertEqual(rect1, Rectangle(0, 0, 40, 20))
self.assertEqual(rect2, Rectangle(40, 0, 20, 60))
self.assertEqual(rect3, Rectangle(60, 0, 40, 20))
self.assertEqual(rect4, Rectangle(0, 60, 60, 20))
self.assertEqual(rect5, Rectangle(60, 20, 40, 80))
self.assertEqual(w1, Rectangle(0, 20, 40, 40))
def test_skyline8(self):
"""
+---------------------------+
| |
+----------------------+ |
| 4 | |
| | |
+-----------+-----+----+ |
| | | |
| | | |
| w1 | | |
| | | |
| | 2 | |
| | | |
+-----------+ | |
| | +---------+
| 1 | | 3 |
| | | |
+-----------+-----+---------+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(40, 20)
rect2 = s.add_rect(20, 60)
rect3 = s.add_rect(40, 10)
rect4 = s.add_rect(80, 20)
w1 = s.add_rect(40, 40)
self.assertEqual(rect1, Rectangle(0, 0, 40, 20))
self.assertEqual(rect2, Rectangle(40, 0, 20, 60))
self.assertEqual(rect3, Rectangle(60, 0, 40, 10))
self.assertEqual(rect4, Rectangle(0, 60, 80, 20))
self.assertEqual(w1, Rectangle(0, 20, 40, 40))
def test_skyline9(self):
"""
+---------------------------+
| |
| +---------------------+
| | 4 |
| | |
| +-----+-----+---------+
| | | |
| | | |
| | | w1 |
| | | |
| | 2 | |
| | | |
| | +---------+
| | | |
+-----------+ | 3 |
| 1 | | |
+-----------+-----+---------+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(40, 20)
rect2 = s.add_rect(20, 60)
rect3 = s.add_rect(40, 30)
rect4 = s.add_rect(80, 20)
w1 = s.add_rect(40, 30)
self.assertEqual(rect1, Rectangle(0, 0, 40, 20))
self.assertEqual(rect2, Rectangle(40, 0, 20, 60))
self.assertEqual(rect3, Rectangle(60, 0, 40, 30))
self.assertEqual(rect4, Rectangle(20, 60, 80, 20))
self.assertEqual(w1, Rectangle(60, 30, 40, 30))
def test_skyline10(self):
"""
+---------------------------+
| |
| |
| |
| |
| +----+
| | |
| | |
| | |
+----------------+ | |
| | | |
| +-----+ 3 |
| 1 | | |
| | 2 | |
| | | |
| | | |
+----------------+-----+----+
With rotation
"""
s = skyline.SkylineMwfl(100, 100, rot=True)
rect1 = s.add_rect(50, 40)
rect2 = s.add_rect(30, 30)
rect3 = s.add_rect(70, 20)
self.assertEqual(rect1, Rectangle(0, 0, 50, 40))
self.assertEqual(rect2, Rectangle(50, 0, 30, 30))
self.assertEqual(rect3, Rectangle(80, 0, 20, 70))
def test_getitem(self):
"""
Test __getitem__ works with all rectangles included waste.
+---------------------------+
| |
| +---------------------+
| | 4 |
| | |
| +-----+-----+---------+
| | | |
| | | |
| | | w1 |
| | | |
| | 2 | |
| | | |
| | +---------+
| | | |
+-----------+ | 3 |
| 1 | | |
+-----------+-----+---------+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(40, 20)
rect2 = s.add_rect(20, 60)
rect3 = s.add_rect(40, 30)
rect4 = s.add_rect(80, 20)
w1 = s.add_rect(40, 30)
self.assertEqual(s[0], Rectangle(0, 0, 40, 20))
self.assertEqual(s[1], Rectangle(40, 0, 20, 60))
self.assertEqual(s[2], Rectangle(60, 0, 40, 30))
self.assertEqual(s[3], Rectangle(20, 60, 80, 20))
self.assertEqual(s[4], Rectangle(60, 30, 40, 30))
self.assertEqual(s[-1], Rectangle(60, 30, 40, 30))
self.assertEqual(s[3:],
[Rectangle(20, 60, 80, 20), Rectangle(60, 30, 40, 30)])
class TestSkylineMwf(TestCase):
def test_init(self):
""" """
p = skyline.SkylineMwf(100, 100)
self.assertFalse(p._waste_management)
def test_fitness(self):
"""Test position wasting less space has better fitness"""
p = skyline.SkylineMwf(100, 100, rot=False)
p.add_rect(20, 20)
self.assertTrue(p.fitness(90, 10) < p.fitness(100, 10))
def test_skyline(self):
"""
+---------------------------+
| |
| |
+----+ +------------------+
| | | 5 |
| | +---+--+-----------+
| | | | |
| | | | |
| | | | |
| 1 | | | |
| | | +-----------+
| +-------+ 3| |
| | | | |
| | | | 4 |
| | 2 | | |
| | | | |
+----+-------+--+-----------+
"""
s = skyline.SkylineMwf(100, 100, rot=False)
rect1 = s.add_rect(20, 80)
rect2 = s.add_rect(20, 40)
rect3 = s.add_rect(20, 70)
rect4 = s.add_rect(40, 50)
rect5 = s.add_rect(70, 10)
self.assertEqual(rect1, Rectangle(0, 0, 20, 80))
self.assertEqual(rect2, Rectangle(20, 0, 20, 40))
self.assertEqual(rect3, Rectangle(40, 0, 20, 70))
self.assertEqual(rect4, Rectangle(60, 0, 40, 50))
self.assertEqual(rect5, Rectangle(30, 70, 70, 10))
class TestSkylineMwFwm(TestCase):
def test_init(self):
"""Test Waste management is enabled"""
p = skyline.SkylineMwfWm(100, 100)
self.assertTrue(p._waste_management)
def test_skyline(self):
"""
+---------------------------+
| |
| |
+----+ +------------------+
| | | 5 |
| | +---+--+-----------+
| | | | |
| | | | |
| | | | w1 |
| 1 | | | |
| | | +-----------+
| +-------+ 3| |
| | | | |
| | | | 4 |
| | 2 | | |
| | | | |
+----+-------+--+-----------+
"""
s = skyline.SkylineMwfWm(100, 100, rot=False)
rect1 = s.add_rect(20, 80)
rect2 = s.add_rect(20, 40)
rect3 = s.add_rect(20, 70)
rect4 = s.add_rect(40, 50)
rect5 = s.add_rect(70, 10)
w1 = s.add_rect(40, 20)
self.assertEqual(rect1, Rectangle(0, 0, 20, 80))
self.assertEqual(rect2, Rectangle(20, 0, 20, 40))
self.assertEqual(rect3, Rectangle(40, 0, 20, 70))
self.assertEqual(rect4, Rectangle(60, 0, 40, 50))
self.assertEqual(rect5, Rectangle(30, 70, 70, 10))
self.assertEqual(w1, Rectangle(60, 50, 40, 20))
class TestSkylineMwfl(TestCase):
def test_init(self):
""" """
p = skyline.SkylineMwfl(100, 100)
self.assertFalse(p._waste_management)
def test_fitness(self):
"""Test lower one has best fitness"""
p = skyline.SkylineMwfl(100, 100, rot=False)
p.add_rect(20, 20)
self.assertTrue(p.fitness(90, 10) < p.fitness(90, 20))
def test_skyline1(self):
"""
+---------------------------+
| |
| |
| |
| |
| |
| |
| +--------+
+------------------+ 3 |
| | |
| +--------+
| | |
| 1 | |
| | 2 |
| | |
| | |
+------------------+--------+
"""
s = skyline.SkylineMwfl(100, 100, rot=True)
rect1 = s.add_rect(70, 50)
rect2 = s.add_rect(40, 30)
rect3 = s.add_rect(20, 30)
self.assertEqual(rect1, Rectangle(0, 0, 70, 50))
self.assertEqual(rect2, Rectangle(70, 0, 30, 40))
self.assertEqual(rect3, Rectangle(70, 40, 30, 20))
def test_skyline2(self):
"""
+---------------------------+
| |
| |
| |
| |
| |
| |
| |
| |
| |
+-----------+---------------+
| | 3 |
| | |
| 1 +---------+-----+
| | 2 | |
| | | |
+-----------+---------+-----+
"""
s = skyline.SkylineMwfl(100, 100, rot=False)
rect1 = s.add_rect(40, 40)
rect2 = s.add_rect(40, 20)
rect3 = s.add_rect(60, 20)
self.assertEqual(rect1, Rectangle(0, 0, 40, 40))
self.assertEqual(rect2, Rectangle(40, 0, 40, 20))
self.assertEqual(rect3, Rectangle(40, 20, 60, 20))
class TestSkylineMwflWm(TestCase):
def test_init(self):
"""Test Waste management is enabled"""
p = skyline.SkylineMwflWm(100, 100)
self.assertTrue(p._waste_management)
def test_skyline(self):
"""
+---------------------------+
| |
| |
| |
| |
| |
| |
| |
| |
| |
+-----------+---------------+
| | 3 |
| | |
| 1 +---------+-----+
| | 2 | w1 |
| | | |
+-----------+---------+-----+
"""
s = skyline.SkylineMwflWm(100, 100, rot=False)
rect1 = s.add_rect(40, 40)
rect2 = s.add_rect(40, 20)
rect3 = s.add_rect(60, 20)
w1 = s.add_rect(20, 20)
self.assertEqual(rect1, Rectangle(0, 0, 40, 40))
self.assertEqual(rect2, Rectangle(40, 0, 40, 20))
self.assertEqual(rect3, Rectangle(40, 20, 60, 20))
self.assertEqual(w1, Rectangle(80, 0, 20, 20))
class TestSkylineBl(TestCase):
def test_init(self):
"""Test Waste management is disabled"""
p = skyline.SkylineBl(100, 100)
self.assertFalse(p._waste_management)
def test_fitness(self):
"""Test lower is better"""
p = skyline.SkylineBl(100, 100, rot=False)
self.assertEqual(p.fitness(100, 20), p.fitness(10, 20))
self.assertTrue(p.fitness(100, 10) < p.fitness(100, 11))
# The same but with wasted space
p = skyline.SkylineBl(100, 100, rot=False)
p.add_rect(80, 50)
self.assertEqual(p.fitness(100, 10), p.fitness(80, 10))
self.assertTrue(p.fitness(100, 10) < p.fitness(40, 20))
def test_skyline1(self):
"""
+---------------------------+
| |
| |
| |
| |
| |
| |
+-------------+-------------+
| 4 | |
| | |
+---------+---+ |
| | | 3 |
| | | |
| 1 +---+ |
| | 2 | |
| | | |
+---------+---+-------------+
Test lower positions is better than one not losing space
"""
s = skyline.SkylineBl(100, 100, rot=False)
rect1 = s.add_rect(40, 30)
rect2 = s.add_rect(10, 20)
rect3 = s.add_rect(50, 50)
rect4 = s.add_rect(50, 20)
self.assertEqual(rect1, Rectangle(0, 0, 40, 30))
self.assertEqual(rect2, Rectangle(40, 0, 10, 20))
self.assertEqual(rect3, Rectangle(50, 0, 50, 50))
self.assertEqual(rect4, Rectangle(0, 30, 50, 20))
def test_skyline2(self):
"""
+---------------------------+
| |
| |
| |
+--------------------+ |
| | |
| 4 +------+
| | 5 |
| | |
+----------------+---+------+
| | 3 |
| | |
| 1 +-----+----+
| | 2 | |
| | | |
+----------------+-----+----+
"""
s = skyline.SkylineBl(100, 100, rot=False)
rect1 = s.add_rect(50, 40)
rect2 = s.add_rect(30, 20)
rect3 = s.add_rect(50, 20)
rect4 = s.add_rect(70, 30)
rect5 = s.add_rect(20, 20)
self.assertEqual(rect1, Rectangle(0, 0, 50, 40))
self.assertEqual(rect2, Rectangle(50, 0, 30, 20))
self.assertEqual(rect3, Rectangle(50, 20, 50, 20))
self.assertEqual(rect4, Rectangle(0, 40, 70, 30))
self.assertEqual(rect5, Rectangle(70, 40, 20, 20))
class TestSkylineBlWm(TestCase):
def test_init(self):
"""Test Waste management is enabled"""
p = skyline.SkylineBlWm(100, 100)
self.assertTrue(p._waste_management)
def test_skyline1(self):
"""
+---------------------------+
| |
| |
| |
| |
+--------------------+ |
| | |
| 4 | |
| | |
| | |
+----------------+---+------+
| | 3 |
| | |
| 1 +-----+----+
| | 2 | w1 |
| | | |
+----------------+-----+----+
"""
s = skyline.SkylineBlWm(100, 100, rot=False)
rect1 = s.add_rect(50, 40)
rect2 = s.add_rect(30, 20)
rect3 = s.add_rect(50, 20)
rect4 = s.add_rect(70, 30)
w1 = s.add_rect(20, 20)
self.assertEqual(rect1, Rectangle(0, 0, 50, 40))
self.assertEqual(rect2, Rectangle(50, 0, 30, 20))
self.assertEqual(rect3, Rectangle(50, 20, 50, 20))
self.assertEqual(rect4, Rectangle(0, 40, 70, 30))
self.assertEqual(w1, Rectangle(80, 0, 20, 20))
``` |
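A compact demo of the waste-management difference the tests above verify: both packers leave a 70x30 hole next to the first rectangle, but only the `Wm` variant can fill it afterwards:
```python
from rectpack.skyline import SkylineBl, SkylineBlWm

for cls in (SkylineBl, SkylineBlWm):
    s = cls(100, 100, rot=False)
    s.add_rect(30, 30)   # leaves a 70x30 hole at (30, 0)
    s.add_rect(100, 70)  # seals the hole under the new skyline
    print(cls.__name__, s.add_rect(70, 30))
# SkylineBl   -> None
# SkylineBlWm -> Rectangle(30, 0, 70, 30)
```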
{
"source": "joamatab/sax",
"score": 2
} |
#### File: sax/nn/io.py
```python
from __future__ import annotations
__all__ = ['load_nn_weights_json', 'save_nn_weights_json', 'get_available_sizes', 'get_dense_weights_path',
'get_norm_path', 'load_nn_dense']
# Cell
#nbdev_comment from __future__ import annotations
import json
import os
import re
from typing import Callable, Dict, List, Optional, Tuple
import jax.numpy as jnp
from .core import dense, preprocess
from .utils import norm
from ..typing_ import ComplexFloat
# Cell
def load_nn_weights_json(path: str) -> Dict[str, ComplexFloat]:
"""Load json weights from given path"""
path = os.path.abspath(os.path.expanduser(path))
weights = {}
if os.path.exists(path):
with open(path, "r") as file:
for k, v in json.load(file).items():
_v = jnp.array(v, dtype=float)
assert isinstance(_v, jnp.ndarray)
weights[k] = _v
return weights
# Cell
def save_nn_weights_json(weights: Dict[str, ComplexFloat], path: str):
"""Save json weights to given path"""
path = os.path.abspath(os.path.expanduser(path))
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "w") as file:
_weights = {}
for k, v in weights.items():
v = jnp.atleast_1d(jnp.array(v))
assert isinstance(v, jnp.ndarray)
_weights[k] = v.tolist()
json.dump(_weights, file)
# Cell
def get_available_sizes(
dirpath: str,
prefix: str,
input_names: Tuple[str, ...],
output_names: Tuple[str, ...],
) -> List[Tuple[int, ...]]:
"""Get all available json weight hidden sizes given filename parameters
> Note: this function does NOT return the input size and the output size
of the neural network. ONLY the hidden sizes are reported. The input
and output sizes can easily be derived from `input_names` (after
preprocessing) and `output_names`.
"""
all_weightfiles = os.listdir(dirpath)
possible_weightfiles = (
s for s in all_weightfiles if s.endswith(f"-{'-'.join(output_names)}.json")
)
possible_weightfiles = (
s
for s in possible_weightfiles
if s.startswith(f"{prefix}-{'-'.join(input_names)}")
)
possible_weightfiles = (re.sub("[^0-9x]", "", s) for s in possible_weightfiles)
possible_weightfiles = (re.sub("^x*", "", s) for s in possible_weightfiles)
possible_weightfiles = (re.sub("x[^0-9]*$", "", s) for s in possible_weightfiles)
possible_hidden_sizes = (s.strip() for s in possible_weightfiles if s.strip())
possible_hidden_sizes = (
tuple(hs.strip() for hs in s.split("x") if hs.strip())
for s in possible_hidden_sizes
)
possible_hidden_sizes = (
tuple(int(hs) for hs in s[1:-1]) for s in possible_hidden_sizes if len(s) > 2
)
possible_hidden_sizes = sorted(
possible_hidden_sizes, key=lambda hs: (len(hs), max(hs))
)
return possible_hidden_sizes
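# Example (hypothetical filename): "dense-wl-ng-2x64x64x2-amp-phi.json" with
# prefix="dense", input_names=("wl", "ng"), output_names=("amp", "phi")
# yields the hidden-size entry (64, 64): the leading input size and trailing
# output size are stripped by the s[1:-1] slice above.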
# Cell
def get_dense_weights_path(
*sizes: int,
input_names: Optional[Tuple[str, ...]] = None,
output_names: Optional[Tuple[str, ...]] = None,
dirpath: str = "weights",
prefix: str = "dense",
preprocess=preprocess,
):
"""Create the SAX conventional path for a given weight dictionary"""
if input_names:
num_inputs = preprocess(*jnp.ones(len(input_names))).shape[0]
sizes = (num_inputs,) + sizes
if output_names:
sizes = sizes + (len(output_names),)
path = os.path.abspath(os.path.join(dirpath, prefix))
if input_names:
path = f"{path}-{'-'.join(input_names)}"
if sizes:
path = f"{path}-{'x'.join(str(s) for s in sizes)}"
if output_names:
path = f"{path}-{'-'.join(output_names)}"
return f"{path}.json"
# Cell
def get_norm_path(
*shape: int,
input_names: Optional[Tuple[str, ...]] = None,
output_names: Optional[Tuple[str, ...]] = None,
dirpath: str = "norms",
prefix: str = "norm",
preprocess=preprocess,
):
"""Create the SAX conventional path for the normalization constants"""
if input_names and output_names:
raise ValueError(
"To get the norm name, one can only specify `input_names` OR `output_names`."
)
if input_names:
num_inputs = preprocess(*jnp.ones(len(input_names))).shape[0]
shape = (num_inputs,) + shape
if output_names:
shape = shape + (len(output_names),)
path = os.path.abspath(os.path.join(dirpath, prefix))
if input_names:
path = f"{path}-{'-'.join(input_names)}"
if shape:
path = f"{path}-{'x'.join(str(s) for s in shape)}"
if output_names:
path = f"{path}-{'-'.join(output_names)}"
return f"{path}.json"
# Internal Cell
class _PartialDense:
def __init__(self, weights, x_norm, y_norm, input_names, output_names):
self.weights = weights
self.x_norm = x_norm
self.y_norm = y_norm
self.input_names = input_names
self.output_names = output_names
def __call__(self, *params: ComplexFloat) -> ComplexFloat:
return dense(self.weights, *params, x_norm=self.x_norm, y_norm=self.y_norm)
def __repr__(self):
return f"{self.__class__.__name__}{repr(self.input_names)}->{repr(self.output_names)}"
# Cell
def load_nn_dense(
*sizes: int,
input_names: Optional[Tuple[str, ...]] = None,
output_names: Optional[Tuple[str, ...]] = None,
weightprefix="dense",
weightdirpath="weights",
normdirpath="norms",
normprefix="norm",
preprocess=preprocess,
) -> Callable:
"""Load a pre-trained dense model"""
weights_path = get_dense_weights_path(
*sizes,
input_names=input_names,
output_names=output_names,
prefix=weightprefix,
dirpath=weightdirpath,
preprocess=preprocess,
)
if not os.path.exists(weights_path):
raise ValueError("Cannot find weights path for given parameters")
x_norm_path = get_norm_path(
input_names=input_names,
prefix=normprefix,
dirpath=normdirpath,
preprocess=preprocess,
)
if not os.path.exists(x_norm_path):
raise ValueError("Cannot find normalization for input parameters")
y_norm_path = get_norm_path(
output_names=output_names,
prefix=normprefix,
dirpath=normdirpath,
preprocess=preprocess,
)
    if not os.path.exists(y_norm_path):
        raise ValueError("Cannot find normalization for output parameters")
weights = load_nn_weights_json(weights_path)
x_norm_dict = load_nn_weights_json(x_norm_path)
y_norm_dict = load_nn_weights_json(y_norm_path)
x_norm = norm(x_norm_dict["mean"], x_norm_dict["std"])
y_norm = norm(y_norm_dict["mean"], y_norm_dict["std"])
partial_dense = _PartialDense(weights, x_norm, y_norm, input_names, output_names)
return partial_dense
```
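A minimal round-trip sketch for the JSON weight helpers above (writes a hypothetical `weights/demo.json`; note `load_nn_weights_json` casts everything back to float arrays):
```python
import jax.numpy as jnp
from sax.nn.io import load_nn_weights_json, save_nn_weights_json

weights = {"w0": jnp.ones((4, 2)), "b0": jnp.zeros(2)}
save_nn_weights_json(weights, "weights/demo.json")
restored = load_nn_weights_json("weights/demo.json")
print({k: v.shape for k, v in restored.items()})  # {'w0': (4, 2), 'b0': (2,)}
```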
#### File: sax/nn/loss.py
```python
from __future__ import annotations
__all__ = ['mse', 'huber_loss', 'l2_reg']
# Cell
#nbdev_comment from __future__ import annotations
from typing import Dict
import jax.numpy as jnp
from ..typing_ import ComplexFloat
# Cell
def mse(x: ComplexFloat, y: ComplexFloat) -> float:
"""mean squared error"""
return ((x - y) ** 2).mean()
# Cell
def huber_loss(x: ComplexFloat, y: ComplexFloat, delta: float=0.5) -> float:
"""huber loss"""
return ((delta ** 2) * ((1.0 + ((x - y) / delta) ** 2) ** 0.5 - 1.0)).mean()
# Cell
def l2_reg(weights: Dict[str, ComplexFloat]) -> float:
"""L2 regularization loss"""
numel = 0
loss = 0.0
for w in (v for k, v in weights.items() if k[0] in ("w", "b")):
numel = numel + w.size
loss = loss + (jnp.abs(w) ** 2).sum()
return loss / numel
```
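Note that `huber_loss` above is the smooth pseudo-Huber variant (quadratic near zero, asymptotically linear) rather than the piecewise textbook definition. A quick sanity check of all three losses:
```python
import jax.numpy as jnp
from sax.nn.loss import huber_loss, l2_reg, mse

x = jnp.array([1.0, 2.0, 3.0])
y = jnp.array([1.1, 1.9, 3.3])
print(mse(x, y))                    # ~0.0367
print(huber_loss(x, y, delta=0.5))  # penalizes the 0.3 residual less harshly
print(l2_reg({"w0": jnp.ones(4), "b0": jnp.zeros(2)}))  # 4 / 6 ~ 0.667
```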
#### File: sax/sax/utils.py
```python
from __future__ import annotations
__all__ = ['block_diag', 'clean_string', 'copy_settings', 'validate_settings', 'try_float', 'flatten_dict',
'unflatten_dict', 'get_ports', 'get_port_combinations', 'get_settings', 'grouped_interp', 'merge_dicts',
'mode_combinations', 'reciprocal', 'rename_params', 'rename_ports', 'update_settings',
'validate_not_mixedmode', 'validate_multimode', 'validate_sdict', 'get_inputs_outputs']
# Cell
#nbdev_comment from __future__ import annotations
import inspect
import re
from functools import lru_cache, partial, wraps
from typing import Any, Callable, Dict, Iterable, Iterator, Tuple, Union, cast, overload
import jax
import jax.numpy as jnp
import jax.scipy as jsp
from natsort import natsorted
from .typing_ import (
Array,
ComplexFloat,
Float,
Model,
ModelFactory,
SCoo,
SDense,
SDict,
Settings,
SType,
is_mixedmode,
is_model,
is_model_factory,
is_scoo,
is_sdense,
is_sdict,
)
# Cell
def block_diag(*arrs: Array) -> Array:
"""create block diagonal matrix with arbitrary batch dimensions """
batch_shape = arrs[0].shape[:-2]
N = 0
for arr in arrs:
if batch_shape != arr.shape[:-2]:
raise ValueError("batch dimensions for given arrays don't match.")
m, n = arr.shape[-2:]
if m != n:
raise ValueError("given arrays are not square.")
N += n
block_diag = jax.vmap(jsp.linalg.block_diag, in_axes=0, out_axes=0)(
*(arr.reshape(-1, arr.shape[-2], arr.shape[-1]) for arr in arrs)
).reshape(*batch_shape, N, N)
return block_diag
# Cell
def clean_string(s: str) -> str:
"""clean a string such that it is a valid python identifier"""
s = s.strip()
s = s.replace(".", "p") # point
s = s.replace("-", "m") # minus
s = re.sub("[^0-9a-zA-Z]", "_", s)
if s[0] in "0123456789":
s = "_" + s
return s
# Cell
def copy_settings(settings: Settings) -> Settings:
"""copy a parameter dictionary"""
return validate_settings(settings) # validation also copies
def validate_settings(settings: Settings) -> Settings:
"""Validate a parameter dictionary"""
_settings = {}
for k, v in settings.items():
if isinstance(v, dict):
_settings[k] = validate_settings(v)
else:
_settings[k] = try_float(v)
return _settings
def try_float(f: Any) -> Any:
"""try converting an object to float, return unchanged object on fail"""
try:
return jnp.asarray(f, dtype=float)
except (ValueError, TypeError):
return f
# Cell
def flatten_dict(dic: Dict[str, Any], sep: str = ",") -> Dict[str, Any]:
"""flatten a nested dictionary"""
return _flatten_dict(dic, sep=sep)
def _flatten_dict(
dic: Dict[str, Any], sep: str = ",", frozen: bool = False, parent_key: str = ""
) -> Dict[str, Any]:
items = []
for k, v in dic.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, dict):
items.extend(
_flatten_dict(v, sep=sep, frozen=frozen, parent_key=new_key).items()
)
else:
items.append((new_key, v))
return dict(items)
# Cell
def unflatten_dict(dic, sep=","):
"""unflatten a flattened dictionary """
# from: https://gist.github.com/fmder/494aaa2dd6f8c428cede
items = dict()
for k, v in dic.items():
keys = k.split(sep)
sub_items = items
for ki in keys[:-1]:
if ki in sub_items:
sub_items = sub_items[ki]
else:
sub_items[ki] = dict()
sub_items = sub_items[ki]
sub_items[keys[-1]] = v
return items
# Cell
def get_ports(S: Union[Model, SType]) -> Tuple[str, ...]:
"""get port names of a model or an stype"""
if is_model(S):
return _get_ports_from_model(cast(Model, S))
elif is_sdict(S):
ports_set = {p1 for p1, _ in S} | {p2 for _, p2 in S}
return tuple(natsorted(ports_set))
elif is_scoo(S) or is_sdense(S):
*_, ports_map = S
return tuple(natsorted(ports_map.keys()))
else:
raise ValueError("Could not extract ports for given S")
@lru_cache(maxsize=4096) # cache to prevent future tracing
def _get_ports_from_model(model: Model) -> Tuple[str, ...]:
S: SType = jax.eval_shape(model)
return get_ports(S)
# Cell
def get_port_combinations(S: Union[Model, SType]) -> Tuple[Tuple[str, str], ...]:
"""get port combinations of a model or an stype"""
if is_model(S):
S = cast(Model, S)
return _get_port_combinations_from_model(S)
elif is_sdict(S):
S = cast(SDict, S)
return tuple(S.keys())
elif is_scoo(S):
Si, Sj, _, pm = cast(SCoo, S)
rpm = {int(i): str(p) for p, i in pm.items()}
return tuple(natsorted((rpm[int(i)], rpm[int(j)]) for i, j in zip(Si, Sj)))
elif is_sdense(S):
_, pm = cast(SDense, S)
return tuple(natsorted((p1, p2) for p1 in pm for p2 in pm))
else:
raise ValueError("Could not extract ports for given S")
@lru_cache(maxsize=4096) # cache to prevent future tracing
def _get_port_combinations_from_model(model: Model) -> Tuple[Tuple[str, str], ...]:
S: SType = jax.eval_shape(model)
return get_port_combinations(S)
# Cell
def get_settings(model: Union[Model, ModelFactory]) -> Settings:
"""Get the parameters of a SAX model function"""
signature = inspect.signature(model)
settings: Settings = {
k: (v.default if not isinstance(v, dict) else v)
for k, v in signature.parameters.items()
if v.default is not inspect.Parameter.empty
}
# make sure an inplace operation of resulting dict does not change the
# circuit parameters themselves
return copy_settings(settings)
# Cell
def grouped_interp(wl: Float, wls: Float, phis: Float) -> Float:
"""Grouped phase interpolation"""
wl = cast(Array, jnp.asarray(wl))
wls = cast(Array, jnp.asarray(wls))
# make sure values between -pi and pi
phis = cast(Array, jnp.asarray(phis)) % (2 * jnp.pi)
phis = jnp.where(phis > jnp.pi, phis - 2 * jnp.pi, phis)
if not wls.ndim == 1:
raise ValueError("grouped_interp: wls should be a 1D array")
    if not phis.ndim == 1:
        raise ValueError("grouped_interp: phis should be a 1D array")
if not wls.shape == phis.shape:
raise ValueError("grouped_interp: wls and phis shape does not match")
return _grouped_interp(wl.reshape(-1), wls, phis).reshape(*wl.shape)
@partial(jax.vmap, in_axes=(0, None, None), out_axes=0)
@jax.jit
def _grouped_interp(
wl: Array, # 0D array (not-vmapped) ; 1D array (vmapped)
wls: Array, # 1D array
phis: Array, # 1D array
) -> Array:
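    # Samples come in consecutive pairs: the even entries are each group's
    # wavelength/phase samples; the odd entries are a nearby second point used
    # only to estimate the local phase slope dphi/dwl of the group.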
dphi_dwl = (phis[1::2] - phis[::2]) / (wls[1::2] - wls[::2])
phis = phis[::2]
wls = wls[::2]
dwl = (wls[1:] - wls[:-1]).mean(0, keepdims=True)
t = (wl - wls + 1e-5 * dwl) / dwl # small offset to ensure no values are zero
t = jnp.where(jnp.abs(t) < 1, t, 0)
m0 = jnp.where(t > 0, size=1)[0]
m1 = jnp.where(t < 0, size=1)[0]
t = t[m0]
wl0 = wls[m0]
wl1 = wls[m1]
phi0 = phis[m0]
phi1 = phis[m1]
dphi_dwl0 = dphi_dwl[m0]
dphi_dwl1 = dphi_dwl[m1]
_phi0 = phi0 - 0.5 * (wl1 - wl0) * (
dphi_dwl0 * (t ** 2 - 2 * t) - dphi_dwl1 * t ** 2
)
_phi1 = phi1 - 0.5 * (wl1 - wl0) * (
dphi_dwl0 * (t - 1) ** 2 - dphi_dwl1 * (t ** 2 - 1)
)
phis = jnp.arctan2(
(1 - t) * jnp.sin(_phi0) + t * jnp.sin(_phi1),
(1 - t) * jnp.cos(_phi0) + t * jnp.cos(_phi1),
)
return phis
# Cell
def merge_dicts(*dicts: Dict) -> Dict:
"""merge (possibly deeply nested) dictionaries"""
if len(dicts) == 1:
return dict(_generate_merged_dict(dicts[0], {}))
elif len(dicts) == 2:
return dict(_generate_merged_dict(dicts[0], dicts[1]))
else:
return merge_dicts(dicts[0], merge_dicts(*dicts[1:]))
def _generate_merged_dict(dict1: Dict, dict2: Dict) -> Iterator[Tuple[Any, Any]]:
# inspired by https://stackoverflow.com/questions/7204805/how-to-merge-dictionaries-of-dictionaries
keys = {**{k: None for k in dict1}, **{k: None for k in dict2}} # keep key order, values irrelevant
for k in keys:
if k in dict1 and k in dict2:
v1, v2 = dict1[k], dict2[k]
if isinstance(v1, dict) and isinstance(v2, dict):
v = dict(_generate_merged_dict(v1, v2))
else:
# If one of the values is not a dict, you can't continue merging it.
# Value from second dict overrides one in first and we move on.
v = v2
elif k in dict1:
v = dict1[k]
else: # k in dict2:
v = dict2[k]
if isinstance(v, dict):
yield (k, {**v}) # shallow copy of dict
else:
yield (k, v)
# Cell
def mode_combinations(
modes: Iterable[str], cross: bool = False
) -> Tuple[Tuple[str, str], ...]:
"""create mode combinations for a collection of given modes"""
if cross:
mode_combinations = natsorted((m1, m2) for m1 in modes for m2 in modes)
else:
mode_combinations = natsorted((m, m) for m in modes)
return tuple(mode_combinations)
# Cell
def reciprocal(sdict: SDict) -> SDict:
"""Make an SDict reciprocal"""
if is_sdict(sdict):
return {
**{(p1, p2): v for (p1, p2), v in sdict.items()},
**{(p2, p1): v for (p1, p2), v in sdict.items()},
}
else:
raise ValueError("sax.reciprocal is only valid for SDict types")
# Internal Cell
@overload
def rename_params(model: ModelFactory, renamings: Dict[str, str]) -> ModelFactory:
...
@overload
def rename_params(model: Model, renamings: Dict[str, str]) -> Model:
...
# Cell
def rename_params(
model: Union[Model, ModelFactory], renamings: Dict[str, str]
) -> Union[Model, ModelFactory]:
"""rename the parameters of a `Model` or `ModelFactory` given a renamings mapping old parameter names to new."""
reversed_renamings = {v: k for k, v in renamings.items()}
if len(reversed_renamings) < len(renamings):
raise ValueError("Multiple old names point to the same new name!")
if is_model_factory(model):
old_model_factory = cast(ModelFactory, model)
old_settings = get_settings(model)
@wraps(old_model_factory)
def new_model_factory(**settings):
old_settings = {
reversed_renamings.get(k, k): v for k, v in settings.items()
}
model = old_model_factory(**old_settings)
return rename_params(model, renamings)
new_settings = {renamings.get(k, k): v for k, v in old_settings.items()}
_replace_kwargs(new_model_factory, **new_settings)
return new_model_factory
elif is_model(model):
old_model = cast(Model, model)
old_settings = get_settings(model)
@wraps(old_model)
def new_model(**settings):
old_settings = {
reversed_renamings.get(k, k): v for k, v in settings.items()
}
return old_model(**old_settings)
new_settings = {renamings.get(k, k): v for k, v in old_settings.items()}
_replace_kwargs(new_model, **new_settings)
return new_model
else:
raise ValueError(
"rename_params should be used to decorate a Model or ModelFactory."
)
def _replace_kwargs(func: Callable, **kwargs: ComplexFloat):
"""Change the kwargs signature of a function"""
sig = inspect.signature(func)
settings = [
inspect.Parameter(k, inspect.Parameter.KEYWORD_ONLY, default=v)
for k, v in kwargs.items()
]
func.__signature__ = sig.replace(parameters=settings)
# Internal Cell
@overload
def rename_ports(S: SDict, renamings: Dict[str, str]) -> SDict:
...
@overload
def rename_ports(S: SCoo, renamings: Dict[str, str]) -> SCoo:
...
@overload
def rename_ports(S: SDense, renamings: Dict[str, str]) -> SDense:
...
@overload
def rename_ports(S: Model, renamings: Dict[str, str]) -> Model:
...
@overload
def rename_ports(S: ModelFactory, renamings: Dict[str, str]) -> ModelFactory:
...
# Cell
def rename_ports(
S: Union[SType, Model, ModelFactory], renamings: Dict[str, str]
) -> Union[SType, Model, ModelFactory]:
"""rename the ports of an `SDict`, `Model` or `ModelFactory` given a renamings mapping old port names to new."""
if is_scoo(S):
Si, Sj, Sx, ports_map = cast(SCoo, S)
ports_map = {renamings[p]: i for p, i in ports_map.items()}
return Si, Sj, Sx, ports_map
elif is_sdense(S):
Sx, ports_map = cast(SDense, S)
ports_map = {renamings[p]: i for p, i in ports_map.items()}
return Sx, ports_map
elif is_sdict(S):
sdict = cast(SDict, S)
original_ports = get_ports(sdict)
assert len(renamings) == len(original_ports)
return {(renamings[p1], renamings[p2]): v for (p1, p2), v in sdict.items()}
elif is_model(S):
old_model = cast(Model, S)
@wraps(old_model)
def new_model(**settings) -> SType:
return rename_ports(old_model(**settings), renamings)
return new_model
elif is_model_factory(S):
old_model_factory = cast(ModelFactory, S)
@wraps(old_model_factory)
def new_model_factory(**settings) -> Callable[..., SType]:
return rename_ports(old_model_factory(**settings), renamings)
return new_model_factory
else:
raise ValueError("Cannot rename ports for type {type(S)}")
# Cell
def update_settings(
settings: Settings, *compnames: str, **kwargs: ComplexFloat
) -> Settings:
"""update a nested settings dictionary"""
_settings = {}
if not compnames:
for k, v in settings.items():
if isinstance(v, dict):
_settings[k] = update_settings(v, **kwargs)
else:
if k in kwargs:
_settings[k] = try_float(kwargs[k])
else:
_settings[k] = try_float(v)
else:
for k, v in settings.items():
if isinstance(v, dict):
if k == compnames[0]:
_settings[k] = update_settings(v, *compnames[1:], **kwargs)
else:
_settings[k] = v
else:
_settings[k] = try_float(v)
return _settings
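# Sketch with made-up component names: without `compnames` every matching key
# is updated; with them, only the named branch is touched:
#
#     settings = {"wg1": {"length": 10.0}, "wg2": {"length": 20.0}}
#     update_settings(settings, "wg1", length=15.0)
#     # -> {"wg1": {"length": 15.0}, "wg2": {"length": 20.0}}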
# Cell
def validate_not_mixedmode(S: SType):
"""validate that an stype is not 'mixed mode' (i.e. invalid)
Args:
S: the stype to validate
"""
if is_mixedmode(S): # mixed mode
raise ValueError(
"Given SType is neither multimode or singlemode. Please check the port "
"names: they should either ALL contain the '@' separator (multimode) "
"or NONE should contain the '@' separator (singlemode)."
)
# Cell
def validate_multimode(S: SType, modes=("te", "tm")) -> None:
"""validate that an stype is multimode and that the given modes are present."""
try:
current_modes = set(p.split("@")[1] for p in get_ports(S))
except IndexError:
raise ValueError("The given stype is not multimode.")
for mode in modes:
if mode not in current_modes:
raise ValueError(
f"Could not find mode '{mode}' in one of the multimode models."
)
# Cell
def validate_sdict(sdict: Any) -> None:
"""Validate an `SDict`"""
if not isinstance(sdict, dict):
raise ValueError("An SDict should be a dictionary.")
for ports in sdict:
if not isinstance(ports, tuple) or len(ports) != 2:
raise ValueError(f"SDict keys should be length-2 tuples. Got {ports}")
p1, p2 = ports
if not isinstance(p1, str) or not isinstance(p2, str):
raise ValueError(
f"SDict ports should be strings. Got {ports} "
f"({type(ports[0])}, {type(ports[1])})"
)
# Cell
def get_inputs_outputs(ports: Tuple[str, ...]):
inputs = tuple(p for p in ports if p.lower().startswith("in"))
outputs = tuple(p for p in ports if not p.lower().startswith("in"))
if not inputs:
inputs = tuple(p for p in ports if not p.lower().startswith("out"))
outputs = tuple(p for p in ports if p.lower().startswith("out"))
return inputs, outputs
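# The split is a simple prefix heuristic; illustrative port names:
#
#     get_inputs_outputs(("in0", "in1", "out0"))  # -> (("in0", "in1"), ("out0",))
#     get_inputs_outputs(("o1", "out0"))          # -> (("o1",), ("out0",))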
``` |
{
"source": "joamatab/SiliconPhotonicsDesign",
"score": 2
} |
#### File: SiliconPhotonicsDesign/needs_fixing/gc.py
```python
import json
import pathlib
import matplotlib.pyplot as plt
import numpy as np
def gc(
session=None,
period=0.66e-6,
ff=0.5,
wl=1550e-9,
n_gratings=50,
wg_height=220e-9,
etch_depth=70e-9,
box_height=2e-6,
clad_height=2e-6,
substrate_height=2e-6,
material="Si (Silicon) - Palik",
material_clad="SiO2 (Glass) - Palik",
wg_width=500e-9,
polarization="TE",
wavelength=1550e-9,
gc_xmin=0,
fiber_position=4.5e-6,
fiber_angle_deg=20,
wl_span=0.3e-6, # wavelength span
mesh_accuracy=3, # FDTD simulation mesh accuracy
frequency_points=100, # global frequency points
simulation_time=1000e-15, # maximum simulation time [s]
core_index=1.4682,
cladding_index=1.4629,
core_diameter=8.2e-6,
cladding_diameter=100e-6,
d=0.2e-6,
):
import lumapi
s = session or lumapi.FDTD(hide=False)
s.newproject()
s.selectall()
s.deleteall()
gap = period * (1 - ff)
# etched region of the grating
s.addrect()
s.set("name", "GC_base")
s.set("material", material)
s.set("x max", (n_gratings + 1) * period)
s.set("x min", gc_xmin)
s.set("y", 0.5 * (wg_height - etch_depth))
s.set("y span", wg_height - etch_depth)
# add GC teeth;
for i in range(n_gratings):
s.addrect()
s.set("name", "GC_tooth")
s.set("material", material)
s.set("y", 0.5 * wg_height)
s.set("y span", wg_height)
s.set("x min", gc_xmin + gap + i * period)
s.set("x max", gc_xmin + period + i * period)
s.selectpartial("GC")
s.addtogroup("GC")
# draw silicon substrate;
s.addrect()
s.set("name", "substrate")
s.set("material", material)
s.set("x max", 30e-6)
s.set("x min", -20e-6)
s.set("y", -1 * (box_height + 0.5 * substrate_height))
s.set("y span", substrate_height)
s.set("alpha", 0.2)
s.addrect()
# draw buried oxide;
s.set("name", "BOX")
s.set("material", material_clad)
s.set("x max", 30e-6)
s.set("x min", -20e-6)
s.set("y min", -box_height)
s.set("y max", clad_height)
s.set("override mesh order from material database", True)
s.set("mesh order", 3)
s.set("alpha", 0.3)
s.addrect()
# draw waveguide;
s.set("name", "WG")
s.set("material", material)
s.set("x min", -20e-6)
s.set("x max", gc_xmin)
s.set("y", 0.11e-6)
s.set("y span", wg_height)
# add simulation region;
s.addfdtd(
dimension="2D",
x_max=15e-6,
x_min=-3.5e-6,
y_min=-(box_height + 0.2e-6),
y_max=clad_height + 2e-6,
mesh_accuracy=mesh_accuracy,
simulation_time=simulation_time,
)
# add waveguide mode source;
s.addmode()
s.set("name", "waveguide_source")
s.set("x", -3e-6)
s.set("y", 0.5 * wg_height)
s.set("y span", 2e-6)
s.set("direction", "Forward")
s.set("use global source settings", True)
s.set("enabled", False)
# add fibre;
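# The launch angle inside the fibre follows Snell's law at the air/core
# interface, sin(theta_air) = n_core * sin(theta_core); for the defaults
# (20 deg in air, n_core ~ 1.468) theta comes out to roughly 13.5 deg.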
theta = np.arcsin(np.sin(fiber_angle_deg * np.pi / 180) / core_index) * 180 / np.pi
r1 = core_diameter / 2
r2 = cladding_diameter / 2
span = 15 * r1
if theta > 89:
theta = 89
if theta < -89:
theta = -89
thetarad = theta * np.pi / 180
L = 20e-6 / np.cos(thetarad)
V1 = [
(-r1 / np.cos(thetarad), 0),
(r1 / np.cos(thetarad), 0),
(r1 / np.cos(thetarad) + L * np.sin(thetarad), L * np.cos(thetarad)),
(-r1 / np.cos(thetarad) + L * np.sin(thetarad), L * np.cos(thetarad)),
]
V2 = [
(-r2 / np.cos(thetarad), 0),
(r2 / np.cos(thetarad), 0),
(r2 / np.cos(thetarad) + L * np.sin(thetarad), L * np.cos(thetarad)),
(-r2 / np.cos(thetarad) + L * np.sin(thetarad), L * np.cos(thetarad)),
]
v1 = s.matrix(4, 2)
v2 = s.matrix(4, 2)
for i in range(4):
for j in range(2):
v1[i][j] = V1[i][j]
v2[i][j] = V2[i][j]
s.addpoly()
s.set("name", "fibre_core")
s.set("x", 0)
s.set("y", 0)
s.set("vertices", v1)
s.set("index", core_index)
s.addpoly()
s.set("name", "fibre_cladding")
s.set("override mesh order from material database", 1)
s.set("mesh order", 3)
s.set("x", 0)
s.set("y", 0)
s.set("vertices", v2)
s.set("index", cladding_index)
s.addmode()
s.set("name", "fibre_mode")
s.set("injection axis", "y-axis")
s.set("direction", "Backward")
s.set("use global source settings", 1)
s.set("theta", -theta)
s.set("x span", span)
d = 0.4e-6  # offset of the fibre source/monitors along the fibre axis
s.set("x", d * np.sin(thetarad))
s.set("y", d * np.cos(thetarad))
s.set("rotation offset", abs(span / 2 * np.tan(thetarad)))
s.addpower()
s.set("name", "fibre_top")
s.set("x span", span)
s.set("x", d * np.sin(thetarad))
s.set("y", d * np.cos(thetarad))
s.addmodeexpansion()
s.set("name", "fibre_modeExpansion")
s.set("monitor type", "2D Y-normal")
s.setexpansion("fibre_top", "fibre_top")
s.set("x span", span)
s.set("x", d * np.sin(thetarad))
s.set("y", d * np.cos(thetarad))
s.set("theta", -theta)
s.set("rotation offset", abs(span / 2 * np.tan(thetarad)))
s.set("override global monitor settings", False)
s.selectpartial("fibre")
s.addtogroup("fibre")
s.select("fibre::fibre_modeExpansion")
s.setexpansion("fibre_top", "::model::fibre::fibre_top")
s.unselectall()
s.select("fibre")
s.set("x", fiber_position)
s.set("y", clad_height + 1e-6)
s.addpower()
# add monitor;
s.set("name", "T")
s.set("monitor type", "2D X-normal")
s.set("x", -2.8e-6)
s.set("y", 0.5 * wg_height)
s.set("y span", 1e-6)
s.addmodeexpansion() # add waveguide mode expansion monitor
s.set("name", "waveguide")
s.set("monitor type", "2D X-normal")
s.setexpansion("T", "T")
s.set("x", -2.9e-6)
s.set("y", 0.5 * wg_height)
s.set("y span", 1e-6)
if polarization == "TE":
s.select("fibre::fibre_mode")
s.set("mode selection", "fundamental TM")
s.select("fibre::fibre_modeExpansion")
s.set("mode selection", "fundamental TM")
s.select("waveguide_source")
s.set("mode selection", "fundamental TM")
s.select("waveguide")
s.set("mode selection", "fundamental TM")
else:
s.select("fibre::fibre_mode")
s.set("mode selection", "fundamental TE")
s.select("fibre::fibre_modeExpansion")
s.set("mode selection", "fundamental TE")
s.select("waveguide_source")
s.set("mode selection", "fundamental TE")
s.select("waveguide")
s.set("mode selection", "fundamental TE")
# global properties
s.setglobalmonitor("frequency points", frequency_points)
s.setglobalmonitor("use wavelength spacing", 1)
s.setglobalmonitor("use source limits", 1)
s.setglobalsource("center wavelength", wl)
s.setglobalsource("wavelength span", wl_span)
s.save("GC_fibre")
#########################
# Compute Sparameters
#########################
s.addport() # fibre port
# p = "FDTD::ports::port 1"
s.set("injection axis", "y-axis")
s.set("x", d * np.sin(thetarad))
s.set("y", d * np.cos(thetarad) + 3)
s.set("x span", span)
s.set("theta", -theta)
s.set("rotation offset", abs(span / 2 * np.tan(thetarad)))
s.addport() # waveguide
# p = "FDTD::ports::port 2"
s.set("injection axis", "x-axis")
s.set("x", -2.9e-6)
s.set("y", 0.5 * wg_height)
s.set("y span", 1e-6)
return dict(session=s)
def run(session, filepath="grating"):
s = session
filepath = pathlib.Path(filepath)
filepath_json = filepath.with_suffix(".json")
filepath_sp = str(filepath.with_suffix(".dat"))
# filepath_sim_settings = filepath.with_suffix(".settings.json")
# filepath_fsp = str(filepath.with_suffix(".fsp"))
# s.save(filepath_fsp)
# s.run()
# s.save(filepath_fsp)
# if a sweep task named s-parameter sweep already exists, remove it
s.deletesweep("s-parameter sweep")
# add s-parameter sweep task
s.addsweep(3)
# un-check "Excite all ports" option
s.setsweep("s-parameter sweep", "Excite all ports", 0)
# use auto-symmetry to populate the S-matrix setup table
s.setsweep("S sweep", "auto symmetry", True)
# run s-parameter sweep
s.runsweep("s-parameter sweep")
# collect results
# S_matrix = s.getsweepresult("s-parameter sweep", "S matrix")
sp = s.getsweepresult("s-parameter sweep", "S parameters")
# visualize results
# s.visualize(S_matrix);
# s.visualize(S_parameters);
# s.visualize(S_diagnostic);
# export S-parameter data to file named s_params.dat to be loaded in INTERCONNECT
s.exportsweep("s-parameter sweep", filepath_sp)
print(f"wrote sparameters to {filepath_sp}")
keys = [key for key in sp.keys() if key.startswith("S")]
ra = {f"{key}a": list(np.unwrap(np.angle(sp[key].flatten()))) for key in keys}
rm = {f"{key}m": list(np.abs(sp[key].flatten())) for key in keys}
results = {"wavelength_nm": list(sp["lambda"].flatten() * 1e9)}
results.update(ra)
results.update(rm)
with open(filepath_json, "w") as f:
json.dump(results, f)
return results
def plot(results, logscale=True, keys=None):
"""plots Sparameters"""
r = results
w = r["wavelength_nm"]
if keys:
assert isinstance(keys, list)
for key in keys:
assert key in r, f"{key} not in {r.keys()}"
else:
keys = [key for key in r.keys() if key.startswith("S") and key.endswith("m")]
for key in keys:
if logscale:
y = 20 * np.log10(r[key])
else:
y = r[key]
plt.plot(w, y, label=key[:-1])
plt.legend()
plt.xlabel("wavelength (nm)")
if __name__ == "__main__":
import lumapi
s = lumapi.FDTD()
d = gc(session=s)
# results = run(session=d['session'])
# plot(results)
# print(r)
```
#### File: SiliconPhotonicsDesign/pylum/run.py
```python
from pylum.write_scripts import write_scripts
def run_mode(scripts_dict, session=None, return_session=False):
""" runs a dict of scripts in a MODE session
there should be a main.lsf defined
"""
import lumapi
dirpath = scripts_dict.get("dirpath", write_scripts(scripts_dict))
s = session or lumapi.MODE()
s.cd(str(dirpath))
s.eval(scripts_dict["main.lsf"])
if return_session:
return s
def run_fdtd(scripts_dict, session=None, return_session=False):
""" runs a dict of scripts in a FDTD session
there should be a main.lsf defined
.. code-block:: python
import pylum
from pylum.grating_coupler import sweep
scripts_dict = sweep()
run_fdtd(scripts_dict)
"""
import lumapi
dirpath = scripts_dict.get("dirpath", write_scripts(scripts_dict))
s = session or lumapi.FDTD()
s.cd(str(dirpath))
s.eval(scripts_dict["main.lsf"])
if return_session:
return s
if __name__ == "__main__":
from pylum.grating_coupler import sweep
scripts_dict = sweep()
run_fdtd(scripts_dict)
```
#### File: SiliconPhotonicsDesign/pylum/waveguide.py
```python
from pylum.config import materials
def waveguide(
session=None,
wg_width=500e-9,
wg_height=220e-9,
slab_height=0,
box_height=2e-6,
clad_height=2e-6,
margin_wg_height=1e-6,
margin_wg_width=2e-6,
material_wg="si",
material_wafer="si",
material_clad="sio2",
material_box="sio2",
wavelength=1550e-9,
mesh_size=10e-9,
modes=4,
):
""" draws a waveguide 2D mode solver
Args:
session: None
wg_width: 500e-9
wg_height: 220e-9
slab_height: 0
box_height: 2e-6
clad_height: 2e-6
margin_wg_height: 1e-6
margin_wg_width: 2e-6
material_wg: "si"
material_wafer: "si"
material_clad: "sio2"
material_box: "sio2"
wavelength: 1550e-9
mesh_size: 10e-9
modes: 4
"""
for material in [material_wg, material_box, material_clad, material_wafer]:
if material not in materials:
raise ValueError(f"{material} not in {list(materials.keys())}")
material_wg = materials[material_wg]
material_wafer = materials[material_wafer]
material_clad = materials[material_clad]
material_box = materials[material_box]
import lumapi
s = session or lumapi.MODE(hide=False)
s.newproject()
s.selectall()
s.deleteall()
xmin = -2e-6
xmax = 2e-6
zmin = -margin_wg_height
zmax = wg_height + margin_wg_height
dy = 2 * margin_wg_width + wg_width
s.addrect()
s.set("name", "clad")
s.set("material", material_clad)
s.set("z min", 0)
s.set("z max", clad_height)
s.set("y", 0)
s.set("y span", dy)
s.set("x min", xmin)
s.set("x max", xmax)
s.set("override mesh order from material database", 1)
s.set(
"mesh order", 3
) # similar to "send to back", put the cladding as a background.
s.set("alpha", 0.05)
s.addrect()
s.set("name", "box")
s.set("material", material_box)
s.set("z min", -box_height)
s.set("z max", 0)
s.set("y", 0)
s.set("y span", dy)
s.set("x min", xmin)
s.set("x max", xmax)
s.set("alpha", 0.05)
s.addrect()
s.set("name", "wafer")
s.set("material", material_wafer)
s.set("z min", -box_height - 2e-6)
s.set("z max", -box_height)
s.set("y", 0)
s.set("y span", dy)
s.set("x min", xmin)
s.set("x max", xmax)
s.set("alpha", 0.1)
s.addrect()
s.set("name", "waveguide")
s.set("material", material_wg)
s.set("z min", 0)
s.set("z max", wg_height)
s.set("y", 0)
s.set("y span", wg_width)
s.set("x min", xmin)
s.set("x max", xmax)
if slab_height > 0:
s.addrect()
s.set("name", "waveguide")
s.set("material", material_wg)
s.set("z min", 0)
s.set("z max", slab_height)
s.set("y", 0)
s.set("y span", dy)
s.set("x min", xmin)
s.set("x max", xmax)
s.addfde()
s.set("solver type", "2D X normal")
s.set("x", 0)
s.set("z max", zmax)
s.set("z min", zmin)
s.set("y", 0)
s.set("y span", dy)
s.set("wavelength", wavelength)
s.set("solver type", "2D X normal")
s.set("y min bc", "PML")
s.set("y max bc", "PML")
# radiation loss
s.set("z min bc", "metal")
s.set("z max bc", "metal")
s.set("define y mesh by", "maximum mesh step")
s.set("dy", mesh_size)
s.set("define z mesh by", "maximum mesh step")
s.set("dz", mesh_size)
s.set("number of trial modes", modes)
s.cleardcard()
return s
if __name__ == "__main__":
import lumapi
s = lumapi.MODE()
s = waveguide(session=s)
``` |
{
"source": "joamatab/simphony",
"score": 3
} |
#### File: simphony/examples/filters.py
```python
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
from simphony.library import ebeam, sipann
from simphony.netlist import Subcircuit
from simphony.simulation import SweepSimulation
from simphony.tools import freq2wl
# Have a main data line where frequency multiplexed data enters the circuit.
wg_data = ebeam.ebeam_wg_integral_1550(100e-6)
# A terminator for dispersing unused light
term = ebeam.ebeam_terminator_te1550()
def ring_factory(radius):
"""
Creates a full ring (with terminator) from a half ring.
Ports of a half ring are ordered like so:
2 4
| |
\ /
\ /
---=====---
1 3
Resulting pins are ('in', 'out', 'pass').
Parameters
----------
radius : float
The radius of the ring resonator, in microns.
"""
# Have rings for selecting out frequencies from the data line.
half_ring = sipann.sipann_dc_halfring(radius=radius)
circuit = Subcircuit()
circuit.add([
(half_ring, 'input'),
(half_ring, 'output'),
(term, 'terminator')
])
circuit.elements['input'].pins = ('pass', 'midb', 'in', 'midt')
circuit.elements['output'].pins = ('out', 'midt', 'term', 'midb')
circuit.connect_many([
('input', 'midb', 'output', 'midb'),
('input', 'midt', 'output', 'midt'),
('terminator', 'n1', 'output', 'term')
])
return circuit
# Behold, we can run a simulation on a single ring resonator.
cir1 = ring_factory(10)
sim1 = SweepSimulation(cir1, 1500e-9, 1600e-9)
res1 = sim1.simulate()
f1, s = res1.data(res1.pinlist['in'], res1.pinlist['pass'])
plt.plot(f1, s)
plt.title("10-micron Ring Resonator")
plt.tight_layout()
plt.show()
# Now, we'll create the circuit (using several ring resonator subcircuits)
# and add all individual instances.
circuit = Subcircuit('Add-Drop Filter')
e = circuit.add([
(wg_data, 'input'),
(ring_factory(10), 'ring10'),
(wg_data, 'out1'),
(wg_data, 'connect1'),
(ring_factory(11), 'ring11'),
(wg_data, 'out2'),
(wg_data, 'connect2'),
(ring_factory(12), 'ring12'),
(wg_data, 'out3'),
(term, 'terminator')
])
# You can set pin names individually (here I'm naming all the outputs that
# I'll want to access before they get scrambled and associated with different
# elements):
circuit.elements['input'].pins['n1'] = 'input'
circuit.elements['out1'].pins['n2'] = 'out1'
circuit.elements['out2'].pins['n2'] = 'out2'
circuit.elements['out3'].pins['n2'] = 'out3'
circuit.connect_many([
('input', 'n2', 'ring10', 'in'),
('out1', 'n1', 'ring10', 'out'),
('connect1', 'n1', 'ring10', 'pass'),
('connect1', 'n2', 'ring11', 'in'),
('out2', 'n1', 'ring11', 'out'),
('connect2', 'n1', 'ring11', 'pass'),
('connect2', 'n2', 'ring12', 'in'),
('out3', 'n1', 'ring12', 'out'),
('terminator', 'n1', 'ring12', 'pass'),
])
# Run a simulation on the netlist.
simulation = SweepSimulation(circuit, 1524.5e-9, 1551.15e-9)
result = simulation.simulate()
fig = plt.figure(tight_layout=True)
gs = gridspec.GridSpec(1, 3)
ax = fig.add_subplot(gs[0, :2])
f, s = result.data(result.pinlist['input'], result.pinlist['out1'])
ax.plot(freq2wl(f)*1e9, s, label='Output 1', lw='0.7')
f, s = result.data(result.pinlist['input'], result.pinlist['out2'])
ax.plot(freq2wl(f)*1e9, s, label='Output 2', lw='0.7')
f, s = result.data(result.pinlist['input'], result.pinlist['out3'])
ax.plot(freq2wl(f)*1e9, s, label='Output 3', lw='0.7')
ax.set_ylabel("Fractional Optical Power")
ax.set_xlabel("Wavelength (nm)")
plt.legend(loc='upper right')
ax = fig.add_subplot(gs[0, 2])
f, s = result.data(result.pinlist['input'], result.pinlist['out1'])
ax.plot(freq2wl(f)*1e9, s, label='Output 1', lw='0.7')
f, s = result.data(result.pinlist['input'], result.pinlist['out2'])
ax.plot(freq2wl(f)*1e9, s, label='Output 2', lw='0.7')
f, s = result.data(result.pinlist['input'], result.pinlist['out3'])
ax.plot(freq2wl(f)*1e9, s, label='Output 3', lw='0.7')
ax.set_xlim(1543,1545)
ax.set_ylabel("Fractional Optical Power")
ax.set_xlabel("Wavelength (nm)")
fig.align_labels()
plt.show()
```
#### File: simphony/examples/gm.py
```python
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import matplotlib.pyplot as plt
import numpy as np
from simphony.library import ebeam, sipann
from simphony.netlist import Subcircuit
from simphony.simulation import SweepSimulation
from simphony.tools import freq2wl, wl2freq
# We can rename the pins attribute on the class before we instantiate them;
# then we don't have to rename the pins on each element individually later.
ebeam.ebeam_wg_integral_1550.pins = ('in', 'out')
sipann.sipann_dc_fifty.pins = ('in1', 'in2', 'out1', 'out2')
sipann.sipann_dc_crossover1550.pins = ('in1', 'in2', 'out1', 'out2')
# Get all the models we're going to need for the green machine circuit:
gc = ebeam.ebeam_gc_te1550()
wg100 = ebeam.ebeam_wg_integral_1550(length=100e-6)
dc = sipann.sipann_dc_fifty()
crossover = sipann.sipann_dc_crossover1550()
wgin2 = ebeam.ebeam_wg_integral_1550(length=102.125e-6)
wg300 = ebeam.ebeam_wg_integral_1550(length=300e-6)
# Add all the elements used in the circuit
circuit = Subcircuit('Green Machine')
e = circuit.add([
# Define the four input grating couplers
(gc, 'in1'),
(gc, 'in2'),
(gc, 'in3'),
(gc, 'in4'),
# The grating couplers each feed into their own waveguide
(wg100, 'wg1'),
(wg100, 'wg2'),
(wg100, 'wg3'),
(wg100, 'wg4'),
# Each pair of waveguides feeds into a 50/50 directional coupler
(dc, 'dc1'),
(dc, 'dc2'),
# After mixing, the center pair of waveguides cross paths at a 100/0
# crossing. The edge pair of waveguides pass uninterrupted.
(wg300, 'wg_pass1'),
(wg100, 'wg_in1'), (wgin2, 'wg_out1'),
(crossover, 'crossing'),
(wg100, 'wg_in2'), (wgin2, 'wg_out2'),
(wg300, 'wg_pass2'),
# After crossing, the waveguides are mixed again.
(dc, 'dc3'),
(dc, 'dc4'),
# The outputs are fed through waveguides.
(wg100, 'wg5'),
(wg100, 'wg6'),
(wg100, 'wg7'),
(wg100, 'wg8'),
# We finally output the values through grating couplers.
(gc, 'out1'),
(gc, 'out2'),
(gc, 'out3'),
(gc, 'out4'),
])
# Let's rename some ports on some of our elements so that we can:
# 1) find them again later, and
# 2) make our code clearer by using plain english for the connections.
circuit.elements['in1'].pins['n1'] = 'in1'
circuit.elements['in2'].pins['n1'] = 'in2'
circuit.elements['in3'].pins['n1'] = 'in3'
circuit.elements['in4'].pins['n1'] = 'in4'
circuit.elements['out1'].pins['n2'] = 'out1'
circuit.elements['out2'].pins['n2'] = 'out2'
circuit.elements['out3'].pins['n2'] = 'out3'
circuit.elements['out4'].pins['n2'] = 'out4'
# Phew! Now that we got all those elements out of the way, we can finally
# work on the circuit connections.
circuit.connect_many([
('in1', 'n2', 'wg1', 'in'),
('in2', 'n2', 'wg2', 'in'),
('in3', 'n2', 'wg3', 'in'),
('in4', 'n2', 'wg4', 'in'),
('wg1', 'out', 'dc1', 'in1'),
('wg2', 'out', 'dc1', 'in2'),
('wg3', 'out', 'dc2', 'in1'),
('wg4', 'out', 'dc2', 'in2'),
('dc1', 'out1', 'wg_pass1', 'in'),
('dc1', 'out2', 'wg_in1', 'in'), ('wg_in1', 'out', 'crossing', 'in1'), ('crossing', 'out1', 'wg_out1', 'in'),
('dc2', 'out1', 'wg_in2', 'in'), ('wg_in2', 'out', 'crossing', 'in2'), ('crossing', 'out2', 'wg_out2', 'in'),
('dc2', 'out2', 'wg_pass2', 'in'),
('wg_pass1', 'out', 'dc3', 'in1'),
('wg_out1', 'out', 'dc3', 'in2'),
('wg_out2', 'out', 'dc4', 'in1'),
('wg_pass2', 'out', 'dc4', 'in2'),
('dc3', 'out1', 'wg5', 'in'),
('dc3', 'out2', 'wg6', 'in'),
('dc4', 'out1', 'wg7', 'in'),
('dc4', 'out2', 'wg8', 'in'),
('wg5', 'out', 'out1', 'n1'),
('wg6', 'out', 'out2', 'n1'),
('wg7', 'out', 'out3', 'n1'),
('wg8', 'out', 'out4', 'n1'),
])
# Run a simulation on our circuit.
simulation = SweepSimulation(circuit, 1549.9e-9, 1550.1e-9)
# simulation = SweepSimulation(circuit, 1510e-9, 1590e-9)
result = simulation.simulate()
# Get the simulation results
# f, s = result.data(result.pinlist['in1'], result.pinlist['out1'])
# The Green Machine is optimized for 1550 nanometers. We'd like to investigate
# its behavior at that specific frequency:
set_freq = wl2freq(1550e-9)
in_port = 'in1'
plt.figure()
plt.plot(*result.data(result.pinlist[in_port], result.pinlist['out1']), label='1 to 5')
plt.plot(*result.data(result.pinlist[in_port], result.pinlist['out2']), label='1 to 6')
plt.plot(*result.data(result.pinlist[in_port], result.pinlist['out3']), label='1 to 7')
plt.plot(*result.data(result.pinlist[in_port], result.pinlist['out4']), label='1 to 8')
plt.axvline(set_freq)
plt.legend(loc="upper right")
plt.xlabel("Frequency (Hz)")
plt.ylabel("Fractional Optical Power")
plt.show()
# We're interested now in the phase offsets at our wavelength of interest.
plt.figure()
freq, s = result.f, result.s
idx = np.argmax(freq>set_freq)
input_pin = result.pinlist['in1'].index
outputs = [result.pinlist['out' + str(n)].index for n in range(1,5)]
offset = min(np.angle(s[idx, outputs, input_pin]))
# angles = np.unwrap(np.angle(s[:, outputs, input_pin])).T - offset
angles = np.angle(s[:, outputs, input_pin]).T - offset
for angle in angles:
plt.plot(freq2wl(freq)*1e9, angle, linewidth='0.7')
plt.axvline(1550, color='k', linestyle='--', linewidth='0.5')
plt.legend([r'$\phi_4$',r'$\phi_5$',r'$\phi_6$',r'$\phi_7$'], loc='upper right')
plt.xlabel("Wavelength (nm)")
plt.ylabel("Phase")
plt.show()
import sys
sys.exit()
plt.figure()
idx = np.argmax(freq>set_freq)
print(idx, freq2wl(freq[idx]))
angles = np.rad2deg(np.unwrap(np.angle(s[:,outputs,input_pin]))).T
angles = angles + ((angles[:,idx] % (2*np.pi)) - angles[:,idx]).reshape((4,1))
print(angles[:,idx], angles)
for i in range(4):
plt.plot(freq2wl(freq)*1e9, angles[i])#, label="Port {} to {}".format(i, j))
plt.plot(freq2wl(freq[idx])*1e9, angles[i][idx], 'rx')
plt.axvline(1550)
# plt.legend()
plt.xlabel("Wavelength (nm)")
plt.ylabel("Phase")
plt.show()
import sys
sys.exit()
# plt.axvline(set_freq/1e12)
plt.show()
# -----------------------------------------------------------------------------
#
# Response at precisely 1550nm
#
idx = np.argmax(freq>set_freq)
print(idx, freq2wl(freq[idx]))
# Phases of the four outputs at 1550nm
plt.figure()
circle = np.linspace(0, 2*np.pi)
plt.plot(np.cos(circle), np.sin(circle))
# for i in range(0,4):
inputs1550 = [0] * 8
for output in range(4,8):
rad = np.angle(s[idx,output,i])
plt.plot(np.cos(rad), np.sin(rad), 'o')
inputs1550[output-4] = np.cos(rad) + np.sin(rad) * 1j
plt.xlim(-1, 1)
plt.ylim(-1, 1)
plt.axes().set_aspect('equal')
# for val in inputs1550:
# print(val, np.rad2deg(np.angle(val)))
# -----------------------------------------------------------------------------
#
# Multiple input stuffs:
#
def multi_input(num_ports, inputs, verbose=True):
inputs = np.array(inputs, dtype=np.complex_)
if verbose:
angles = np.rad2deg(np.angle(inputs))
print(angles - min(angles))
out = np.zeros([len(freq), num_ports], dtype='complex128')
for j in range(len(freq)):
out[j, :] = np.dot(s[j, :, :], inputs.T)
return out
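# multi_input applies the full S-matrix to the complex input-amplitude vector
# (b = S . a) at every frequency point, so the ports interfere coherently and
# the relative phases in `inputs` matter.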
def plot_outputs(out):
plt.figure()
for j in range(8):
plt.subplot(8, 1, j+1)
plt.plot(freq/1e12, np.abs(out[:,j])**2, label="Port {}".format(j))
plt.axvline(set_freq/1e12)
plt.legend()
plt.xlabel("Frequency (THz)")
plt.ylabel("Normalized Power")
out = multi_input(8, inputs1550)
plt.figure()
for j in range(8):
plt.subplot(8, 1, j+1)
plt.plot(freq/1e12, np.abs(out[:,j])**2, label="Port {}".format(j))
plt.axvline(set_freq/1e12)
plt.legend()
plt.xlabel("Frequency (THz)")
plt.ylabel("Normalized Power")
# plt.figure()
# for j in range(8):
# plt.plot(freq/1e12, np.rad2deg(np.unwrap(np.angle(out[:,j]))), label="Port {}".format(j))
# # plt.plot(freq/1e12, np.rad2deg(np.angle(s[:,j,i])), label="Port {} to {}".format(i, j))
# plt.axvline(set_freq/1e12)
# plt.legend()
# plt.xlabel("Frequency (THz)")
# plt.ylabel("Phase")
plt.show()
```
#### File: library/gdsfactory/test_components.py
```python
import numpy as np
import simphony.library.gdsfactory as cl
def test_mmi1x2(data_regression):
c = cl.mmi1x2()
wav = np.linspace(1520, 1570, 3) * 1e-9
f = 3e8 / wav
s = c.s_parameters(freq=f)
_, rows, cols = np.shape(s)
sdict = {
f"S{i+1}{j+1}": np.abs(s[:, i, j]).tolist()
for i in range(rows)
for j in range(cols)
}
data_regression.check(sdict)
def test_coupler_ring(data_regression):
c = cl.coupler_ring()
wav = np.linspace(1520, 1570, 3) * 1e-9
f = 3e8 / wav
s = c.s_parameters(freq=f)
_, rows, cols = np.shape(s)
sdict = {
f"S{i+1}{j+1}": np.abs(s[:, i, j]).tolist()
for i in range(rows)
for j in range(cols)
}
data_regression.check(sdict)
```
#### File: simphony/simphony/persist.py
```python
import inspect
import io
import os
import pickle
from simphony.elements import Model
from simphony.tools import wl2freq, interpolate
def export_model(model, filename, wl=None, freq=None):
"""
Exports a simphony model (using pickle) for the given frequency/wavelength
range to a '.mdl' file.
Must include either the wavelength or frequency argument. If both are
included, defaults to frequency argument.
Parameters
-----------
model : Model
Any class inheriting from simphony.elements.Model
filename : str
The filename (may include path to directory) to save the model to.
Note that the suffix '.mdl' will be appended to the filename.
wl : ndarray, optional
Wavelengths you want to save sparameters for (in meters).
freq : ndarray, optional
Frequencies you want to save sparameters for (in Hz).
Examples
--------
We can write a model for a ``ebeam_wg_integral_1550`` instantiated with a
length of 100 nanometers to a file named ``wg100nm.mdl``.
>>> import numpy as np
>>> from simphony.library.ebeam import ebeam_wg_integral_1550
>>> wg1 = ebeam_wg_integral_1550(100e-9)
>>> export_model(wg1, 'wg100nm', wl=np.linspace(1520e-9, 1580e-9, 51))
"""
if not issubclass(model.__class__, Model):
raise ValueError('{} does not extend {}'.format(model, Model))
if wl is None and freq is None:
raise ValueError('Frequency or wavelength range not defined.')
# Convert wavelength to frequency
if freq is None:
freq = wl2freq(wl)[::-1]
# Load all data into a dictionary.
attributes = inspect.getmembers(model, lambda a:not(inspect.isroutine(a)))
attributes = dict([a for a in attributes if not(a[0].startswith('__') and a[0].endswith('__')) and not a[0].startswith('_')])
params = dict()
params["model"] = model.__class__.__name__
params["attributes"] = attributes
params["f"] = freq
params["s"] = model.s_parameters(freq)
# Dump to pickle.
with io.open(filename + '.mdl', 'wb') as f:
pickle.dump(params, f, protocol=pickle.HIGHEST_PROTOCOL)
def import_model(filename, force=False):
"""
Imports a model from file
Parameters
----------
filename : str
The filename (may include path to directory) to load the model from.
Returns
-------
model : class
A class that inherits from simphony.elements.Model that is the
reconstructed model.
Examples
--------
>>> waveguide_100nm = import_model('wg100nano.mdl')
>>> wg = waveguide_100nm()
>>> s = wg.s_parameters(np.linspace(wl2freq(1540e-9), wl2freq(1560e-9), 51))
"""
path, ext = os.path.splitext(filename)
if ext != '.mdl' and not force:
raise ValueError('Requested file {} is not a .mdl file, to force load set parameter ``force=True``.'.format(filename))
with io.open(filename, 'rb') as f:
params = pickle.load(f)
klass = type(
params['model'],
(Model, ),
params['attributes']
)
def s_parameters(self, freq):
"""
Get the s-parameters of a parameterized waveguide.
Parameters
----------
freq : np.ndarray
A frequency array to calculate s-parameters over (in Hz).
Returns
-------
s : np.ndarray
Returns the calculated s-parameter matrix.
"""
return interpolate(freq, self._f, self._s)
setattr(klass, '_f', params['f'])
setattr(klass, '_s', params['s'])
setattr(klass, 's_parameters', s_parameters)
return klass
```
#### File: plugins/siepic/mapping.py
```python
def rearg(component, parameters):
"""
Maps arguments from spice files to the keyword dictionaries accepted
by the built-in model libraries, discarding unused parameters.
Parameters
----------
component : str
parameters : dict
Returns
-------
args : dict
"""
mapping = components[component]
results = {}
for k, v in parameters.items():
if k in mapping:
results[mapping[k]] = v
return results
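# Example using the ebeam_wg_integral_1550 mapping defined below; unmapped
# keys (e.g. 'points') are silently discarded:
#
#     rearg('ebeam_wg_integral_1550', {'wg_length': 1e-5, 'wg_width': 5e-7, 'points': '[]'})
#     # -> {'length': 1e-5, 'width': 5e-7}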
components = {
'ebeam_bdc_te1550': {},
# 'contra_directional_coupler': {},
'ebeam_dc_halfring_straight': {},
'ebeam_dc_te1550': {},
# 'ebeam_disconnected_te1550': {},
# 'ebeam_disconnected_tm1550': {},
# 'ebeam_taper_te1550': {},
'ebeam_terminator_te1550': {},
# 'ebeam_terminator_tm1550': {},
'ebeam_gc_te1550': {},
'ebeam_wg_integral_1550' : {
'wg_length': 'length',
'wg_width': 'width',
},
'ebeam_y_1550': {},
}
```
#### File: siepic/tests/test_siepic.py
```python
import pytest
import os
from simphony.plugins.siepic.parser import load_spi
#==============================================================================
# Test the parser
#==============================================================================
EBeam_sequoiap_A_v2_result = {
'circuits': [
{
'name': 'EBeam_sequoiap_A_v2',
'ports': [
'ebeam_gc_te1550$1_laser',
'ebeam_gc_te1550$1_detector1'
],
'subcircuits': 'EBeam_sequoiap_A_v2',
'params': [
{
'name': 'sch_x',
'value': -1.0
},
{
'name': 'sch_y',
'value': -1.0
}
]
}
],
'subcircuits': [
{
'name': 'EBeam_sequoiap_A_v2',
'ports': ['ebeam_gc_te1550$1_laser', 'ebeam_gc_te1550$1_detector1'],
'components': [
{
'name': 'ebeam_y_1550_67',
'model': 'ebeam_y_1550',
'ports': ['N$80', 'N$81', 'N$82'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': 0.00010077000000000001,
'lay_y': 0.00013824,
'sch_x': 8.339586207,
'sch_y': 11.440551724
}
},
{
'name': 'ebeam_gc_te1550_68',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550$1_laser', 'N$80'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': 7.687e-05,
'lay_y': 0.00013824,
'sch_x': 6.361655172,
'sch_y': 11.440551724
}
},
{
'name': 'ebeam_gc_te1550_69',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550$1_detector1', 'N$83'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': 7.687e-05,
'lay_y': 1.1240000000000002e-05,
'sch_x': 6.361655172,
'sch_y': 0.930206897
}
},
{
'name': 'ebeam_y_1550_70',
'model': 'ebeam_y_1550',
'ports': ['N$83', 'N$85', 'N$84'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': 0.00010077000000000001,
'lay_y': 1.1240000000000002e-05,
'sch_x': 8.339586207,
'sch_y': 0.930206897
}
},
{
'name': 'ebeam_wg_integral_1550_72',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$81', 'N$84'],
'params': {
'library': 'Design kits/ebeam',
'wg_length': 0.000189995,
'wg_width': 5e-07,
'points': '[[108.17,140.99],[138.469,140.99],[138.469,8.49],[108.17,8.49]]',
'radius': 5.0,
'lay_x': 0.000123694,
'lay_y': 7.474e-05,
'sch_x': 10.236744828,
'sch_y': 6.18537931
}
},
{
'name': 'ebeam_wg_integral_1550_83',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$82', 'N$85'],
'params': {
'library': 'Design kits/ebeam',
'wg_length': 0.000149995,
'wg_width': 5e-07,
'points': '[[104.92,389.16],[120.719,389.16],[120.719,267.66],[104.92,267.66]]',
'radius': 5.0,
'lay_x': 0.000116444,
'lay_y': 7.474e-05,
'sch_x': 9.636744828,
'sch_y': 6.18537931
}
}
],
'params': {
'MC_uniformity_width': 0.0,
'MC_uniformity_thickness': 0.0,
'MC_resolution_x': 100.0,
'MC_resolution_y': 100.0,
'MC_grid': 1e-05,
'MC_non_uniform': 99.0
}
}
],
'analyses': [
{
'definition': {
'input_unit': 'wavelength',
'input_parameter': 'start_and_stop'
},
'params': {
'minimum_loss': 80.0,
'analysis_type': 'scattering_data',
'multithreading': 'user_defined',
'number_of_threads': 1.0,
'orthogonal_identifier': 1.0,
'start': 1.5e-06,
'stop': 1.6e-06,
'number_of_points': 3000.0,
'input': ['EBeam_sequoiap_A_v2,ebeam_gc_te1550$1_detector1'],
'output': 'EBeam_sequoiap_A_v2,ebeam_gc_te1550$1_laser'
}
}
]
}
MZI4_result = {
'circuits': [
{
'name': 'MZI4',
'ports': ['ebeam_gc_te1550_detector2', 'ebeam_gc_te1550_laser1'],
'subcircuits': 'MZI4',
'params': [{'name': 'sch_x', 'value': -1.0}, {'name': 'sch_y', 'value': -1.0}]
}
],
'subcircuits': [
{
'name': 'MZI4',
'ports': ['ebeam_gc_te1550_detector2', 'ebeam_gc_te1550_laser1'],
'components': [
{
'name': 'ebeam_y_1550_0',
'model': 'ebeam_y_1550',
'ports': ['N$0', 'N$2', 'N$1'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': 7.4e-06,
'lay_y': 0.000127,
'sch_x': 0.478534829,
'sch_y': 8.212692343
}
},
{
'name': 'ebeam_gc_te1550_1',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550_detector2', 'N$0'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': -1.6500000000000005e-05,
'lay_y': 0.000127,
'sch_x': -1.067003336,
'sch_y': 8.212692343
}
},
{
'name': 'ebeam_gc_te1550_2',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550_laser1', 'N$3'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': -1.6500000000000005e-05,
'lay_y': 0.000254,
'sch_x': -1.067003336,
'sch_y': 16.425384686
}
},
{
'name': 'ebeam_y_1550_3',
'model': 'ebeam_y_1550',
'ports': ['N$6', 'N$5', 'N$4'],
'params': {
'library': 'Design kits/ebeam',
'lay_x': 8.993e-05,
'lay_y': 0.000127,
'sch_x': 5.815491515,
'sch_y': 8.212692343,
'sch_f': 'true'
}
},
{
'name': 'ebeam_wg_integral_1550_4',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$1', 'N$4'],
'params': {
'library': 'Design kits/ebeam',
'wg_length': 6.773e-05,
'wg_width': 5e-07,
'points': '[[14.8,124.25],[82.53,124.25]]',
'radius': 5.0,
'lay_x': 4.866500000000001e-05,
'lay_y': 0.00012425,
'sch_x': 3.147013172,
'sch_y': 8.034858453
}
},
{
'name': 'ebeam_wg_integral_1550_5',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$2', 'N$5'],
'params': {
'library': 'Design kits/ebeam',
'wg_length': 0.000297394,
'wg_width': 5e-07,
'points': '[[14.8,129.75],[28.64,129.75],[28.64,247.68],[75.36,247.68],[75.36,129.75],[82.53,129.75]]',
'radius': 5.0,
'lay_x': 4.866500000000001e-05,
'lay_y': 0.000188715,
'sch_x': 3.147013172,
'sch_y': 12.203608153
}
},
{
'name': 'ebeam_wg_integral_1550_6',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$6', 'N$3'],
'params': {
'library': 'Design kits/ebeam',
'wg_length': 0.000256152,
'wg_width': 5e-07,
'points': '[[97.33,127.0],[114.79,127.0],[114.79,254.0],[0.0,254.0]]',
'radius': 5.0,
'lay_x': 5.777e-05,
'lay_y': 0.0001905,
'sch_x': 3.735805013,
'sch_y': 12.319038514
}
}
],
'params': {
'MC_uniformity_width': 0.0,
'MC_uniformity_thickness': 0.0,
'MC_resolution_x': 100.0,
'MC_resolution_y': 100.0,
'MC_grid': 1e-05,
'MC_non_uniform': 99.0
}
}
],
'analyses': [
{
'definition': {
'input_unit': 'wavelength',
'input_parameter': 'start_and_stop'
},
'params': {
'minimum_loss': 80.0,
'analysis_type': 'scattering_data',
'multithreading': 'user_defined',
'number_of_threads': 1.0,
'orthogonal_identifier': 1.0,
'start': 1.5e-06,
'stop': 1.6e-06,
'number_of_points': 2000.0,
'input': ['MZI4,ebeam_gc_te1550_detector2'],
'output': 'MZI4,ebeam_gc_te1550_laser1'
}
}
]
}
top_result = {
'circuits': [
{
'name': 'top',
'ports': ['ebeam_gc_te1550_laser1', 'ebeam_gc_te1550_detector2', 'ebeam_gc_te1550_detector4', 'ebeam_gc_te1550_detector3'],
'subcircuits': 'top',
'params': [
{'name': 'sch_x', 'value': -1.0},
{'name': 'sch_y', 'value': -1.0}
]
}
],
'subcircuits': [
{
'name': 'top',
'ports': ['ebeam_gc_te1550_laser1', 'ebeam_gc_te1550_detector2', 'ebeam_gc_te1550_detector4', 'ebeam_gc_te1550_detector3'],
'components': [
{
'name': 'ebeam_dc_te1550_0',
'model': 'ebeam_dc_te1550',
'ports': ['N$0', 'N$1', 'N$3', 'N$2'],
'params': {'library': 'Design kits/ebeam', 'wg_width': 5e-07, 'gap': 2e-07, 'radius': 5e-06, 'Lc': 1.5e-05, 'lay_x': 2.36e-06, 'lay_y': 1.2e-07, 'sch_x': 0.082235221, 'sch_y': 0.004181452}
},
{
'name': 'ebeam_gc_te1550_1',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550_laser1', 'N$4'],
'params': {'library': 'Design kits/ebeam', 'lay_x': -0.00013533, 'lay_y': 1.475e-05, 'sch_x': -4.715632378, 'sch_y': 0.513970129}
},
{
'name': 'ebeam_gc_te1550_2',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550_detector2', 'N$5'],
'params': {'library': 'Design kits/ebeam', 'lay_x': -0.00012984, 'lay_y': -7.662e-05, 'sch_x': -4.524330954, 'sch_y': -2.669857037}
},
{
'name': 'ebeam_gc_te1550_3',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550_detector4', 'N$6'],
'params': {'library': 'Design kits/ebeam', 'lay_x': 9.456e-05, 'lay_y': -8.471e-05, 'sch_x': 3.294984096, 'sch_y': -2.951756586, 'sch_r': 180.0}
},
{
'name': 'ebeam_gc_te1550_4',
'model': 'ebeam_gc_te1550',
'ports': ['ebeam_gc_te1550_detector3', 'N$7'],
'params': {'library': 'Design kits/ebeam', 'lay_x': 0.00013005, 'lay_y': 3.253e-05, 'sch_x': 4.531648495, 'sch_y': 1.133521919, 'sch_r': 180.0}
},
{
'name': 'ebeam_wg_integral_1550_5',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$0', 'N$5'],
'params': {'library': 'Design kits/ebeam', 'wg_length': 0.000173487, 'wg_width': 5e-07, 'points': '[[-11.14,-2.23],[-40.45,-2.23],[-40.45,-76.62],[-113.34,-76.62]]', 'radius': 5.0, 'lay_x': -6.224e-05, 'lay_y': -3.9425e-05, 'sch_x': -2.168779718, 'sch_y': -1.373781176}
},
{
'name': 'ebeam_wg_integral_1550_6',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$4', 'N$1'],
'params': {'library': 'Design kits/ebeam', 'wg_length': 0.000116867, 'wg_width': 5e-07, 'points': '[[-118.83,14.75],[-26.47,14.75],[-26.47,2.47],[-11.14,2.47]]', 'radius': 5.0, 'lay_x': -6.4985e-05, 'lay_y': 8.61e-06, 'sch_x': -2.26443043, 'sch_y': 0.300019174}
},
{
'name': 'ebeam_wg_integral_1550_7',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$8', 'N$2'],
'params': {'library': 'Design kits/ebeam', 'wg_length': 7.4217e-05, 'wg_width': 5e-07, 'points': '[[65.87,29.78],[36.16,29.78],[36.16,2.47],[15.86,2.47]]', 'radius': 5.0, 'lay_x': 4.0865e-05, 'lay_y': 1.6125e-05, 'sch_x': 1.423958598, 'sch_y': 0.561882599}
},
{
'name': 'ebeam_wg_integral_1550_8',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$3', 'N$6'],
'params': {'library': 'Design kits/ebeam', 'wg_length': 0.000141577, 'wg_width': 5e-07, 'points': '[[15.86,-2.23],[35.04,-2.23],[35.04,-84.71],[78.06,-84.71]]', 'radius': 5.0, 'lay_x': 4.696e-05, 'lay_y': -4.347000000000001e-05, 'sch_x': 1.636341509, 'sch_y': -1.51473095}
},
{
'name': 'ebeam_y_1550_9',
'model': 'ebeam_y_1550',
'ports': ['N$8', 'N$10', 'N$9'],
'params': {'library': 'Design kits/ebeam', 'lay_x': 7.327e-05, 'lay_y': 2.978e-05, 'sch_x': 2.553124838, 'sch_y': 1.037696979}
},
{
'name': 'ebeam_terminator_te1550_10',
'model': 'ebeam_terminator_te1550',
'ports': ['N$11'],
'params': {'library': 'Design kits/ebeam', 'lay_x': 9.14e-05, 'lay_y': 2.7e-07, 'sch_x': 3.184872529, 'sch_y': 0.009408267, 'sch_r': 270.0}
},
{
'name': 'ebeam_wg_integral_1550_11',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$9', 'N$11'],
'params': {'library': 'Design kits/ebeam', 'wg_length': 3.0488e-05, 'wg_width': 5e-07, 'points': '[[80.67,27.03],[91.4,27.03],[91.4,5.72]]', 'radius': 5.0, 'lay_x': 8.641e-05, 'lay_y': 1.675e-05, 'sch_x': 3.010993821, 'sch_y': 0.5836609940000002}
},
{
'name': 'ebeam_wg_integral_1550_12',
'model': 'ebeam_wg_integral_1550',
'ports': ['N$10', 'N$7'],
'params': {
'library': 'Design kits/ebeam',
'wg_length': 3.288e-05,
'wg_width': 5e-07,
'points': '[[80.67,32.53],[113.55,32.53]]',
'radius': 5.0,
'lay_x': 9.711e-05,
'lay_y': 3.253e-05,
'sch_x': 3.383839949,
'sch_y': 1.133521919
}
}
],
'params': {
'MC_uniformity_width': 0.0,
'MC_uniformity_thickness': 0.0,
'MC_resolution_x': 100.0,
'MC_resolution_y': 100.0,
'MC_grid': 1e-05,
'MC_non_uniform': 99.0
}
}
],
'analyses': [
{
'definition': {
'input_unit': 'wavelength',
'input_parameter': 'start_and_stop'
},
'params': {
'minimum_loss': 80.0,
'analysis_type': 'scattering_data',
'multithreading': 'user_defined',
'number_of_threads': 1.0,
'orthogonal_identifier': 1.0,
'start': 1.5e-06,
'stop': 1.6e-06,
'number_of_points': 2000.0,
'input': ['top,ebeam_gc_te1550_detector2', 'top,ebeam_gc_te1550_detector3', 'top,ebeam_gc_te1550_detector4'],
'output': 'top,ebeam_gc_te1550_laser1'
}
}
]
}
def test_EBeam_sequoiap_A_v2():
filename = os.path.join(os.path.dirname(__file__), 'spice', 'EBeam_sequoiap_A_v2', 'EBeam_sequoiap_A_v2_main.spi')
res = load_spi(filename)
assert res == EBeam_sequoiap_A_v2_result
def test_MZI4():
filename = os.path.join(os.path.dirname(__file__), 'spice', 'MZI4', 'MZI4_main.spi')
res = load_spi(filename)
assert res == MZI4_result
def test_top():
filename = os.path.join(os.path.dirname(__file__), 'spice', 'top', 'top_main.spi')
res = load_spi(filename)
assert res == top_result
#==============================================================================
# Test the builder
#==============================================================================
# import os
# filename = os.path.join('tests', 'spice', 'MZI4', 'MZI4_main.spi')
# filename = os.path.join('tests', 'spice', 'EBeam_sequoiap_A_v2', 'EBeam_sequoiap_A_v2_main.spi')
# filename = os.path.join('tests', 'spice', 'top', 'top_main.spi')
# data = load_spi(filename)
# from simphony.plugins.siepic.builders import build_circuit
# build_circuit(data, 'simphony.library.siepic')
```
#### File: simphony/tests/test_elements.py
```python
import pytest
import simphony.library.ebeam as ebeam
class TestNodes:
def test_rename(self):
wg = ebeam.ebeam_wg_integral_1550(50e-6)
# with pytest.raises(ValueError):
# wg._node_idx_by_name('n3')
# wg.rename_nodes(('n1', 'n3'))
# assert wg._node_idx_by_name('n3') == 1
```
#### File: simphony/tests/test_netlist.py
```python
import pytest
import simphony.library.ebeam as ebeam
from simphony.netlist import Pin, PinList
class TestPin:
class TestCreate:
def test_noargs(self):
p1 = Pin(None, None)
with pytest.raises(AttributeError):
p1.element
with pytest.raises(AttributeError):
p1.index
def test_args(self):
name = 'n1'
p1 = Pin(None, name)
assert p1.name == name
def test_rename(self):
name = 'oldname'
p1 = Pin(None, name)
name = 'newname'
p1.name = name
assert p1.name == name
class TestPinlist:
class TestInstantiate:
def test_create_with_strings(self):
pinnames = ['n1', 'n2', 'n3']
pinlist = PinList(None, *pinnames)
assert len(pinlist) == 3
name = iter(pinnames)
for pin in pinlist:
assert pin.pinlist is pinlist
assert pin.name == next(name)
def test_create_with_pins(self):
length = 4
pins = [Pin(None, 'n' + str(i)) for i in range(length)]
pinlist = PinList(None, *pins)
assert len(pinlist) == length
npin = iter(pins)
for pin in pinlist:
assert pin.pinlist is pinlist
assert pin is next(npin)
def test_create_with_mixed_args(self):
scrambled = ['n1', 'n2', Pin(None, 'n3'), Pin(None, 'n4'), 'n5']
pinlist = PinList(None, *scrambled)
assert len(pinlist) == 5
for pin in pinlist:
assert pin.pinlist is pinlist
class TestGet:
def setup_method(self):
self.length = 4
self.pins = [Pin(None, 'n' + str(i)) for i in range(self.length)]
self.pinlist = PinList(None, *self.pins)
def test_get_with_int(self):
for i in range(self.length):
assert self.pinlist[i] == self.pins[i]
assert self.pinlist[i] is self.pins[i]
def test_get_with_str(self):
for i in range(self.length):
name = self.pins[i].name
assert self.pinlist[name] == self.pins[i]
assert self.pinlist[name] is self.pins[i]
def test_get_with_object(self):
for i in range(self.length):
pin = self.pins[i]
assert self.pinlist[pin] == self.pins[i]
assert self.pinlist[pin] is pin
class TestSet:
# pinlist.pins = ('out', 'in', 'mix')
# pinlist.pins = ('n1')
pass
class TestSwitchPin:
pass
class TestOperators:
def setup_method(self):
self.length = 8
self.pins = [Pin(None, 'n' + str(i)) for i in range(self.length)]
self.pinlist1 = PinList(None, *self.pins[:int(self.length/2)])
self.pinlist2 = PinList(None, *self.pins[int(self.length/2):])
def test_add(self):
self.pinlist_new = self.pinlist1 + self.pinlist2
assert self.pinlist_new is not self.pinlist1
assert self.pinlist_new is not self.pinlist2
piter = iter(self.pins)
for pin in self.pinlist_new:
assert pin is next(piter)
def test_add_empty_to_containing(self):
p1 = PinList(None)
p2 = self.pinlist1
pinlist_new = p1 + p2
for pin in pinlist_new:
assert pin.pinlist is pinlist_new
def test_remove(self):
self.test_add()
self.pinlist_new.remove('n1')
assert self.pinlist_new['n7'].index == 6
self.pinlist_new.remove('n4')
assert self.pinlist_new['n7'].index == 5
```
#### File: simphony/simphony/tools.py
```python
import re
from scipy.constants import c as SPEED_OF_LIGHT
from scipy.interpolate import interp1d
MATH_SUFFIXES = {
"f": "e-15",
"p": "e-12",
"n": "e-9",
"u": "e-6",
"m": "e-3",
"c": "e-2",
"k": "e3",
"M": "e6",
"G": "e9",
"T": "e12",
}
def str2float(num):
"""
Converts a number represented as a string to a float. Can include suffixes
(such as 'u' for micro, 'k' for kilo, etc.).
Parameters
----------
num : str
A string representing a number, optionally with a suffix.
Returns
-------
float
The string converted back to its floating point representation.
Raises
------
ValueError
If the argument is malformed or the suffix is not recognized.
Examples
--------
>>> str2float('14.5c')
0.145
Values without suffixes get converted to floats normally.
>>> str2float('2.53')
2.53
If an unrecognized suffix is present, a ``ValueError`` is raised.
>>> str2float('17.3o')
ValueError: Suffix 'o' in '17.3o' not recognized.
Some floats are represented in exponential notation instead of suffixes,
and we can handle those, too:
>>> str2float('15.2e-6')
1.52e-05
>>> str2float('0.4E6')
400000.0
"""
matches = re.findall(
r"([-+]?[0-9]+(?:[.][0-9]+)?)((?:[eE][-+]?[0-9]+)|(?:[a-zA-Z]))?", num
)
if len(matches) != 1:
raise ValueError("'{}' is malformed".format(num))
num, suffix = matches[0]
try:
if suffix.startswith("e") or suffix.startswith("E"):
return float(num + suffix)
else:
return float(num + (MATH_SUFFIXES[suffix] if suffix != "" else ""))
except KeyError as e:
raise ValueError("Suffix {} in '{}' not recognized.".format(str(e), matches[0]))
def freq2wl(freq):
"""Convenience function for converting from frequency to wavelength.
Parameters
----------
freq : float
The frequency in SI units (Hz).
Returns
-------
wl : float
The wavelength in SI units (m).
"""
return SPEED_OF_LIGHT / freq
def wl2freq(wl):
"""Convenience function for converting from wavelength to frequency.
Parameters
----------
wl : float
The wavelength in SI units (m).
Returns
-------
freq : float
The frequency in SI units (Hz).
"""
return SPEED_OF_LIGHT / wl
def interpolate(resampled, sampled, s_parameters):
"""Returns the result of a cubic interpolation for a given frequency range.
Parameters
----------
resampled : np.ndarray
The desired frequency range for a given input to be interpolated to.
sampled : np.ndarray
A frequency array, indexed matching the given s_parameters.
s_parameters : np.array
S-parameters for each frequency given in input_freq.
Returns
-------
result : np.array
The values of the interpolated function (fitted to the input
s-parameters) evaluated at the ``resampled`` frequencies.
"""
func = interp1d(sampled, s_parameters, kind="cubic", axis=0)
return func(resampled)
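# Sketch (note interp1d's cubic mode needs at least four sample points):
#
#     import numpy as np
#     f_coarse = np.linspace(wl2freq(1.6e-6), wl2freq(1.5e-6), 11)
#     s_coarse = np.random.rand(11, 2, 2)  # placeholder S-data
#     s_fine = interpolate(np.linspace(f_coarse[0], f_coarse[-1], 201), f_coarse, s_coarse)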
def get_subclasses(cls):
"""
Recursively gets all subclasses for a given class, even the subclasses of
subclasses.
If a subclass resides in a model not imported by default by Simphony, those
classes will not be returned. Libraries must be imported first for this
function to be able to find those classes.
Parameters
----------
cls : class
The class to find all the subclasses of.
Yields
-------
subclass : class
Yields the next subclass from the generator.
Notes
-----
To get a list of subclasses, simply use the following syntax::
list(get_subclasses(klass))
"""
for subclass in cls.__subclasses__():
yield from get_subclasses(subclass)
yield subclass
``` |
{
"source": "joamatab/YAMLDash",
"score": 2
} |
#### File: YAMLDash/yamldash/app.py
```python
from multiprocessing import cpu_count
import webbrowser
import dash
from yamldash.layout import layout
from yamldash.layout import theme
ascii_title = r"""
__ __ __ __ _ _____ _
\ \ / //\ | \/ | | | __ \ | |
\ \_/ // \ | \ / | | | | | | __ _ ___| |__
\ // /\ \ | |\/| | | | | | |/ _` / __| '_ \
| |/ ____ \| | | | |____| |__| | (_| \__ \ | | |
|_/_/ \_\_| |_|______|_____/ \__,_|___/_| |_|
"""
app = dash.Dash(
__name__,
external_stylesheets=[
theme,
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css",
],
)
wsgi_app = app.server
app.title = "YAMLDash - Interactive YAML Validation"
app.layout = layout
from yamldash import callbacks # noqa: E402, F401
def run_debug():
app.run_server(debug=True)
def run():
print(ascii_title)
webbrowser.open("127.0.0.1:8080", new=2)
try:
import waitress
print("Listening on 127.0.0.1:8080.")
print("Press CTRL-C to stop.")
waitress.serve(wsgi_app, listen="127.0.0.1:8080", threads=cpu_count())
except ModuleNotFoundError:
print("Waitress server not found (use 'pip install waitress' to install it)")
print("Defaulting to Flask development server.\n")
app.run_server(port=8080)
``` |
{
"source": "joamatab/zeropdk",
"score": 2
} |
#### File: zeropdk/examples/ebeam_pdk.py
```python
import os
import logging
from collections import abc
from zeropdk import Tech
from zeropdk.pcell import PCell
logger = logging.getLogger()
lyp_path = os.path.join(os.path.dirname(__file__), "EBeam.lyp")
# Technology file
EBeam = Tech.load_from_xml(lyp_path)
# Helper functions
def draw_ports(cell, ports):
""" Draws ports in the Pin Recognition layer (SiEPIC)
"""
if isinstance(ports, abc.Mapping): # dictionary
for port in ports.values():
port.draw(cell, EBeam.layers["PinRec"])
elif isinstance(ports, abc.Sequence): # list
for port in ports:
port.draw(cell, EBeam.layers["PinRec"])
else:
raise RuntimeError("Give a list or dict of Ports")
# PCells
from zeropdk.default_library.io import DCPad, DCPadArray
from zeropdk.pcell import PCellParameter, TypeLayer, ParamContainer
# Overriding default layers
class DCPad(DCPad):
params = ParamContainer(
PCellParameter(
name="layer_metal",
type=TypeLayer,
description="Metal Layer",
default=EBeam.layers["M1"],
),
PCellParameter(
name="layer_opening",
type=TypeLayer,
description="Open Layer",
default=EBeam.layers["13_MLopen"],
),
)
class DCPadArray(DCPadArray):
params = ParamContainer(
PCellParameter(
name="layer_metal",
type=TypeLayer,
description="Metal Layer",
default=EBeam.layers["M1"],
),
PCellParameter(
name="layer_opening",
type=TypeLayer,
description="Open Layer",
default=EBeam.layers["13_MLopen"],
),
)
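# Hedged usage sketch (mirroring tests/cells/test_pcell_library.py below):
#
#     import klayout.db as kdb
#     layout = kdb.Layout()
#     pad = DCPad(name="pad")
#     cell, ports = pad.new_cell(layout)
#     draw_ports(cell, ports)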
```
#### File: tests/cells/test_pcell_library.py
```python
import pytest
from ..context import zeropdk # noqa
from zeropdk.default_library import io
import klayout.db as kdb
DCPad = io.DCPad
@pytest.fixture
def top_cell():
def _top_cell():
layout = kdb.Layout()
layout.dbu = 0.001
TOP = layout.create_cell("TOP")
return TOP, layout
return _top_cell
def test_pad_pcell(top_cell):
pad = DCPad(name="testname")
pad.params.layer_metal = kdb.LayerInfo(1, 0)
pad.params.layer_opening = kdb.LayerInfo(2, 0)
# This will get automatically converted to LayerInfo
# No Error
pad.params.layer_metal = "1/0"
# TODO set defaults here
TOP, layout = top_cell()
cell, ports = pad.new_cell(layout)
assert "el0" in ports
origin, angle = kdb.DPoint(0, 0), 0
TOP.insert_cell(cell, origin, angle)
TOP.write("tests/tmp/pad.gds")
```
#### File: tests/layout/test_layout_write.py
```python
import pytest
from ..context import zeropdk # noqa
from zeropdk.layout.polygons import rectangle
from zeropdk.layout import insert_shape
import klayout.db as kdb
@pytest.fixture
def top_cell():
def _top_cell():
layout = kdb.Layout()
layout.dbu = 0.001
TOP = layout.create_cell("TOP")
return TOP, layout
return _top_cell
def test_rectangle_write(top_cell):
TOP, layout = top_cell()
layer = "1/0"
center = kdb.DPoint(0, 0)
width = 20
height = 10
ex = kdb.DVector(1, 1)
ey = kdb.DVector(0, 1)
r = rectangle(center, width, height, ex, ey)
assert repr(r) == "(-10,-15;-10,-5;10,15;10,5)"
insert_shape(TOP, layer, r)
TOP.write("tests/tmp/test_rectangle.gds")
```
#### File: tests/layout/test_points.py
```python
import random
import numpy as np
from ..context import zeropdk # noqa
import klayout.db as kdb
def random_point(Point, a=-10, b=10):
x = random.uniform(a, b)
y = random.uniform(a, b)
p = Point(x, y)
return p
def test_add_sub():
p1 = random_point(kdb.Point)
p2 = random_point(kdb.Point)
sump = p1 + p2
assert sump.x == p1.x + p2.x
assert sump.y == p1.y + p2.y
assert isinstance(sump, kdb.Point)
diffp = p2 - p1
assert diffp.x == p2.x - p1.x
assert diffp.y == p2.y - p1.y
assert isinstance(diffp, kdb.Vector)
assert p1 == (sump - diffp) / 2
assert p2 == (sump + diffp) / 2
def test_mul():
p_classes = (kdb.Point, kdb.Vector)
for p_class in p_classes:
p1 = random_point(p_class)
p2 = random_point(p_class)
assert p1 * p2 == p1.x * p2.x + p1.y * p2.y
p3 = p1 * 2
assert p3.x == p1.x * 2
assert p3.y == p1.y * 2
def test_numpy():
t = np.arange(3)
ex = kdb.Point(1, 0)
# Point should consume a numpy array and produce a np.array of points
point_array = t * ex
assert isinstance(point_array, np.ndarray)
assert np.all([0 * ex, 1 * ex, 2 * ex] == point_array)
```
#### File: tests/layout/test_waveguide.py
```python
import numpy as np
import pytest
from ..context import zeropdk # noqa
from zeropdk.layout.waveguides import waveguide_dpolygon
from zeropdk.layout import insert_shape
import klayout.db as kdb
@pytest.fixture
def top_cell():
def _top_cell():
layout = kdb.Layout()
layout.dbu = 0.001
TOP = layout.create_cell("TOP")
return TOP, layout
return _top_cell
def test_waveguide(top_cell):
t = np.linspace(-1, 1, 100)
ex = kdb.DPoint(1, 0)
ey = kdb.DPoint(0, 1)
# list of points depicting a parabola
points_list = 100 * t * ex + 100 * t ** 2 * ey
dbu = 0.001
width = 1
wg = waveguide_dpolygon(points_list, width, dbu, smooth=True)
# write to test_waveguide.gds (we should see a parabola)
TOP, layout = top_cell()
layer = "1/0"
insert_shape(TOP, layer, wg)
TOP.write("tests/tmp/test_waveguide.gds")
```
#### File: tests/technology/test_xml.py
```python
from ..context import zeropdk # noqa
from pathlib import Path
import os
from zeropdk.tech import Tech
import klayout.db as kdb
def test_load_from_xml():
filepath = Path(os.path.dirname(__file__)).resolve() / "EBeam.lyp"
ebeam = Tech.load_from_xml(filepath)
assert ebeam.layers["M1"] == kdb.LayerInfo(41, 0, "M1")
``` |