code
stringlengths 20
1.05M
| apis
sequence | extract_api
stringlengths 75
5.24M
|
---|---|---|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-24 01:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Passenger',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('sex', models.CharField(max_length=10)),
('survived', models.BooleanField()),
('age', models.FloatField()),
('ticket_class', models.PositiveSmallIntegerField()),
('embarked', models.CharField(max_length=100)),
],
),
]
| [
"django.db.models.FloatField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.CharField"
] | [((389, 482), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (405, 482), False, 'from django.db import migrations, models\n'), ((507, 539), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (523, 539), False, 'from django.db import migrations, models\n'), ((567, 598), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (583, 598), False, 'from django.db import migrations, models\n'), ((631, 652), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (650, 652), False, 'from django.db import migrations, models\n'), ((680, 699), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (697, 699), False, 'from django.db import migrations, models\n'), ((736, 770), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {}), '()\n', (768, 770), False, 'from django.db import migrations, models\n'), ((803, 835), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (819, 835), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from py_trade_signal.exception import TradeSignalException
from py_trade_signal.signal_utils import SignalUtils as SU
from finta.utils import trending_up, trending_down
from finta import TA
import pandas as pd
import numpy as np
class MacdSignal(object):
__module__ = "py_trade_signal"
def __init__(self,
df: pd.DataFrame,
df2: pd.DataFrame = None):
self.df = df
self.df2 = df2
def buy(self,
period_fast: int = 12,
period_slow: int = 26,
signal: int = 9,
column: str = "close",
adjust: bool = True,
period_fast2: int = 12,
period_slow2: int = 26,
signal2: int = 9) -> np.bool_:
"""Calculate a moving average convergence-divergence buy signal from a bullish signal crossover.
An optional second dataframe can be used to calculate the signal for a different time period
:param period_fast:
:param period_slow:
:param signal:
:param column:
:param adjust:
:param period_fast2:
:param period_slow2:
:param signal2:
:return bool:
"""
try:
raw_macd = TA.MACD(self.df, period_fast, period_slow, signal, column, adjust)
except TradeSignalException as error:
raise error
else:
_is_negative = raw_macd["MACD"].iloc[-1] < 0
_below_signal = raw_macd["MACD"].iloc[-1] < raw_macd["SIGNAL"].iloc[-1]
_trending_up = trending_up(raw_macd["MACD"].iloc[:-2], period=int(period_fast/2)).iloc[-1]
buying = _is_negative and _below_signal and _trending_up
if SU.is_valid_dataframe(self.df2):
try:
raw_macd2 = TA.MACD(self.df2, period_fast2, period_slow2, signal2, column, adjust)
except TradeSignalException as error:
raise error
else:
_is_negative2 = raw_macd2["MACD"].iloc[-1] < 0
_below_signal2 = raw_macd2["MACD"].iloc[-1] < raw_macd2["SIGNAL"].iloc[-1]
_trending_up2 = trending_up(raw_macd2["MACD"].iloc[:-2], period=int(period_fast2/2)).iloc[-1]
buying = buying and _is_negative2 and _below_signal2 and _trending_up2
return buying
def sell(self,
period_fast: int = 12,
period_slow: int = 26,
signal: int = 9,
column: str = "close",
adjust: bool = True,
period_fast2: int = 12,
period_slow2: int = 26,
signal2: int = 9) -> np.bool_:
"""Calculate a moving average convergence-divergence sell signal from a bullish signal crossover.
An optional second dataframe can be used to calculate the signal for a different time period
:param period_fast:
:param period_slow:
:param signal:
:param column:
:param adjust:
:param period_fast2:
:param period_slow2:
:param signal2:
:return bool:
"""
try:
raw_macd = TA.MACD(self.df, period_fast, period_slow, signal, column, adjust)
except TradeSignalException as error:
raise error
else:
_is_positive = raw_macd["MACD"].iloc[-1] > 0
_above_signal = raw_macd["MACD"].iloc[-1] > raw_macd["SIGNAL"].iloc[-1]
_trending_down = trending_down(raw_macd["MACD"].iloc[:-2], period=int(period_fast/2)).iloc[-1]
selling = _is_positive and _above_signal and _trending_down
if SU.is_valid_dataframe(self.df2):
try:
raw_macd2 = TA.MACD(self.df2, period_fast2, period_slow2, signal2, column, adjust)
except TradeSignalException as error:
raise error
else:
_is_positive2 = raw_macd2["MACD"].iloc[-1] > 0
_above_signal2 = raw_macd2["MACD"].iloc[-1] < raw_macd2["SIGNAL"].iloc[-1]
_trending_down2 = trending_down(raw_macd2["MACD"].iloc[:-2], period=int(period_fast2/2)).iloc[-1]
selling = selling and _is_positive2 and _above_signal2 and _trending_down2
return selling
| [
"finta.TA.MACD",
"py_trade_signal.signal_utils.SignalUtils.is_valid_dataframe"
] | [((1273, 1339), 'finta.TA.MACD', 'TA.MACD', (['self.df', 'period_fast', 'period_slow', 'signal', 'column', 'adjust'], {}), '(self.df, period_fast, period_slow, signal, column, adjust)\n', (1280, 1339), False, 'from finta import TA\n'), ((1755, 1786), 'py_trade_signal.signal_utils.SignalUtils.is_valid_dataframe', 'SU.is_valid_dataframe', (['self.df2'], {}), '(self.df2)\n', (1776, 1786), True, 'from py_trade_signal.signal_utils import SignalUtils as SU\n'), ((3201, 3267), 'finta.TA.MACD', 'TA.MACD', (['self.df', 'period_fast', 'period_slow', 'signal', 'column', 'adjust'], {}), '(self.df, period_fast, period_slow, signal, column, adjust)\n', (3208, 3267), False, 'from finta import TA\n'), ((3690, 3721), 'py_trade_signal.signal_utils.SignalUtils.is_valid_dataframe', 'SU.is_valid_dataframe', (['self.df2'], {}), '(self.df2)\n', (3711, 3721), True, 'from py_trade_signal.signal_utils import SignalUtils as SU\n'), ((1841, 1911), 'finta.TA.MACD', 'TA.MACD', (['self.df2', 'period_fast2', 'period_slow2', 'signal2', 'column', 'adjust'], {}), '(self.df2, period_fast2, period_slow2, signal2, column, adjust)\n', (1848, 1911), False, 'from finta import TA\n'), ((3776, 3846), 'finta.TA.MACD', 'TA.MACD', (['self.df2', 'period_fast2', 'period_slow2', 'signal2', 'column', 'adjust'], {}), '(self.df2, period_fast2, period_slow2, signal2, column, adjust)\n', (3783, 3846), False, 'from finta import TA\n')] |
from textblob import TextBlob
import sys
class SearchResultOptimizer(object):
# the search filter documents gets filterd by category
def filter_search_documents_by_category(self, documents):
category_score_map = {}
category_map = {}
for document in documents:
doc_source = document['_source']
doc_score = document['_score']
# get the category
category = doc_source['category']
if category not in category_score_map:
category_score_map[category] = doc_score
category_map[category] = [doc_source]
else:
category_score_map[category] += doc_score
category_map[category].append(doc_source)
max_key = ''
max_val = -sys.maxsize
for key in category_score_map:
score = category_score_map[key]
if score > max_val:
max_key = key
max_val = score
return category_map[max_key]
def filter_meaningful_products(self, item_query, products):
# find the noun in the query with which search is made
blob = TextBlob(item_query)
blob_tags = blob.tags
# get the list of noun words in the query
noun_words_in_query = []
# loop over the tags associated with each word in the query
for blob in blob_tags:
word = blob[0]
tag = blob[1]
# Singular nouns are NN and plural nouns are NNS in the tags, any tag other then that is not a noun
if tag == 'NN' or tag == 'NNS':
noun_words_in_query.append(word)
# will contain the list of products removed
products_removed = []
for product in products:
product_title = product['title']
noun_not_in_product_counter = 0
# if the noun word doesn't appear in the title of the product then remove that product from the list of products
# this solves the problem where when you want green grapes you get green pepperes instead
for noun in noun_words_in_query:
# if noun is not in the list of products then find the similarity that the noun has to the product
if noun not in product_title.lower():
scores = self.__check_similarity(noun, product_title)
noun_not_in_product_counter += 1
for score in scores:
if score > 90:
# if a score is greater than 90 then the process is undone
noun_not_in_product_counter -=1
break
# if the noun not in product counter equals the length of the list of query nouns then the search product result is invalid
# we remove this product as it is not relevant and it was based on adjectives like green or sparkling
if noun_not_in_product_counter == len(noun_words_in_query):
products_removed.append(product)
products.remove(product)
return products_removed
# Lets say you have a word called weat and a sentance called wheat flour good stuff
# then it will find the similarity for each word in whweat flour good stuff and a similarity score over 90 indicates that the product was just mispelled
def __check_similarity(self, word_a, sentance):
word_a = word_a.lower()
sentance = sentance.lower()
sentance_words = sentance.split(' ')
similarity_scores = []
for word in sentance_words:
similarity_score = self.__check_word_similarity(word_a, word)
similarity_scores.append(similarity_score)
return similarity_scores
def __check_word_similarity(self, word_a, word_b):
word_a = word_a.lower()
word_b = word_b.lower()
counter = 0
for word in word_a:
if word in word_b:
counter += 1
num_chars_a = len(word_a)
similariy_count = counter / num_chars_a
return similariy_count * 100 | [
"textblob.TextBlob"
] | [((1172, 1192), 'textblob.TextBlob', 'TextBlob', (['item_query'], {}), '(item_query)\n', (1180, 1192), False, 'from textblob import TextBlob\n')] |
# Copyright (c) 2013, MN Technique and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import csv
from frappe import _
def execute(filters=None):
columns, data = [], []
columns = get_columns()
data = get_data(filters)
return columns, data
def get_columns():
return [
_("Settlement No") + "::150",
_("Settlement Date") + ":Date:80",
_("Order No") + "::100",
_("Order Item No") + "::100",
_("Order Date") + ":Date:80",
_("Status") + "::50",
_("SKU") + "::75",
_("Title") + "::200",
_("MRP") + ":Currency:75",
_("Qty") + "::30",
_("BizOne Principal") + ":Currency:75",
_("Order Total") + ":Currency:75",
_("Total Marketplace Fee") + ":Currency:75",
_("Service Tax") + ":Currency:75",
_("Swachh Bharat Cess Tax") + ":Currency:75",
_("Krishi Kalyan Cess Tax") + ":Currency:75",
_("Settlement Value") + ":Currency:75",
_("Realization") + ":Percent:75"
]
def get_data(filters=None):
if not filters:
filters = {}
#Place flipkart order id as header filter.
header_filters = []
# if filters.get("flipkart_order_id"):
# header_filters = {"flipkart_order_id" : filters.pop("flipkart_order_id")}
if filters.get("from_date") and filters.get("to_date"):
fromdate = frappe.utils.get_datetime(filters.get("from_date"));
todate = frappe.utils.get_datetime(filters.get("to_date"));
if fromdate > todate:
frappe.msgprint("From Date exceeds To Date")
return []
header_filters.append(["settlement_date", ">=", fromdate])
header_filters.append(["settlement_date", "<=", todate])
out = []
payments = frappe.get_all("EPI Flipkart Order Payment", filters=header_filters, fields=["*"], order_by="settlement_date")
order_item_value_total = 0.0
settlement_total = 0.0
for payment in payments:
data_filters = [["parent", "=", payment["name"]]]
if filters.get("settlement_ref_no"):
data_filters.append(["settlement_ref_no", "=", filters.get("settlement_ref_no")])
if filters.get("order_status"):
data_filters.append(["order_status", "=", filters.get("order_status")])
payment_items = frappe.get_all("EPI Flipkart Order Payment Item", filters=data_filters, fields=["*"], order_by="sku_id")
for pi in payment_items:
row = []
row.append(pi.settlement_ref_no)
row.append(payment.settlement_date)
row.append(payment.flipkart_order_id)
row.append(pi.order_item_id)
row.append(payment.order_date)
row.append(pi.order_status)
row.append(pi.sku_id)
bizone_mrp = 0.0
title_id = frappe.db.get_value("EPI Catalog Listing", filters={"sku_id_flipkart": pi.sku_id}, fieldname="name")
if title_id:
title = frappe.get_doc("EPI Catalog Listing", title_id)
row.append(title.item_name_flipkart or title.item_name)
row.append(title.mrp)
bizone_mrp = title.mrp
else:
row.append("-")
row.append(0.0)
row.append(pi.qty)
bizone_principal = pi.qty * bizone_mrp
row.append(bizone_principal)
row.append(pi.order_item_value)
order_item_value_total += pi.order_item_value
row.append(pi.total_marketplace_fee)
row.append(pi.service_tax)
row.append(pi.swachh_bharat_cess_tax)
row.append(pi.krishi_kalyan_cess_tax)
row.append(pi.settlement_value)
settlement_total += pi.settlement_value
realization = 0.0
if bizone_principal > 0.0:
realization = (pi.settlement_value * 100) / bizone_principal
row.append(realization)
out.append(row)
#out.append(["", "", "", "", "", "","", "", order_item_value_total, "", "", "", "", settlement_total, ""])
return out
| [
"frappe.db.get_value",
"frappe._",
"frappe.msgprint",
"frappe.get_doc",
"frappe.get_all"
] | [((1628, 1743), 'frappe.get_all', 'frappe.get_all', (['"""EPI Flipkart Order Payment"""'], {'filters': 'header_filters', 'fields': "['*']", 'order_by': '"""settlement_date"""'}), "('EPI Flipkart Order Payment', filters=header_filters, fields\n =['*'], order_by='settlement_date')\n", (1642, 1743), False, 'import frappe\n'), ((2133, 2241), 'frappe.get_all', 'frappe.get_all', (['"""EPI Flipkart Order Payment Item"""'], {'filters': 'data_filters', 'fields': "['*']", 'order_by': '"""sku_id"""'}), "('EPI Flipkart Order Payment Item', filters=data_filters,\n fields=['*'], order_by='sku_id')\n", (2147, 2241), False, 'import frappe\n'), ((350, 368), 'frappe._', '_', (['"""Settlement No"""'], {}), "('Settlement No')\n", (351, 368), False, 'from frappe import _\n'), ((382, 402), 'frappe._', '_', (['"""Settlement Date"""'], {}), "('Settlement Date')\n", (383, 402), False, 'from frappe import _\n'), ((419, 432), 'frappe._', '_', (['"""Order No"""'], {}), "('Order No')\n", (420, 432), False, 'from frappe import _\n'), ((446, 464), 'frappe._', '_', (['"""Order Item No"""'], {}), "('Order Item No')\n", (447, 464), False, 'from frappe import _\n'), ((478, 493), 'frappe._', '_', (['"""Order Date"""'], {}), "('Order Date')\n", (479, 493), False, 'from frappe import _\n'), ((510, 521), 'frappe._', '_', (['"""Status"""'], {}), "('Status')\n", (511, 521), False, 'from frappe import _\n'), ((534, 542), 'frappe._', '_', (['"""SKU"""'], {}), "('SKU')\n", (535, 542), False, 'from frappe import _\n'), ((555, 565), 'frappe._', '_', (['"""Title"""'], {}), "('Title')\n", (556, 565), False, 'from frappe import _\n'), ((579, 587), 'frappe._', '_', (['"""MRP"""'], {}), "('MRP')\n", (580, 587), False, 'from frappe import _\n'), ((608, 616), 'frappe._', '_', (['"""Qty"""'], {}), "('Qty')\n", (609, 616), False, 'from frappe import _\n'), ((629, 650), 'frappe._', '_', (['"""BizOne Principal"""'], {}), "('BizOne Principal')\n", (630, 650), False, 'from frappe import _\n'), ((671, 687), 'frappe._', '_', (['"""Order Total"""'], {}), "('Order Total')\n", (672, 687), False, 'from frappe import _\n'), ((708, 734), 'frappe._', '_', (['"""Total Marketplace Fee"""'], {}), "('Total Marketplace Fee')\n", (709, 734), False, 'from frappe import _\n'), ((755, 771), 'frappe._', '_', (['"""Service Tax"""'], {}), "('Service Tax')\n", (756, 771), False, 'from frappe import _\n'), ((792, 819), 'frappe._', '_', (['"""Swachh Bharat Cess Tax"""'], {}), "('Swachh Bharat Cess Tax')\n", (793, 819), False, 'from frappe import _\n'), ((840, 867), 'frappe._', '_', (['"""Krishi Kalyan Cess Tax"""'], {}), "('Krishi Kalyan Cess Tax')\n", (841, 867), False, 'from frappe import _\n'), ((888, 909), 'frappe._', '_', (['"""Settlement Value"""'], {}), "('Settlement Value')\n", (889, 909), False, 'from frappe import _\n'), ((930, 946), 'frappe._', '_', (['"""Realization"""'], {}), "('Realization')\n", (931, 946), False, 'from frappe import _\n'), ((1424, 1468), 'frappe.msgprint', 'frappe.msgprint', (['"""From Date exceeds To Date"""'], {}), "('From Date exceeds To Date')\n", (1439, 1468), False, 'import frappe\n'), ((2558, 2663), 'frappe.db.get_value', 'frappe.db.get_value', (['"""EPI Catalog Listing"""'], {'filters': "{'sku_id_flipkart': pi.sku_id}", 'fieldname': '"""name"""'}), "('EPI Catalog Listing', filters={'sku_id_flipkart': pi.\n sku_id}, fieldname='name')\n", (2577, 2663), False, 'import frappe\n'), ((2687, 2734), 'frappe.get_doc', 'frappe.get_doc', (['"""EPI Catalog Listing"""', 'title_id'], {}), "('EPI Catalog Listing', title_id)\n", (2701, 2734), False, 'import frappe\n')] |
import json
import os
import unittest
import unittest.mock as mock
from urllib.parse import urlparse, parse_qs
from .context import requests
def load_google_sample():
fname = os.path.join(os.path.dirname(__file__), "sample.google.json")
with open(fname, "rb") as fp:
return fp.read()
def load_HERE_sample():
fname = os.path.join(os.path.dirname(__file__), "sample.here.json")
with open(fname, "rb") as fp:
return fp.read()
class GoogleGeocodeServiceTest(unittest.TestCase):
def setUp(self):
self.service = requests.GoogleGeocodeService()
def test_prepare(self):
url = self.service.prepare({"APP_KEY": "foo"},
"1+Way+There+Some+Place")
parsed = urlparse(url)
qs = parse_qs(parsed.query)
self.assertEqual(qs, {"address": ["1+Way+There+Some+Place"],
"key": ["foo"]})
result_url, rest = url.split("?", 1)
self.assertEqual(result_url, self.service.url)
def test_process_response_ok(self):
data = load_google_sample().decode()
result = self.service.process_response(data)
self.assertEqual(result, {"lat": '37.4224082',
"lng": '-122.0856086'})
# Verify unexpected data is dropped
js = '''{
"results": [
{
"geometry": {
"location": {
"lat": "37.4224082",
"lng": "-122.0856086",
"other": "extra"
}
}
}
]
}'''
result = self.service.process_response(js)
self.assertEqual(result, {"lat": "37.4224082",
"lng": "-122.0856086"})
# Location is not found. Not an error.
self.assertEqual({}, self.service.process_response('{"results": [] }'))
def test_process_response_fail(self):
with self.assertRaises(requests.DataProcessingError):
self.service.process_response('{"results": [{"geometry": {}}] }')
with self.assertRaises(requests.DataProcessingError):
self.service.process_response('{"results": [{"geometry": {"location": {}}}] }')
with self.assertRaises(requests.DataProcessingError):
js = '{"results": [{"geometry": {"location": {"lat": 37.4224082}}}] }'
self.service.process_response(js)
with self.assertRaises(requests.DataProcessingError):
js = '{"results": [{"geometry": {"location": {"lng": -122.0856086}}}] }'
self.service.process_response(js)
class HEREGeocodeServiceTest(unittest.TestCase):
def setUp(self):
self.service = requests.HEREGeocodeService()
def test_prepare(self):
url = self.service.prepare({"APP_ID": "foo", "APP_CODE": "bar"},
"1+Way+There+Some+Place")
parsed = urlparse(url)
qs = parse_qs(parsed.query)
self.assertEqual(qs, {"searchtext": ["1+Way+There+Some+Place"],
"app_id": ["foo"],
"app_code": ["bar"]})
result_url, rest = url.split("?", 1)
self.assertEqual(result_url, self.service.url)
def test_process_response_ok(self):
data = load_HERE_sample().decode()
result = self.service.process_response(data)
self.assertEqual(result, {"lat": '41.88449',
"lng": '-87.6387699'})
# Verify unexpected data is dropped
js = '''{
"Response": {
"View": [
{
"Result": [
{
"Location": {
"NavigationPosition": [
{
"Latitude": "37.4224082",
"Longitude": "-122.0856086",
"other": "extra"
}
]
}
}
]
}
]
}
}'''
result = self.service.process_response(js)
self.assertEqual(result, {"lat": "37.4224082",
"lng": "-122.0856086"})
# Location not found. Not an error.
self.assertEqual({}, self.service.process_response('{"Response": {"View": []} }'))
def test_process_response_fail(self):
with self.assertRaises(requests.DataProcessingError):
self.service.process_response('{"Response": {} }')
with self.assertRaises(requests.DataProcessingError):
self.service.process_response('{"Response": {"View": [{"Result": []}]} }')
with self.assertRaises(requests.DataProcessingError):
self.service.process_response('{"Response": {"View": [{"Result": [{"Location": {}}]}]} }')
with self.assertRaises(requests.DataProcessingError):
js = '''{
"Response": {
"View": [
{
"Result": [
{
"Location": {
"NavigationPosition": []
}
}
]
}
]
}
}'''
self.service.process_response(js)
with self.assertRaises(requests.DataProcessingError):
js = '''{
"Response": {
"View": [
{
"Result": [
{
"Location": {
"NavigationPosition": [
{
"Latitude": "123"
}
]
}
}
]
}
]
}
}'''
self.service.process_response(js)
with self.assertRaises(requests.DataProcessingError):
js = '''{
"Response": {
"View": [
{
"Result": [
{
"Location": {
"NavigationPosition": [
{
"Longitude": "-678"
}
]
}
}
]
}
]
}
}'''
self.service.process_response(js)
class GeocodeLookupTests(unittest.TestCase):
def test_init(self):
with self.assertRaises(requests.GeocodeLookup.ConfigError):
requests.GeocodeLookup({}, {})
with self.assertRaises(requests.GeocodeLookup.ConfigError):
requests.GeocodeLookup({"services": []}, {})
# Unknown services
with self.assertRaises(requests.GeocodeLookup.ConfigError):
requests.GeocodeLookup({"services": ["foo", "bar"]}, {})
# Known but no credentials
with self.assertRaises(requests.GeocodeLookup.ConfigError):
requests.GeocodeLookup({"services": ["HERE", "google"]}, {})
# Known but wrong credentials
with self.assertRaises(requests.GeocodeLookup.ConfigError):
requests.GeocodeLookup({"services": ["HERE", "google"]},
{"HERE": {"user": "alice", "password": "<PASSWORD>"},
"google": {"user": "alice", "password": "<PASSWORD>"}})
obj = requests.GeocodeLookup({"services": ["HERE", "google"],
"HERE": {"url":
"https://geocoder.cit.api.here.com/6.2/geocode.json"}},
{"HERE": {"APP_ID": "foo", "APP_CODE": "bar"},
"google": {"APP_KEY": "thing1"}})
self.assertEqual(obj._services["HERE"].url, "https://geocoder.cit.api.here.com/6.2/geocode.json")
@mock.patch('urllib.request.urlopen')
def test_request_success_google(self, urlopen):
request = mock.MagicMock(code=200)
request.read.return_value = load_google_sample()
urlopen.return_value = request
obj = requests.GeocodeLookup({"services": ["google"]},
{"google": {"APP_KEY": "thing1"}})
result = obj.request("1600+Amphitheatre+Parkway+Mountain+View+CA")
self.assertEqual(result, {"location": {"lat": "37.4224082", "lng": "-122.0856086"},
"served_by": "google"})
@mock.patch('urllib.request.urlopen')
def test_request_success_HERE(self, urlopen):
request = mock.MagicMock(code=200)
request.read.return_value = load_HERE_sample()
urlopen.return_value = request
obj = requests.GeocodeLookup({"services": ["HERE"]},
{"HERE": {"APP_ID": "thing1",
"APP_CODE": "thing2"}})
result = obj.request("425+W+Randolph+Chicago")
self.assertEqual(result, {"location": {"lat": "41.88449", "lng": "-87.6387699"},
"served_by": "HERE"})
@mock.patch('urllib.request.urlopen')
def test_request_success_fallback(self, urlopen):
fail_request = mock.MagicMock()
fail_request.code = 404
success_request = mock.MagicMock()
success_request.read.return_value = load_HERE_sample()
success_request.code = 200
urlopen.side_effect = [fail_request, success_request]
obj = requests.GeocodeLookup({"services": ["google", "HERE"]},
{"google": {"APP_KEY": "foo"},
"HERE": {"APP_ID": "thing1",
"APP_CODE": "thing2"}})
result = obj.request("425+W+Randolph+Chicago")
self.assertEqual(result, {"location": {"lat": "41.88449", "lng": "-87.6387699"},
"served_by": "HERE"})
@mock.patch('urllib.request.urlopen')
def test_request_success_not_found(self, urlopen):
request = mock.MagicMock()
request.read.return_value = b'{"Response": {"View": []} }'
request.code = 200
urlopen.return_value = request
obj = requests.GeocodeLookup({"services": ["HERE"]},
{"HERE": {"APP_ID": "thing1",
"APP_CODE": "thing2"}})
result = obj.request("This%20Old%20House")
self.assertEqual(result, {})
@mock.patch('urllib.request.urlopen')
def test_request_failure(self, urlopen):
request = mock.MagicMock(code=403)
urlopen.return_value = request
with self.assertRaises(requests.GeocodeLookup.Error):
obj = requests.GeocodeLookup({"services": ["HERE"]},
{"HERE": {"APP_ID": "thing1",
"APP_CODE": "thing2"}})
result = obj.request("425+W+Randolph+Chicago")
self.assertEqual(result, None)
| [
"urllib.parse.urlparse",
"unittest.mock.MagicMock",
"os.path.dirname",
"urllib.parse.parse_qs",
"unittest.mock.patch"
] | [((8048, 8084), 'unittest.mock.patch', 'mock.patch', (['"""urllib.request.urlopen"""'], {}), "('urllib.request.urlopen')\n", (8058, 8084), True, 'import unittest.mock as mock\n'), ((8643, 8679), 'unittest.mock.patch', 'mock.patch', (['"""urllib.request.urlopen"""'], {}), "('urllib.request.urlopen')\n", (8653, 8679), True, 'import unittest.mock as mock\n'), ((9273, 9309), 'unittest.mock.patch', 'mock.patch', (['"""urllib.request.urlopen"""'], {}), "('urllib.request.urlopen')\n", (9283, 9309), True, 'import unittest.mock as mock\n'), ((10123, 10159), 'unittest.mock.patch', 'mock.patch', (['"""urllib.request.urlopen"""'], {}), "('urllib.request.urlopen')\n", (10133, 10159), True, 'import unittest.mock as mock\n'), ((10677, 10713), 'unittest.mock.patch', 'mock.patch', (['"""urllib.request.urlopen"""'], {}), "('urllib.request.urlopen')\n", (10687, 10713), True, 'import unittest.mock as mock\n'), ((195, 220), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (210, 220), False, 'import os\n'), ((354, 379), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (369, 379), False, 'import os\n'), ((751, 764), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (759, 764), False, 'from urllib.parse import urlparse, parse_qs\n'), ((778, 800), 'urllib.parse.parse_qs', 'parse_qs', (['parsed.query'], {}), '(parsed.query)\n', (786, 800), False, 'from urllib.parse import urlparse, parse_qs\n'), ((2910, 2923), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (2918, 2923), False, 'from urllib.parse import urlparse, parse_qs\n'), ((2937, 2959), 'urllib.parse.parse_qs', 'parse_qs', (['parsed.query'], {}), '(parsed.query)\n', (2945, 2959), False, 'from urllib.parse import urlparse, parse_qs\n'), ((8155, 8179), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'code': '(200)'}), '(code=200)\n', (8169, 8179), True, 'import unittest.mock as mock\n'), ((8748, 8772), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'code': '(200)'}), '(code=200)\n', (8762, 8772), True, 'import unittest.mock as mock\n'), ((9387, 9403), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (9401, 9403), True, 'import unittest.mock as mock\n'), ((9462, 9478), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (9476, 9478), True, 'import unittest.mock as mock\n'), ((10233, 10249), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (10247, 10249), True, 'import unittest.mock as mock\n'), ((10777, 10801), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'code': '(403)'}), '(code=403)\n', (10791, 10801), True, 'import unittest.mock as mock\n')] |
from flask import Flask
from src.common.database import Database
__author__ = "<NAME>"
app = Flask(__name__)
app.config.from_object('config')
app.secret_key = "123"
@app.route('/')
def hello_world():
return "Hello World"
@app.before_first_request
def init_db():
Database.initialize()
from src.models.users.views import user_blueprint
app.register_blueprint(user_blueprint, url_prefix="/users")
| [
"src.common.database.Database.initialize",
"flask.Flask"
] | [((96, 111), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (101, 111), False, 'from flask import Flask\n'), ((277, 298), 'src.common.database.Database.initialize', 'Database.initialize', ([], {}), '()\n', (296, 298), False, 'from src.common.database import Database\n')] |
#hardware platform: pyboard V1.1
# use this demo you should do:
# 1. open boot.py
# 2. enable pyb.usb_mode('VCP+HID')
# 3. close uPyCraft and reconnet pyboard for PC
# 4. open uPyCraft and run this demo
# 5. ctrl+c or stop could stop this demo
# Restoring your pyboard to normal
# 1. Hold down the USR switch.
# 2. While still holding down USR, press and release the RST switch.
# 3. The LEDs will then cycle green to orange to green+orange and back again.
# 4. Keep holding down USR until only the orange LED is lit, and then let go of the USR switch.
# 5. The orange LED should flash quickly 4 times, and then turn off.
# 6. You are now in safe mode.
import pyb
switch=pyb.Switch()
accel=pyb.Accel() #Accel is an object that controls the accelerometer
hid=pyb.USB_HID() #create USB_HID object.it can be used to emulate a peripheral such as a mouse or keyboard.
while not switch():
hid.send((0,accel.x(),accel.y(),0)) #Send data over the USB HID interface | [
"pyb.USB_HID",
"pyb.Accel",
"pyb.Switch"
] | [((671, 683), 'pyb.Switch', 'pyb.Switch', ([], {}), '()\n', (681, 683), False, 'import pyb\n'), ((690, 701), 'pyb.Accel', 'pyb.Accel', ([], {}), '()\n', (699, 701), False, 'import pyb\n'), ((780, 793), 'pyb.USB_HID', 'pyb.USB_HID', ([], {}), '()\n', (791, 793), False, 'import pyb\n')] |
from datetime import datetime
from sqlalchemy import Column, DateTime, ForeignKey, Integer
from sqlalchemy.ext.declarative import declared_attr
from zemfrog.globals import db
class LikeMixin:
id = Column(Integer, primary_key=True)
created_at = Column(DateTime, default=datetime.utcnow)
@declared_attr
def article_id(self):
return Column(ForeignKey("article.id"), nullable=False)
@declared_attr
def user_id(self):
return Column(ForeignKey("user.id"), nullable=False)
class Like(LikeMixin, db.Model):
pass
class Dislike(LikeMixin, db.Model):
pass
| [
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] | [((204, 237), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (210, 237), False, 'from sqlalchemy import Column, DateTime, ForeignKey, Integer\n'), ((255, 296), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (261, 296), False, 'from sqlalchemy import Column, DateTime, ForeignKey, Integer\n'), ((365, 389), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""article.id"""'], {}), "('article.id')\n", (375, 389), False, 'from sqlalchemy import Column, DateTime, ForeignKey, Integer\n'), ((472, 493), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (482, 493), False, 'from sqlalchemy import Column, DateTime, ForeignKey, Integer\n')] |
import psikit
sapt = psikit.Sapt()
sapt.monomer1_from_molfile('phenol1.mol')
sapt.monomer2_from_molfile('phenol2.mol')
sapt.make_dimer()
p = sapt.dimer
sapt.run_fisapt()
| [
"psikit.Sapt"
] | [((21, 34), 'psikit.Sapt', 'psikit.Sapt', ([], {}), '()\n', (32, 34), False, 'import psikit\n')] |
from spellchecker import SpellChecker
spell = SpellChecker()
spell.word_frequency.load_words(['microsoft', 'apple', 'google', 'tfl', 'copd',
'rihanna', 'chatbot', 'skype', 'facebook', 'amazon', 'nhs', 'spotify'])
def auto_correct(sentence):
words = sentence.split()
# find those words that may be misspelled
misspelled = spell.unknown(words)
if misspelled:
for word in misspelled:
# Get the one `most likely` answer
correct = spell.correction(word)
sentence = sentence.replace(word, correct)
return sentence
# print(auto_correct("new york, us"))
#print(auto_correct("lol")) | [
"spellchecker.SpellChecker"
] | [((47, 61), 'spellchecker.SpellChecker', 'SpellChecker', ([], {}), '()\n', (59, 61), False, 'from spellchecker import SpellChecker\n')] |
# Generated by Django 3.1.1 on 2020-12-24 11:41
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="TweetAccount",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("fullname", models.CharField(max_length=255)),
("username", models.CharField(max_length=255)),
("twitter_id", models.PositiveIntegerField(unique=True)),
],
),
migrations.CreateModel(
name="TweetHashtag",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("name", models.CharField(max_length=255, unique=True)),
],
),
migrations.CreateModel(
name="Tweet",
fields=[
("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
("tweet_id", models.PositiveIntegerField(unique=True)),
("created_at", models.DateTimeField()),
("like_count", models.PositiveIntegerField()),
("reply_count", models.PositiveIntegerField()),
("retweet_count", models.PositiveIntegerField()),
("text", models.TextField(max_length=280)),
(
"account",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, related_name="tweets", to="scraper.tweetaccount"
),
),
("hashtags", models.ManyToManyField(blank=True, related_name="tweets", to="scraper.TweetHashtag")),
],
options={
"ordering": ("-tweet_id",),
},
),
]
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.AutoField",
"django.db.models.PositiveIntegerField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((336, 429), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (352, 429), False, 'from django.db import migrations, models\n'), ((457, 489), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (473, 489), False, 'from django.db import migrations, models\n'), ((521, 553), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (537, 553), False, 'from django.db import migrations, models\n'), ((587, 627), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'unique': '(True)'}), '(unique=True)\n', (614, 627), False, 'from django.db import migrations, models\n'), ((765, 858), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (781, 858), False, 'from django.db import migrations, models\n'), ((882, 927), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'unique': '(True)'}), '(max_length=255, unique=True)\n', (898, 927), False, 'from django.db import migrations, models\n'), ((1058, 1151), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1074, 1151), False, 'from django.db import migrations, models\n'), ((1179, 1219), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'unique': '(True)'}), '(unique=True)\n', (1206, 1219), False, 'from django.db import migrations, models\n'), ((1253, 1275), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (1273, 1275), False, 'from django.db import migrations, models\n'), ((1309, 1338), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (1336, 1338), False, 'from django.db import migrations, models\n'), ((1373, 1402), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (1400, 1402), False, 'from django.db import migrations, models\n'), ((1439, 1468), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (1466, 1468), False, 'from django.db import migrations, models\n'), ((1496, 1528), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(280)'}), '(max_length=280)\n', (1512, 1528), False, 'from django.db import migrations, models\n'), ((1600, 1717), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""tweets"""', 'to': '"""scraper.tweetaccount"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='tweets', to='scraper.tweetaccount')\n", (1617, 1717), False, 'from django.db import migrations, models\n'), ((1808, 1897), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""tweets"""', 'to': '"""scraper.TweetHashtag"""'}), "(blank=True, related_name='tweets', to=\n 'scraper.TweetHashtag')\n", (1830, 1897), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
"""
Defines PNCursorTableModel class.
"""
import math
import threading
import time
import itertools
import locale
from datetime import date
from PyQt5 import QtCore, QtGui, Qt # type: ignore
from PyQt5.QtCore import pyqtSignal # type: ignore
from PyQt5 import (
QtWidgets,
) # type: ignore # FIXME: Not allowed here! this is for QCheckBox but it's not needed
from pineboolib.core.utils.utils_base import filedir
from pineboolib.core.utils import logging
from pineboolib.application.utils.date_conversion import date_amd_to_dma
from typing import Any, Iterable, Optional, Union, List, Dict, Tuple, cast, TYPE_CHECKING
if TYPE_CHECKING:
from pineboolib.application.metadata.pnfieldmetadata import PNFieldMetaData # noqa: F401
from pineboolib.application.metadata.pntablemetadata import PNTableMetaData # noqa: F401
from pineboolib.application.database.pnsqlcursor import PNSqlCursor # noqa: F401
from pineboolib.application.database.pnsqlquery import PNSqlQuery # noqa: F401
from pineboolib.interfaces.iconnection import IConnection
from pineboolib.interfaces.iapicursor import IApiCursor
DEBUG = False
class PNCursorTableModel(QtCore.QAbstractTableModel):
"""
Link between FLSqlCursor and database.
"""
logger = logging.getLogger("CursorTableModel")
rows = 15
cols = 5
USE_THREADS = False
USE_TIMER = True
CURSOR_COUNT = itertools.count()
rowsLoaded = 0
where_filter: str
where_filters: Dict[str, str] = {}
_metadata = None
_sortOrder = ""
_disable_refresh = None
color_function_ = None
need_update = False
_driver_sql = None
_size = None
parent_view: Optional[QtWidgets.QTableView] # type is FLDatatable
sql_str = ""
canFetchMoreRows: bool
_initialized: Optional[
bool
] = None # Usa 3 estado None, True y False para hacer un primer refresh retardado si pertenece a un fldatatable
def __init__(self, conn: "IConnection", parent: "PNSqlCursor") -> None:
"""
Constructor.
@param conn. PNConnection Object
@param parent. related FLSqlCursor
"""
super(PNCursorTableModel, self).__init__()
if parent is None:
raise ValueError("Parent is mandatory")
self._cursorConn = conn
self._parent: "PNSqlCursor" = parent
self.parent_view = None
# self._metadata = self._parent.metadata()
if not self.metadata():
return
self._driver_sql = self.db().driver()
self.USE_THREADS = self.driver_sql().useThreads()
self.USE_TIMER = self.driver_sql().useTimer()
if self.USE_THREADS and self.USE_TIMER:
self.USE_TIMER = False
self.logger.warning("SQL Driver supports Threads and Timer, defaulting to Threads")
if not self.USE_THREADS and not self.USE_TIMER:
self.USE_TIMER = True
self.logger.warning(
"SQL Driver supports neither Threads nor Timer, defaulting to Timer"
)
self.USE_THREADS = False
self.USE_TIMER = True
self.rowsLoaded = 0
self.sql_fields: List[str] = []
self.sql_fields_omited: List[str] = []
self.sql_fields_without_check: List[str] = []
# self.field_aliases = []
# self.field_type = []
# self.field_metaData = []
self.col_aliases: List[str] = []
# Indices de busqueda segun PK y CK. Los array "pos" guardan las posiciones
# de las columnas afectadas. PK normalmente valdrá [0,].
# CK puede ser [] o [2,3,4] por ejemplo.
# En los IDX tendremos como clave el valor compuesto, en array, de la clave.
# Como valor del IDX tenemos la posicion de la fila.
# Si se hace alguna operación en _data como borrar filas intermedias hay
# que invalidar los indices. Opcionalmente, regenerarlos.
self.pkpos: List[int] = []
self.ckpos: List[int] = []
self.pkidx: Dict[Tuple, int] = {}
self.ckidx: Dict[Tuple, int] = {}
self._checkColumn: Dict[str, Any] = {}
# Establecer a False otra vez si el contenido de los indices es erróneo.
self.indexes_valid = False
self._data: List[List[Any]] = []
self._vdata: List[Optional[List[Any]]] = []
self._column_hints: List[int] = []
self.updateColumnsCount()
self.rows = 0
self.rowsLoaded = 0
self.pendingRows = 0
self.lastFetch = 0.0
self.fetchedRows = 0
self._showPixmap = True
self.color_function_ = None
# self.color_dict_ = {}
self.where_filter = "1=1"
self.where_filters = {}
self.where_filters["main-filter"] = ""
self.where_filters["filter"] = ""
self.sql_str = ""
if self.USE_THREADS:
self.fetchLock = threading.Lock()
self.threadFetcher = threading.Thread(target=self.threadFetch)
self.threadFetcherStop = threading.Event()
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.updateRows)
self.timer.start(1000)
self.canFetchMoreRows = True
self._disable_refresh = False
self._cursor_db: IApiCursor = self.db().cursor()
self._initialized = None
# self.refresh()
def disable_refresh(self, disable: bool) -> None:
"""
Disable refresh.
e.g. FLSqlQuery.setForwardOnly(True).
@param disable. True or False
"""
self._disable_refresh = disable
def sort(self, column: int, order: QtCore.Qt.SortOrder = QtCore.Qt.AscendingOrder) -> None:
"""
Change order by used ASC/DESC and column.
@param col. Column to sort by
@param order. 0 ASC, 1 DESC
"""
col = column
# order 0 ascendente , 1 descendente
ord = "ASC"
if order == 1:
ord = "DESC"
field_mtd = self.metadata().indexFieldObject(col)
if field_mtd.type() == "check":
return
col_name = field_mtd.name()
order_list: List[str] = []
found_ = False
if self._sortOrder:
for column_name in self._sortOrder.split(","):
if col_name in column_name and ord in column_name:
found_ = True
order_list.append("%s %s" % (col_name, ord))
else:
order_list.append(column_name)
if not found_:
self.logger.debug(
"%s. Se intenta ordernar por una columna (%s) que no está definida en el order by previo (%s). "
"El order by previo se perderá" % (__name__, col_name, self._sortOrder)
)
else:
self._sortOrder = ",".join(order_list)
if not found_:
self._sortOrder = "%s %s" % (col_name, ord)
self.refresh()
def getSortOrder(self) -> str:
"""
Get current sort order.
Returns string with sortOrder value.
@return string with info about column and order
"""
return self._sortOrder
def setSortOrder(self, sort_order: Union[List[str], str]) -> None:
"""
Set current ORDER BY.
"""
self._sortOrder = ""
if isinstance(sort_order, list):
self._sortOrder = ",".join(sort_order)
else:
self._sortOrder = sort_order
# def setColorFunction(self, f):
# self.color_function_ = f
# def dict_color_function(self):
# return self.color_function_
def data(self, index: QtCore.QModelIndex, role: int = QtCore.Qt.DisplayRole) -> Any:
"""
Retrieve information about a record.
(overload of QAbstractTableModel)
Could return alignment, backgroun color, value... depending on role.
@param index. Register position
@param role. information type required
@return solicited data
"""
row = index.row()
col = index.column()
field = self.metadata().indexFieldObject(col)
_type = field.type()
res_color_function: List[str] = []
if _type != "check":
r = [x for x in self._data[row]]
self._data[row] = r
d = r[col]
else:
pK = str(self.value(row, self.metadata().primaryKey()))
if pK not in self._checkColumn.keys():
d = (
QtWidgets.QCheckBox()
) # FIXME: Not allowed here. This is GUI. This can be emulated with TRUE/FALSE
self._checkColumn[pK] = d
if self.parent_view and role in [QtCore.Qt.BackgroundRole, QtCore.Qt.ForegroundRole]:
fun_get_color, iface = self.parent_view.functionGetColor()
if fun_get_color is not None:
context_ = None
fun_name_ = None
if fun_get_color.find(".") > -1:
list_ = fun_get_color.split(".")
from pineboolib.application.safeqsa import SafeQSA
qsa_widget = SafeQSA.get_any(list_[0])
fun_name_ = list_[1]
if qsa_widget:
context_ = qsa_widget.iface
else:
context_ = iface
fun_name_ = fun_get_color
function_color = getattr(context_, fun_name_, None)
if function_color is not None:
field_name = field.name()
field_value = d
cursor = self._parent
selected = False
res_color_function = function_color(
field_name, field_value, cursor, selected, _type
)
else:
raise Exception(
"No se ha resuelto functionGetColor %s desde %s" % (fun_get_color, context_)
)
# print("Data ", index, role)
# print("Registros", self.rowCount())
# roles
# 0 QtCore.Qt.DisplayRole
# 1 QtCore.Qt.DecorationRole
# 2 QtCore.Qt.EditRole
# 3 QtCore.Qt.ToolTipRole
# 4 QtCore.Qt.StatusTipRole
# 5 QtCore.Qt.WhatThisRole
# 6 QtCore.Qt.FontRole
# 7 QtCore.Qt.TextAlignmentRole
# 8 QtCore.Qt.BackgroundRole
# 9 QtCore.Qt.ForegroundRole
if role == QtCore.Qt.CheckStateRole and _type == "check":
if pK in self._checkColumn.keys():
if self._checkColumn[pK].isChecked():
return QtCore.Qt.Checked
return QtCore.Qt.Unchecked
elif role == QtCore.Qt.TextAlignmentRole:
d = QtCore.Qt.AlignVCenter
if _type in ("int", "double", "uint"):
d = d | QtCore.Qt.AlignRight
elif _type in ("bool", "date", "time"):
d = d | QtCore.Qt.AlignCenter
elif _type in ("unlock", "pixmap"):
d = d | QtCore.Qt.AlignHCenter
return d
elif role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
# r = self._vdata[row]
if _type == "bool":
if d in (True, "1"):
d = "Sí"
else:
d = "No"
elif _type in ("unlock", "pixmap"):
d = None
elif _type in ("string", "stringlist") and not d:
d = ""
elif _type == "time" and d:
d = str(d)
elif _type == "date":
# Si es str lo paso a datetime.date
if d and isinstance(d, str):
if len(d.split("-")[0]) == 4:
d = date_amd_to_dma(d)
if d:
list_ = d.split("-")
d = date(int(list_[2]), int(list_[1]), int(list_[0]))
if d and isinstance(d, date):
# Cogemos el locale para presentar lo mejor posible la fecha
try:
locale.setlocale(locale.LC_TIME, "")
date_format = locale.nl_langinfo(locale.D_FMT)
date_format = date_format.replace("y", "Y") # Año con 4 dígitos
date_format = date_format.replace("/", "-") # Separadores
d = d.strftime(date_format)
except AttributeError:
import platform
self.logger.warning(
"locale specific date format is not yet implemented for %s",
platform.system(),
)
elif _type == "check":
return
elif _type == "double":
if d is not None:
d = QtCore.QLocale.system().toString(float(d), "f", field.partDecimal())
elif _type in ("int", "uint"):
if d is not None:
d = QtCore.QLocale.system().toString(int(d))
if self.parent_view is not None:
self.parent_view.resize_column(col, d)
return d
elif role == QtCore.Qt.DecorationRole:
pixmap = None
if _type in ("unlock", "pixmap"):
if _type == "unlock":
if d in (True, "1"):
pixmap = QtGui.QPixmap(filedir("../share/icons", "unlock.png"))
else:
pixmap = QtGui.QPixmap(filedir("../share/icons", "lock.png"))
if self.parent_view is not None:
if (
self.parent_view.showAllPixmap()
or row == self.parent_view.cursor().at()
):
if pixmap and not pixmap.isNull() and self.parent_view:
row_height = self.parent_view.rowHeight(row) # Altura row
row_width = self.parent_view.columnWidth(col)
new_pixmap = QtGui.QPixmap(row_width, row_height) # w , h
center_width = (row_width - pixmap.width()) / 2
center_height = (row_height - pixmap.height()) / 2
new_pixmap.fill(QtCore.Qt.transparent)
painter = Qt.QPainter(new_pixmap)
painter.drawPixmap(
center_width,
center_height,
pixmap.width(),
pixmap.height(),
pixmap,
)
pixmap = new_pixmap
else:
if self.parent_view is not None:
if self.parent_view and self.parent_view.showAllPixmap():
if not self.db().manager().isSystemTable(self._parent.table()):
d = self.db().manager().fetchLargeValue(d)
else:
from pineboolib.application.utils.xpm import cacheXPM
d = cacheXPM(d)
if d:
pixmap = QtGui.QPixmap(d)
return pixmap
elif role == QtCore.Qt.BackgroundRole:
if _type == "bool":
if d in (True, "1"):
d = QtGui.QBrush(QtCore.Qt.green)
else:
d = QtGui.QBrush(QtCore.Qt.red)
elif _type == "check":
obj_ = self._checkColumn[pK]
if obj_.isChecked():
d = QtGui.QBrush(QtCore.Qt.green)
else:
d = QtGui.QBrush(QtCore.Qt.white)
else:
if res_color_function and len(res_color_function) and res_color_function[0] != "":
color_ = QtGui.QColor(res_color_function[0])
style_ = getattr(QtCore.Qt, res_color_function[2], None)
d = QtGui.QBrush(color_)
d.setStyle(style_)
else:
d = None
return d
elif role == QtCore.Qt.ForegroundRole:
if _type == "bool":
if d in (True, "1"):
d = QtGui.QBrush(QtCore.Qt.black)
else:
d = QtGui.QBrush(QtCore.Qt.white)
else:
if res_color_function and len(res_color_function) and res_color_function[1] != "":
color_ = QtGui.QColor(res_color_function[1])
style_ = getattr(QtCore.Qt, res_color_function[2], None)
d = QtGui.QBrush(color_)
d.setStyle(style_)
else:
d = None
return d
# else:
# print("role desconocido", role)
return None
def threadFetch(self) -> None:
"""
Retrieve information of the table.
Only used when SQL driver supports Thread.
"""
if not self.metadata():
return
self.refreshFetch(2000)
def updateRows(self) -> None:
"""
Update virtual records managed by its model.
"""
if self.USE_THREADS:
ROW_BATCH_COUNT = 200 if self.threadFetcher.is_alive() else 0
elif self.USE_TIMER:
ROW_BATCH_COUNT = 200 if self.timer.isActive() else 0
else:
return
parent = QtCore.QModelIndex()
fromrow = self.rowsLoaded
torow = self.fetchedRows - ROW_BATCH_COUNT - 1
if torow - fromrow < 5:
if self.canFetchMoreRows:
self.logger.trace(
"Updaterows %s (updated:%d)", self.metadata().name(), self.fetchedRows
)
self.fetchMore(parent, self.metadata().name(), self.where_filter)
return
self.logger.trace("Updaterows %s (UPDATE:%d)", self.metadata().name(), torow - fromrow + 1)
self.beginInsertRows(parent, fromrow, torow)
self.rowsLoaded = torow + 1
self.endInsertRows()
topLeft = self.index(fromrow, 0)
bottomRight = self.index(torow, self.cols - 1)
self.dataChanged.emit(topLeft, bottomRight)
def fetchMore(
self,
index: QtCore.QModelIndex,
tablename: Optional[str] = None,
where_filter: Optional[str] = None,
size_hint: int = 1000,
) -> None:
"""
Retrieve new data from DB.
@param index. parent Tableindex.
@param tablename. Table name to fetch from.
@param where_filter. Filter to be used to get data.
"""
if not self.sql_str:
return
tiempo_inicial = time.time()
# ROW_BATCH_COUNT = min(200 + self.rowsLoaded // 10, 1000)
ROW_BATCH_COUNT = size_hint
parent = index
fromrow = self.rowsLoaded
# FIXME: Hay que borrar luego las que no se cargaron al final...
torow = self.rowsLoaded + ROW_BATCH_COUNT
if self.fetchedRows - ROW_BATCH_COUNT - 1 > torow:
torow = self.fetchedRows - ROW_BATCH_COUNT - 1
if tablename is None:
tablename = self.metadata().name()
self.logger.trace(
"refrescando modelo tabla %r, rows: %d %r" % (tablename, self.rows, (fromrow, torow))
)
if torow < fromrow:
return
# print("QUERY:", sql)
if self.fetchedRows <= torow and self.canFetchMoreRows:
if self.USE_THREADS and self.threadFetcher.is_alive():
self.threadFetcher.join()
if where_filter is None:
where_filter = self.where_filter
c_all = self.driver_sql().fetchAll(
self.cursorDB(), tablename, where_filter, self.sql_str, self._curname
)
newrows = len(c_all) # self._cursor.rowcount
from_rows = self.rows
self._data += c_all
self._vdata += [None] * newrows
self.fetchedRows += newrows
self.rows += newrows
self.canFetchMoreRows = bool(newrows > 0)
self.logger.trace(
"refrescando modelo tabla %r, new rows: %d fetched: %d",
tablename,
newrows,
self.fetchedRows,
)
if not self.USE_THREADS:
self.refreshFetch(ROW_BATCH_COUNT)
self.pendingRows = 0
self.indexUpdateRowRange((from_rows, self.rows))
# if self.USE_THREADS is True:
# self.threadFetcher = threading.Thread(target=self.threadFetch)
# self.threadFetcher.start()
if torow > self.rows - 1:
torow = self.rows - 1
if torow < fromrow:
return
self.beginInsertRows(parent, fromrow, torow)
if fromrow == 0:
data_trunc = self._data[:200]
for row in data_trunc:
for r, val in enumerate(row):
txt = str(val)
ltxt = len(txt)
newlen = int(40 + math.tanh(ltxt / 3000.0) * 35000.0)
self._column_hints[r] += newlen
for r in range(len(self._column_hints)):
self._column_hints[r] = int(self._column_hints[r] // (len(self._data[:200]) + 1))
# self._column_hints = [int(x) for x in self._column_hints]
self.indexes_valid = True
self.rowsLoaded = torow + 1
self.endInsertRows()
# print("fin refresco modelo tabla %r , query %r, rows: %d %r"
# % (self._table.name, self._table.query_table, self.rows, (fromrow,torow)))
topLeft = self.index(fromrow, 0)
bottomRight = self.index(torow, self.cols - 1)
self.dataChanged.emit(topLeft, bottomRight)
tiempo_final = time.time()
self.lastFetch = tiempo_final
if self.USE_THREADS and not self.threadFetcher.is_alive() and self.canFetchMoreRows:
self.threadFetcher = threading.Thread(target=self.threadFetch)
self.threadFetcherStop = threading.Event()
self.threadFetcher.start()
if tiempo_final - tiempo_inicial > 0.2:
self.logger.info(
"fin refresco tabla '%s' :: rows: %d %r :: (%.3fs)",
self.metadata().name(),
self.rows,
(fromrow, torow),
tiempo_final - tiempo_inicial,
)
def _refresh_field_info(self) -> None:
"""
Check if query fields do exist.
If any field does not exist it gets marked as ommitted.
"""
is_query = self.metadata().isQuery()
qry_tables = []
qry = None
# if qry is None:
# return
if is_query:
qry = self.db().manager().query(self.metadata().query())
if qry is None:
raise Exception(" The query %s return empty value" % self.metadata().query())
qry_select = [x.strip() for x in (qry.select()).split(",")]
qry_fields: Dict[str, str] = {
fieldname.split(".")[-1]: fieldname for fieldname in qry_select
}
for table in qry.tablesList():
mtd = self.db().manager().metadata(table, True)
if mtd:
qry_tables.append((table, mtd))
for n, field in enumerate(self.metadata().fieldList()):
# if field.visibleGrid():
# sql_fields.append(field.name())
if field.isPrimaryKey():
self.pkpos.append(n)
if field.isCompoundKey():
self.ckpos.append(n)
if is_query:
if field.name() in qry_fields:
self.sql_fields.append(qry_fields[field.name()])
else:
found = False
for table, mtd in qry_tables:
if field.name() in mtd.fieldNames():
self.sql_fields.append("%s.%s" % (table, field.name()))
found = True
break
# Omito los campos que aparentemente no existen
if not found and not field.name() in self.sql_fields_omited:
if qry is None:
raise Exception("The qry is empty!")
# NOTE: Esto podría ser por ejemplo porque no entendemos los campos computados.
self.logger.error(
"CursorTableModel.refresh(): Omitiendo campo '%s' referenciado en query %s. El campo no existe en %s ",
field.name(),
self.metadata().name(),
qry.tablesList(),
)
self.sql_fields_omited.append(field.name())
else:
if field.type() != field.Check:
self.sql_fields_without_check.append(field.name())
self.sql_fields.append(field.name())
def refresh(self) -> None:
"""
Refresh information mananged by this class.
"""
if (
self._initialized is None and self.parent_view
): # Si es el primer refresh y estoy conectado a un FLDatatable()
self._initialized = True
QtCore.QTimer.singleShot(1, self.refresh)
return
if (
self._initialized
): # Si estoy inicializando y no me ha enviado un sender, cancelo el refesh
obj = self.sender()
if not obj:
return
self._initialized = False
if self._disable_refresh and self.rows > 0:
return
if not self.metadata():
self.logger.warning(
"ERROR: CursorTableModel :: No hay tabla %s", self.metadata().name()
)
return
""" FILTRO WHERE """
where_filter = ""
for k, wfilter in sorted(self.where_filters.items()):
# if wfilter is None:
# continue
wfilter = wfilter.strip()
if not wfilter:
continue
if not where_filter:
where_filter = wfilter
elif wfilter not in where_filter:
if where_filter not in wfilter:
where_filter += " AND " + wfilter
if not where_filter:
where_filter = "1 = 1"
self.where_filter = where_filter
# Si no existe un orderBy y se ha definido uno desde FLTableDB ...
if self.where_filter.find("ORDER BY") == -1 and self.getSortOrder():
if self.where_filter.find(";") > -1: # Si el where termina en ; ...
self.where_filter = self.where_filter.replace(
";", " ORDER BY %s;" % self.getSortOrder()
)
else:
self.where_filter = "%s ORDER BY %s" % (self.where_filter, self.getSortOrder())
""" FIN """
parent = QtCore.QModelIndex()
oldrows = self.rowsLoaded
self.beginRemoveRows(parent, 0, oldrows)
if self.USE_THREADS:
self.threadFetcherStop.set()
if self.threadFetcher.is_alive():
self.threadFetcher.join()
self.rows = 0
self.rowsLoaded = 0
self.fetchedRows = 0
self.sql_fields = []
self.sql_fields_without_check = []
self.pkpos = []
self.ckpos = []
self._data = []
self.endRemoveRows()
if oldrows > 0:
cast(pyqtSignal, self.rowsRemoved).emit(parent, 0, oldrows - 1)
if self.metadata().isQuery():
query = self.db().manager().query(self.metadata().query())
if query is None:
raise Exception("query is empty!")
from_ = query.from_()
else:
from_ = self.metadata().name()
self._refresh_field_info()
self._curname = "cur_%s_%08d" % (self.metadata().name(), next(self.CURSOR_COUNT))
if self.sql_fields_without_check:
self.sql_str = ", ".join(self.sql_fields_without_check)
else:
self.sql_str = ", ".join(self.sql_fields)
SZ_FETCH = max(1000, oldrows)
self.driver_sql().refreshQuery(
self._curname, self.sql_str, from_, self.where_filter, self.cursorDB(), self.db().db()
)
self.refreshFetch(SZ_FETCH)
self.need_update = False
self.rows = 0
self.canFetchMoreRows = True
# print("rows:", self.rows)
self.pendingRows = 0
self._column_hints = [120] * len(self.sql_fields)
# self.threadFetcher = threading.Thread(target=self.threadFetch)
# self.threadFetcherStop = threading.Event()
# self.threadFetcher.start()
# self.color_dict_.clear() # Limpiamos diccionario de colores
self.fetchMore(parent, self.metadata().name(), self.where_filter, size_hint=SZ_FETCH)
# print("%s:: rows: %s" % (self._curname, self.rows))
def refreshFetch(self, n: int) -> None:
"""
Refresh new information from DB for a certain amount of records.
@param n. Number of records to fetch
"""
self.driver_sql().refreshFetch(
n,
self._curname,
self.metadata().name(),
self.cursorDB(),
self.sql_str,
self.where_filter,
)
def indexUpdateRow(self, rownum: int) -> None:
"""
Update row index, used to locate virtual registers on TableModel.
@param rownum. Row number
"""
row = self._data[rownum]
if self.pkpos:
key = tuple([row[x] for x in self.pkpos])
self.pkidx[key] = rownum
if self.ckpos:
key = tuple([row[x] for x in self.ckpos])
self.ckidx[key] = rownum
def indexUpdateRowRange(self, rowrange: Tuple[int, int]) -> None:
"""
Update index for a range of rows.
Used to locate records in TableModel.
@param rowrange. Tuple (from, to)
"""
rows = self._data[rowrange[0] : rowrange[1]]
if self.pkpos:
for n, row in enumerate(rows):
key = tuple([row[x] for x in self.pkpos])
self.pkidx[key] = n + rowrange[0]
if self.ckpos:
for n, row in enumerate(rows):
key = tuple([row[x] for x in self.ckpos])
self.ckidx[key] = n + rowrange[0]
def value(self, row: Optional[int], fieldName: str) -> Any:
"""
Retrieve column value for a row.
@param row. Row number to retrieve
@param fieldName. Field name.
@return Value
"""
if row is None or row < 0 or row >= self.rows:
return None
col = None
if not self.metadata().isQuery():
col = self.metadata().indexPos(fieldName)
else:
# Comparo con los campos de la qry, por si hay algun hueco que no se detectaria con indexPos
for x, fQ in enumerate(self.sql_fields):
if fieldName == fQ[fQ.find(".") + 1 :]:
col = x
break
if not col:
return None
mtdfield = self.metadata().field(fieldName)
if mtdfield is None:
raise Exception("fieldName: %s not found" % fieldName)
type_ = mtdfield.type()
if type_ == "check":
return None
campo = self._data[row][col]
if type_ in ("serial", "uint", "int"):
if campo not in (None, "None"):
campo = int(campo)
elif campo == "None":
self.logger.warning("Campo no deberia ser un string 'None'")
return campo
def updateValuesDB(self, pKValue: Any, dict_update: Dict[str, Any]) -> bool:
"""
Update record data from tableModel into DB.
@param pKValue. Pirmary Key of the record to be updated
@param dict_update. Fields to be updated
"""
self.logger.trace("updateValuesDB: init: pKValue %s, dict_update %s", pKValue, dict_update)
row = self.findPKRow([pKValue])
# if row is None:
# raise AssertionError(
# "Los indices del CursorTableModel no devolvieron un registro (%r)" % (pKValue))
if row is None:
return False
if self.value(row, self.pK()) != pKValue:
raise AssertionError(
"Los indices del CursorTableModel devolvieron un registro erroneo: %r != %r"
% (self.value(row, self.pK()), pKValue)
)
self.setValuesDict(row, dict_update)
pkey_name = self.metadata().primaryKey()
# TODO: la conversion de mogrify de bytes a STR va a dar problemas con los acentos...
mtdfield = self.metadata().field(pkey_name)
if mtdfield is None:
raise Exception("Primary Key %s not found" % pkey_name)
typePK_ = mtdfield.type()
pKValue = self.db().manager().formatValue(typePK_, pKValue, False)
# if typePK_ == "string" or typePK_ == "pixmap" or typePK_ == "stringlist" or typePK_ == "time" or typePK_ == "date":
# pKValue = str("'" + pKValue + "'")
where_filter = "%s = %s" % (pkey_name, pKValue)
update_set = []
for key, value in dict_update.items():
mtdfield = self.metadata().field(key)
if mtdfield is None:
raise Exception("Field %s not found" % key)
type_ = mtdfield.type()
# if type_ == "string" or type_ == "pixmap" or type_ == "stringlist" or type_ == "time" or type_ == "date":
# value = str("'" + value + "'")
if type_ in ("string", "stringlist"):
value = self.db().normalizeValue(value)
value = self.db().manager().formatValue(type_, value, False)
# update_set.append("%s = %s" % (key, (self._cursor.mogrify("%s",[value]))))
update_set.append("%s = %s" % (key, value))
if len(update_set) == 0:
return False
update_set_txt = ", ".join(update_set)
sql = self.driver_sql().queryUpdate(self.metadata().name(), update_set_txt, where_filter)
# print("MODIFYING SQL :: ", sql)
try:
self.db().execute_query(sql)
except Exception:
self.logger.exception("ERROR: CursorTableModel.Update %s:", self.metadata().name())
# self._cursor.execute("ROLLBACK")
return False
try:
if self.cursorDB().description:
returning_fields = [x[0] for x in self.cursorDB().description]
for orow in self.cursorDB():
dict_update = dict(zip(returning_fields, orow))
self.setValuesDict(row, dict_update)
except Exception:
self.logger.exception(
"updateValuesDB: Error al assignar los valores de vuelta al buffer"
)
self.need_update = True
return True
def setValuesDict(self, row: int, update_dict: Dict[str, Any]) -> None:
"""
Set value to a row using a Dict.
@param row. Row to update
@param update_dict. Key-Value where key is the fieldname and value is the value to update
"""
if DEBUG:
self.logger.info("CursorTableModel.setValuesDict(row %s) = %r", row, update_dict)
try:
if isinstance(self._data[row], tuple):
self._data[row] = list(self._data[row])
r = self._vdata[row]
if r is None:
r = [str(x) for x in self._data[row]]
self._vdata[row] = r
colsnotfound = []
for fieldname, value in update_dict.items():
# col = self.metadata().indexPos(fieldname)
try:
col = self.sql_fields.index(fieldname)
self._data[row][col] = value
r[col] = value
except ValueError:
colsnotfound.append(fieldname)
if colsnotfound:
self.logger.warning(
"CursorTableModel.setValuesDict:: columns not found: %r", colsnotfound
)
self.indexUpdateRow(row)
except Exception:
self.logger.exception(
"CursorTableModel.setValuesDict(row %s) = %r :: ERROR:", row, update_dict
)
def setValue(self, row: int, fieldname: str, value: Any) -> None:
"""
Set value to a cell.
@param row. related row
@param fieldname. name of the field to update.
@param value. Value to write. Text, Pixmap, etc.
"""
# Reimplementación para que todo pase por el método genérico.
self.setValuesDict(row, {fieldname: value})
def Insert(self, fl_cursor: "PNSqlCursor") -> bool: # FIXME: Should be "insert" in lowercase.
"""
Create new row in TableModel.
@param buffer . PNBuffer to be added.
"""
# Metemos lineas en la tabla de la bd
# pKValue = None
buffer = fl_cursor.buffer()
if buffer is None:
raise Exception("Cursor has no buffer")
campos = ""
valores = ""
for b in buffer.fieldsList():
value: Any = None
if buffer.value(b.name) is None:
mtdfield = fl_cursor.metadata().field(b.name)
if mtdfield is None:
raise Exception("field %s not found" % b.name)
value = mtdfield.defaultValue()
else:
value = buffer.value(b.name)
if value is not None: # si el campo se rellena o hay valor default
# if b.name == fl_cursor.metadata().primaryKey():
# pKValue = value
if b.type_ in ("string", "stringlist") and isinstance(value, str):
value = self.db().normalizeValue(value)
value = self.db().manager().formatValue(b.type_, value, False)
if not campos:
campos = b.name
valores = value
else:
campos = u"%s,%s" % (campos, b.name)
valores = u"%s,%s" % (valores, value)
if campos:
sql = """INSERT INTO %s (%s) VALUES (%s)""" % (fl_cursor.d.curName_, campos, valores)
# conn = self._cursorConn.db()
try:
# print(sql)
self.db().execute_query(sql)
# self.refresh()
# if pKValue is not None:
# fl_cursor.move(self.findPKRow((pKValue,)))
self.need_update = True
except Exception:
self.logger.exception(
"CursorTableModel.%s.Insert() :: SQL: %s", self.metadata().name(), sql
)
# self._cursor.execute("ROLLBACK")
return False
# conn.commit()
return True
return False
def Delete(self, cursor: "PNSqlCursor") -> None: # FIXME: Should be "delete" in lowercase.
"""
Delete a row from tableModel.
@param cursor . FLSqlCursor object
"""
pKName = self.metadata().primaryKey()
mtdfield = self.metadata().field(pKName)
if mtdfield is None:
raise Exception("PK Field %s not found" % pKName)
typePK = mtdfield.type()
tableName = self.metadata().name()
val = self.db().manager().formatValue(typePK, self.value(cursor.d._currentregister, pKName))
sql = "DELETE FROM %s WHERE %s = %s" % (tableName, pKName, val)
# conn = self._cursorConn.db()
try:
self.db().execute_query(sql)
self.need_update = True
except Exception:
self.logger.exception("CursorTableModel.%s.Delete() :: ERROR:", self.metadata().name())
# self._cursor.execute("ROLLBACK")
return
# conn.commit()
def findPKRow(self, pklist: Iterable[Any]) -> Optional[int]:
"""
Retrieve row index of a record given a primary key.
@param pklist. Primary Key list to find. Use a List [] even for a single record.
@return row index.
"""
if not isinstance(pklist, (tuple, list)):
raise ValueError(
"findPKRow expects a list as first argument. Enclose PK inside brackets [self.pkvalue]"
)
if not self.indexes_valid:
for n in range(self.rows):
self.indexUpdateRow(n)
self.indexes_valid = True
pklist = tuple(pklist)
if pklist[0] is None:
raise ValueError("Primary Key can't be null")
parent = QtCore.QModelIndex()
while self.canFetchMoreRows and pklist not in self.pkidx:
self.fetchMore(parent, self.metadata().name(), self.where_filter)
if not self.indexes_valid:
for n in range(self.rows):
self.indexUpdateRow(n)
self.indexes_valid = True
if pklist not in self.pkidx:
self.logger.info(
"CursorTableModel.%s.findPKRow:: PK not found: %r (from: %r)",
self.metadata().name(),
pklist,
list(self.pkidx.keys())[:8],
)
return None
return self.pkidx[pklist]
def findCKRow(self, cklist: Iterable[Any]) -> Optional[int]:
"""
Retrieve row index from its Composite Key.
@param cklist. CK list to find.
@return row index.
"""
if not isinstance(cklist, (tuple, list)):
raise ValueError("findCKRow expects a list as first argument.")
if not self.indexes_valid:
for n in range(self.rows):
self.indexUpdateRow(n)
self.indexes_valid = True
cklist = tuple(cklist)
if cklist not in self.ckidx:
self.logger.warning(
"CursorTableModel.%s.findCKRow:: CK not found: %r ", self.metadata().name(), cklist
)
return None
return self.ckidx[cklist]
def pK(self) -> str:
"""
Get field name of the primary key.
@return field name
"""
return self.metadata().primaryKey()
def fieldType(self, fieldName: str) -> str:
"""
Retrieve field type for a given field name.
@param fieldName. required field name.
@return field type.
"""
field = self.metadata().field(fieldName)
if field is None:
raise Exception("field %s not found" % fieldName)
return field.type()
def alias(self, fieldName: str) -> str:
"""
Retrieve alias name for a field name.
@param fieldName. field name requested.
@return alias for the field.
"""
field = self.metadata().field(fieldName)
if field is None:
raise Exception("field %s not found" % fieldName)
return field.alias()
def columnCount(self, *args: List[Any]) -> int:
"""
Get current column count.
@return Number of columns present.
"""
# if args:
# self.logger.warning("columnCount%r: wrong arg count", args, stack_info=True)
return self.cols
def updateColumnsCount(self) -> None:
"""
Set number of columns in tableModel.
"""
self.cols = len(self.metadata().fieldList())
self.loadColAliases()
if self.metadata().isQuery():
self._refresh_field_info()
def rowCount(self, parent: QtCore.QModelIndex = None) -> int:
"""
Get current row count.
@return Row number present in table.
"""
return self.rowsLoaded
def size(self) -> int:
"""
Get amount of data selected on the cursor.
@return number of retrieved rows.
"""
size = 0
mtd = self.metadata()
if mtd:
where_ = self.where_filter
from_ = self.metadata().name()
if mtd.isQuery():
qry = self.db().manager().query(self.metadata().query())
if qry is None:
raise Exception("Query not found")
from_ = qry.from_()
if self.where_filter.find("ORDER BY") > -1:
where_ = self.where_filter[: self.where_filter.find("ORDER BY")]
from pineboolib.application.database.pnsqlquery import PNSqlQuery # noqa: F811
q = PNSqlQuery(None, self.db().name)
q.exec_("SELECT COUNT(*) FROM %s WHERE %s" % (from_, where_))
if q.first():
size = q.value(0)
return size
def headerData(
self, section: int, orientation: QtCore.Qt.Orientation, role: int = QtCore.Qt.DisplayRole
) -> Any:
"""
Retrieve header data.
@param section. Column
@param orientation. Horizontal, Vertical
@param role. QtCore.Qt.DisplayRole only. Every other option is ommitted.
@return info for section, orientation and role.
"""
if role == QtCore.Qt.DisplayRole:
if orientation == QtCore.Qt.Horizontal:
if not self.col_aliases:
self.loadColAliases()
return self.col_aliases[section]
elif orientation == QtCore.Qt.Vertical:
return section + 1
return None
def loadColAliases(self) -> None:
"""
Load column alias for every column.
"""
self.col_aliases = [
str(self.metadata().indexFieldObject(i).alias()) for i in range(self.cols)
]
def fieldMetadata(self, fieldName: str) -> "PNFieldMetaData":
"""
Retrieve FLFieldMetadata for given fieldName.
@param fieldName. field name.
@return FLFieldMetadata
"""
field = self.metadata().field(fieldName)
if field is None:
raise Exception("fieldName %s not found" % fieldName)
return field
def metadata(self) -> "PNTableMetaData":
"""
Retrieve FLTableMetaData for this tableModel.
@return Objeto FLTableMetaData
"""
if self._parent.d.metadata_ is None:
raise Exception("Metadata not set")
return self._parent.d.metadata_
def driver_sql(self) -> Any:
"""Get SQL Driver used."""
return self._driver_sql
def cursorDB(self) -> "IApiCursor":
"""
Get currently used database cursor.
@return cursor object
"""
return self._cursor_db
def db(self) -> "IConnection":
"""Get current connection."""
return self._cursorConn
def set_parent_view(self, parent_view: QtWidgets.QTableView) -> None:
"""Set the parent view."""
self.parent_view = parent_view
| [
"PyQt5.QtCore.QModelIndex",
"pineboolib.application.safeqsa.SafeQSA.get_any",
"PyQt5.QtGui.QColor",
"pineboolib.core.utils.logging.getLogger",
"pineboolib.application.utils.xpm.cacheXPM",
"threading.Lock",
"platform.system",
"PyQt5.QtCore.QLocale.system",
"pineboolib.core.utils.utils_base.filedir",
"pineboolib.application.utils.date_conversion.date_amd_to_dma",
"PyQt5.Qt.QPainter",
"locale.setlocale",
"PyQt5.QtCore.QTimer",
"PyQt5.QtWidgets.QCheckBox",
"time.time",
"typing.cast",
"PyQt5.QtCore.QTimer.singleShot",
"PyQt5.QtGui.QBrush",
"threading.Event",
"PyQt5.QtGui.QPixmap",
"itertools.count",
"threading.Thread",
"locale.nl_langinfo",
"math.tanh"
] | [((1294, 1331), 'pineboolib.core.utils.logging.getLogger', 'logging.getLogger', (['"""CursorTableModel"""'], {}), "('CursorTableModel')\n", (1311, 1331), False, 'from pineboolib.core.utils import logging\n'), ((1423, 1440), 'itertools.count', 'itertools.count', ([], {}), '()\n', (1438, 1440), False, 'import itertools\n'), ((5063, 5078), 'PyQt5.QtCore.QTimer', 'QtCore.QTimer', ([], {}), '()\n', (5076, 5078), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((17838, 17858), 'PyQt5.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (17856, 17858), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((19118, 19129), 'time.time', 'time.time', ([], {}), '()\n', (19127, 19129), False, 'import time\n'), ((22257, 22268), 'time.time', 'time.time', ([], {}), '()\n', (22266, 22268), False, 'import time\n'), ((27514, 27534), 'PyQt5.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (27532, 27534), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((41416, 41436), 'PyQt5.QtCore.QModelIndex', 'QtCore.QModelIndex', ([], {}), '()\n', (41434, 41436), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((4894, 4910), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (4908, 4910), False, 'import threading\n'), ((4944, 4985), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.threadFetch'}), '(target=self.threadFetch)\n', (4960, 4985), False, 'import threading\n'), ((5023, 5040), 'threading.Event', 'threading.Event', ([], {}), '()\n', (5038, 5040), False, 'import threading\n'), ((22433, 22474), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.threadFetch'}), '(target=self.threadFetch)\n', (22449, 22474), False, 'import threading\n'), ((22512, 22529), 'threading.Event', 'threading.Event', ([], {}), '()\n', (22527, 22529), False, 'import threading\n'), ((25828, 25869), 'PyQt5.QtCore.QTimer.singleShot', 'QtCore.QTimer.singleShot', (['(1)', 'self.refresh'], {}), '(1, self.refresh)\n', (25852, 25869), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((8534, 8555), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', ([], {}), '()\n', (8553, 8555), False, 'from PyQt5 import QtWidgets\n'), ((9174, 9199), 'pineboolib.application.safeqsa.SafeQSA.get_any', 'SafeQSA.get_any', (['list_[0]'], {}), '(list_[0])\n', (9189, 9199), False, 'from pineboolib.application.safeqsa import SafeQSA\n'), ((28065, 28099), 'typing.cast', 'cast', (['pyqtSignal', 'self.rowsRemoved'], {}), '(pyqtSignal, self.rowsRemoved)\n', (28069, 28099), False, 'from typing import Any, Iterable, Optional, Union, List, Dict, Tuple, cast, TYPE_CHECKING\n'), ((21512, 21536), 'math.tanh', 'math.tanh', (['(ltxt / 3000.0)'], {}), '(ltxt / 3000.0)\n', (21521, 21536), False, 'import math\n'), ((15723, 15752), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.green'], {}), '(QtCore.Qt.green)\n', (15735, 15752), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((15799, 15826), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.red'], {}), '(QtCore.Qt.red)\n', (15811, 15826), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((13543, 13582), 'pineboolib.core.utils.utils_base.filedir', 'filedir', (['"""../share/icons"""', '"""unlock.png"""'], {}), "('../share/icons', 'unlock.png')\n", (13550, 13582), False, 'from pineboolib.core.utils.utils_base import filedir\n'), ((13657, 13694), 'pineboolib.core.utils.utils_base.filedir', 'filedir', (['"""../share/icons"""', '"""lock.png"""'], {}), "('../share/icons', 'lock.png')\n", (13664, 13694), False, 'from pineboolib.core.utils.utils_base import filedir\n'), ((15969, 15998), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.green'], {}), '(QtCore.Qt.green)\n', (15981, 15998), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((16045, 16074), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.white'], {}), '(QtCore.Qt.white)\n', (16057, 16074), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((16222, 16257), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['res_color_function[0]'], {}), '(res_color_function[0])\n', (16234, 16257), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((16359, 16379), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['color_'], {}), '(color_)\n', (16371, 16379), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((16633, 16662), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.black'], {}), '(QtCore.Qt.black)\n', (16645, 16662), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((16709, 16738), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.white'], {}), '(QtCore.Qt.white)\n', (16721, 16738), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((16885, 16920), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['res_color_function[1]'], {}), '(res_color_function[1])\n', (16897, 16920), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((17022, 17042), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['color_'], {}), '(color_)\n', (17034, 17042), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((14234, 14270), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['row_width', 'row_height'], {}), '(row_width, row_height)\n', (14247, 14270), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((14556, 14579), 'PyQt5.Qt.QPainter', 'Qt.QPainter', (['new_pixmap'], {}), '(new_pixmap)\n', (14567, 14579), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((15451, 15462), 'pineboolib.application.utils.xpm.cacheXPM', 'cacheXPM', (['d'], {}), '(d)\n', (15459, 15462), False, 'from pineboolib.application.utils.xpm import cacheXPM\n'), ((15538, 15554), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['d'], {}), '(d)\n', (15551, 15554), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((11840, 11858), 'pineboolib.application.utils.date_conversion.date_amd_to_dma', 'date_amd_to_dma', (['d'], {}), '(d)\n', (11855, 11858), False, 'from pineboolib.application.utils.date_conversion import date_amd_to_dma\n'), ((12186, 12222), 'locale.setlocale', 'locale.setlocale', (['locale.LC_TIME', '""""""'], {}), "(locale.LC_TIME, '')\n", (12202, 12222), False, 'import locale\n'), ((12261, 12293), 'locale.nl_langinfo', 'locale.nl_langinfo', (['locale.D_FMT'], {}), '(locale.D_FMT)\n', (12279, 12293), False, 'import locale\n'), ((12764, 12781), 'platform.system', 'platform.system', ([], {}), '()\n', (12779, 12781), False, 'import platform\n'), ((12963, 12986), 'PyQt5.QtCore.QLocale.system', 'QtCore.QLocale.system', ([], {}), '()\n', (12984, 12986), False, 'from PyQt5 import QtCore, QtGui, Qt\n'), ((13133, 13156), 'PyQt5.QtCore.QLocale.system', 'QtCore.QLocale.system', ([], {}), '()\n', (13154, 13156), False, 'from PyQt5 import QtCore, QtGui, Qt\n')] |
from flask import Flask, request, url_for
from MainEngine import MainEngine
# Initialize the app
app = Flask(__name__, instance_relative_config=True)
app.secret_key='<KEY>'
main_engine = MainEngine()
# Load the views
from app import views
# Load the config file
app.config.from_object('config') | [
"MainEngine.MainEngine",
"flask.Flask"
] | [((105, 151), 'flask.Flask', 'Flask', (['__name__'], {'instance_relative_config': '(True)'}), '(__name__, instance_relative_config=True)\n', (110, 151), False, 'from flask import Flask, request, url_for\n'), ((190, 202), 'MainEngine.MainEngine', 'MainEngine', ([], {}), '()\n', (200, 202), False, 'from MainEngine import MainEngine\n')] |
import unittest
from pathlib import Path
from pmworker.pdftk import (
cat_ranges_for_delete,
cat_ranges_for_reorder,
split_ranges
)
test_dir = Path(__file__).parent
test_data_dir = test_dir / Path("data")
abs_path_input_pdf = test_data_dir / Path("input.de.pdf")
class TestPDFTk(unittest.TestCase):
def test_split_ranges_input_1(self):
"""
Input: total = 9, after=1, before=False
Output: list1 = [1]; list2 = [2, 3, 4, ..., 9].
"""
list1, list2 = split_ranges(
total=9,
after=1,
before=False
)
self.assertEqual(
list1,
[1]
)
self.assertEqual(
list2,
[2, 3, 4, 5, 6, 7, 8, 9]
)
def test_split_ranges_input_2(self):
"""
Input: total = 9; after=False, before=1
Output: list1 = [], list2 = [1, 2, 3, 4, ..., 9]
"""
list1, list2 = split_ranges(
total=9,
after=False,
before=1
)
self.assertEqual(
list1,
[]
)
self.assertEqual(
list2,
[1, 2, 3, 4, 5, 6, 7, 8, 9]
)
def test_split_ranges_input_3(self):
"""
Input: total = 5; after=4; before=False
Output: list1 = [1, 2, 3, 4] list2 = [5]
"""
list1, list2 = split_ranges(
total=5,
after=4,
before=False
)
self.assertEqual(
list1,
[1, 2, 3, 4]
)
self.assertEqual(
list2,
[5]
)
def test_split_ranges_input_4(self):
"""
Input: total = 5; after=False; before=False
Output: list1 = [1, 2, 3, 4, 5] list2 = []
"""
list1, list2 = split_ranges(
total=5,
after=False,
before=False
)
self.assertEqual(
list1,
[1, 2, 3, 4, 5]
)
self.assertEqual(
list2,
[]
)
def test_cat_ranger_for_reorder(self):
# swap first and second pages
result = cat_ranges_for_reorder(
page_count=4,
new_order=[
{'page_num': 2, 'page_order': 1},
{'page_num': 1, 'page_order': 2},
{'page_num': 3, 'page_order': 3},
{'page_num': 4, 'page_order': 4}
]
)
self.assertEqual(
result,
[2, 1, 3, 4],
)
# swap first and last pages
result = cat_ranges_for_reorder(
page_count=4,
new_order=[
{'page_num': 4, 'page_order': 1},
{'page_num': 2, 'page_order': 2},
{'page_num': 3, 'page_order': 3},
{'page_num': 1, 'page_order': 4}
]
)
self.assertEqual(
result,
[4, 2, 3, 1],
)
# swap pages in two pages document
result = cat_ranges_for_reorder(
page_count=2,
new_order=[
{'page_num': 2, 'page_order': 1},
{'page_num': 1, 'page_order': 2},
]
)
self.assertEqual(
result,
[2, 1],
)
def test_cat_ranger_for_reorder_4_Y_pages(self):
"""
Given document Y with 4 pages in following order:
Y2
Y4
Y1
Y3
Try to reorder them:
Y1
Y2
Y3
Y4
"""
# swap pages in two pages document
result = cat_ranges_for_reorder(
page_count=4,
new_order=[
{'page_num': 1, 'page_order': 2},
{'page_num': 2, 'page_order': 4},
{'page_num': 3, 'page_order': 1},
{'page_num': 4, 'page_order': 3},
]
)
self.assertEqual(
result,
[3, 1, 4, 2],
)
def test_cat_ranges_for_delete(self):
result = cat_ranges_for_delete(
page_count=8,
page_numbers=[3]
)
self.assertEqual(
result,
[1, 2, 4, 5, 6, 7, 8],
)
result = cat_ranges_for_delete(
page_count=8,
page_numbers=[1, 2, 3]
)
self.assertEqual(
result,
[4, 5, 6, 7, 8],
)
result = cat_ranges_for_delete(
page_count=8,
page_numbers=[1, 8]
)
self.assertEqual(
result,
[2, 3, 4, 5, 6, 7],
)
| [
"pmworker.pdftk.split_ranges",
"pmworker.pdftk.cat_ranges_for_reorder",
"pmworker.pdftk.cat_ranges_for_delete",
"pathlib.Path"
] | [((157, 171), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (161, 171), False, 'from pathlib import Path\n'), ((206, 218), 'pathlib.Path', 'Path', (['"""data"""'], {}), "('data')\n", (210, 218), False, 'from pathlib import Path\n'), ((256, 276), 'pathlib.Path', 'Path', (['"""input.de.pdf"""'], {}), "('input.de.pdf')\n", (260, 276), False, 'from pathlib import Path\n'), ((508, 552), 'pmworker.pdftk.split_ranges', 'split_ranges', ([], {'total': '(9)', 'after': '(1)', 'before': '(False)'}), '(total=9, after=1, before=False)\n', (520, 552), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((957, 1001), 'pmworker.pdftk.split_ranges', 'split_ranges', ([], {'total': '(9)', 'after': '(False)', 'before': '(1)'}), '(total=9, after=False, before=1)\n', (969, 1001), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((1400, 1444), 'pmworker.pdftk.split_ranges', 'split_ranges', ([], {'total': '(5)', 'after': '(4)', 'before': '(False)'}), '(total=5, after=4, before=False)\n', (1412, 1444), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((1835, 1883), 'pmworker.pdftk.split_ranges', 'split_ranges', ([], {'total': '(5)', 'after': '(False)', 'before': '(False)'}), '(total=5, after=False, before=False)\n', (1847, 1883), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((2183, 2375), 'pmworker.pdftk.cat_ranges_for_reorder', 'cat_ranges_for_reorder', ([], {'page_count': '(4)', 'new_order': "[{'page_num': 2, 'page_order': 1}, {'page_num': 1, 'page_order': 2}, {\n 'page_num': 3, 'page_order': 3}, {'page_num': 4, 'page_order': 4}]"}), "(page_count=4, new_order=[{'page_num': 2,\n 'page_order': 1}, {'page_num': 1, 'page_order': 2}, {'page_num': 3,\n 'page_order': 3}, {'page_num': 4, 'page_order': 4}])\n", (2205, 2375), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((2616, 2808), 'pmworker.pdftk.cat_ranges_for_reorder', 'cat_ranges_for_reorder', ([], {'page_count': '(4)', 'new_order': "[{'page_num': 4, 'page_order': 1}, {'page_num': 2, 'page_order': 2}, {\n 'page_num': 3, 'page_order': 3}, {'page_num': 1, 'page_order': 4}]"}), "(page_count=4, new_order=[{'page_num': 4,\n 'page_order': 1}, {'page_num': 2, 'page_order': 2}, {'page_num': 3,\n 'page_order': 3}, {'page_num': 1, 'page_order': 4}])\n", (2638, 2808), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((3056, 3176), 'pmworker.pdftk.cat_ranges_for_reorder', 'cat_ranges_for_reorder', ([], {'page_count': '(2)', 'new_order': "[{'page_num': 2, 'page_order': 1}, {'page_num': 1, 'page_order': 2}]"}), "(page_count=2, new_order=[{'page_num': 2,\n 'page_order': 1}, {'page_num': 1, 'page_order': 2}])\n", (3078, 3176), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((3676, 3868), 'pmworker.pdftk.cat_ranges_for_reorder', 'cat_ranges_for_reorder', ([], {'page_count': '(4)', 'new_order': "[{'page_num': 1, 'page_order': 2}, {'page_num': 2, 'page_order': 4}, {\n 'page_num': 3, 'page_order': 1}, {'page_num': 4, 'page_order': 3}]"}), "(page_count=4, new_order=[{'page_num': 1,\n 'page_order': 2}, {'page_num': 2, 'page_order': 4}, {'page_num': 3,\n 'page_order': 1}, {'page_num': 4, 'page_order': 3}])\n", (3698, 3868), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((4116, 4169), 'pmworker.pdftk.cat_ranges_for_delete', 'cat_ranges_for_delete', ([], {'page_count': '(8)', 'page_numbers': '[3]'}), '(page_count=8, page_numbers=[3])\n', (4137, 4169), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((4313, 4372), 'pmworker.pdftk.cat_ranges_for_delete', 'cat_ranges_for_delete', ([], {'page_count': '(8)', 'page_numbers': '[1, 2, 3]'}), '(page_count=8, page_numbers=[1, 2, 3])\n', (4334, 4372), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n'), ((4509, 4565), 'pmworker.pdftk.cat_ranges_for_delete', 'cat_ranges_for_delete', ([], {'page_count': '(8)', 'page_numbers': '[1, 8]'}), '(page_count=8, page_numbers=[1, 8])\n', (4530, 4565), False, 'from pmworker.pdftk import cat_ranges_for_delete, cat_ranges_for_reorder, split_ranges\n')] |
from airflow import settings
from airflow.models import Connection
aws_credentials = Connection(
conn_id='aws_credentials',
conn_type='Amazon Web Services',
host='',
login='',
password=''
)
redshift = Connection(
conn_id='redshift',
conn_type='Postgres',
host='udacity-dend-p5.cvxtjfgdtfwc.us-west-2.redshift.amazonaws.com',
login='awsuser',
password='',
port='5439',
schema='dev'
)
session = settings.Session()
session.add(aws_credentials)
session.add(redshift)
session.commit()
| [
"airflow.models.Connection",
"airflow.settings.Session"
] | [((85, 192), 'airflow.models.Connection', 'Connection', ([], {'conn_id': '"""aws_credentials"""', 'conn_type': '"""Amazon Web Services"""', 'host': '""""""', 'login': '""""""', 'password': '""""""'}), "(conn_id='aws_credentials', conn_type='Amazon Web Services', host\n ='', login='', password='')\n", (95, 192), False, 'from airflow.models import Connection\n'), ((241, 430), 'airflow.models.Connection', 'Connection', ([], {'conn_id': '"""redshift"""', 'conn_type': '"""Postgres"""', 'host': '"""udacity-dend-p5.cvxtjfgdtfwc.us-west-2.redshift.amazonaws.com"""', 'login': '"""awsuser"""', 'password': '""""""', 'port': '"""5439"""', 'schema': '"""dev"""'}), "(conn_id='redshift', conn_type='Postgres', host=\n 'udacity-dend-p5.cvxtjfgdtfwc.us-west-2.redshift.amazonaws.com', login=\n 'awsuser', password='', port='5439', schema='dev')\n", (251, 430), False, 'from airflow.models import Connection\n'), ((489, 507), 'airflow.settings.Session', 'settings.Session', ([], {}), '()\n', (505, 507), False, 'from airflow import settings\n')] |
from django.template.loader_tags import register
@register.inclusion_tag('djangocms_comments/admin_submit_line.html', takes_context=True)
def submit_row(context):
"""
Displays the row of buttons for delete and save.
"""
from django.contrib.admin.templatetags.admin_modify import submit_row
row = submit_row(context)
row['is_soft_deleted'] = context['original'].moderated == 'deleted'
return row
| [
"django.template.loader_tags.register.inclusion_tag",
"django.contrib.admin.templatetags.admin_modify.submit_row"
] | [((52, 143), 'django.template.loader_tags.register.inclusion_tag', 'register.inclusion_tag', (['"""djangocms_comments/admin_submit_line.html"""'], {'takes_context': '(True)'}), "('djangocms_comments/admin_submit_line.html',\n takes_context=True)\n", (74, 143), False, 'from django.template.loader_tags import register\n'), ((318, 337), 'django.contrib.admin.templatetags.admin_modify.submit_row', 'submit_row', (['context'], {}), '(context)\n', (328, 337), False, 'from django.contrib.admin.templatetags.admin_modify import submit_row\n')] |
#<NAME>
#evargasv, Section I, 15-112
# Fall 2015 Term Project
### Some code taken From CMU 15-112 Fall 2015 course
### Website: http://www.cs.cmu.edu/~112/index.html
### Took: Animation Class, rgbString Method, scroll Implementation
## I do not own any of the images used in this program
# Player sprite from :
# http://www.picgifs.com/graphics/walking/graphics-walking-859804-821606/
from random import random, choice, randint
from math import sin, pi
class Drawn(object):
# light offset passes the time to all Drawn children to represent day time
daylightOffset = 1
#game = none # init in player
@staticmethod #from CMU 15-112
def rgbString(red, green, blue, outside = False): #edited to suit game
offset = lambda x: int(__class__.daylightOffset * x) if outside else x
return "#%02x%02x%02x" % (offset(red),offset(green), offset(blue))
@staticmethod
def updateDaylightOffset(colorOffset):
minDayLight = 0.15 # set minimum rgb product
__class__.daylightOffset = max(colorOffset, minDayLight)
class GameAttribute(Drawn):
def __init__(self, owner, value, cap = None , name = None, kind = None):
self.kind = kind
self.owner = owner
self.cap = cap
self.value = value
self.name = name
self.addToList()
def __repr__(self):
return self.name
def __eq__(self, other):
return isinstance(other, GameAttribute) and (self.kind == other.kind)
def addToList(self):
if self.name.endswith('stat'):
self.owner.stats.append(self)
else:
self.owner.resources.append(self)
def isEmpty(self):
if self.value <= 0:
return True
return False
def lose(self, loss):
empty = self.isEmpty()
if not empty and self.value >= loss:
self.value -= loss
return self.value < loss
def gain(self, gain):
if self.cap == None or self.value < self.cap:
self.value += gain
else:
self.value = self.cap
def getValue(self):
return self.value
class MyButton(Drawn):
def __init__(self, x0 , y0, width, height, text = None, color = None,
font = None):
self.x0 = x0
self.y0 = y0
self.width = width
self.height = height
self.x1 = self.x0 + width
self.y1 =self.y0 + height
self.txt = text
self.font = font
self.color = color
def draw(self, game):
game.canvas.create_rectangle(self.x0, self.y0, self.x1, self.y1,
fill = self.color)
game.canvas.create_text(self.x0 + self.width/2,
self.y0 + self.height/2,
text = self.txt, font = self.font)
def isClicked(self, x, y):
if x>=self.x0 and x<=self.x1 and y<=self.y1 and y>= self.y0:
return True
return False
###################################################################
# Player
###################################################################
class Player(Drawn):
'''Creates player with all actions and attributes'''
def __init__(self, game):
self.game = game
self.lookRightIcon = game.playerImages[0]
self.lookLeftIcon = game.playerImages[1]
self.dir = 'right'
self.walking = False
self.screenInsteps = 30# 30 steps to move accross screen
self.step = game.width//self.screenInsteps
self.displayInv = False
self.wieldables = []
self.specialItems = []
self.itemsPlaced = []
self.wielding = None
#Define position and dimensions
self.xPos = self.step * 10 # arbitrary
self.yPos = self.game.groundY
self.width = self.lookRightIcon.width()
self.height = self.lookRightIcon.height()
self.xScrollMargin = self.width + 20 # min distance from canvas edge
self.xScroll = 0 # amount scrolled (right increase), (left decrease)
self.curX = self.xPos - self.xScroll # actuall player xPos on screen
self.xVisited = {0, game.width} # set of visited x coords in world
#Initialize player stats
self.alive = True
self.stats = []
# does not kill player
self.power = GameAttribute(self, 100, 100, 'Powerstat', kind = 'power')
#essential
self.food = GameAttribute(self, 20, 20, 'Foodstat', kind = 'food')
#essential
self.water = GameAttribute(self, 30, 30, 'Waterstat', kind = 'water')
#Initialize player Resources
self.resources = []
self.myWater = playerResource(self, self.water.value*3, name ='myWater',
kind = 'water', iconPath = '../GifAssets/WaterIcon.gif')
self.myFood = playerResource(self, self.food.value*3, name ='myFood',
kind = 'food',iconPath = '../GifAssets/FoodIcon.gif')
self.myWood = playerResource(self, 40, name ='myWood', kind = 'wood',
iconPath = '../GifAssets/WoodIcon.gif')
self.myMoney = playerResource(self, 100, name = 'myMoney',
kind ='money',iconPath = '../GifAssets/MoneyIcon.gif')
self.mySeeds = None #Seeds()
self.myLeaves = None #Leaves
def walk(self, d): # Scroll implementation From CMU's 15-112
#changes player's x position and updates xScroll
sx= self.xScroll
self.dir = 'right' if d > 0 else 'left'
canvasWidth = self.game.width
self.xPos += (d * self.step)
if self.xPos < sx + self.xScrollMargin :
self.xScroll = self.xPos - self.xScrollMargin
elif self.xPos > sx + canvasWidth - self.xScrollMargin:
self.xScroll = self.xPos - canvasWidth + self.xScrollMargin
self.curX = self.xPos - sx
def updateVisitedTerrain(self):
# returns True if in new terrain, and updates xVisited
vx = self.xVisited
if self.xPos not in set(range(min(vx), max(vx), self.step)):
# only add new positions, max/min has step to skip unnecessary values
self.xVisited.add(self.xPos)
return True
return False
def enterBuilding(self, building):
# has to be 2 steps away from left of player
xDoor = building.x1 - self.xScroll
if xDoor <= self.curX and xDoor >= self.curX - (2*self.step):
return True
return False
def interact(self, thing):
if thing.x1 >= self.curX and thing.x0 <= self.curX:
return True
return False
def wield(self, i):
self.wielding = self.wieldables[i]
def placeInWorld(self):
print(self.wieldables, 'before')
if self.wielding != None:
self.itemsPlaced.append((self.wielding, self.xPos))
self.wieldables.remove(self.wielding)
self.wielding = None
print(self.wieldables, 'after')
print(self.wielding, 'wielding')
def chop(self, trees):
'''Optimization for onScreen only trees is not great,
because I would still need to loop over the entire treelist,
make a smaller list, and then loop through that one again.
It's better to just loop through the tree list and compare xCoordinates'''
for tree in trees:
if tree.x1 >= self.xPos and tree.x0 <= self.xPos:
tree.chopped()
if tree.integrity <= 0:
for resource in tree.resources:
for myResource in self.resources:
if resource == myResource:
myResource.gain(resource.value)
trees.remove(tree)
break
def pickFruit(self, trees):
for tree in trees:
if (isinstance(tree, FruitTrees) and tree.x1 >= self.xPos
and tree.x0 <= self.xPos):
value = tree.picked()
self.myFood.gain(value)
def hunger(self):
empty = self.food.lose(1)
if empty: #empty
self.alive = False
def thirst(self):
empty = self.water.lose(1)
if empty: #empty
self.alive = False
def eat(self):
if not self.myFood.isEmpty():
self.myFood.lose(1)
self.food.gain(1)
def drink(self):
if not self.myWater.isEmpty():
self.myWater.lose(1)
self.water.gain(1)
def usePower(self, output):
empty = self.power.lose(output)
if empty:
return False
return True
def drawStat(self, game, stat, i):
barWidth = game.width//4
barHeight = game.height//20
x0 = 5
y0 = 10
yOffset = x0 + barHeight * i # brings next stat lower
# Draw Encasing box
barOutline = (x0,yOffset, x0 + barWidth, yOffset + barHeight)
game.canvas.create_rectangle(*barOutline, width = 2)
# Draw Colored bar to indicate actual value
colors = ['yellow', 'orange', 'blue']
statStatus = stat.getValue()/stat.cap
statBar = (x0, yOffset, x0 + barWidth * statStatus, yOffset + barHeight)
game.canvas.create_rectangle(*statBar, fill = colors[i], width = 0)
# Draw Text To indicate what stat is
game.canvas.create_text(x0, yOffset, text = str(stat).strip('stat'),
anchor = 'nw')
def drawPlayer(self, game):
if self.dir == 'right':
self.curImage = self.lookRightIcon
else: self.curImage = self.lookLeftIcon
game.canvas.create_image(self.curX, self.yPos,
anchor = 'se', image = self.curImage)
def toggleInv(self):
self.displayInv = not self.displayInv
def drawInventory(self, game):
(xMargin, yMargin) = (game.width//8, game.height//8)
self.invCoords = (xMargin, yMargin, game.width - xMargin,
game.height - yMargin)
game.canvas.create_rectangle(*self.invCoords, fill = 'grey')
#yiconOffset =
#for i,resource in enumerate(self.resources):
#resource.drawInInv()
def draw(self):
game = self.game
self.drawPlayer(game)
for i,stat in enumerate(self.stats):
self.drawStat(game, stat, i)
#if self.wielding != None:
#self.wielding.drawEx(game, sx)
class playerResource(GameAttribute):
PRICES = {'water': 4, 'food': 5, 'wood': 3, 'money':0}
def __init__(self, owner, value, cap = None , name = None, kind = None,
iconPath = None):
super().__init__(owner,value, cap, name, kind)
self.game = self.owner.game
self.iconPath = iconPath
self.price = __class__.PRICES[self.kind]
self.selling = 0
self.moreButton = MyButton(0, 0, 0, 0, 'Dummy')
self.lessButton = MyButton(0, 0, 0, 0, 'Dummy')#will create when drawing
def drawInInv(self, x0, y0, width, height, image):
canvas = self.game.canvas
canvas.create_rectangle(x0, y0, x0+width, y0+height)
self.image = image # need reference
display = 'Amount: %d' % (self.value)
canvas.create_text(x0, y0 + self.image.height() ,anchor = 'nw',
text = display)
canvas.create_image(x0, y0, anchor = 'nw', image = self.image)
def drawInMerchScreen(self, x0, y0, width, height, image):
canvas = self.game.canvas
self.image = image #need reference
display = str(self.selling)
textXOffset = 10
canvas.create_text(x0 + self.image.width() + textXOffset,
y0 + height//2, anchor = 'w', text = display)
canvas.create_text(x0 + self.image.width() + textXOffset*2,
y0 + height//2, anchor = 'w', text = '$' + str(self.price))
canvas.create_image(x0, y0, anchor = 'nw', image = self.image)
buttonW, buttonH = width//4, height//2
self.moreButton = MyButton(x0 + width-buttonW, y0, buttonW, buttonH,
'MORE', 'green')
self.lessButton = MyButton(x0 + width-buttonW, y0 + buttonH, buttonW,
buttonH, 'LESS', 'red')
self.moreButton.draw(self.game)
self.lessButton.draw(self.game)
###################################################################
# World
###################################################################
class World(Drawn):
def __init__(self, player):
self.game = player.game
# set up time
minute = 60
self.secondCount = 0
self.todaySecCount = 0
self.dayStage = 'day'
self.dayTimeFrac = 0
self.dayCount = 0
self.dayLength = 0.5* minute
# set up the player
self.player = player
# make world and set environmental variables
self.makeWorld()
self.newDay()
self.setTreeDensity(4)
self.raining = False
self.rainIcon = self.game.rainIcon
self.rainx0 = 0
self.sunIcon = self.game.sunIcon
self.sunY0 = self.game.groundY
def makeWorld(self):
self.trees = [Trees(self,self.game.width, 3) ,
FruitTrees(self, 5*self.game.width//3, 3, 'apple')]
self.buildings = []
self.house = House(self, 0, 2*self.game.width//3,
2*self.game.height//3, 'playerHouse')
self.shed = Shed(self, -5*self.game.width//6,
self.game.width//3, self.game.height//2, 'craft')
def rollProbability(self):
self.prob = [1]+[2]+[3]*3 + [4]*3 + [5]*2 + [6]*2 +[7]+[8]
return choice(self.prob)
def newDay(self):
self.dayCount += 1
self.sunIntensity = self.rollProbability()
self.rainProbability = self.rollProbability()/max(self.prob) # 0 to 1
self.setRainTime()
for tree in self.trees:
tree.grow()
def setRainTime(self):
if self.rainProbability > 0.15: # any lower means no rain that day
duration = round(self.rainProbability * self.dayLength)
self.rainStart = randint(0, self.dayLength - duration)
self.rainEnd = self.rainStart + duration
else:
self.rainStart = self.rainEnd = None
def setDayLen(self, length):pass
def setTreeDensity(self, maxTreesPerScreen):
self.treeDensity = maxTreesPerScreen
def generateTree(self):
newTreeXs = set()
pX = self.player.xPos
xPositive = pX > 0 # true if headed right
# roll variables
rollNumTrees = randint(1,4)
# xOffset so with max tree width so they don't overlap
xOffset = self.game.width//(Trees.width*Trees.ageCap)
xDir = 1 if xPositive else -1 # going left is negative x
for tree in range(rollNumTrees):
# random distance from player
def rollTreeX():
return xDir * randint(1, Trees.width*Trees.ageCap)
xFactor = rollTreeX()
if xFactor * xOffset + pX in newTreeXs:
xFactor = rollTreeX() # roll untill unique x is found
treeX = xFactor * xOffset # placement in new scree
selectTreeRoll = randint(1,10)
age = randint(1, Trees.ageCap-2)
if selectTreeRoll <= 7: # Prob = 0.7
self.trees.append(Trees(self, pX + treeX, age))
else: # Prob = 0.3
self.trees.append(FruitTrees(self, pX + treeX, age, 'apple'))
def tick(self):
'''advances time in game, returns current second count'''
self.secondCount += 1
self.todaySecCount = self.secondCount % self.dayLength
if self.todaySecCount == 0: self.newDay()
if self.rainStart and self.todaySecCount == self.rainStart:
self.raining = True
elif self.rainStart and self.todaySecCount == self.rainEnd:
self.raining = False
if self.todaySecCount <= self.dayLength//3: # split day in thirds
self.dayStage = 'day'
self.moveSun(1)
self.chargePannels(1)
elif self.todaySecCount <= 2*self.dayLength//3:
self.dayStage = 'afternoon'
self.moveSun(-1)
self.chargePannels(2)
else:
self.dayStage = 'night'
if self.raining:
self.player.myWater.gain(1)
return self.secondCount
def adjustWorldColor(self): # called in Simulator.py in openWorldTimerFired
#Calculates product for rgb values of objects outside due to day/night
self.dayTimeFrac = self.game.timerCount/(self.dayLength*
self.game.second)
# only need first half of sin graph, so multiply by pi, not 2pi
adjustTimeColor = self.dayTimeFrac * pi
phaseShift = pi/6 # don't want to start day at darkest point.
myDayNightFunction = lambda t, C: abs(sin(t + C)) # yay for trig
return myDayNightFunction(adjustTimeColor, phaseShift)
def moveRain(self):
rainStep = 5
self.rainx0 += rainStep
if self.rainx0 + self.rainIcon.width() >= self.game.width:
self.rainx0 = 0
def moveSun(self, d):
yInterval = self.game.height - self.game.groundY # height of sky
# step through yInterval in one third of day, two thirds is up and down
step = 2*yInterval//(self.dayLength//3)
if d > 0:
self.sunY0 -= step # move up screen
else:
self.sunY0 += step # move down screen
def drawBackground(self, game):
skyBlue = self.rgbString(135, 206, 255, True)
groundGreen = self.rgbString(0,201,87, True)
# Draw Sky
game.canvas.create_rectangle(0,0,self.game.width, self.game.height,
fill = skyBlue)
# Draw Ground
game.canvas.create_rectangle(0, game.groundY, self.game.width,
self.game.height, fill = groundGreen)
def drawRain(self, game):
game.canvas.create_image(self.rainx0, game.height//8, anchor = 'w',
image = self.rainIcon)
def drawSun(self, game):
game.canvas.create_image(3*game.width//4, self.sunY0, anchor = 's',
image = self.sunIcon)
def chargePannels(self, chargePerSec):
for item,xPos in self.player.itemsPlaced:
if isinstance(item, SolarCells):
item.charge(self.player, chargePerSec)
def draw(self):
game = self.game
sx = self.player.xScroll
self.drawBackground(game)
if self.dayStage != 'night':
self.drawSun(game)
for building in self.buildings:
building.drawExt(game, sx)
for tree in self.trees:
tree.draw(sx)
for item,xPos in self.player.itemsPlaced:
item.drawExt(game,xPos, sx)
if self.raining:
self.drawRain(game)
##################
#Outside Objects
##################
class GameObjects(Drawn):
def __init__(self, world, x0, width, height, name):
self.world = world # for dimensions, and time
self.owner = self.world.player
self.game = self.owner.game
self.name = name
self.width = width
self.height = height
self.x0 = x0
self.x1 = self.x0 + width
def __repr__(self):
return self.name
def getBounds(self):
return (self.x0, self.y0, self.x1, self.y1)
#######################WHAT I WILL WORK ON TODAY
class Trees(GameObjects):
counter = 0
ageCap = 6
(width, height) = (10, 30)
def __init__(self, world, x0, age):
__class__.counter += 1
self.count = __class__.counter
self.age = age
self.size = self.age* randint(2,4) # roll
self.woodProcuct = randint(1,5) # roll
super().__init__(world, x0, self.size * __class__.width,
self.size * __class__.height, 'tree%d' % self.count)
self.resources = []
self.wood = GameAttribute(self, self.size*self.woodProcuct,
kind = 'wood', name= 'tree%dWood' % self.count)
self.integrity = age
def grow(self):
# increase wood, and leaves.
if self.age < __class__.ageCap:
oldSizeRoll = self.size/self.age # get old size roll
self.age += 1
self.size = oldSizeRoll * self.age
self.wood.gain(self.woodProcuct) # increase wood by change in size
self.width = self.size * __class__.width
self.height = self.size * __class__.height # recalculate dimensions
self.x1 = self.x0 + self.width
def chopped(self):
self.integrity -= 1
def draw(self, sx):
game = self.game
leafColor = self.rgbString(58, 95, 11, True)
trunkColor = self.rgbString(165, 100, 6, True)
self.y0, self.y1 = self.game.groundY - self.height, game.groundY
xOffset = self.width//3
yOffset = self.height//3
#draw Trunk
trunkX0, trunkX1 = self.x0 + xOffset, self.x1 - xOffset
game.canvas.create_rectangle(trunkX0 - sx, self.y1 - yOffset,
trunkX1 - sx, self.y1, fill = trunkColor)
# draw greenery
leavesCoords = [
(self.x0 - sx, self.y1 - yOffset),
(trunkX0 - sx, self.y0 +yOffset),
(self.x0 - sx, self.y0 +yOffset),
(self.x0 - sx + self.width/2, self.y0),
(self.x1 - sx, self.y0 + yOffset),
(trunkX1 - sx, self.y0 + yOffset),
(self.x1 - sx, self.y1 - yOffset)]
game.canvas.create_polygon(leavesCoords, fill = leafColor)
class FruitTrees(Trees):
# map fruits to values
FRUITS= {'apple': 3, 'orange': 3, 'apricot': 2, 'cherry': 1,}
def __init__(self, world, x0, age, fruit):
super().__init__(world, x0, age)
self.fruit = fruit
self.fruitImage = self.game.fruitImages[self.fruit]
self.fruitNumber = min(self.size, 8) # max 8 fruits
self.foodValue = __class__.FRUITS[self.fruit]
self.food = GameAttribute(self, self.fruitNumber * self.foodValue,
name= 'tree%dWood' % self.count, kind = 'food')
def picked(self):
if self.fruitNumber>0:
self.fruitNumber -= 1
self.food.lose(self.foodValue)
return self.foodValue
return 0
def grow(self):
super().grow()
self.fruitNumber = min(self.fruitNumber +1, 8) # keep max, regen fruit
def drawFruit(self, sx):
xOffset = self.width//4
yOffset = self.height//3
j = 0
for i in range(self.fruitNumber):
i %= 4 # columns of 4
if i % 4 == 0:
j += 1 # when a col is full start new row
self.game.canvas.create_image((self.x0 + i*xOffset) - sx,
self.y1 - j*yOffset, anchor = 's', image = self.fruitImage)
def draw(self, sx):
super().draw(sx)
self.drawFruit(sx)
class SolarCells(Drawn):
counter = 0
def __init__(self, world):
__class__.counter += 1
self.count = __class__.counter
self.price = 100
self.iconPath = '../GifAssets/SolarCellIcon.gif'
self.world = world
timeOffset = Drawn.daylightOffset
self.buying = 0
def __repr__(self):
return ('SolarCell %d' % self.count)
def buy(self, player, amount):
if amount <= 0:
return
else:
if player.myMoney.value >= self.price:
player.myMoney.lose(self.price)
player.wieldables.append(SolarCells(self.world))
self.buy(player, amount - 1)
def charge(self, player, n):
player.power.gain(n)
print(player.power.value)
def drawExt(self, game, x0, sx):
self.x0 = x0
game.canvas.create_image(self.x0 - sx, game.groundY,
image = game.panelImage)
def drawInMerchScreen(self, x0, y0, width, height, image):
canvas = self.world.game.canvas
self.image = image #need reference
display = str(self.buying)
textXOffset = 10
canvas.create_text(x0 + self.image.width() + textXOffset,
y0 + height//2, anchor = 'w', text = display)
canvas.create_text(x0 + self.image.width() + 2*textXOffset,
y0 + height//2, anchor = 'w',
text ='$'+ str(self.price))
canvas.create_image(x0, y0, anchor = 'nw', image = self.image)
buttonW, buttonH = width//4, height//2
self.moreButton = MyButton(x0 + width-buttonW, y0, buttonW, buttonH,
'MORE', 'green')
self.lessButton = MyButton(x0 + width-buttonW, y0 + buttonH, buttonW,
buttonH, 'LESS', 'red')
self.moreButton.draw(self.world.game)
self.lessButton.draw(self.world.game)
##################
#Structures
##################
class Structure(GameObjects):
def __init__(self, world, x0, width, height, name): # windows
super().__init__(world, x0, width, height, name)
#self.windows = windows ADD WINDOWS
world.buildings.append(self)
def __repr__(self):
return self.name
def inside(self): # called when entering building to switch mode
return self.name
# When in openWorld, Draw exterior
def drawExt(self, game, sx):
mainY0 = game.groundY - 2*self.height/3
roofY0 = game.groundY - self.height
windowWidth = self.width//6
windowHeight = self.height//5
self.drawMain(game, self.x0 - sx, mainY0, self.x1 - sx, game.groundY)
self.drawRoof(game, self.x0 - sx, roofY0, self.x1 - sx, mainY0)
#self.drawWindow(game, windowWidth, windowHeight)
def drawMain(self, game, x0, y0, x1, y1,):
color = self.rgbString(139, 105, 20, True)
game.canvas.create_rectangle(x0, y0, x1, y1, fill = color)
def drawRoof(self, game, x0, y0, x1, y1):
color = self.rgbString(107, 66, 38, True)
xOffset = abs(x0 - x1)//8 # for trapezium shape
polygonCoords = [(x0-xOffset,y1), (x0+xOffset,y0), (x1-xOffset,y0),
(x1+xOffset,y1)]
game.canvas.create_polygon(polygonCoords,fill = color)
def drawWindow(self, game, wWidth, wHeight):
#for window in range(self.windows): Draw window
pass
# When inside, Draw interior
def drawIn(self):
''' For House: workbench, bed, computer, fridge'''
game = self.world.game
wallColor = self.rgbString(139, 105, 20)
game.canvas.create_rectangle(0,0, game.width, game.height,
fill = wallColor)
class Shed(Structure):
def __init__(self, world, x0, width, height, name): # windows
super().__init__(world, x0, width, height, name)
def drawIn(self):
super().drawIn()
self.drawSideBars(self.game)
self.drawTable(self.game)
self.drawText(self.game)
def drawText(self, game):
game.canvas.create_text(5,0, text = 'toolBar', anchor = 'nw',
font = 'Helvetica 22')
game.canvas.create_text(5*game.width//6,0, text = 'Materials',
anchor = 'nw', font = 'Helvetica 22')
def drawSideBars(self, game):
game.canvas.create_rectangle(0,0, game.width//6, 2*(game.height//3),
fill = 'brown')
game.canvas.create_rectangle(5*game.width//6,0, game.width, game.height,
fill = 'brown')
def drawTable(self, game):
tableColor = self.rgbString(133, 87, 35)
game.canvas.create_rectangle(game.width//6,0, 5*game.width//6,
game.height, fill= tableColor)
class House(Structure):
def __init__(self, world, x0, width, height, name): # windows
super().__init__(world, x0, width, height, name)
self.computer = Computer(world, self.game.width//3, self.game.width//8,
self.game.height//15, 'computer', '../GifAssets/ComputerIcon.gif')
self.objects = [self.computer]
self.flowerPainting = self.game.artImages[0]
self.peacePainting = self.game.artImages[1]
def drawIn(self):
super().drawIn()
game = self.game
game.canvas.create_image(game.width//4, game.height//2,
image = self.flowerPainting)
game.canvas.create_image(3*game.width//4, 2*game.height//3,
image = self.peacePainting)
####################
# Computer
####################
class Computer(GameObjects):
def __init__(self, world, x0, width, height, name, iconPath = None):
super().__init__(world, x0, width, height, name)
self.screenColor = self.rgbString(102, 178, 255)
self.y0 = self.owner.yPos - self.owner.height
self.iconPath = iconPath
self.icons = []
iconOffset = self.game.height//8
iconX0 = self.game.width//15
self.browserIcon = BrowserIcon(self, iconX0, iconOffset, 'browser',
'../GifAssets/BrowserIcon.gif')
self.inventoryIcon = CompIcon(self, iconX0, 3*iconOffset, 'inventory',
'../GifAssets/Inventory.gif')
self.hasPower = True
def turnOff(self):
self.hasPower = False
def drawIn(self):
# draw Background
self.drawBackground(self.game)
self.owner.drawStat(self.game, self.owner.power, 0) # draw power stat
# icons are drawn in computerRedrawAll in simulator.py
def drawBackground(self, game):
if self.hasPower:
game.canvas.create_rectangle(0,0, game.width, game.height,
fill = self.screenColor)
else: # draw black screen
game.canvas.create_rectangle(0,0, game.width, game.height,
fill = 'black')
def drawExt(self, image):
self.image = image # need reference
self.game.canvas.create_image(self.x0, self.y0, image = self.image)
class CompIcon(Drawn):
def __init__(self, computer, x0, y0, name, iconPath = None):
self.iconPath = iconPath
self.x0 = x0
self.y0 = y0
self.name = name
computer.icons.append(self)
self.game = computer.game
self.world = computer.world
self.margin = computer.game.width//8
self.windowCoords = (0 + self.margin, 0 + self.margin,
self.game.width - self.margin, self.game.height - self.margin)
def __repr__(self):
return self.name
def iconClicked(self, x, y):
width = self.image.width()
height = self.image.height()
if (x>=self.x0 and x<=self.x0 + width and y<=self.y0 + height and
y >= self.y0):
return True
return False
def drawIn(self, game): # draw 'Application' contents
windowColor = self.rgbString(0, 51, 102)
game.canvas.create_rectangle(*self.windowCoords, fill = windowColor)
for button in self.buttons:
button.draw(self.game)
def drawExt(self, game, image): #draw icon in desktop screen
self.image = image # need reference to redraw
game.canvas.create_image(self.x0, self.y0, anchor = 'nw',
image = self.image)
class BrowserIcon(CompIcon):
def __init__(self, computer, x0, y0, name, iconPath = None):
super().__init__(computer, x0, y0, name, iconPath)
self.buttons = []
self.player = self.world.player
self.createButtons()
self.sellMerch = [resource for resource in self.player.resources]
solarCell = SolarCells(self.world)
self.buyMerch = [solarCell]
def createButtons(self):
(wX0, wY0, wX1, wY1) = self.windowCoords
wWidth, wHeight = (wX1 - wX0, wY1 - wY0)
self.buyButton(wX0, wY0, wX1, wY1, wWidth, wHeight)
self.sellButton(wX0, wY0, wX1, wY1, wWidth, wHeight)
def buyButton(self, wX0, wY0, wX1, wY1, wWidth, wHeight):
self.merchColor = self.rgbString( 153, 204, 255)
buy = MyButton(wX0 + wWidth//6, wY0, wWidth//6,
wHeight//6, text = 'Buy', color = self.merchColor,
font = 'Helvetica 24')
self.buttons.append(buy)
def sellButton(self, wX0, wY0, wX1, wY1, wWidth, wHeight):
sell = MyButton(wX0 + 3*wWidth//6, wY0, wWidth//6,
wHeight//6, text = 'Sell', color = self.merchColor,
font = 'Helvetica 24')
self.buttons.append(sell)
class Garden(Structure):
def __init__(self, world, x0, width, height, name):
pass
| [
"random.choice",
"random.randint",
"math.sin"
] | [((13939, 13956), 'random.choice', 'choice', (['self.prob'], {}), '(self.prob)\n', (13945, 13956), False, 'from random import random, choice, randint\n'), ((14889, 14902), 'random.randint', 'randint', (['(1)', '(4)'], {}), '(1, 4)\n', (14896, 14902), False, 'from random import random, choice, randint\n'), ((20209, 20222), 'random.randint', 'randint', (['(1)', '(5)'], {}), '(1, 5)\n', (20216, 20222), False, 'from random import random, choice, randint\n'), ((14419, 14456), 'random.randint', 'randint', (['(0)', '(self.dayLength - duration)'], {}), '(0, self.dayLength - duration)\n', (14426, 14456), False, 'from random import random, choice, randint\n'), ((15520, 15534), 'random.randint', 'randint', (['(1)', '(10)'], {}), '(1, 10)\n', (15527, 15534), False, 'from random import random, choice, randint\n'), ((15552, 15580), 'random.randint', 'randint', (['(1)', '(Trees.ageCap - 2)'], {}), '(1, Trees.ageCap - 2)\n', (15559, 15580), False, 'from random import random, choice, randint\n'), ((20162, 20175), 'random.randint', 'randint', (['(2)', '(4)'], {}), '(2, 4)\n', (20169, 20175), False, 'from random import random, choice, randint\n'), ((17247, 17257), 'math.sin', 'sin', (['(t + C)'], {}), '(t + C)\n', (17250, 17257), False, 'from math import sin, pi\n'), ((15235, 15273), 'random.randint', 'randint', (['(1)', '(Trees.width * Trees.ageCap)'], {}), '(1, Trees.width * Trees.ageCap)\n', (15242, 15273), False, 'from random import random, choice, randint\n')] |
import re
import json
import requests
from bs4 import BeautifulSoup
from pathlib import Path
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
@app.route('/')
def index():
return render_template("index.html")
@app.route('/wiki_table', methods = ['POST'])
def wiki_table():
if request.method == "POST":
text_url = request.form['input']
# check if input filed is empty
if text_url =="":
return redirect(url_for('index'))
title_list=[]
wiki_site=[]
wiki_title=[]
wiki_url=[]
wiki_geo={}
wiki_quick={}
item={}
if 'wikipedia.org' in text_url:
p=Path(text_url)
url="https://"+p.parts[1]
# Load Wikipedia Category Page
page_response = requests.get(text_url)
page_content = BeautifulSoup(page_response.content, "html.parser")
# check if there is a div tag with class mw-pages or mw-category
if 'mw-pages' in str(page_content):
# Pages in category
divPage = page_content.find('div',{"id":"mw-pages"})
# print("mw-pages")
elif 'mw-category' in str(page_content):
# Pages in category
divPage = page_content.find('div',{"class":"mw-category"})
# print("mw-category")
# Get li tags
li=divPage.find_all('li')
# Looping through all the links
for j in range(len(li)):
a=li[j].find('a', href=True)
# Title of article
title_list.append(a['title'])
item[a['title']]={}
item[a['title']]["title"]=a['title']
# Create new link for subpage
url_new=url+a['href']
item[a['title']]["title_url"]=url_new
# WikiData from URL
try:
page_data = requests.get(url_new)
page_data_content = BeautifulSoup(page_data.content, "html.parser")
li_data=page_data_content.find('li',{"id":"t-wikibase"})
a_data=li_data.find('a', href=True)
a_data_id=a_data['href'].replace("https://www.wikidata.org/wiki/Special:EntityPage/","")
a_data_entity="http://www.wikidata.org/entity/"+a_data_id
# Url of wikidata page
item[a['title']]["wikidata_id"]=a_data_id
item[a['title']]["wikidata_url"]=a_data_entity
page_data_instance = requests.get(a_data_entity)
page_data_instance_content = BeautifulSoup(page_data_instance.content, "html.parser")
# JSON data of a page
b=str(page_data_instance_content)
text_json = json.loads(b)
# Whole JSON data of a page
# print(text_json)
try:
temp_array_lat=[]
temp_array_lon=[]
item[a['title']]['geo']=False
for i in range(len(text_json["entities"][a_data_id]['claims']['P625'])):
# get coordinates from the wikidata page if they exist
P625_item_lat=text_json["entities"][a_data_id]['claims']['P625'][i]['mainsnak']["datavalue"]["value"]['latitude']
P625_item_lon=text_json["entities"][a_data_id]['claims']['P625'][i]['mainsnak']["datavalue"]["value"]['longitude']
temp_array_lat.append(P625_item_lat)
item[a['title']]['latitude']=temp_array_lat
temp_array_lon.append(P625_item_lon)
item[a['title']]['longitude']=temp_array_lon
# if wikidata has coordinates it become true
item[a['title']]['geo']=True
except:
pass
try:
# get sites from wikidata page
language_item=text_json["entities"][a_data_id]['sitelinks']
language_item_keys=language_item.keys()
listDic = list(language_item.keys())
# print(listDic)
# n=0
# if len(listDic)<5:
# n=len(listDic)
# else:
# n=5
# for i in range(n):
# key=listDic[i]
for key in listDic:
# for key in language_item.keys():
# if (key != 'commonswiki') and ('wikivoyage' not in key) and (key == 'skwiki' or key == 'cswiki' or key == 'eowiki' or key == 'fawiki' or key == 'be_x_oldwiki'):
if (key != 'commonswiki') and ('wikivoyage' not in key) and ('wikiquote' not in key) and ('wikisource' not in key) and ('wikinews' not in key) and ('wiktionary' not in key) and ('wikiversity' not in key) and ('wikibooks' not in key):
print(key)
language_title=text_json["entities"][a_data_id]['sitelinks'][key]['title']
language_url=text_json["entities"][a_data_id]['sitelinks'][key]['url']
print(language_title)
wiki_site.append(key)
wiki_title.append(language_title)
wiki_url.append(language_url)
item[a['title']]['wiki_site']=wiki_site
item[a['title']]['wiki_title']=wiki_title
item[a['title']]['wiki_url']=wiki_url
# begin scraping data from the pages
page_coordinates = requests.get(language_url)
page_coordinates_content = BeautifulSoup(page_coordinates.content, "html.parser")
page_coordinates_content_div=''
# check if there is coordinates on the page
if ('mw-indicator-coordinates' in str(page_coordinates_content)) and (key != 'cswiki' or key != 'skwiki'):
page_coordinates_content_div = page_coordinates_content.find('div',{"id":"mw-indicator-coordinates"})
print("mw-indicator-coordinates")
elif 'coordinatesindicator' in str(page_coordinates_content):
page_coordinates_content_div = page_coordinates_content.find('span',{"id":"coordinatesindicator"})
print("coordinatesindicator")
elif ('Показать карту' in str(page_coordinates_content)):
page_coordinates_content_div = page_coordinates_content.find('span',{"title":"Показать карту"})
print("Показать карту")
elif 'Xəritədə göstər' in str(page_coordinates_content):
page_coordinates_content_div = page_coordinates_content.find('span',{"title":"Xəritədə göstər"})
print("Xəritədə göstər")
elif 'Паказаць карту' in str(page_coordinates_content):
page_coordinates_content_div = page_coordinates_content.find('span',{"title":"Паказаць карту"})
print("Паказаць карту")
elif ('id="coordinates' in str(page_coordinates_content)) and (key == 'ptwiki'):
page_coordinates_content_div = page_coordinates_content.find('div',{"id":"coordinates"})
print("div coordinates id")
elif ('plainlinksneverexpand' in str(page_coordinates_content)) and (key != 'frwiki' and key != 'myvwiki' and key != 'eowiki' and key != 'hrwiki' and key != 'nnwiki'):
page_coordinates_content_div = page_coordinates_content.find('div',{"class":"plainlinksneverexpand"})
print("plainlinksneverexpand")
elif 'coordinatespan' in str(page_coordinates_content):
page_coordinates_content_div = page_coordinates_content.find('span',{"id":"coordinatespan"})
print("coordinatespan")
elif 'id="coordinates' in str(page_coordinates_content):
page_coordinates_content_div = page_coordinates_content.find('span',{"id":"coordinates"})
print("span coordinates id")
elif 'class="coordinates' in str(page_coordinates_content):
page_coordinates_content_div = page_coordinates_content.find('span',{"class":"coordinates"})
print("span coordinates class")
# if there are some coordinates continue with the scraping
if (page_coordinates_content_div != '') and (page_coordinates_content_div != None):
# get coordinates from the kartographer
if 'mw-kartographer-maplink' in str(page_coordinates_content_div):
page_coordinates_content_a=page_coordinates_content_div.find('a', {"class":"mw-kartographer-maplink"})
print("mw-kartographer-maplink")
# print(page_coordinates_content_a['data-lat'])
# print(page_coordinates_content_a['data-lon'])
wiki_geo[key] = str(page_coordinates_content_a['data-lat'])+', '+str(page_coordinates_content_a['data-lon'])
item[a['title']]['geo_cor']=wiki_geo
# print(wiki_site)
if item[a['title']]['geo']==False:
wiki_quick[key] =str(a_data_id)+"|P625|@"+str(page_coordinates_content_a['data-lat'])+"/"+str(page_coordinates_content_a['data-lon'])+"||"
else:
wiki_quick[key] = ''
item[a['title']]['wiki_quick']=wiki_quick
else:
page_coordinates_content_a=page_coordinates_content_div.find('a', {"class":"external text"}, href=True)
if page_coordinates_content_a != None:
if 'http' in page_coordinates_content_a['href']:
url_hack=page_coordinates_content_a['href']
else:
url_hack='https:'+page_coordinates_content_a['href']
geo_hack = requests.get(url_hack)
geo_hack_content = BeautifulSoup(geo_hack.content, "html.parser")
# fix for some problems with eswiki
if key == 'eswiki':
geo_hack_content=geo_hack_content.find('span',{"class":"geo"})
# get data from cswiki and skwiki
if key == 'cswiki' or key == 'skwiki':
n=geo_hack_content
strange_geo_hack = re.search("\s?(\d+.\d+)°?,\s+(\d+.\d+)°?", str(n))
geo_hack_latitude=strange_geo_hack.group(1)
geo_hack_longitude=strange_geo_hack.group(2)
# get data from the rest of the pages
else:
geo_hack_latitude=geo_hack_content.find('span',{"class":"latitude"})
# print(geo_hack_latitude)
geo_hack_latitude=re.sub('<[^<>]+>', '', str(geo_hack_latitude))
geo_hack_longitude=geo_hack_content.find('span',{"class":"longitude"})
# print(geo_hack_longitude)
geo_hack_longitude=re.sub('<[^<>]+>', '', str(geo_hack_longitude))
if (geo_hack_latitude=='None') and (geo_hack_longitude=='None'):
wiki_geo[key] = ''
wiki_quick[key] = ''
else:
wiki_geo[key] = geo_hack_latitude+', '+geo_hack_longitude
if item[a['title']]['geo']==False:
wiki_quick[key] =str(a_data_id)+"|P625|@"+str(geo_hack_latitude)+"/"+str(geo_hack_longitude)+"||"
else:
wiki_quick[key] = ''
item[a['title']]['geo_cor']=wiki_geo
# print(wiki_site)
item[a['title']]['wiki_quick']=wiki_quick
else:
wiki_geo[key] = ''
item[a['title']]['geo_cor']=wiki_geo
# print(wiki_geo)
wiki_quick[key] = ''
item[a['title']]['wiki_quick']=wiki_quick
else:
wiki_geo[key] = ''
item[a['title']]['geo_cor']=wiki_geo
# print(wiki_geo)
wiki_quick[key] = ''
item[a['title']]['wiki_quick']=wiki_quick
else:
pass
# end of the links
# print("------------------------------------------------------------------------------------------------")
wiki_site=[]
wiki_title=[]
wiki_url=[]
wiki_geo={}
wiki_quick={}
except:
pass
except:
pass
# counting the number of elements
length=len(title_list)
else:
return redirect(url_for('index'))
return render_template("wiki_table.html", length=length, title_list=title_list, item=item)
if __name__ == '__main__':
app.run() | [
"flask.render_template",
"json.loads",
"pathlib.Path",
"flask.Flask",
"requests.get",
"flask.url_for",
"bs4.BeautifulSoup"
] | [((169, 184), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (174, 184), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((223, 252), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (238, 252), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((11144, 11231), 'flask.render_template', 'render_template', (['"""wiki_table.html"""'], {'length': 'length', 'title_list': 'title_list', 'item': 'item'}), "('wiki_table.html', length=length, title_list=title_list,\n item=item)\n", (11159, 11231), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((612, 626), 'pathlib.Path', 'Path', (['text_url'], {}), '(text_url)\n', (616, 626), False, 'from pathlib import Path\n'), ((707, 729), 'requests.get', 'requests.get', (['text_url'], {}), '(text_url)\n', (719, 729), False, 'import requests\n'), ((747, 798), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page_response.content', '"""html.parser"""'], {}), "(page_response.content, 'html.parser')\n", (760, 798), False, 'from bs4 import BeautifulSoup\n'), ((11115, 11131), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (11122, 11131), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((459, 475), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (466, 475), False, 'from flask import Flask, render_template, request, redirect, url_for\n'), ((1595, 1616), 'requests.get', 'requests.get', (['url_new'], {}), '(url_new)\n', (1607, 1616), False, 'import requests\n'), ((1641, 1688), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page_data.content', '"""html.parser"""'], {}), "(page_data.content, 'html.parser')\n", (1654, 1688), False, 'from bs4 import BeautifulSoup\n'), ((2110, 2137), 'requests.get', 'requests.get', (['a_data_entity'], {}), '(a_data_entity)\n', (2122, 2137), False, 'import requests\n'), ((2171, 2227), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page_data_instance.content', '"""html.parser"""'], {}), "(page_data_instance.content, 'html.parser')\n", (2184, 2227), False, 'from bs4 import BeautifulSoup\n'), ((2314, 2327), 'json.loads', 'json.loads', (['b'], {}), '(b)\n', (2324, 2327), False, 'import json\n'), ((4594, 4620), 'requests.get', 'requests.get', (['language_url'], {}), '(language_url)\n', (4606, 4620), False, 'import requests\n'), ((4655, 4709), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page_coordinates.content', '"""html.parser"""'], {}), "(page_coordinates.content, 'html.parser')\n", (4668, 4709), False, 'from bs4 import BeautifulSoup\n'), ((8623, 8645), 'requests.get', 'requests.get', (['url_hack'], {}), '(url_hack)\n', (8635, 8645), False, 'import requests\n'), ((8675, 8721), 'bs4.BeautifulSoup', 'BeautifulSoup', (['geo_hack.content', '"""html.parser"""'], {}), "(geo_hack.content, 'html.parser')\n", (8688, 8721), False, 'from bs4 import BeautifulSoup\n')] |
#!/usr/bin/env python3
#
# Copyright (C) 2020 <NAME> <<EMAIL>>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
#
import sys
import secrets
if len(sys.argv) >= 2:
maxbits = int(sys.argv[1])
else:
maxbits = 64
maxval = 2 ** maxbits
print(hex(secrets.randbelow(maxval)))
| [
"secrets.randbelow"
] | [((374, 399), 'secrets.randbelow', 'secrets.randbelow', (['maxval'], {}), '(maxval)\n', (391, 399), False, 'import secrets\n')] |
import numpy as np
import pandas as pd
from xgboost.sklearn import XGBClassifier
import os
print(os.listdir("../input"))
train = pd.read_csv("../input/expedia-hotel-recommendations/train.csv",nrows=100000)
test = pd.read_csv("../input/expedia-hotel-recommendations/test.csv")
def datetime(data, column):
data[column] = pd.to_datetime(data[column],errors='coerce')
year = data[column].dt.year
month = data[column].dt.month
day = data[column].dt.day
return year, month, day
train['dt_year'],train['dt_month'],train['dt_day'] = datetime(train,'date_time')
train['ci_year'],train['ci_month'],train['ci_day'] = datetime(train,'srch_ci')
train['co_year'],train['co_month'],train['co_day'] = datetime(train,'srch_co')
test['dt_year'],test['dt_month'],test['dt_day'] = datetime(test,'date_time')
test['ci_year'],test['ci_month'],test['ci_day'] = datetime(test,'srch_ci')
test['co_year'],test['co_month'],test['co_day'] = datetime(test,'srch_co')
train = train.drop(['date_time','srch_ci','srch_co','user_id','is_mobile','is_booking','cnt'],axis=1)
test = test.drop(['date_time','srch_ci','srch_co','id','user_id','is_mobile'],axis=1)
y_train = train['hotel_cluster'].values
train = train.drop('hotel_cluster', axis=1)
xgb = XGBClassifier(num_class=12 ,max_depth=6, learning_rate=0.3, n_estimators=25,
objective='multi:softprob',subsample=0.4, colsample_bytree=0.5, eval_metric='mlogloss')
xgb.fit(train, y_train)
pred = xgb.predict_proba(test)
submission = pd.read_csv("../input/expedia-hotel-recommendations/sample_submission.csv")
id_sub = submission['id']
hts= [np.argsort(pred[i][::-1])[:5] for i in range(len(id_sub))]
write_p = [" ".join([str(l) for l in p]) for p in hts]
write_frame = ["{0},{1}".format(id_sub[i], write_p[i]) for i in range(len(hts))]
write_frame = ["id,hotel_cluster"] + write_frame
with open("sub_expedia.csv", "w+") as f:
f.write("\n".join(write_frame))
| [
"xgboost.sklearn.XGBClassifier",
"os.listdir",
"pandas.read_csv",
"numpy.argsort",
"pandas.to_datetime"
] | [((132, 209), 'pandas.read_csv', 'pd.read_csv', (['"""../input/expedia-hotel-recommendations/train.csv"""'], {'nrows': '(100000)'}), "('../input/expedia-hotel-recommendations/train.csv', nrows=100000)\n", (143, 209), True, 'import pandas as pd\n'), ((216, 278), 'pandas.read_csv', 'pd.read_csv', (['"""../input/expedia-hotel-recommendations/test.csv"""'], {}), "('../input/expedia-hotel-recommendations/test.csv')\n", (227, 278), True, 'import pandas as pd\n'), ((1245, 1418), 'xgboost.sklearn.XGBClassifier', 'XGBClassifier', ([], {'num_class': '(12)', 'max_depth': '(6)', 'learning_rate': '(0.3)', 'n_estimators': '(25)', 'objective': '"""multi:softprob"""', 'subsample': '(0.4)', 'colsample_bytree': '(0.5)', 'eval_metric': '"""mlogloss"""'}), "(num_class=12, max_depth=6, learning_rate=0.3, n_estimators=25,\n objective='multi:softprob', subsample=0.4, colsample_bytree=0.5,\n eval_metric='mlogloss')\n", (1258, 1418), False, 'from xgboost.sklearn import XGBClassifier\n'), ((1499, 1574), 'pandas.read_csv', 'pd.read_csv', (['"""../input/expedia-hotel-recommendations/sample_submission.csv"""'], {}), "('../input/expedia-hotel-recommendations/sample_submission.csv')\n", (1510, 1574), True, 'import pandas as pd\n'), ((99, 121), 'os.listdir', 'os.listdir', (['"""../input"""'], {}), "('../input')\n", (109, 121), False, 'import os\n'), ((327, 372), 'pandas.to_datetime', 'pd.to_datetime', (['data[column]'], {'errors': '"""coerce"""'}), "(data[column], errors='coerce')\n", (341, 372), True, 'import pandas as pd\n'), ((1609, 1634), 'numpy.argsort', 'np.argsort', (['pred[i][::-1]'], {}), '(pred[i][::-1])\n', (1619, 1634), True, 'import numpy as np\n')] |
from qnote.cli.operator import TagOperator
from qnote.internal.exceptions import (
StorageCheckException,
SafeExitException,
)
from qnote.objects import Tag, Tags
from qnote.storage import get_storer
__all__ = ['TagManager']
class TagManager(object):
def __init__(self, config):
self.config = config
def list_tags(self):
storer = get_storer(self.config)
tags, counts = storer.get_all_tags_with_count()
lines = ['%4s %s' % (count, tag) for (count, tag) in zip(counts, tags)]
msg = '\n'.join(lines)
print(msg)
def clear_empty_tags(self):
"""Clear those tags which no notes are tagged by."""
storer = get_storer(self.config)
tags, counts = storer.get_all_tags_with_count()
tags_to_remove = [tags[i] for i, v in enumerate(counts) if v == 0]
try:
TagOperator(self.config).confirm_to_remove_tags(tags_to_remove)
except KeyboardInterrupt as ex:
raise SafeExitException() from ex
n_deleted = storer.delete_tags_by_name(tags_to_remove)
msg = '%s tag%s ha%s been deleted.' % (
n_deleted,
's' if n_deleted > 1 else '',
've' if n_deleted > 1 else 's'
)
print(msg)
def rename_tag(self, old_name, new_name):
storer = get_storer(self.config)
if not storer.check_tag_exist(old_name):
msg = 'Tag "%s" does not exist, so that we cannot rename it' % old_name
raise SafeExitException(msg)
if storer.check_tag_exist(new_name):
msg = 'Tag "%s" already exist, please choose another name' % new_name
raise SafeExitException(msg)
storer.rename_tag(old_name, new_name)
msg = 'Tag "%s" has been renamed "%s"' % (old_name, new_name)
print(msg)
| [
"qnote.cli.operator.TagOperator",
"qnote.internal.exceptions.SafeExitException",
"qnote.storage.get_storer"
] | [((367, 390), 'qnote.storage.get_storer', 'get_storer', (['self.config'], {}), '(self.config)\n', (377, 390), False, 'from qnote.storage import get_storer\n'), ((689, 712), 'qnote.storage.get_storer', 'get_storer', (['self.config'], {}), '(self.config)\n', (699, 712), False, 'from qnote.storage import get_storer\n'), ((1333, 1356), 'qnote.storage.get_storer', 'get_storer', (['self.config'], {}), '(self.config)\n', (1343, 1356), False, 'from qnote.storage import get_storer\n'), ((1509, 1531), 'qnote.internal.exceptions.SafeExitException', 'SafeExitException', (['msg'], {}), '(msg)\n', (1526, 1531), False, 'from qnote.internal.exceptions import StorageCheckException, SafeExitException\n'), ((1677, 1699), 'qnote.internal.exceptions.SafeExitException', 'SafeExitException', (['msg'], {}), '(msg)\n', (1694, 1699), False, 'from qnote.internal.exceptions import StorageCheckException, SafeExitException\n'), ((992, 1011), 'qnote.internal.exceptions.SafeExitException', 'SafeExitException', ([], {}), '()\n', (1009, 1011), False, 'from qnote.internal.exceptions import StorageCheckException, SafeExitException\n'), ((870, 894), 'qnote.cli.operator.TagOperator', 'TagOperator', (['self.config'], {}), '(self.config)\n', (881, 894), False, 'from qnote.cli.operator import TagOperator\n')] |
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
class Command(BaseCommand):
def handle(self, *args, **options):
User = get_user_model()
User.objects.create_superuser('jaesuk', '<EMAIL>', "jaesuk") | [
"django.contrib.auth.get_user_model"
] | [((185, 201), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (199, 201), False, 'from django.contrib.auth import get_user_model\n')] |
from abc import ABC, abstractproperty
from re import match
from typing import Optional, Tuple
class CodePattern(ABC):
"""
Abstract base class for all types of code block.
"""
def __str__(self) -> str:
return self.__class__.__name__
def clean(self, line: str) -> str:
"""
Reveals the content of a line from its markup.
Arguments:
line: Line to clean.
Returns:
Line content.
"""
if not self.clean_expression:
return line
if m := match(self.clean_expression, line):
return m.group(1) if len(m.groups()) > 0 else line
return line
@property
def clean_expression(self) -> Optional[str]:
"""
Gets the regular expression that reveals the content of a line from its
markup.
"""
return None
@abstractproperty
def end_expression(self) -> str:
"""
Gets the regular expression that matches the end of this type of block.
"""
@abstractproperty
def fenced(self) -> bool:
"""
Returns `True` if this type of code block has some pattern at the top
and bottom of the block to indicate the content boundary.
"""
def is_end(self, line: str) -> bool:
"""
Checks if `line` ends code blocks of this type.
Arguments:
line: Line to check
Returns:
`True` if `line` ends code blocks of this type.
"""
return not not match(self.end_expression, line.rstrip())
def is_start(self, line: str) -> Tuple[bool, Optional[str]]:
"""
Checks if `line` starts a code block of this type.
Arguments:
line: Line to check.
Returns:
`True` and language if `line` starts a code block of this type.
"""
if m := match(self.start_expression, line):
lang = m.group(1) if len(m.groups()) > 0 else None
return True, lang
return False, None
@abstractproperty
def start_expression(self) -> str:
"""
Gets the regular expression that matches the start of this type of
block.
"""
| [
"re.match"
] | [((555, 589), 're.match', 'match', (['self.clean_expression', 'line'], {}), '(self.clean_expression, line)\n', (560, 589), False, 'from re import match\n'), ((1896, 1930), 're.match', 'match', (['self.start_expression', 'line'], {}), '(self.start_expression, line)\n', (1901, 1930), False, 'from re import match\n')] |
from copy import copy
import json
class Reporter(object):
"""
The goal of this class is to have an object with dot acccess that can
keep track of data that happens during a program's execution.
Additionally it should be able to reset itself to the original template
and serialize itself
This class is principally used with hawk_eye_notify so it abides by the
{source, output} convention used by the templating there.
"""
def __init__(self, source, output_dict, no_reset=[]):
"""
The output_dict is what will generate the template and be serialized
as well as being dot accessible from the runtime Reporter instance
The no reset keys are there to stop the reset function
from deleting data in particular keys on the report object
"""
self.__template = {}
self.__source = source
self.__no_reset = no_reset
for key, value in output_dict.items():
self.add_key_value(key, value)
def serialize(self, sort_keys=True, indent=4):
"""
Serializes the report for writing to a file.
It should only serialize things that were defined in the template
"""
acc = {
'source': self.__source,
'output': {}
}
for key in self.__template.keys():
acc['output'][key] = getattr(self, key)
return json.dumps(copy(acc), sort_keys=sort_keys, indent=indent)
def add_key_value(self, key, value):
"""
Builds the __template one key value at a time.
This will copy every key value pair so that changes that happen
in the reporter and different instances from the same template
will not affect one another
"""
key_copy = copy(key)
value_copy = copy(value)
self.__template[key_copy] = value_copy
setattr(self, key_copy, value_copy)
return self
def get_template(self):
return copy(self.__template)
def reset(self, force=False):
"""
For every key in the template, this function should set them back to their
initial state unless the key was in the no_reset list passed at init.
If force is passed this function will ignore the no_reset list
"""
for key in self.__template.keys():
if force:
setattr(self, key, self.__template.get(key))
elif key in self.__no_reset:
pass
else:
setattr(self, key, self.__template.get(key))
return self
| [
"copy.copy"
] | [((1787, 1796), 'copy.copy', 'copy', (['key'], {}), '(key)\n', (1791, 1796), False, 'from copy import copy\n'), ((1818, 1829), 'copy.copy', 'copy', (['value'], {}), '(value)\n', (1822, 1829), False, 'from copy import copy\n'), ((1985, 2006), 'copy.copy', 'copy', (['self.__template'], {}), '(self.__template)\n', (1989, 2006), False, 'from copy import copy\n'), ((1420, 1429), 'copy.copy', 'copy', (['acc'], {}), '(acc)\n', (1424, 1429), False, 'from copy import copy\n')] |
# Generated by Django 3.1 on 2021-11-25 14:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pcell', '0002_company_visited_year'),
]
operations = [
migrations.AlterField(
model_name='company',
name='company_ctc',
field=models.IntegerField(),
),
migrations.AlterField(
model_name='company',
name='job_eligibility',
field=models.CharField(max_length=1000),
),
migrations.AlterField(
model_name='company',
name='job_profile',
field=models.CharField(max_length=1000),
),
migrations.AlterField(
model_name='company',
name='job_skills',
field=models.CharField(max_length=1000),
),
]
| [
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((340, 361), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (359, 361), False, 'from django.db import migrations, models\n'), ((493, 526), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (509, 526), False, 'from django.db import migrations, models\n'), ((654, 687), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (670, 687), False, 'from django.db import migrations, models\n'), ((814, 847), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)'}), '(max_length=1000)\n', (830, 847), False, 'from django.db import migrations, models\n')] |
import logging
def get_logger(name):
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
custom_format = logging.Formatter('%(asctime)s %(levelname)s\t'
'%(pathname)s def %(funcName)s:%(lineno)d - %(message)s')
cli_handler = logging.StreamHandler()
file_handler = logging.FileHandler('learning.log', encoding='utf-8')
cli_handler.setFormatter(custom_format)
file_handler.setFormatter(custom_format)
cli_handler.setLevel(logging.INFO)
file_handler.setLevel(logging.DEBUG)
logger.handlers.clear()
logger.addHandler(cli_handler)
logger.addHandler(file_handler)
return logger
| [
"logging.getLogger",
"logging.Formatter",
"logging.StreamHandler",
"logging.FileHandler"
] | [((52, 75), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (69, 75), False, 'import logging\n'), ((132, 244), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)s\t%(pathname)s def %(funcName)s:%(lineno)d - %(message)s"""'], {}), "(\n '%(asctime)s %(levelname)s\\t%(pathname)s def %(funcName)s:%(lineno)d - %(message)s'\n )\n", (149, 244), False, 'import logging\n'), ((295, 318), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (316, 318), False, 'import logging\n'), ((338, 391), 'logging.FileHandler', 'logging.FileHandler', (['"""learning.log"""'], {'encoding': '"""utf-8"""'}), "('learning.log', encoding='utf-8')\n", (357, 391), False, 'import logging\n')] |
import re
import pytz
import requests
from datetime import datetime, timedelta
from urllib.parse import urljoin
from bs4 import BeautifulSoup
from podgen import Podcast, Episode, Media
# Fetch and parse episode metadata
wikipedia_url = "https://en.wikipedia.org/wiki/The_Unbelievable_Truth_(radio_show)"
wikipedia_html = requests.get(wikipedia_url).text
wikipedia_soup = BeautifulSoup(wikipedia_html, 'html.parser')
raw_series_metadata = wikipedia_soup.find_all("table", class_="wikitable")
metadata = dict()
for raw_table in raw_series_metadata:
for raw_row in raw_table.find_all("tr"):
if raw_row.find_all('th'):
# table header row, skip
continue
try:
cells = raw_row.find_all("td")
identifier = cells[0].text.replace('\n', '')
metadata[identifier] = {
'series': identifier[:2],
'episode': identifier[3:],
'guests': cells[2].text.replace('\n', ''),
'topics': cells[3].text.replace('\n', ''),
'first_broadcast': cells[1].text.replace('\n', ''),
}
except IndexError:
pass
# Fetch and parse episode audio
archive_urls = [
"https://archive.org/details/theunbelievabletruth1-5",
"https://archive.org/details/theunbelievabletruth6-10",
"https://archive.org/details/theunbelievabletruth11-15",
"https://archive.org/details/theunbelievabletruth16-20",
"https://archive.org/details/theunbelievabletruth21-23", # 21-25
"https://archive.org/details/the-unbelievable-truth-s-26"
]
raw_episodes = list()
for url in archive_urls:
archive_html = requests.get(url).text
archive_soup = BeautifulSoup(archive_html, 'html.parser')
raw_episodes += archive_soup.find_all("a", class_="download-pill")
# Process
episodes = list()
for raw_ep in raw_episodes:
if raw_ep['href'][-4:] != '.mp3':
# file does not end in mp3, so skip
continue
title = raw_ep.text
title = title.replace('download', '')
title = title.replace('.mp3', '').strip()
regex = re.compile(r's(\d+) e(\d+)').search(title)
identifier = f"{regex.group(1)}x{regex.group(2)}" # e.g. 08x03
# Handle edge case episodes
edge_cases = {
# File title: Custom metadata
"The Unbelievable Truth (s00 e00) 'Pilot'": {
"wikipedia_identifier": "Pilot",
"series": "00",
"episode": "00 - Pilot"
},
"The Unbelievable Truth (s02 e07) 'Christmas Special'": {
"wikipedia_identifier": "Special",
"series": "02",
"episode": "07 - Christmas Special"
},
"The Unbelievable Truth (s04 e07) 'New Year's Special'": {
"wikipedia_identifier": "Sp.",
"series": "04",
"episode": "07 - New Year's Special"
}
}
if title in edge_cases.keys():
identifier = edge_cases[title]["wikipedia_identifier"]
metadata[identifier]["series"] = edge_cases[title]["series"]
metadata[identifier]["episode"] = edge_cases[title]["episode"]
try:
ep_metadata = metadata[identifier]
output_title = f"S{ep_metadata['series']} E{ep_metadata['episode']}: {ep_metadata['topics']}"
first_broadcast_no_brackets = re.sub("[\(\[].*?[\)\]]", "", ep_metadata['first_broadcast'])
first_broadcast_datetime = datetime.strptime(first_broadcast_no_brackets, "%d %B %Y").replace(tzinfo=pytz.timezone('Europe/London'))
first_broadcast_datetime += timedelta(hours=18, minutes=30) # Episodes are often broadcast at 18:30
description = f"Guests: {ep_metadata['guests']}.<br/><br/>" + \
f"First broadcast: {first_broadcast_no_brackets}"
except KeyError:
output_title = title
description = ""
first_broadcast_datetime = datetime.now().replace(tzinfo=pytz.utc)
pass
ep = {
'title': output_title,
'url': urljoin(url, raw_ep['href']),
'description': description,
'first_broadcast': first_broadcast_datetime,
}
episodes.append(ep)
# Create the Podcast
podcast = Podcast(
name="The Unbelievable Truth",
description="Game show in which panellists compete to see how many nuggets of truth they are able to to hide amongst their lies.",
website="https://www.bbc.co.uk/programmes/b007mf4f",
explicit=False,
image="https://ichef.bbci.co.uk/images/ic/1200x675/p09q7v6j.jpg"
)
podcast.episodes += [
Episode(
title=ep['title'],
media=Media(ep['url']),
publication_date=ep['first_broadcast'],
summary=ep['description'],
)
for ep in episodes
]
# Generate and output the RSS feed
rss = podcast.rss_str()
with open('podcast.rss', 'w') as outfile:
outfile.write(rss)
| [
"pytz.timezone",
"re.compile",
"podgen.Media",
"datetime.datetime.strptime",
"requests.get",
"bs4.BeautifulSoup",
"podgen.Podcast",
"datetime.datetime.now",
"urllib.parse.urljoin",
"re.sub",
"datetime.timedelta"
] | [((373, 417), 'bs4.BeautifulSoup', 'BeautifulSoup', (['wikipedia_html', '"""html.parser"""'], {}), "(wikipedia_html, 'html.parser')\n", (386, 417), False, 'from bs4 import BeautifulSoup\n'), ((4369, 4687), 'podgen.Podcast', 'Podcast', ([], {'name': '"""The Unbelievable Truth"""', 'description': '"""Game show in which panellists compete to see how many nuggets of truth they are able to to hide amongst their lies."""', 'website': '"""https://www.bbc.co.uk/programmes/b007mf4f"""', 'explicit': '(False)', 'image': '"""https://ichef.bbci.co.uk/images/ic/1200x675/p09q7v6j.jpg"""'}), "(name='The Unbelievable Truth', description=\n 'Game show in which panellists compete to see how many nuggets of truth they are able to to hide amongst their lies.'\n , website='https://www.bbc.co.uk/programmes/b007mf4f', explicit=False,\n image='https://ichef.bbci.co.uk/images/ic/1200x675/p09q7v6j.jpg')\n", (4376, 4687), False, 'from podgen import Podcast, Episode, Media\n'), ((323, 350), 'requests.get', 'requests.get', (['wikipedia_url'], {}), '(wikipedia_url)\n', (335, 350), False, 'import requests\n'), ((1703, 1745), 'bs4.BeautifulSoup', 'BeautifulSoup', (['archive_html', '"""html.parser"""'], {}), "(archive_html, 'html.parser')\n", (1716, 1745), False, 'from bs4 import BeautifulSoup\n'), ((1661, 1678), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1673, 1678), False, 'import requests\n'), ((3459, 3524), 're.sub', 're.sub', (['"""[\\\\(\\\\[].*?[\\\\)\\\\]]"""', '""""""', "ep_metadata['first_broadcast']"], {}), "('[\\\\(\\\\[].*?[\\\\)\\\\]]', '', ep_metadata['first_broadcast'])\n", (3465, 3524), False, 'import re\n'), ((3706, 3737), 'datetime.timedelta', 'timedelta', ([], {'hours': '(18)', 'minutes': '(30)'}), '(hours=18, minutes=30)\n', (3715, 3737), False, 'from datetime import datetime, timedelta\n'), ((4170, 4198), 'urllib.parse.urljoin', 'urljoin', (['url', "raw_ep['href']"], {}), "(url, raw_ep['href'])\n", (4177, 4198), False, 'from urllib.parse import urljoin\n'), ((4773, 4789), 'podgen.Media', 'Media', (["ep['url']"], {}), "(ep['url'])\n", (4778, 4789), False, 'from podgen import Podcast, Episode, Media\n'), ((2138, 2167), 're.compile', 're.compile', (['"""s(\\\\d+) e(\\\\d+)"""'], {}), "('s(\\\\d+) e(\\\\d+)')\n", (2148, 2167), False, 'import re\n'), ((3560, 3618), 'datetime.datetime.strptime', 'datetime.strptime', (['first_broadcast_no_brackets', '"""%d %B %Y"""'], {}), "(first_broadcast_no_brackets, '%d %B %Y')\n", (3577, 3618), False, 'from datetime import datetime, timedelta\n'), ((3634, 3664), 'pytz.timezone', 'pytz.timezone', (['"""Europe/London"""'], {}), "('Europe/London')\n", (3647, 3664), False, 'import pytz\n'), ((4043, 4057), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4055, 4057), False, 'from datetime import datetime, timedelta\n')] |
import aiohttp
import csv
from .set import EntitiesSet
async def get_content(url):
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
async with session.get(url) as resp:
if resp.status != 200:
raise Exception('An error returned while trying to download file from {}: {}'.format(url, resp.status))
return await resp.text()
class EntitiesSetsStore:
def __init__(self):
self.store = {}
async def get(self, url):
if url not in self.store:
text = await get_content(url)
csv_reader = csv.reader(text.split('\n'))
rows = [r for r in csv_reader]
headers = rows[0]
rows = rows[1:]
entities_set = EntitiesSet(url, headers, rows)
self.store[url] = entities_set
return self.store[url]
| [
"aiohttp.TCPConnector"
] | [((129, 167), 'aiohttp.TCPConnector', 'aiohttp.TCPConnector', ([], {'verify_ssl': '(False)'}), '(verify_ssl=False)\n', (149, 167), False, 'import aiohttp\n')] |
import numpy as np
GOLDEN_RATIO = 0.5 * (1 + np.sqrt(5))
WIDTH = 397.48499
def pt_to_in(x):
pt_per_in = 72.27
return x / pt_per_in
| [
"numpy.sqrt"
] | [((46, 56), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (53, 56), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import argparse
import os
import pathlib
import platform
import shutil
import subprocess
import sys
import util
# Artifacts are stored with buildkite using the following scheme:
#
# $ROOT/tmp/output/$SUITE/$WORKLOAD/$PLATFORM/{params}/[result.json, result.npy]
if platform.system() == 'Windows':
ARTIFACTS_ROOT = "\\\\rackstation\\artifacts"
else:
ARTIFACTS_ROOT = '/nas/artifacts'
def load_template(name):
this_dir = os.path.dirname(__file__)
template_path = os.path.join(this_dir, name)
with open(template_path, 'r') as file_:
return file_.read()
def get_emoji(variant):
if variant == 'windows_x86_64':
return ':windows:'
if variant == 'macos_x86_64':
return ':darwin:'
return ':linux:'
def get_engine(pkey):
if 'stripe-ocl' in pkey:
return ':barber::cl:'
if 'stripe-mtl' in pkey:
return ':barber::metal:'
if 'plaidml-mtl' in pkey:
return ':black_square_button::metal:'
return ':black_square_button::cl:'
def get_python(variant):
if variant == 'windows_x86_64':
return 'python'
return 'python3'
def cmd_pipeline(args, remainder):
import pystache
import yaml
with open('ci/plan.yml') as file_:
plan = yaml.safe_load(file_)
variants = []
for variant in plan['VARIANTS'].keys():
variants.append(dict(name=variant, python=get_python(variant), emoji=get_emoji(variant)))
tests = []
for skey, suite in plan['SUITES'].items():
for pkey, platform in suite['platforms'].items():
pinfo = plan['PLATFORMS'][pkey]
variant = pinfo['variant']
if args.pipeline not in platform['pipelines']:
continue
for wkey, workload in suite['workloads'].items():
popt = util.PlanOption(suite, workload, pkey)
skip = workload.get('skip_platforms', [])
if pkey in skip:
continue
for batch_size in suite['params'][args.pipeline]['batch_sizes']:
tests.append(
dict(suite=skey,
workload=wkey,
platform=pkey,
batch_size=batch_size,
variant=variant,
timeout=popt.get('timeout', 20),
retry=popt.get('retry'),
softfail=popt.get('softfail'),
python=get_python(variant),
emoji=get_emoji(variant),
engine=get_engine(pkey)))
if args.count:
print('variants: {}'.format(len(variants)))
print('tests : {}'.format(len(tests)))
print('total : {}'.format(len(variants) + len(tests)))
else:
ctx = dict(variants=variants, tests=tests)
yml = pystache.render(load_template('pipeline.yml'), ctx)
print(yml)
def cmd_build(args, remainder):
common_args = []
common_args += ['--config={}'.format(args.variant)]
common_args += ['--define=version={}'.format(args.version)]
common_args += ['--explain={}'.format(os.path.abspath('explain.log'))]
common_args += ['--verbose_failures']
common_args += ['--verbose_explanations']
if platform.system() == 'Windows':
util.check_call(['git', 'config', 'core.symlinks', 'true'])
cenv = util.CondaEnv(pathlib.Path('.cenv'))
cenv.create('environment-windows.yml')
env = os.environ.copy()
env.update(cenv.env())
else:
env = None
util.check_call(['bazelisk', 'test', '...'] + common_args, env=env)
archive_dir = os.path.join(
args.root,
args.pipeline,
args.build_id,
'build',
args.variant,
)
os.makedirs(archive_dir, exist_ok=True)
shutil.copy(os.path.join('bazel-bin', 'pkg.tar.gz'), archive_dir)
def cmd_test(args, remainder):
import harness
harness.run(args)
def cmd_report(args, remainder):
archive_dir = os.path.join(args.root, args.pipeline, args.build_id)
cmd = ['bazelisk', 'run', '//ci:report']
cmd += ['--']
cmd += ['--pipeline', args.pipeline]
cmd += ['--annotate']
cmd += [archive_dir]
cmd += remainder
util.check_call(cmd)
def make_cmd_build(parent):
parser = parent.add_parser('build')
parser.add_argument('variant')
parser.set_defaults(func=cmd_build)
def make_cmd_test(parent):
parser = parent.add_parser('test')
parser.add_argument('platform')
parser.add_argument('suite')
parser.add_argument('workload')
parser.add_argument('batch_size')
parser.add_argument('--local', action='store_true')
parser.set_defaults(func=cmd_test)
def make_cmd_report(parent):
parser = parent.add_parser('report')
parser.set_defaults(func=cmd_report)
def make_cmd_pipeline(parent):
parser = parent.add_parser('pipeline')
parser.add_argument('--count', action='store_true')
parser.set_defaults(func=cmd_pipeline)
def main():
pipeline = os.getenv('PIPELINE', 'pr')
branch = os.getenv('BUILDKITE_BRANCH', 'undefined')
build_id = os.getenv('BUILDKITE_BUILD_NUMBER', '0')
with open('VERSION', 'r') as verf:
version = verf.readline().strip()
default_version = os.getenv('VAI_VERSION', '{}+{}.dev{}'.format(version, pipeline, build_id))
main_parser = argparse.ArgumentParser()
main_parser.add_argument('--root', default=ARTIFACTS_ROOT)
main_parser.add_argument('--pipeline', default=pipeline)
main_parser.add_argument('--branch', default=branch)
main_parser.add_argument('--build_id', default=build_id)
main_parser.add_argument('--version', default=default_version)
sub_parsers = main_parser.add_subparsers()
make_cmd_pipeline(sub_parsers)
make_cmd_build(sub_parsers)
make_cmd_test(sub_parsers)
make_cmd_report(sub_parsers)
args, remainder = main_parser.parse_known_args()
if 'func' not in args:
main_parser.print_help()
return
if platform.system() == 'Linux' or platform.system() == 'Darwin':
path = os.getenv('PATH').split(os.pathsep)
path.insert(0, '/usr/local/miniconda3/bin')
os.environ.update({'PATH': os.pathsep.join(path)})
args.func(args, remainder)
if __name__ == '__main__':
main()
| [
"os.makedirs",
"os.getenv",
"argparse.ArgumentParser",
"pathlib.Path",
"os.pathsep.join",
"os.path.join",
"harness.run",
"os.environ.copy",
"os.path.dirname",
"platform.system",
"yaml.safe_load",
"os.path.abspath",
"util.PlanOption",
"util.check_call"
] | [((290, 307), 'platform.system', 'platform.system', ([], {}), '()\n', (305, 307), False, 'import platform\n'), ((458, 483), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (473, 483), False, 'import os\n'), ((504, 532), 'os.path.join', 'os.path.join', (['this_dir', 'name'], {}), '(this_dir, name)\n', (516, 532), False, 'import os\n'), ((3644, 3711), 'util.check_call', 'util.check_call', (["(['bazelisk', 'test', '...'] + common_args)"], {'env': 'env'}), "(['bazelisk', 'test', '...'] + common_args, env=env)\n", (3659, 3711), False, 'import util\n'), ((3730, 3806), 'os.path.join', 'os.path.join', (['args.root', 'args.pipeline', 'args.build_id', '"""build"""', 'args.variant'], {}), "(args.root, args.pipeline, args.build_id, 'build', args.variant)\n", (3742, 3806), False, 'import os\n'), ((3858, 3897), 'os.makedirs', 'os.makedirs', (['archive_dir'], {'exist_ok': '(True)'}), '(archive_dir, exist_ok=True)\n', (3869, 3897), False, 'import os\n'), ((4024, 4041), 'harness.run', 'harness.run', (['args'], {}), '(args)\n', (4035, 4041), False, 'import harness\n'), ((4095, 4148), 'os.path.join', 'os.path.join', (['args.root', 'args.pipeline', 'args.build_id'], {}), '(args.root, args.pipeline, args.build_id)\n', (4107, 4148), False, 'import os\n'), ((4329, 4349), 'util.check_call', 'util.check_call', (['cmd'], {}), '(cmd)\n', (4344, 4349), False, 'import util\n'), ((5118, 5145), 'os.getenv', 'os.getenv', (['"""PIPELINE"""', '"""pr"""'], {}), "('PIPELINE', 'pr')\n", (5127, 5145), False, 'import os\n'), ((5159, 5201), 'os.getenv', 'os.getenv', (['"""BUILDKITE_BRANCH"""', '"""undefined"""'], {}), "('BUILDKITE_BRANCH', 'undefined')\n", (5168, 5201), False, 'import os\n'), ((5217, 5257), 'os.getenv', 'os.getenv', (['"""BUILDKITE_BUILD_NUMBER"""', '"""0"""'], {}), "('BUILDKITE_BUILD_NUMBER', '0')\n", (5226, 5257), False, 'import os\n'), ((5456, 5481), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5479, 5481), False, 'import argparse\n'), ((1271, 1292), 'yaml.safe_load', 'yaml.safe_load', (['file_'], {}), '(file_)\n', (1285, 1292), False, 'import yaml\n'), ((3348, 3365), 'platform.system', 'platform.system', ([], {}), '()\n', (3363, 3365), False, 'import platform\n'), ((3388, 3447), 'util.check_call', 'util.check_call', (["['git', 'config', 'core.symlinks', 'true']"], {}), "(['git', 'config', 'core.symlinks', 'true'])\n", (3403, 3447), False, 'import util\n'), ((3561, 3578), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (3576, 3578), False, 'import os\n'), ((3914, 3953), 'os.path.join', 'os.path.join', (['"""bazel-bin"""', '"""pkg.tar.gz"""'], {}), "('bazel-bin', 'pkg.tar.gz')\n", (3926, 3953), False, 'import os\n'), ((3220, 3250), 'os.path.abspath', 'os.path.abspath', (['"""explain.log"""'], {}), "('explain.log')\n", (3235, 3250), False, 'import os\n'), ((3477, 3498), 'pathlib.Path', 'pathlib.Path', (['""".cenv"""'], {}), "('.cenv')\n", (3489, 3498), False, 'import pathlib\n'), ((6108, 6125), 'platform.system', 'platform.system', ([], {}), '()\n', (6123, 6125), False, 'import platform\n'), ((6140, 6157), 'platform.system', 'platform.system', ([], {}), '()\n', (6155, 6157), False, 'import platform\n'), ((1827, 1865), 'util.PlanOption', 'util.PlanOption', (['suite', 'workload', 'pkey'], {}), '(suite, workload, pkey)\n', (1842, 1865), False, 'import util\n'), ((6186, 6203), 'os.getenv', 'os.getenv', (['"""PATH"""'], {}), "('PATH')\n", (6195, 6203), False, 'import os\n'), ((6309, 6330), 'os.pathsep.join', 'os.pathsep.join', (['path'], {}), '(path)\n', (6324, 6330), False, 'import os\n')] |
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'dégainer'."""
from primaires.interpreteur.commande.commande import Commande
class CmdDegainer(Commande):
"""Commande 'dégainer'"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "degainer", "unsheathe")
self.nom_categorie = "objets"
self.schema = "<nom_objet>"
self.aide_courte = "dégaine une arme"
self.aide_longue = \
"Cette commande vous permet de dégainer une arme " \
"que vous possédez dans un fourreau équipé. Le premier " \
"et unique paramètre est le nom du fourreau (pas " \
"le nom de l'arme). Vous devez posséder une main de " \
"libre au minimum pour faire cette action. L'arme " \
"dégainée sera automatiquement équipée."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
nom_objet = self.noeud.get_masque("nom_objet")
nom_objet.proprietes["conteneurs"] = \
"(personnage.equipement.equipes, )"
nom_objet.proprietes["heterogene"] = "True"
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
personnage.agir("degainer")
fourreau_trouve = False
for fourreau in dic_masques["nom_objet"].objets:
if fourreau.est_de_type("armure") and fourreau.fourreau:
fourreau_trouve = True
break
if not fourreau_trouve:
personnage << "|err|{} n'est pas un fourreau.|ff|".format(
dic_masques["nom_objet"].objet.nom_singulier)
return
arme = fourreau.au_fourreau
if arme is None:
personnage << "|err|Ce fourreau ne contient aucune arme.|ff|"
return
libre = None
for membre in personnage.equipement.membres:
if membre.peut_equiper(arme):
libre = membre
break
if libre is None:
personnage << "|err|Vous ne disposez d'aucune main de libre.|ff|"
return
fourreau.au_fourreau = None
libre.equipe.append(arme)
arme.contenu = personnage.equipement.equipes
personnage << "Vous dégainez {}.".format(arme.get_nom())
personnage.salle.envoyer("{{}} dégaine {}.".format(arme.get_nom()),
personnage)
| [
"primaires.interpreteur.commande.commande.Commande.__init__"
] | [((1805, 1853), 'primaires.interpreteur.commande.commande.Commande.__init__', 'Commande.__init__', (['self', '"""degainer"""', '"""unsheathe"""'], {}), "(self, 'degainer', 'unsheathe')\n", (1822, 1853), False, 'from primaires.interpreteur.commande.commande import Commande\n')] |
from django import forms
class TeamMemberForm(forms.Form):
member_id = forms.IntegerField(required=True)
| [
"django.forms.IntegerField"
] | [((77, 110), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(True)'}), '(required=True)\n', (95, 110), False, 'from django import forms\n')] |
# https://github.com/bwoodsend/vtkplotlib
# pip install vtkplotlib
import vtkplotlib as vpl
from stl.mesh import Mesh
import numpy as np
import json
import os
import sys
from tqdm import tqdm
from PIL import Image
def get_stl_paths(full_path, stl_file='/xyz_0_0_0_pca_pair.stl' ,json_filename=''):
dir_list = os.listdir(full_path)
data = {}
for d in dir_list:
file_list = os.listdir(os.path.join(full_path, d))
data[d] = os.path.join(full_path, d + stl_file)
if json_filename:
print('Making JSON for', json_filename)
with open(json_filename+".json", 'w') as f:
json.dump(data, f)
return data
def _transparent_background(fig_path):
#make black background transparent
img = Image.open(fig_path)
img = img.convert("RGBA")
datas = img.getdata()
newData = []
for item in datas:
if item[0] == 0 and item[1] == 0 and item[2] == 0:
newData.append((0, 0, 0, 0)) #0 = black, last 0 is transp.
else:
newData.append(item)
img.putdata(newData)
# img.save(fig_path.replace('.png', '_transp.png'), "PNG")
img.save(fig_path, "PNG")
def _add_normalizing_vector_point(mesh, minpt, maxpt):
"""
This function allows you to visualize all meshes in their size relative to each other
It is a quick simple hack: by adding 2 vector points at the same x coordinates at the
extreme left and extreme right of the largest .stl mesh, all the meshes are displayed
with the same scale.
input: [mesh], minpoint coordinates, maxpoint coordinates
output: [mesh] with 2 added coordinate points
"""
newmesh = Mesh(np.zeros(mesh.vectors.shape[0]+2, dtype=Mesh.dtype))
# newmesh.vectors = np.vstack([mesh.vectors,
# np.array([ [[0,maxpt,0], [0,maxpt,0], [0,maxpt,0]],
# [[0,minpt,0], [0,minpt,0], [0,minpt,0]] ], float) ])
newmesh.vectors = np.vstack([mesh.vectors,
np.array([ [[0,0,maxpt], [0,0,maxpt], [0,0,maxpt]],
[[0,0,minpt], [0,0,minpt], [0,0,minpt]] ], float) ])
return newmesh
def create_figure(path_dict, figure_path, path_dict2=None, pair_mapping=None, transp_backg=False):
assert ((path_dict2 is None) + (pair_mapping is None)) != 1, \
'please specify all kwargs or none of them'
if pair_mapping is not None:
# for k in tqdm(pair_mapping):
# mesh= Mesh.from_file(path_dict[k[0]])
# mesh2 = Mesh.from_file(path_dict2[k[1]])
for k,values in tqdm(pair_mapping.items()):
mesh = Mesh.from_file(path_dict[k])
mesh = _add_normalizing_vector_point(mesh, 300, -300)
fig = vpl.figure()
fig.background_color = 'black'
vpl.mesh_plot(mesh, color = 'pink', opacity=0.3) #make dendrite translucent
for v in values: # add second, third,.. .stl to same plot
mesh2 = Mesh.from_file(path_dict2[str(v)])
vpl.mesh_plot(mesh2)
save_path = figure_path + str(k) + '.png'
vpl.save_fig(save_path, magnification=5, off_screen=True, )
if transp_backg == True: #make black background transparent
_transparent_background(save_path)
fig.close()
else:
for k in tqdm(path_dict):
# Read the STL using numpy-stl
mesh = Mesh.from_file(path_dict[k])
if debug == True:
mesh = _add_normalizing_vector_point(mesh, 300, -300)
fig = vpl.figure()
fig.background_color = 'black'
vpl.mesh_plot(mesh)
save_path = figure_path +str(k) + '.png'
vpl.save_fig(save_path, magnification=5, off_screen=True, )
if transp_backg == True: #make black background transparent
_transparent_background(save_path)
fig.close()
if __name__ == '__main__':
# change figure path and stl path if needed
# change background transparency if needed
print('start')
debug = False
if debug == True:
import pdb
figure_path = '/home/youngcoconut/Documents/snowjournal/volume2stl/figures/test/'
paths_dict = {1499496: '/home/youngcoconut/Documents/snowjournal/volume2stl/stl/1499496/xyz_0_0_0_pca.stl'}
create_figure(paths_dict, figure_path, transp_backg=True)
else:
pairs = True
if pairs == False:
figure_path = '/home/youngcoconut/Documents/snowjournal/volume2stl/figures/test/'
stl_path = '/home/youngcoconut/Documents/snowjournal/volume2stl/stl_mitos_dendrites_length_500/stl_dendrites/'
paths_dict = get_stl_paths(stl_path)
# paths_dict = get_stl_paths(sys.argv[1])
create_figure(paths_dict, figure_path, transp_backg=True)
else:
figure_path = '/home/youngcoconut/Documents/snowjournal/volume2stl/figures/pca_nocrumbs_237/'
paths_dict = get_stl_paths('/home/youngcoconut/Documents/snowjournal/volume2stl/stl_237/stl_dendrites/')
# paths_dict = get_stl_paths(sys.argv[1])
path_dict2=get_stl_paths('/home/youngcoconut/Documents/snowjournal/volume2stl/stl_237/stl_all_mitos/')
# mito-id, seg-id --> seg-id, mito-id
# idmap = np.loadtxt('mito_len500_bead_pair.txt')
# idmap = idmap[:,[1,0]].astype(np.uint32).astype(str)
with open('data/lut_dendrite_mito_237.json') as json_file:
idmap = json.load(json_file)
create_figure(paths_dict, figure_path, path_dict2=path_dict2, pair_mapping=idmap, transp_backg=True)
print('done')
| [
"os.listdir",
"PIL.Image.open",
"vtkplotlib.save_fig",
"tqdm.tqdm",
"os.path.join",
"vtkplotlib.figure",
"numpy.array",
"numpy.zeros",
"json.load",
"vtkplotlib.mesh_plot",
"json.dump",
"stl.mesh.Mesh.from_file"
] | [((315, 336), 'os.listdir', 'os.listdir', (['full_path'], {}), '(full_path)\n', (325, 336), False, 'import os\n'), ((750, 770), 'PIL.Image.open', 'Image.open', (['fig_path'], {}), '(fig_path)\n', (760, 770), False, 'from PIL import Image\n'), ((451, 488), 'os.path.join', 'os.path.join', (['full_path', '(d + stl_file)'], {}), '(full_path, d + stl_file)\n', (463, 488), False, 'import os\n'), ((1661, 1714), 'numpy.zeros', 'np.zeros', (['(mesh.vectors.shape[0] + 2)'], {'dtype': 'Mesh.dtype'}), '(mesh.vectors.shape[0] + 2, dtype=Mesh.dtype)\n', (1669, 1714), True, 'import numpy as np\n'), ((3345, 3360), 'tqdm.tqdm', 'tqdm', (['path_dict'], {}), '(path_dict)\n', (3349, 3360), False, 'from tqdm import tqdm\n'), ((405, 431), 'os.path.join', 'os.path.join', (['full_path', 'd'], {}), '(full_path, d)\n', (417, 431), False, 'import os\n'), ((624, 642), 'json.dump', 'json.dump', (['data', 'f'], {}), '(data, f)\n', (633, 642), False, 'import json\n'), ((1992, 2107), 'numpy.array', 'np.array', (['[[[0, 0, maxpt], [0, 0, maxpt], [0, 0, maxpt]], [[0, 0, minpt], [0, 0,\n minpt], [0, 0, minpt]]]', 'float'], {}), '([[[0, 0, maxpt], [0, 0, maxpt], [0, 0, maxpt]], [[0, 0, minpt], [0,\n 0, minpt], [0, 0, minpt]]], float)\n', (2000, 2107), True, 'import numpy as np\n'), ((2619, 2647), 'stl.mesh.Mesh.from_file', 'Mesh.from_file', (['path_dict[k]'], {}), '(path_dict[k])\n', (2633, 2647), False, 'from stl.mesh import Mesh\n'), ((2732, 2744), 'vtkplotlib.figure', 'vpl.figure', ([], {}), '()\n', (2742, 2744), True, 'import vtkplotlib as vpl\n'), ((2800, 2846), 'vtkplotlib.mesh_plot', 'vpl.mesh_plot', (['mesh'], {'color': '"""pink"""', 'opacity': '(0.3)'}), "(mesh, color='pink', opacity=0.3)\n", (2813, 2846), True, 'import vtkplotlib as vpl\n'), ((3110, 3167), 'vtkplotlib.save_fig', 'vpl.save_fig', (['save_path'], {'magnification': '(5)', 'off_screen': '(True)'}), '(save_path, magnification=5, off_screen=True)\n', (3122, 3167), True, 'import vtkplotlib as vpl\n'), ((3424, 3452), 'stl.mesh.Mesh.from_file', 'Mesh.from_file', (['path_dict[k]'], {}), '(path_dict[k])\n', (3438, 3452), False, 'from stl.mesh import Mesh\n'), ((3573, 3585), 'vtkplotlib.figure', 'vpl.figure', ([], {}), '()\n', (3583, 3585), True, 'import vtkplotlib as vpl\n'), ((3641, 3660), 'vtkplotlib.mesh_plot', 'vpl.mesh_plot', (['mesh'], {}), '(mesh)\n', (3654, 3660), True, 'import vtkplotlib as vpl\n'), ((3727, 3784), 'vtkplotlib.save_fig', 'vpl.save_fig', (['save_path'], {'magnification': '(5)', 'off_screen': '(True)'}), '(save_path, magnification=5, off_screen=True)\n', (3739, 3784), True, 'import vtkplotlib as vpl\n'), ((3022, 3042), 'vtkplotlib.mesh_plot', 'vpl.mesh_plot', (['mesh2'], {}), '(mesh2)\n', (3035, 3042), True, 'import vtkplotlib as vpl\n'), ((5536, 5556), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (5545, 5556), False, 'import json\n')] |
from random import choice, sample, randint
from itertools import product
try:
from utils_formula_parser import atomics, FOL_variables, FOL_predicates, FOL_individual_constants
except ModuleNotFoundError:
from exercises.templates.exercises.utils_formula_parser import atomics, FOL_variables, FOL_predicates, FOL_individual_constants
# ----------------------------------------------------------------------------------------------------------------------
# PROPOSITIONAL FORMULAS
def random_propositional_formula(max_depth, max_atomics, logic):
"""Makes a choice of depth and atomics and calls random_prop_formula2"""
depth = randint(0, max_depth)
if depth == 0:
atomics_to_be_used = atomics[:1]
else:
atomics_to_be_used = atomics[:randint(1, max_atomics)]
if len(atomics_to_be_used) > depth + 1:
atomics_to_be_used = atomics_to_be_used[:depth + 2]
formula = random_propositional_formula2(depth, atomics_to_be_used, logic)
return formula
def random_propositional_formula2(depth, atomics_to_be_used, logic):
"""Generates a random propositional formula
Returns [the formula, profundity, used letters]"""
formula_structure = random_propositional_structure(len(atomics_to_be_used), depth, logic)
formula = replace_propositional_letters(formula_structure, atomics_to_be_used)
return formula
def random_propositional_structure(num_atomics, depth, logic):
"""Places where a letter must go are marked with $
num_atomics is the MINIMUM number of $s that are needed"""
if depth == 0:
return '$'
constants2 = logic.constants()[:]
if depth + 1 == num_atomics:
# Choose only from the binary operations
constants2 = logic.constants(2)[:]
# If there is depth left, choose an operation
chosen_operation = choice(constants2)
# Unary operation
if chosen_operation in logic.constants(1):
formula_inside = random_propositional_structure(num_atomics, depth - 1, logic)
# formula = chosen_operation + formula_inside
formula = f'{chosen_operation}{formula_inside}'
return formula
else:
# We have to make sure that at least one of the sides has the required depth
# Just build two operations (one of them with complete depth, the other one random)
# And then make a choice as to where to put the complete one
formula1 = random_propositional_structure(1, depth - 1, logic)
formula1_num_letters = formula1.count('$')
formula2_num_letters = max(1, num_atomics - formula1_num_letters)
formula2 = random_propositional_structure(formula2_num_letters,
randint(formula2_num_letters - 1, depth - 1), logic)
if choice([True, False]):
# formula = "(" + formula1 + " " + chosen_operation + " " + formula2 + ")"
formula = f'({formula1} {chosen_operation} {formula2})'
else:
# formula = "(" + formula2 + " " + chosen_operation + " " + formula1 + ")"
formula = f'({formula2} {chosen_operation} {formula1})'
return formula
def replace_propositional_letters(formula_structure, atomic_replacements):
replacement_indexes = [x for x in range(len(formula_structure)) if formula_structure[x] == '$']
# Each atomic chooses one position
for a in atomic_replacements:
chosen_position = choice(replacement_indexes)
before = formula_structure[:chosen_position]
after = formula_structure[chosen_position + 1:]
formula_structure = f'{before}{a}{after}'
# formula_structure = formula_structure[:chosen_position] + a + formula_structure[chosen_position + 1:]
replacement_indexes.remove(chosen_position)
# If there are still $s left, assign random atomics to them
while '$' in formula_structure:
first_apperance = formula_structure.index('$')
before = formula_structure[:first_apperance]
after = formula_structure[first_apperance + 1:]
formula_structure = f'{before}{choice(atomic_replacements)}{after}'
# formula_structure = formula_structure[:first_apperance] + choice(atomic_replacements) + \
# formula_structure[first_apperance + 1:]
return formula_structure
def random_propositional_argument(number_premises, max_profund, max_letters, logic):
premises = list()
for num in range(number_premises):
premises.append(random_propositional_formula(max_profund, max_letters, logic))
conclusion = random_propositional_formula(max_profund, max_letters, logic)
return [*premises, conclusion]
# ----------------------------------------------------------------------------------------------------------------------
# SETS
def random_set(max_cardinality):
"""Returns the set AND A LIST! (so that its members are ordered for display)"""
cardinality = randint(0, max_cardinality)
set_to_sample = set(range(1, max_cardinality + 1))
chosen_list = sample(set_to_sample, cardinality)
chosen_list.sort()
chosen_list = [str(x) for x in chosen_list]
chosen_set = frozenset(chosen_list)
return [chosen_set, chosen_list]
def random_set_operation(sets, operations_to_use, depth):
formula_structure = random_set_operation_structure(len(sets), operations_to_use, depth)
formula = replace_set_letters(formula_structure, sets)
return formula
def random_set_operation_structure(num_sets, operations_to_use, depth):
"""Places where a letter must go are marked with $
num_sets is the MINIMUM number of $s that are needed
Operations_to_use is a list (for ex ['union', 'inters', 'cartp'])"""
if depth == 0:
return '$'
operations2 = operations_to_use[:]
if depth + 1 == num_sets:
# Choose only from the binary operations
if '℘' in operations2:
operations2.remove('℘')
# If there is depth left, choose an operation
chosen_operation = choice(operations2)
# The only unary operation
if chosen_operation == '℘':
formula_inside = random_set_operation_structure(num_sets, operations_to_use, depth-1)
if formula_inside[0] == '(' and formula_inside[-1] == ')':
formula_inside = formula_inside[1:-1]
formula = f'℘({formula_inside})'
return formula
else:
# We have to make sure that at least one of the sides has the required depth
# Just build two operations (one of them with complete depth, the other one random)
# And then make a choice as to where to put the complete one
formula1 = random_set_operation_structure(1, operations_to_use, depth - 1)
formula1_num_sets = formula1.count('$')
formula2_num_sets = max(1, num_sets - formula1_num_sets)
formula2 = random_set_operation_structure(formula2_num_sets, operations_to_use,
randint(formula2_num_sets - 1, depth - 1))
if choice([True, False]):
formula = f'{chosen_operation}({formula1},{formula2})'
else:
formula = f'{chosen_operation}({formula2},{formula1})'
return formula
def replace_set_letters(formula_structure, sets):
"""Gets a string with $ symbols inside, replaces them with the sets (each set appears at least once, then random)
SETS (names) MUST BE ONE CHARACTER LONG"""
replacement_indexes = [x for x in range(len(formula_structure)) if formula_structure[x] == '$']
# Each set chooses one position
for s in sets:
chosen_position = choice(replacement_indexes)
formula_structure = formula_structure[:chosen_position] + s + formula_structure[chosen_position+1:]
replacement_indexes.remove(chosen_position)
# If there are still $s left, assign random sets to them
while '$' in formula_structure:
first_apperance = formula_structure.index('$')
formula_structure = formula_structure[:first_apperance] + choice(sets) + formula_structure[first_apperance+1:]
return formula_structure
# ----------------------------------------------------------------------------------------------------------------------
# FIRST ORDER
def random_predicate_formula(num_variables, depth, max_arity, max_constants, logic):
# Depth has to be at least the number of bound variables
# Max arity has to be at least the number of bound variables
formula_structure = random_predicate_structure(num_variables, depth, logic)
formula_structure = replace_predicate_variables(formula_structure, num_variables)
formula_structure = replace_predicate_atomics1(formula_structure, logic)
formula_structure = replace_predicate_atomics2(formula_structure, max_arity)
formula_structure = replace_predicate_atomics3(formula_structure, max_constants, logic)
return formula_structure
def random_predicate_structure(num_variables, depth, logic):
"""Places where a letter must go are marked with $
Places where a variable must go are marked with %"""
if depth == 0:
return '$'
constants2 = logic.constants()[:]
constants2.extend(logic.quantifiers[:])
if depth == num_variables:
# Choose only from the quantifiers
constants2 = logic.quantifiers[:]
elif num_variables == 0:
# Don't choose from the quantifiers (otherwise it puts spurious quantifiers Vx Ex Phi)
constants2 = logic.constants()[:]
# If there is depth left, choose an operation
chosen_operation = choice(constants2)
# Unary operation
if chosen_operation in logic.constants(1):
formula_inside = random_predicate_structure(num_variables, depth - 1, logic)
# formula = chosen_operation + formula_inside
formula = f'{chosen_operation}{formula_inside}'
return formula
# Quantifier
elif chosen_operation in logic.quantifiers:
formula_inside = random_predicate_structure(num_variables - 1, depth - 1, logic)
formula = f'{chosen_operation}% {formula_inside}'
return formula
# Binary
else:
# We have to make sure that at least one of the sides has the required depth
# Just build two operations (one of them with complete depth, the other one random)
# And then make a choice as to where to put the complete one
formula1 = random_predicate_structure(randint(0, num_variables), depth - 1, logic)
formula1_num_variables = formula1.count('%')
formula2_num_variables = num_variables - formula1_num_variables
formula2_depth = randint(formula2_num_variables, depth -1)
formula2 = random_predicate_structure(formula2_num_variables, formula2_depth, logic)
if choice([True, False]):
formula = f'({formula1} {chosen_operation} {formula2})'
else:
formula = f'({formula2} {chosen_operation} {formula1})'
return formula
def replace_predicate_variables(formula_structure, num_variables):
variables = FOL_variables[:num_variables]
replacement_indexes = [x for x in range(len(formula_structure)) if formula_structure[x] == '%']
# Each variable chooses one position
for a in variables:
chosen_position = choice(replacement_indexes)
before = formula_structure[:chosen_position]
after = formula_structure[chosen_position + 1:]
formula_structure = f'{before}{a}{after}'
replacement_indexes.remove(chosen_position)
# If there are still %s left, assign random variables to them
while '%' in formula_structure:
first_apperance = formula_structure.index('%')
before = formula_structure[:first_apperance]
after = formula_structure[first_apperance + 1:]
formula_structure = f'{before}{choice(variables)}{after}'
return formula_structure
def replace_predicate_atomics1(formula_structure, logic):
"""This function merely replaces the $s with the number of bound variables"""
bound_variables = list()
bound_variables_parentheses = list()
for index in range(len(formula_structure)):
if formula_structure[index] == '(':
bound_variables_parentheses = [x + 1 for x in bound_variables_parentheses]
elif formula_structure[index] == ')':
bound_variables_parentheses = [x - 1 for x in bound_variables_parentheses]
elif formula_structure[index] == '$':
formula_structure = formula_structure[:index] + str(len(bound_variables)) + formula_structure[index+1:]
if formula_structure[index] in logic.constants(2) and \
bound_variables_parentheses and bound_variables_parentheses[-1] == 0:
del bound_variables[-1]
del bound_variables_parentheses[-1]
if formula_structure[index] in FOL_variables:
bound_variables.append(formula_structure[index])
bound_variables_parentheses.append(0)
return formula_structure
def replace_predicate_atomics2(form_structure, max_arity):
"""This one decides the arity of the predicates and then replaces the integers with letters followed with #"""
formula_structure = form_structure
formula_numbers = list(formula_structure)
formula_numbers = [int(x) for x in formula_numbers if x.isdigit()]
min_bounded = min(formula_numbers)
max_bounded = max(formula_numbers)
predicates = FOL_predicates[:randint(1, len(formula_numbers))]
predicate_arity_dict = {p: None for p in predicates}
pred_with_max = choice(predicates)
predicate_arity_dict[pred_with_max] = randint(max_bounded, max_arity)
for p in [x for x in predicates if x != pred_with_max]:
predicate_arity_dict[p] = max(1, randint(min_bounded, max_arity))
# Have each predicate choose a spot (if it can)
for pred in predicates:
replacement_indexes = [x for x in range(len(formula_structure)) if formula_structure[x].isdigit() and
int(formula_structure[x]) <= predicate_arity_dict[pred]]
if replacement_indexes:
chosen_index = choice(replacement_indexes)
formula_structure = formula_structure[:chosen_index] + pred + "#" * predicate_arity_dict[pred] \
+ formula_structure[chosen_index + 1:]
# If there are still spaces left, put random predicates in them
remaining_atomics = [x for x in formula_structure if x.isdigit()]
while remaining_atomics:
first_digit = remaining_atomics[0]
first_digit_index = formula_structure.index(remaining_atomics[0])
possible_predicates = [p for p in predicates if predicate_arity_dict[p] >= int(first_digit)]
chosen_predicate = choice(possible_predicates)
formula_structure = formula_structure[:first_digit_index] \
+ chosen_predicate + "#" * predicate_arity_dict[chosen_predicate] \
+ formula_structure[first_digit_index+1:]
remaining_atomics = [x for x in formula_structure if x.isdigit()]
return formula_structure
def replace_predicate_atomics3(formula_structure, max_constants, logic):
"""Replaces some #s with variables, the rest with constants"""
for index1 in range(len(formula_structure)):
if formula_structure[index1] in logic.quantifiers:
variable = formula_structure[index1 + 1]
variable_present = False
# Get the reach of the quantifier
reach = ''
open_left = 0
closing_right = 0
other_variables = set()
# The next character is a variable, and the next is a space: Vx Phi
for index2 in range(index1 + 3, len(formula_structure)):
if formula_structure[index2] == '(':
open_left += 1
reach += formula_structure[index2]
elif formula_structure[index2] == ')':
closing_right += 1
if closing_right >= open_left:
break
reach += formula_structure[index2]
elif formula_structure[index2] in logic.constants(2) and open_left == closing_right:
reach = reach[:-1]
break
elif formula_structure[index2] in logic.quantifiers:
if formula_structure[index2+1] != variable:
other_variables.add(formula_structure[index2+1])
reach += formula_structure[index2]
elif formula_structure[index2] == variable:
variable_present = True
reach += formula_structure[index2]
else:
reach += formula_structure[index2]
# If the variable is already present in the reach, do nothing
if not variable_present:
replacement_indexes = [x for x in range(index1, index1+len(reach)+3) if formula_structure[x] == '#']
if len(replacement_indexes) - len(other_variables) != 0:
sample_size = randint(1, len(replacement_indexes) - len(other_variables))
replacements = sample(replacement_indexes, sample_size)
for index3 in replacements:
formula_structure = formula_structure[:index3] + variable + formula_structure[index3+1:]
# If there are still #s left, replace them with constants
if "#" in formula_structure:
constants = FOL_individual_constants[:max_constants]
while "#" in formula_structure:
first_open = formula_structure.index("#")
formula_structure = formula_structure[: first_open] + choice(constants) + formula_structure[first_open+1:]
return formula_structure
def random_model(num_elements, formula, logic_name):
# MAXIMUM NUM ELEMENTS IS 14
dom = sample(list(range(1,15)), num_elements)
dom.sort()
model = dict()
model['Domain'] = [str(x) for x in dom]
ind_constants_copy = FOL_individual_constants[:num_elements]
for elem in dom:
chosen_constant = choice(ind_constants_copy)
ind_constants_copy.remove(chosen_constant)
model[chosen_constant] = str(elem)
# Now infer the number and arity of predicates from the formula
predicate_arity_dict = dict()
predicates_in_formula = {p for p in formula if p in FOL_predicates}
for p in predicates_in_formula:
for index1 in range(len(formula)):
if formula[index1] == p:
arity = 0
for index2 in range(index1+1, len(formula)):
if formula[index2] in FOL_individual_constants or formula[index2] in FOL_variables:
arity += 1
else:
break
if formula[index1] not in predicate_arity_dict:
predicate_arity_dict[formula[index1]] = arity
break
# Give an extension and anti-extension to each predicate
# Manually impose restrictions according to the chosen logic
for predicate in predicate_arity_dict:
if predicate_arity_dict[predicate] == 1:
all_tuples = model["Domain"]
elif predicate_arity_dict[predicate] > 1:
all_tuples = list(product(model["Domain"], repeat=predicate_arity_dict[predicate]))
model[predicate + "+"] = sorted(sample(all_tuples, randint(0, len(all_tuples))))
if logic_name == "Classical":
# Every possibiliy is either in the extension or the antiextension, and not both
model[predicate + "-"] = sorted(list(set(all_tuples) - set(model[predicate + "+"])))
elif logic_name == "LP" or logic_name == "RM3" or logic_name == "LFI1":
# Every item that is not in the extension must be in the anti - but there might be some overlapping elements
model[predicate + "-"] = list(set(all_tuples) - set(model[predicate + "+"]))
model[predicate + "-"].extend(sample(model[predicate + "+"], randint(0, len(model[predicate + "+"]))))
model[predicate + "-"].sort()
elif logic_name == "K3" or logic_name == "Ł3":
# No item overlaps, but there might be things that are not in the ext or antiext
possible_anti = list(set(all_tuples) - set(model[predicate + "+"]))
model[predicate + "-"] = sorted(sample(possible_anti, randint(0, len(possible_anti))))
elif logic_name == "FDE":
# Any value goes for the anti-extension
model[predicate + "-"] = sorted(sample(all_tuples, randint(0, len(all_tuples))))
return model
| [
"itertools.product",
"random.sample",
"random.choice",
"random.randint"
] | [((644, 665), 'random.randint', 'randint', (['(0)', 'max_depth'], {}), '(0, max_depth)\n', (651, 665), False, 'from random import choice, sample, randint\n'), ((1842, 1860), 'random.choice', 'choice', (['constants2'], {}), '(constants2)\n', (1848, 1860), False, 'from random import choice, sample, randint\n'), ((4950, 4977), 'random.randint', 'randint', (['(0)', 'max_cardinality'], {}), '(0, max_cardinality)\n', (4957, 4977), False, 'from random import choice, sample, randint\n'), ((5051, 5085), 'random.sample', 'sample', (['set_to_sample', 'cardinality'], {}), '(set_to_sample, cardinality)\n', (5057, 5085), False, 'from random import choice, sample, randint\n'), ((6022, 6041), 'random.choice', 'choice', (['operations2'], {}), '(operations2)\n', (6028, 6041), False, 'from random import choice, sample, randint\n'), ((9566, 9584), 'random.choice', 'choice', (['constants2'], {}), '(constants2)\n', (9572, 9584), False, 'from random import choice, sample, randint\n'), ((13556, 13574), 'random.choice', 'choice', (['predicates'], {}), '(predicates)\n', (13562, 13574), False, 'from random import choice, sample, randint\n'), ((13617, 13648), 'random.randint', 'randint', (['max_bounded', 'max_arity'], {}), '(max_bounded, max_arity)\n', (13624, 13648), False, 'from random import choice, sample, randint\n'), ((2793, 2814), 'random.choice', 'choice', (['[True, False]'], {}), '([True, False])\n', (2799, 2814), False, 'from random import choice, sample, randint\n'), ((3442, 3469), 'random.choice', 'choice', (['replacement_indexes'], {}), '(replacement_indexes)\n', (3448, 3469), False, 'from random import choice, sample, randint\n'), ((7029, 7050), 'random.choice', 'choice', (['[True, False]'], {}), '([True, False])\n', (7035, 7050), False, 'from random import choice, sample, randint\n'), ((7624, 7651), 'random.choice', 'choice', (['replacement_indexes'], {}), '(replacement_indexes)\n', (7630, 7651), False, 'from random import choice, sample, randint\n'), ((11274, 11301), 'random.choice', 'choice', (['replacement_indexes'], {}), '(replacement_indexes)\n', (11280, 11301), False, 'from random import choice, sample, randint\n'), ((14744, 14771), 'random.choice', 'choice', (['possible_predicates'], {}), '(possible_predicates)\n', (14750, 14771), False, 'from random import choice, sample, randint\n'), ((18165, 18191), 'random.choice', 'choice', (['ind_constants_copy'], {}), '(ind_constants_copy)\n', (18171, 18191), False, 'from random import choice, sample, randint\n'), ((2728, 2772), 'random.randint', 'randint', (['(formula2_num_letters - 1)', '(depth - 1)'], {}), '(formula2_num_letters - 1, depth - 1)\n', (2735, 2772), False, 'from random import choice, sample, randint\n'), ((6974, 7015), 'random.randint', 'randint', (['(formula2_num_sets - 1)', '(depth - 1)'], {}), '(formula2_num_sets - 1, depth - 1)\n', (6981, 7015), False, 'from random import choice, sample, randint\n'), ((10622, 10664), 'random.randint', 'randint', (['formula2_num_variables', '(depth - 1)'], {}), '(formula2_num_variables, depth - 1)\n', (10629, 10664), False, 'from random import choice, sample, randint\n'), ((10769, 10790), 'random.choice', 'choice', (['[True, False]'], {}), '([True, False])\n', (10775, 10790), False, 'from random import choice, sample, randint\n'), ((13751, 13782), 'random.randint', 'randint', (['min_bounded', 'max_arity'], {}), '(min_bounded, max_arity)\n', (13758, 13782), False, 'from random import choice, sample, randint\n'), ((14122, 14149), 'random.choice', 'choice', (['replacement_indexes'], {}), '(replacement_indexes)\n', (14128, 14149), False, 'from random import choice, sample, randint\n'), ((774, 797), 'random.randint', 'randint', (['(1)', 'max_atomics'], {}), '(1, max_atomics)\n', (781, 797), False, 'from random import choice, sample, randint\n'), ((4097, 4124), 'random.choice', 'choice', (['atomic_replacements'], {}), '(atomic_replacements)\n', (4103, 4124), False, 'from random import choice, sample, randint\n'), ((8031, 8043), 'random.choice', 'choice', (['sets'], {}), '(sets)\n', (8037, 8043), False, 'from random import choice, sample, randint\n'), ((10426, 10451), 'random.randint', 'randint', (['(0)', 'num_variables'], {}), '(0, num_variables)\n', (10433, 10451), False, 'from random import choice, sample, randint\n'), ((11819, 11836), 'random.choice', 'choice', (['variables'], {}), '(variables)\n', (11825, 11836), False, 'from random import choice, sample, randint\n'), ((17231, 17271), 'random.sample', 'sample', (['replacement_indexes', 'sample_size'], {}), '(replacement_indexes, sample_size)\n', (17237, 17271), False, 'from random import choice, sample, randint\n'), ((17750, 17767), 'random.choice', 'choice', (['constants'], {}), '(constants)\n', (17756, 17767), False, 'from random import choice, sample, randint\n'), ((19351, 19415), 'itertools.product', 'product', (["model['Domain']"], {'repeat': 'predicate_arity_dict[predicate]'}), "(model['Domain'], repeat=predicate_arity_dict[predicate])\n", (19358, 19415), False, 'from itertools import product\n')] |
from keras import models, layers, regularizers, constraints, backend as K
from keras.engine.topology import Layer
__all__ = ["ConstMultiplierLayer", "custom_layers"]
class ConstMultiplierLayer(Layer):
def __init__(self, **kwargs):
super(ConstMultiplierLayer, self).__init__(**kwargs)
def build(self, input_shape):
self.k = self.add_weight(
name='k',
shape=(),
initializer='ones',
dtype='float32',
trainable=True,
constraint=constraints.MinMaxNorm(
min_value=0.0, max_value=1.0, axis=[]
),
)
super(ConstMultiplierLayer, self).build(input_shape)
def call(self, x):
return K.tf.multiply(self.k, x)
def compute_output_shape(self, input_shape):
return input_shape
custom_layers = {ConstMultiplierLayer.__name__: ConstMultiplierLayer}
| [
"keras.backend.tf.multiply",
"keras.constraints.MinMaxNorm"
] | [((727, 751), 'keras.backend.tf.multiply', 'K.tf.multiply', (['self.k', 'x'], {}), '(self.k, x)\n', (740, 751), True, 'from keras import models, layers, regularizers, constraints, backend as K\n'), ((524, 585), 'keras.constraints.MinMaxNorm', 'constraints.MinMaxNorm', ([], {'min_value': '(0.0)', 'max_value': '(1.0)', 'axis': '[]'}), '(min_value=0.0, max_value=1.0, axis=[])\n', (546, 585), False, 'from keras import models, layers, regularizers, constraints, backend as K\n')] |
# -*- encoding: utf-8 -*-
"""Модуль с функциями логгирования."""
import logging
import os
from project.settings import LOG_FOLDER, LOG
LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
def __get_handler(filename, level, log_format):
handler = logging.FileHandler(os.path.join(LOG_FOLDER, filename))
handler.setLevel(level)
handler.setFormatter(logging.Formatter(log_format))
return handler
def __get_handler_std(level, log_format):
handler = logging.StreamHandler()
handler.setLevel(level)
handler.setFormatter(logging.Formatter(log_format))
return handler
def __init_logger(level, file, log_format=None):
if log_format is None:
global LOG_FORMAT
log_format = LOG_FORMAT
log_obj = logging.getLogger()
log_obj.setLevel(logging.DEBUG)
log_obj.addHandler(__get_handler(file, level, log_format))
# ---
# для принтов в консоль
if LOG and level == logging.DEBUG:
log_obj.addHandler(__get_handler_std(level, log_format))
# ---
return log_obj
def __get_log_func(level, log_obj):
return {
logging.INFO: log_obj.info,
logging.DEBUG: log_obj.debug,
logging.ERROR: log_obj.error,
logging.CRITICAL: log_obj.critical,
}.get(level)
def __log_method(level, file):
log_object = __init_logger(level, file)
log_func = __get_log_func(level, log_object)
assert log_func is not None
def _level_info(*args, **kwargs):
message = str(*args) + str(**kwargs)
log_object.setLevel(level)
log_func(message)
return _level_info
log_info = __log_method(logging.INFO, 'info.log')
log_debug = __log_method(logging.DEBUG, 'debug.log')
log_critical = __log_method(logging.CRITICAL, 'critical.log')
log_error = __log_method(logging.ERROR, 'error.log')
log_track = __log_method(logging.INFO, 'tracking.log')
__all__ = ['log_info', 'log_debug', 'log_critical', 'log_error', 'log_track', ]
| [
"logging.getLogger",
"logging.Formatter",
"logging.StreamHandler",
"os.path.join"
] | [((474, 497), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (495, 497), False, 'import logging\n'), ((751, 770), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (768, 770), False, 'import logging\n'), ((277, 311), 'os.path.join', 'os.path.join', (['LOG_FOLDER', 'filename'], {}), '(LOG_FOLDER, filename)\n', (289, 311), False, 'import os\n'), ((366, 395), 'logging.Formatter', 'logging.Formatter', (['log_format'], {}), '(log_format)\n', (383, 395), False, 'import logging\n'), ((551, 580), 'logging.Formatter', 'logging.Formatter', (['log_format'], {}), '(log_format)\n', (568, 580), False, 'import logging\n')] |
"""
This module contains the classes ActionTree and Node, to store
the command history.
"""
from . import commands
from .document import Document, next_document, previous_document
from .filecommands import quit_document, quit_all, open_file, force_quit
from .mode import Mode
from logging import debug
class UndoTree:
"""
Stores the command history as a tree with undo/redo functionality.
"""
sequence = None
sequence_depth = 0
def __init__(self, doc):
self.doc = doc
self.root = Node(None)
self.current_node = self.root
def undo(self):
"""Undo previous command set current_node to its parent."""
if self.sequence != None:
# TODO: does this have to be a hard fail?
raise Exception('Cannot perform undo; a sequence of commands is being added')
if self.current_node.parent:
for command in reversed(self.current_node.commands):
command.undo(self.doc)
self.current_node = self.current_node.parent
def redo(self, child_index=0):
"""Redo most recent next command, or command at child_index if specified."""
if self.sequence != None:
# TODO: does this have to be a hard fail?
raise Exception('Cannot perform redo; a sequence of commands is being added')
if self.current_node and self.current_node.children:
l = len(self.current_node.children)
assert 0 <= child_index < l
self.current_node = self.current_node.children[child_index]
for command in self.current_node.commands:
command.do(self.doc)
def add(self, command):
"""Add a new undoable command."""
if self.sequence != None:
self.sequence.add_command(command)
else:
node = Node(self.current_node)
node.add_command(command)
self.current_node.add_child(node)
self.current_node = node
def hard_undo(self):
"""
Actually removes current_node.
Useful for previewing operations.
"""
if self.sequence != None:
# TODO: does this have to be a hard fail?
raise Exception(
'Cannot perform hard undo; a sequence of commands is being added'
)
if self.current_node.parent:
current_node = self.current_node
self.undo()
self.current_node.children.remove(current_node)
def start_sequence(self):
"""
Indicate start of a sequence.
All incoming commands should be gathered and put into one compound command.
"""
if self.sequence_depth == 0:
self.sequence = Node(self.current_node)
self.sequence_depth += 1
debug('Starting undo sequence. Entering depth: {}'.format(self.sequence_depth))
def end_sequence(self):
"""
End of sequence.
"""
if self.sequence_depth < 1:
raise Exception('Cannot end sequence; no sequence present.')
debug('Ending undo sequence. Leaving depth: {}'.format(self.sequence_depth))
if self.sequence_depth == 1:
if self.sequence.commands != []:
self.current_node.add_child(self.sequence)
self.current_node = self.sequence
self.sequence_depth = 0
self.sequence = None
else:
self.sequence_depth -= 1
class Node:
"""
Node of an ActionTree.
Each node can have a single parent, a list of commands and a list of children.
"""
def __init__(self, parent):
self.parent = parent
self.commands = []
self.children = []
def add_command(self, command):
"""Add an command to node."""
self.commands.append(command)
def add_child(self, node):
"""Add a child to node. First child is most recent."""
self.children.insert(0, node)
def init(doc):
doc.undotree = UndoTree(doc)
Document.OnDocumentInit.add(init)
# Some commands for interacting with the undo tree
def undo(doc):
"""Undo last command."""
doc.undotree.undo()
commands.undo = undo
def redo(doc):
"""Redo last undo."""
doc.undotree.redo()
commands.redo = redo
class UndoMode(Mode):
"""
Walk around in undo tree using arrow keys.
You can only switch branches between siblings.
"""
def __init__(self, doc):
Mode.__init__(self, doc)
self.keymap.update({
'left': self.left,
'right': self.right,
'up': self.up,
'down': self.down,
})
self.allowedcommands.extend([
next_document, previous_document, quit_document,
quit_all, open_file, force_quit
])
def start(self, doc, *args, **kwargs):
debug('Starting undo mode')
# Make sure the child_index is set to the index we now have
self.child_index = self.current_index()
Mode.start(self, doc, *args, **kwargs)
def stop(self, doc, *args, **kwargs):
debug('Exiting undo mode')
Mode.stop(self, doc)
def left(self, doc):
# We can always just call undo; if there is no parent it will do nothing
self.child_index = 0
doc.undotree.undo()
def right(self, doc):
# We can always just call redo; if there is no child it will do nothing
self.child_index = 0
doc.undotree.redo()
def up(self, doc):
self.child_index -= 1
# update_child_index() will take care of having a valid child_index
self.update_child_index()
def down(self, doc):
self.child_index += 1
# update_child_index() will take care of having a valid child_index
self.update_child_index()
def update_child_index(self):
"""Undo and execute the child pointed by the current child_index."""
if self.doc.undotree.current_node.parent != None:
self.doc.undotree.undo()
# The child index must be bound to the correct domain
self.child_index = self.bound(self.child_index)
self.doc.undotree.redo(self.child_index)
def bound(self, child_index):
"""Bound the given child_index to be a valid index."""
l = len(self.doc.undotree.current_node.children)
if l > 0:
child_index = min(child_index, l - 1)
child_index = max(child_index, 0)
assert 0 <= child_index < l
return child_index
def current_index(self):
node = self.doc.undotree.current_node
return node.parent.children.index(node) if node.parent != None else 0
def init_undomode(doc):
doc.modes.undomode = UndoMode(doc)
Document.OnModeInit.add(init_undomode)
def undomode(doc):
return doc.modes.undomode
commands.undomode = undomode
| [
"logging.debug"
] | [((4853, 4880), 'logging.debug', 'debug', (['"""Starting undo mode"""'], {}), "('Starting undo mode')\n", (4858, 4880), False, 'from logging import debug\n'), ((5095, 5121), 'logging.debug', 'debug', (['"""Exiting undo mode"""'], {}), "('Exiting undo mode')\n", (5100, 5121), False, 'from logging import debug\n')] |
from tkinter import *
from tkinter import ttk
import random
def main():
window = Tk()
window.geometry('300x100')
window.resizable(width=False, height=False)
window.title('GUI app')
change_title_button = Button(window, text='Change the title above', command=change_title(window)).pack()
quit_button = Button(window, text='Exit', command=window.destroy).pack()
def change_title(window):
new_title = ''
for i in range(7):
new_title = new_title + chr(ord('A') + random.randrange(26))
window.title(new_title)
main()
| [
"random.randrange"
] | [((533, 553), 'random.randrange', 'random.randrange', (['(26)'], {}), '(26)\n', (549, 553), False, 'import random\n')] |
import os
import shutil
print("python script v2")
def runCmd(cmd):
# D20190120T053813:here
print(("runCmd",cmd))
import subprocess
subprocess.check_call(cmd, shell=True) # works (w unbuffer)
if False:
output = subprocess.check_output(cmd, shell=True)
output=output.decode('utf8')
print(output)
def buildNimCsources():
print("in buildNimCsources")
runCmd("git clone --depth 1 https://github.com/nim-lang/csources.git")
# runCmd("cd csources && sh build.sh")
buildNimCsources()
print("after buildNimCsources")
| [
"subprocess.check_output",
"subprocess.check_call"
] | [((141, 179), 'subprocess.check_call', 'subprocess.check_call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (162, 179), False, 'import subprocess\n'), ((226, 266), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (249, 266), False, 'import subprocess\n')] |
import builtins
import os
import signal
import sys
from hstest.dynamic.security.exit_exception import ExitException
class ExitHandler:
_saved = False
_replaced = False
_builtins_quit = None
_builtins_exit = None
_os_kill = None
_os__exit = None
_sys_exit = None
_os_killpg = None
_signal_pthread_kill = None
_signal_siginterrupt = None
_exit_func = lambda *x, **y: ExitException.throw()
@staticmethod
def is_replaced():
return ExitHandler._replaced
@staticmethod
def replace_exit():
if not ExitHandler._saved:
ExitHandler._saved = True
ExitHandler._builtins_quit = builtins.quit if hasattr(builtins, 'quit') else None
ExitHandler._builtins_exit = builtins.exit if hasattr(builtins, 'exit') else None
ExitHandler._os_kill = os.kill if hasattr(os, 'kill') else None
ExitHandler._os__exit = os._exit if hasattr(os, '_exit') else None
ExitHandler._os_killpg = os.killpg if hasattr(os, 'killpg') else None
ExitHandler._sys_exit = sys.exit if hasattr(sys, 'exit') else None
ExitHandler._signal_pthread_kill = signal.pthread_kill if hasattr(signal, 'pthread_kill') else None
ExitHandler._signal_siginterrupt = signal.siginterrupt if hasattr(signal, 'siginterrupt') else None
builtins.quit = ExitHandler._exit_func
builtins.exit = ExitHandler._exit_func
os.kill = ExitHandler._exit_func
os._exit = ExitHandler._exit_func
os.killpg = ExitHandler._exit_func
sys.exit = ExitHandler._exit_func
signal.pthread_kill = ExitHandler._exit_func
signal.siginterrupt = ExitHandler._exit_func
ExitHandler._replaced = True
@staticmethod
def revert_exit():
if ExitHandler._replaced:
builtins.quit = ExitHandler._builtins_quit
builtins.exit = ExitHandler._builtins_exit
os.kill = ExitHandler._os_kill
os._exit = ExitHandler._os__exit
os.killpg = ExitHandler._os_killpg
sys.exit = ExitHandler._sys_exit
signal.pthread_kill = ExitHandler._signal_pthread_kill
signal.siginterrupt = ExitHandler._signal_siginterrupt
ExitHandler._replaced = False
| [
"hstest.dynamic.security.exit_exception.ExitException.throw"
] | [((414, 435), 'hstest.dynamic.security.exit_exception.ExitException.throw', 'ExitException.throw', ([], {}), '()\n', (433, 435), False, 'from hstest.dynamic.security.exit_exception import ExitException\n')] |
'''
Created by auto_sdk on 2018.10.10
'''
from top.api.base import RestApi
class CainiaoCloudprintSingleCustomareaGetRequest(RestApi):
def __init__(self, domain='gw.api.taobao.com', port=80):
RestApi.__init__(self, domain, port)
self.seller_id = None
def getapiname(self):
return 'cainiao.cloudprint.single.customarea.get'
| [
"top.api.base.RestApi.__init__"
] | [((214, 250), 'top.api.base.RestApi.__init__', 'RestApi.__init__', (['self', 'domain', 'port'], {}), '(self, domain, port)\n', (230, 250), False, 'from top.api.base import RestApi\n')] |
import argparse
import os
import json
from pathlib import Path
from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication
def get_client(organization, personal_access_token):
organization_url = 'https://dev.azure.com/' + organization
# Create a connection to the org
credentials = BasicAuthentication('', personal_access_token)
connection = Connection(base_url=organization_url,
creds=credentials)
# Get the build client
build_client = connection.clients_v6_0.get_build_client()
return build_client
def define_build(id, source_branch, source_version, parameters):
build = {
'definition': {
'id': id
}
}
# Add optional parameters
if source_branch:
build["source_branch"] = source_branch
if source_version:
build["source_version"] = source_version
if parameters:
build["parameters"] = parameters
return build
def queue_build(client, build, project):
# The failure responses from Azure Pipelines are pretty good,
# don't do any special handling.
queue_build_response = client.queue_build(build, project)
return queue_build_response
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--organization',
required=True,
help='Azure DevOps organization')
parser.add_argument('-p', '--project',
required=True,
help='Azure DevOps project')
parser.add_argument('-i', '--id',
required=True,
help='Id of the pipeline definition')
parser.add_argument('-ppe', '--pat_path_env',
help='Name of environment variable containing the path to the Azure DevOps PAT') # noqa: E501
parser.add_argument('-pe', '--pat_env',
help='Name of environment variable containing the Azure DevOps PAT') # noqa: E501
parser.add_argument(
'--source_branch', help='Source branch for the pipeline')
parser.add_argument(
'--source_version', help='Source version for the pipeline')
parser.add_argument(
'--parameters', help='Parameters for the pipeline')
parser.add_argument(
'--output_url_path', help='Url of the queued pipeline')
args = parser.parse_args()
if args.pat_env:
# Read PAT from env var
pat = os.environ[args.pat_env]
elif args.pat_path_env:
# Read PAT from file
with open(os.environ[args.pat_path_env], 'r') as f:
pat = f.readline()
f.close
else:
raise Exception('Please provide a PAT via pat_env or pat_path_env')
client = get_client(args.organization, pat)
build = define_build(args.id,
args.source_branch,
args.source_version,
args.parameters)
results = queue_build(client, build, args.project)
# Print the url of the queued build
print(results.url)
# Write Output
print("Creating output directory")
output_url_path = args.output_url_path
Path(output_url_path).parent.mkdir(parents=True, exist_ok=True)
with open(output_url_path, 'w') as f:
json.dump(results.url, f)
if __name__ == "__main__":
main()
| [
"argparse.ArgumentParser",
"pathlib.Path",
"msrest.authentication.BasicAuthentication",
"azure.devops.connection.Connection",
"json.dump"
] | [((338, 384), 'msrest.authentication.BasicAuthentication', 'BasicAuthentication', (['""""""', 'personal_access_token'], {}), "('', personal_access_token)\n", (357, 384), False, 'from msrest.authentication import BasicAuthentication\n'), ((402, 458), 'azure.devops.connection.Connection', 'Connection', ([], {'base_url': 'organization_url', 'creds': 'credentials'}), '(base_url=organization_url, creds=credentials)\n', (412, 458), False, 'from azure.devops.connection import Connection\n'), ((1260, 1285), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1283, 1285), False, 'import argparse\n'), ((3315, 3340), 'json.dump', 'json.dump', (['results.url', 'f'], {}), '(results.url, f)\n', (3324, 3340), False, 'import json\n'), ((3200, 3221), 'pathlib.Path', 'Path', (['output_url_path'], {}), '(output_url_path)\n', (3204, 3221), False, 'from pathlib import Path\n')] |
# Scientific Library
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# from jax.scipy.special import logsumexp
from scipy.special import logsumexp
# Standard Library
import logging
from multiprocessing import current_process, Manager, Pool, Process, Queue
import time
import warnings
# Third Party
import jax
import jax.numpy as jnp
from jax.random import PRNGKey as Key
from joblib import delayed, Parallel
import numpyro
from numpyro import distributions as dist
from numpyro.infer import log_likelihood, MCMC, NUTS, Predictive
import timeout_decorator
from timeout_decorator import TimeoutError
from tqdm.auto import tqdm
# First Party
from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils
from metadamage.progressbar import progress
numpyro.enable_x64()
logger = logging.getLogger(__name__)
#%%
timeout_first_fit = 5 * 60 # 5 minutes, very first fit
timeout_subsequent_fits = 60 # 1 minute
#%%
def group_to_numpyro_data(group, cfg):
forward = cfg.substitution_bases_forward
forward_ref = forward[0]
reverse = cfg.substitution_bases_reverse
reverse_ref = reverse[0]
z = np.array(group.iloc[:15]["position"], dtype=int)
k_forward = np.array(group.iloc[:15][forward], dtype=int)
N_forward = np.array(group.iloc[:15][forward_ref], dtype=int)
k_reverse = np.array(group.iloc[-15:][reverse], dtype=int)
N_reverse = np.array(group.iloc[-15:][reverse_ref], dtype=int)
data = {
"z": np.concatenate([z, -z]),
"k": np.concatenate([k_forward, k_reverse]),
"N": np.concatenate([N_forward, N_reverse]),
}
return data
#%%
def add_tax_information(fit_result, group):
fit_result["tax_id"] = group["tax_id"].iloc[0]
fit_result["tax_name"] = group["tax_name"].iloc[0]
fit_result["tax_rank"] = group["tax_rank"].iloc[0]
def add_count_information(fit_result, group, data):
fit_result["N_alignments"] = group.N_alignments.iloc[0]
fit_result["N_z1_forward"] = data["N"][0]
fit_result["N_z1_reverse"] = data["N"][15]
fit_result["N_sum_total"] = data["N"].sum()
fit_result["N_sum_forward"] = data["N"][:15].sum()
fit_result["N_sum_reverse"] = data["N"][15:].sum()
fit_result["N_min"] = data["N"].min()
fit_result["k_sum_total"] = data["k"].sum()
fit_result["k_sum_forward"] = data["k"][:15].sum()
fit_result["k_sum_reverse"] = data["k"][15:].sum()
#%%
def fit_single_group_without_timeout(
group,
cfg,
mcmc_PMD=None,
mcmc_null=None,
):
fit_result = {}
data = group_to_numpyro_data(group, cfg)
add_tax_information(fit_result, group)
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
f, f_forward, f_reverse = fits_frequentist.make_fits(fit_result, data)
add_count_information(fit_result, group, data)
# mcmc_PMD, mcmc_null = fits_Bayesian.init_mcmcs(cfg)
if mcmc_PMD is not None and mcmc_null is not None:
fits_Bayesian.make_fits(fit_result, data, mcmc_PMD, mcmc_null)
return fit_result
def get_fit_single_group_with_timeout(timeout=60):
""" timeout in seconds """
return timeout_decorator.timeout(timeout)(fit_single_group_without_timeout)
def compute_fits_seriel(df_counts, cfg):
mcmc_PMD, mcmc_null = fits_Bayesian.init_mcmcs(cfg)
groupby = df_counts.groupby("tax_id", sort=False, observed=True)
d_fit_results = {}
fit_single_group_first_fit = get_fit_single_group_with_timeout(timeout_first_fit)
fit_single_group_subsequent_fits = get_fit_single_group_with_timeout(
timeout_subsequent_fits
)
fit_single_group = fit_single_group_first_fit
logger.info(f"Fit: Initializing fit in seriel.")
with progress:
task_fit = progress.add_task(
"task_status_fitting",
progress_type="status",
status="Fitting ",
name="Fits: ",
total=len(groupby),
)
for tax_id, group in groupby:
# break
try:
fit_result = fit_single_group(group, cfg, mcmc_PMD, mcmc_null)
d_fit_results[tax_id] = fit_result
except TimeoutError:
logger.warning(f"Fit: Timeout at tax_id {tax_id}. Skipping for now")
progress.advance(task_fit)
fit_single_group = fit_single_group_subsequent_fits
return d_fit_results
def worker(queue_in, queue_out, cfg):
mcmc_PMD, mcmc_null = fits_Bayesian.init_mcmcs(cfg)
fit_single_group_first_fit = get_fit_single_group_with_timeout(timeout_first_fit)
fit_single_group_subsequent_fits = get_fit_single_group_with_timeout(
timeout_subsequent_fits
)
# first run is patient
fit_single_group = fit_single_group_first_fit
while True:
# block=True means make a blocking call to wait for items in queue
tax_id_group = queue_in.get(block=True)
if tax_id_group is None:
break
tax_id, group = tax_id_group
try:
fit_result = fit_single_group(group, cfg, mcmc_PMD, mcmc_null)
queue_out.put((tax_id, fit_result))
except TimeoutError:
queue_out.put((tax_id, TimeoutError))
fit_single_group = fit_single_group_subsequent_fits
def compute_fits_parallel_with_progressbar(df, cfg):
# logger.info(f"Fit: Initializing fit in parallel with progressbar")
groupby = df.groupby("tax_id", sort=False, observed=True)
N_groupby = len(groupby)
N_cores = cfg.N_cores if cfg.N_cores < N_groupby else N_groupby
manager = Manager()
queue_in = manager.Queue()
queue_out = manager.Queue()
the_pool = Pool(N_cores, worker, (queue_in, queue_out, cfg))
d_fit_results = {}
with progress:
task_fit = progress.add_task(
"task_status_fitting",
progress_type="status",
status="Fitting ",
name="Fits: ",
total=N_groupby,
)
for tax_id, group in groupby:
queue_in.put((tax_id, group))
# Get and print results
for _ in range(N_groupby):
tax_id, fit_result = queue_out.get()
if fit_result is not TimeoutError:
d_fit_results[tax_id] = fit_result
else:
logger.warning(f"Fit: Timeout at tax_id {tax_id}. Skipping for now")
progress.advance(task_fit)
for _ in range(N_groupby):
queue_in.put(None)
# prevent adding anything more to the queue and wait for queue to empty
# queue_in.close()
# queue_in.join_thread()
# # join the queue until we're finished processing results
# queue_out.join()
# # not closing the Queues caused me untold heartache and suffering
# queue_in.close()
# queue_out.close()
# prevent adding anything more to the process pool and wait for all processes to finish
the_pool.close()
the_pool.join()
return d_fit_results
#%%
# def make_df_fit_predictions_from_d_fits(d_fits, cfg):
# z = np.arange(15) + 1
# position = np.concatenate([z, -z])
# # d_fit_predictions = {}
# d_fit_predictions = []
# for key, d_val in d_fits.items():
# data = {
# "tax_id": key,
# "position": position,
# "mean": d_val["mean"],
# "std": d_val["std"],
# # "median": median,
# # "hdpi_lower": hpdi[0, :],
# # "hdpi_upper": hpdi[1, :],
# }
# df_tmp = pd.DataFrame(data=data)
# # d_fit_predictions[key] = df_tmp
# d_fit_predictions.append(df_tmp)
# df_fit_predictions = pd.concat(d_fit_predictions, axis="index", ignore_index=True)
# df_fit_predictions["shortname"] = cfg.shortname
# categories = ["tax_id", "shortname"]
# df_fit_predictions = utils.downcast_dataframe(
# df_fit_predictions, categories, fully_automatic=False
# )
# return df_fit_predictions
def match_tax_id_order_in_df_fit_results(df_fit_results, df):
tax_ids_all = pd.unique(df.tax_id)
ordered = [tax_id for tax_id in tax_ids_all if tax_id in df_fit_results.index]
return df_fit_results.loc[ordered]
def make_df_fit_results_from_fit_results(fit_results, df_counts, cfg):
df_fit_results = pd.DataFrame.from_dict(fit_results, orient="index")
df_fit_results = match_tax_id_order_in_df_fit_results(df_fit_results, df_counts)
df_fit_results["shortname"] = cfg.shortname
categories = ["tax_id", "tax_name", "tax_rank", "shortname"]
df_fit_results = utils.downcast_dataframe(
df_fit_results, categories, fully_automatic=False
)
df_fit_results = df_fit_results.reset_index(drop=True)
return df_fit_results
#%%
def get_chunks(lst, n):
"""Yield successive n-sized chunks from lst."""
for i in range(0, len(lst), n):
yield lst[i : i + n]
def compute_fits_parallel_with_progressbar_chunks(df, cfg, chunk_max=1000):
logger.info(
f"Fit: Initializing fit in parallel with progressbar "
f"in chunks of size {chunk_max}."
)
d_fits_all_chunks = {}
tax_ids_unique = np.array(pd.unique(df.tax_id))
chunks = get_chunks(tax_ids_unique, chunk_max)
for chunk in chunks:
d_fits_chunk = compute_fits_parallel_with_progressbar(
df.query("tax_id in @chunk"), cfg
)
d_fits_all_chunks.update(d_fits_chunk)
return d_fits_all_chunks
def compute_fits(df_counts, cfg):
if cfg.N_cores == 1: # or len(groupby) < 10:
d_fit_results = compute_fits_seriel(df_counts, cfg)
else:
if not cfg.bayesian:
d_fit_results = compute_fits_parallel_with_progressbar(df_counts, cfg)
else:
d_fit_results = compute_fits_parallel_with_progressbar_chunks(
df_counts, cfg, chunk_max=1000
)
df_fit_results = make_df_fit_results_from_fit_results(d_fit_results, df_counts, cfg)
return df_fit_results
#%%
def extract_top_max_fits(df_counts, max_fits):
top_max_fits = (
df_counts.groupby("tax_id", observed=True)["N_alignments"]
.sum()
.nlargest(max_fits)
.index
)
df_counts_top_N = df_counts.query("tax_id in @top_max_fits")
return df_counts_top_N
def get_top_max_fits(df_counts, N_fits):
if N_fits is not None and N_fits > 0:
return df_counts.pipe(extract_top_max_fits, N_fits)
else:
return df_counts
def get_fits(df_counts, cfg):
parquet_fit_results = io.Parquet(cfg.filename_fit_results)
# parquet_fit_predictions = io.Parquet(cfg.filename_fit_predictions)
if parquet_fit_results.exists(cfg.forced):
# and parquet_fit_predictions.exists(cfg.forced)
include = [
"min_alignments",
"min_k_sum",
"substitution_bases_forward",
"substitution_bases_reverse",
"N_fits",
"shortname",
"filename",
]
metadata_cfg = cfg.to_dict()
metadata_file_fit_results = parquet_fit_results.load_metadata()
# metadata_file_fit_predictions = parquet_fit_predictions.load_metadata()
if utils.metadata_is_similar(
metadata_file_fit_results, metadata_cfg, include=include
):
# and utils.metadata_is_similar(metadata_file_fit_predictions, ...)
logger.info(f"Fit: Loading fits from parquet-file.")
df_fit_results = parquet_fit_results.load()
# df_fit_predictions = parquet_fit_predictions.load()
return df_fit_results # , df_fit_predictions
logger.info(f"Fit: Generating fits and saving to file.")
df_counts_top_N = get_top_max_fits(df_counts, cfg.N_fits)
# df = df_counts = df_counts_top_N
df_fit_results = compute_fits(df_counts_top_N, cfg) # df_fit_predictions
parquet_fit_results.save(df_fit_results, metadata=cfg.to_dict())
# parquet_fit_predictions.save(df_fit_predictions, metadata=cfg.to_dict())
return df_fit_results # , df_fit_predictions
#%%
# import arviz as az
# data_no_k = filter_out_k(data)
# def get_InferenceData(mcmc, model):
# posterior_samples = mcmc.get_samples()
# posterior_predictive = Predictive(model, posterior_samples)(Key(1), **data_no_k)
# prior = Predictive(model, num_samples=500)(Key(2), **data_no_k)
# numpyro_data = az.from_numpyro(
# mcmc,
# prior=prior,
# posterior_predictive=posterior_predictive,
# # coords={"school": np.arange(eight_school_data["J"])},
# # dims={"theta": ["school"]},
# )
# return numpyro_data
# data_PMD = get_InferenceData(mcmc_PMD, model_PMD)
# data_null = get_InferenceData(mcmc_null, model_null)
# var_names = ["A", "D_max", "q", "c", "phi"]
# az.plot_trace(data_PMD, var_names=var_names)
# az.plot_dist_comparison(data_PMD, var_names=var_names)
# az.plot_posterior(data_PMD, var_names=var_names)
# model_compare = az.compare({"PMD": data_PMD, "Null": data_null}, ic="waic", scale='deviance')
# model_compare[['rank', 'waic', 'd_waic', 'dse']]
# az.plot_compare(model_compare, insample_dev=False)
| [
"logging.getLogger",
"metadamage.utils.downcast_dataframe",
"numpy.array",
"metadamage.fits_Bayesian.init_mcmcs",
"numpyro.enable_x64",
"pandas.unique",
"pandas.DataFrame.from_dict",
"numpy.concatenate",
"metadamage.progressbar.progress.advance",
"metadamage.progressbar.progress.add_task",
"timeout_decorator.timeout",
"metadamage.fits_Bayesian.make_fits",
"metadamage.io.Parquet",
"multiprocessing.Manager",
"warnings.filterwarnings",
"metadamage.utils.metadata_is_similar",
"warnings.catch_warnings",
"multiprocessing.Pool",
"metadamage.fits_frequentist.make_fits"
] | [((786, 806), 'numpyro.enable_x64', 'numpyro.enable_x64', ([], {}), '()\n', (804, 806), False, 'import numpyro\n'), ((816, 843), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (833, 843), False, 'import logging\n'), ((1151, 1199), 'numpy.array', 'np.array', (["group.iloc[:15]['position']"], {'dtype': 'int'}), "(group.iloc[:15]['position'], dtype=int)\n", (1159, 1199), True, 'import numpy as np\n'), ((1217, 1262), 'numpy.array', 'np.array', (['group.iloc[:15][forward]'], {'dtype': 'int'}), '(group.iloc[:15][forward], dtype=int)\n', (1225, 1262), True, 'import numpy as np\n'), ((1279, 1328), 'numpy.array', 'np.array', (['group.iloc[:15][forward_ref]'], {'dtype': 'int'}), '(group.iloc[:15][forward_ref], dtype=int)\n', (1287, 1328), True, 'import numpy as np\n'), ((1346, 1392), 'numpy.array', 'np.array', (['group.iloc[-15:][reverse]'], {'dtype': 'int'}), '(group.iloc[-15:][reverse], dtype=int)\n', (1354, 1392), True, 'import numpy as np\n'), ((1409, 1459), 'numpy.array', 'np.array', (['group.iloc[-15:][reverse_ref]'], {'dtype': 'int'}), '(group.iloc[-15:][reverse_ref], dtype=int)\n', (1417, 1459), True, 'import numpy as np\n'), ((3294, 3323), 'metadamage.fits_Bayesian.init_mcmcs', 'fits_Bayesian.init_mcmcs', (['cfg'], {}), '(cfg)\n', (3318, 3323), False, 'from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils\n'), ((4474, 4503), 'metadamage.fits_Bayesian.init_mcmcs', 'fits_Bayesian.init_mcmcs', (['cfg'], {}), '(cfg)\n', (4498, 4503), False, 'from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils\n'), ((5592, 5601), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (5599, 5601), False, 'from multiprocessing import current_process, Manager, Pool, Process, Queue\n'), ((5680, 5729), 'multiprocessing.Pool', 'Pool', (['N_cores', 'worker', '(queue_in, queue_out, cfg)'], {}), '(N_cores, worker, (queue_in, queue_out, cfg))\n', (5684, 5729), False, 'from multiprocessing import current_process, Manager, Pool, Process, Queue\n'), ((8052, 8072), 'pandas.unique', 'pd.unique', (['df.tax_id'], {}), '(df.tax_id)\n', (8061, 8072), True, 'import pandas as pd\n'), ((8289, 8340), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['fit_results'], {'orient': '"""index"""'}), "(fit_results, orient='index')\n", (8311, 8340), True, 'import pandas as pd\n'), ((8561, 8636), 'metadamage.utils.downcast_dataframe', 'utils.downcast_dataframe', (['df_fit_results', 'categories'], {'fully_automatic': '(False)'}), '(df_fit_results, categories, fully_automatic=False)\n', (8585, 8636), False, 'from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils\n'), ((10521, 10557), 'metadamage.io.Parquet', 'io.Parquet', (['cfg.filename_fit_results'], {}), '(cfg.filename_fit_results)\n', (10531, 10557), False, 'from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils\n'), ((1487, 1510), 'numpy.concatenate', 'np.concatenate', (['[z, -z]'], {}), '([z, -z])\n', (1501, 1510), True, 'import numpy as np\n'), ((1525, 1563), 'numpy.concatenate', 'np.concatenate', (['[k_forward, k_reverse]'], {}), '([k_forward, k_reverse])\n', (1539, 1563), True, 'import numpy as np\n'), ((1578, 1616), 'numpy.concatenate', 'np.concatenate', (['[N_forward, N_reverse]'], {}), '([N_forward, N_reverse])\n', (1592, 1616), True, 'import numpy as np\n'), ((2652, 2677), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (2675, 2677), False, 'import warnings\n'), ((2687, 2720), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (2710, 2720), False, 'import warnings\n'), ((2755, 2799), 'metadamage.fits_frequentist.make_fits', 'fits_frequentist.make_fits', (['fit_result', 'data'], {}), '(fit_result, data)\n', (2781, 2799), False, 'from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils\n'), ((2974, 3036), 'metadamage.fits_Bayesian.make_fits', 'fits_Bayesian.make_fits', (['fit_result', 'data', 'mcmc_PMD', 'mcmc_null'], {}), '(fit_result, data, mcmc_PMD, mcmc_null)\n', (2997, 3036), False, 'from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils\n'), ((3155, 3189), 'timeout_decorator.timeout', 'timeout_decorator.timeout', (['timeout'], {}), '(timeout)\n', (3180, 3189), False, 'import timeout_decorator\n'), ((5792, 5912), 'metadamage.progressbar.progress.add_task', 'progress.add_task', (['"""task_status_fitting"""'], {'progress_type': '"""status"""', 'status': '"""Fitting """', 'name': '"""Fits: """', 'total': 'N_groupby'}), "('task_status_fitting', progress_type='status', status=\n 'Fitting ', name='Fits: ', total=N_groupby)\n", (5809, 5912), False, 'from metadamage.progressbar import progress\n'), ((9151, 9171), 'pandas.unique', 'pd.unique', (['df.tax_id'], {}), '(df.tax_id)\n', (9160, 9171), True, 'import pandas as pd\n'), ((11183, 11271), 'metadamage.utils.metadata_is_similar', 'utils.metadata_is_similar', (['metadata_file_fit_results', 'metadata_cfg'], {'include': 'include'}), '(metadata_file_fit_results, metadata_cfg, include=\n include)\n', (11208, 11271), False, 'from metadamage import counts, fits_Bayesian, fits_frequentist, io, utils\n'), ((4290, 4316), 'metadamage.progressbar.progress.advance', 'progress.advance', (['task_fit'], {}), '(task_fit)\n', (4306, 4316), False, 'from metadamage.progressbar import progress\n'), ((6390, 6416), 'metadamage.progressbar.progress.advance', 'progress.advance', (['task_fit'], {}), '(task_fit)\n', (6406, 6416), False, 'from metadamage.progressbar import progress\n')] |
"""
Interpolation for BASTA: Along a track
"""
import os
import time
import h5py
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
from basta import utils_general as gu
from basta import utils_seismic as su
from basta import interpolation_helpers as ih
# ======================================================================================
# Interpolation helper routines
# ======================================================================================
def _calc_npoints_freqs(libitem, index2d, freq_resolution, verbose=False, debug=False):
"""
Estimate the number of points required for interpolation given a desired frequency
resolution, by calculating the largest variation of any frequency in a given track.
Currently it is only based on l=0.
Parameters
----------
libitem : h5py_group
A track in the form of an HDF5 group from a BASTA grid
index2d : array
Mask for libitem to obtain selected entries (note the shape for osc indexing!)
freq_resolution : float
Required frequency resolution in microHertz
verbose : bool, optional
Print info.
extra_debug : bool, optional
Print extra information on all frequencies. Warning: Huge output.
Returns
-------
Npoints : float
Estimated number of points required in interpolated track
"""
if debug:
print(" l = 0 frequency information for i(nitial) and f(inal) model:")
# Extract oscillation arrays for first and final point of the selection
# --> Only for l=0
fullosc = libitem["osc"][index2d].reshape((-1, 2))
fullosckey = libitem["osckey"][index2d].reshape((-1, 2))
osc = []
osckey = []
for i in [0, -1]:
osckeyl0, oscl0 = su.get_givenl(
l=0,
osc=su.transform_obj_array(fullosc[i]),
osckey=su.transform_obj_array(fullosckey[i]),
)
osckey.append(osckeyl0[1])
osc.append(oscl0[0])
# Calculate difference between first and final frequency of given order
freqdiffs = []
for nval, freqstart in zip(osckey[0], osc[0]):
nmask = osckey[1] == nval
if any(nmask):
freqend = osc[1][nmask][0]
freqdiff = np.abs(freqend - freqstart)
freqdiffs.append(freqdiff)
if debug:
print(
"{0}n = {1:2}, freq_i = {2:8.3f}, freq_f = {3:8.3f},".format(
4 * " ", nval, freqstart, freqend
),
"Delta(f) = {0:7.3f}".format(freqdiff),
)
# Obtain quantities in the notation of <NAME>
DELTA = max(freqdiffs)
Npoints = int(DELTA / freq_resolution)
if debug:
print("\n DELTA = {0:6.2f} muHz ==> Npoints = {1:4}".format(DELTA, Npoints))
elif verbose:
print("DELTA = {0:6.2f} muHz ==> Npoints = {1:4}".format(DELTA, Npoints))
return Npoints
def _calc_npoints(libitem, index, resolution, verbose=False, debug=False):
"""
Estimate the number of points required for interpolation given a desired resolution,
by calculating the variation in a given track.
Parameters
----------
libitem : h5py_group
A track in the form of an HDF5 group from a BASTA grid
index : array
Mask for libitem to obtain selected entries
resolution : dict
Required resolution. Must contain "param" with a valid parameter name from the
grid and "value" with the desired precision/resolution.
verbose : bool, optional
Print info.
extra_debug : bool, optional
Print extra information on all frequencies. Warning: Huge output.
Returns
-------
Npoints : float
Estimated number of points required in interpolated track
"""
param = resolution["param"]
paramvec = libitem[param][index]
DELTA = np.abs(paramvec[-1] - paramvec[0])
Npoints = int(DELTA / resolution["value"])
if debug:
print(
" DELTA({0}) = {1:6.2f} ==> Npoints = {2:4}".format(
param, DELTA, Npoints
)
)
elif verbose:
print("DELTA = {0:6.2f} ==> Npoints = {1:4}".format(DELTA, Npoints))
return Npoints
# ======================================================================================
# Interpolation along tracks
# ======================================================================================
def _interpolate_along(
grid,
outfile,
limits,
resolution,
intpolparams,
basepath="grid/",
intpol_freqs=False,
debug=False,
verbose=False,
):
"""
Select a part of a BASTA grid based on observational limits. Interpolate all
quantities in the tracks within that part and write to a new grid file.
Parameters
----------
grid : h5py file
Handle of grid to process
outfile : h5py file
Handle of output grid to write to
limits : dict
Constraints on the selection in the grid. Must be valid parameter names in the
grid. Example of the form: {'Teff': [5000, 6000], 'FeH': [-0.2, 0.2]}
resolution : dict
Required resolution. Must contain "param" with a valid parameter name from the
grid and "value" with the desired precision/resolution. A special case of "param"
is "freq" (or "freqs"), in which case it is the required frequency resolution in
microHertz (this corresponds to the old input "freq_resolution").
basepath : str, optional
Path in the grid where the tracks are stored. The default value applies to
standard grids of tracks. It must be modified for isochrones!
debug : bool, optional
Activate debug mode. Will print extra info and create plots of the selection.
WILL ONLY WORK PROPERLY FOR FREQUENCIES AND GRIDS (NOT DNU OR ISOCHRONES)!
verbose : bool, optional
Print information to console and make simple diagnostic plots. Will be
automatically set by debug.
Returns
-------
grid : h5py file
Handle of grid to process
outfile : h5py file
Handle of output grid to write to
fail : bool
Boolean to indicate whether the routine has failed or succeeded
"""
print("\n*******************\nAlong interpolation\n*******************")
#
# *** BLOCK 0: Initial preparation ***
#
if "grid" in basepath:
isomode = False
modestr = "track"
headvars = [
"tracks",
"massini",
"FeHini",
"MeHini",
"yini",
"alphaMLT",
"ove",
"gcut",
"eta",
"alphaFe",
"dif",
]
else:
isomode = True
modestr = "isochrone"
headvars = [
"FeH",
"FeHini",
"MeH",
"MeHini",
"xini",
"yini",
"zini",
"alphaMLT",
"ove",
"gcut",
"eta",
"alphaFe",
"dif",
]
overwrite = grid == outfile
if debug:
verbose = True
if verbose:
# Initialize logging to file (duplicate stdout)
logdir = "intpollogs"
if not os.path.exists(logdir):
os.mkdir(logdir)
logfile = open(
os.path.join(
logdir, "intpol_{0}.txt".format(time.strftime("%Y%m%dT%H%M%S"))
),
"w",
)
# Initialise diagnostic plot(s) and print info
plt.close("all")
fig1, ax1 = plt.subplots() # Full grid (Kiel)
fig2, ax2 = plt.subplots() # Only selection (Kiel)
fig3, ax3 = plt.subplots() # Age/mass information
print("Interpolating in {0}s with basepath '{1}'".format(modestr, basepath))
print("Limiting the parameters:\n{0}".format(limits))
print(
"Required resolution in {0}: {1}".format(
resolution["param"], resolution["value"]
)
)
# For tracks, interpolate sampling in age. For isochrones, in mass
if isomode:
baseparam = "massfin"
dname = "dmass"
else:
baseparam = "age"
dname = "dage"
# Change to desired baseparameter, if requested
if resolution["baseparam"] != "default":
baseparam = resolution["baseparam"]
# Nicknames for resolution in frequency
freqres = ["freq", "freqs", "frequency", "frequencies", "osc"]
# Get frequency interpolation limits from limits dict
if "freqs" in limits:
freqlims = limits["freqs"]
del limits["freqs"]
# Construct selectedmodels
print("Locating limits and restricting sub-grid ... ", flush=True)
selectedmodels = ih.get_selectedmodels(grid, basepath, limits, cut=False)
#
# *** LOOP THROUGH THE GRID ONE TRACK/ISOCHRONE AT A TIME ***
#
# Before running the actual loop, all tracks/isochrones are counted to better
# estimate the progress.
intcount = 0
for _, tracks in grid[basepath].items():
intcount += len(tracks.items())
# Use a progress bar (with the package tqdm; will write to stderr)
print("\nInterpolating along {0} tracks/isochrones ...".format(intcount))
pbar = tqdm(total=intcount, desc="--> Progress", ascii=True)
# Do the actual loop
trackcounter = 0
fail = False
# For tracks, the outer loop is just a single iteration
# For isochrones, it is the metallicities one at a time
for gname, tracks in grid[basepath].items():
if fail:
break
for noingrid, (name, libitem) in enumerate(tracks.items()):
# Update progress bar in the start of the loop to count skipped tracks
pbar.update(1)
if libitem["FeHini_weight"][()] < 0:
continue
# Make sure the user provides a valid parameter as resolution requirement
# --> In case of failure, assume the user wanted a dnu-related parameter
if resolution["param"].lower() not in freqres:
try:
libitem[resolution["param"]]
except KeyError:
print("\nCRITICAL ERROR!")
print(
"The resolution parameter '{0}'".format(resolution["param"]),
"is not found in the grid!",
)
paramguess = [
key for key in libitem.keys() if key.startswith("dnu")
]
print("Did you perhaps mean one of these:", paramguess)
print("Please provide a valid name! I WILL ABORT NOW...\n")
fail = True
break
if verbose:
pltTeff = gu.h5py_to_array(libitem["Teff"])
pltlogg = gu.h5py_to_array(libitem["logg"])
pltbase = gu.h5py_to_array(libitem[baseparam])
ax1.plot(pltTeff, pltlogg, color="darkgrey", alpha=0.2)
ax3.plot(pltbase, pltTeff, color="darkgrey", alpha=0.2)
#
# *** BLOCK 1: Obtain reduced tracks ***
#
# Check which models have parameters within limits to define mask
if os.path.join(gname, name) in selectedmodels:
index = selectedmodels[os.path.join(gname, name)]
# Make special 2D mask (for the frequency arrays). Necessary because h5py
# does not support 1D bool array indexing for non 1D data!
if intpol_freqs:
index2d = np.array(np.transpose([index, index]))
#
# *** BLOCK 2: Define interpolation mesh ***
#
# Calc number of points required and make uniform mesh
if resolution["param"].lower() in freqres:
Npoints = _calc_npoints_freqs(
libitem=libitem,
index2d=index2d,
freq_resolution=resolution["value"],
verbose=verbose,
debug=debug,
)
else:
Npoints = _calc_npoints(
libitem=libitem,
index=index,
resolution=resolution,
verbose=verbose,
debug=debug,
)
if Npoints < sum(index):
print(
"Stopped interpolation along {0} as the number of points would decrease from {1} to {2}".format(
name, sum(index), Npoints
)
)
continue
# Isochrones: mass | Tracks: age
basevec = libitem[baseparam][index]
intpolmesh = np.linspace(
start=basevec[0], stop=basevec[-1], num=Npoints
)
if debug:
print(
"{0}Range in {1} = [{2:4.3f}, {3:4.3f}]".format(
4 * " ", baseparam, basevec[0], basevec[-1]
)
)
#
# *** BLOCK 3: Interpolate in all quantities but frequencies ***
#
# Different cases...
# --> No need to interpolate INTER-track-weights (which is a scalar)
# --> INTRA-track weights (for the base parameter) is recalculated
# --> Name of orignal gong files have no meaning anymore
# --> Frequency arrays are tricky and are treated seperately
tmpparam = {}
for key in libitem.keys():
keypath = os.path.join(libitem.name, key)
if "_weight" in key:
newparam = libitem[key][()]
elif "name" in key:
newparam = len(intpolmesh) * [b"interpolated-entry"]
elif "osc" in key:
continue
elif key in intpolparams:
newparam = ih._interpolation_wrapper(
basevec, libitem[key][index], intpolmesh
)
elif key in headvars:
newparam = np.ones(Npoints) * libitem[key][0]
else:
continue
# Delete old entry, write new entry
if overwrite:
del outfile[keypath]
outfile[keypath] = newparam
# Storage for plotting the Kiel diagram
if key in ["Teff", "logg", "age", "massfin"]:
tmpparam[key] = newparam
# Bayesian weight along track
par = "massfin" if dname == "dmass" else "age"
parpath = os.path.join(libitem.name, par)
keypath = os.path.join(libitem.name, dname)
if overwrite:
del outfile[keypath]
outfile[keypath] = ih.bay_weights(outfile[parpath])
#
# *** BLOCK 4: Interpolate in frequencies ***
#
# No frequencies present in isochrones!
if not isomode and intpol_freqs:
fullosc = libitem["osc"][index2d].reshape((-1, 2))
fullosckey = libitem["osckey"][index2d].reshape((-1, 2))
osckeylist, osclist = ih.interpolate_frequencies(
fullosc=fullosc,
fullosckey=fullosckey,
agevec=basevec,
newagevec=intpolmesh,
freqlims=freqlims,
verbose=verbose,
debug=debug,
trackid=noingrid + 1,
)
# Delete the old entries
if overwrite:
del outfile[os.path.join(libitem.name, "osc")]
del outfile[os.path.join(libitem.name, "osckey")]
# Writing variable length arrays to an HDF5 file is a bit tricky,
# but can be done using datasets with a special datatype.
# --> Here we follow the approach from BASTA/make_tracks
dsetosc = outfile.create_dataset(
name=os.path.join(libitem.name, "osc"),
shape=(Npoints, 2),
dtype=h5py.special_dtype(vlen=np.float),
)
dsetosckey = outfile.create_dataset(
name=os.path.join(libitem.name, "osckey"),
shape=(Npoints, 2),
dtype=h5py.special_dtype(vlen=np.int),
)
for i in range(Npoints):
dsetosc[i] = osclist[i]
dsetosckey[i] = osckeylist[i]
trackcounter += 1
#
# *** BLOCK 1b: Handle tracks without any models inside selection ***
#
else:
# Flag the empty tracks with a single entry: A negative weight
# (note: requires at least BASTA v0.28 to be useful)
if overwrite:
del outfile[os.path.join(libitem.name, "FeHini_weight")]
outfile[os.path.join(libitem.name, "FeHini_weight")] = -1
if verbose:
print()
# Add information to the diagnostic plots
if verbose and False:
pltTeff = gu.h5py_to_array(libitem["Teff"])
pltlogg = gu.h5py_to_array(libitem["logg"])
pltbase = gu.h5py_to_array(libitem[baseparam])
ax1.plot(
pltTeff[index],
pltlogg[index],
color="#482F76",
lw=4,
alpha=0.8,
zorder=2.5,
)
ax2.plot(
pltTeff[index],
pltlogg[index],
"x",
color="#482F76",
alpha=0.4,
)
ax3.plot(
pltbase[index],
pltTeff[index],
color="#482F76",
lw=4,
alpha=0.8,
zorder=2.5,
)
if any(index):
ax2.plot(
tmpparam["Teff"],
tmpparam["logg"],
"-",
color="#56B4E9",
alpha=0.5,
)
#
# *** STOP! HERE THE TRACK LOOP IS FINISHED!
#
pbar.close()
# Re-add frequency limits for combined approaches
if intpol_freqs:
limits["freqs"] = freqlims
# Finish debugging plot with some decoration
if verbose:
print("\nDone! Finishing diagnostic plots!")
ax1.set_xlabel("Teff / K")
ax1.set_ylabel("log g")
ax1.invert_xaxis()
ax1.invert_yaxis()
fig1.savefig("intpol_diagnostic_kiel.pdf", bbox_inches="tight")
ax2.set_xlabel("Teff / K")
ax2.set_ylabel("log g")
ax2.invert_xaxis()
ax2.invert_yaxis()
fig2.savefig("intpol_diagnostic_kiel-zoom.pdf", bbox_inches="tight")
ax3.set_xlabel("Age / Myr" if baseparam == "age" else "Mass / Msun")
ax3.set_ylabel("Teff / K")
fig3.savefig("intpol_diagnostic_{0}.pdf".format(baseparam), bbox_inches="tight")
print("\nIn total {0} {1}(s) interpolated!\n".format(trackcounter, modestr))
print("Interpolation process finished!")
return grid, outfile, fail
| [
"basta.interpolation_helpers._interpolation_wrapper",
"numpy.abs",
"os.path.exists",
"numpy.ones",
"basta.interpolation_helpers.interpolate_frequencies",
"tqdm.tqdm",
"basta.utils_seismic.transform_obj_array",
"os.path.join",
"basta.utils_general.h5py_to_array",
"time.strftime",
"matplotlib.pyplot.close",
"basta.interpolation_helpers.bay_weights",
"numpy.linspace",
"basta.interpolation_helpers.get_selectedmodels",
"os.mkdir",
"h5py.special_dtype",
"numpy.transpose",
"matplotlib.pyplot.subplots"
] | [((3899, 3933), 'numpy.abs', 'np.abs', (['(paramvec[-1] - paramvec[0])'], {}), '(paramvec[-1] - paramvec[0])\n', (3905, 3933), True, 'import numpy as np\n'), ((8800, 8856), 'basta.interpolation_helpers.get_selectedmodels', 'ih.get_selectedmodels', (['grid', 'basepath', 'limits'], {'cut': '(False)'}), '(grid, basepath, limits, cut=False)\n', (8821, 8856), True, 'from basta import interpolation_helpers as ih\n'), ((9311, 9364), 'tqdm.tqdm', 'tqdm', ([], {'total': 'intcount', 'desc': '"""--> Progress"""', 'ascii': '(True)'}), "(total=intcount, desc='--> Progress', ascii=True)\n", (9315, 9364), False, 'from tqdm import tqdm\n'), ((7585, 7601), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (7594, 7601), True, 'import matplotlib.pyplot as plt\n'), ((7622, 7636), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (7634, 7636), True, 'import matplotlib.pyplot as plt\n'), ((7677, 7691), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (7689, 7691), True, 'import matplotlib.pyplot as plt\n'), ((7737, 7751), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (7749, 7751), True, 'import matplotlib.pyplot as plt\n'), ((2257, 2284), 'numpy.abs', 'np.abs', (['(freqend - freqstart)'], {}), '(freqend - freqstart)\n', (2263, 2284), True, 'import numpy as np\n'), ((7296, 7318), 'os.path.exists', 'os.path.exists', (['logdir'], {}), '(logdir)\n', (7310, 7318), False, 'import os\n'), ((7332, 7348), 'os.mkdir', 'os.mkdir', (['logdir'], {}), '(logdir)\n', (7340, 7348), False, 'import os\n'), ((1823, 1857), 'basta.utils_seismic.transform_obj_array', 'su.transform_obj_array', (['fullosc[i]'], {}), '(fullosc[i])\n', (1845, 1857), True, 'from basta import utils_seismic as su\n'), ((1878, 1915), 'basta.utils_seismic.transform_obj_array', 'su.transform_obj_array', (['fullosckey[i]'], {}), '(fullosckey[i])\n', (1900, 1915), True, 'from basta import utils_seismic as su\n'), ((10856, 10889), 'basta.utils_general.h5py_to_array', 'gu.h5py_to_array', (["libitem['Teff']"], {}), "(libitem['Teff'])\n", (10872, 10889), True, 'from basta import utils_general as gu\n'), ((10916, 10949), 'basta.utils_general.h5py_to_array', 'gu.h5py_to_array', (["libitem['logg']"], {}), "(libitem['logg'])\n", (10932, 10949), True, 'from basta import utils_general as gu\n'), ((10976, 11012), 'basta.utils_general.h5py_to_array', 'gu.h5py_to_array', (['libitem[baseparam]'], {}), '(libitem[baseparam])\n', (10992, 11012), True, 'from basta import utils_general as gu\n'), ((11332, 11357), 'os.path.join', 'os.path.join', (['gname', 'name'], {}), '(gname, name)\n', (11344, 11357), False, 'import os\n'), ((12975, 13035), 'numpy.linspace', 'np.linspace', ([], {'start': 'basevec[0]', 'stop': 'basevec[-1]', 'num': 'Npoints'}), '(start=basevec[0], stop=basevec[-1], num=Npoints)\n', (12986, 13035), True, 'import numpy as np\n'), ((15080, 15111), 'os.path.join', 'os.path.join', (['libitem.name', 'par'], {}), '(libitem.name, par)\n', (15092, 15111), False, 'import os\n'), ((15138, 15171), 'os.path.join', 'os.path.join', (['libitem.name', 'dname'], {}), '(libitem.name, dname)\n', (15150, 15171), False, 'import os\n'), ((15278, 15310), 'basta.interpolation_helpers.bay_weights', 'ih.bay_weights', (['outfile[parpath]'], {}), '(outfile[parpath])\n', (15292, 15310), True, 'from basta import interpolation_helpers as ih\n'), ((17876, 17909), 'basta.utils_general.h5py_to_array', 'gu.h5py_to_array', (["libitem['Teff']"], {}), "(libitem['Teff'])\n", (17892, 17909), True, 'from basta import utils_general as gu\n'), ((17936, 17969), 'basta.utils_general.h5py_to_array', 'gu.h5py_to_array', (["libitem['logg']"], {}), "(libitem['logg'])\n", (17952, 17969), True, 'from basta import utils_general as gu\n'), ((17996, 18032), 'basta.utils_general.h5py_to_array', 'gu.h5py_to_array', (['libitem[baseparam]'], {}), '(libitem[baseparam])\n', (18012, 18032), True, 'from basta import utils_general as gu\n'), ((7447, 7477), 'time.strftime', 'time.strftime', (['"""%Y%m%dT%H%M%S"""'], {}), "('%Y%m%dT%H%M%S')\n", (7460, 7477), False, 'import time\n'), ((11416, 11441), 'os.path.join', 'os.path.join', (['gname', 'name'], {}), '(gname, name)\n', (11428, 11441), False, 'import os\n'), ((13896, 13927), 'os.path.join', 'os.path.join', (['libitem.name', 'key'], {}), '(libitem.name, key)\n', (13908, 13927), False, 'import os\n'), ((15705, 15889), 'basta.interpolation_helpers.interpolate_frequencies', 'ih.interpolate_frequencies', ([], {'fullosc': 'fullosc', 'fullosckey': 'fullosckey', 'agevec': 'basevec', 'newagevec': 'intpolmesh', 'freqlims': 'freqlims', 'verbose': 'verbose', 'debug': 'debug', 'trackid': '(noingrid + 1)'}), '(fullosc=fullosc, fullosckey=fullosckey, agevec=\n basevec, newagevec=intpolmesh, freqlims=freqlims, verbose=verbose,\n debug=debug, trackid=noingrid + 1)\n', (15731, 15889), True, 'from basta import interpolation_helpers as ih\n'), ((17655, 17698), 'os.path.join', 'os.path.join', (['libitem.name', '"""FeHini_weight"""'], {}), "(libitem.name, 'FeHini_weight')\n", (17667, 17698), False, 'import os\n'), ((11681, 11709), 'numpy.transpose', 'np.transpose', (['[index, index]'], {}), '([index, index])\n', (11693, 11709), True, 'import numpy as np\n'), ((17586, 17629), 'os.path.join', 'os.path.join', (['libitem.name', '"""FeHini_weight"""'], {}), "(libitem.name, 'FeHini_weight')\n", (17598, 17629), False, 'import os\n'), ((16212, 16245), 'os.path.join', 'os.path.join', (['libitem.name', '"""osc"""'], {}), "(libitem.name, 'osc')\n", (16224, 16245), False, 'import os\n'), ((16283, 16319), 'os.path.join', 'os.path.join', (['libitem.name', '"""osckey"""'], {}), "(libitem.name, 'osckey')\n", (16295, 16319), False, 'import os\n'), ((16646, 16679), 'os.path.join', 'os.path.join', (['libitem.name', '"""osc"""'], {}), "(libitem.name, 'osc')\n", (16658, 16679), False, 'import os\n'), ((16755, 16788), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'vlen': 'np.float'}), '(vlen=np.float)\n', (16773, 16788), False, 'import h5py\n'), ((16898, 16934), 'os.path.join', 'os.path.join', (['libitem.name', '"""osckey"""'], {}), "(libitem.name, 'osckey')\n", (16910, 16934), False, 'import os\n'), ((17010, 17041), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'vlen': 'np.int'}), '(vlen=np.int)\n', (17028, 17041), False, 'import h5py\n'), ((14291, 14358), 'basta.interpolation_helpers._interpolation_wrapper', 'ih._interpolation_wrapper', (['basevec', 'libitem[key][index]', 'intpolmesh'], {}), '(basevec, libitem[key][index], intpolmesh)\n', (14316, 14358), True, 'from basta import interpolation_helpers as ih\n'), ((14490, 14506), 'numpy.ones', 'np.ones', (['Npoints'], {}), '(Npoints)\n', (14497, 14506), True, 'import numpy as np\n')] |
"""Parse DryMass definitions.py file"""
from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive
from sphinx.util.nodes import nested_parse_with_titles
from docutils import nodes
from drymass.cli.definitions import config
class IncludeDirective(Directive):
required_arguments = 1
optional_arguments = 0
def run(self):
sec = self.arguments[0]
content = config[sec]
rst = []
for key in content:
# get data
if len(content[key]) == 3:
default, func, doc = content[key]
notes = None
elif len(content[key]) == 4:
default, func, doc, notes = content[key]
# parse dtype
dtype = func.__name__
if dtype in ["float", "int", "str"]:
str_dtype = ":class:`{}`".format(dtype)
else:
str_dtype = ":func:`{0} <drymass.cli.parse_funcs.{0}>`".format(
dtype)
# add list item
rst.append("* | **{}** = {} ({}) -- {} ".format(key, default,
str_dtype, doc))
if notes is not None:
# add notes
rst.append(" | {}".format(notes.strip()))
rst.append("")
vl = ViewList(rst, "fakefile.rst")
# Create a node.
node = nodes.section()
node.document = self.state.document
# Parse the rst.
nested_parse_with_titles(self.state, vl, node)
return node.children
def setup(app):
app.add_directive('include_definition', IncludeDirective)
return {'version': '0.1'} # identifies the version of our extension
| [
"docutils.nodes.section",
"sphinx.util.nodes.nested_parse_with_titles",
"docutils.statemachine.ViewList"
] | [((1351, 1380), 'docutils.statemachine.ViewList', 'ViewList', (['rst', '"""fakefile.rst"""'], {}), "(rst, 'fakefile.rst')\n", (1359, 1380), False, 'from docutils.statemachine import ViewList\n'), ((1421, 1436), 'docutils.nodes.section', 'nodes.section', ([], {}), '()\n', (1434, 1436), False, 'from docutils import nodes\n'), ((1514, 1560), 'sphinx.util.nodes.nested_parse_with_titles', 'nested_parse_with_titles', (['self.state', 'vl', 'node'], {}), '(self.state, vl, node)\n', (1538, 1560), False, 'from sphinx.util.nodes import nested_parse_with_titles\n')] |
# Copyright 2016-2018 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, absolute_import, division
import os
from future import standard_library
standard_library.install_aliases()
from future.builtins import *
import sys
import io
from . import FileReferenceBase, ENCODING, get_target_path
PYVERSION = sys.version_info.major
class StringContainer(FileReferenceBase):
""" In-memory file stored as a text string
Args:
contents (str OR bytes): contents of the file
name (str): descriptive name for the container (highly optional)
encoding (str): default encoding (for both encoding strings and decoding bytes). If
not specified, default system encoding is used (usually utf-8)
Note:
This handles both unicode (known as `unicode` in py2 and `str` in py3) and raw bytestrings
(`str` in py2 and `bytes` in py3).
"""
def __init__(self, contents, name=None, encoding=ENCODING):
self.source = name
self.sourcetype = 'runtime'
self.localpath = None
self._contents = contents
self.encoding = encoding
def open(self, mode='r', encoding=None):
"""Return file-like object
Args:
mode (str): access mode (only reading modes are supported)
encoding (str): encoding type (only for binary access)
Returns:
io.BytesIO OR io.TextIOWrapper: buffer accessing the file as bytes or characters
"""
access_type = self._get_access_type(mode)
if encoding is None:
encoding = self.encoding
# here, we face the task of returning the correct data type
if access_type == 'b':
if not self._isbytes:
content = self._contents.encode(encoding) # unicode in, bytes out
else:
content = self._contents # bytes in, bytes out
return io.BytesIO(content)
else:
assert access_type == 't'
if PYVERSION == 2 and self._isbytes:
return io.BytesIO(self._contents) # bytes in, bytes out (python 2 only)
elif self._isbytes:
content = self._contents.decode(encoding) # bytes in, unicode out
else:
content = self._contents # unicode in, unicode out
return io.StringIO(content)
def size_bytes(self, encoding=ENCODING):
if not self._isbytes:
return len(self._contents.encode(encoding))
else:
return len(self._contents)
@property
def _isbytes(self):
if PYVERSION == 2:
return not isinstance(self._contents, unicode)
else:
assert PYVERSION >= 3
return isinstance(self._contents, bytes)
def put(self, filename, encoding=None):
"""Write the file to the given path
Args:
filename (str): path to write this file to
encoding (str): file encoding (default: system default)
Returns:
LocalFile: reference to the copy of the file stored at ``filename``
"""
from . import LocalFile
if os.path.isdir(filename) and self.source is None:
raise ValueError("Cannot write this object to "
"directory %s without an explicit filename." % filename)
target = get_target_path(filename, self.source)
if encoding is None:
encoding = self.encoding
if self._isbytes:
kwargs = {'mode': 'wb'}
else:
kwargs = {'mode': 'w', 'encoding': encoding}
with open(target, **kwargs) as outfile:
outfile.write(self._contents)
return LocalFile(target, encoded_with=encoding)
| [
"future.standard_library.install_aliases",
"io.StringIO",
"os.path.isdir",
"io.BytesIO"
] | [((694, 728), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (726, 728), False, 'from future import standard_library\n'), ((2448, 2467), 'io.BytesIO', 'io.BytesIO', (['content'], {}), '(content)\n', (2458, 2467), False, 'import io\n'), ((2878, 2898), 'io.StringIO', 'io.StringIO', (['content'], {}), '(content)\n', (2889, 2898), False, 'import io\n'), ((3691, 3714), 'os.path.isdir', 'os.path.isdir', (['filename'], {}), '(filename)\n', (3704, 3714), False, 'import os\n'), ((2592, 2618), 'io.BytesIO', 'io.BytesIO', (['self._contents'], {}), '(self._contents)\n', (2602, 2618), False, 'import io\n')] |
"""
Copyright 2018 Edmunds.<EMAIL>, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from classes.app import App
from classes.headers import Headers
from classes.mytime import MyTime
def test_user_agent_generation_for_past_w_identifier():
mytime = MyTime(epoch=1520020741)
stage = "local"
step = 1234
base_url = "http://shadowreader.example.com"
app = App(
name="my-test-app",
replay_start_time=mytime,
loop_duration=60,
base_url=base_url,
identifier="qa",
rate=100,
baseline=0,
)
headers = Headers(shadowreader_type="past", stage=stage, app=app, step=step).headers
user_agent = headers["x-request-id"]
print(headers)
assert user_agent == "sr_local_past_qa_my-test-app_03-02-2018-19-59_60m_20"
def test_user_agent_generation_for_past():
mytime = MyTime(epoch=1520020741)
stage = "local"
step = 1234
base_url = "http://shadowreader.example.com"
app = App(
name="my-test-app",
replay_start_time=mytime,
loop_duration=60,
base_url=base_url,
rate=100,
baseline=0,
)
headers = Headers(shadowreader_type="past", stage=stage, app=app, step=step).headers
user_agent = headers["x-request-id"]
print(headers)
assert user_agent == f"sr_local_past_{base_url}_my-test-app_03-02-2018-19-59_60m_20"
| [
"classes.headers.Headers",
"classes.app.App",
"classes.mytime.MyTime"
] | [((740, 764), 'classes.mytime.MyTime', 'MyTime', ([], {'epoch': '(1520020741)'}), '(epoch=1520020741)\n', (746, 764), False, 'from classes.mytime import MyTime\n'), ((860, 989), 'classes.app.App', 'App', ([], {'name': '"""my-test-app"""', 'replay_start_time': 'mytime', 'loop_duration': '(60)', 'base_url': 'base_url', 'identifier': '"""qa"""', 'rate': '(100)', 'baseline': '(0)'}), "(name='my-test-app', replay_start_time=mytime, loop_duration=60,\n base_url=base_url, identifier='qa', rate=100, baseline=0)\n", (863, 989), False, 'from classes.app import App\n'), ((1336, 1360), 'classes.mytime.MyTime', 'MyTime', ([], {'epoch': '(1520020741)'}), '(epoch=1520020741)\n', (1342, 1360), False, 'from classes.mytime import MyTime\n'), ((1456, 1568), 'classes.app.App', 'App', ([], {'name': '"""my-test-app"""', 'replay_start_time': 'mytime', 'loop_duration': '(60)', 'base_url': 'base_url', 'rate': '(100)', 'baseline': '(0)'}), "(name='my-test-app', replay_start_time=mytime, loop_duration=60,\n base_url=base_url, rate=100, baseline=0)\n", (1459, 1568), False, 'from classes.app import App\n'), ((1063, 1129), 'classes.headers.Headers', 'Headers', ([], {'shadowreader_type': '"""past"""', 'stage': 'stage', 'app': 'app', 'step': 'step'}), "(shadowreader_type='past', stage=stage, app=app, step=step)\n", (1070, 1129), False, 'from classes.headers import Headers\n'), ((1634, 1700), 'classes.headers.Headers', 'Headers', ([], {'shadowreader_type': '"""past"""', 'stage': 'stage', 'app': 'app', 'step': 'step'}), "(shadowreader_type='past', stage=stage, app=app, step=step)\n", (1641, 1700), False, 'from classes.headers import Headers\n')] |
"""
Longest Word
Have the function LongestWord(sen) take the sen parameter being passed and
return the largest word in the string. If there are two or more words that are
the same length, return the first word from the string with that length.
Ignore punctuation and assume sen will not be empty.
Examples
Input: "fun&!! time"
Output: time
Input: "I love dogs"
Output: love
"""
import re
def LongestWord(sen):
res = ''
for i in re.split('[^a-z|^A-Z|^0-9]', sen):
if len(i) > len(res):
res = i
return res
if __name__ == '__main__':
input = "fun&!! time"
print(LongestWord(input))
#Output: time
input = "I love dogs"
print(LongestWord(input))
#Output: love
input = "0123456789 123456"
print(LongestWord(input))
#Output: 0123456789
| [
"re.split"
] | [((437, 470), 're.split', 're.split', (['"""[^a-z|^A-Z|^0-9]"""', 'sen'], {}), "('[^a-z|^A-Z|^0-9]', sen)\n", (445, 470), False, 'import re\n')] |
from django.views.generic import TemplateView
from .models import Article, Ine, Comment
from django.shortcuts import render, redirect, get_object_or_404
from django.http import Http404
from django.http.response import JsonResponse
import json
import urllib.request
from .forms import ArticleForm
#index表示
class index(TemplateView):
template_name = "blog/index.html"
#新規作成
def new(request):
template_name = "blog/new.html"
if request.method == "POST":
form = ArticleForm(request.POST)
if form.is_valid():
form.save()
return redirect(article_all)
else:
form = ArticleForm
return render(request, template_name, {'form': form })
#記事一覧
def article_all(request):
template_name = "blog/article_all.html"
context = {"articles":Article.objects.all()}
return render(request, template_name, context)
#記事を閲覧
def view_article(request, pk):
template_name = "blog/view_article.html"
try:
article = Article.objects.get(pk=pk)
post = get_object_or_404(Article, pk=pk)
ine = Ine.objects.filter(parent=post).count()
except Article.DoesNotExist:
raise Http404
if request.method == "POST":
# データベースに投稿されたコメントを保存
Comment.objects.create(text=request.POST["text"], article=article)
context = {"article":article, "ine":ine}
return render(request, template_name, context)
#編集ページ
def edit(request,pk):
template_name = "blog/edit.html"
try:
article = Article.objects.get(pk=pk)
except Article.DoesNotExist:
raise Http404
if request.method == "POST":
article.title = request.POST["title"]
article.text = request.POST["text"]
article.save()
return redirect(view_article, pk)
context = {"article": article}
return render(request, template_name, context)
#記事削除
def delete(request, pk):
try:
article = Article.objects.get(pk=pk)
except Article.DoesNotExist:
raise Http404
article.delete()
return redirect(article_all)
#いいね関数
def add_ine(request, pk):
post = get_object_or_404(Article, pk=pk) #Articleの受け取り
ip_address = get_client_ip(request) #IPアドレスをget_client_ip()で取得
ips = [ine.ip_address for ine in Ine.objects.filter(parent=post).all()]
#IPアドレスが未登録の場合はDBに登録
if ip_address in ips:
msg = '登録済みです'
else:
ine = Ine.objects.create(ip_address=ip_address, parent=post)
ine.save()
msg = '登録しました'
#Json形式にして格納
d = {
'count': Ine.objects.filter(parent=post).count(),
'msg': msg,
}
return JsonResponse(d)
#IPアドレスを取得
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
| [
"django.shortcuts.render",
"django.shortcuts.get_object_or_404",
"django.shortcuts.redirect",
"django.http.response.JsonResponse"
] | [((651, 697), 'django.shortcuts.render', 'render', (['request', 'template_name', "{'form': form}"], {}), "(request, template_name, {'form': form})\n", (657, 697), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((836, 875), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (842, 875), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1373, 1412), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (1379, 1412), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1823, 1862), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (1829, 1862), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2036, 2057), 'django.shortcuts.redirect', 'redirect', (['article_all'], {}), '(article_all)\n', (2044, 2057), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2108, 2141), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Article'], {'pk': 'pk'}), '(Article, pk=pk)\n', (2125, 2141), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2618, 2633), 'django.http.response.JsonResponse', 'JsonResponse', (['d'], {}), '(d)\n', (2630, 2633), False, 'from django.http.response import JsonResponse\n'), ((1029, 1062), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Article'], {'pk': 'pk'}), '(Article, pk=pk)\n', (1046, 1062), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1750, 1776), 'django.shortcuts.redirect', 'redirect', (['view_article', 'pk'], {}), '(view_article, pk)\n', (1758, 1776), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((580, 601), 'django.shortcuts.redirect', 'redirect', (['article_all'], {}), '(article_all)\n', (588, 601), False, 'from django.shortcuts import render, redirect, get_object_or_404\n')] |
"""
Q3D Bus Bar Analysis
--------------------------------------------
This tutorial shows how you can use PyAedt to create a BusBar Project in
in Q3D and run a simulation
"""
# sphinx_gallery_thumbnail_path = 'Resources/busbar.png'
import os
from pyaedt.desktop import Desktop
from pyaedt import Q3d
###############################################################################
# NonGraphical
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Change Boolean to False to open AEDT in graphical mode
NonGraphical = True
###############################################################################
# Launch Desktop and Q3D
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This examples will use AEDT 2021.1 in Graphical mode
# This examples will use SI units.
d = Desktop("2021.1", NonGraphical, False)
q=Q3d()
###############################################################################
# Primitives Creation
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Here pyaedt will create polylines for the three busbar and box for the substrate
q.modeler.primitives.create_polyline([[0, 0, 0], [-100, 0, 0]], name="Bar1", matname="copper", xsection_type="Rectangle",
xsection_width="5mm", xsection_height="1mm")
q.modeler.primitives.create_polyline([[0, -15, 0], [-150, -15, 0]], name="Bar2", matname="aluminum",
xsection_type="Rectangle", xsection_width="5mm", xsection_height="1mm")
q.modeler.primitives.create_polyline([[0, -30, 0], [-175, -30, 0], [-175, -10, 0]], name="Bar3", matname="copper",
xsection_type="Rectangle", xsection_width="5mm", xsection_height="1mm")
q.modeler.primitives.create_box([50,30,-0.5], [-250,-100,-3], name="substrate", matname="FR4_epoxy")
###############################################################################
# Boundary Setup
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Here pyaedt will identify nets and assign source and sink to all nets
# There will be a source and sink for each bus bar
q.auto_identify_nets()
q.assign_source_to_objectface("Bar1",axisdir=q.AxisDir.XPos, source_name="Source1")
q.assign_sink_to_objectface("Bar1",axisdir=q.AxisDir.XNeg, sink_name="Sink1")
q.assign_source_to_objectface("Bar2",axisdir=q.AxisDir.XPos, source_name="Source2")
q.assign_sink_to_objectface("Bar2",axisdir=q.AxisDir.XNeg, sink_name="Sink2")
q.assign_source_to_objectface("Bar3",axisdir=q.AxisDir.XPos, source_name="Source3")
q.assign_sink_to_objectface("Bar3",axisdir=q.AxisDir.YPos, sink_name="Sink3")
###############################################################################
# Add a Q3D Setup
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This method add a setup to the project and define Adaptive Frequency value
q.create_setup(props={"AdaptiveFreq":"100MHz"})
###############################################################################
# Create AEDT Rectangular Plot
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This method add a rectangular plot to Aedt
q.post.create_rectangular_plot("C(Bar1,Bar1)",context="Original")
###############################################################################
# Solve Setup
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This method solve setup
q.analyse_nominal()
###############################################################################
# Close Desktop
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# After the simulaton is completed user can close the desktop or release it (using release_desktop method).
# All methods give possibility to save projects before exit
if os.name != "posix":
d.force_close_desktop()
| [
"pyaedt.Q3d",
"pyaedt.desktop.Desktop"
] | [((757, 795), 'pyaedt.desktop.Desktop', 'Desktop', (['"""2021.1"""', 'NonGraphical', '(False)'], {}), "('2021.1', NonGraphical, False)\n", (764, 795), False, 'from pyaedt.desktop import Desktop\n'), ((799, 804), 'pyaedt.Q3d', 'Q3d', ([], {}), '()\n', (802, 804), False, 'from pyaedt import Q3d\n')] |
from script_for_starting_project import add_url_to_urlpatterns
import os
def making_create_route(cwd, project_name, app_name, model, endpoint):
template = "\nclass {}CreateAPIView(CreateAPIView):\n\tqueryset = {}.objects.all()\n\tserializer_class = {}Serializer\n\n".format(model, model, model)
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'r') as e:
views = e.read()
views = views + template
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'w') as e:
e.write(views)
print("Successfully added create view!!")
add_url_to_urlpatterns(os.path.join(cwd, project_name, app_name, 'urls.py'), '\tpath("{}/", {}CreateAPIView.as_view()),'.format(endpoint, model))
print("Added url to path for the view!")
def making_update_route(cwd, project_name, app_name, model, endpoint):
template = "\nclass {}UpdateAPIView(UpdateAPIView):\n\tqueryset = {}.objects.all()\n\tserializer_class = {}Serializer\n\n".format(model, model, model)
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'r') as e:
views = e.read()
views = views + template
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'w') as e:
e.write(views)
print("Successfully added update view!!")
add_url_to_urlpatterns(os.path.join(cwd, project_name, app_name, 'urls.py'), '\tpath("{}/<id>/", {}UpdateAPIView.as_view()),'.format(endpoint, model))
print("Added url to path for the view!")
def making_read_route(cwd, project_name, app_name, model, endpoint):
template = "\nclass {}RetrieveAPIView(RetrieveAPIView):\n\tqueryset = {}.objects.all()\n\tserializer_class = {}Serializer\n\n".format(model, model, model)
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'r') as e:
views = e.read()
views = views + template
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'w') as e:
e.write(views)
print("Successfully added read view!!")
add_url_to_urlpatterns(os.path.join(cwd, project_name, app_name, 'urls.py'), '\tpath("{}/<id>/", {}RetrieveAPIView.as_view()),'.format(endpoint, model))
print("Added url to path for the view!")
def making_delete_route(cwd, project_name, app_name, model, endpoint):
template = "\nclass {}DestroyAPIView(DestroyAPIView):\n\tqueryset = {}.objects.all()\n\tserializer_class = {}Serializer\n\n".format(model, model, model)
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'r') as e:
views = e.read()
views = views + template
with open(os.path.join(cwd, project_name, app_name, 'views.py'), 'w') as e:
e.write(views)
print("Successfully added delete view!!")
add_url_to_urlpatterns(os.path.join(cwd, project_name, app_name, 'urls.py'), '\tpath("{}/<id>/", {}DestroyAPIView.as_view()),'.format(endpoint, model))
print("Added url to path for the view!")
# making_create_route(os.getcwd(),'new', 'api', 'User', 'user/create')
# making_update_route(os.getcwd(),'new', 'api', 'User', 'user/update')
# making_read_route(os.getcwd(),'new', 'api', 'User', 'user/read')
# making_delete_route(os.getcwd(), 'new', 'api', 'User', 'user/delete') | [
"os.path.join"
] | [((618, 670), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""urls.py"""'], {}), "(cwd, project_name, app_name, 'urls.py')\n", (630, 670), False, 'import os\n'), ((1336, 1388), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""urls.py"""'], {}), "(cwd, project_name, app_name, 'urls.py')\n", (1348, 1388), False, 'import os\n'), ((2056, 2108), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""urls.py"""'], {}), "(cwd, project_name, app_name, 'urls.py')\n", (2068, 2108), False, 'import os\n'), ((2779, 2831), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""urls.py"""'], {}), "(cwd, project_name, app_name, 'urls.py')\n", (2791, 2831), False, 'import os\n'), ((313, 366), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (325, 366), False, 'import os\n'), ((447, 500), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (459, 500), False, 'import os\n'), ((1031, 1084), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (1043, 1084), False, 'import os\n'), ((1165, 1218), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (1177, 1218), False, 'import os\n'), ((1753, 1806), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (1765, 1806), False, 'import os\n'), ((1887, 1940), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (1899, 1940), False, 'import os\n'), ((2474, 2527), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (2486, 2527), False, 'import os\n'), ((2608, 2661), 'os.path.join', 'os.path.join', (['cwd', 'project_name', 'app_name', '"""views.py"""'], {}), "(cwd, project_name, app_name, 'views.py')\n", (2620, 2661), False, 'import os\n')] |
import os
import unittest
from tests import TestCase, dlib
from face_engine import FaceEngine, load_engine
class TestFaceEnginePersistence(TestCase):
def setUp(self):
self.test_engine = FaceEngine()
def tearDown(self):
if os.path.isfile('test_engine.p'):
os.remove('test_engine.p')
if os.path.isfile('basic.estimator.p'):
os.remove('basic.estimator.p')
@unittest.skipUnless(dlib, "dlib package is not installed")
def test_save(self):
self.test_engine.save('test_engine.p')
self.assertEqual(os.path.isfile('test_engine.p'), True)
@unittest.skipUnless(dlib, "dlib package is not installed")
def test_save_with_fitted_engine(self):
images = [self.bubbles1, self.bubbles2]
classes = [0, 0]
self.test_engine.fit(images, classes)
self.test_engine.save('test_engine.p')
self.assertEqual(os.path.isfile('test_engine.p'), True)
self.assertEqual(os.path.isfile('basic.estimator.p'), True)
@unittest.skipIf(dlib, "dlib package is installed")
def test_load_engine_without_dlib(self):
self.test_engine.save('test_engine.p')
engine = load_engine('test_engine.p')
self.assertIsInstance(engine, FaceEngine)
self.assertEqual(engine.detector, 'abstract_detector')
self.assertEqual(engine.embedder, 'abstract_embedder')
self.assertEqual(engine.estimator, 'basic')
@unittest.skipUnless(dlib, "dlib package is not installed")
def test_load_engine_with_dlib(self):
self.test_engine.save('test_engine.p')
engine = load_engine('test_engine.p')
self.assertIsInstance(engine, FaceEngine)
self.assertEqual(engine.detector, 'hog')
self.assertEqual(engine.embedder, 'resnet')
self.assertEqual(engine.estimator, 'basic')
@unittest.skipUnless(dlib, "dlib package is not installed")
def test_load_engine_with_estimator_state(self):
images = [self.bubbles1, self.bubbles2]
classes = [0, 0]
self.test_engine.fit(images, classes)
self.test_engine.save('test_engine.p')
engine = load_engine('test_engine.p')
self.assertIsInstance(engine, FaceEngine)
self.assertEqual(engine.detector, 'hog')
self.assertEqual(engine.embedder, 'resnet')
self.assertEqual(engine.estimator, 'basic')
self.assertEqual(engine.n_classes, 1)
self.assertEqual(engine.n_samples, 2)
if __name__ == '__main__':
unittest.main()
| [
"face_engine.load_engine",
"face_engine.FaceEngine",
"unittest.skipIf",
"unittest.skipUnless",
"os.path.isfile",
"unittest.main",
"os.remove"
] | [((421, 479), 'unittest.skipUnless', 'unittest.skipUnless', (['dlib', '"""dlib package is not installed"""'], {}), "(dlib, 'dlib package is not installed')\n", (440, 479), False, 'import unittest\n'), ((622, 680), 'unittest.skipUnless', 'unittest.skipUnless', (['dlib', '"""dlib package is not installed"""'], {}), "(dlib, 'dlib package is not installed')\n", (641, 680), False, 'import unittest\n'), ((1029, 1079), 'unittest.skipIf', 'unittest.skipIf', (['dlib', '"""dlib package is installed"""'], {}), "(dlib, 'dlib package is installed')\n", (1044, 1079), False, 'import unittest\n'), ((1452, 1510), 'unittest.skipUnless', 'unittest.skipUnless', (['dlib', '"""dlib package is not installed"""'], {}), "(dlib, 'dlib package is not installed')\n", (1471, 1510), False, 'import unittest\n'), ((1855, 1913), 'unittest.skipUnless', 'unittest.skipUnless', (['dlib', '"""dlib package is not installed"""'], {}), "(dlib, 'dlib package is not installed')\n", (1874, 1913), False, 'import unittest\n'), ((2507, 2522), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2520, 2522), False, 'import unittest\n'), ((203, 215), 'face_engine.FaceEngine', 'FaceEngine', ([], {}), '()\n', (213, 215), False, 'from face_engine import FaceEngine, load_engine\n'), ((252, 283), 'os.path.isfile', 'os.path.isfile', (['"""test_engine.p"""'], {}), "('test_engine.p')\n", (266, 283), False, 'import os\n'), ((335, 370), 'os.path.isfile', 'os.path.isfile', (['"""basic.estimator.p"""'], {}), "('basic.estimator.p')\n", (349, 370), False, 'import os\n'), ((1189, 1217), 'face_engine.load_engine', 'load_engine', (['"""test_engine.p"""'], {}), "('test_engine.p')\n", (1200, 1217), False, 'from face_engine import FaceEngine, load_engine\n'), ((1617, 1645), 'face_engine.load_engine', 'load_engine', (['"""test_engine.p"""'], {}), "('test_engine.p')\n", (1628, 1645), False, 'from face_engine import FaceEngine, load_engine\n'), ((2150, 2178), 'face_engine.load_engine', 'load_engine', (['"""test_engine.p"""'], {}), "('test_engine.p')\n", (2161, 2178), False, 'from face_engine import FaceEngine, load_engine\n'), ((297, 323), 'os.remove', 'os.remove', (['"""test_engine.p"""'], {}), "('test_engine.p')\n", (306, 323), False, 'import os\n'), ((384, 414), 'os.remove', 'os.remove', (['"""basic.estimator.p"""'], {}), "('basic.estimator.p')\n", (393, 414), False, 'import os\n'), ((577, 608), 'os.path.isfile', 'os.path.isfile', (['"""test_engine.p"""'], {}), "('test_engine.p')\n", (591, 608), False, 'import os\n'), ((916, 947), 'os.path.isfile', 'os.path.isfile', (['"""test_engine.p"""'], {}), "('test_engine.p')\n", (930, 947), False, 'import os\n'), ((980, 1015), 'os.path.isfile', 'os.path.isfile', (['"""basic.estimator.p"""'], {}), "('basic.estimator.p')\n", (994, 1015), False, 'import os\n')] |
#!/usr/bin/env python3
import os
import re
import subprocess
from functools import cmp_to_key
def open_settings(path: str):
with open(path, "r", encoding="UTF-8") as file:
return file.readlines()
def get_hwmon_location():
base_path = "/sys/devices/platform/coretemp.0/hwmon"
hwmon_dirs = os.listdir(base_path)
assert len(hwmon_dirs) == 1
hwmon_dir = f"{base_path}/{hwmon_dirs[0]}"
return [f"hwmon {hwmon_dir}/{x}\n" for x in os.listdir(hwmon_dir) if
os.path.isfile(f"{hwmon_dir}/{x}") and re.match("temp[0-9]+_input", x)]
def update_settings(current, new):
config = [x for x in current if (not x.startswith("hwmon"))] + new
return sorted(config, key=cmp_to_key(lambda i1, i2: line_fitness(i1) - line_fitness(i2)))
def line_fitness(line: str):
if line.startswith("tp_fan"):
return 1
elif line.startswith("hwmon"):
return 3
elif line.startswith("("):
return 5
else:
return 4
def are_same(current, new):
cur = set([x.strip() for x in current if x.startswith("hwmon")])
ne = set([x.strip() for x in new if x.startswith("hwmon")])
return cur.issubset(ne) and cur.issuperset(ne)
def save_settings(current, new):
updated_config = "".join(update_settings(current, new))
print(f"Updating config!\n{updated_config}")
write_settings(config_path, updated_config)
def write_settings(path: str, payload: str):
with open(path, "w", encoding="UTF-8") as file:
file.write(payload)
def restart_thinkfan():
process = subprocess.Popen("systemctl restart thinkfan.service".split(), stdout=subprocess.PIPE)
output, error = process.communicate()
if error:
print(f"Error ocurred during the thinkfan restart!\n{error}")
else:
print("Service was restarted successfully, everything set.")
if __name__ == "__main__":
config_path = "/etc/thinkfan.conf"
current = open_settings(config_path)
new = get_hwmon_location()
if are_same(current, new):
print("All set! No change detected.")
else:
save_settings(current, new)
restart_thinkfan()
| [
"os.path.isfile",
"os.listdir",
"re.match"
] | [((312, 333), 'os.listdir', 'os.listdir', (['base_path'], {}), '(base_path)\n', (322, 333), False, 'import os\n'), ((461, 482), 'os.listdir', 'os.listdir', (['hwmon_dir'], {}), '(hwmon_dir)\n', (471, 482), False, 'import os\n'), ((498, 532), 'os.path.isfile', 'os.path.isfile', (['f"""{hwmon_dir}/{x}"""'], {}), "(f'{hwmon_dir}/{x}')\n", (512, 532), False, 'import os\n'), ((537, 568), 're.match', 're.match', (['"""temp[0-9]+_input"""', 'x'], {}), "('temp[0-9]+_input', x)\n", (545, 568), False, 'import re\n')] |
#coding:utf-8
from logging import Handler,StreamHandler
from logging.handlers import RotatingFileHandler
class LogHandlerBase(object):
def __init__(self):
self.logger = None
def setLogger(self, logger):
self.logger = logger
class LogFileHandler(LogHandlerBase,RotatingFileHandler):
"""
"""
TYPE = 'file'
def __init__(self, *args, **kwargs):
RotatingFileHandler.__init__(self, *args, **kwargs)
LogHandlerBase.__init__(self)
class LogConsoleHandler(LogHandlerBase,StreamHandler):
"""
"""
TYPE = 'console'
def __init__(self, *args, **kwargs):
StreamHandler.__init__(self, *args, **kwargs)
LogHandlerBase.__init__(self)
| [
"logging.handlers.RotatingFileHandler.__init__",
"logging.StreamHandler.__init__"
] | [((397, 448), 'logging.handlers.RotatingFileHandler.__init__', 'RotatingFileHandler.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (425, 448), False, 'from logging.handlers import RotatingFileHandler\n'), ((631, 676), 'logging.StreamHandler.__init__', 'StreamHandler.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (653, 676), False, 'from logging import Handler, StreamHandler\n')] |
"""
Accumulation workspace
To test: nc -l 31466 < REF_M_25640.adara
"""
from mantid import simpleapi
simpleapi.logger.notice("Starting proc")
try:
simpleapi.CloneWorkspace(InputWorkspace=input, OutputWorkspace=output)
except:
return input
| [
"mantid.simpleapi.CloneWorkspace",
"mantid.simpleapi.logger.notice"
] | [((112, 152), 'mantid.simpleapi.logger.notice', 'simpleapi.logger.notice', (['"""Starting proc"""'], {}), "('Starting proc')\n", (135, 152), False, 'from mantid import simpleapi\n'), ((162, 232), 'mantid.simpleapi.CloneWorkspace', 'simpleapi.CloneWorkspace', ([], {'InputWorkspace': 'input', 'OutputWorkspace': 'output'}), '(InputWorkspace=input, OutputWorkspace=output)\n', (186, 232), False, 'from mantid import simpleapi\n')] |
# coding: utf-8
# # Assignment 3
#
# Welcome to Assignment 3. This will be even more fun. Now we will calculate statistical measures on the test data you have created.
#
# YOU ARE NOT ALLOWED TO USE ANY OTHER 3RD PARTY LIBRARIES LIKE PANDAS. PLEASE ONLY MODIFY CONTENT INSIDE THE FUNCTION SKELETONS
# Please read why: https://www.coursera.org/learn/exploring-visualizing-iot-data/discussions/weeks/3/threads/skjCbNgeEeapeQ5W6suLkA
# . Just make sure you hit the play button on each cell from top to down. There are seven functions you have to implement. Please also make sure than on each change on a function you hit the play button again on the corresponding cell to make it available to the rest of this notebook.
# Please also make sure to only implement the function bodies and DON'T add any additional code outside functions since this might confuse the autograder.
#
# So the function below is used to make it easy for you to create a data frame from a cloudant data frame using the so called "DataSource" which is some sort of a plugin which allows ApacheSpark to use different data sources.
#
# All functions can be implemented using DataFrames, ApacheSparkSQL or RDDs. We are only interested in the result. You are given the reference to the data frame in the "df" parameter and in case you want to use SQL just use the "spark" parameter which is a reference to the global SparkSession object. Finally if you want to use RDDs just use "df.rdd" for obtaining a reference to the underlying RDD object.
#
# Let's start with the first function. Please calculate the minimal temperature for the test data set you have created. We've provided a little skeleton for you in case you want to use SQL. You can use this skeleton for all subsequent functions. Everything can be implemented using SQL only if you like.
# In[1]:
def minTemperature(df,spark):
return spark.sql("SELECT min(temperature) as mintemp from washing").first().mintemp
# Please now do the same for the mean of the temperature
# In[2]:
def meanTemperature(df,spark):
return spark.sql("SELECT avg(temperature) as meantemp from washing").first().meantemp
# Please now do the same for the maximum of the temperature
# In[3]:
def maxTemperature(df,spark):
return spark.sql("SELECT max(temperature) as maxtemp from washing").first().maxtemp
# Please now do the same for the standard deviation of the temperature
# In[4]:
def sdTemperature(df,spark):
return spark.sql("SELECT stddev(temperature) as stddevtemp from washing").first().stddevtemp
# Please now do the same for the skew of the temperature. Since the SQL statement for this is a bit more complicated we've provided a skeleton for you. You have to insert custom code at four position in order to make the function work. Alternatively you can also remove everything and implement if on your own. Note that we are making use of two previously defined functions, so please make sure they are correct. Also note that we are making use of python's string formatting capabilitis where the results of the two function calls to "meanTemperature" and "sdTemperature" are inserted at the "%s" symbols in the SQL string.
# In[5]:
def skewTemperature(df,spark):
return spark.sql("""
SELECT
(
1/count(temperature)
) *
SUM (
POWER(temperature-%s,3)/POWER(%s,3)
)
as skewTemp from washing
""" %(meanTemperature(df,spark),sdTemperature(df,spark))).first().skewTemp
# Kurtosis is the 4th statistical moment, so if you are smart you can make use of the code for skew which is the 3rd statistical moment. Actually only two things are different.
# In[6]:
def kurtosisTemperature(df,spark):
return spark.sql("""
SELECT
(
1/count(temperature)
) *
SUM (
POWER(temperature-%s,4)/POWER(%s,4)
)
as kurtTemp from washing
""" %(meanTemperature(df,spark),sdTemperature(df,spark))).first().kurtTemp
# Just a hint. This can be solved easily using SQL as well, but as shown in the lecture also using RDDs.
# In[7]:
def correlationTemperatureHardness(df,spark):
return df.stat.corr("temperature","hardness")
# ### PLEASE DON'T REMOVE THIS BLOCK - THE FOLLOWING CODE IS NOT GRADED
# #axx
# ### PLEASE DON'T REMOVE THIS BLOCK - THE FOLLOWING CODE IS NOT GRADED
# Now it is time to connect to the object store and read a PARQUET file and create a dataframe out of it. We've created that data for you already. Using SparkSQL you can handle it like a database.
# In[8]:
import ibmos2spark
# @hidden_cell
credentials = {
'endpoint': 'https://s3-api.us-geo.objectstorage.service.networklayer.com',
'api_key': '<KEY>',
'service_id': 'iam-ServiceId-9cd8e66e-3bb4-495a-807a-588692cca4d0',
'iam_service_endpoint': 'https://iam.bluemix.net/oidc/token'}
configuration_name = 'os_b0f1407510994fd1b793b85137baafb8_configs'
cos = ibmos2spark.CloudObjectStorage(sc, credentials, configuration_name, 'bluemix_cos')
from pyspark.sql import SparkSession
spark = SparkSession.builder.getOrCreate()
# Since JSON data can be semi-structured and contain additional metadata, it is possible that you might face issues with the DataFrame layout.
# Please read the documentation of 'SparkSession.read()' to learn more about the possibilities to adjust the data loading.
# PySpark documentation: http://spark.apache.org/docs/2.0.2/api/python/pyspark.sql.html#pyspark.sql.DataFrameReader.json
df = spark.read.parquet(cos.url('washing.parquet', 'courseradsnew-donotdelete-pr-1hffrnl2pprwut'))
df.show()
# In[9]:
df.rdd.toDF().registerTempTable("washing")
# In[10]:
minTemperature(df,spark)
# In[11]:
meanTemperature(df,spark)
# In[12]:
maxTemperature(df,spark)
# In[13]:
sdTemperature(df,spark)
# In[14]:
skewTemperature(df,spark)
# In[15]:
kurtosisTemperature(df,spark)
# In[16]:
correlationTemperatureHardness(df,spark)
# Congratulations, you are done, please download this notebook as python file using the export function and submit is to the gader using the filename "assignment3.1.py"
| [
"pyspark.sql.SparkSession.builder.getOrCreate",
"ibmos2spark.CloudObjectStorage"
] | [((4917, 5003), 'ibmos2spark.CloudObjectStorage', 'ibmos2spark.CloudObjectStorage', (['sc', 'credentials', 'configuration_name', '"""bluemix_cos"""'], {}), "(sc, credentials, configuration_name,\n 'bluemix_cos')\n", (4947, 5003), False, 'import ibmos2spark\n'), ((5046, 5080), 'pyspark.sql.SparkSession.builder.getOrCreate', 'SparkSession.builder.getOrCreate', ([], {}), '()\n', (5078, 5080), False, 'from pyspark.sql import SparkSession\n')] |
#!/usr/bin/python
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor
import RPi.GPIO as GPIO
import time
import atexit
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
trigger = 18
echo = 24
GPIO.setup(trigger, GPIO.OUT)
GPIO.setup(echo, GPIO.IN)
GPIO.output(trigger, False)
time.sleep(2)
def distance(echo, trigger):
GPIO.output(trigger, True)
time.sleep(0.00001)
GPIO.output(trigger, False)
start = 0
stop = 0
while GPIO.input(echo) == 0:
start = time.time()
while GPIO.input(echo) == 1:
stop = time.time()
if stop != 0 and start != 0:
elapsed = stop - start
distance = elapsed * 33440
distance = distance / 2
return distance
"""while 0 == 0:
distance(echo, trigger)
time.sleep(0.5)"""
# create a default object, no changes to I2C address or frequency
mh = Adafruit_MotorHAT(addr=0x60)
# recommended for auto-disabling motors on shutdown!
def turnOffMotors():
mh.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
mh.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
mh.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
mh.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
atexit.register(turnOffMotors)
################################# DC motor test!
m1 = mh.getMotor(1)
m2 = mh.getMotor(2)
m3 = mh.getMotor(3)
m4 = mh.getMotor(4)
# set the speed to start, from 0 (off) to 255 (max speed)
m1.setSpeed(150)
m2.setSpeed(150)
m3.setSpeed(150)
m4.setSpeed(150)
m1.run(Adafruit_MotorHAT.FORWARD);
m2.run(Adafruit_MotorHAT.FORWARD);
m3.run(Adafruit_MotorHAT.FORWARD);
m4.run(Adafruit_MotorHAT.FORWARD);
# turn on motor
m1.run(Adafruit_MotorHAT.RELEASE);
m2.run(Adafruit_MotorHAT.RELEASE);
m3.run(Adafruit_MotorHAT.RELEASE);
m4.run(Adafruit_MotorHAT.RELEASE);
"""while (True):
print "Forward! "
m1.run(Adafruit_MotorHAT.FORWARD)
m2.run(Adafruit_MotorHAT.FORWARD)
m3.run(Adafruit_MotorHAT.FORWARD)
m4.run(Adafruit_MotorHAT.FORWARD)
print "\tSpeed up..."
for i in range(255):
m1.setSpeed(i)
m2.setSpeed(i)
m3.setSpeed(i)
m4.setSpeed(i)
time.sleep(0.01)
print "\tSlow down..."
for i in reversed(range(255)):
m1.setSpeed(i)
m2.setSpeed(i)
m3.setSpeed(i)
m4.setSpeed(i)
time.sleep(0.01)
print "Backward! "
m1.run(Adafruit_MotorHAT.BACKWARD)
m2.run(Adafruit_MotorHAT.BACKWARD)
m3.run(Adafruit_MotorHAT.BACKWARD)
m4.run(Adafruit_MotorHAT.BACKWARD)
print "\tSpeed up..."
for i in range(255):
m1.setSpeed(i)
m2.setSpeed(i)
m3.setSpeed(i)
m4.setSpeed(i)
time.sleep(0.01)
print "\tSlow down..."
for i in reversed(range(255)):
m1.setSpeed(i)
m2.setSpeed(i)
m3.setSpeed(i)
m4.setSpeed(i)
time.sleep(0.01)
print "Release"
m1.run(Adafruit_MotorHAT.RELEASE)
m2.run(Adafruit_MotorHAT.RELEASE)
m3.run(Adafruit_MotorHAT.RELEASE)
m4.run(Adafruit_MotorHAT.RELEASE)
time.sleep(1.0)"""
while(True):
while distance(echo, trigger) > 10.0:
m1.run(Adafruit_MotorHAT.FORWARD)
m2.run(Adafruit_MotorHAT.FORWARD)
m3.run(Adafruit_MotorHAT.FORWARD)
m4.run(Adafruit_MotorHAT.FORWARD)
else:
m1.run(Adafruit_MotorHAT.RELEASE)
m2.run(Adafruit_MotorHAT.RELEASE)
m3.run(Adafruit_MotorHAT.RELEASE)
m4.run(Adafruit_MotorHAT.RELEASE)
time.sleep(0.5)
GPIO.cleanup()
| [
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"RPi.GPIO.output",
"RPi.GPIO.setwarnings",
"time.sleep",
"Adafruit_MotorHAT.Adafruit_MotorHAT",
"RPi.GPIO.input",
"time.time",
"atexit.register",
"RPi.GPIO.setmode"
] | [((136, 159), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', (['(False)'], {}), '(False)\n', (152, 159), True, 'import RPi.GPIO as GPIO\n'), ((160, 182), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (172, 182), True, 'import RPi.GPIO as GPIO\n'), ((208, 237), 'RPi.GPIO.setup', 'GPIO.setup', (['trigger', 'GPIO.OUT'], {}), '(trigger, GPIO.OUT)\n', (218, 237), True, 'import RPi.GPIO as GPIO\n'), ((238, 263), 'RPi.GPIO.setup', 'GPIO.setup', (['echo', 'GPIO.IN'], {}), '(echo, GPIO.IN)\n', (248, 263), True, 'import RPi.GPIO as GPIO\n'), ((265, 292), 'RPi.GPIO.output', 'GPIO.output', (['trigger', '(False)'], {}), '(trigger, False)\n', (276, 292), True, 'import RPi.GPIO as GPIO\n'), ((293, 306), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (303, 306), False, 'import time\n'), ((862, 888), 'Adafruit_MotorHAT.Adafruit_MotorHAT', 'Adafruit_MotorHAT', ([], {'addr': '(96)'}), '(addr=96)\n', (879, 888), False, 'from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor\n'), ((1155, 1185), 'atexit.register', 'atexit.register', (['turnOffMotors'], {}), '(turnOffMotors)\n', (1170, 1185), False, 'import atexit\n'), ((3218, 3232), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (3230, 3232), True, 'import RPi.GPIO as GPIO\n'), ((340, 366), 'RPi.GPIO.output', 'GPIO.output', (['trigger', '(True)'], {}), '(trigger, True)\n', (351, 366), True, 'import RPi.GPIO as GPIO\n'), ((370, 387), 'time.sleep', 'time.sleep', (['(1e-05)'], {}), '(1e-05)\n', (380, 387), False, 'import time\n'), ((393, 420), 'RPi.GPIO.output', 'GPIO.output', (['trigger', '(False)'], {}), '(trigger, False)\n', (404, 420), True, 'import RPi.GPIO as GPIO\n'), ((3202, 3217), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (3212, 3217), False, 'import time\n'), ((460, 476), 'RPi.GPIO.input', 'GPIO.input', (['echo'], {}), '(echo)\n', (470, 476), True, 'import RPi.GPIO as GPIO\n'), ((497, 508), 'time.time', 'time.time', ([], {}), '()\n', (506, 508), False, 'import time\n'), ((525, 541), 'RPi.GPIO.input', 'GPIO.input', (['echo'], {}), '(echo)\n', (535, 541), True, 'import RPi.GPIO as GPIO\n'), ((561, 572), 'time.time', 'time.time', ([], {}), '()\n', (570, 572), False, 'import time\n')] |
import subprocess
import pytest
import os
import shutil
import re
import time
import threading
import queue
import pexpect
from pexpect.fdpexpect import fdspawn
from qmp import QMP
# If not specifying --run-dir, then make sure that the CODEBUILD_SRC_DIR env
# var is set. This is the directory where the hpsc-bsp directory is located.
# On AWS CodeBuild, this is done automatically. In any other environment, it
# needs to be set.
def pytest_configure():
# This string will be evaluated whenever a call to subprocess.run fails
pytest.run_fail_str = "\"\\nARGS:\\n\" + str(out.args) + \"\\nRETURN CODE:\\n\" + str(out.returncode) + \"\\nSTDOUT:\\n\" + out.stdout.decode('ascii') + \"\\nSTDERR:\\n\" + out.stderr.decode('ascii')"
def sink_serial_port(conn, stop_ev):
"""Consume data from a pexpect file descriptor to prevent stall.
If the FIFO of the PTY fills up, the QEMU thread of the respective target
processor simply stalls -- this has been observed. Also, this way we
capture asynchronous output, that may show up at the console at a time not
tied to our pexpect send-expect calls (e.g. while we're running commands
over SSH).
"""
while not stop_ev.is_set():
poll_interval = 2 # check for request to stop (seconds)
r = conn.expect([pexpect.TIMEOUT, pexpect.EOF], poll_interval)
if r == 0:
continue
elif r == 1:
return
SPAWN_ARGS = dict(encoding='ascii', codec_errors='ignore', timeout=1000)
# TODO: These might need to be dynamic: not all profiles use all subsystems
trch_port = "serial0"
rtps_port = "serial1"
hpps_port = "serial2"
class Req:
pass
class ExpectReq(Req):
def __init__(self, patterns):
self.patterns = patterns
class SendLineReq(Req):
def __init__(self, line):
self.line = line
class SendIntrReq(Req):
pass
class StartEatReq(Req):
pass
class StopEatReq(Req):
pass
class PidReq(Req):
pass
class WaitReq(Req):
pass
class StopReq(Req):
pass
class Resp:
pass
class ErrorResp(Resp):
def __init__(self, exc):
self.exc = exc
class RcResp(Resp):
def __init__(self, rc, match=None):
self.rc = rc
self.match = match
class PidResp(Resp):
def __init__(self, pid):
self.pid = pid
class Chan:
def __init__(self):
self.req_qu = queue.Queue(maxsize=1)
self.resp_qu = queue.Queue(maxsize=1)
# private
def do_rc_req(self, req):
self.req_qu.put(req)
ret = self.resp_qu.get()
if isinstance(ret, RcResp):
return ret.rc
elif isinstance(ret, ErrorResp):
raise Exception("request to pexpect thread failed") from ret.exc
def expect(self, patterns):
# Mimic pexpect interface
if not isinstance(patterns, list):
patterns = [patterns]
self.req_qu.put(ExpectReq(patterns))
ret = self.resp_qu.get()
if isinstance(ret, RcResp):
if ret.match is not None:
self.match = ret.match
return ret.rc
elif isinstance(ret, ErrorResp):
raise Exception("expect request failed") from ret.exc
def sendline(self, line):
return self.do_rc_req(SendLineReq(line))
def sendintr(self):
return self.do_rc_req(SendIntrReq())
# private
def do_eat_req(self, req):
self.req_qu.put(req)
ret = self.resp_qu.get()
assert isinstance(ret, RcResp)
assert ret.rc == 0
def start_eat(self):
self.do_eat_req(StartEatReq())
def stop_eat(self):
self.do_eat_req(StopEatReq())
def wait(self):
return self.do_rc_req(WaitReq())
def pid(self):
self.req_qu.put(PidReq())
ret = self.resp_qu.get()
assert isinstance(ret, PidResp)
return ret.pid
def stop(self):
self.req_qu.put(StopReq())
# We want ability to consume streams in the background to get all log, not just
# the log during our expect calls) and to prevent Qemu CPU thread blocking when
# some FIFO along the serial port output path fills up (not 100% confirmed that
# this is happening, but suspected as root cause of stalls; might be PySerial
# related). So we need background threads that call expect(EOF).
#
# BUT Pexpect handles cannot be passed among threads (accoding to Common
# Problems in the docs; it appwars to work, but we won't risk it), so we need
# to spawn to get a handle within the thread and only use it from that thread.
# Also, having multiple spawns done from the same thread may not be supported
# by pexpect (again, appears to work, but we won't risk it).
def attach_port(port, pty, log, chan):
conn = os.open(pty, os.O_RDWR|os.O_NONBLOCK|os.O_NOCTTY)
handle = fdspawn(conn, logfile=log, **SPAWN_ARGS)
service_loop(handle, chan)
handle.close() # closes the conn file descriptor
def spawn(cmd, cwd, log, chan):
handle = pexpect.spawn(cmd, cwd=cwd, logfile=log, **SPAWN_ARGS)
service_loop(handle, chan)
handle.close()
def service_loop(handle, chan):
poll_interval = 2 # check for requests (seconds)
eat = False
run = True
while run:
try:
req = chan.req_qu.get(timeout=poll_interval)
if isinstance(req, ExpectReq) or \
isinstance(req, SendLineReq) or \
isinstance(req, SendIntrReq):
try:
if isinstance(req, ExpectReq):
r = handle.expect(req.patterns)
chan.resp_qu.put(RcResp(r, handle.match))
elif isinstance(req, SendLineReq):
r = handle.sendline(req.line)
chan.resp_qu.put(RcResp(r))
elif isinstance(req, SendIntrReq):
r = handle.sendintr() # not available for fdspawn
chan.resp_qu.put(RcResp(r))
except Exception as e:
chan.resp_qu.put(ErrorResp(e))
elif isinstance(req, StartEatReq):
eat = True
chan.resp_qu.put(RcResp(0))
elif isinstance(req, StopEatReq):
eat = False
chan.resp_qu.put(RcResp(0))
elif isinstance(req, WaitReq):
r = handle.wait()
chan.resp_qu.put(RcResp(r))
elif isinstance(req, PidReq):
chan.resp_qu.put(PidResp(handle.pid))
elif isinstance(req, StopReq):
run = False
except queue.Empty:
pass
if eat:
# Suprizingly, EOF does happen sometimes, even though files not
# closed while we're here. Not sure why. Expect EOF too, then.
handle.expect([pexpect.TIMEOUT, pexpect.EOF], poll_interval)
# This function will bringup QEMU, and create a channel to each serial port
# in the Qemu machine; then perform a QEMU teardown when the assigned tests complete.
def qemu_instance(config):
log_dir_name = 'logs'
tstamp = time.strftime('%Y%m%d%H%M%S')
qemu_cmd = config.getoption('qemu_cmd')
run_dir = config.getoption('run_dir')
if run_dir is None:
run_dir = os.path.join(os.environ['CODEBUILD_SRC_DIR'], "hpsc-bsp")
log_dir = os.path.join(run_dir, log_dir_name)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
# Create a ser_fd dictionary object with each subsystem's serial file descriptor
ser_fd = dict();
log_files = []
handles = {}
# Now start QEMU without any screen sessions
# Note that the Popen call below combines stdout and stderr together
qemu_log_name = 'qemu'
qemu_log = open(os.path.join(log_dir, qemu_log_name + '.log'), "w")
log_files.append((qemu_log_name, qemu_log))
qemu = Chan()
qemu_th = threading.Thread(target=spawn, args=(qemu_cmd, run_dir, qemu_log, qemu))
qemu_th.start()
qemu_pid = qemu.pid()
qemu.expect('QMP_PORT = (\d+)')
qmp_port = int(qemu.match.group(1))
qemu.expect('\(qemu\) ') # just for good measure
qemu.start_eat() # consume to get log and avoid blocking due to full FIFOs
qmp = QMP('localhost', qmp_port, timeout=10)
reply = qmp.command("query-chardev")
cdevs = reply["return"]
pty_devs = {}
for cdev in cdevs:
devname = cdev[u"filename"]
if devname.startswith('pty:'):
pty_devs[cdev[u"label"]] = devname.split(':')[1]
# Association defined by order of UARTs in Qemu machine model (device tree)
serial_ports = ["serial0", "serial1", "serial2"]
threads = {}
chans = {}
# Connect to the serial ports, then issue a continue command to QEMU
for port in serial_ports:
log = open(os.path.join(log_dir, port + '.log'), "w")
log_files.append((port, log))
chans[port] = Chan()
threads[port] = threading.Thread(target=attach_port,
args=(port, pty_devs[port], log, chans[port]))
threads[port].start()
qmp.command("cont")
# For convenience
trch = chans[trch_port]
rtps = chans[rtps_port]
hpps = chans[hpps_port]
# Wait for subsystems to boot
trch.expect('\[\d+\] Waiting for interrupt...')
rtps.expect('SHLL \[/\] # ')
# Log into HPPS Linux
hpps.expect('hpsc-chiplet login: ')
hpps.sendline('root')
hpps.expect('root@hpsc-chiplet:~# ')
# Eat the output until a test requests a fixture for the respective port
for chan in chans.values():
chan.start_eat()
yield chans
for chan in chans.values():
chan.stop_eat()
chan.stop()
for th in threads.values():
th.join()
qemu.stop_eat()
qemu.sendline("quit")
qemu.expect(pexpect.EOF)
rc = qemu.wait()
assert rc == 0, "Qemu process exited uncleanly"
qemu.stop()
qemu_th.join()
for log_name, log_file in log_files:
log_file.close()
shutil.copyfile(os.path.join(log_dir, log_name + '.log'),
os.path.join(log_dir,
log_name + '.' + tstamp + '.' + str(qemu_pid) + '.log'))
@pytest.fixture(scope="module")
def qemu_instance_per_mdl(request):
yield from qemu_instance(request.config)
@pytest.fixture(scope="function")
def qemu_instance_per_fnc(request):
yield from qemu_instance(request.config)
def use_console(qemu_inst, serial_port):
chan = qemu_inst[serial_port]
chan.stop_eat() # Pause eating the output
yield chan
chan.start_eat() # Resume eating the output
# The serial port fixture is always per function, for either Qemu instance scope. */
@pytest.fixture(scope="function")
def trch_serial(qemu_instance_per_mdl):
yield from use_console(qemu_instance_per_mdl, trch_port)
@pytest.fixture(scope="function")
def rtps_serial(qemu_instance_per_mdl):
yield from use_console(qemu_instance_per_mdl, rtps_port)
@pytest.fixture(scope="function")
def hpps_serial(qemu_instance_per_mdl):
yield from use_console(qemu_instance_per_mdl, hpps_port)
# The serial port fixture is always per function, for either Qemu instance scope. */
@pytest.fixture(scope="function")
def trch_serial_per_fnc(qemu_instance_per_fnc):
yield from use_console(qemu_instance_per_fnc, trch_port)
@pytest.fixture(scope="function")
def rtps_serial_per_fnc(qemu_instance_per_fnc):
yield from use_console(qemu_instance_per_fnc, rtps_port)
@pytest.fixture(scope="function")
def hpps_serial_per_fnc(qemu_instance_per_fnc):
yield from use_console(qemu_instance_per_fnc, hpps_port)
def pytest_addoption(parser):
parser.addoption("--host", action="store", help="remote hostname")
parser.addoption("--run-dir", action="store", help="directory where to invoke Qemu")
parser.addoption("--qemu-cmd", action="store", help="command to use to invoke Qemu",
default="./run-qemu.sh -e ./qemu-env.sh -- -S -D")
def pytest_generate_tests(metafunc):
# this is called for every test
opts = {
'host': metafunc.config.option.host,
}
for opt, val in opts.items():
if opt in metafunc.fixturenames and val is not None:
metafunc.parametrize(opt, [val])
| [
"os.path.exists",
"qmp.QMP",
"os.makedirs",
"pexpect.spawn",
"os.open",
"time.strftime",
"os.path.join",
"pexpect.fdpexpect.fdspawn",
"pytest.fixture",
"threading.Thread",
"queue.Queue"
] | [((10128, 10158), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (10142, 10158), False, 'import pytest\n'), ((10242, 10274), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (10256, 10274), False, 'import pytest\n'), ((10629, 10661), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (10643, 10661), False, 'import pytest\n'), ((10764, 10796), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (10778, 10796), False, 'import pytest\n'), ((10899, 10931), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (10913, 10931), False, 'import pytest\n'), ((11120, 11152), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (11134, 11152), False, 'import pytest\n'), ((11263, 11295), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (11277, 11295), False, 'import pytest\n'), ((11406, 11438), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (11420, 11438), False, 'import pytest\n'), ((4703, 4756), 'os.open', 'os.open', (['pty', '(os.O_RDWR | os.O_NONBLOCK | os.O_NOCTTY)'], {}), '(pty, os.O_RDWR | os.O_NONBLOCK | os.O_NOCTTY)\n', (4710, 4756), False, 'import os\n'), ((4766, 4806), 'pexpect.fdpexpect.fdspawn', 'fdspawn', (['conn'], {'logfile': 'log'}), '(conn, logfile=log, **SPAWN_ARGS)\n', (4773, 4806), False, 'from pexpect.fdpexpect import fdspawn\n'), ((4937, 4991), 'pexpect.spawn', 'pexpect.spawn', (['cmd'], {'cwd': 'cwd', 'logfile': 'log'}), '(cmd, cwd=cwd, logfile=log, **SPAWN_ARGS)\n', (4950, 4991), False, 'import pexpect\n'), ((7059, 7088), 'time.strftime', 'time.strftime', (['"""%Y%m%d%H%M%S"""'], {}), "('%Y%m%d%H%M%S')\n", (7072, 7088), False, 'import time\n'), ((7291, 7326), 'os.path.join', 'os.path.join', (['run_dir', 'log_dir_name'], {}), '(run_dir, log_dir_name)\n', (7303, 7326), False, 'import os\n'), ((7838, 7910), 'threading.Thread', 'threading.Thread', ([], {'target': 'spawn', 'args': '(qemu_cmd, run_dir, qemu_log, qemu)'}), '(target=spawn, args=(qemu_cmd, run_dir, qemu_log, qemu))\n', (7854, 7910), False, 'import threading\n'), ((8178, 8216), 'qmp.QMP', 'QMP', (['"""localhost"""', 'qmp_port'], {'timeout': '(10)'}), "('localhost', qmp_port, timeout=10)\n", (8181, 8216), False, 'from qmp import QMP\n'), ((2353, 2375), 'queue.Queue', 'queue.Queue', ([], {'maxsize': '(1)'}), '(maxsize=1)\n', (2364, 2375), False, 'import queue\n'), ((2399, 2421), 'queue.Queue', 'queue.Queue', ([], {'maxsize': '(1)'}), '(maxsize=1)\n', (2410, 2421), False, 'import queue\n'), ((7218, 7275), 'os.path.join', 'os.path.join', (["os.environ['CODEBUILD_SRC_DIR']", '"""hpsc-bsp"""'], {}), "(os.environ['CODEBUILD_SRC_DIR'], 'hpsc-bsp')\n", (7230, 7275), False, 'import os\n'), ((7338, 7361), 'os.path.exists', 'os.path.exists', (['log_dir'], {}), '(log_dir)\n', (7352, 7361), False, 'import os\n'), ((7371, 7391), 'os.makedirs', 'os.makedirs', (['log_dir'], {}), '(log_dir)\n', (7382, 7391), False, 'import os\n'), ((7705, 7750), 'os.path.join', 'os.path.join', (['log_dir', "(qemu_log_name + '.log')"], {}), "(log_dir, qemu_log_name + '.log')\n", (7717, 7750), False, 'import os\n'), ((8892, 8980), 'threading.Thread', 'threading.Thread', ([], {'target': 'attach_port', 'args': '(port, pty_devs[port], log, chans[port])'}), '(target=attach_port, args=(port, pty_devs[port], log, chans\n [port]))\n', (8908, 8980), False, 'import threading\n'), ((8757, 8793), 'os.path.join', 'os.path.join', (['log_dir', "(port + '.log')"], {}), "(log_dir, port + '.log')\n", (8769, 8793), False, 'import os\n'), ((9969, 10009), 'os.path.join', 'os.path.join', (['log_dir', "(log_name + '.log')"], {}), "(log_dir, log_name + '.log')\n", (9981, 10009), False, 'import os\n')] |
"""
Checks that a Makefile and a Makefile.Microsoft_nmake file is present for every
implementation of the specified scheme.
"""
import os
import pytest
import helpers
import pqclean
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def test_makefile_present(implementation):
p1 = os.path.join(implementation.path(), 'Makefile')
assert os.path.isfile(p1)
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def test_microsoft_nmakefile_present(implementation):
p2 = os.path.join(implementation.path(), 'Makefile.Microsoft_nmake')
if implementation.supported_on_os(os='Windows'):
assert os.path.isfile(p2)
else:
assert not os.path.isfile(p2), "Should not have an NMake file"
if __name__ == '__main__':
import sys
pytest.main(sys.argv)
| [
"os.path.isfile",
"pqclean.Scheme.all_implementations",
"pytest.main"
] | [((426, 444), 'os.path.isfile', 'os.path.isfile', (['p1'], {}), '(p1)\n', (440, 444), False, 'import os\n'), ((239, 275), 'pqclean.Scheme.all_implementations', 'pqclean.Scheme.all_implementations', ([], {}), '()\n', (273, 275), False, 'import pqclean\n'), ((499, 535), 'pqclean.Scheme.all_implementations', 'pqclean.Scheme.all_implementations', ([], {}), '()\n', (533, 535), False, 'import pqclean\n'), ((918, 939), 'pytest.main', 'pytest.main', (['sys.argv'], {}), '(sys.argv)\n', (929, 939), False, 'import pytest\n'), ((770, 788), 'os.path.isfile', 'os.path.isfile', (['p2'], {}), '(p2)\n', (784, 788), False, 'import os\n'), ((818, 836), 'os.path.isfile', 'os.path.isfile', (['p2'], {}), '(p2)\n', (832, 836), False, 'import os\n')] |
import seamless
from seamless.core import macro_mode_on
from seamless.core import context,cell
with macro_mode_on():
ctx = context(toplevel=True)
ctx.json = cell("plain").set({"a": 1})
ctx.json.mount("/tmp/test.json", authority="cell")
ctx.compute()
| [
"seamless.core.context",
"seamless.core.cell",
"seamless.core.macro_mode_on"
] | [((101, 116), 'seamless.core.macro_mode_on', 'macro_mode_on', ([], {}), '()\n', (114, 116), False, 'from seamless.core import macro_mode_on\n'), ((128, 150), 'seamless.core.context', 'context', ([], {'toplevel': '(True)'}), '(toplevel=True)\n', (135, 150), False, 'from seamless.core import context, cell\n'), ((166, 179), 'seamless.core.cell', 'cell', (['"""plain"""'], {}), "('plain')\n", (170, 179), False, 'from seamless.core import context, cell\n')] |
# (C) Copyright 2019 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created on Oct 24, 2019
import unittest
from pandas import DataFrame, Series
from causallib.datasets import load_nhefs, load_acic16
class BaseTestDatasets(unittest.TestCase):
def ensure_return_types(self, loader):
data = loader()
self.assertTrue(isinstance(data, dict))
self.assertTrue(hasattr(data, "X"))
self.assertTrue(hasattr(data, "a"))
self.assertTrue(hasattr(data, "y"))
self.assertTrue(hasattr(data, "descriptors"))
self.assertTrue(isinstance(data.X, DataFrame))
self.assertTrue(isinstance(data.a, Series))
self.assertTrue(isinstance(data.y, Series))
def ensure_dimensions_agree(self, data):
self.assertEqual(data.X.shape[0], data.a.shape[0])
self.assertEqual(data.X.shape[0], data.y.shape[0])
class TestSmokingWeight(BaseTestDatasets):
def test_return_types(self):
self.ensure_return_types(load_nhefs)
def test_dimensions_agree(self):
with self.subTest("Test restrict=True"):
data = load_nhefs(restrict=True)
self.ensure_dimensions_agree(data)
with self.subTest("Test restrict=False"):
data = load_nhefs(restrict=False)
self.ensure_dimensions_agree(data)
def test_raw_parameter(self):
with self.subTest("Test raw=True"):
data = load_nhefs(raw=True)
self.assertTrue(isinstance(data, tuple))
self.assertEqual(len(data), 2) # 2 = data and descriptors
self.assertTrue(isinstance(data[0], DataFrame))
self.assertTrue(isinstance(data[1], Series))
with self.subTest("Test raw=False"):
# Already asserted in test_return_Xay_parameter, return_Xay=True
self.assertTrue(True)
def test_restrict_parameter(self):
with self.subTest("Test restrict=True"):
data = load_nhefs(restrict=True)
self.assertFalse(data.y.isnull().any())
with self.subTest("Test restrict=False"):
data = load_nhefs(restrict=False)
self.assertTrue(data.y.isnull().any())
class TestACIC16(BaseTestDatasets):
def test_return_types(self):
self.ensure_return_types(load_acic16)
data = load_acic16()
self.assertTrue(hasattr(data, "po"))
self.assertTrue(isinstance(data.po, DataFrame))
def test_dimensions_agree(self):
for i in range(1, 11):
with self.subTest("Test dimension for instance {}".format(i)):
data = load_acic16(i)
self.ensure_dimensions_agree(data)
self.assertEqual(data.X.shape[0], data.po.shape[0])
self.assertEqual(data.po.shape[1], 2)
def test_non_dummy_loading(self):
X_dummy = load_acic16(raw=False).X
X_factor, zymu = load_acic16(raw=True)
self.assertEqual(X_factor.shape[0], X_dummy.shape[0])
self.assertEqual(X_factor.shape[0], zymu.shape[0])
self.assertEqual(5, zymu.shape[1])
self.assertGreater(X_dummy.shape[1], X_factor.shape[1]) # Dummies has more columns
| [
"causallib.datasets.load_acic16",
"causallib.datasets.load_nhefs"
] | [((2814, 2827), 'causallib.datasets.load_acic16', 'load_acic16', ([], {}), '()\n', (2825, 2827), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((3391, 3412), 'causallib.datasets.load_acic16', 'load_acic16', ([], {'raw': '(True)'}), '(raw=True)\n', (3402, 3412), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((1614, 1639), 'causallib.datasets.load_nhefs', 'load_nhefs', ([], {'restrict': '(True)'}), '(restrict=True)\n', (1624, 1639), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((1757, 1783), 'causallib.datasets.load_nhefs', 'load_nhefs', ([], {'restrict': '(False)'}), '(restrict=False)\n', (1767, 1783), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((1929, 1949), 'causallib.datasets.load_nhefs', 'load_nhefs', ([], {'raw': '(True)'}), '(raw=True)\n', (1939, 1949), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((2456, 2481), 'causallib.datasets.load_nhefs', 'load_nhefs', ([], {'restrict': '(True)'}), '(restrict=True)\n', (2466, 2481), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((2604, 2630), 'causallib.datasets.load_nhefs', 'load_nhefs', ([], {'restrict': '(False)'}), '(restrict=False)\n', (2614, 2630), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((3341, 3363), 'causallib.datasets.load_acic16', 'load_acic16', ([], {'raw': '(False)'}), '(raw=False)\n', (3352, 3363), False, 'from causallib.datasets import load_nhefs, load_acic16\n'), ((3096, 3110), 'causallib.datasets.load_acic16', 'load_acic16', (['i'], {}), '(i)\n', (3107, 3110), False, 'from causallib.datasets import load_nhefs, load_acic16\n')] |
from email_tools.classes import DemandEmailManager
src_path = r'\\NAS-CP1B\data\MKTG\BUS_ANAL\EVAL\Forecasting\8-Peak\Stata\EMS Peak Emails - Source Files'
dest_path = r'\\NAS-CP1B\data\MKTG\BUS_ANAL\EVAL\Forecasting\8-Peak\Stata\EMS Peak Emails'
# Instantiate Demand Email Manager
dem = DemandEmailManager(src_path, dest_path)
# Load all compatible email message formats into Email Queue
dem.email_queue()
print(f'Queue contains {len(dem.queue)} emails')
# Load formatted email strings into "emails" attribute
dem.load_all_emails()
print(f'Loaded {len(dem.emails)} unique emails')
# Convert emails into TXT format and save them in specified save destinations
dem.reformat_emails()
print('Converted all emails into destination folder')
| [
"email_tools.classes.DemandEmailManager"
] | [((290, 329), 'email_tools.classes.DemandEmailManager', 'DemandEmailManager', (['src_path', 'dest_path'], {}), '(src_path, dest_path)\n', (308, 329), False, 'from email_tools.classes import DemandEmailManager\n')] |
from django import forms
from crappymemes.models import Meme, Comment
class NewCrappyMemeForm(forms.ModelForm):
# pic = forms.ImageField(label='Meme image',required=True,
# error_messages={'invalid':"Image files only"},
# widget=forms.FileInput)
class Meta:
model = Meme
fields = ['pic', 'title']
widgets = {
'title': forms.TextInput({'class': 'form-control'}),
}
class UpdateCrappyMemeForm(forms.Form):
pic = forms.ImageField(
label='Meme image',
required=False,
error_messages={'invalid':"Image files only"},
widget=forms.FileInput)
title = forms.CharField(max_length=128, required=True)
def __init__(self, *args, instance=None, **kwargs):
super(UpdateCrappyMemeForm, self).__init__(*args, **kwargs)
if instance:
self.fields['pic'].initial = instance.pic
self.fields['title'].initial = instance.title
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ['message']
| [
"django.forms.ImageField",
"django.forms.CharField",
"django.forms.TextInput"
] | [((524, 653), 'django.forms.ImageField', 'forms.ImageField', ([], {'label': '"""Meme image"""', 'required': '(False)', 'error_messages': "{'invalid': 'Image files only'}", 'widget': 'forms.FileInput'}), "(label='Meme image', required=False, error_messages={\n 'invalid': 'Image files only'}, widget=forms.FileInput)\n", (540, 653), False, 'from django import forms\n'), ((693, 739), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(128)', 'required': '(True)'}), '(max_length=128, required=True)\n', (708, 739), False, 'from django import forms\n'), ((418, 460), 'django.forms.TextInput', 'forms.TextInput', (["{'class': 'form-control'}"], {}), "({'class': 'form-control'})\n", (433, 460), False, 'from django import forms\n')] |
#!/usr/bin/python
import xmlrpclib, pickle, sys, socket
rpc = xmlrpclib.Server("http://127.0.0.1:10000/xm.rem")
socket.setdefaulttimeout(15)
f = open(sys.argv[1], 'r')
data = pickle.load(f)
f.close()
for entry in data:
rpc.localproxy("RpcDhtProxy.Register", entry["Key"], entry["Value"], entry["Ttl"])
| [
"xmlrpclib.Server",
"pickle.load",
"socket.setdefaulttimeout"
] | [((63, 112), 'xmlrpclib.Server', 'xmlrpclib.Server', (['"""http://127.0.0.1:10000/xm.rem"""'], {}), "('http://127.0.0.1:10000/xm.rem')\n", (79, 112), False, 'import xmlrpclib, pickle, sys, socket\n'), ((113, 141), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(15)'], {}), '(15)\n', (137, 141), False, 'import xmlrpclib, pickle, sys, socket\n'), ((177, 191), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (188, 191), False, 'import xmlrpclib, pickle, sys, socket\n')] |
import sys
import nltk
from nltk.parse import DependencyGraph
from nltk.parse.transitionparser import TransitionParser, Transition, Configuration
data = []
parse = ""
for l in open(sys.argv[1]):
if not l.strip():
data.append(parse)
parse = ""
else:
t = l.strip().split()
# print " ".join([t[1], t[3], t[6], t[7].upper()])
parse += " ".join([t[1], t[3], t[6], t[7].upper()]) + "\n"
# parse += " ".join([t[1], t[4], t[8], t[10].upper()]) + "\n"
data.append(parse)
d = [ DependencyGraph(q) for q in data]
class MyTransitionParser(TransitionParser):
def _create_training_examples_arc_std(self, depgraphs, input_file):
"""
ADAPTED FROM NTLK
Create the training example in the libsvm format and write it to the input_file.
Reference : Page 32, Chapter 3. Dependency Parsing by <NAME>, <NAME> and <NAME> (2009)
"""
operation = Transition(self.ARC_STANDARD)
count_proj = 0
training_seq = []
for depgraph in depgraphs[:-1]:
if not self._is_projective(depgraph):
print >>sys.stderr, "fail non proj"
continue
count_proj += 1
conf = Configuration(depgraph)
print >>input_file, depgraph.nodes[conf.buffer[0]]["word"],
while len(conf.buffer) > 0:
b0 = conf.buffer[0]
features = conf.extract_features()
binary_features = self._convert_to_binary_features(features)
if len(conf.stack) > 0:
s0 = conf.stack[len(conf.stack) - 1]
# Left-arc operation
rel = self._get_dep_relation(b0, s0, depgraph)
if rel is not None:
key = Transition.LEFT_ARC + ':' + rel
print >>input_file, "@L_"+rel,
# self._write_to_file(key, binary_features, input_file)
operation.left_arc(conf, rel)
training_seq.append(key)
continue
# Right-arc operation
rel = self._get_dep_relation(s0, b0, depgraph)
if rel is not None:
precondition = True
# Get the max-index of buffer
maxID = conf._max_address
for w in range(maxID + 1):
if w != b0:
relw = self._get_dep_relation(b0, w, depgraph)
if relw is not None:
if (b0, relw, w) not in conf.arcs:
precondition = False
if precondition:
if rel == "ROOT":
print >>input_file, "ROOT",
print >>input_file, "@R_"+rel,
# self._write_to_file(
# key,
# binary_features,
# input_file)
operation.right_arc(conf, rel)
training_seq.append(key)
continue
# Shift operation as the default
key = Transition.SHIFT
# print conf.buffer
if len(conf.buffer) > 1:
print >>input_file, depgraph.nodes[conf.buffer[1]]["word"],
# self._write_to_file(key, binary_features, input_file)
operation.shift(conf)
training_seq.append(key)
print >>input_file, ""
print(" Number of training examples : " + str(len(depgraphs)))
print(" Number of valid (projective) examples : " + str(count_proj))
return training_seq
MyTransitionParser(TransitionParser.ARC_STANDARD)._create_training_examples_arc_std(d, open("/tmp/targetparses.txt", "w"))
#java -cp stanford-parser-3.3.0.jar edu.stanford.nlp.trees.EnglishGrammaticalStructure -treeFile ~/Projects/group/data/wsj/test.txt -conllx -basic > test.conll.txt
| [
"nltk.parse.transitionparser.Transition",
"nltk.parse.transitionparser.Configuration",
"nltk.parse.DependencyGraph"
] | [((533, 551), 'nltk.parse.DependencyGraph', 'DependencyGraph', (['q'], {}), '(q)\n', (548, 551), False, 'from nltk.parse import DependencyGraph\n'), ((941, 970), 'nltk.parse.transitionparser.Transition', 'Transition', (['self.ARC_STANDARD'], {}), '(self.ARC_STANDARD)\n', (951, 970), False, 'from nltk.parse.transitionparser import TransitionParser, Transition, Configuration\n'), ((1249, 1272), 'nltk.parse.transitionparser.Configuration', 'Configuration', (['depgraph'], {}), '(depgraph)\n', (1262, 1272), False, 'from nltk.parse.transitionparser import TransitionParser, Transition, Configuration\n')] |
from __future__ import print_function
import datetime
import sys
import time
import azure.batch.batch_service_client as batch
import azure.batch.models as batchmodels
import os
sys.path.append('.')
import common.helpers # noqa
def create_pool_and_wait_for_vms(
batch_service_client, pool_id,
publisher, offer, sku, vm_size,
target_dedicated_nodes,
command_line=None, resource_files=None,
elevation_level=batchmodels.ElevationLevel.admin,
enable_inter_node_communication=True):
"""
Creates a pool of compute nodes with the specified OS settings.
:param batch_service_client: A Batch service client.
:type batch_service_client: `azure.batch.BatchServiceClient`
:param str pool_id: An ID for the new pool.
:param str publisher: Marketplace Image publisher
:param str offer: Marketplace Image offer
:param str sku: Marketplace Image sku
:param str vm_size: The size of VM, eg 'Standard_A1' or 'Standard_D1' per
https://azure.microsoft.com/en-us/documentation/articles/
virtual-machines-windows-sizes/
:param int target_dedicated_nodes: Number of target VMs for the pool
:param str command_line: command line for the pool's start task.
:param list resource_files: A collection of resource files for the pool's
start task.
:param str elevation_level: Elevation level the task will be run as;
either 'admin' or 'nonadmin'.
"""
print('Creating pool [{}]...'.format(pool_id))
sku_to_use, image_ref_to_use = \
common.helpers.select_latest_verified_vm_image_with_node_agent_sku(
batch_service_client, publisher, offer, sku)
user = batchmodels.AutoUserSpecification(
scope=batchmodels.AutoUserScope.pool,
elevation_level=elevation_level)
new_pool = batch.models.PoolAddParameter(
id=pool_id,
virtual_machine_configuration=batchmodels.VirtualMachineConfiguration(
image_reference=image_ref_to_use,
node_agent_sku_id=sku_to_use),
vm_size=vm_size,
target_dedicated_nodes=target_dedicated_nodes,
resize_timeout=datetime.timedelta(minutes=15),
enable_inter_node_communication=enable_inter_node_communication,
max_tasks_per_node=1,
start_task=batch.models.StartTask(
command_line=command_line,
user_identity=batchmodels.UserIdentity(auto_user=user),
wait_for_success=False,
resource_files=resource_files) if command_line else None,
)
common.helpers.create_pool_if_not_exist(batch_service_client, new_pool)
# because we want all nodes to be available before any tasks are assigned
# to the pool, here we will wait for all compute nodes to reach idle
nodes = common.helpers.wait_for_all_nodes_state(
batch_service_client, new_pool,
frozenset(
(batchmodels.ComputeNodeState.start_task_failed,
batchmodels.ComputeNodeState.unusable,
batchmodels.ComputeNodeState.idle)
)
)
# ensure all node are idle
if any(node.state != batchmodels.ComputeNodeState.idle for node in nodes):
raise RuntimeError('node(s) of pool {} not in idle state'.format(
pool_id))
def add_task(
batch_service_client, job_id, task_id, num_instances,
application_cmdline, input_files, elevation_level,
output_file_names, output_container_sas,
coordination_cmdline, common_files):
"""
Adds a task for each input file in the collection to the specified job.
:param batch_service_client: A Batch service client.
:type batch_service_client: `azure.batch.BatchServiceClient`
:param str job_id: The ID of the job to which to add the task.
:param str task_id: The ID of the task to be added.
:param str application_cmdline: The application commandline for the task.
:param list input_files: A collection of input files.
:param elevation_level: Elevation level used to run the task; either
'admin' or 'nonadmin'.
:type elevation_level: `azure.batch.models.ElevationLevel`
:param int num_instances: Number of instances for the task
:param str coordination_cmdline: The application commandline for the task.
:param list common_files: A collection of common input files.
"""
print('Adding {} task to job [{}]...'.format(task_id, job_id))
multi_instance_settings = None
if coordination_cmdline or (num_instances and num_instances > 1):
multi_instance_settings = batchmodels.MultiInstanceSettings(
number_of_instances=num_instances,
coordination_command_line=coordination_cmdline,
common_resource_files=common_files)
user = batchmodels.AutoUserSpecification(
scope=batchmodels.AutoUserScope.pool,
elevation_level=elevation_level)
output_file = batchmodels.OutputFile(
file_pattern=output_file_names,
destination=batchmodels.OutputFileDestination(
container=batchmodels.OutputFileBlobContainerDestination(
container_url=output_container_sas)),
upload_options=batchmodels.OutputFileUploadOptions(
upload_condition=batchmodels.
OutputFileUploadCondition.task_completion))
task = batchmodels.TaskAddParameter(
id=task_id,
command_line=application_cmdline,
user_identity=batchmodels.UserIdentity(auto_user=user),
resource_files=input_files,
multi_instance_settings=multi_instance_settings,
output_files=[output_file])
batch_service_client.task.add(job_id, task)
def wait_for_subtasks_to_complete(
batch_service_client, job_id, task_id, timeout):
"""
Returns when all subtasks in the specified task reach the Completed state.
:param batch_service_client: A Batch service client.
:type batch_service_client: `azure.batch.BatchServiceClient`
:param str job_id: The id of the job whose tasks should be to monitored.
:param str task_id: The id of the task whose subtasks should be monitored.
:param timedelta timeout: The duration to wait for task completion. If all
tasks in the specified job do not reach Completed state within this time
period, an exception will be raised.
"""
timeout_expiration = datetime.datetime.now() + timeout
print("Monitoring all tasks for 'Completed' state, timeout in {}..."
.format(timeout), end='')
while datetime.datetime.now() < timeout_expiration:
print('.', end='')
sys.stdout.flush()
subtasks = batch_service_client.task.list_subtasks(job_id, task_id)
incomplete_subtasks = [subtask for subtask in subtasks.value if
subtask.state !=
batchmodels.TaskState.completed]
if not incomplete_subtasks:
print("Subtask complete!")
return True
else:
time.sleep(10)
print("Subtasks did not reach completed state within timeout period!")
raise RuntimeError(
"ERROR: Subtasks did not reach 'Completed' state within "
"timeout period of " + str(timeout))
def wait_for_tasks_to_complete(batch_service_client, job_id, timeout):
"""
Returns when all tasks in the specified job reach the Completed state.
:param batch_service_client: A Batch service client.
:type batch_service_client: `azure.batch.BatchServiceClient`
:param str job_id: The id of the job whose tasks should be to monitored.
:param timedelta timeout: The duration to wait for task completion. If all
tasks in the specified job do not reach Completed state within this time
period, an exception will be raised.
"""
timeout_expiration = datetime.datetime.now() + timeout
print("Monitoring all tasks for 'Completed' state, timeout in {}..."
.format(timeout), end='')
while datetime.datetime.now() < timeout_expiration:
print('.', end='')
sys.stdout.flush()
tasks = batch_service_client.task.list(job_id)
for task in tasks:
if task.state == batchmodels.TaskState.completed:
# Pause execution until subtasks reach Completed state.
wait_for_subtasks_to_complete(batch_service_client, job_id,
task.id,
datetime.timedelta(minutes=10))
incomplete_tasks = [task for task in tasks if
task.state != batchmodels.TaskState.completed]
if not incomplete_tasks:
print("Taskss complete!")
return True
else:
time.sleep(10)
print("Tasks did not reach completed state within timeout period!")
raise RuntimeError("ERROR: Tasks did not reach 'Completed' state within "
"timeout period of " + str(timeout))
| [
"azure.batch.models.UserIdentity",
"azure.batch.models.OutputFileUploadOptions",
"azure.batch.models.AutoUserSpecification",
"time.sleep",
"datetime.timedelta",
"azure.batch.models.MultiInstanceSettings",
"datetime.datetime.now",
"azure.batch.models.OutputFileBlobContainerDestination",
"azure.batch.models.VirtualMachineConfiguration",
"sys.stdout.flush",
"sys.path.append"
] | [((186, 206), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (201, 206), False, 'import sys\n'), ((1733, 1841), 'azure.batch.models.AutoUserSpecification', 'batchmodels.AutoUserSpecification', ([], {'scope': 'batchmodels.AutoUserScope.pool', 'elevation_level': 'elevation_level'}), '(scope=batchmodels.AutoUserScope.pool,\n elevation_level=elevation_level)\n', (1766, 1841), True, 'import azure.batch.models as batchmodels\n'), ((4869, 4977), 'azure.batch.models.AutoUserSpecification', 'batchmodels.AutoUserSpecification', ([], {'scope': 'batchmodels.AutoUserScope.pool', 'elevation_level': 'elevation_level'}), '(scope=batchmodels.AutoUserScope.pool,\n elevation_level=elevation_level)\n', (4902, 4977), True, 'import azure.batch.models as batchmodels\n'), ((4664, 4825), 'azure.batch.models.MultiInstanceSettings', 'batchmodels.MultiInstanceSettings', ([], {'number_of_instances': 'num_instances', 'coordination_command_line': 'coordination_cmdline', 'common_resource_files': 'common_files'}), '(number_of_instances=num_instances,\n coordination_command_line=coordination_cmdline, common_resource_files=\n common_files)\n', (4697, 4825), True, 'import azure.batch.models as batchmodels\n'), ((6478, 6501), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6499, 6501), False, 'import datetime\n'), ((6638, 6661), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6659, 6661), False, 'import datetime\n'), ((6721, 6739), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (6737, 6739), False, 'import sys\n'), ((7969, 7992), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (7990, 7992), False, 'import datetime\n'), ((8129, 8152), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (8150, 8152), False, 'import datetime\n'), ((8212, 8230), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (8228, 8230), False, 'import sys\n'), ((1966, 2073), 'azure.batch.models.VirtualMachineConfiguration', 'batchmodels.VirtualMachineConfiguration', ([], {'image_reference': 'image_ref_to_use', 'node_agent_sku_id': 'sku_to_use'}), '(image_reference=image_ref_to_use,\n node_agent_sku_id=sku_to_use)\n', (2005, 2073), True, 'import azure.batch.models as batchmodels\n'), ((2204, 2234), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(15)'}), '(minutes=15)\n', (2222, 2234), False, 'import datetime\n'), ((5283, 5395), 'azure.batch.models.OutputFileUploadOptions', 'batchmodels.OutputFileUploadOptions', ([], {'upload_condition': 'batchmodels.OutputFileUploadCondition.task_completion'}), '(upload_condition=batchmodels.\n OutputFileUploadCondition.task_completion)\n', (5318, 5395), True, 'import azure.batch.models as batchmodels\n'), ((5549, 5589), 'azure.batch.models.UserIdentity', 'batchmodels.UserIdentity', ([], {'auto_user': 'user'}), '(auto_user=user)\n', (5573, 5589), True, 'import azure.batch.models as batchmodels\n'), ((7138, 7152), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (7148, 7152), False, 'import time\n'), ((8924, 8938), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (8934, 8938), False, 'import time\n'), ((5156, 5243), 'azure.batch.models.OutputFileBlobContainerDestination', 'batchmodels.OutputFileBlobContainerDestination', ([], {'container_url': 'output_container_sas'}), '(container_url=\n output_container_sas)\n', (5202, 5243), True, 'import azure.batch.models as batchmodels\n'), ((8633, 8663), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (8651, 8663), False, 'import datetime\n'), ((2452, 2492), 'azure.batch.models.UserIdentity', 'batchmodels.UserIdentity', ([], {'auto_user': 'user'}), '(auto_user=user)\n', (2476, 2492), True, 'import azure.batch.models as batchmodels\n')] |
#! /usr/bin/python
import argparse
import numpy as np
from algorithms import Solver
class Planner(Solver):
"""wrapper class for solving algorithms"""
def __init__(self, mdp, algorithm):
self.mdp_path = mdp
self.algorithm = algorithm
mdp = self.getMDP(printMDP = False)
super().__init__(mdp["S"], mdp["A"], mdp["T"],
mdp["R"], mdp["gamma"],)# mdp["mdptype"], mdp["end"])
def printArgs(self):
print(self.mdp_path, self.algo)
def getMDP(self, printMDP = False):
f = open(self.mdp_path, 'r')
#get number of states
S = int(f.readline().split()[-1])
A = int(f.readline().split()[-1])
#get start, end
start = int(f.readline().split()[-1])
end = np.array(f.readline().split()[1:], dtype = "int")
#get transition probability measure and reward function
T = np.zeros((S,A,S), dtype="float64")
R = np.zeros_like(T)
line = []
for i in range(S*A*S):
line = f.readline().split()
if(line[0]!="transition"):
break
s1, ac, s2, = int(line[1]), int(line[2]), int(line[3])
R[s1, ac, s2] = float(line[-2])
T[s1, ac, s2] = float(line[-1])
#get discount
mdptype = line[-1]
gamma = float(f.readline().split()[-1])
#print if required
if(printMDP):
print("type:",mdptype)
print("number of states:", S)
print("number of actions:", A)
print("start:", start)
print("end states:", end)
print("discount:", gamma)
#print("","","",sep = "\n")
print("T shape:", T.shape)
#print("","","",sep = "\n")
print("R shape:",R.shape)
f.close()
mdp = {"S":S, "A":A, "T":T, "R":R, "gamma":gamma,
"start":start, "end":end, "mdptype":mdptype }
return mdp
def solve(self, error = 1e-12):
if(self.algorithm=="vi"):
return self.valueIter(error=error)
elif(self.algorithm=="lp"):
return self.linearProgram(error=error)
elif(self.algorithm=="hpi"):
return self.policyIter(error=error)
else:
raise Exception("please enter valid solver algorithm")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--mdp", type = str, default = "", help = "Path to the mdp")
parser.add_argument("--algorithm", type = str, default = "", help="Name of solving algorithm."+
" Must be one of vi(value iteration), hpi(Howard's policy iteration), or lp(linear programming)")
args = parser.parse_args()
if args.mdp=="":
raise Exception("please provide valid path for mdp")
elif args.algorithm=="":
raise Exception("please provide valid solver algorithm")
planner = Planner(args.mdp, args.algorithm)
V, pi= planner.solve()
for i in range(len(V)):
print(V[i], pi[i]) | [
"numpy.zeros",
"numpy.zeros_like",
"argparse.ArgumentParser"
] | [((1972, 1997), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1995, 1997), False, 'import argparse\n'), ((787, 823), 'numpy.zeros', 'np.zeros', (['(S, A, S)'], {'dtype': '"""float64"""'}), "((S, A, S), dtype='float64')\n", (795, 823), True, 'import numpy as np\n'), ((829, 845), 'numpy.zeros_like', 'np.zeros_like', (['T'], {}), '(T)\n', (842, 845), True, 'import numpy as np\n')] |
import re
"""
1、少于三位的数字是可以匹配的
2、多于3位数字产生逗号
3、不会匹配不,号不是出现在第三位后面的
4、不会匹配多于3位不会产生逗号的
"""
# regex=re.compile(r"((\d*,+\d{3})+)?|\d{1,3}")
# result=regex.search('1234')
# print(result.group())
"""
1、姓氏和名字都是一个首字母大写的单词
2、中间有一个空格
3、不匹配非单词字符
4、不匹配首字母没有大写,没有名字,没有姓氏的单词
"""
# regex=re.compile(r"([A-Z][a-zA-Z]+ )([A-Z][A-Za-z]+)")
# result=regex.search('Abc ABD')
# print(result)
"""
1、第一个单词是Alice、Bob、Carol中的一个
2、第二个单词是eats、pets、throws中的一个
3、第三个单词是apples、cats、baseballs中的一个
4、表达式不区分大小写
"""
regex=re.compile(r"(Alice|Bob|Carol) (eats|pets|throws) (apples|cats|baseballs)\.",re.I)
result=regex.search('Alice eats apples.')
result2=regex.search('Bob pets cats.')
result3=regex.search('Carol throws baseballs.')
result4=regex.search('AlicE throws 7 baseballs.')
print(result)
print(result4) | [
"re.compile"
] | [((492, 579), 're.compile', 're.compile', (['"""(Alice|Bob|Carol) (eats|pets|throws) (apples|cats|baseballs)\\\\."""', 're.I'], {}), "('(Alice|Bob|Carol) (eats|pets|throws) (apples|cats|baseballs)\\\\.',\n re.I)\n", (502, 579), False, 'import re\n')] |
import os
import sys
import subprocess
CompilerPath = r'cl.exe'
Definitions = [
r'UNICODE',
r'_UNICODE',
r'O2',
r'NDEBUG',
r'USE_NAMED_MUTEX'
]
IncludesPaths = []
LibPaths = []
def AddIncludePath(path):
return r'/I "{}" '.format(path)
def AddLibPath(path):
return r'/LIBPATH:"{}" '.format(path)
def AddDefinition(d):
return r'/D "{}" '.format(d)
def BuildCommand(programPath):
# Input
command = r'"{}" .\forward\main.cpp /MD '.format(CompilerPath)
# Definitions
for d in Definitions:
command += AddDefinition(d)
command += AddDefinition(r'PROGRAM_PATH=LR\"({})\"'.format(programPath))
# Include Paths
for p in IncludesPaths:
command += AddIncludePath(p)
# Output
command += r'/Fe: {} '.format(os.path.basename(programPath))
# Lib Paths
command += r'/link /OPT:REF /OPT:ICF '
for p in LibPaths:
command += AddLibPath(p)
return command
if __name__=='__main__':
if len(sys.argv) < 2:
raise RuntimeError("Program path not set.")
programPath = sys.argv[1]
if not os.path.exists(sys.argv[1]):
raise RuntimeError("Program {} do not exist.".format(programPath))
if not os.path.isfile(sys.argv[1]):
raise RuntimeError("{} is not a program file.".format(programPath))
command = BuildCommand(programPath)
# print(command)
subprocess.run(command)
if os.path.exists("main.obj"):
os.remove("main.obj")
| [
"os.path.exists",
"subprocess.run",
"os.path.isfile",
"os.path.basename",
"os.remove"
] | [((1381, 1404), 'subprocess.run', 'subprocess.run', (['command'], {}), '(command)\n', (1395, 1404), False, 'import subprocess\n'), ((1412, 1438), 'os.path.exists', 'os.path.exists', (['"""main.obj"""'], {}), "('main.obj')\n", (1426, 1438), False, 'import os\n'), ((782, 811), 'os.path.basename', 'os.path.basename', (['programPath'], {}), '(programPath)\n', (798, 811), False, 'import os\n'), ((1094, 1121), 'os.path.exists', 'os.path.exists', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1108, 1121), False, 'import os\n'), ((1209, 1236), 'os.path.isfile', 'os.path.isfile', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1223, 1236), False, 'import os\n'), ((1448, 1469), 'os.remove', 'os.remove', (['"""main.obj"""'], {}), "('main.obj')\n", (1457, 1469), False, 'import os\n')] |
import numpy as np
def solver(eps, Nx, method='centered'):
"""
Solver for the two point boundary value problem u'=eps*u'',
u(0)=0, u(1)=1.
"""
x = np.linspace(0, 1, Nx+1) # Mesh points in space
# Make sure dx and dt are compatible with x and t
dx = x[1] - x[0]
u = np.zeros(Nx+1)
# Representation of sparse matrix and right-hand side
diagonal = np.zeros(Nx+1)
lower = np.zeros(Nx)
upper = np.zeros(Nx)
b = np.zeros(Nx+1)
# Precompute sparse matrix (scipy format)
if method == 'centered':
diagonal[:] = 2*eps/dx**2
lower[:] = -1/dx - eps/dx**2
upper[:] = 1/dx - eps/dx**2
elif method == 'upwind':
diagonal[:] = 1/dx + 2*eps/dx**2
lower[:] = 1/dx - eps/dx**2
upper[:] = - eps/dx**2
# Insert boundary conditions
upper[0] = 0
lower[-1] = 0
diagonal[0] = diagonal[-1] = 1
b[-1] = 1.0
# Set up sparse matrix and solve
diags = [0, -1, 1]
import scipy.sparse
import scipy.sparse.linalg
A = scipy.sparse.diags(
diagonals=[diagonal, lower, upper],
offsets=[0, -1, 1], shape=(Nx+1, Nx+1),
format='csr')
u[:] = scipy.sparse.linalg.spsolve(A, b)
return u, x
def u_exact(x, eps):
return (np.exp(x/eps)-1)/(np.exp(1.0/eps)-1)
def demo(eps = 0.01, method='centered'):
import matplotlib.pyplot as plt
x_fine = np.linspace(0, 1, 2001)
for Nx in (20, 40):
u, x = solver(eps, Nx, method=method)
plt.figure()
plt.plot(x, u, 'o-')
plt.hold('on')
plt.plot(x_fine, u_exact(x_fine, eps), 'k--')
plt.legend(['$N_x=%d$' % Nx, 'exact'], loc='upper left')
plt.title(method + ' difference scheme, ' + r'$\epsilon=%g$' % eps)
plt.xlabel('x'); plt.ylabel('u')
stem = 'tmp1_%s_%d_%s' % (method, Nx, str(eps).replace('.','_'))
plt.savefig(stem + '.png'); plt.savefig(stem + '.pdf')
plt.show()
if __name__ == '__main__':
demo(eps=0.1, method='upwind')
demo(eps=0.01, method='upwind')
#demo(eps=0.1, method='centered')
#demo(eps=0.01, mehtod='centered')
| [
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.exp",
"numpy.linspace",
"matplotlib.pyplot.figure",
"numpy.zeros",
"matplotlib.pyplot.title",
"matplotlib.pyplot.hold",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((168, 193), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(Nx + 1)'], {}), '(0, 1, Nx + 1)\n', (179, 193), True, 'import numpy as np\n'), ((306, 322), 'numpy.zeros', 'np.zeros', (['(Nx + 1)'], {}), '(Nx + 1)\n', (314, 322), True, 'import numpy as np\n'), ((395, 411), 'numpy.zeros', 'np.zeros', (['(Nx + 1)'], {}), '(Nx + 1)\n', (403, 411), True, 'import numpy as np\n'), ((425, 437), 'numpy.zeros', 'np.zeros', (['Nx'], {}), '(Nx)\n', (433, 437), True, 'import numpy as np\n'), ((453, 465), 'numpy.zeros', 'np.zeros', (['Nx'], {}), '(Nx)\n', (461, 465), True, 'import numpy as np\n'), ((481, 497), 'numpy.zeros', 'np.zeros', (['(Nx + 1)'], {}), '(Nx + 1)\n', (489, 497), True, 'import numpy as np\n'), ((1419, 1442), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(2001)'], {}), '(0, 1, 2001)\n', (1430, 1442), True, 'import numpy as np\n'), ((1963, 1973), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1971, 1973), True, 'import matplotlib.pyplot as plt\n'), ((1521, 1533), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1531, 1533), True, 'import matplotlib.pyplot as plt\n'), ((1542, 1562), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'u', '"""o-"""'], {}), "(x, u, 'o-')\n", (1550, 1562), True, 'import matplotlib.pyplot as plt\n'), ((1571, 1585), 'matplotlib.pyplot.hold', 'plt.hold', (['"""on"""'], {}), "('on')\n", (1579, 1585), True, 'import matplotlib.pyplot as plt\n'), ((1648, 1704), 'matplotlib.pyplot.legend', 'plt.legend', (["['$N_x=%d$' % Nx, 'exact']"], {'loc': '"""upper left"""'}), "(['$N_x=%d$' % Nx, 'exact'], loc='upper left')\n", (1658, 1704), True, 'import matplotlib.pyplot as plt\n'), ((1713, 1780), 'matplotlib.pyplot.title', 'plt.title', (["(method + ' difference scheme, ' + '$\\\\epsilon=%g$' % eps)"], {}), "(method + ' difference scheme, ' + '$\\\\epsilon=%g$' % eps)\n", (1722, 1780), True, 'import matplotlib.pyplot as plt\n'), ((1789, 1804), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (1799, 1804), True, 'import matplotlib.pyplot as plt\n'), ((1807, 1822), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""u"""'], {}), "('u')\n", (1817, 1822), True, 'import matplotlib.pyplot as plt\n'), ((1904, 1930), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(stem + '.png')"], {}), "(stem + '.png')\n", (1915, 1930), True, 'import matplotlib.pyplot as plt\n'), ((1932, 1958), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(stem + '.pdf')"], {}), "(stem + '.pdf')\n", (1943, 1958), True, 'import matplotlib.pyplot as plt\n'), ((1291, 1306), 'numpy.exp', 'np.exp', (['(x / eps)'], {}), '(x / eps)\n', (1297, 1306), True, 'import numpy as np\n'), ((1309, 1326), 'numpy.exp', 'np.exp', (['(1.0 / eps)'], {}), '(1.0 / eps)\n', (1315, 1326), True, 'import numpy as np\n')] |
import time
def get(request, response):
type = request.attribute("type")
reqTime = request.dateHeader("If-Modified-Since")
# pretend it was modified 5 seconds ago
modTime = time.time()
if type == "stale":
# pretend it was just modified
modTime = modTime - 1
else:
# pretend it was modified over a minute ago
modTime = modTime - 62
if reqTime < modTime:
# the client's copy is out of date
response.setDateHeader("Last-Modified", modTime)
response.write("Downloaded copy")
else:
response.setStatus(304)
| [
"time.time"
] | [((197, 208), 'time.time', 'time.time', ([], {}), '()\n', (206, 208), False, 'import time\n')] |
import json
from flask.testing import FlaskClient
from urllib.parse import urlencode
from werkzeug.routing import BaseConverter
class RegexConverter(BaseConverter):
# http://werkzeug.pocoo.org/docs/routing/#custom-converters
def __init__(self, url_map, *items):
super().__init__(url_map)
self.regex = items[0]
class AWSTestHelper(FlaskClient):
def action_data(self, action_name, **kwargs):
"""
Method calls resource with action_name and returns data of response.
"""
opts = {"Action": action_name}
opts.update(kwargs)
res = self.get(
"/?{0}".format(urlencode(opts)),
headers={
"Host": "{0}.us-east-1.amazonaws.com".format(self.application.service)
},
)
return res.data.decode("utf-8")
def action_json(self, action_name, **kwargs):
"""
Method calls resource with action_name and returns object obtained via
deserialization of output.
"""
return json.loads(self.action_data(action_name, **kwargs))
| [
"urllib.parse.urlencode"
] | [((643, 658), 'urllib.parse.urlencode', 'urlencode', (['opts'], {}), '(opts)\n', (652, 658), False, 'from urllib.parse import urlencode\n')] |
from flask import Blueprint, make_response
from app.database.db_connection import exercise_collection as exercisesdb
from app.utils.msg_tools import ResponseTools as msg_tools
###########################################################################
# blueprint for flask
exercises = Blueprint('exercises', __name__, url_prefix='/exercises')
###########################################################################
@exercises.route('/exercisesForMuscle/<muscle>')
def get_exercises(muscle):
exercise = exercisesdb.find_one({'muscle': muscle})
return msg_tools.response_success(objects=exercise['exercises'])
@exercises.route('/getMuscles')
def get_muscles():
muscles = exercisesdb.find()
musclelist = []
for document in muscles:
musclelist.append(document["muscle"])
return msg_tools.response_success(objects=musclelist)
| [
"app.database.db_connection.exercise_collection.find",
"flask.Blueprint",
"app.database.db_connection.exercise_collection.find_one",
"app.utils.msg_tools.ResponseTools.response_success"
] | [((287, 344), 'flask.Blueprint', 'Blueprint', (['"""exercises"""', '__name__'], {'url_prefix': '"""/exercises"""'}), "('exercises', __name__, url_prefix='/exercises')\n", (296, 344), False, 'from flask import Blueprint, make_response\n'), ((515, 555), 'app.database.db_connection.exercise_collection.find_one', 'exercisesdb.find_one', (["{'muscle': muscle}"], {}), "({'muscle': muscle})\n", (535, 555), True, 'from app.database.db_connection import exercise_collection as exercisesdb\n'), ((567, 624), 'app.utils.msg_tools.ResponseTools.response_success', 'msg_tools.response_success', ([], {'objects': "exercise['exercises']"}), "(objects=exercise['exercises'])\n", (593, 624), True, 'from app.utils.msg_tools import ResponseTools as msg_tools\n'), ((692, 710), 'app.database.db_connection.exercise_collection.find', 'exercisesdb.find', ([], {}), '()\n', (708, 710), True, 'from app.database.db_connection import exercise_collection as exercisesdb\n'), ((818, 864), 'app.utils.msg_tools.ResponseTools.response_success', 'msg_tools.response_success', ([], {'objects': 'musclelist'}), '(objects=musclelist)\n', (844, 864), True, 'from app.utils.msg_tools import ResponseTools as msg_tools\n')] |
from shorty.service import Shorty
import json
from configs.config import *
class MockResponse:
def __init__(self, status_code: int = None, url: str = None, text: str = None,
provider: str = None, message: str = None):
self.status_code = status_code
self.url = url
self.text = text
self.provider = provider
self.message = message
self.attributes = {"url": self.url, "link": self.text, "message": self.message}
self.attributes_str = json.dumps(self.attributes)
def json(self):
return json.loads(self.attributes_str)
def test_choose_and_init_bitly():
shorty_instance = Shorty(url="https://www.example.com", provider="bitly")
shorty_instance._choose_and_init_provider()
assert shorty_instance.header == {GeneralStrs.AUTHORIZATION: BitLy.TOKEN, GeneralStrs.CONTENT_TYPE: BitLy.CONTENT_TYPE}
assert shorty_instance.URL_method == BitLy.API_URL
def test_choose_and_init_tinyurl():
shorty_instance = Shorty(url="https://www.example.com",
provider="tinyurl")
shorty_instance._choose_and_init_provider()
assert shorty_instance.URL_method == TinyUrl.API_URL
def test_choose_and_init_unknown():
shorty_instance = Shorty(url="https://www.example.com",
provider="dummyprovider")
shorty_instance._choose_and_init_provider()
assert shorty_instance.provider == GeneralStrs.UNKNOWN
def test_alter_provider():
shorty_instance = Shorty(url="https://www.example.com")
shorty_instance._alter_provider()
assert shorty_instance.URL_method == TinyUrl.API_URL
def test_make_requests_unknown():
shorty_instance = Shorty(url="https://www.example.com",
provider="dummyprovider")
response = shorty_instance._make_requests()
assert response.get_status_code() == 404
assert response.get_message() == Messages.WRONG_PROVIDER_MSG
def test_make_requests_bitly(mocker):
mocker.patch('shorty.handler.requests.post',
return_value=MockResponse(status_code=200, url="https://www.example.com",
text="https://bitly.com/peakb",
provider="https://api-ssl.bitly.com/v4/shorten"))
shorty_instance = Shorty(url="https://www.example.com",
provider="bitly")
response = shorty_instance._make_requests()
assert response.get_status_code() == 200
assert response.get_url() == "https://www.example.com"
assert response.get_link() == "https://bitly.com/peakb"
assert response.get_provider() == "https://api-ssl.bitly.com/v4/shorten"
def test_make_requests_tinyurl(mocker):
mocker.patch('shorty.handler.requests.post',
return_value=MockResponse(status_code=200, url="https://example.com",
text="https://tinyurl.com/peakb",
provider="http://tinyurl.com/api-create.php"))
shorty_instance = Shorty(url="https://www.example.com",
provider="tinyurl")
response = shorty_instance._make_requests()
assert response.get_status_code() == 200
assert response.get_url() == "https://www.example.com"
assert response.get_link() == "https://tinyurl.com/peakb"
assert response.get_provider() == "http://tinyurl.com/api-create.php"
def test_shorten_success(mocker):
mocker.patch('shorty.handler.requests.post',
return_value=MockResponse(status_code=201, url="https://example.com",
text="https://tinyurl.com/peakb",
provider="http://tinyurl.com/api-create.php"))
shorty_instance = Shorty(url="https://www.example.com",
provider="tinyurl")
response = shorty_instance.shorten()
assert response.json['url'] == "https://www.example.com"
assert response.json['link'] == "https://tinyurl.com/peakb"
def test_shorten_fail(mocker):
mocker.patch('shorty.handler.requests.post',
return_value=MockResponse(status_code=400, url="https://example.com",
text=None,
provider="http://tinyurl.com/api-create.php"))
shorty_instance = Shorty(url="https://www.example.com",
provider="tinyurl")
response = shorty_instance.shorten()
assert response.json['status_code'] == 400
assert response.json['message'] == "Bad Request"
def test_shorten_fallback_fail(mocker):
mocker.patch('shorty.handler.requests.post',
return_value=MockResponse(status_code=400, url="https://example.com",
text=None, provider=None))
shorty_instance = Shorty(url="https://www.example.com")
response = shorty_instance.shorten()
assert response.json['status_code'] == 400
assert response.json['message'] == "Bad Request"
| [
"shorty.service.Shorty",
"json.loads",
"json.dumps"
] | [((664, 719), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""bitly"""'}), "(url='https://www.example.com', provider='bitly')\n", (670, 719), False, 'from shorty.service import Shorty\n'), ((1007, 1064), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""tinyurl"""'}), "(url='https://www.example.com', provider='tinyurl')\n", (1013, 1064), False, 'from shorty.service import Shorty\n'), ((1259, 1322), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""dummyprovider"""'}), "(url='https://www.example.com', provider='dummyprovider')\n", (1265, 1322), False, 'from shorty.service import Shorty\n'), ((1510, 1547), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""'}), "(url='https://www.example.com')\n", (1516, 1547), False, 'from shorty.service import Shorty\n'), ((1701, 1764), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""dummyprovider"""'}), "(url='https://www.example.com', provider='dummyprovider')\n", (1707, 1764), False, 'from shorty.service import Shorty\n'), ((2324, 2379), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""bitly"""'}), "(url='https://www.example.com', provider='bitly')\n", (2330, 2379), False, 'from shorty.service import Shorty\n'), ((3067, 3124), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""tinyurl"""'}), "(url='https://www.example.com', provider='tinyurl')\n", (3073, 3124), False, 'from shorty.service import Shorty\n'), ((3805, 3862), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""tinyurl"""'}), "(url='https://www.example.com', provider='tinyurl')\n", (3811, 3862), False, 'from shorty.service import Shorty\n'), ((4396, 4453), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""', 'provider': '"""tinyurl"""'}), "(url='https://www.example.com', provider='tinyurl')\n", (4402, 4453), False, 'from shorty.service import Shorty\n'), ((4897, 4934), 'shorty.service.Shorty', 'Shorty', ([], {'url': '"""https://www.example.com"""'}), "(url='https://www.example.com')\n", (4903, 4934), False, 'from shorty.service import Shorty\n'), ((510, 537), 'json.dumps', 'json.dumps', (['self.attributes'], {}), '(self.attributes)\n', (520, 537), False, 'import json\n'), ((574, 605), 'json.loads', 'json.loads', (['self.attributes_str'], {}), '(self.attributes_str)\n', (584, 605), False, 'import json\n')] |
from benchmark.modules.configs.paddle_recognizer_config import PaddleRecognizerConfig
from benchmark.modules.recognizer.paddle_recognizer import PaddleRecognizer
from shutil import move
import numpy as np
import fastwer
import cv2
import os
class Benchmark:
def __init__(self, config):
self.config = config
self.dataset = config.dataset
self.recognizer = PaddleRecognizer(self.config)
def CER(self, gt, pred):
return fastwer.score_sent(pred, gt, char_level=True)
def WER(self, gt, pred):
return fastwer.score_sent(pred, gt)
def check(self, result):
if result == "True":
return 1
else:
return 0
def GenPythonRecord(self):
lb_fp = open(os.path.join(self.dataset, "gt.txt"))
record_path = os.path.join(self.config.result, self.dataset.split("/")[-1] + "_python.txt")
if os.path.exists(record_path):
os.remove(record_path)
result = open(record_path,"a")
for line in lb_fp:
line = line.split("\t")
img_fp = os.path.join(self.dataset, line[0])
img_gt = line[1][:-1]
img = cv2.imread(img_fp)
rec_objs = self.recognizer.recognize([img])
for obj in rec_objs:
result_log = f"{img_fp} {img_gt} {obj.text} {obj.score} \n"
result.write(result_log)
def GenCppRecord(self, pred_result_fp, framework,):
if framework != "deepstream" and framework != "tensorrt":
print ("Only support deepstream or tensorrt")
return
else:
lb_fp = open(os.path.join(self.dataset, "gt.txt"))
record_path = os.path.join(self.config.result, self.dataset.split("/")[-1] + f"_{framework}_test.txt")
if os.path.exists(record_path):
os.remove(record_path)
result = open(record_path,"a")
if os.path.exists(pred_result_fp):
cpp_pred = open(pred_result_fp)
else:
print("pred_result_fp is not exists")
return
cpp_results = []
for line in cpp_pred:
line.split(" ") # Todo add score
cpp_results.append([line[:-1], 0.9])
for i, line in enumerate(lb_fp):
line = line.split("\t")
img_fp = os.path.join(self.dataset, line[0])
img_gt = line[1][:-1]
result_line = f"{img_fp} {img_gt} {cpp_results[i][0]} {cpp_results[i][1]}\n"
result.write(result_line)
def MergeBenchmarkResults(self, python_record, deepstream_record, tensorrt_record):
merge_result_file = open(os.path.join(self.config.result, "merged_result.txt"), "a")
merge_result_view_file = open(os.path.join(self.config.result, "merged_result_view.txt"), "a")
python_file = open(python_record)
deepstream_file = open(deepstream_record)
tensorrt_file = open(tensorrt_record)
deepstream_pred_list = []
tensorrt_pred_list = []
for line in deepstream_file:
deepstream_pred_list.append(line[:-1])
for line in tensorrt_file:
tensorrt_pred_list.append(line[:-1])
for i, line in enumerate(python_file):
line = line.split(" ")
if deepstream_pred_list[i]==line[1]: ds_result = "True"
else: ds_result = "False"
if tensorrt_pred_list[i]==line[1]: trt_result = "True"
else: trt_result = "False"
new_line = f"{line[0]} {line[1]} {line[2]} {line[3]} {line[4]} {line[5]} {deepstream_pred_list[i]} {ds_result} {tensorrt_pred_list[i]} {trt_result} \n"
view_line = f'{line[0]:40s} | {line[1]:10s} | {line[2]:10} | {line[3]:5s} | {line[4]:20s} | {line[5]:20s} | {deepstream_pred_list[i]:10s} | {ds_result:5s} | {tensorrt_pred_list[i]:10s} | {trt_result:5s}|\n'
merge_result_view_file.write(view_line)
merge_result_file.write(new_line)
def ExportResult(self, record_path):
result_file = open(record_path)
result = []
for i,line in enumerate(result_file):
if i > 0:
line = line.split(" ")
result.append([self.check(line[3]), self.check(line[7]), self.check(line[9])])
result = np.array(result)
print("Paddle: ", sum(result[:,0])/len(result[:,0]))
print("Deepstream: ", sum(result[:,1])/len(result[:,0]))
print("Tensorrt: ", sum(result[:,2])/len(result[:,0]))
# time = time + float(line[5])
# if line[3] == "True": true_plate += 1
# else : false_plate += 1
# print(true_plate/(true_plate+false_plate))
# print(time/347)
def CalculateAccuracy(self, record_path):
result_file = open(record_path)
result = []
for i,line in enumerate(result_file):
line = line.split(" ")
gt = str(line[1])
pred = str(line[2])
result.append([self.CER(gt, pred), self.WER(gt, pred)])
result = np.array(result)
print("CER: ", round(sum(result[:,0])/len(result[:,0]), 2), " %")
print("WER: ", round(sum(result[:,1])/len(result[:,1]), 2), " %")
| [
"benchmark.modules.recognizer.paddle_recognizer.PaddleRecognizer",
"os.path.exists",
"fastwer.score_sent",
"os.path.join",
"numpy.array",
"cv2.imread",
"os.remove"
] | [((384, 413), 'benchmark.modules.recognizer.paddle_recognizer.PaddleRecognizer', 'PaddleRecognizer', (['self.config'], {}), '(self.config)\n', (400, 413), False, 'from benchmark.modules.recognizer.paddle_recognizer import PaddleRecognizer\n'), ((459, 504), 'fastwer.score_sent', 'fastwer.score_sent', (['pred', 'gt'], {'char_level': '(True)'}), '(pred, gt, char_level=True)\n', (477, 504), False, 'import fastwer\n'), ((554, 582), 'fastwer.score_sent', 'fastwer.score_sent', (['pred', 'gt'], {}), '(pred, gt)\n', (572, 582), False, 'import fastwer\n'), ((900, 927), 'os.path.exists', 'os.path.exists', (['record_path'], {}), '(record_path)\n', (914, 927), False, 'import os\n'), ((4416, 4432), 'numpy.array', 'np.array', (['result'], {}), '(result)\n', (4424, 4432), True, 'import numpy as np\n'), ((5183, 5199), 'numpy.array', 'np.array', (['result'], {}), '(result)\n', (5191, 5199), True, 'import numpy as np\n'), ((751, 787), 'os.path.join', 'os.path.join', (['self.dataset', '"""gt.txt"""'], {}), "(self.dataset, 'gt.txt')\n", (763, 787), False, 'import os\n'), ((941, 963), 'os.remove', 'os.remove', (['record_path'], {}), '(record_path)\n', (950, 963), False, 'import os\n'), ((1089, 1124), 'os.path.join', 'os.path.join', (['self.dataset', 'line[0]'], {}), '(self.dataset, line[0])\n', (1101, 1124), False, 'import os\n'), ((1178, 1196), 'cv2.imread', 'cv2.imread', (['img_fp'], {}), '(img_fp)\n', (1188, 1196), False, 'import cv2\n'), ((1823, 1850), 'os.path.exists', 'os.path.exists', (['record_path'], {}), '(record_path)\n', (1837, 1850), False, 'import os\n'), ((1950, 1980), 'os.path.exists', 'os.path.exists', (['pred_result_fp'], {}), '(pred_result_fp)\n', (1964, 1980), False, 'import os\n'), ((2746, 2799), 'os.path.join', 'os.path.join', (['self.config.result', '"""merged_result.txt"""'], {}), "(self.config.result, 'merged_result.txt')\n", (2758, 2799), False, 'import os\n'), ((2844, 2902), 'os.path.join', 'os.path.join', (['self.config.result', '"""merged_result_view.txt"""'], {}), "(self.config.result, 'merged_result_view.txt')\n", (2856, 2902), False, 'import os\n'), ((1655, 1691), 'os.path.join', 'os.path.join', (['self.dataset', '"""gt.txt"""'], {}), "(self.dataset, 'gt.txt')\n", (1667, 1691), False, 'import os\n'), ((1868, 1890), 'os.remove', 'os.remove', (['record_path'], {}), '(record_path)\n', (1877, 1890), False, 'import os\n'), ((2415, 2450), 'os.path.join', 'os.path.join', (['self.dataset', 'line[0]'], {}), '(self.dataset, line[0])\n', (2427, 2450), False, 'import os\n')] |
"""Tests for `easyprophet` package."""
import pytest
from easyprophet import easyprophet
from easyprophet import hello_world
def test_raises_exception_on_non_string_arguments():
with pytest.raises(TypeError):
hello_world.capital_case(9)
def test_create_grid():
input_grid = {
"model": ["m1", "m2"],
"initial": ["cv_initial", "cv2"],
"period": ["cv_period", "cv_period2"],
"horizon": ["cv_horizon", "cv_horizon2"],
}
param_grid = list(easyprophet.create_grid(input_grid))
assert len(param_grid) == 16
| [
"easyprophet.hello_world.capital_case",
"pytest.raises",
"easyprophet.easyprophet.create_grid"
] | [((189, 213), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (202, 213), False, 'import pytest\n'), ((223, 250), 'easyprophet.hello_world.capital_case', 'hello_world.capital_case', (['(9)'], {}), '(9)\n', (247, 250), False, 'from easyprophet import hello_world\n'), ((494, 529), 'easyprophet.easyprophet.create_grid', 'easyprophet.create_grid', (['input_grid'], {}), '(input_grid)\n', (517, 529), False, 'from easyprophet import easyprophet\n')] |
#!/usr/bin/python
import sys
import Quartz
d = Quartz.CGSessionCopyCurrentDictionary()
# we want to return 0, not 1, if a session is active
sys.exit(not (d and
d.get("CGSSessionScreenIsLocked", 0) == 0 and
d.get("kCGSSessionOnConsoleKey", 0) == 1))
| [
"Quartz.CGSessionCopyCurrentDictionary"
] | [((49, 88), 'Quartz.CGSessionCopyCurrentDictionary', 'Quartz.CGSessionCopyCurrentDictionary', ([], {}), '()\n', (86, 88), False, 'import Quartz\n')] |
from pyrogram import Client, filters
s = -100
@app.on_message(filters.chat(s) & ~ filters.edited)
async def main(client, message):
file = open("ferrari.txt","r")
lines = file.readlines()
file.close()
for line in lines:
p = line.split()
for r in p:
mes = await client.send_message(int(r),message.text)
fie = open(str(r)+".txt","a")
fie.write(" " + str(message.message_id) + " " + str(mes.message_id))
fie.close()
| [
"pyrogram.filters.chat"
] | [((62, 77), 'pyrogram.filters.chat', 'filters.chat', (['s'], {}), '(s)\n', (74, 77), False, 'from pyrogram import Client, filters\n')] |
from math import radians, cos, sin, asin, sqrt
from typing import Union
import numpy as np
import scipy.interpolate as si
from numpy.linalg import norm
def calc_unit_vector(v: Union[list, np.ndarray]) -> np.ndarray:
""" Returns the unit vector of the given vector v
Parameters
----------
v : array_like
Vector of which the unit vector should be calculated
"""
return v / np.linalg.norm(v)
def calc_angle_between(v1: Union[list, np.ndarray], v2: Union[list, np.ndarray]) -> np.ndarray:
""" Returns the angle in radians between vectors v1 and v2
Parameters
----------
v1 : array_like
First vector
v2 : array_like
Second vector
"""
v1_u = calc_unit_vector(v1)
v2_u = calc_unit_vector(v2)
return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
def bspline(cv, n=10000, degree=3, periodic=False):
""" Calculate n samples on a bspline
Parameters
----------
cv: array_like
Array ov control vertices
n: int
Number of samples to return
degree: int
Curve degree
periodic: bool
True - Curve is closed, False - Curve is open
Returns
-------
np.ndarray
"""
# If periodic, extend the point array by count+degree+1
cv = np.asarray(cv)
count = len(cv)
if periodic:
factor, fraction = divmod(count + degree + 1, count)
cv = np.concatenate((cv,) * factor + (cv[:fraction],))
count = len(cv)
degree = np.clip(degree, 1, degree)
# If opened, prevent degree from exceeding count-1
else:
degree = np.clip(degree, 1, count - 1)
# Calculate knot vector
kv = None
if periodic:
kv = np.arange(0 - degree, count + degree + degree - 1, dtype='int')
else:
kv = np.concatenate(([0] * degree, np.arange(count - degree + 1), [count - degree] * degree))
# Calculate query range
u = np.linspace(periodic, (count - degree), n)
# Calculate result
return np.array(si.splev(u, (kv, cv.T, degree))).T
def project_point_onto_line(line: Union[np.ndarray, list], point: Union[np.ndarray, list]) -> tuple:
"""
Parameters
----------
line: np.ndarray
List of two points defining the line in the form of [[x1, y1],[x2, y2]]
point: np.ndarray
Point of which the distance perpendicular from the line should be calculated to
Returns
-------
tuple
Returns a tuple with the point where the orthogonal projections of the given points intersects the given the
line and secondly the (perpendicular) distance to this point
"""
if type(line) is list:
line = np.array(line)
if type(point) is list:
point = np.array(point)
p1 = line[0]
p2 = line[1]
p3 = point
if np.array_equal(p1, p2):
raise ValueError("Given line consists of two identical points!")
d = norm(np.cross(p2 - p1, p1 - p3)) / norm(p2 - p1)
n = p2 - p1
n = n / norm(n, 2)
p = p1 + n * np.dot(p3 - p1, n)
return p, d
def is_point_within_line_projection(line: Union[np.ndarray, list], point: Union[np.ndarray, list]) -> bool:
""" Checks whether a given points line projection falls within the points that the define the line
Parameters
----------
line: np.ndarray
List of two points defining the line in the form of [[x1, y1],[x2, y2]]
point: np.ndarray
Point of which it should be determined whether the projection onto the line falls within the points that
define the line
Returns
-------
"""
if type(line) is list:
line = np.array(line)
if type(point) is list:
point = np.array(point)
p1 = line[0]
p2 = line[1]
p3 = point
s = p2 - p1
v = p3 - p1
b = (0 <= np.inner(v, s) <= np.inner(s, s))
return b
def haversine(lon1: float, lat1: float, lon2: float, lat2: float):
""" Calculate the great circle distance in kilometers between two points on the earth (specified in decimal degrees)
Source: https://stackoverflow.com/questions/4913349/haversine-formula-in-python-bearing-and-distance-between-two-gps-points
Parameters
----------
lon1 : float
Longitude of first point
lat1 : float
Latitude of first point
lon2 : float
Longitude of second point
lat2 : float
Latitude of second point
Returns
-------
float
Distance between the points in meters
"""
# convert decimal degrees to radians
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
c = 2 * asin(sqrt(a))
r = 6371000 # Radius of earth in meters
return c * r
| [
"numpy.clip",
"numpy.cross",
"numpy.asarray",
"math.sqrt",
"numpy.inner",
"math.cos",
"numpy.array",
"numpy.linspace",
"numpy.dot",
"numpy.array_equal",
"scipy.interpolate.splev",
"numpy.concatenate",
"numpy.linalg.norm",
"math.sin",
"numpy.arange"
] | [((1343, 1357), 'numpy.asarray', 'np.asarray', (['cv'], {}), '(cv)\n', (1353, 1357), True, 'import numpy as np\n'), ((1987, 2027), 'numpy.linspace', 'np.linspace', (['periodic', '(count - degree)', 'n'], {}), '(periodic, count - degree, n)\n', (1998, 2027), True, 'import numpy as np\n'), ((2867, 2889), 'numpy.array_equal', 'np.array_equal', (['p1', 'p2'], {}), '(p1, p2)\n', (2881, 2889), True, 'import numpy as np\n'), ((407, 424), 'numpy.linalg.norm', 'np.linalg.norm', (['v'], {}), '(v)\n', (421, 424), True, 'import numpy as np\n'), ((1470, 1519), 'numpy.concatenate', 'np.concatenate', (['((cv,) * factor + (cv[:fraction],))'], {}), '((cv,) * factor + (cv[:fraction],))\n', (1484, 1519), True, 'import numpy as np\n'), ((1561, 1587), 'numpy.clip', 'np.clip', (['degree', '(1)', 'degree'], {}), '(degree, 1, degree)\n', (1568, 1587), True, 'import numpy as np\n'), ((1671, 1700), 'numpy.clip', 'np.clip', (['degree', '(1)', '(count - 1)'], {}), '(degree, 1, count - 1)\n', (1678, 1700), True, 'import numpy as np\n'), ((1774, 1837), 'numpy.arange', 'np.arange', (['(0 - degree)', '(count + degree + degree - 1)'], {'dtype': '"""int"""'}), "(0 - degree, count + degree + degree - 1, dtype='int')\n", (1783, 1837), True, 'import numpy as np\n'), ((2733, 2747), 'numpy.array', 'np.array', (['line'], {}), '(line)\n', (2741, 2747), True, 'import numpy as np\n'), ((2793, 2808), 'numpy.array', 'np.array', (['point'], {}), '(point)\n', (2801, 2808), True, 'import numpy as np\n'), ((3008, 3021), 'numpy.linalg.norm', 'norm', (['(p2 - p1)'], {}), '(p2 - p1)\n', (3012, 3021), False, 'from numpy.linalg import norm\n'), ((3051, 3061), 'numpy.linalg.norm', 'norm', (['n', '(2)'], {}), '(n, 2)\n', (3055, 3061), False, 'from numpy.linalg import norm\n'), ((3695, 3709), 'numpy.array', 'np.array', (['line'], {}), '(line)\n', (3703, 3709), True, 'import numpy as np\n'), ((3755, 3770), 'numpy.array', 'np.array', (['point'], {}), '(point)\n', (3763, 3770), True, 'import numpy as np\n'), ((3869, 3883), 'numpy.inner', 'np.inner', (['v', 's'], {}), '(v, s)\n', (3877, 3883), True, 'import numpy as np\n'), ((3887, 3901), 'numpy.inner', 'np.inner', (['s', 's'], {}), '(s, s)\n', (3895, 3901), True, 'import numpy as np\n'), ((801, 819), 'numpy.dot', 'np.dot', (['v1_u', 'v2_u'], {}), '(v1_u, v2_u)\n', (807, 819), True, 'import numpy as np\n'), ((2074, 2105), 'scipy.interpolate.splev', 'si.splev', (['u', '(kv, cv.T, degree)'], {}), '(u, (kv, cv.T, degree))\n', (2082, 2105), True, 'import scipy.interpolate as si\n'), ((2978, 3004), 'numpy.cross', 'np.cross', (['(p2 - p1)', '(p1 - p3)'], {}), '(p2 - p1, p1 - p3)\n', (2986, 3004), True, 'import numpy as np\n'), ((3080, 3098), 'numpy.dot', 'np.dot', (['(p3 - p1)', 'n'], {}), '(p3 - p1, n)\n', (3086, 3098), True, 'import numpy as np\n'), ((4746, 4759), 'math.sin', 'sin', (['(dlat / 2)'], {}), '(dlat / 2)\n', (4749, 4759), False, 'from math import radians, cos, sin, asin, sqrt\n'), ((4827, 4834), 'math.sqrt', 'sqrt', (['a'], {}), '(a)\n', (4831, 4834), False, 'from math import radians, cos, sin, asin, sqrt\n'), ((1891, 1920), 'numpy.arange', 'np.arange', (['(count - degree + 1)'], {}), '(count - degree + 1)\n', (1900, 1920), True, 'import numpy as np\n'), ((4767, 4776), 'math.cos', 'cos', (['lat1'], {}), '(lat1)\n', (4770, 4776), False, 'from math import radians, cos, sin, asin, sqrt\n'), ((4779, 4788), 'math.cos', 'cos', (['lat2'], {}), '(lat2)\n', (4782, 4788), False, 'from math import radians, cos, sin, asin, sqrt\n'), ((4791, 4804), 'math.sin', 'sin', (['(dlon / 2)'], {}), '(dlon / 2)\n', (4794, 4804), False, 'from math import radians, cos, sin, asin, sqrt\n')] |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module for constructing RNN Cells. These codes are derived from tf.nn.rnn_cell"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import math
from tensorflow.contrib.compiler import jit
from tensorflow.contrib.layers.python.layers import layers
from tensorflow.contrib.rnn.python.ops import core_rnn_cell
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base as base_layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_impl # pylint: disable=unused-import
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables # pylint: disable=unused-import
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
def _get_concat_variable(name, shape, dtype, num_shards):
"""Get a sharded variable concatenated into one tensor."""
sharded_variable = _get_sharded_variable(name, shape, dtype, num_shards)
if len(sharded_variable) == 1:
return sharded_variable[0]
concat_name = name + "/concat"
concat_full_name = vs.get_variable_scope().name + "/" + concat_name + ":0"
for value in ops.get_collection(ops.GraphKeys.CONCATENATED_VARIABLES):
if value.name == concat_full_name:
return value
concat_variable = array_ops.concat(sharded_variable, 0, name=concat_name)
ops.add_to_collection(ops.GraphKeys.CONCATENATED_VARIABLES, concat_variable)
return concat_variable
def _get_sharded_variable(name, shape, dtype, num_shards):
"""Get a list of sharded variables with the given dtype."""
if num_shards > shape[0]:
raise ValueError("Too many shards: shape=%s, num_shards=%d" % (shape,num_shards))
unit_shard_size = int(math.floor(shape[0] / num_shards))
remaining_rows = shape[0] - unit_shard_size * num_shards
shards = []
for i in range(num_shards):
current_size = unit_shard_size
if i < remaining_rows:
current_size += 1
shards.append(
vs.get_variable(
name + "_%d" % i, [current_size] + shape[1:], dtype=dtype))
return shards
def _norm(g, b, inp, scope):
shape = inp.get_shape()[-1:]
gamma_init = init_ops.constant_initializer(g)
beta_init = init_ops.constant_initializer(b)
with vs.variable_scope(scope):
# Initialize beta and gamma for use by layer_norm.
vs.get_variable("gamma", shape=shape, initializer=gamma_init)
vs.get_variable("beta", shape=shape, initializer=beta_init)
normalized = layers.layer_norm(inp, reuse=True, scope=scope)
return normalized
class ConvLSTMCell(rnn_cell_impl.RNNCell):
"""Convolutional LSTM recurrent network cell, we added "reuse" item.
https://arxiv.org/pdf/1506.04214v1.pdf
"""
def __init__(self,
conv_ndims,
input_shape,
output_channels,
kernel_shape,
use_bias=True,
skip_connection=False,
forget_bias=1.0,
initializers=None,
reuse=None):
"""Construct ConvLSTMCell.
Args:
conv_ndims: Convolution dimensionality (1, 2 or 3).
input_shape: Shape of the input as int tuple, excluding the batch size.
output_channels: int, number of output channels of the conv LSTM.
kernel_shape: Shape of kernel as in tuple (of size 1,2 or 3).
use_bias: (bool) Use bias in convolutions.
skip_connection: If set to `True`, concatenate the input to the
output of the conv LSTM. Default: `False`.
forget_bias: Forget bias.
initializers: Unused.
name: Name of the module.
Raises:
ValueError: If `skip_connection` is `True` and stride is different from 1
or if `input_shape` is incompatible with `conv_ndims`.
"""
super(ConvLSTMCell, self).__init__(_reuse=reuse)
if conv_ndims != len(input_shape) - 1:
raise ValueError("Invalid input_shape {} for conv_ndims={}.".format(
input_shape, conv_ndims))
self._conv_ndims = conv_ndims
self._input_shape = input_shape
self._output_channels = output_channels
self._kernel_shape = kernel_shape
self._use_bias = use_bias
self._forget_bias = forget_bias
self._skip_connection = skip_connection
self._reuse = reuse
self._total_output_channels = output_channels
if self._skip_connection:
self._total_output_channels += self._input_shape[-1]
state_size = tensor_shape.TensorShape(
self._input_shape[:-1] + [self._output_channels])
self._state_size = rnn_cell_impl.LSTMStateTuple(state_size, state_size)
self._output_size = tensor_shape.TensorShape(
self._input_shape[:-1] + [self._total_output_channels])
@property
def output_size(self):
return self._output_size
@property
def state_size(self):
return self._state_size
def call(self, inputs, state, scope=None):
with vs.variable_scope(scope, reuse=self._reuse):
cell, hidden = state
new_hidden = _conv([inputs, hidden], self._kernel_shape,
4 * self._output_channels, self._use_bias)
gates = array_ops.split(
value=new_hidden, num_or_size_splits=4, axis=self._conv_ndims + 1)
input_gate, new_input, forget_gate, output_gate = gates
new_cell = math_ops.sigmoid(forget_gate + self._forget_bias) * cell
new_cell += math_ops.sigmoid(input_gate) * math_ops.tanh(new_input)
output = math_ops.tanh(new_cell) * math_ops.sigmoid(output_gate)
if self._skip_connection:
output = array_ops.concat([output, inputs], axis=-1)
new_state = rnn_cell_impl.LSTMStateTuple(new_cell, output)
return output, new_state
class Conv1DLSTMCell(ConvLSTMCell):
"""1D Convolutional LSTM recurrent network cell.
https://arxiv.org/pdf/1506.04214v1.pdf
"""
def __init__(self, name="conv_1d_lstm_cell", **kwargs):
"""Construct Conv1DLSTM. See `ConvLSTMCell` for more details."""
super(Conv1DLSTMCell, self).__init__(conv_ndims=1, name=name, **kwargs)
class Conv2DLSTMCell(ConvLSTMCell):
"""2D Convolutional LSTM recurrent network cell.
https://arxiv.org/pdf/1506.04214v1.pdf
"""
def __init__(self, name="conv_2d_lstm_cell", **kwargs):
"""Construct Conv2DLSTM. See `ConvLSTMCell` for more details."""
super(Conv2DLSTMCell, self).__init__(conv_ndims=2, name=name, **kwargs)
class Conv3DLSTMCell(ConvLSTMCell):
"""3D Convolutional LSTM recurrent network cell.
https://arxiv.org/pdf/1506.04214v1.pdf
"""
def __init__(self, name="conv_3d_lstm_cell", **kwargs):
"""Construct Conv3DLSTM. See `ConvLSTMCell` for more details."""
super(Conv3DLSTMCell, self).__init__(conv_ndims=3, name=name, **kwargs)
def _conv(args, filter_size, num_features, bias, bias_start=0.0):
"""Convolution.
Args:
args: a Tensor or a list of Tensors of dimension 3D, 4D or 5D,
batch x n, Tensors.
filter_size: int tuple of filter height and width.
num_features: int, number of features.
bias: Whether to use biases in the convolution layer.
bias_start: starting value to initialize the bias; 0 by default.
Returns:
A 3D, 4D, or 5D Tensor with shape [batch ... num_features]
Raises:
ValueError: if some of the arguments has unspecified or wrong shape.
"""
# Calculate the total size of arguments on dimension 1.
total_arg_size_depth = 0
shapes = [a.get_shape().as_list() for a in args]
shape_length = len(shapes[0])
for shape in shapes:
if len(shape) not in [3, 4, 5]:
raise ValueError("Conv Linear expects 3D, 4D "
"or 5D arguments: %s" % str(shapes))
if len(shape) != len(shapes[0]):
raise ValueError("Conv Linear expects all args "
"to be of same Dimension: %s" % str(shapes))
else:
total_arg_size_depth += shape[-1]
dtype = [a.dtype for a in args][0]
# determine correct conv operation
if shape_length == 3:
conv_op = nn_ops.conv1d
strides = 1
elif shape_length == 4:
conv_op = nn_ops.conv2d
strides = shape_length * [1]
elif shape_length == 5:
conv_op = nn_ops.conv3d
strides = shape_length * [1]
# Now the computation.
kernel = vs.get_variable(
"kernel", filter_size + [total_arg_size_depth, num_features], dtype=dtype)
if len(args) == 1:
res = conv_op(args[0], kernel, strides, padding="SAME")
else:
res = conv_op(
array_ops.concat(axis=shape_length - 1, values=args),
kernel,
strides,
padding="SAME")
if not bias:
return res
bias_term = vs.get_variable(
"biases", [num_features],
dtype=dtype,
initializer=init_ops.constant_initializer(bias_start, dtype=dtype))
return res + bias_term
| [
"tensorflow.contrib.layers.python.layers.layers.layer_norm",
"tensorflow.python.ops.variable_scope.variable_scope",
"math.floor",
"tensorflow.python.ops.math_ops.sigmoid",
"tensorflow.python.ops.variable_scope.get_variable_scope",
"tensorflow.python.ops.math_ops.tanh",
"tensorflow.python.framework.ops.get_collection",
"tensorflow.python.framework.tensor_shape.TensorShape",
"tensorflow.python.ops.array_ops.split",
"tensorflow.python.ops.init_ops.constant_initializer",
"tensorflow.python.framework.ops.add_to_collection",
"tensorflow.python.ops.variable_scope.get_variable",
"tensorflow.python.ops.array_ops.concat",
"tensorflow.python.ops.rnn_cell_impl.LSTMStateTuple"
] | [((2448, 2504), 'tensorflow.python.framework.ops.get_collection', 'ops.get_collection', (['ops.GraphKeys.CONCATENATED_VARIABLES'], {}), '(ops.GraphKeys.CONCATENATED_VARIABLES)\n', (2466, 2504), False, 'from tensorflow.python.framework import ops\n'), ((2585, 2640), 'tensorflow.python.ops.array_ops.concat', 'array_ops.concat', (['sharded_variable', '(0)'], {'name': 'concat_name'}), '(sharded_variable, 0, name=concat_name)\n', (2601, 2640), False, 'from tensorflow.python.ops import array_ops\n'), ((2643, 2719), 'tensorflow.python.framework.ops.add_to_collection', 'ops.add_to_collection', (['ops.GraphKeys.CONCATENATED_VARIABLES', 'concat_variable'], {}), '(ops.GraphKeys.CONCATENATED_VARIABLES, concat_variable)\n', (2664, 2719), False, 'from tensorflow.python.framework import ops\n'), ((3440, 3472), 'tensorflow.python.ops.init_ops.constant_initializer', 'init_ops.constant_initializer', (['g'], {}), '(g)\n', (3469, 3472), False, 'from tensorflow.python.ops import init_ops\n'), ((3487, 3519), 'tensorflow.python.ops.init_ops.constant_initializer', 'init_ops.constant_initializer', (['b'], {}), '(b)\n', (3516, 3519), False, 'from tensorflow.python.ops import init_ops\n'), ((3753, 3800), 'tensorflow.contrib.layers.python.layers.layers.layer_norm', 'layers.layer_norm', (['inp'], {'reuse': '(True)', 'scope': 'scope'}), '(inp, reuse=True, scope=scope)\n', (3770, 3800), False, 'from tensorflow.contrib.layers.python.layers import layers\n'), ((9456, 9551), 'tensorflow.python.ops.variable_scope.get_variable', 'vs.get_variable', (['"""kernel"""', '(filter_size + [total_arg_size_depth, num_features])'], {'dtype': 'dtype'}), "('kernel', filter_size + [total_arg_size_depth, num_features\n ], dtype=dtype)\n", (9471, 9551), True, 'from tensorflow.python.ops import variable_scope as vs\n'), ((3006, 3039), 'math.floor', 'math.floor', (['(shape[0] / num_shards)'], {}), '(shape[0] / num_shards)\n', (3016, 3039), False, 'import math\n'), ((3527, 3551), 'tensorflow.python.ops.variable_scope.variable_scope', 'vs.variable_scope', (['scope'], {}), '(scope)\n', (3544, 3551), True, 'from tensorflow.python.ops import variable_scope as vs\n'), ((3612, 3673), 'tensorflow.python.ops.variable_scope.get_variable', 'vs.get_variable', (['"""gamma"""'], {'shape': 'shape', 'initializer': 'gamma_init'}), "('gamma', shape=shape, initializer=gamma_init)\n", (3627, 3673), True, 'from tensorflow.python.ops import variable_scope as vs\n'), ((3678, 3737), 'tensorflow.python.ops.variable_scope.get_variable', 'vs.get_variable', (['"""beta"""'], {'shape': 'shape', 'initializer': 'beta_init'}), "('beta', shape=shape, initializer=beta_init)\n", (3693, 3737), True, 'from tensorflow.python.ops import variable_scope as vs\n'), ((5683, 5757), 'tensorflow.python.framework.tensor_shape.TensorShape', 'tensor_shape.TensorShape', (['(self._input_shape[:-1] + [self._output_channels])'], {}), '(self._input_shape[:-1] + [self._output_channels])\n', (5707, 5757), False, 'from tensorflow.python.framework import tensor_shape\n'), ((5790, 5842), 'tensorflow.python.ops.rnn_cell_impl.LSTMStateTuple', 'rnn_cell_impl.LSTMStateTuple', (['state_size', 'state_size'], {}), '(state_size, state_size)\n', (5818, 5842), False, 'from tensorflow.python.ops import rnn_cell_impl\n'), ((5867, 5952), 'tensorflow.python.framework.tensor_shape.TensorShape', 'tensor_shape.TensorShape', (['(self._input_shape[:-1] + [self._total_output_channels])'], {}), '(self._input_shape[:-1] + [self._total_output_channels]\n )\n', (5891, 5952), False, 'from tensorflow.python.framework import tensor_shape\n'), ((3258, 3332), 'tensorflow.python.ops.variable_scope.get_variable', 'vs.get_variable', (["(name + '_%d' % i)", '([current_size] + shape[1:])'], {'dtype': 'dtype'}), "(name + '_%d' % i, [current_size] + shape[1:], dtype=dtype)\n", (3273, 3332), True, 'from tensorflow.python.ops import variable_scope as vs\n'), ((6144, 6187), 'tensorflow.python.ops.variable_scope.variable_scope', 'vs.variable_scope', (['scope'], {'reuse': 'self._reuse'}), '(scope, reuse=self._reuse)\n', (6161, 6187), True, 'from tensorflow.python.ops import variable_scope as vs\n'), ((6369, 6456), 'tensorflow.python.ops.array_ops.split', 'array_ops.split', ([], {'value': 'new_hidden', 'num_or_size_splits': '(4)', 'axis': '(self._conv_ndims + 1)'}), '(value=new_hidden, num_or_size_splits=4, axis=self.\n _conv_ndims + 1)\n', (6384, 6456), False, 'from tensorflow.python.ops import array_ops\n'), ((6872, 6918), 'tensorflow.python.ops.rnn_cell_impl.LSTMStateTuple', 'rnn_cell_impl.LSTMStateTuple', (['new_cell', 'output'], {}), '(new_cell, output)\n', (6900, 6918), False, 'from tensorflow.python.ops import rnn_cell_impl\n'), ((9670, 9722), 'tensorflow.python.ops.array_ops.concat', 'array_ops.concat', ([], {'axis': '(shape_length - 1)', 'values': 'args'}), '(axis=shape_length - 1, values=args)\n', (9686, 9722), False, 'from tensorflow.python.ops import array_ops\n'), ((9911, 9965), 'tensorflow.python.ops.init_ops.constant_initializer', 'init_ops.constant_initializer', (['bias_start'], {'dtype': 'dtype'}), '(bias_start, dtype=dtype)\n', (9940, 9965), False, 'from tensorflow.python.ops import init_ops\n'), ((6548, 6597), 'tensorflow.python.ops.math_ops.sigmoid', 'math_ops.sigmoid', (['(forget_gate + self._forget_bias)'], {}), '(forget_gate + self._forget_bias)\n', (6564, 6597), False, 'from tensorflow.python.ops import math_ops\n'), ((6625, 6653), 'tensorflow.python.ops.math_ops.sigmoid', 'math_ops.sigmoid', (['input_gate'], {}), '(input_gate)\n', (6641, 6653), False, 'from tensorflow.python.ops import math_ops\n'), ((6656, 6680), 'tensorflow.python.ops.math_ops.tanh', 'math_ops.tanh', (['new_input'], {}), '(new_input)\n', (6669, 6680), False, 'from tensorflow.python.ops import math_ops\n'), ((6698, 6721), 'tensorflow.python.ops.math_ops.tanh', 'math_ops.tanh', (['new_cell'], {}), '(new_cell)\n', (6711, 6721), False, 'from tensorflow.python.ops import math_ops\n'), ((6724, 6753), 'tensorflow.python.ops.math_ops.sigmoid', 'math_ops.sigmoid', (['output_gate'], {}), '(output_gate)\n', (6740, 6753), False, 'from tensorflow.python.ops import math_ops\n'), ((6808, 6851), 'tensorflow.python.ops.array_ops.concat', 'array_ops.concat', (['[output, inputs]'], {'axis': '(-1)'}), '([output, inputs], axis=-1)\n', (6824, 6851), False, 'from tensorflow.python.ops import array_ops\n'), ((2377, 2400), 'tensorflow.python.ops.variable_scope.get_variable_scope', 'vs.get_variable_scope', ([], {}), '()\n', (2398, 2400), True, 'from tensorflow.python.ops import variable_scope as vs\n')] |
from rest_framework import viewsets
from api.models import Pessoa
from api.serializer import PessoaSerializer
class PessoaViewSet(viewsets.ModelViewSet):
queryset = Pessoa.objects.all()
serializer_class = PessoaSerializer | [
"api.models.Pessoa.objects.all"
] | [((170, 190), 'api.models.Pessoa.objects.all', 'Pessoa.objects.all', ([], {}), '()\n', (188, 190), False, 'from api.models import Pessoa\n')] |
from time import sleep
def contagem(i, f, p):
print('~' * 40)
if p == 0: # Segunda opção para p == 0
print('passo = 1')
p = 1 # end
if p < 0:
p = -p # p *= -1
print(f'Contagem de {i} até {f} de {p} em {p}')
sleep(1)
if i <= f:
while i <= f:
print(i, end=' ')
i += p
sleep(0.3)
else:
while i >= f:
print(i, end=' ')
i -= p
sleep(0.3)
print('FIM')
print()
# Principal
contagem(1, 10, 1)
contagem(10, 0, 2)
print('='*50)
print('Agora é sua vez de personalizar a contagem!')
ini = int(input('Início: '))
fim = int(input('Fim: '))
while True: # Opção para p == 0
pas = int(input('Passo: '))
if pas != 0:
break
print(' * ERRO: digite um valor diferente de 0!')
contagem(ini, fim, pas)
| [
"time.sleep"
] | [((256, 264), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (261, 264), False, 'from time import sleep\n'), ((363, 373), 'time.sleep', 'sleep', (['(0.3)'], {}), '(0.3)\n', (368, 373), False, 'from time import sleep\n'), ((467, 477), 'time.sleep', 'sleep', (['(0.3)'], {}), '(0.3)\n', (472, 477), False, 'from time import sleep\n')] |
"""Default configuration options."""
from pathlib import Path
ROOT = Path(__file__).parent.parent
# Application options
STATIC_FOLDERS: str = [ROOT / 'assets']
STATIC_URL_PREFIX: str = '/assets'
# Muffin-Jinja options
JINJA2_TEMPLATE_FOLDERS: str = [ROOT / 'templates']
# Muffin-OAuth options
OAUTH_CLIENTS = {
'github': {
'client_id': 'b212c829c357ea0bd950',
'client_secret': 'e2bdda59f9da853ec39d0d1e07baade595f50202',
'scope': 'user:email',
}
}
# Muffin-Peewee options
PEEWEE_MIGRATIONS_PATH = ROOT.parent / 'migrations'
PEEWEE_CONNECTION = 'aiosqlite:///example.sqlite'
PEEWEE_AUTO_TRANSACTION = False
# Muffin-Session options
SESSION_SECRET_KEY = 'Example-Secret'
SESSION_AUTO_MANAGE = True
# Muffin-Admin options
ADMIN_LOGOUT_URL = '/logout'
ADMIN_CUSTOM_CSS_URL = '/assets/admin.css'
ADMIN_AUTH_STORAGE = 'cookies'
ADMIN_AUTH_STORAGE_NAME = 'session'
| [
"pathlib.Path"
] | [((72, 86), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (76, 86), False, 'from pathlib import Path\n')] |
# -*- coding: utf-8 -*-
import os
import re
import sys
from macdaily.cli.help import get_help_parser, parse_args
from macdaily.util.const.macro import (CMD_ARCHIVE, CMD_BUNDLE, # pylint: disable=unused-import
CMD_CLEANUP, CMD_CONFIG, CMD_DEPENDENCY, CMD_HELP,
CMD_INSTALL, CMD_LAUNCH, CMD_LOGGING, CMD_POSTINSTALL,
CMD_REINSTALL, CMD_UNINSTALL, CMD_UPDATE, COMMANDS, MAP_DICT,
MAP_ALL, MAP_ARCHIVE, MAP_BUNDLE, MAP_CLEANUP, MAP_COMMANDS,
MAP_CONFIG, MAP_DEPENDENCY, MAP_HELP, MAP_INSTALL,
MAP_LAUNCH, MAP_LOGGING, MAP_POSTINSTALL, MAP_REINSTALL,
MAP_UNINSTALL, MAP_UPDATE, ROOT)
from macdaily.util.exceptions import CommandNotImplemented
def help_(argv=None):
# parse args
args = parse_args(argv)
if args.command is None:
pth = os.path.join(ROOT, 'man/macdaily.8')
os.execlp('man', 'man', pth)
command = args.command.strip().lower()
if command in MAP_COMMANDS:
print(COMMANDS, end='')
return
# split args, fetch cmd & sub
temp = command.split('-', maxsplit=1)
if len(temp) == 2:
cmd, sub = temp
else:
cmd, sub = temp[0], None
def _find_help(cmd, sub, man):
pth = None
if sub is None:
pth = os.path.join(ROOT, f'man/macdaily-{cmd}.8')
if sub in man:
pth = os.path.join(ROOT, f'man/macdaily-{cmd}-{MAP_DICT[sub]}.8')
if pth is None:
CMD = globals().get(f'CMD_{cmd.upper()}', set())
parser = get_help_parser()
pattern = rf'.*{command}.*'
matches = f"', '".join(filter(lambda s: re.match(pattern, s, re.IGNORECASE), # pylint: disable=cell-var-from-loop
(r'%s-%s' % (cmd, sub) for sub in CMD)))
if matches:
parser.error(f"argument CMD: invalid choice: {args.command!r} "
f"(did you mean: '{matches}')")
else:
parser.error(f"argument CMD: invalid choice: {args.command!r} "
f"(choose from {cmd}-{(', %s-' % cmd).join(sorted(CMD))})")
os.execlp('man', 'man', pth)
if cmd in MAP_ARCHIVE:
_find_help('archive', sub, CMD_ARCHIVE)
elif cmd in MAP_BUNDLE:
# _find_help('bundle', sub, CMD_BUNDLE)
raise CommandNotImplemented
elif cmd in MAP_CLEANUP:
_find_help('cleanup', sub, CMD_CLEANUP)
elif cmd in MAP_CONFIG:
_find_help('config', sub, CMD_CONFIG)
elif cmd in MAP_DEPENDENCY:
_find_help('dependency', sub, CMD_DEPENDENCY)
elif cmd in MAP_HELP:
_find_help('help', sub, CMD_HELP)
elif cmd in MAP_INSTALL:
_find_help('install', sub, CMD_INSTALL)
elif cmd in MAP_LAUNCH:
_find_help('launch', sub, CMD_LAUNCH)
elif cmd in MAP_LOGGING:
_find_help('logging', sub, CMD_LOGGING)
elif cmd in MAP_POSTINSTALL:
_find_help('postinstall', sub, CMD_POSTINSTALL)
elif cmd in MAP_REINSTALL:
_find_help('reinstall', sub, CMD_REINSTALL)
elif cmd in MAP_UNINSTALL:
_find_help('uninstall', sub, CMD_UNINSTALL)
elif cmd in MAP_UPDATE:
_find_help('update', sub, CMD_UPDATE)
else:
parser = get_help_parser()
pattern = rf'.*{cmd}.*'
matches = "', '".join(filter(lambda s: re.match(pattern, s, re.IGNORECASE), MAP_ALL)) # pylint: disable=cell-var-from-loop
if matches:
parser.error(f'unrecognized arguments: {args.command!r} '
f"(did you mean: '{matches}')")
else:
parser.error(f"argument CMD: invalid choice: {args.command!r} "
f"(choose from {', '.join(sorted(MAP_ALL))})")
if __name__ == '__main__':
sys.exit(help_())
| [
"os.execlp",
"macdaily.cli.help.parse_args",
"os.path.join",
"re.match",
"macdaily.cli.help.get_help_parser"
] | [((966, 982), 'macdaily.cli.help.parse_args', 'parse_args', (['argv'], {}), '(argv)\n', (976, 982), False, 'from macdaily.cli.help import get_help_parser, parse_args\n'), ((1027, 1063), 'os.path.join', 'os.path.join', (['ROOT', '"""man/macdaily.8"""'], {}), "(ROOT, 'man/macdaily.8')\n", (1039, 1063), False, 'import os\n'), ((1072, 1100), 'os.execlp', 'os.execlp', (['"""man"""', '"""man"""', 'pth'], {}), "('man', 'man', pth)\n", (1081, 1100), False, 'import os\n'), ((2367, 2395), 'os.execlp', 'os.execlp', (['"""man"""', '"""man"""', 'pth'], {}), "('man', 'man', pth)\n", (2376, 2395), False, 'import os\n'), ((1488, 1531), 'os.path.join', 'os.path.join', (['ROOT', 'f"""man/macdaily-{cmd}.8"""'], {}), "(ROOT, f'man/macdaily-{cmd}.8')\n", (1500, 1531), False, 'import os\n'), ((1573, 1632), 'os.path.join', 'os.path.join', (['ROOT', 'f"""man/macdaily-{cmd}-{MAP_DICT[sub]}.8"""'], {}), "(ROOT, f'man/macdaily-{cmd}-{MAP_DICT[sub]}.8')\n", (1585, 1632), False, 'import os\n'), ((1739, 1756), 'macdaily.cli.help.get_help_parser', 'get_help_parser', ([], {}), '()\n', (1754, 1756), False, 'from macdaily.cli.help import get_help_parser, parse_args\n'), ((1849, 1884), 're.match', 're.match', (['pattern', 's', 're.IGNORECASE'], {}), '(pattern, s, re.IGNORECASE)\n', (1857, 1884), False, 'import re\n'), ((3473, 3490), 'macdaily.cli.help.get_help_parser', 'get_help_parser', ([], {}), '()\n', (3488, 3490), False, 'from macdaily.cli.help import get_help_parser, parse_args\n'), ((3570, 3605), 're.match', 're.match', (['pattern', 's', 're.IGNORECASE'], {}), '(pattern, s, re.IGNORECASE)\n', (3578, 3605), False, 'import re\n')] |
import logging
from typing import Any, Dict, List, Optional
import torch
from torch.nn.functional import nll_loss
from allennlp.common.checks import check_dimensions_match
from allennlp.data import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import Highway
from allennlp.modules import Seq2SeqEncoder, SimilarityFunction, TimeDistributed, TextFieldEmbedder
from allennlp.modules.matrix_attention.legacy_matrix_attention import LegacyMatrixAttention
from allennlp.nn import util, InitializerApplicator, RegularizerApplicator
from allennlp.training.metrics import BooleanAccuracy, CategoricalAccuracy, SquadEmAndF1
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class FiLM(torch.nn.Module):
"""
A Feature-wise Linear Modulation Layer from
'FiLM: Visual Reasoning with a General Conditioning Layer'
"""
def forward(self, x, gammas, betas):
gammas = gammas.unsqueeze(1)
betas = betas.unsqueeze(1)
return (gammas * x) + betas
@Model.register("bidaf")
class BidirectionalAttentionFlow(Model):
"""
This class implements Minjoon Seo's `Bidirectional Attention Flow model
<https://www.semanticscholar.org/paper/Bidirectional-Attention-Flow-for-Machine-Seo-Kembhavi/7586b7cca1deba124af80609327395e613a20e9d>`_
for answering reading comprehension questions (ICLR 2017).
The basic layout is pretty simple: encode words as a combination of word embeddings and a
character-level encoder, pass the word representations through a bi-LSTM/GRU, use a matrix of
attentions to put question information into the passage word representations (this is the only
part that is at all non-standard), pass this through another few layers of bi-LSTMs/GRUs, and
do a softmax over span start and span end.
Parameters
----------
vocab : ``Vocabulary``
text_field_embedder : ``TextFieldEmbedder``
Used to embed the ``question`` and ``passage`` ``TextFields`` we get as input to the model.
num_highway_layers : ``int``
The number of highway layers to use in between embedding the input and passing it through
the phrase layer.
phrase_layer : ``Seq2SeqEncoder``
The encoder (with its own internal stacking) that we will use in between embedding tokens
and doing the bidirectional attention.
similarity_function : ``SimilarityFunction``
The similarity function that we will use when comparing encoded passage and question
representations.
modeling_layer : ``Seq2SeqEncoder``
The encoder (with its own internal stacking) that we will use in between the bidirectional
attention and predicting span start and end.
span_end_encoder : ``Seq2SeqEncoder``
The encoder that we will use to incorporate span start predictions into the passage state
before predicting span end.
dropout : ``float``, optional (default=0.2)
If greater than 0, we will apply dropout with this probability after all encoders (pytorch
LSTMs do not apply dropout to their last layer).
mask_lstms : ``bool``, optional (default=True)
If ``False``, we will skip passing the mask to the LSTM layers. This gives a ~2x speedup,
with only a slight performance decrease, if any. We haven't experimented much with this
yet, but have confirmed that we still get very similar performance with much faster
training times. We still use the mask for all softmaxes, but avoid the shuffling that's
required when using masking with pytorch LSTMs.
initializer : ``InitializerApplicator``, optional (default=``InitializerApplicator()``)
Used to initialize the model parameters.
regularizer : ``RegularizerApplicator``, optional (default=``None``)
If provided, will be used to calculate the regularization penalty during training.
"""
def __init__(self, vocab: Vocabulary,
text_field_embedder: TextFieldEmbedder,
num_highway_layers: int,
phrase_layer: Seq2SeqEncoder,
similarity_function: SimilarityFunction,
modeling_layer: Seq2SeqEncoder,
span_end_encoder: Seq2SeqEncoder,
dropout: float = 0.2,
mask_lstms: bool = True,
initializer: InitializerApplicator = InitializerApplicator(),
regularizer: Optional[RegularizerApplicator] = None,
judge: Model = None,
update_judge: bool = False,
reward_method: str = None,
detach_value_head: bool = False,
qa_loss_weight: float = 0.,
influence_reward: bool = False,
dataset_name: str = 'squad') -> None:
super(BidirectionalAttentionFlow, self).__init__(vocab, regularizer)
self.judge = judge
self.is_judge = self.judge is None
self.reward_method = None if self.is_judge else reward_method
self.update_judge = update_judge and (self.judge is not None)
self._detach_value_head = detach_value_head
self._qa_loss_weight = qa_loss_weight
self.influence_reward = influence_reward
self.answer_type = 'mc' if dataset_name == 'race' else 'span'
self.output_type = 'span' # The actual way the output is given (here it's as a pointer to input)
self._text_field_embedder = text_field_embedder
self._highway_layer = TimeDistributed(Highway(text_field_embedder.get_output_dim(),
num_highway_layers))
self._phrase_layer = phrase_layer
self._matrix_attention = LegacyMatrixAttention(similarity_function)
if not self.is_judge:
self._turn_film_gen = torch.nn.Linear(1, 2 * modeling_layer.get_input_dim())
self._film = FiLM()
self._modeling_layer = modeling_layer
self._span_end_encoder = span_end_encoder
encoding_dim = phrase_layer.get_output_dim()
modeling_dim = modeling_layer.get_output_dim()
span_start_input_dim = encoding_dim * 4 + modeling_dim
if not self.is_judge:
self._value_head = TimeDistributed(torch.nn.Linear(span_start_input_dim, 1)) # Can make MLP
self._span_start_predictor = TimeDistributed(torch.nn.Linear(span_start_input_dim, 1))
span_end_encoding_dim = span_end_encoder.get_output_dim()
span_end_input_dim = encoding_dim * 4 + span_end_encoding_dim
self._span_end_predictor = TimeDistributed(torch.nn.Linear(span_end_input_dim, 1))
# Bidaf has lots of layer dimensions which need to match up - these aren't necessarily
# obvious from the configuration files, so we check here.
check_dimensions_match(modeling_layer.get_input_dim(), 4 * encoding_dim,
"modeling layer input dim", "4 * encoding dim")
check_dimensions_match(text_field_embedder.get_output_dim(), phrase_layer.get_input_dim(),
"text field embedder output dim", "phrase layer input dim")
check_dimensions_match(span_end_encoder.get_input_dim(), 4 * encoding_dim + 3 * modeling_dim,
"span end encoder input dim", "4 * encoding dim + 3 * modeling dim")
# Bidaf has lots of layer dimensions which need to match up - these aren't necessarily
# obvious from the configuration files, so we check here.
check_dimensions_match(modeling_layer.get_input_dim(), 4 * encoding_dim,
"modeling layer input dim", "4 * encoding dim")
check_dimensions_match(text_field_embedder.get_output_dim(), phrase_layer.get_input_dim(),
"text field embedder output dim", "phrase layer input dim")
check_dimensions_match(span_end_encoder.get_input_dim(), 4 * encoding_dim + 3 * modeling_dim,
"span end encoder input dim", "4 * encoding dim + 3 * modeling dim")
self._span_start_accuracy = CategoricalAccuracy()
self._span_end_accuracy = CategoricalAccuracy()
self._span_accuracy = BooleanAccuracy()
self._squad_metrics = SquadEmAndF1()
if dropout > 0:
self._dropout = torch.nn.Dropout(p=dropout)
else:
self._dropout = lambda x: x
self._mask_lstms = mask_lstms
initializer(self)
def forward(self, # type: ignore
question: Dict[str, torch.LongTensor],
passage: Dict[str, torch.LongTensor],
span_start: torch.IntTensor = None,
span_end: torch.IntTensor = None,
metadata: List[Dict[str, Any]] = None,
store_metrics: bool = True,
valid_output_mask: torch.LongTensor = None,
sent_targets: torch.Tensor = None,
stance: torch.LongTensor = None) -> Dict[str, torch.Tensor]:
# pylint: disable=arguments-differ
"""
Parameters
----------
question : Dict[str, torch.LongTensor]
From a ``TextField``.
passage : Dict[str, torch.LongTensor]
From a ``TextField``. The model assumes that this passage contains the answer to the
question, and predicts the beginning and ending positions of the answer within the
passage.
span_start : ``torch.IntTensor``, optional
From an ``IndexField``. This is one of the things we are trying to predict - the
beginning position of the answer with the passage. This is an `inclusive` token index.
If this is given, we will compute a loss that gets included in the output dictionary.
span_end : ``torch.IntTensor``, optional
From an ``IndexField``. This is one of the things we are trying to predict - the
ending position of the answer with the passage. This is an `inclusive` token index.
If this is given, we will compute a loss that gets included in the output dictionary.
metadata : ``List[Dict[str, Any]]``, optional
If present, this should contain the question ID, original passage text, and token
offsets into the passage for each instance in the batch. We use this for computing
official metrics using the official SQuAD evaluation script. The length of this list
should be the batch size, and each dictionary should have the keys ``id``,
``original_passage``, and ``token_offsets``. If you only want the best span string and
don't care about official metrics, you can omit the ``id`` key.
store_metrics : bool
If true, stores metrics (if applicable) within model metric tracker.
If false, returns resulting metrics immediately, without updating the model metric tracker.
valid_output_mask: ``torch.LongTensor``, optional
The locations for a valid answer. Used to limit the model's output space.
Returns
-------
An output dictionary consisting of:
span_start_logits : torch.FloatTensor
A tensor of shape ``(batch_size, passage_length)`` representing unnormalized log
probabilities of the span start position.
span_start_probs : torch.FloatTensor
The result of ``softmax(span_start_logits)``.
span_end_logits : torch.FloatTensor
A tensor of shape ``(batch_size, passage_length)`` representing unnormalized log
probabilities of the span end position (inclusive).
span_end_probs : torch.FloatTensor
The result of ``softmax(span_end_logits)``.
best_span : torch.IntTensor
The result of a constrained inference over ``span_start_logits`` and
``span_end_logits`` to find the most probable span. Shape is ``(batch_size, 2)``
and each offset is a token index.
loss : torch.FloatTensor, optional
A scalar loss to be optimised.
best_span_str : List[str]
If sufficient metadata was provided for the instances in the batch, we also return the
string from the original passage that the model thinks is the best answer to the
question.
"""
embedded_question = self._highway_layer(self._text_field_embedder(question))
embedded_passage = self._highway_layer(self._text_field_embedder(passage))
batch_size = embedded_question.size(0)
passage_length = embedded_passage.size(1)
question_mask = util.get_text_field_mask(question).float()
passage_mask = util.get_text_field_mask(passage).float()
question_lstm_mask = question_mask if self._mask_lstms else None
passage_lstm_mask = passage_mask if self._mask_lstms else None
encoded_question = self._dropout(self._phrase_layer(embedded_question, question_lstm_mask))
encoded_passage = self._dropout(self._phrase_layer(embedded_passage, passage_lstm_mask))
encoding_dim = encoded_question.size(-1)
# Shape: (batch_size, passage_length, question_length)
passage_question_similarity = self._matrix_attention(encoded_passage, encoded_question)
# Shape: (batch_size, passage_length, question_length)
passage_question_attention = util.masked_softmax(passage_question_similarity, question_mask)
# Shape: (batch_size, passage_length, encoding_dim)
passage_question_vectors = util.weighted_sum(encoded_question, passage_question_attention)
# We replace masked values with something really negative here, so they don't affect the
# max below.
masked_similarity = util.replace_masked_values(passage_question_similarity,
question_mask.unsqueeze(1),
-1e7)
# Shape: (batch_size, passage_length)
question_passage_similarity = masked_similarity.max(dim=-1)[0].squeeze(-1)
# Shape: (batch_size, passage_length)
question_passage_attention = util.masked_softmax(question_passage_similarity, passage_mask)
# Shape: (batch_size, encoding_dim)
question_passage_vector = util.weighted_sum(encoded_passage, question_passage_attention)
# Shape: (batch_size, passage_length, encoding_dim)
tiled_question_passage_vector = question_passage_vector.unsqueeze(1).expand(batch_size,
passage_length,
encoding_dim)
# Shape: (batch_size, passage_length, encoding_dim * 4)
final_merged_passage = torch.cat([encoded_passage,
passage_question_vectors,
encoded_passage * passage_question_vectors,
encoded_passage * tiled_question_passage_vector],
dim=-1)
# Debate: Conditioning on whose turn it is (A/B)
if not self.is_judge:
turn_film_params = self._turn_film_gen(stance.to(final_merged_passage).unsqueeze(1))
turn_gammas, turn_betas = torch.split(turn_film_params, self._modeling_layer.get_input_dim(), dim=-1)
final_merged_passage_mask = (final_merged_passage != 0).float() # NOTE: Using heuristic to get mask
final_merged_passage = self._film(
final_merged_passage, 1. + turn_gammas, turn_betas) * final_merged_passage_mask
modeled_passage = self._dropout(self._modeling_layer(final_merged_passage, passage_lstm_mask))
modeling_dim = modeled_passage.size(-1)
# Shape: (batch_size, passage_length, encoding_dim * 4 + modeling_dim))
span_start_input_full = torch.cat([final_merged_passage, modeled_passage], dim=-1)
span_start_input = self._dropout(span_start_input_full)
if not self.is_judge:
value_head_input = span_start_input_full.detach() if self._detach_value_head else span_start_input_full
# Shape: (batch_size)
tokenwise_values = self._value_head(value_head_input).squeeze(-1)
value, value_loc = util.replace_masked_values(tokenwise_values, passage_mask, -1e7).max(-1)
# Shape: (batch_size, passage_length)
span_start_logits = self._span_start_predictor(span_start_input).squeeze(-1)
valid_output_mask = passage_mask if valid_output_mask is None else valid_output_mask
# Shape: (batch_size, passage_length)
span_start_probs = util.masked_softmax(span_start_logits, valid_output_mask)
# Shape: (batch_size, modeling_dim)
span_start_representation = util.weighted_sum(modeled_passage, span_start_probs)
# Shape: (batch_size, passage_length, modeling_dim)
tiled_start_representation = span_start_representation.unsqueeze(1).expand(batch_size,
passage_length,
modeling_dim)
# Shape: (batch_size, passage_length, encoding_dim * 4 + modeling_dim * 3)
span_end_representation = torch.cat([final_merged_passage,
modeled_passage,
tiled_start_representation,
modeled_passage * tiled_start_representation],
dim=-1)
# Shape: (batch_size, passage_length, encoding_dim)
encoded_span_end = self._dropout(self._span_end_encoder(span_end_representation,
passage_lstm_mask))
# Shape: (batch_size, passage_length, encoding_dim * 4 + span_end_encoding_dim)
span_end_input = self._dropout(torch.cat([final_merged_passage, encoded_span_end], dim=-1))
span_end_logits = self._span_end_predictor(span_end_input).squeeze(-1)
span_end_probs = util.masked_softmax(span_end_logits, valid_output_mask)
span_start_logits = util.replace_masked_values(span_start_logits, valid_output_mask, -1e7)
span_end_logits = util.replace_masked_values(span_end_logits, valid_output_mask, -1e7)
best_span = self.get_best_span(span_start_logits, span_end_logits)
output_dict = {
"passage_question_attention": passage_question_attention,
"span_start_logits": span_start_logits,
"span_start_probs": span_start_probs,
"span_end_logits": span_end_logits,
"span_end_probs": span_end_probs,
"best_span": best_span,
"value": value if not self.is_judge else None,
"prob": torch.tensor([
span_start_probs[i, span_start[i]] if span_start[i] < span_start_probs.size(1) else 0.
for i in range(batch_size)]) if self.is_judge else None, # prob(true answer)
"prob_dist": span_start_probs,
}
# Compute the loss for training.
if (span_start is not None) and self.is_judge:
span_start[span_start >= passage_mask.size(1)] = -100 # NB: Hacky. Don't add to loss if span not in input
loss = nll_loss(util.masked_log_softmax(span_start_logits, valid_output_mask), span_start.squeeze(-1))
if store_metrics:
self._span_start_accuracy(span_start_logits, span_start.squeeze(-1))
span_end[span_end >= passage_mask.size(1)] = -100 # NB: Hacky. Don't add to loss if span not in input
loss += nll_loss(util.masked_log_softmax(span_end_logits, valid_output_mask), span_end.squeeze(-1))
if store_metrics:
self._span_end_accuracy(span_end_logits, span_end.squeeze(-1))
self._span_accuracy(best_span, torch.stack([span_start, span_end], -1))
output_dict["loss"] = loss
elif not self.is_judge: # Debate SL
if self.reward_method == 'sl': # sent_targets should be a vector of target indices
output_dict["loss"] = nll_loss(util.masked_log_softmax(span_start_logits, valid_output_mask), sent_targets.squeeze(-1))
if store_metrics:
self._span_start_accuracy(span_start_logits, sent_targets.squeeze(-1))
elif self.reward_method.startswith('sl-sents'):
# sent_targets should be a matrix of target values (non-zero only in EOS indices)
sent_targets = util.replace_masked_values(sent_targets, valid_output_mask, -1e7)
output_dict["loss"] = util.masked_mean(((span_start_logits - sent_targets) ** 2), valid_output_mask, 1)
if store_metrics:
self._span_start_accuracy(span_start_logits, sent_targets.max(-1)[1])
# Compute the EM and F1 on SQuAD and add the tokenized input to the output.
batch_ems = []
batch_f1s = []
if metadata is not None:
output_dict['best_span_str'] = []
question_tokens = []
passage_tokens = []
for i in range(batch_size):
question_tokens.append(metadata[i]['question_tokens'])
passage_tokens.append(metadata[i]['passage_tokens'])
passage_str = metadata[i]['original_passage']
offsets = metadata[i]['token_offsets']
predicted_span = tuple(best_span[i].detach().cpu().numpy())
start_offset = offsets[predicted_span[0]][0]
end_offset = offsets[predicted_span[1]][1]
best_span_string = passage_str[start_offset:end_offset]
output_dict['best_span_str'].append(best_span_string)
answer_texts = metadata[i].get('answer_texts', [])
if answer_texts:
self._squad_metrics(best_span_string, answer_texts)
sample_squad_metrics = SquadEmAndF1()
sample_squad_metrics(best_span_string, answer_texts)
sample_em, sample_f1 = sample_squad_metrics.get_metric(reset=True)
batch_ems.append(sample_em)
batch_f1s.append(sample_f1)
output_dict['question_tokens'] = question_tokens
output_dict['passage_tokens'] = passage_tokens
output_dict['em'] = torch.tensor(batch_ems)
output_dict['f1'] = torch.tensor(batch_f1s)
return output_dict
def get_metrics(self, reset: bool = False) -> Dict[str, float]:
exact_match, f1_score = self._squad_metrics.get_metric(reset)
return {
'start_acc': self._span_start_accuracy.get_metric(reset),
'end_acc': self._span_end_accuracy.get_metric(reset),
'span_acc': self._span_accuracy.get_metric(reset),
'em': exact_match,
'f1': f1_score,
}
@staticmethod
def get_best_span(span_start_logits: torch.Tensor, span_end_logits: torch.Tensor) -> torch.Tensor:
if span_start_logits.dim() != 2 or span_end_logits.dim() != 2:
raise ValueError("Input shapes must be (batch_size, passage_length)")
batch_size, passage_length = span_start_logits.size()
max_span_log_prob = [-1e20] * batch_size
span_start_argmax = [0] * batch_size
best_word_span = span_start_logits.new_zeros((batch_size, 2), dtype=torch.long)
span_start_logits = span_start_logits.detach().cpu().numpy()
span_end_logits = span_end_logits.detach().cpu().numpy()
for b in range(batch_size): # pylint: disable=invalid-name
for j in range(passage_length):
val1 = span_start_logits[b, span_start_argmax[b]]
if val1 < span_start_logits[b, j]:
span_start_argmax[b] = j
val1 = span_start_logits[b, j]
val2 = span_end_logits[b, j]
if val1 + val2 > max_span_log_prob[b]:
best_word_span[b, 0] = span_start_argmax[b]
best_word_span[b, 1] = j
max_span_log_prob[b] = val1 + val2
return best_word_span
| [
"logging.getLogger",
"allennlp.training.metrics.BooleanAccuracy",
"torch.nn.Dropout",
"allennlp.nn.InitializerApplicator",
"allennlp.training.metrics.CategoricalAccuracy",
"allennlp.nn.util.masked_softmax",
"allennlp.nn.util.get_text_field_mask",
"allennlp.models.model.Model.register",
"torch.stack",
"allennlp.nn.util.replace_masked_values",
"allennlp.training.metrics.SquadEmAndF1",
"torch.tensor",
"allennlp.nn.util.weighted_sum",
"allennlp.modules.matrix_attention.legacy_matrix_attention.LegacyMatrixAttention",
"torch.nn.Linear",
"allennlp.nn.util.masked_log_softmax",
"allennlp.nn.util.masked_mean",
"torch.cat"
] | [((654, 681), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (671, 681), False, 'import logging\n'), ((1024, 1047), 'allennlp.models.model.Model.register', 'Model.register', (['"""bidaf"""'], {}), "('bidaf')\n", (1038, 1047), False, 'from allennlp.models.model import Model\n'), ((4385, 4408), 'allennlp.nn.InitializerApplicator', 'InitializerApplicator', ([], {}), '()\n', (4406, 4408), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((5715, 5757), 'allennlp.modules.matrix_attention.legacy_matrix_attention.LegacyMatrixAttention', 'LegacyMatrixAttention', (['similarity_function'], {}), '(similarity_function)\n', (5736, 5757), False, 'from allennlp.modules.matrix_attention.legacy_matrix_attention import LegacyMatrixAttention\n'), ((8100, 8121), 'allennlp.training.metrics.CategoricalAccuracy', 'CategoricalAccuracy', ([], {}), '()\n', (8119, 8121), False, 'from allennlp.training.metrics import BooleanAccuracy, CategoricalAccuracy, SquadEmAndF1\n'), ((8156, 8177), 'allennlp.training.metrics.CategoricalAccuracy', 'CategoricalAccuracy', ([], {}), '()\n', (8175, 8177), False, 'from allennlp.training.metrics import BooleanAccuracy, CategoricalAccuracy, SquadEmAndF1\n'), ((8208, 8225), 'allennlp.training.metrics.BooleanAccuracy', 'BooleanAccuracy', ([], {}), '()\n', (8223, 8225), False, 'from allennlp.training.metrics import BooleanAccuracy, CategoricalAccuracy, SquadEmAndF1\n'), ((8256, 8270), 'allennlp.training.metrics.SquadEmAndF1', 'SquadEmAndF1', ([], {}), '()\n', (8268, 8270), False, 'from allennlp.training.metrics import BooleanAccuracy, CategoricalAccuracy, SquadEmAndF1\n'), ((13409, 13472), 'allennlp.nn.util.masked_softmax', 'util.masked_softmax', (['passage_question_similarity', 'question_mask'], {}), '(passage_question_similarity, question_mask)\n', (13428, 13472), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((13568, 13631), 'allennlp.nn.util.weighted_sum', 'util.weighted_sum', (['encoded_question', 'passage_question_attention'], {}), '(encoded_question, passage_question_attention)\n', (13585, 13631), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((14191, 14253), 'allennlp.nn.util.masked_softmax', 'util.masked_softmax', (['question_passage_similarity', 'passage_mask'], {}), '(question_passage_similarity, passage_mask)\n', (14210, 14253), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((14332, 14394), 'allennlp.nn.util.weighted_sum', 'util.weighted_sum', (['encoded_passage', 'question_passage_attention'], {}), '(encoded_passage, question_passage_attention)\n', (14349, 14394), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((14845, 15008), 'torch.cat', 'torch.cat', (['[encoded_passage, passage_question_vectors, encoded_passage *\n passage_question_vectors, encoded_passage * tiled_question_passage_vector]'], {'dim': '(-1)'}), '([encoded_passage, passage_question_vectors, encoded_passage *\n passage_question_vectors, encoded_passage *\n tiled_question_passage_vector], dim=-1)\n', (14854, 15008), False, 'import torch\n'), ((15987, 16045), 'torch.cat', 'torch.cat', (['[final_merged_passage, modeled_passage]'], {'dim': '(-1)'}), '([final_merged_passage, modeled_passage], dim=-1)\n', (15996, 16045), False, 'import torch\n'), ((16769, 16826), 'allennlp.nn.util.masked_softmax', 'util.masked_softmax', (['span_start_logits', 'valid_output_mask'], {}), '(span_start_logits, valid_output_mask)\n', (16788, 16826), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((16908, 16960), 'allennlp.nn.util.weighted_sum', 'util.weighted_sum', (['modeled_passage', 'span_start_probs'], {}), '(modeled_passage, span_start_probs)\n', (16925, 16960), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((17430, 17570), 'torch.cat', 'torch.cat', (['[final_merged_passage, modeled_passage, tiled_start_representation, \n modeled_passage * tiled_start_representation]'], {'dim': '(-1)'}), '([final_merged_passage, modeled_passage,\n tiled_start_representation, modeled_passage *\n tiled_start_representation], dim=-1)\n', (17439, 17570), False, 'import torch\n'), ((18267, 18322), 'allennlp.nn.util.masked_softmax', 'util.masked_softmax', (['span_end_logits', 'valid_output_mask'], {}), '(span_end_logits, valid_output_mask)\n', (18286, 18322), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((18351, 18428), 'allennlp.nn.util.replace_masked_values', 'util.replace_masked_values', (['span_start_logits', 'valid_output_mask', '(-10000000.0)'], {}), '(span_start_logits, valid_output_mask, -10000000.0)\n', (18377, 18428), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((18448, 18523), 'allennlp.nn.util.replace_masked_values', 'util.replace_masked_values', (['span_end_logits', 'valid_output_mask', '(-10000000.0)'], {}), '(span_end_logits, valid_output_mask, -10000000.0)\n', (18474, 18523), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((6365, 6405), 'torch.nn.Linear', 'torch.nn.Linear', (['span_start_input_dim', '(1)'], {}), '(span_start_input_dim, 1)\n', (6380, 6405), False, 'import torch\n'), ((6595, 6633), 'torch.nn.Linear', 'torch.nn.Linear', (['span_end_input_dim', '(1)'], {}), '(span_end_input_dim, 1)\n', (6610, 6633), False, 'import torch\n'), ((8323, 8350), 'torch.nn.Dropout', 'torch.nn.Dropout', ([], {'p': 'dropout'}), '(p=dropout)\n', (8339, 8350), False, 'import torch\n'), ((18102, 18161), 'torch.cat', 'torch.cat', (['[final_merged_passage, encoded_span_end]'], {'dim': '(-1)'}), '([final_merged_passage, encoded_span_end], dim=-1)\n', (18111, 18161), False, 'import torch\n'), ((22673, 22696), 'torch.tensor', 'torch.tensor', (['batch_ems'], {}), '(batch_ems)\n', (22685, 22696), False, 'import torch\n'), ((22729, 22752), 'torch.tensor', 'torch.tensor', (['batch_f1s'], {}), '(batch_f1s)\n', (22741, 22752), False, 'import torch\n'), ((6254, 6294), 'torch.nn.Linear', 'torch.nn.Linear', (['span_start_input_dim', '(1)'], {}), '(span_start_input_dim, 1)\n', (6269, 6294), False, 'import torch\n'), ((12650, 12684), 'allennlp.nn.util.get_text_field_mask', 'util.get_text_field_mask', (['question'], {}), '(question)\n', (12674, 12684), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((12716, 12749), 'allennlp.nn.util.get_text_field_mask', 'util.get_text_field_mask', (['passage'], {}), '(passage)\n', (12740, 12749), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((19559, 19620), 'allennlp.nn.util.masked_log_softmax', 'util.masked_log_softmax', (['span_start_logits', 'valid_output_mask'], {}), '(span_start_logits, valid_output_mask)\n', (19582, 19620), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((19905, 19964), 'allennlp.nn.util.masked_log_softmax', 'util.masked_log_softmax', (['span_end_logits', 'valid_output_mask'], {}), '(span_end_logits, valid_output_mask)\n', (19928, 19964), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((16399, 16470), 'allennlp.nn.util.replace_masked_values', 'util.replace_masked_values', (['tokenwise_values', 'passage_mask', '(-10000000.0)'], {}), '(tokenwise_values, passage_mask, -10000000.0)\n', (16425, 16470), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((20144, 20183), 'torch.stack', 'torch.stack', (['[span_start, span_end]', '(-1)'], {}), '([span_start, span_end], -1)\n', (20155, 20183), False, 'import torch\n'), ((22250, 22264), 'allennlp.training.metrics.SquadEmAndF1', 'SquadEmAndF1', ([], {}), '()\n', (22262, 22264), False, 'from allennlp.training.metrics import BooleanAccuracy, CategoricalAccuracy, SquadEmAndF1\n'), ((20412, 20473), 'allennlp.nn.util.masked_log_softmax', 'util.masked_log_softmax', (['span_start_logits', 'valid_output_mask'], {}), '(span_start_logits, valid_output_mask)\n', (20435, 20473), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((20815, 20887), 'allennlp.nn.util.replace_masked_values', 'util.replace_masked_values', (['sent_targets', 'valid_output_mask', '(-10000000.0)'], {}), '(sent_targets, valid_output_mask, -10000000.0)\n', (20841, 20887), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n'), ((20919, 20998), 'allennlp.nn.util.masked_mean', 'util.masked_mean', (['((span_start_logits - sent_targets) ** 2)', 'valid_output_mask', '(1)'], {}), '((span_start_logits - sent_targets) ** 2, valid_output_mask, 1)\n', (20935, 20998), False, 'from allennlp.nn import util, InitializerApplicator, RegularizerApplicator\n')] |
#!/usr/bin/python3
import io
import ast
from random import shuffle, randint
import sys
import os
part = sys.argv[1]
final = open('vw/' + part.zfill(5) + '.mc.vw', 'w')
for i in io.open(part):
a = ast.literal_eval(i.strip())
final.write(str(randint(2,10) if a[1] == 0 else 1) + " '" + str(a[0]) + ' | ' + ' '.join(["f" + str(j[0]) + ':' + str(j[1]) for j in a[2][1]]) + '\n')
#shuffle(vw)
| [
"random.randint",
"io.open"
] | [((178, 191), 'io.open', 'io.open', (['part'], {}), '(part)\n', (185, 191), False, 'import io\n'), ((249, 263), 'random.randint', 'randint', (['(2)', '(10)'], {}), '(2, 10)\n', (256, 263), False, 'from random import shuffle, randint\n')] |
import math
from simplestat import mean
#def basis(n=3):
# return [unit(i) for i in range(n)]
#def unit(i):
# ret=[]
# while len(ret)<i:ret.append(0.0)
# ret.append(1.0)
# return wavelet(*ret)
def theta(x):
if x<0.5:
return -1.0
elif x==0.5:
return 0.0
else:
return 1.0
def linear(x):
return (x-0.5)
def cube(x):
return (x-0.5)**3
def sigmoid(alpha):
def sig(x):
return (1/(1+math.exp(-alpha*x)))-0.5
return sig
class wavelet(object):
#r1=-1.0#ranges for the orthogonality
#r2=1.0
#epsilon=0.001
r1=0.0
r2=0.999
epsilon=0.01
def __init__(s,f):
s.f=f
def __add__(a,b):
def ret(x):
return a.f(x)+b.f(x)
return wavelet(ret)
def __sub__(a,b):
def ret(x):
return a.f(x)-b.f(x)
return wavelet(ret)
__rsub__=__sub__
def __getitem__(s,x):
return s.f(x)
def x(s):
ret=[]
ac=s.r1
while ac<=s.r2:
ret.append(ac)
ac+=s.epsilon
return ret
def y(s):
return [s.f(x) for x in s.x()]
def absmean(s):
return mean([abs(zw) for zw in s.y()])
def scalar(a,b):
ret=0.0
for x in a.x():
ret+=a.f(x)*b.f(x)
return ret
def multwithfloat(s,f):
def ret(x):
return s.f(x)*f
return wavelet(ret)
def __mul__(a,b):
if type(a) is int:a=float(a)
if type(b) is int:b=float(b)
if type(a) is float:
if type(b) is float:
raise Exception("THIS SHOULD NOT HAPPEN")
else:
return b.multwithfloat(a)
else:
if type(b) is float:
return a.multwithfloat(b)
else:
return a.scalar(b)
__rmul__=__mul__
def __repr__(s):
return str(s)
def __str__(s):
return "some wavelet"#"+".join([str(q)+"x**"+str(i) for i,q in enumerate(s.q) if not q==0.0])
if __name__=="__main__":
from graham import *
from plt import *
b=[sigmoid(5),linear,cube]
b=[sigmoid(i) for i in [3,5,7]]
b=[wavelet(zw) for zw in b]
bo=findorthonormal(*b)
#bo=b
print(scalarmat(*bo))
for zw in bo:
plt.plot(zw.x(),zw.y())
plt.show()
| [
"math.exp"
] | [((453, 473), 'math.exp', 'math.exp', (['(-alpha * x)'], {}), '(-alpha * x)\n', (461, 473), False, 'import math\n')] |
"""VPN over DNS protocol utilities."""
import binascii
import collections
import enum
import itertools
import regex
import struct
from vodreassembler import util
from vodreassembler import dnsrecord
DEFAULT_FQDN_SUFFIX = 'tun.vpnoverdns.com.'
class Error(Exception):
pass
class UnknownVersionError(Error):
pass
def ipv4_to_bytes(addr):
return ipv4_to_chunk(addr)[0]
def ipv4_to_chunk(addr):
octets = addr.split('.')
if len(octets) != 4:
raise ValueError('IPv4 addresses must have 4 octets')
octets = map(int, octets)
try:
data = ipv4_to_chunk.packer.pack(*octets)
except struct.error as e:
raise ValueError('every octet must be within [0,255]') from e
length = (data[0] >> 6) & 0x3
offset = (data[0] & 0x3f) * 3
return util.DataChunk(data[1:length+1], offset)
ipv4_to_chunk.packer = struct.Struct('!BBBB')
def chunk_to_ipv4(chunk):
if not isinstance(chunk, util.DataChunk):
chunk = util.DataChunk(*chunk)
length = len(chunk.data)
if length <= 0:
raise ValueError('length must be at least 1')
elif length > 3:
raise ValueError('cannot encode chunks longer than 3 bytes')
elif chunk.offset % 3 != 0:
raise ValueError('chunk offset must be multiples of 3')
elif chunk.offset < 0:
raise ValueError('chunk offset cannot be negative')
elif chunk.offset // 3 >= 0x3f:
raise ValueError('chunk offset cannot exceed {}'.format(0x3f))
return '{}.{}.{}.{}'.format((length << 6) + (chunk.offset // 3),
chunk.data[0],
chunk.data[1] if length >= 2 else 255,
chunk.data[2] if length == 3 else 255)
def normalize_fqdn_suffix(fqdn_suffix):
if not fqdn_suffix.endswith('.'):
fqdn_suffix += '.'
if fqdn_suffix.startswith('.'):
fqdn_suffix = fqdn_suffix[1:]
return fqdn_suffix
@enum.unique
class QueryType(enum.Enum):
unknown = 0
open_ticket = 1
request_data = 2
check_request = 3
fetch_response = 4
close_ticket = 5
@staticmethod
def deduce(version, variables, data):
if version != '0':
raise UnknownVersionError(version)
keys = set(variables.keys())
if 'retry' in keys:
# ignore clearly optional variables
keys.remove('retry')
if keys == {'<KEY>'}:
return QueryType.open_ticket
elif keys == {'<KEY>'}:
return QueryType.request_data
elif keys == {'ck', 'id'}:
return QueryType.check_request
elif keys == {'ln', 'rd', 'id'}:
return QueryType.fetch_response
elif keys == {'ac', 'id'}:
return QueryType.close_ticket
return QueryType.unknown
class Query(collections.namedtuple('Query', ['version', 'type',
'variables', 'payload'])):
@classmethod
def create(cls, version, variables, payload):
querytype = QueryType.deduce(version, variables, payload)
variables, payload = cls.normalize_data(querytype, variables, payload)
return cls(version, querytype, variables, payload)
@staticmethod
def normalize_data(querytype, variables, payload):
newvars = {}
for key in variables:
if key in {'id', 'sz', 'rn', 'wr', 'ck', 'ln', 'rd', 'retry'}:
newvars[key] = int(variables[key])
elif key in {'bf'}:
newvars[key] = binascii.unhexlify(variables[key])
else:
newvars[key] = variables[key]
return newvars, payload
@property
def error(self):
# Unfortunately, we currently don't have an easy way to find out whether
# in a fetch_response payload. Simply wish the byte sequences 69 ## doesn't
# appear in the payload.
if len(self.payload.data) == 2 and self.payload.data.startswith(b'E'):
return self.payload.data[1] or None
return None
def encode(self, fqdn_suffix=None):
fqdn_suffix = normalize_fqdn_suffix(fqdn_suffix or DEFAULT_FQDN_SUFFIX)
field_encoders = [
('retry', str),
('sz', '{:08d}'.format),
('rn', '{:08d}'.format),
('bf', lambda x: binascii.hexlify(x).decode('ascii')),
('wr', '{:08d}'.format),
('ck', '{:08d}'.format),
('ln', '{:08d}'.format),
('rd', '{:08d}'.format),
('ac', None),
('id', '{:08d}'.format),
]
def encode_var(field, encoder):
if encoder:
return field + '-' + encoder(self.variables[field])
return field
variables = '.'.join(encode_var(field, encoder)
for field, encoder in field_encoders
if field in self.variables)
return dnsrecord.DnsRecord(
variables + '.v' + str(self.version) + '.' + fqdn_suffix,
'IN', 'A', chunk_to_ipv4(self.payload))
class QueryParser:
def __init__(self, fqdn_suffix=None):
self._suffix = normalize_fqdn_suffix(fqdn_suffix or DEFAULT_FQDN_SUFFIX)
self._re = regex.compile(
r'''^\s*
((?P<flag>\w+)\.)* # flags
((?P<var>\w+)-(?P<value>\w+)\.)+ # variables
v(?P<version>\w+)\. # version
{!s} # suffix
\s*$'''.format(regex.escape(self._suffix)),
regex.VERSION1 | regex.VERBOSE)
def parse(self, dns_record):
m = self._re.fullmatch(dns_record.fqdn)
if not m:
raise ValueError(
"fqdn '{}' is not in the expected format".format(dns_record.fqdn))
variables = dict.fromkeys(m.captures('flag'), True)
variables.update(zip(m.captures('var'), m.captures('value')))
return Query.create(m.group('version'), variables,
ipv4_to_chunk(dns_record.value))
| [
"regex.escape",
"collections.namedtuple",
"binascii.hexlify",
"vodreassembler.util.DataChunk",
"struct.Struct",
"binascii.unhexlify"
] | [((828, 850), 'struct.Struct', 'struct.Struct', (['"""!BBBB"""'], {}), "('!BBBB')\n", (841, 850), False, 'import struct\n'), ((2626, 2702), 'collections.namedtuple', 'collections.namedtuple', (['"""Query"""', "['version', 'type', 'variables', 'payload']"], {}), "('Query', ['version', 'type', 'variables', 'payload'])\n", (2648, 2702), False, 'import collections\n'), ((763, 805), 'vodreassembler.util.DataChunk', 'util.DataChunk', (['data[1:length + 1]', 'offset'], {}), '(data[1:length + 1], offset)\n', (777, 805), False, 'from vodreassembler import util\n'), ((934, 956), 'vodreassembler.util.DataChunk', 'util.DataChunk', (['*chunk'], {}), '(*chunk)\n', (948, 956), False, 'from vodreassembler import util\n'), ((5122, 5148), 'regex.escape', 'regex.escape', (['self._suffix'], {}), '(self._suffix)\n', (5134, 5148), False, 'import regex\n'), ((3283, 3317), 'binascii.unhexlify', 'binascii.unhexlify', (['variables[key]'], {}), '(variables[key])\n', (3301, 3317), False, 'import binascii\n'), ((4000, 4019), 'binascii.hexlify', 'binascii.hexlify', (['x'], {}), '(x)\n', (4016, 4019), False, 'import binascii\n')] |
import numpy as np
import cv2
class Analyzer():
def __init__(self, config):
self.config = config
self.coords = self.config['coordinates']
self.positions = ['goal', 'defense', 'center', 'offense']
self.ball_template = np.array([
[0, 0, 0, 0, 0, 255, 0, 255,
0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 255, 255, 0, 255, 0,
255, 255, 255, 255, 0, 0, 0],
[0, 0, 255, 255, 0, 0, 0, 0,
0, 0, 0, 255, 255, 0, 0],
[0, 255, 255, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 255, 0],
[0, 255, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 0],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 255],
[255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 0],
[255, 255, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 255, 0],
[0, 255, 255, 0, 0, 0, 0, 0,
0, 0, 0, 255, 255, 0, 0],
[0, 0, 255, 255, 0, 0, 0, 0,
0, 0, 255, 255, 0, 0, 0],
[0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 0, 0, 0, 0]], dtype=np.uint8)
def extract_table(self, frame, shape, offset_x=0, offset_y=0):
width, height = shape
coords = self.config['coordinates']
pts1 = np.float32([
coords['bottom_left_corner'],
coords['top_left_corner'],
coords['bottom_right_corner'],
coords['top_right_corner']] + np.float32([[offset_x, offset_y],
[offset_x, offset_y],
[offset_x, offset_y],
[offset_x, offset_y]]))
pts2 = np.float32([[0, 0], [0, height], [width, 0], [width, height]])
M = cv2.getPerspectiveTransform(pts1, pts2)
table_area = cv2.warpPerspective(frame, M, shape)
return table_area
def _can_move(self, frame, position, direction):
p = position + '_' + direction
# print((p, frame[self.coords[p][1], self.coords[p][0]]))
return True
def get_possible_moves(self, frame):
return {p: [self._can_move(frame, p, 'left'), self._can_move(
frame, p, 'right')] for p in self.positions}
def add_circles_to_limiters(self, img):
positions = [p for name in self.positions for p in [
name + '_left', name + '_right']]
img = img.copy()
for p in positions:
circle_color = (255, 0, 0)
if self._can_move(img, p.split('_')[0], p.split('_')[1]):
circle_color = (0, 255, 0)
cv2.circle(img, tuple(self.coords[p]), 2, circle_color, 3)
return img
def compute_ball_center(self, frame):
frame_canny = cv2.Canny(frame[:, :, 1], 320, 340)
res = cv2.matchTemplate(frame_canny, self.ball_template, cv2.TM_CCOEFF)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
return (max_val, max_loc[0] + 8, max_loc[1] + 8)
| [
"cv2.getPerspectiveTransform",
"cv2.minMaxLoc",
"numpy.array",
"cv2.warpPerspective",
"cv2.matchTemplate",
"cv2.Canny",
"numpy.float32"
] | [((255, 1250), 'numpy.array', 'np.array', (['[[0, 0, 0, 0, 0, 255, 0, 255, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 255, 255, 0, \n 255, 0, 255, 255, 255, 255, 0, 0, 0], [0, 0, 255, 255, 0, 0, 0, 0, 0, 0,\n 0, 255, 255, 0, 0], [0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, \n 0], [0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0], [255, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255], [255, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255],\n [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255], [255, 0, 0, 0, 0, \n 0, 0, 0, 0, 0, 0, 0, 0, 255, 0], [255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 0, 255, 255, 0], [0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 0, 0],\n [0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 255, 255, 0, 0, 0], [0, 0, 0, 255, \n 255, 255, 255, 255, 255, 255, 255, 0, 0, 0, 0]]'], {'dtype': 'np.uint8'}), '([[0, 0, 0, 0, 0, 255, 0, 255, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 255,\n 255, 0, 255, 0, 255, 255, 255, 255, 0, 0, 0], [0, 0, 255, 255, 0, 0, 0,\n 0, 0, 0, 0, 255, 255, 0, 0], [0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 255, 255, 0], [0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0], [255, \n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \n 255], [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255], [255, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 255], [255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255], [\n 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0], [255, 255, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 255, 255, 0], [0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, \n 255, 255, 0, 0], [0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 255, 255, 0, 0, 0],\n [0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 0, 0, 0, 0]], dtype=\n np.uint8)\n', (263, 1250), True, 'import numpy as np\n'), ((2226, 2288), 'numpy.float32', 'np.float32', (['[[0, 0], [0, height], [width, 0], [width, height]]'], {}), '([[0, 0], [0, height], [width, 0], [width, height]])\n', (2236, 2288), True, 'import numpy as np\n'), ((2302, 2341), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['pts1', 'pts2'], {}), '(pts1, pts2)\n', (2329, 2341), False, 'import cv2\n'), ((2364, 2400), 'cv2.warpPerspective', 'cv2.warpPerspective', (['frame', 'M', 'shape'], {}), '(frame, M, shape)\n', (2383, 2400), False, 'import cv2\n'), ((3292, 3327), 'cv2.Canny', 'cv2.Canny', (['frame[:, :, 1]', '(320)', '(340)'], {}), '(frame[:, :, 1], 320, 340)\n', (3301, 3327), False, 'import cv2\n'), ((3342, 3407), 'cv2.matchTemplate', 'cv2.matchTemplate', (['frame_canny', 'self.ball_template', 'cv2.TM_CCOEFF'], {}), '(frame_canny, self.ball_template, cv2.TM_CCOEFF)\n', (3359, 3407), False, 'import cv2\n'), ((3454, 3472), 'cv2.minMaxLoc', 'cv2.minMaxLoc', (['res'], {}), '(res)\n', (3467, 3472), False, 'import cv2\n'), ((1946, 2051), 'numpy.float32', 'np.float32', (['[[offset_x, offset_y], [offset_x, offset_y], [offset_x, offset_y], [\n offset_x, offset_y]]'], {}), '([[offset_x, offset_y], [offset_x, offset_y], [offset_x, offset_y\n ], [offset_x, offset_y]])\n', (1956, 2051), True, 'import numpy as np\n')] |
import math
import time
import unittest
import thalesians.tsa.evaluation as evaluation
def pickleable_fact(x):
time.sleep(1)
return math.factorial(x)
class TestEvaluation(unittest.TestCase):
def test_current_thread_evaluator(self):
def fact(x):
time.sleep(1)
return math.factorial(x)
current_thread_evaluator = evaluation.CurrentThreadEvaluator()
status = evaluation.evaluate(fact, args=[10], evaluator=current_thread_evaluator)
self.assertTrue(status.ready)
self.assertEqual(status.result.result, 3628800)
self.assertIsNone(status.result.exception)
def test_current_thread_evaluator_callbacks(self):
def fact(x):
time.sleep(1)
return math.factorial(x)
current_thread_evaluator = evaluation.CurrentThreadEvaluator()
status = evaluation.evaluate(fact, args=[10], evaluator=current_thread_evaluator)
self.assertTrue(status.ready)
callback1_called = False
def callback1(status):
nonlocal callback1_called
callback1_called = True
callback2_called = False
def callback2(status):
nonlocal callback2_called
callback2_called = True
status.add_callback(callback1)
status.add_callback(callback2)
self.assertTrue(callback1_called)
self.assertTrue(callback2_called)
self.assertTrue(status.ready)
self.assertEqual(status.result.result, 3628800)
self.assertIsNone(status.result.exception)
def test_ipyparallel_evaluator(self):
def fact(x):
time.sleep(1)
return math.factorial(x)
ipp_evaluator = evaluation.IPyParallelEvaluator()
status = evaluation.evaluate(fact, args=[10], evaluator=ipp_evaluator)
self.assertFalse(status.ready)
time.sleep(2)
self.assertTrue(status.ready)
self.assertEqual(status.result.result, 3628800)
self.assertIsNone(status.result.exception)
def test_ipyparallel_evaluator_callback(self):
def fact(x):
time.sleep(1)
return math.factorial(x)
ipp_evaluator = evaluation.IPyParallelEvaluator()
status = evaluation.evaluate(fact, args=[10], evaluator=ipp_evaluator)
self.assertFalse(status.ready)
callback1_called = False
def callback1(status):
nonlocal callback1_called
callback1_called = True
callback2_called = False
def callback2(status):
nonlocal callback2_called
callback2_called = True
status.add_callback(callback1)
status.add_callback(callback2)
self.assertFalse(callback1_called)
self.assertFalse(callback2_called)
time.sleep(2)
self.assertTrue(callback1_called)
self.assertTrue(callback2_called)
self.assertTrue(status.ready)
self.assertEqual(status.result.result, 3628800)
self.assertIsNone(status.result.exception)
def test_multiprocessing_evaluator(self):
mp_evaluator = evaluation.MultiprocessingEvaluator()
status = evaluation.evaluate(pickleable_fact, args=[10], evaluator=mp_evaluator)
self.assertFalse(status.ready)
time.sleep(2)
self.assertTrue(status.ready)
self.assertEqual(status.result.result, 3628800)
self.assertIsNone(status.result.exception)
def test_multiprocessing_evaluator_callback(self):
mp_evaluator = evaluation.MultiprocessingEvaluator()
status = evaluation.evaluate(pickleable_fact, args=[10], evaluator=mp_evaluator)
self.assertFalse(status.ready)
callback_called = False
def callback(status):
nonlocal callback_called
callback_called = True
callback1_called = False
def callback1(status):
nonlocal callback1_called
callback1_called = True
callback2_called = False
def callback2(status):
nonlocal callback2_called
callback2_called = True
status.add_callback(callback1)
status.add_callback(callback2)
self.assertFalse(callback1_called)
self.assertFalse(callback2_called)
time.sleep(2)
self.assertTrue(callback1_called)
self.assertTrue(callback2_called)
self.assertTrue(status.ready)
self.assertEqual(status.result.result, 3628800)
self.assertIsNone(status.result.exception)
if __name__ == '__main__':
unittest.main()
| [
"thalesians.tsa.evaluation.IPyParallelEvaluator",
"math.factorial",
"thalesians.tsa.evaluation.MultiprocessingEvaluator",
"time.sleep",
"thalesians.tsa.evaluation.evaluate",
"unittest.main",
"thalesians.tsa.evaluation.CurrentThreadEvaluator"
] | [((117, 130), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (127, 130), False, 'import time\n'), ((142, 159), 'math.factorial', 'math.factorial', (['x'], {}), '(x)\n', (156, 159), False, 'import math\n'), ((4879, 4894), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4892, 4894), False, 'import unittest\n'), ((367, 402), 'thalesians.tsa.evaluation.CurrentThreadEvaluator', 'evaluation.CurrentThreadEvaluator', ([], {}), '()\n', (400, 402), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((420, 492), 'thalesians.tsa.evaluation.evaluate', 'evaluation.evaluate', (['fact'], {'args': '[10]', 'evaluator': 'current_thread_evaluator'}), '(fact, args=[10], evaluator=current_thread_evaluator)\n', (439, 492), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((831, 866), 'thalesians.tsa.evaluation.CurrentThreadEvaluator', 'evaluation.CurrentThreadEvaluator', ([], {}), '()\n', (864, 866), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((884, 956), 'thalesians.tsa.evaluation.evaluate', 'evaluation.evaluate', (['fact'], {'args': '[10]', 'evaluator': 'current_thread_evaluator'}), '(fact, args=[10], evaluator=current_thread_evaluator)\n', (903, 956), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((1792, 1825), 'thalesians.tsa.evaluation.IPyParallelEvaluator', 'evaluation.IPyParallelEvaluator', ([], {}), '()\n', (1823, 1825), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((1843, 1904), 'thalesians.tsa.evaluation.evaluate', 'evaluation.evaluate', (['fact'], {'args': '[10]', 'evaluator': 'ipp_evaluator'}), '(fact, args=[10], evaluator=ipp_evaluator)\n', (1862, 1904), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((1978, 1991), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1988, 1991), False, 'import time\n'), ((2315, 2348), 'thalesians.tsa.evaluation.IPyParallelEvaluator', 'evaluation.IPyParallelEvaluator', ([], {}), '()\n', (2346, 2348), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((2366, 2427), 'thalesians.tsa.evaluation.evaluate', 'evaluation.evaluate', (['fact'], {'args': '[10]', 'evaluator': 'ipp_evaluator'}), '(fact, args=[10], evaluator=ipp_evaluator)\n', (2385, 2427), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((2977, 2990), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2987, 2990), False, 'import time\n'), ((3316, 3353), 'thalesians.tsa.evaluation.MultiprocessingEvaluator', 'evaluation.MultiprocessingEvaluator', ([], {}), '()\n', (3351, 3353), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((3371, 3442), 'thalesians.tsa.evaluation.evaluate', 'evaluation.evaluate', (['pickleable_fact'], {'args': '[10]', 'evaluator': 'mp_evaluator'}), '(pickleable_fact, args=[10], evaluator=mp_evaluator)\n', (3390, 3442), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((3516, 3529), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3526, 3529), False, 'import time\n'), ((3763, 3800), 'thalesians.tsa.evaluation.MultiprocessingEvaluator', 'evaluation.MultiprocessingEvaluator', ([], {}), '()\n', (3798, 3800), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((3818, 3889), 'thalesians.tsa.evaluation.evaluate', 'evaluation.evaluate', (['pickleable_fact'], {'args': '[10]', 'evaluator': 'mp_evaluator'}), '(pickleable_fact, args=[10], evaluator=mp_evaluator)\n', (3837, 3889), True, 'import thalesians.tsa.evaluation as evaluation\n'), ((4586, 4599), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4596, 4599), False, 'import time\n'), ((280, 293), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (290, 293), False, 'import time\n'), ((313, 330), 'math.factorial', 'math.factorial', (['x'], {}), '(x)\n', (327, 330), False, 'import math\n'), ((744, 757), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (754, 757), False, 'import time\n'), ((777, 794), 'math.factorial', 'math.factorial', (['x'], {}), '(x)\n', (791, 794), False, 'import math\n'), ((1716, 1729), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1726, 1729), False, 'import time\n'), ((1749, 1766), 'math.factorial', 'math.factorial', (['x'], {}), '(x)\n', (1763, 1766), False, 'import math\n'), ((2239, 2252), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2249, 2252), False, 'import time\n'), ((2272, 2289), 'math.factorial', 'math.factorial', (['x'], {}), '(x)\n', (2286, 2289), False, 'import math\n')] |
import unittest
from users import User
class TestUser(unittest.TestCase):
def setUp(self):
"""
Set up method to run before each test cases.
"""
self.new_user = User("Mercy", "<EMAIL>", "<PASSWORD>") # create user object
def test_init(self):
"""
test_init test case to test if the object is initialized properly
"""
self.assertEqual(self.new_user.name, "Mercy")
self.assertEqual(self.new_user.user_email, "<EMAIL>")
self.assertEqual(self.new_user.user_password, "<PASSWORD>")
def test_save_user(self):
"""
test_save_user test case to test if the user object is saved into
the users list
"""
self.new_user.save_user() # saving the new user
self.assertEqual(len(User.user_list), 1)
def test_delete_user(self):
"""
test_delete_user to test if we can remove a user from our users list
"""
self.new_user.save_user()
test_user = User("Aviana", "<EMAIL>", "avi123")
test_user.save_user()
self.new_user.delete_user() # deleting a user object
self.assertEqual(len(User.user_list), 1)
def test_save_multiple_users(self):
"""
test to check if we can save multiple objects in the users list
"""
self.new_user.save_user()
test_user = User("Aviana", "<EMAIL>", "avi123")
test_user.save_user()
self.assertEqual(len(User.user_list), 2)
def test_find_credentials_by_email(self):
"""
test to check if we can find users by their email
"""
self.new_user.save_user()
test_user = User("Aviana", "<EMAIL>", "avi123")
test_user.save_user()
found_user = User.find_by_email("<EMAIL>.com")
self.assertEqual(found_user.name, test_user.name)
def test_user_exists(self):
"""
test to check if we can return a Boolean if we cannot find the user
:return: Boolean
"""
self.new_user.save_user()
test_user = User("Aviana", "aviavi", "avi123")
test_user.save_user()
user_exists = User.exists("aviavi")
self.assertTrue(user_exists)
def test_display_all_users(self):
"""
method that returns a list of all users saved
"""
self.assertEqual(User.display_users(), User.user_list)
def tearDown(self):
"""
method to clear instances created during testing when each test runs
"""
User.user_list = []
if __name__ == '__main__':
unittest.main() | [
"users.User.display_users",
"users.User",
"users.User.find_by_email",
"unittest.main",
"users.User.exists"
] | [((2588, 2603), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2601, 2603), False, 'import unittest\n'), ((199, 237), 'users.User', 'User', (['"""Mercy"""', '"""<EMAIL>"""', '"""<PASSWORD>"""'], {}), "('Mercy', '<EMAIL>', '<PASSWORD>')\n", (203, 237), False, 'from users import User\n'), ((1015, 1050), 'users.User', 'User', (['"""Aviana"""', '"""<EMAIL>"""', '"""avi123"""'], {}), "('Aviana', '<EMAIL>', 'avi123')\n", (1019, 1050), False, 'from users import User\n'), ((1384, 1419), 'users.User', 'User', (['"""Aviana"""', '"""<EMAIL>"""', '"""avi123"""'], {}), "('Aviana', '<EMAIL>', 'avi123')\n", (1388, 1419), False, 'from users import User\n'), ((1682, 1717), 'users.User', 'User', (['"""Aviana"""', '"""<EMAIL>"""', '"""avi123"""'], {}), "('Aviana', '<EMAIL>', 'avi123')\n", (1686, 1717), False, 'from users import User\n'), ((1770, 1803), 'users.User.find_by_email', 'User.find_by_email', (['"""<EMAIL>.com"""'], {}), "('<EMAIL>.com')\n", (1788, 1803), False, 'from users import User\n'), ((2074, 2108), 'users.User', 'User', (['"""Aviana"""', '"""aviavi"""', '"""avi123"""'], {}), "('Aviana', 'aviavi', 'avi123')\n", (2078, 2108), False, 'from users import User\n'), ((2162, 2183), 'users.User.exists', 'User.exists', (['"""aviavi"""'], {}), "('aviavi')\n", (2173, 2183), False, 'from users import User\n'), ((2363, 2383), 'users.User.display_users', 'User.display_users', ([], {}), '()\n', (2381, 2383), False, 'from users import User\n')] |
import requests
import urllib3
import sys
import json
from bs4 import BeautifulSoup as BS
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def get_sitemap_obos() -> BS:
sitemaps = [
"https://www.obos.no/sitemaps/sitemap_obos.no.xml",
"http://www.obos.no/sitemap.xml",
"https://bank.obos.no/sitemap.xml",
]
sitemap = ""
for i in sitemaps:
print(f"Getting {i}")
tekst = requests.get(i, verify=False).text
sitemap += tekst
return BS(sitemap, "xml")
def get_sitemap_nye() -> BS:
sitemaps = BS(
requests.get("https://nye.obos.no/sitemap.xml", verify=False).text, "lxml"
)
sitemaps = sitemaps.findAll("loc")
sitemap = ""
for url in sitemaps:
print(f"Getting {url.text}")
sitemap += requests.get(url.text, verify=False).text
return BS(sitemap, "lxml")
def get_all_sitemaps() -> BS:
nye = get_sitemap_nye()
obos = get_sitemap_obos()
nye.urlset.insert(-1, obos.urlset)
return nye
def find_urlfragment(tag: str, sitemap: BS) -> list:
locs = sitemap.findAll("loc")
print("in urlfragment")
if tag == "":
urls = [url.text for url in locs]
else:
urls = [url.text for url in locs if tag.lower() in url.text.lower()]
return urls
def get_titles(urls: list):
titles = []
for url in urls:
response = requests.get(url)
soup = BS(response.text)
titles.append(soup.title.text)
return titles
def get_title(soup: BS):
try:
return soup.title.text
except Exception:
return "No title found"
if __name__ == "__main__":
if not sys.argv[1]:
print("Add a url fragment ex: /bedrift ")
else:
url = sys.argv[1]
urls = find_urlfragment(url, get_sitemap())
with open(f"{url}.json", "w") as file:
json.dump(urls, file)
print(f"urler lagret som {url}.json")
| [
"bs4.BeautifulSoup",
"urllib3.disable_warnings",
"json.dump",
"requests.get"
] | [((93, 160), 'urllib3.disable_warnings', 'urllib3.disable_warnings', (['urllib3.exceptions.InsecureRequestWarning'], {}), '(urllib3.exceptions.InsecureRequestWarning)\n', (117, 160), False, 'import urllib3\n'), ((521, 539), 'bs4.BeautifulSoup', 'BS', (['sitemap', '"""xml"""'], {}), "(sitemap, 'xml')\n", (523, 539), True, 'from bs4 import BeautifulSoup as BS\n'), ((869, 888), 'bs4.BeautifulSoup', 'BS', (['sitemap', '"""lxml"""'], {}), "(sitemap, 'lxml')\n", (871, 888), True, 'from bs4 import BeautifulSoup as BS\n'), ((1399, 1416), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1411, 1416), False, 'import requests\n'), ((1432, 1449), 'bs4.BeautifulSoup', 'BS', (['response.text'], {}), '(response.text)\n', (1434, 1449), True, 'from bs4 import BeautifulSoup as BS\n'), ((450, 479), 'requests.get', 'requests.get', (['i'], {'verify': '(False)'}), '(i, verify=False)\n', (462, 479), False, 'import requests\n'), ((598, 659), 'requests.get', 'requests.get', (['"""https://nye.obos.no/sitemap.xml"""'], {'verify': '(False)'}), "('https://nye.obos.no/sitemap.xml', verify=False)\n", (610, 659), False, 'import requests\n'), ((816, 852), 'requests.get', 'requests.get', (['url.text'], {'verify': '(False)'}), '(url.text, verify=False)\n', (828, 852), False, 'import requests\n'), ((1880, 1901), 'json.dump', 'json.dump', (['urls', 'file'], {}), '(urls, file)\n', (1889, 1901), False, 'import json\n')] |
'''
author: juzicode
address: www.juzicode.com
公众号: 桔子code/juzicode
date: 2020.6.26
'''
print('\n')
print('-----欢迎来到www.juzicode.com')
print('-----公众号: 桔子code/juzicode\n')
import platform
print('architecture():',platform.architecture())
print('machine():',platform.machine())
print('processor():',platform.processor())
import platform
print('system():',platform.system())
print('platform():',platform.platform())
print('uname():',platform.uname())
print('version():',platform.version())
import platform
print('python_version():',platform.python_version()) #python版本
print('python_build():',platform.python_build()) #构建信息
print('python_compiler():',platform.python_compiler()) #编译器版本
print('python_implementation():',platform.python_implementation()) #python解释器类型
print('python_version_tuple():',platform.python_version_tuple()) #python版本元组
| [
"platform.version",
"platform.python_implementation",
"platform.python_compiler",
"platform.python_version_tuple",
"platform.platform",
"platform.uname",
"platform.python_build",
"platform.architecture",
"platform.system",
"platform.processor",
"platform.machine",
"platform.python_version"
] | [((217, 240), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (238, 240), False, 'import platform\n'), ((261, 279), 'platform.machine', 'platform.machine', ([], {}), '()\n', (277, 279), False, 'import platform\n'), ((302, 322), 'platform.processor', 'platform.processor', ([], {}), '()\n', (320, 322), False, 'import platform\n'), ((360, 377), 'platform.system', 'platform.system', ([], {}), '()\n', (375, 377), False, 'import platform\n'), ((399, 418), 'platform.platform', 'platform.platform', ([], {}), '()\n', (416, 418), False, 'import platform\n'), ((437, 453), 'platform.uname', 'platform.uname', ([], {}), '()\n', (451, 453), False, 'import platform\n'), ((474, 492), 'platform.version', 'platform.version', ([], {}), '()\n', (490, 492), False, 'import platform\n'), ((537, 562), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (560, 562), False, 'import platform\n'), ((601, 624), 'platform.python_build', 'platform.python_build', ([], {}), '()\n', (622, 624), False, 'import platform\n'), ((666, 692), 'platform.python_compiler', 'platform.python_compiler', ([], {}), '()\n', (690, 692), False, 'import platform\n'), ((735, 767), 'platform.python_implementation', 'platform.python_implementation', ([], {}), '()\n', (765, 767), False, 'import platform\n'), ((814, 845), 'platform.python_version_tuple', 'platform.python_version_tuple', ([], {}), '()\n', (843, 845), False, 'import platform\n')] |
import nlp
import pandas as pd
import googlemaps
gmaps = googlemaps.Client(key='INSERT KEY HERE')
dataframe = pd.read_csv('isthisarealjob_table_posts.csv')
dataframe = dataframe.drop(['3', '4', '2018-02-28 00:14:37', '2018-02-28 00:14:37.1', 'hello-world-071235157V21576', 'world,good', 'Unnamed: 9', 'NULL', 'NULL.1'], axis= 1)
dataframe = dataframe[['danny', '#world is greater than #good things', 'Hello World']]
dataframe_address = dataframe['#world is greater than #good things']
#input an address
address= input("Enter address: ")
# Geocoding an address
geocode_result = gmaps.geocode(address)
if geocode_result == []:
print ("This address is invalid")
else:
geocode_result= geocode_result[0]
if 'plus_code' in geocode_result:
print("The Company address is valid")
else:
print("This address is vague, This job invite is likely a scam")
| [
"googlemaps.Client",
"pandas.read_csv"
] | [((58, 98), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': '"""INSERT KEY HERE"""'}), "(key='INSERT KEY HERE')\n", (75, 98), False, 'import googlemaps\n'), ((111, 156), 'pandas.read_csv', 'pd.read_csv', (['"""isthisarealjob_table_posts.csv"""'], {}), "('isthisarealjob_table_posts.csv')\n", (122, 156), True, 'import pandas as pd\n')] |
"""Automated tests for solution 01 - add."""
from unittest import TestCase, main
from random_arguments_generator import generator_01
from config import I_HAVE_THE_TIME_AND_PATIENCE
from intro_01_add import solution
class TestIntro01Add(TestCase):
"""Class to test solution 01 - add.
The class methods are ordered by thoroughness with the last test being
skipped by default. This bahavior can be changed in the configuration
file (config.py).
"""
def test_example(self):
"""Checks the basic case, provided with the problem."""
self.assertEqual(solution(1, 2), 3)
def test_open_tests(self):
"""Checks the open free tests, provided by the platform."""
self.test_example()
self.assertEqual(solution(0, 1000), 1000)
self.assertEqual(solution(2, -39), -37)
self.assertEqual(solution(99, 100), 199)
self.assertEqual(solution(-100, 100), 0)
self.assertEqual(solution(-1000, -1000), -2000)
def test_300_random_inputs(self):
"""Tests if the solution passes 300 random tests.
These are _not_ the tests from the platform and should change with
every run (results may vary).
"""
inputs_iterator = generator_01()
for _ in range(300):
param1, param2 = next(inputs_iterator)
with self.subTest(param1=param1, param2=param2):
self.assertEqual(solution(param1, param2), param1 + param2)
def test_all_cases(self):
"""Comprehensive testing based on guaranteed constraints.
Very slow. Skipped by default.
"""
if not I_HAVE_THE_TIME_AND_PATIENCE:
self.skipTest("Not enough time/patience to run this many tests.")
for i in range(-1000, 1001):
for j in range(-1000, 1001):
with self.subTest(param1=i, param2=j):
self.assertEqual(solution(i, j), i + j)
if __name__ == "__main__":
main()
| [
"unittest.main",
"random_arguments_generator.generator_01",
"intro_01_add.solution"
] | [((1965, 1971), 'unittest.main', 'main', ([], {}), '()\n', (1969, 1971), False, 'from unittest import TestCase, main\n'), ((1235, 1249), 'random_arguments_generator.generator_01', 'generator_01', ([], {}), '()\n', (1247, 1249), False, 'from random_arguments_generator import generator_01\n'), ((587, 601), 'intro_01_add.solution', 'solution', (['(1)', '(2)'], {}), '(1, 2)\n', (595, 601), False, 'from intro_01_add import solution\n'), ((759, 776), 'intro_01_add.solution', 'solution', (['(0)', '(1000)'], {}), '(0, 1000)\n', (767, 776), False, 'from intro_01_add import solution\n'), ((809, 825), 'intro_01_add.solution', 'solution', (['(2)', '(-39)'], {}), '(2, -39)\n', (817, 825), False, 'from intro_01_add import solution\n'), ((857, 874), 'intro_01_add.solution', 'solution', (['(99)', '(100)'], {}), '(99, 100)\n', (865, 874), False, 'from intro_01_add import solution\n'), ((906, 925), 'intro_01_add.solution', 'solution', (['(-100)', '(100)'], {}), '(-100, 100)\n', (914, 925), False, 'from intro_01_add import solution\n'), ((955, 977), 'intro_01_add.solution', 'solution', (['(-1000)', '(-1000)'], {}), '(-1000, -1000)\n', (963, 977), False, 'from intro_01_add import solution\n'), ((1424, 1448), 'intro_01_add.solution', 'solution', (['param1', 'param2'], {}), '(param1, param2)\n', (1432, 1448), False, 'from intro_01_add import solution\n'), ((1910, 1924), 'intro_01_add.solution', 'solution', (['i', 'j'], {}), '(i, j)\n', (1918, 1924), False, 'from intro_01_add import solution\n')] |
# Generated by Django 2.1.5 on 2019-01-11 20:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('property', '0002_auto_20190111_2012'),
]
operations = [
migrations.AddField(
model_name='property',
name='image',
field=models.ImageField(null=True, upload_to='property/'),
),
]
| [
"django.db.models.ImageField"
] | [((336, 387), 'django.db.models.ImageField', 'models.ImageField', ([], {'null': '(True)', 'upload_to': '"""property/"""'}), "(null=True, upload_to='property/')\n", (353, 387), False, 'from django.db import migrations, models\n')] |
import click
from rich import print as rprint
from pydragonfly import DragonflyException, TParams
from .._utils import (
ClickContext,
json_flag_option,
add_options,
)
from ._renderables import _paginate_table, _generate_rule_table
@click.group("rule")
def rule():
"""
Rule Management\n
>>> [API] https://dragonfly.certego.net/api/rule\n
>>> [GUI] https://dragonfly.certego.net/dashboard/rules
"""
@rule.command("list", help="List all rules")
@add_options(json_flag_option)
@click.pass_context
def rule_list(ctx: ClickContext, as_json: bool):
ctx.obj._logger.info("Requesting list of rules..")
ctx.obj._logger.info(f"[+] GUI: {ctx.obj._server_url}/dashboard/rules")
params = TParams(ordering=["-created_at"])
try:
if as_json:
response = ctx.obj.Rule.list(params=params)
rprint(response.data)
else:
generator = ctx.obj.Rule.auto_paging_iter(params=params)
_paginate_table(generator, _generate_rule_table)
except DragonflyException as exc:
ctx.obj._logger.fatal(str(exc))
@rule.command("retrieve", help="Retrieve rule object")
@click.argument("rule_id", type=int)
@click.pass_context
def rule_retrieve(ctx: ClickContext, rule_id: int):
ctx.obj._logger.info(f"Requesting rule [underline blue]#{rule_id}[/]..")
try:
response = ctx.obj.Rule.retrieve(
object_id=rule_id, params=TParams(expand=["user"])
)
rprint(response.data)
except DragonflyException as exc:
ctx.obj._logger.fatal(str(exc))
| [
"click.group",
"click.argument",
"pydragonfly.TParams",
"rich.print"
] | [((248, 267), 'click.group', 'click.group', (['"""rule"""'], {}), "('rule')\n", (259, 267), False, 'import click\n'), ((1157, 1192), 'click.argument', 'click.argument', (['"""rule_id"""'], {'type': 'int'}), "('rule_id', type=int)\n", (1171, 1192), False, 'import click\n'), ((724, 757), 'pydragonfly.TParams', 'TParams', ([], {'ordering': "['-created_at']"}), "(ordering=['-created_at'])\n", (731, 757), False, 'from pydragonfly import DragonflyException, TParams\n'), ((1474, 1495), 'rich.print', 'rprint', (['response.data'], {}), '(response.data)\n', (1480, 1495), True, 'from rich import print as rprint\n'), ((855, 876), 'rich.print', 'rprint', (['response.data'], {}), '(response.data)\n', (861, 876), True, 'from rich import print as rprint\n'), ((1431, 1455), 'pydragonfly.TParams', 'TParams', ([], {'expand': "['user']"}), "(expand=['user'])\n", (1438, 1455), False, 'from pydragonfly import DragonflyException, TParams\n')] |
#!/usr/bin/python
##########################################################################
#
# MTraceCheck
# Copyright 2017 The Regents of the University of Michigan
# <NAME> and <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##########################################################################
import os
import sys
## For input, see dump file example at the end of this script
## For output, see signature_decoder.py. Occurrence count is set to 1
## Output example: 0x00c91437 0x0b8e9e0f: 1
metaAddr = 0x400000C0
startAddr = 0xA0000000
endAddr = None
signatureSize = None # Size in word (4 bytes)
numSignatures = None
wordIdx = 0
signatureString = ""
lastAddr = None
inputFP = open(sys.argv[1], "r")
outputFP = open(sys.argv[2], "w")
for line in inputFP:
#print line
if (len(line) >= 9 and line[8] == ":"):
# NOTE: This is not an absolutely correct way to parse only data.
# But probably this provides much better speed...
tokens = line.split()
currAddr = int(tokens[0][:-1],16)
if lastAddr != None and currAddr <= lastAddr:
print("Error: This script assumes an increasing address")
print(" Meta data (0x40000000 range) should be dumped")
print(" before signature data (0xA0000000 range)")
sys.exit(1)
if (endAddr != None):
if (currAddr < endAddr and currAddr >= startAddr):
for i in range(1, 5):
signatureString += " 0x%s" % tokens[i]
wordIdx += 1
if (wordIdx == signatureSize):
signatureString += ": 1\n"
outputFP.write(signatureString)
signatureString = ""
wordIdx = 0
elif (currAddr >= endAddr):
print("Terminated at line %s" % (line.rstrip()))
break
elif (currAddr == metaAddr):
#400000c0: 00000020 a000c340 0000061a 00010000
assert(signatureSize == None and endAddr == None and numSignatures == None)
signatureSize = int(tokens[1], 16) # size in word
endAddr = int(tokens[2], 16)
numSignatures = int(tokens[3], 16)
print("start %x end %x signature size %d words, %d signatures" % (startAddr, endAddr, signatureSize, numSignatures))
assert(endAddr == startAddr + (signatureSize * 4 * numSignatures))
lastAddr = currAddr
else:
if (line.startswith("Exynos")):
continue
elif (line[0] == "#"):
continue
else:
print("Warning: line %s is ignored" % line.rstrip())
continue
inputFP.close()
outputFP.close()
"""
## Starting application at 0x41000860 ...
## Application terminated, rc = 0x0
40000000: 02030000 00040000 02025c00 00000100 .........\......
40000010: 000002cf 00000200 02030000 10408b10 ..............@.
40000020: 1001008c 20028880 08100000 00000000 ....... ........
40000030: 00090000 04010000 00000c00 020080a0 ................
40000040: 00240000 00040000 00000000 01100000 ..$.............
40000050: 00000000 00000040 38850100 40021201 [email protected]...@
40000060: 00000800 00402480 02000000 08000000 .....$@.........
40000070: 00000000 00000040 00008000 80500241 [email protected].
40000080: ffffffff ffffffff ffffffff ffffffff ................
40000090: ffffff7f fffff7ff ffffffff ffffffff ................
400000a0: ffffffff ffffffff ffffffff ffffffff ................
400000b0: ff7fffff ffffffff ffffffff ffffffff ................
400000c0: 0006b031 08000014 00edf95d 00edf95d 1.......]...]...
400000d0: 001b510c 08000014 019a20c1 019a20c1 .Q....... ... ..
400000e0: 001b510c 08000014 01f62dda 01f62dda .Q.......-...-..
400000f0: 00000020 a000c340 0000061a 00010000 ................
40000100: 00110082 2101c812 00000000 00000800 .......!........
40000110: 00000000 00000000 01800000 c18002ba ................
40000120: 00000000 00800425 00000000 00000000 ....%...........
40000130: 00400000 00000000 20352000 10a40204 ..@...... 5 ....
40000140: 00004010 00009083 00000000 08000000 .@..............
40000150: 00000000 00000000 00800110 04000008 ................
40000160: 00002010 02022008 00000000 00010000 . ... ..........
40000170: 00000000 00000401 0a020040 00085001 [email protected]..
40000180: ffffffff ffffffff ffffffef ffffffff ................
40000190: ffffffff ffffffff ffffffff ffffffff ................
400001a0: ffffffff ffffffff ffffffff ffffffff ................
400001b0: ffffffff ffffffff ffffffff ffffffff ................
400001c0: ffffffff ffffffff ffeffffe ffffffff ................
400001d0: ffffffff ffffffff ffffffff ffffffff ................
400001e0: ffffffff ffffffff ffffffff ffffffff ................
400001f0: ffffffff ffff7fff ffffffff ffffffff ................
Exynos5422 # md 0xa0000000
a0000000: c276e04a 0b24b9ff 0b24babc 00000000 J.v...$...$.....
a0000010: 87778fb2 4b4788c8 8d3990b7 8d3a5596 ..w...GK..9..U:.
a0000020: c276e04a 0b24b9ff 0b24babc 00000000 J.v...$...$.....
a0000030: 8fd74fb2 53a748c8 959950b7 959a1596 .O...H.S.P......
...
a000c300: c63e62cd 0f3acb6c 0f3acc29 00000000 .b>.l.:.).:.....
a000c310: 6cdcc000 b50c0908 f68ecbfd f68f8ca4 ...l............
a000c320: c63e62cd 0f3acb6c 0f3acc29 00000000 .b>.l.:.).:.....
a000c330: 6cdcc000 b50c0908 f68ecbfd f68f8d1c ...l............
a000c340: 00042004 0010000c 00010000 00000000 . ..............
a000c350: 00000000 00000000 00000040 00110011 ........@.......
"""
| [
"sys.exit"
] | [((1829, 1840), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1837, 1840), False, 'import sys\n')] |
import torch
from otbar import Distribution
torch.set_default_tensor_type(torch.DoubleTensor)
def test_distributions():
# generate a distribution with three points
mu_support = torch.tensor([[1., 2.], [-3., 4.], [5., 9.]])
mu0 = Distribution(mu_support)
new_point = torch.tensor([9., 8.])
rho = 0.1
mu0.convexAddSupportPoint(new_point, rho)
| [
"torch.tensor",
"torch.set_default_tensor_type",
"otbar.Distribution"
] | [((47, 96), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['torch.DoubleTensor'], {}), '(torch.DoubleTensor)\n', (76, 96), False, 'import torch\n'), ((191, 242), 'torch.tensor', 'torch.tensor', (['[[1.0, 2.0], [-3.0, 4.0], [5.0, 9.0]]'], {}), '([[1.0, 2.0], [-3.0, 4.0], [5.0, 9.0]])\n', (203, 242), False, 'import torch\n'), ((247, 271), 'otbar.Distribution', 'Distribution', (['mu_support'], {}), '(mu_support)\n', (259, 271), False, 'from otbar import Distribution\n'), ((288, 312), 'torch.tensor', 'torch.tensor', (['[9.0, 8.0]'], {}), '([9.0, 8.0])\n', (300, 312), False, 'import torch\n')] |
from setuptools import setup, Extension
import numpy as np
module = Extension("testmodule_cpp", sources=["testmodule.cpp","testlist.cpp",\
"testobject.cpp", "testnumpy.cpp"], \
language="c++", include_dirs=[np.get_include()], extra_compile_args=["-std=c++11"],
define_macros=[("PY_ARRAY_UNIQUE_SYMBOL","PYEXTEND_ARRAY_API")]
)
setup(
name = "pyextend_testmodule",
ext_modules=[module]
)
| [
"setuptools.setup",
"numpy.get_include"
] | [((329, 384), 'setuptools.setup', 'setup', ([], {'name': '"""pyextend_testmodule"""', 'ext_modules': '[module]'}), "(name='pyextend_testmodule', ext_modules=[module])\n", (334, 384), False, 'from setuptools import setup, Extension\n'), ((208, 224), 'numpy.get_include', 'np.get_include', ([], {}), '()\n', (222, 224), True, 'import numpy as np\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.