code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
from __future__ import absolute_import
from __future__ import print_function
import os
import re
import sys
import json
import requests
import argparse
import time
import codecs
from bs4 import BeautifulSoup
from six import u
__version__ = '1.0'
# if python 2, disable verify flag in requests.get()
VERIFY = True
if sys.version_info[0] < 3:
VERIFY = False
requests.packages.urllib3.disable_warnings()
class PttWebCrawler(object):
    """Crawler for the web version of PTT (https://www.ptt.cc), the largest
    online community in Taiwan.

    Can run as a command-line tool (crawl a board by page-index range, or a
    single article by id, writing the result to a JSON file), or be used as a
    library by constructing with ``as_lib=True`` and calling
    :meth:`parse_articles` / :meth:`parse_article` directly.
    """

    PTT_URL = 'https://www.ptt.cc'

    def __init__(self, cmdline=None, as_lib=False):
        """Parse command-line arguments and, unless ``as_lib`` is set, run the
        requested crawl immediately.

        :param cmdline: optional list of argument strings (defaults to sys.argv)
        :param as_lib: when True, skip argument parsing and crawling so the
            instance can be driven programmatically
        """
        parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description='''
    A crawler for the web version of PTT, the largest online community in Taiwan.
    Input: board name and page indices (or article ID)
    Output: BOARD_NAME-START_INDEX-END_INDEX.json (or BOARD_NAME-ID.json)
''')
        parser.add_argument('-b', metavar='BOARD_NAME', help='Board name', required=True)
        group = parser.add_mutually_exclusive_group(required=True)
        group.add_argument('-i', metavar=('START_INDEX', 'END_INDEX'), type=int, nargs=2, help="Start and end index")
        group.add_argument('-a', metavar='ARTICLE_ID', help="Article ID")
        parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + __version__)

        if not as_lib:
            args = parser.parse_args(cmdline) if cmdline else parser.parse_args()
            board = args.b
            if args.i:
                start = args.i[0]
                # An end index of -1 means "crawl up to the newest page".
                end = self.getLastPage(board) if args.i[1] == -1 else args.i[1]
                self.parse_articles(start, end, board)
            else:  # args.a
                self.parse_article(args.a, board)

    def parse_articles(self, start, end, board, path='.', timeout=3):
        """Crawl index pages ``start``..``end`` (inclusive) of *board* and
        write every article found to ``<path>/<board>-<start>-<end>.json``
        as ``{"articles": [...]}``.

        Returns the output filename.
        """
        filename = board + '-' + str(start) + '-' + str(end) + '.json'
        filename = os.path.join(path, filename)
        self.store(filename, u'{"articles": [', 'w')
        first = True
        for index in range(start, end + 1):
            print('Processing index:', str(index))
            resp = requests.get(
                url=self.PTT_URL + '/bbs/' + board + '/index' + str(index) + '.html',
                cookies={'over18': '1'}, verify=VERIFY, timeout=timeout
            )
            if resp.status_code != 200:
                print('invalid url:', resp.url)
                continue
            soup = BeautifulSoup(resp.text, 'html.parser')
            for div in soup.find_all("div", "r-ent"):
                # ex. link: <a href="/bbs/PublicServan/M.1127742013.A.240.html">Re: [問題] 職等</a>
                anchor = div.find('a')
                if anchor is None or not anchor.get('href'):
                    # Deleted article: the row has no link.
                    continue
                href = anchor['href']
                link = self.PTT_URL + href
                article_id = re.sub(r'\.html', '', href.split('/')[-1])
                try:
                    parsed = self.parse(link, article_id, board)
                except Exception as exc:
                    # Keep crawling even if a single article fails.
                    print('failed to parse:', link, exc)
                    continue
                if not first:
                    self.store(filename, u',\n', 'a')
                self.store(filename, parsed, 'a')
                first = False
        self.store(filename, u']}', 'a')
        return filename

    def parse_article(self, article_id, board, path='.'):
        """Crawl a single article and write it to ``<path>/<board>-<id>.json``.

        Returns the output filename.
        """
        link = self.PTT_URL + '/bbs/' + board + '/' + article_id + '.html'
        filename = board + '-' + article_id + '.json'
        filename = os.path.join(path, filename)
        self.store(filename, self.parse(link, article_id, board), 'w')
        return filename

    @staticmethod
    def parse(link, article_id, board, timeout=3):
        """Fetch one article page and return the parsed article as a JSON
        string (url, board, article_id, title, author, date, content, ip,
        message_count, messages).  On a non-200 response an error JSON
        string is returned instead.
        """
        resp = requests.get(url=link, cookies={'over18': '1'}, verify=VERIFY, timeout=timeout)
        if resp.status_code != 200:
            print('invalid url:', resp.url)
            return json.dumps({"error": "invalid url"}, sort_keys=True, ensure_ascii=False)
        soup = BeautifulSoup(resp.text, 'html.parser')
        main_content = soup.find(id="main-content")
        metas = main_content.select('div.article-metaline')
        author = ''
        title = ''
        date = ''
        if metas:
            # The three metalines are author / title / date, in that order.
            author = metas[0].select('span.article-meta-value')[0].string if metas[0].select('span.article-meta-value')[0] else author
            title = metas[1].select('span.article-meta-value')[0].string if metas[1].select('span.article-meta-value')[0] else title
            date = metas[2].select('span.article-meta-value')[0].string if metas[2].select('span.article-meta-value')[0] else date
        # Remove meta nodes so they do not leak into the article content.
        for meta in metas:
            meta.extract()
        for meta in main_content.select('div.article-metaline-right'):
            meta.extract()
        # Remove push (comment) nodes from the content, keeping them for later parsing.
        pushes = main_content.find_all('div', class_='push')
        for push in pushes:
            push.extract()
        try:
            ip = main_content.find(text=re.compile(u'※ 發信站:'))
            ip = re.search(r'[0-9]*\.[0-9]*\.[0-9]*\.[0-9]*', ip).group()
        except (TypeError, AttributeError):
            # The "發信站" footer line or the IP inside it is missing.
            ip = "None"
        # 移除 '※ 發信站:' (starts with u'\u203b'), '◆ From:' (starts with u'\u25c6'), 空行及多餘空白
        # 保留英數字, 中文及中文標點, 網址, 部分特殊符號
        filtered = [v for v in main_content.stripped_strings if v[0] not in [u'※', u'◆'] and v[:2] not in [u'--']]
        expr = re.compile(u(r'[^\u4e00-\u9fa5\u3002\uff1b\uff0c\uff1a\u201c\u201d\uff08\uff09\u3001\uff1f\u300a\u300b\s\w:/-_.?~%()]'))
        for i in range(len(filtered)):
            filtered[i] = re.sub(expr, '', filtered[i])
        filtered = [_f for _f in filtered if _f]  # remove empty strings
        filtered = [x for x in filtered if article_id not in x]  # remove last line containing the url of the article
        content = ' '.join(filtered)
        content = re.sub(r'(\s)+', ' ', content)
        # Parse push messages, tallying up / down / neutral votes.
        p, b, n = 0, 0, 0
        messages = []
        for push in pushes:
            if not push.find('span', 'push-tag'):
                continue
            push_tag = push.find('span', 'push-tag').string.strip(' \t\n\r')
            push_userid = push.find('span', 'push-userid').string.strip(' \t\n\r')
            push_content = push.find('span', 'push-content').strings
            push_content = ' '.join(push_content)[1:].strip(' \t\n\r')  # remove ':'
            push_ipdatetime = push.find('span', 'push-ipdatetime').string.strip(' \t\n\r')
            messages.append({'push_tag': push_tag, 'push_userid': push_userid,
                             'push_content': push_content, 'push_ipdatetime': push_ipdatetime})
            if push_tag == u'推':
                p += 1
            elif push_tag == u'噓':
                b += 1
            else:
                n += 1
        # count: 推噓文相抵後的數量; all: 推文總數
        message_count = {'all': p + b + n, 'count': p - b, 'push': p, 'boo': b, "neutral": n}
        data = {
            'url': link,
            'board': board,
            'article_id': article_id,
            'article_title': title,
            'author': author,
            'date': date,
            'content': content,
            'ip': ip,
            'message_count': message_count,
            'messages': messages
        }
        return json.dumps(data, sort_keys=True, ensure_ascii=False)

    @staticmethod
    def getLastPage(board, timeout=3):
        """Return the index of the newest page of *board*, or 1 when it
        cannot be determined.
        """
        content = requests.get(
            url='https://www.ptt.cc/bbs/' + board + '/index.html',
            cookies={'over18': '1'}, timeout=timeout
        ).content.decode('utf-8')
        # The "previous page" link points at index<N>.html; the newest page is N + 1.
        first_page = re.search(r'href="/bbs/' + board + r'/index(\d+).html">‹', content)
        if first_page is None:
            return 1
        return int(first_page.group(1)) + 1

    @staticmethod
    def store(filename, data, mode):
        """Write *data* (a text/JSON fragment) to *filename* in the given mode."""
        with codecs.open(filename, mode, encoding='utf-8') as f:
            f.write(data)

    @staticmethod
    def get(filename, mode='r'):
        """Load and return the JSON document stored in *filename*."""
        with codecs.open(filename, mode, encoding='utf-8') as f:
            return json.load(f)
if __name__ == '__main__':
c = PttWebCrawler() | AioPTTCrawler | /AioPTTCrawler-0.0.12.tar.gz/AioPTTCrawler-0.0.12/PttWebCrawler/crawler.py | crawler.py |
import asyncpg
import jsonpickle as jsonpickle
from asyncio import AbstractEventLoop
from aiogram.fsm.storage.base import BaseStorage, StorageKey, StateType
from typing import Dict, Optional, Any
class PoolManager:
    """Async context manager that opens an asyncpg pool and lazily creates
    the storage tables on first use.

    Usage::

        async with PoolManager(dsn=...) as pool:
            await pool.execute(...)
    """

    __slots__ = ("_db_auth_data", "_tables_is_created", "pool")

    def __init__(self, **kwargs) -> None:
        # Connection parameters are forwarded verbatim to asyncpg.create_pool().
        self._db_auth_data = kwargs
        self._tables_is_created = False

    async def __aenter__(self) -> asyncpg.Pool:
        # asyncpg has no "autocommit" keyword: statements executed outside an
        # explicit transaction are committed automatically.
        self.pool = await asyncpg.create_pool(**self._db_auth_data)
        if not self._tables_is_created:
            # Table names must match the queries issued by PGStorage
            # ("AiogramLegacyStates" / "AiogramLegacyData").
            await self.pool.execute("""CREATE TABLE IF NOT EXISTS "AiogramLegacyStates"(
                "key" TEXT NOT NULL PRIMARY KEY,
                "state" TEXT NOT NULL)""")
            await self.pool.execute("""CREATE TABLE IF NOT EXISTS "AiogramLegacyData"(
                "key" TEXT NOT NULL PRIMARY KEY,
                "data" TEXT)""")
            self._tables_is_created = True
        return self.pool

    async def __aexit__(self, *args):
        await self.pool.close()
class PGStorage(BaseStorage):
    """aiogram FSM storage backed by PostgreSQL (via asyncpg).

    A connection pool is opened per operation through :class:`PoolManager`;
    state and data live in the "AiogramLegacyStates" / "AiogramLegacyData"
    tables, keyed by a flattened StorageKey string.
    """

    __slots__ = ("host", "port", "username", "password", "database", "dsn", "loop")

    def __init__(
            self, username: str, password: str, database: str,
            host: str = "localhost", port: int = 5432, dsn: str = None,
            loop: AbstractEventLoop = None
    ) -> None:
        """Store connection settings.  A non-None ``dsn`` overrides the
        individual host/port/user/password/database parameters.
        """
        # asyncpg expects the parameter name "user" (not "username").
        self.__auth_data = {
            "host": host,
            "port": port,
            "user": username,
            "password": password,
            "database": database
        }
        if dsn is not None:
            self.__auth_data = {"dsn": dsn}
        if loop is not None:
            self.__auth_data["loop"] = loop
        self.__db = PoolManager(**self.__auth_data)

    @staticmethod
    def _build_key(key: StorageKey) -> str:
        """Serialize a StorageKey into a single TEXT column value; asyncpg
        cannot bind the StorageKey dataclass directly.
        """
        return f"{key.bot_id}:{key.chat_id}:{key.user_id}"

    async def set_state(self, key: StorageKey, state: StateType = None) -> None:
        """Persist the FSM state for ``key`` (or clear it when ``state`` is None)."""
        raw_key = self._build_key(key)
        # aiogram may pass a State object; persist its string identifier.
        raw_state = getattr(state, "state", state)
        async with self.__db as db:
            if raw_state is None:
                # The state column is NOT NULL: clearing means deleting the row.
                await db.execute('DELETE FROM "AiogramLegacyStates" WHERE key=$1', raw_key)
            else:
                # Upsert so repeated set_state calls do not violate the primary key.
                await db.execute(
                    'INSERT INTO "AiogramLegacyStates" VALUES($1, $2) '
                    'ON CONFLICT ("key") DO UPDATE SET state=excluded.state',
                    raw_key, raw_state
                )

    async def get_state(self, key: StorageKey) -> Optional[str]:
        """Return the stored state for ``key``, or None when absent."""
        async with self.__db as db:
            return await db.fetchval(
                'SELECT "state" FROM "AiogramLegacyStates" WHERE key=$1',
                self._build_key(key)
            )

    async def set_data(self, key: StorageKey, data: Dict[str, Any]) -> None:
        """Persist the FSM data mapping for ``key`` (upsert)."""
        async with self.__db as db:
            await db.execute(
                'INSERT INTO "AiogramLegacyData" VALUES($1, $2) '
                'ON CONFLICT ("key") DO UPDATE SET data=excluded.data',
                self._build_key(key), jsonpickle.dumps(data)
            )

    async def get_data(self, key: StorageKey) -> Dict[str, Any]:
        """Return the stored data mapping for ``key``; {} when absent."""
        async with self.__db as db:
            raw = await db.fetchval(
                'SELECT "data" FROM "AiogramLegacyData" WHERE key=$1',
                self._build_key(key)
            )
        return jsonpickle.loads(raw) if raw is not None else {}

    async def update_data(self, key: StorageKey, data: Dict[str, Any]) -> Dict[str, Any]:
        """Merge ``data`` into the stored mapping and return the merged result."""
        current = await self.get_data(key)
        current.update(data)
        await self.set_data(key, current)
        return current

    async def close(self) -> None:
        """No persistent resources to release: a pool is opened per operation."""
        pass
import aiosqlite
import jsonpickle as jsonpickle
from asyncio import AbstractEventLoop
from aiogram.fsm.storage.base import BaseStorage, StorageKey, StateType
from typing import Dict, Optional, Any
class PoolManager:
    """Async context manager around a single aiosqlite connection.

    Entering opens the database file (creating the storage tables the first
    time); leaving commits pending work and closes the connection.
    """

    __slots__ = ("_db_auth_data", "_tables_is_created", "pool")

    def __init__(self, **kwargs) -> None:
        # Keyword arguments are forwarded verbatim to aiosqlite.connect().
        self._db_auth_data = kwargs
        self._tables_is_created = False

    async def __aenter__(self) -> aiosqlite.Connection:
        self.pool = await aiosqlite.connect(**self._db_auth_data)
        if not self._tables_is_created:
            ddl_statements = (
                """CREATE TABLE IF NOT EXISTS "AiogramLegacyStates"(
                "key" TEXT NOT NULL PRIMARY KEY,
                "state" TEXT NOT NULL)""",
                """CREATE TABLE IF NOT EXISTS "AiogramLegacyData"(
                "key" TEXT NOT NULL PRIMARY KEY,
                "data" TEXT)""",
            )
            for statement in ddl_statements:
                await self.pool.execute(statement)
            self._tables_is_created = True
        return self.pool

    async def __aexit__(self, *args):
        await self.pool.commit()
        await self.pool.close()
class SQLiteStorage(BaseStorage):
    """aiogram FSM storage backed by a SQLite database file (via aiosqlite)."""

    __slots__ = ("database", "loop")

    def __init__(self, database_path: str, loop: AbstractEventLoop = None) -> None:
        """Remember the database path; the file is opened per operation."""
        self.__auth_data = {
            "database": database_path
        }
        if loop is not None:
            self.__auth_data["loop"] = loop
        self.__db = PoolManager(**self.__auth_data)

    @staticmethod
    def _build_key(key: StorageKey) -> str:
        """Serialize a StorageKey into a single TEXT column value; sqlite
        cannot bind the StorageKey dataclass directly.
        """
        return f"{key.bot_id}:{key.chat_id}:{key.user_id}"

    async def set_state(self, key: StorageKey, state: StateType = None) -> None:
        """Persist the FSM state for ``key`` (or clear it when ``state`` is None)."""
        raw_key = self._build_key(key)
        # aiogram may pass a State object; persist its string identifier.
        raw_state = getattr(state, "state", state)
        async with self.__db as db:
            if raw_state is None:
                # The state column is NOT NULL: clearing means deleting the row.
                await db.execute("DELETE FROM \"AiogramLegacyStates\" WHERE key=?", (raw_key,))
            else:
                # Upsert so repeated set_state calls do not violate the primary key.
                await db.execute("INSERT OR REPLACE INTO \"AiogramLegacyStates\" VALUES(?, ?)", (raw_key, raw_state))
            await db.commit()

    async def get_state(self, key: StorageKey) -> Optional[str]:
        """Return the stored state for ``key``, or None when absent."""
        async with self.__db as db:
            async with db.execute("SELECT \"state\" FROM \"AiogramLegacyStates\" WHERE key=?", (self._build_key(key),)) as cur:
                row = await cur.fetchone()
        return row[0] if row else None

    async def set_data(self, key: StorageKey, data: Dict[str, Any]) -> None:
        """Persist the FSM data mapping for ``key`` (upsert)."""
        async with self.__db as db:
            await db.execute(
                "INSERT OR REPLACE INTO \"AiogramLegacyData\" VALUES(?, ?)",
                (self._build_key(key), jsonpickle.dumps(data))
            )
            await db.commit()

    async def get_data(self, key: StorageKey) -> Dict[str, Any]:
        """Return the stored data mapping for ``key``; {} when absent."""
        async with self.__db as db:
            async with db.execute("SELECT \"data\" FROM \"AiogramLegacyData\" WHERE key=?", (self._build_key(key),)) as cur:
                row = await cur.fetchone()
        return jsonpickle.loads(row[0]) if row and row[0] is not None else {}

    async def update_data(self, key: StorageKey, data: Dict[str, Any]) -> Dict[str, Any]:
        """Merge ``data`` into the stored mapping and return the merged result."""
        current = await self.get_data(key)
        current.update(data)
        await self.set_data(key, current)
        return current

    async def close(self) -> None:
        """No persistent resources to release: a connection is opened per operation."""
        pass
import aiomysql
import jsonpickle as jsonpickle
from asyncio import AbstractEventLoop
from aiogram.fsm.storage.base import BaseStorage, StorageKey, StateType
from typing import Dict, Optional, Any
class PoolManager:
    """Async context manager yielding an aiomysql cursor.

    Entering connects (and, once, creates the storage tables); leaving
    closes the cursor and the connection.
    """

    __slots__ = ("_db_auth_data", "_tables_is_created", "pool", "cursor")

    def __init__(self, **kwargs) -> None:
        # Keyword arguments are forwarded verbatim to aiomysql.connect().
        self._db_auth_data = kwargs
        self._tables_is_created = False

    async def __aenter__(self) -> aiomysql.Cursor:
        self.pool: aiomysql.Connection = await aiomysql.connect(**self._db_auth_data, autocommit=True)
        # aiomysql's Connection.cursor() is a coroutine and must be awaited.
        self.cursor: aiomysql.Cursor = await self.pool.cursor()
        if not self._tables_is_created:
            # MySQL quotes identifiers with backticks (not double quotes), and a
            # TEXT column cannot be a primary key, so a bounded VARCHAR is used.
            # Table names must match the queries issued by MySQLStorage.
            await self.cursor.execute("""CREATE TABLE IF NOT EXISTS `AiogramLegacyStates`(
                `key` VARCHAR(255) NOT NULL PRIMARY KEY,
                `state` TEXT NOT NULL)""")
            await self.cursor.execute("""CREATE TABLE IF NOT EXISTS `AiogramLegacyData`(
                `key` VARCHAR(255) NOT NULL PRIMARY KEY,
                `data` TEXT)""")
            self._tables_is_created = True
        return self.cursor

    async def __aexit__(self, *args):
        await self.cursor.close()
        # Connection.close() is synchronous in aiomysql; ensure_closed() is
        # the awaitable graceful shutdown.
        await self.pool.ensure_closed()
class MySQLStorage(BaseStorage):
    """aiogram FSM storage backed by MySQL (via aiomysql).

    A connection is opened per operation through :class:`PoolManager`; state
    and data live in the `AiogramLegacyStates` / `AiogramLegacyData` tables,
    keyed by a flattened StorageKey string.
    """

    __slots__ = ("host", "port", "username", "password", "db", "loop")

    def __init__(
            self, username: str, password: str, database: str,
            host: str = "localhost", port: int = 3306,
            loop: AbstractEventLoop = None
    ) -> None:
        """Store connection settings.

        The default port is 3306, the MySQL standard port (the previous
        default of 5432 is PostgreSQL's and could never connect to a
        default MySQL server).
        """
        # aiomysql expects the parameter names "user" and "db".
        self.__auth_data = {
            "host": host,
            "port": port,
            "user": username,
            "password": password,
            "db": database
        }
        if loop is not None:
            self.__auth_data["loop"] = loop
        self.__db = PoolManager(**self.__auth_data)

    @staticmethod
    def _build_key(key: StorageKey) -> str:
        """Serialize a StorageKey into a single VARCHAR column value; the
        driver cannot bind the StorageKey dataclass directly.
        """
        return f"{key.bot_id}:{key.chat_id}:{key.user_id}"

    async def set_state(self, key: StorageKey, state: StateType = None) -> None:
        """Persist the FSM state for ``key`` (or clear it when ``state`` is None)."""
        raw_key = self._build_key(key)
        # aiogram may pass a State object; persist its string identifier.
        raw_state = getattr(state, "state", state)
        async with self.__db as db:
            if raw_state is None:
                # The state column is NOT NULL: clearing means deleting the row.
                # `key` is a reserved word in MySQL and must be backticked.
                await db.execute("DELETE FROM `AiogramLegacyStates` WHERE `key`=%s", (raw_key,))
            else:
                # REPLACE acts as an upsert keyed on the primary key.
                await db.execute("REPLACE INTO `AiogramLegacyStates` VALUES(%s, %s)", (raw_key, raw_state))

    async def get_state(self, key: StorageKey) -> Optional[str]:
        """Return the stored state for ``key``, or None when absent."""
        async with self.__db as db:
            await db.execute("SELECT `state` FROM `AiogramLegacyStates` WHERE `key`=%s", (self._build_key(key),))
            row = await db.fetchone()
        return row[0] if row else None

    async def set_data(self, key: StorageKey, data: Dict[str, Any]) -> None:
        """Persist the FSM data mapping for ``key`` (upsert)."""
        async with self.__db as db:
            await db.execute(
                "REPLACE INTO `AiogramLegacyData` VALUES(%s, %s)",
                (self._build_key(key), jsonpickle.dumps(data))
            )

    async def get_data(self, key: StorageKey) -> Dict[str, Any]:
        """Return the stored data mapping for ``key``; {} when absent."""
        async with self.__db as db:
            await db.execute("SELECT `data` FROM `AiogramLegacyData` WHERE `key`=%s", (self._build_key(key),))
            row = await db.fetchone()
        return jsonpickle.loads(row[0]) if row and row[0] is not None else {}

    async def update_data(self, key: StorageKey, data: Dict[str, Any]) -> Dict[str, Any]:
        """Merge ``data`` into the stored mapping and return the merged result."""
        current = await self.get_data(key)
        current.update(data)
        await self.set_data(key, current)
        return current

    async def close(self) -> None:
        """No persistent resources to release: a connection is opened per operation."""
        pass

# Save your data!
**Aiogram-storages** was created to extend the standard fsm_storage options in **aiogram**.
Our library supports such databases as: **PostgreSQL**, **SQLite** and **MySQL**.
# Installation
pip install AiogramStorages
# PostgreSQL
Support for storage with the **PostgreSQL** database is due to the **asyncpg** asynchronous library, which gives a **huge data processing speed**, and, accordingly, the bot itself.
In order to use it, you need to create an instance of the **PGStorage** class, to which you need to pass the **required parameters (user, password, db_name).** You can also specify additional parameters (host, port).
Next, this instance must be passed to the **Dispatcher.**
## Example
from AiogramStorages.storages import PGStorage
storage = PGStorage(username='YourUser', password='YourPassword', database='YourDbName')
dp = Dispatcher(bot, storage=storage)
## Warning
By default, **PGStorage** creates two tables in your database named: **AiogramLegacyStates** and **AiogramLegacyData**.
We **strongly recommend** that you do **not use these names for your own tables**, otherwise there may be conflicts.
# SQLiteStorage
Support for storage with the **SQLite** database is due to the **aiosqlite** asynchronous library, which gives a **huge data processing speed**, and, accordingly, the bot itself.
In order to use it, you need to create an instance of the **SQLiteStorage** class, to which you need to pass the **required parameter (database_path).**
Next, this instance must be passed to the **Dispatcher.**
## Example
from AiogramStorages.storages import SQLiteStorage
storage = SQLiteStorage(database_path='your_path')
dp = Dispatcher(bot, storage=storage)
## Warning
By default, **SQLiteStorage** creates two tables in your database named: **AiogramLegacyStates** and **AiogramLegacyData**.
We **strongly recommend** that you do **not use these names for your own tables**, otherwise there may be conflicts.
| AiogramStorages | /AiogramStorages-1.0.0.tar.gz/AiogramStorages-1.0.0/README.md | README.md |
# Air Alerts
Air Alerts is a module for monitorining Air Raid Alerts in Ukraine.
The module uses a complete map of alerts that provides incredibly accurate information.
# WARNING
Module was tested only on Python 3.9 with Windows 10. With other versions of python or on other operating system module may be broken.
# Requirements
This module is lightweight and not need many libraries. You need only 2!
- Requests
- Pillow
All of the needed libraries will be automatically installed on your computer
while you are installing Air Alerts.
# Updates in new version
- Added two versions: Async and Sync
# Usage
## Sync
#### Getting real-time information dictionary
```python
import AirAlerts
alerts = AirAlerts.AlertClass()
real_time_alerts = alerts.get_alerts()
print(real_time_alerts)
```
This code will return a dictionary of alerts. It will look like this:
```python
{
"kyiv": False,
"chernivtsi_region": False,
"ivano-frankivsk_region": False,
"volyn_region": False,
"zakarpattia_region": False,
"zaporizhia_region": False,
"zhytomyr_region": False,
"kirovohrad_region": False,
"kyiv_region": False,
"luhansk_region": False,
"lviv_region": False,
"mykolaiv_region": False,
"odesa_region": False,
"poltava_region": False,
"rivne_region": False,
"sumy_region": False,
"ternopil_region": False,
"kharkiv_region": False,
"kherson_region": False,
"khmelnytskyi_region": False,
"chernihiv_region": False,
"cherkasy_region" : False,
"donetsk_region" : False
"vinnytsia_region" : False
"dnipropetrovsk_region" : False
}
```
#### Getting air alerts in list
```python
import AirAlerts
alerts = AirAlerts.AlertClass()
alerts_list = alerts.alerts_list()
print(alerts_list)
```
This code will return a list of alerts that will look like this:
['kyiv','kyiv_region','luhansk_region']
## Async
#### Getting real-time information dictionary
```python
import asyncio
from AirAlerts.Async import AsyncAlertClass
async def main():
alerts = AsyncAlertClass()
real_time_alerts = await alerts.get_alerts()
print(real_time_alerts)
asyncio.run(main())
```
This code will return a dictionary of alerts as i mentioned in Sync version
#### Getting air alerts in list
```python
import asyncio
from AirAlerts.Async import AsyncAlertClass
async def main():
alerts = AsyncAlertClass()
alerts_list = await alerts.alerts_list()
print(alerts_list)
asyncio.run(main())
```
### Thanks everybody who is supporting this project. I appreciate it! Thanks to Ukrainian Army and all Ukrainian, Polish, Romanian, American, Israel and much much more people for helping Ukraine
### Ukraine is very grateful for your help. And we wish one day that all ends
### If you have any questions don't be shy, contact me on Discord Romikan#5428
# License
MIT License
Copyright (c)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
| AirAlerts | /AirAlerts-1.0.3.tar.gz/AirAlerts-1.0.3/README.md | README.md |
AirProfile
==========
.. image:: https://badge.fury.io/py/AirProfile.svg
:target: https://badge.fury.io/py/AirProfile
A python package for automatic analysis of Airbnb host profiles.
The package takes an Airbnb profile, automatically tags topics for each sentence, and predicts whether the profile will be perceived as more trustworthy compared to other profiles of similar length.
Example Usage
-------------
LIWC2007_ is a proprietary dependency required for predicting trust. Unfortunately, we can't include it in this package for legal reasons.
.. _LIWC2007: https://liwc.wpengine.com
However, if you do not have LIWC, you can still perform topic classification.
.. code:: python
from AirProfile import AirProfile
ap = AirProfile(liwc_path='../LIWC2007/liwc_2007.trie')
# or ap = AirProfile() if you do not have LIWC.
# Example Airbnb host profile.
input = """I have spent my life in the service industry. I look forward to being your host and I look forward to meeting you."""
# Segments the input at the sentence level and returns the probability that
# each sentence is tagged with the topics described in [1]. This works with or
# without LIWC.
ap.predict_topics(input)
>>> [
[
'i have spent my life in the service industry',
{
'relationships': 0.02,
'workEducation': 0.99,
'travel': 0.0,
'originResidence': 0.07,
'lifeMottoValues': 0.03,
'hospitality': 0.02,
'interestsTastes': 0.03,
'personality': 0.02
}
], [
'i look forward to being your host and i look forward to meeting you',
{
'relationships': 0.0,
'workEducation': 0.0,
'travel': 0.02,
'originResidence': 0.0,
'lifeMottoValues': 0.0,
'hospitality': 1.0,
'interestsTastes': 0.0,
'personality': 0.04
}
]
]
# Segments the input at the sentence level and returns the probability that
# the profile is perceived to be more trustworthy compared to other profiles
# of similar length. This requires LIWC and will throw an error otherwise.
ap.predict_trust(input)
>>> Prediction(prob=0.49, predict=0)
References
----------
[1] Self-disclosure and Perceived Trustworthiness of Airbnb Host Profiles. Xiao Ma, Jeff Hancock, Kenneth Lim Mingjie, and Mor Naaman. CSCW 2017. Honorable Mention for Best Paper. [PDF1_]
.. _PDF1: https://s.tech.cornell.edu/assets/papers/ma2017airbnb.pdf
[2] A Computational Approach to Perceived Trustworthiness of Airbnb Host Profiles. Xiao Ma, Trishala Neeraj, Mor Naamann. ICWSM 2017. Poster. [PDF2_]
.. _PDF2: http://maxiao.info/assets/computational-airbnb.pdf
| AirProfile | /AirProfile-1.0.12.tar.gz/AirProfile-1.0.12/README.rst | README.rst |
<p align="center">
<img src="https://picreso.oss-cn-beijing.aliyuncs.com/airs.png" width="150px">
<p align="center" style="font-weight:bold">
Distributed - Asynchronous - Easy-to-use
<br>
<br>
An Easy-to-use and Fast Python Spider Framework
<img src="http://picreso.oss-cn-beijing.aliyuncs.com/airdemo.png">
</p>
---
## Overview
> + **AirSpider** is a high-performance asynchronous crawler framework for developers
> + Based on Redis: task distribution, task deduplication, and distributed ☁️
## Requirements
- Python 3.6
- Works on Linux, Windows, macOS
## Features
+ Quick to Start
+ Low Coupling
+ High Cohesion
+ Easy Expansion
+ Orderly Workflow
## Installation
``` shell
# For Linux && MacOS
pip3 install airspider
# For Windows
pip3 install airspider
```
## Documents
+ [Introduction](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/introduction.md):Introduction to **AirSpider**
+ [Tutorials](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/tutorials.md): Quick to program a Spider By **AirSpider**
+ [Plugins](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/plugins.md):Program extensions for **AirSpider**
## Topics
- [Item](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/topics/item.md):Define Target in HTML
- [Selector](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/topics/selector.md):Select Target from HTML
- [Request](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/topics/request.md):Request Part
- [Response](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/topics/response.md):Response Part
- [Middleware](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/topics/middleware.md):Make Airspider Support to Extensions
- [Spider](https://github.com/Xunzhuo/AirSpider/tree/master/Docs/cn/topics/spider.md):Entrance for AirSpider
## TODO
+ Complete Plugins of Redis
+ Complete Distributed Architecture
## Contributing
**AirSpider**🕷️ is still under **Developing**
> Feel free to open issues💬 and pull requests
- Report or Fix bugs
- Build Powerful plugins
- Make documentation Better
- Add Examples of Spiders
 | AirSpider | /AirSpider-2.0.4.tar.gz/AirSpider-2.0.4/README.md | README.md |
Copyright (C) 2013 WebItUp. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. | AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/LICENSE.md | LICENSE.md |
AirStrip.js
=============
About
-------------
This project is meant to ease dealing with third-party javascript dependencies in ambitious client-side web projects.
Conceptually, Airstrip has similarities with Twitter's Bower, and npm component (https://npmjs.org/package/component).
For the impatients
-------------
Read.
Problem
-------------
Modern javascript projects usually depend on numerous third-party libraries and frameworks
(say: requirejs, handlebars, i18n, emberjs, jasmine).
Picking these, building, minifying, tracking versions, possibly patching or forking them, maintaining dependencies, then integrating into a project can quickly become boringly repetitive and tedious.
Solution
-------------
The idea here is to help do that, by providing tools to quickly assemble dependencies from numerous, widely used libraries, build them uniformly, list various versions, then "dispatching" the results in a build directory to be then used by said projects - and obviously tools that help you do that for your own libraries.
Installation
-------------
`pip install airstrip`
`pip install airstrip --upgrade`
API
-------------
Once the airstrip binary has been installed, you should cd to your project root source folder and may use the following commands.
Command:
```airstrip show ember```
Result:
Details about EmberJS, and list of available versions
Command:
```airstrip require emberjs```
Result:
Add emberjs (in version "master") to your project dependencies. This will create or update the project "airfile.json" listing said dependencies.
Command:
```airstrip require emberjs SOMEVERSION```
Result:
Same as above, but explicitely require a specific version. The "master" version (eg: trunk) keywords should always exist for any library.
Multiple different versions of the same library can be required.
Note that requiring a project that depends on other projects will require them as well, in the recommended version (XXX, not done yet).
Command:
```airstrip remove emberjs```
```airstrip remove emberjs SOMEVERSION```
Result:
Will remove the library from the project dependencies list, if present (possibly in the specified version).
Command:
```airstrip require```
Result:
List currently required libraries for your project, along with versions.
Command:
```airstrip build```
Result:
Build all required libraries for your project, and output them into a "dependencies" folder.
Command:
```airstrip build ember```
Result:
Build, or rebuild only the specified library (that you requested).
Command:
```airstrip use```
Result:
List configuration flags, possibly with their default value if overriden.
Command:
```airstrip use key value```
Result:
Locally (to your project) override a specific configuration key.
API: risky, untested, undocumented, internal
-------------
Command:
```airstrip seed```
Result:
Initialize a new project inside the current working directory, by adding a number of convenient boilerplates files.
Command:
```airstrip init owner repository```
Result:
Initialize (or update) a formula from a project on github ("repository") whose owner is "owner". Will fetch tags and stuff like that.
Command:
```airstrip edit somelibrary```
Result:
Edit an existing or create a new empty "formula" for a given library, locally to your project so you can add new library (XXX untested).
Command:
```airstrip edit somelibrary true```
Result:
Edit an existing or create a new empty "formula" for a given library, globally for airstrip (XXX untested and not recommended).
License
-------------
MIT.
| AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/README.md | README.md |
from puke import *
import json
import os
import re
from distutils import version
from verlib import NormalizedVersion
from distutils.version import StrictVersion, LooseVersion
# Directory containing this module; used to locate the bundled formulas.
AIRSTRIP_ROOT = os.path.dirname(os.path.realpath(__file__))
# Captures the non-digit prefix of a version string (e.g. the "v" in "v1.2.3").
NORMALIZE_VERSION = re.compile(r'([^\d]*)')
# System-wide yawns path
AIRSTRIP_YAWN_PATH = os.path.join(AIRSTRIP_ROOT, 'airs')
# Project user-defined yawns path
PROJECT_YAWN_PATH = './airs'
# Template for the body of a brand-new yawn descriptor.  Note it has no
# opening brace: it is appended after a '{ "name": "...",' prefix by the
# callers (see Air.__init__ / Air.edit), which is why the braces only
# balance once that prefix is prepended.
EMPTY_GLOBAL = """
"description": "",
"keywords": [],
"author": [],
"licenses": [],
"category": "library",
"homepage": "http://",
"tools": [],
"depends": {},
"strict": true,
"git": "",
"versions": {
"master": {
"package": "",
"resources": {
},
"build": [],
"productions": {
}
}
}
}"""
# Template for a project-local descriptor that only specializes versions of
# an already-existing global yawn (used when a global descriptor exists).
EMPTY_LOCAL_VERSION = """{
"versions": {
"version.name": {
"package": "",
"resources": {
},
"build": [],
"productions": {
}
}
}
}"""
class Air():
def __init__(self, name):
    """Load the yawn (formula) descriptor named *name*.

    Reads the system-wide descriptor from AIRSTRIP_YAWN_PATH and, when
    present, the project-local override from PROJECT_YAWN_PATH.  When a
    file is missing or malformed the corresponding has* flag stays False
    and a default/empty structure is used instead.
    """
    self.name = name
    self.hasGlobal = False
    # Default descriptor built from the global template.
    self.yawn = json.loads("""{
"name": "%s",
%s""" % (name, EMPTY_GLOBAL))
    systemPath = FileSystem.join(AIRSTRIP_YAWN_PATH, '%s.json' % name)
    if FileSystem.exists(systemPath):
        try:
            self.yawn = json.loads(FileSystem.readfile(systemPath))
            self.hasGlobal = True
        except Exception:
            # Narrowed from a bare except: still tolerates read/parse
            # failures, but no longer swallows SystemExit/KeyboardInterrupt.
            console.error('The system yawn descriptor for %s is borked!' % name)
    self.hasLocal = False
    self.local = json.loads('{}')
    localPath = FileSystem.join(PROJECT_YAWN_PATH, '%s.json' % name)
    if FileSystem.exists(localPath):
        try:
            self.local = json.loads(FileSystem.readfile(localPath))
            self.hasLocal = True
        except Exception:
            console.error('The yawn descriptor for %s in your project is borked!' % name)
@staticmethod
def exists(name):
    """Return True when a yawn descriptor for *name* exists, either
    system-wide or in the current project."""
    candidates = (
        FileSystem.join(AIRSTRIP_YAWN_PATH, '%s.json' % name),
        FileSystem.join(PROJECT_YAWN_PATH, '%s.json' % name),
    )
    return any(FileSystem.exists(path) for path in candidates)
def edit(self, globally = False):
# Global edition, just go
if globally:
p = FileSystem.join(AIRSTRIP_YAWN_PATH, '%s.json' % self.name)
c = self.yawn
else:
p = FileSystem.join(PROJECT_YAWN_PATH, '%s.json' % self.name)
# No local data yet
if not self.hasLocal:
# if no global data either, populate with yawn
if not self.hasGlobal:
self.local = json.loads("""{
"name": "%s",
%s""" % (self.name, EMPTY_GLOBAL))
# if has global data, should start empty instead, as a version specialization
else:
self.local = json.loads(EMPTY_LOCAL_VERSION)
c = self.local
if not FileSystem.exists(p):
FileSystem.writefile(p, json.dumps(c, indent=4))
sh('open "%s"' % p)
self.__init__(self.name)
def get(self, version, key = False):
if key == "safename":
return self.name
keys = ['name', 'homepage', 'git', 'description', 'author', 'keywords', 'strict', 'licenses', 'category', 'tools', 'depends', 'package', 'resources', 'build', 'productions']
#, 'versions']
# if key and not key in keys:
# console.error('There is no such thing as %s' % key)
if self.hasGlobal and (version in self.yawn["versions"]):
ref = self.yawn['versions'][version]
if "package.json" in ref and "component.json" in ref:
for i in ref["component.json"]:
ref["package.json"][i] = ref["component.json"][i]
parent = self.yawn
elif self.hasLocal and (version in self.local["versions"]):
ref = self.local['versions'][version]
if "package.json" in ref and "component.json" in ref:
for i in ref["component.json"]:
ref["package.json"][i] = ref["component.json"][i]
parent = self.local
else:
console.error('The requested version (%s) does not exist' % version)
raise Exception("FAIL")
if not key:
return ref
if key in ref:
return ref[key]
if "package.json" in ref and key in ref["package.json"]:
return ref["package.json"][key]
if not key in parent:
if "branch" in ref:
return self.get(ref["branch"], key)
else:
console.warn('No such key (%s)' % key)
return False
return parent[key]
def versions(self):
r = re.compile(r'([^\d]*)')
versions = {}
result = []
dates = {}
if self.hasGlobal:
self._parseVersions(self.yawn["versions"], versions)
# print(self.yawn["versions"])
if self.hasLocal:
self._parseVersions(self.local["versions"], versions)
hasMaster = versions.pop('master', False)
sortedVersions = versions.keys()
sortedVersions.sort(key=LooseVersion)
for key in sortedVersions:
result.append(versions[key])
if hasMaster:
result.append(hasMaster)
return result
def _parseVersions(self, entries, result):
for version in entries:
date = False
content = entries[version]
if 'date' in content and 'commited' in content['date']:
date = content['date']['commited']
if not date:
date = None
if version == 'master':
normalized = 'master'
else:
normalized = NORMALIZE_VERSION.sub('', version, 1)
result[normalized] = (version, date)
# def latest(self):
# v = self.versions()
# for i in v:
# v = i.lstrip("v")
# better = re.sub(r"([0-9]+)[.]([0-9]+)[.]([0-9]+)(.*)$", r"\1 \2 \3 \4", v).split(" ")
# print better
# try:
# print NormalizedVersion(v)
# except:
# print v.split('.')
# print "WRONG version"
# http://www.python.org/dev/peps/pep-0386/#normalizedversion
# version.StrictVersion('1.0.5') < version.StrictVersion('1.0.8')
# print version.StrictVersion(i)
# print better
# if self.hasGlobal:
# if key in self.yawn:
# ref = self.yawn[key]
# if self.hasGlobal:
# ref =
# return self.yawn[key]
# if self.hasLocal:
# return self.local[key]
# idx = keys.index(key)
# types = ['', '', [], [], '', [], {}, '', {}, [], {}]
# return types[idx] | AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/airstrip/air.py | air.py |
from puke.Task import task

global console, FileSystem
from puke import console, FileSystem, sh, deepcopy

# `os` and `puke` are used by show/search/init/reinit/buildit below but were
# never imported, causing NameError at runtime.
global os
import os

global puke
import puke

global dateutil, datetime
import dateutil.parser
import datetime

global json
import json
# import airfile

global re
import re

global yawn
import air as yawn

global airc
import airconfig as airc

global airf
import airfile as airf

global airb
import airbuild as airb

global gh
import githelper as gh

global ge
import github as ge

global se
import seeder as se

global lic
import airlicenses as lic
@task("Manage available airstrip licenses for projects")
def license(command = False, key = False):
    """List, show or edit the license templates known to airstrip.

    command: False/'list' to list them all, 'edit' to edit `key`,
             any other value is a license name whose details are printed.
    """
    airlic = lic.AirLicenses()
    if not command or command == 'list':
        print airlic.list()
    elif command == 'edit':
        airlic.edit(key)
    else:
        # Any other command is treated as a license name to display.
        d = airlic.get(command)
        print "License name: %s" % command.upper()
        print "License url: %s" % d["url"]
        print "License text: %s" % d["content"]
@task("Show current configuration details (airstrip use), or set a configuration flag (airstrip use key value)")
def use(key = False, value = False):
    """With no arguments list the configuration; with key and value,
    override that configuration flag."""
    a = airc.AirConfig()
    if key == False:
        a.list()
    else:
        if not value == False:
            # Coerce the string flags "true"/"false" to real booleans.
            if value.lower() == "true":
                value = True
            # NOTE(review): original comment says this coercion "doesn't work
            # like it should" — confirm AirConfig.override handles booleans.
            elif value.lower() == "false":
                value = False
            a.override(key, value)
@task("Show project required libraries (airstrip require), or add a new library to the list of project dependencies (airstrip require somelibrary), possibly in a specific version (airstrip require somelibrary somversion)")
def require(key = False, version = False):
    """List required libraries, or add `key` (default version "master",
    or every known version when version == 'all')."""
    a = airf.AirFile()
    if key == False:
        a.list()
    else:
        if not version:
            version = "master"
        # Get a yawn
        if not yawn.Air.exists(key):
            console.fail('No library by that name (%s)!' % key)
        aero = yawn.Air(key)
        if not version == 'all':
            # Will fail if there is no such version
            aero.get(version, 'name')
            # Otherwise ok!
            a.require(key, version)
        else:
            # NOTE(review): Air.versions() yields (version, date) tuples, so
            # `i` here is a tuple, not a version string — verify require().
            for i in aero.versions():
                a.require(key, i)
@task("Remove a previously required library from the list of project dependencies (airstrip remove somelibrary), possibly a specific version of it (airstrip remove somelibrary someversion)")
def remove(key, version = False):
    """Drop `key` (optionally only `version`) from the project requirements."""
    airfile = airf.AirFile()
    airfile.remove(key, version)
@task("Edit a library description file (airstrip edit somelibrary). Passing true as second argument edits the descriptor globally.")
def edit(name, globally = False):
    """Open the descriptor for `name` in an editor (globally if asked)."""
    # Any non-False second argument (e.g. the string "true") means global.
    if not globally == False:
        globally = True
    a = yawn.Air(name)
    # NOTE(review): original comment flags this as not working as intended.
    a.edit(globally)
@task("Show all available libraries (airstrip show), or detailed informations about a specific library (airstrip show somelibrary)")
def show(name = False):
    """Without a name, list every descriptor (system then project-local);
    with a name, print that library's summary and available versions."""
    if not name:
        # System-wide airs shipped next to this file.
        p = os.path.dirname(os.path.realpath(__file__))
        l = puke.FileList(puke.FileSystem.join(p, 'airs'), filter = "*.json")
        for i in l.get():
            print i.split('/').pop().split('.').pop(0)
        # Project-local airs.
        l = puke.FileList('airs', filter = "*.json")
        for i in l.get():
            print i.split('/').pop().split('.').pop(0)
        return
    # XXXX dirty hack
    # name = name.split('/').pop().replace('.json', '')
    if not yawn.Air.exists(name):
        console.fail('No library by that name (%s)!' % name)
    a = yawn.Air(name)
    # Fall back to the last listed version when the descriptor has no master.
    # NOTE(review): versions() returns (version, date) tuples, so this pop()
    # yields a tuple, not a version string — verify.
    nomasterhack = "master"
    try:
        a.get('master', 'name')
    except:
        nomasterhack = a.versions().pop()
    console.info('*********************')
    console.info(a.get(nomasterhack, 'name'))
    console.info('*********************')
    console.info(a.get(nomasterhack, 'description'))
    console.info('*********************')
    # console.info(' - Category: %s' % a.get('master', 'category'))
    console.info(' - Keywords: %s' % a.get(nomasterhack, 'keywords'))
    console.info(' - Homepage: %s' % a.get(nomasterhack, 'homepage'))
    console.info(' - Author: %s' % a.get(nomasterhack, 'author'))
    console.info(' - Licenses: %s' % a.get(nomasterhack, 'licenses'))
    # console.info(' - Required tools to build: %s' % a.get('master', 'tools'))
    console.info('*********************')
    console.info('Available versions:')
    versions = a.versions()
    # Pad version labels so the dates line up in one column.
    listOfVersions = tuple(x[0] for x in versions)
    maxLength = len(max(listOfVersions, key=len))
    for (i, date) in versions:
        try:
            date = dateutil.parser.parse(date)
            date = datetime.datetime.strftime(date, '%d. %B %Y')
        except:
            date = 'Unkown date'
        console.info(' * %s %s | %s' % ( str(i), " " * (maxLength - len(i)), date))
@task("Initialize new project")
def seed(app = False, mobile = False):
    """Seed a new project skeleton in the current directory."""
    seeder = se.Seeder()
    seeder.project()
    # XXX to be completed
    # executeTask('require', 'jasmine', 'master')
    # executeTask('build')
# @task("List all avalaible libraries")
# def list():
# p = os.path.dirname(os.path.realpath(__file__))
# l = puke.FileList(puke.FileSystem.join(p, 'airs'), filter = "*.json")
# for i in l.get():
# print i.split('/').pop().split('.').pop(0)
# l = puke.FileList('airs', filter = "*.json")
# for i in l.get():
# print i.split('/').pop().split('.').pop(0)
@task("Search for a given library")
# XXX this is dumb for now
def search(key):
    """Grep installed air descriptors for `key`, then query the github
    legacy search API and print the top 20 repositories."""
    cachesearch = {}
    result = []
    # Local matches: any descriptor whose raw JSON contains the keyword.
    p = os.path.dirname(os.path.realpath(__file__))
    l = puke.FileList(puke.FileSystem.join(p, 'airs'), filter = "*.json")
    for i in l.get():
        d = puke.FileSystem.readfile(i)
        if key in d:
            result.append(i.split('/').pop().split('.').pop(0))
    result2 = []
    g = ge.GitHubInit()
    d = g.search(key)
    if "repositories" in d:
        # Keep the 20 best-starred hits; one "owner/name  description" line each.
        p = d["repositories"][0:20]
        for i in p:
            short = "%s/%s" % (i["owner"], i["name"])
            desc = ""
            if i["description"]:
                desc = " %s" % i["description"].replace('\n', ' ')
            result2.append("%s%s" % (short.ljust(50), desc))
    puke.console.warn('Avalaible on github:')
    for i in result2:
        print i
    puke.console.warn('Already installed airs:')
    for i in result:
        print i
@task("Init an air from github")
def init(owner, repo, name = False):
    """Create/refresh the air descriptor for owner/repo, stored as `name`
    (defaults to the repository name)."""
    github = ge.GitHubInit()
    name = name or repo
    github.retrieve(owner, repo, "airs", name)
# # g.retrieve("documentcloud", "backbone", "airstrip/airs", "backbone")
# # g.retrieve("twitter", "bootstrap", "airstrip/airs", "bootstrap")
# g.retrieve("emberjs", "ember.js", "airstrip/airs", "ember")
# g.retrieve("h5bp", "html5-boilerplate", "airstrip/airs", "h5bp")
# g.retrieve("wycats", "handlebars.js", "airstrip/airs", "handlebars")
# # g.retrieve("jquery", "jquery", "airstrip/airs", "jquery")
# g.retrieve("necolas", "normalize.css", "airstrip/airs", "normalize")
# # g.retrieve("madrobby", "zepto", "airstrip/airs", "zepto")
@task("Refresh all airs")
def reinit():
    """Re-import every known air descriptor (system-wide, then
    project-local) from its github repository."""
    g = ge.GitHubInit()
    # System-wide airs shipped alongside this file.
    p = os.path.dirname(os.path.realpath(__file__))
    _reinitList(g, puke.FileList(puke.FileSystem.join(p, 'airs'), filter = "*.json"))
    # Project-local airs.
    _reinitList(g, puke.FileList('airs', filter = "*.json"))

global _reinitList
def _reinitList(g, filelist):
    """Re-retrieve every descriptor in `filelist` from its git origin.

    Extracted helper: the two loops in the original were byte-identical.
    """
    for i in filelist.get():
        # The descriptor basename is the air name.
        name = i.split('/').pop().split('.').pop(0)
        # The "git" key holds ".../owner/repo"; take the last two segments.
        v = json.loads(puke.FileSystem.readfile(i))['git'].split('/')
        repo = v.pop()
        owner = v.pop()
        g.retrieve(owner, repo, "airs", name)
@task("Build the list of required libraries, or a specifically required library")
def build(name = False):
    """Build every required library (or only `name`) into the configured
    output directory, one subdirectory per library safename."""
    a = airf.AirFile()
    requested = a.requiredLibraries()
    config = airc.AirConfig()
    conftmp = config.get('temporary')
    confdestination = config.get('output')
    if name:
        # Check the library exists and is required
        if not yawn.Air.exists(name):
            console.fail('No library by that name (%s)!' % name)
        if not a.isRequired(name):
            console.fail('You have not required that library (%s)!' % name)
        # Build temporary and destination paths for the library.
        yawnie = yawn.Air(name)
        # Descriptors without a master version: fall back to the last one.
        # NOTE(review): versions() returns (version, date) tuples — verify,
        # same concern as in show().
        nomasterhack = "master"
        try:
            yawnie.get('master', 'name')
        except:
            nomasterhack = yawnie.versions().pop()
        tmp = FileSystem.join(conftmp, yawnie.get(nomasterhack, "safename"))
        destination = FileSystem.join(confdestination, yawnie.get(nomasterhack, "safename"))
        buildit(yawnie, requested[name], tmp, destination)
        return
    else:
        # No name given: same dance for every required library.
        # NOTE(review): this duplicates the branch above — candidate for a
        # shared helper.
        for name in requested:
            yawnie = yawn.Air(name)
            nomasterhack = "master"
            try:
                yawnie.get('master', 'name')
            except:
                nomasterhack = yawnie.versions().pop()
            tmp = FileSystem.join(conftmp, yawnie.get(nomasterhack, "safename"))
            destination = FileSystem.join(confdestination, yawnie.get(nomasterhack, "safename"))
            buildit(yawnie, requested[name], tmp, destination)
# yawnie = yawn.Air(name)
# tmp = FileSystem.join(config.get('temporary'), yawnie.get("master", "safename"))
# destination = FileSystem.join(config.get('output'), yawnie.get("master", "safename"))
# giti = yawnie.get('master', 'git')
# # Get each version informations json
# vinfos = {}
# for version in requested[name]:
# vinfos[version] = yawnie.get(version)
# build(giti, vinfos, tmp, destination)
# y = yawn.Air(name)
# for version in requested[name]:
# # category = y.get(version, 'category')
# tmp = FileSystem.join(config.get('temporary'), name, version)
# destination = FileSystem.join(config.get('output'), name, version)
# data = y.get(version)
# if ".travis.yml" in data:
# print "version %s is OK travis: %s" % (version, data[".travis.yml"])
# print tmp, name, version, y.get(version, 'resources'), y.get(version, 'build'), y.get(version, 'productions'), destination, y.get(version, 'strict')
# airb.buildone(tmp, name, version, y.get(version, 'resources'), y.get(version, 'build'),
# y.get(version, 'productions'), destination, y.get(version, 'strict'))
global buildit
def buildit(yawnie, versions, tmp, dest):
    """Check out, build and harvest each requested version of a library.

    yawnie: Air descriptor; versions: iterable of version names;
    tmp: scratch checkout directory; dest: output root (one subdir per
    version). The build strategy is guessed from the repository tree
    (travis/npm, puke, bundler, rake, grunt, ant, make, or ad-hoc scripts);
    an unrecognizable tree raises.
    """
    # XXX horked if a specific version has a specific (different) git url
    nomasterhack = "master"
    try:
        yawnie.get('master', 'name')
    except:
        nomasterhack = yawnie.versions().pop()
    repomanager = gh.GitHelper(yawnie.get(nomasterhack, 'git'), tmp)
    repomanager.ensure()
    p = repomanager.getPath()
    # npm run-script names we are willing to execute.
    white = ["build", "dist", "test", "tests"]
    for version in versions:
        print("Building version %s" % version)
        tree = yawnie.get(version, "tree")
        if version == 'master':
            repomanager.checkout('master')
        else:
            repomanager.checkout(yawnie.get(version, "sha"))
        identified = False
        usenode = False
        trav = yawnie.get(version, ".travis.yml")
        if trav:
            if ("language" in trav) and (trav["language"] == "node_js"):
                usenode = identified = True
            else:
                print("DOESNT KNOW HOW TO BUILD CUSTOM TRAVIS SHIT")
        if yawnie.get(version, "devDependencies"):
            usenode = identified = True
        # "nobuild" short-circuits the whole build-detection dance.
        nob = yawnie.get(version, "nobuild")# or (not version == 'master')
        if nob:
            identified = True
        if not nob:
            usepuke = False
            if "pukefile.py" in tree:
                usepuke = identified = True
            usebundle = False
            if "Gemfile" in tree:
                usebundle = identified = True
            userake = False
            if "Rakefile" in tree:
                userake = identified = True
            usegrunt = False
            if "Gruntfile.js" in tree:
                usegrunt = identified = True
            if "grunt.js" in tree:
                usegrunt = identified = True
            useant = False
            if "build.xml" in tree:
                useant = identified = True
            usemake = False
            if "Makefile" in tree:
                usemake = identified = True
            # Install dependencies first, then run the first matching builder.
            if usenode:
                puke.sh('cd "%s"; npm install' % p)
            if usebundle:
                puke.sh('cd "%s"; bundle' % p, output = True)
            if usepuke:
                puke.sh('cd "%s"; puke all' % p, output = True)
            elif usegrunt:
                puke.sh('cd "%s"; grunt' % p, output = True)
            elif userake:
                puke.sh('cd "%s"; rake' % p, output = True)
            elif useant:
                puke.sh('cd "%s"; ant' % p, output = True)
            elif usemake:
                puke.sh('cd "%s"; make' % p, output = True)
            elif "build.sh" in tree:
                identified = True
                puke.sh('cd "%s"; ./build.sh' % p, output = True)
            # Yepnope...
            elif "compress" in tree:
                identified = True
                puke.sh('cd "%s"; ./compress' % p, output = True)
            # ES5...
            elif "minify" in tree:
                identified = True
                puke.sh('cd "%s"; ./minify' % p, output = True)
            if usenode:
                scripties = yawnie.get(version, "scripts")
                if scripties:
                    for i in scripties:
                        if i in white:
                            puke.sh('cd "%s"; npm run-script %s' % (p, i))
            if not identified:
                # FIX: was `raise "DONT KNOW WHAT TO DO"` — raising a string
                # is a TypeError on any Python >= 2.6.
                raise Exception("DONT KNOW WHAT TO DO")
        productions = yawnie.get(version, "productions")
        v = version.lstrip("v")
        destination = FileSystem.join(dest, v)
        if productions:
            for item in productions:
                local = productions[item]
                # Non-URL productions are paths relative to the checkout.
                if not local.startswith('http://'):
                    local = FileSystem.realpath(FileSystem.join(p, productions[item]))
                if not local.startswith('http://') and not FileSystem.exists(local):
                    console.error("Missing production! %s (%s)" % (productions[item], local))
                else:
                    if local.startswith('http://'):
                        puke.combine(puke.FileList(local), FileSystem.join(destination, item))
                    elif not FileSystem.isfile(local):
                        puke.deepcopy(puke.FileList(local), FileSystem.join(destination, item))
                    else:
                        FileSystem.copyfile(local, FileSystem.join(destination, item))
        # Minify any harvested js that has no -min companion yet.
        nos = not yawnie.get(version, "nostrict")
        stuff = puke.FileList(dest, filter = "*.js", exclude = "*-min.js")
        puke.Tools.JS_COMPRESSOR = "%s.js.compress" % puke.sh("which puke", output = False).strip()
        for i in stuff.get():
            mined = re.sub(r"(.*).js$", r"\1-min.js", i)
            if not FileSystem.exists(mined):
                print("Missing minified version %s %s" % (i, mined))
                # XXX strict will blow here
                puke.minify(str(i), mined, strict = nos)
# XXX too damn dangerous for the benefit - wontfix
# puke.Tools.CSS_COMPRESSOR = "%s.css.compress" % puke.sh("which puke", output = False).strip()
# stuff = puke.FileList(dest, filter = "*.css", exclude = "*-min.css")
# for i in stuff.get():
# mined = re.sub(r"(.*).css$", r"\1-min.css", i)
# if not FileSystem.exists(mined):
# print "Missing minified version %s %s" % (i, mined)
# # XXX strict will blow here
# try:
# puke.minify(str(i), mined, strict = True)
# except:
# # Bootstrap fail on older versions
# print "FAILED COMPRESSION %s" % i
# if local.split('.').pop().lower() == 'js':
# strict = True
# minify(str(local), re.sub(r"(.*).js$", r"\1-min.js", local), strict = strict)
# FileSystem.copyfile(re.sub(r"(.*).js$", r"\1-min.js", local), FileSystem.join(destination, re.sub(r"(.*).js$", r"\1-min.js", item)))
# if productions:
# else:
# @task("Get a specific info about a specific version of a library")
# def get(name, version, key):
# a = yawn.Air(name)
# print a.get(version, key)
# @task("Default task")
# def default():
# print('Victory')
# pass
# executeTask("build")
# executeTask("deploy")
# @task("All")
# def all():
# executeTask("build")
# executeTask("mint")
# executeTask("deploy")
# executeTask("stats")
# @task("Wash the taupe!")
# def clean():
# PH.cleaner()
# # Get whatever has been built and exfilter some crappy stuff
# @task("Deploying")
# def deploy():
# PH.deployer(False)
# @task("Stats report deploy")
# def stats():
# PH.stater(Yak.build_root)
# @task("Minting")
# def mint():
# # list = FileList(Yak.build_root, filter = "*bootstrap*.js", exclude = "*-min.js")
# # for burne in list.get():
# # minify(burne, re.sub(r"(.*).js$", r"\1-min.js", burne), strict = False, ecma3 = True)
# # raise "toto"
# # These dont survive strict
# PH.minter(Yak.build_root, filter = "*raphael*.js,*ember*.js,*yahoo*.js,*yepnope*.js,*modernizr*.js,*jasmine*.js", excluding=",*/jax*,*mathjax/fonts*", strict = False)
# PH.minter(Yak.build_root, excluding = "*raphael*.js,*ember*.js,*yahoo*.js,*yepnope*.js,*modernizr*.js,*jasmine*.js,*/jax*,*mathjax/fonts*", strict = True)
# @task("Deploying the static ressources, including approved third party dependencies")
# def build(buildonly = False):
# # Crossdomain
# sed = Sed()
# sed.add("<\!--.*-->\s*", "")
# combine("src/crossdomain.xml", Yak.build_root + "/crossdomain.xml", replace = sed)
# # Robots
# sed = Sed()
# # XXX partially fucked-up
# sed.add("(?:^|\n+)(?:#[^\n]*\n*)+", "")
# combine("src/robots.txt", Yak.build_root + "/robots.txt", replace = sed)
# # Deepcopy other stuff
# sed = Sed()
# PH.replacer(sed)
# list = FileList("src/", exclude="*robots.txt,*crossdomain.xml,*index.html")
# deepcopy(list, Yak.build_root, replace=sed)
# # Process the remote leaves
# description = {}
# # Yak.collection.items()
# colls = PH.getyanks()
# # print Yak.collection
# # for name in Yak.collection:
# # print name
# for name in colls:
# packinfo = colls[name]
# # Temporary and build output directories definitions
# tmpdir = FileSystem.join(Yak.tmp_root, "lib", packinfo["Destination"], name)
# builddir = FileSystem.join(Yak.build_root, "lib", packinfo["Destination"], name)
# desclist = []
# marker = 'lib/%s/' % packinfo["Destination"]
# for(localname, url) in packinfo["Source"].items():
# # Do the fetch of
# PH.fetchone(url, tmpdir, localname)
# # Copy files that "exists" to build directory
# f = FileSystem.join(tmpdir, localname)
# if FileSystem.exists(f):
# d = FileSystem.join(builddir, localname)
# # if not FileSystem.exists(FileSystem.dirname(d)):
# # FileSystem.makedir(FileSystem.dirname(d));
# FileSystem.copyfile(f, d)
# # Augment desclist with provided localname
# desclist += [FileSystem.join(marker, name, localname)]
# if "Build" in packinfo:
# buildinfo = packinfo["Build"]
# production = buildinfo["production"]
# tmpdir = FileSystem.join(tmpdir, buildinfo["dir"])
# extra = ''
# if 'args' in buildinfo:
# extra = buildinfo["args"]
# if not buildonly or buildonly == name:
# PH.make(tmpdir, buildinfo["type"], extra)
# # Copy production to build dir
# for(local, builded) in production.items():
# f = FileSystem.join(tmpdir, builded)
# d = FileSystem.join(builddir, local)
# desclist += [FileSystem.join(marker, name, local)]
# if FileSystem.isfile(f):
# FileSystem.copyfile(f, d)
# elif FileSystem.isdir(f):
# deepcopy(FileList(f), d)
# # ["coin%s" % key for key in ['item1', 'item2']]
# # map((lambda item: "%s%s" % (name, item)), ['item1', 'item2'])
# # # Augment description list with build result
# # bitch = production.keys();
# # for x in bitch:
# # bitch[x] = FileSystem.join(name, bitch[x]);
# # print bitch
# # raise "toto"
# # desclist = desclist + production.keys()
# description[name] = desclist
# # description[name] = "%s%s" % (name, marker, ('",\n"%s' % marker).join(desclist)))
# # miam += """
# # %s:
# # ["%s%s"]
# # """ % (name, marker, ('", "%s' % marker).join(desclist))
# # FileSystem.writefile(FileSystem.join(Yak.build_root, "airstrip.yaml"), yaml.dump(yaml.load('\n'.join(description))))
# # print json.dumps(description)
# # raise "toto"
# shortversion = Yak.package['version'].split('-').pop(0).split('.')
# shortversion = shortversion[0] + "." + shortversion[1]
# PH.describe(shortversion, "airstrip", description)
# # Write description file
# # FileSystem.writefile(FileSystem.join(Yak.build_root, "airstrip.json"), '{%s}' % ',\n'.join(description))
# # Build-up the description file
# file = "src/index.html"
# sed.add("{PUKE-LIST}", json.dumps(description, indent=4))
# deepcopy(file, Yak.build_root, replace=sed) | AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/airstrip/pukefile.py | pukefile.py |
from puke import *
import json
import airlicenses as al
AIRSTRIP_RC_PATH = '~/.airstriprc'
API = '2'
# Template for empty RC
EMPTY_RC = json.loads("""{
"version": "",
"company": {
"name": "",
"url": "",
"mail": ""
},
"git": "",
"ln": "en-us",
"you": {
"name": "",
"url": "",
"mail": ""
},
"license": "MIT"
}""")
class AirRC():
    """User-level airstrip preferences, persisted as JSON in ~/.airstriprc.

    Stores company/author info, default license, git owner and language,
    used by the "seed" command to populate project boilerplate.
    """

    def __init__(self):
        # NOTE(review): the path is not tilde-expanded here; assumes the
        # FileSystem layer handles "~" — TODO confirm.
        if not FileSystem.exists(AIRSTRIP_RC_PATH):
            FileSystem.writefile(AIRSTRIP_RC_PATH, json.dumps(EMPTY_RC, indent = 2))
        try:
            self.rc = json.loads(FileSystem.readfile(AIRSTRIP_RC_PATH))
        except:
            console.fail('Your airstrip rc file (%s) is horked! Please rm or fix it' % AIRSTRIP_RC_PATH)
        # Re-prompt whenever the stored schema version is stale.
        if not self.rc['version'] == API:
            self.__ask__()

    def __ask__(self):
        """Interactively (re)collect every rc field, then persist them."""
        defaults = self.rc.copy()
        # Backfill fields the stored rc is missing with the template values.
        for i in EMPTY_RC:
            if not i in defaults:
                defaults[i] = EMPTY_RC[i]
        console.warn("""You don't seem to have documented your default informations,
or airstrip has an upgraded version that requires new infos.""")
        console.info("""These infos are stored only in the file %s, which you can edit manually,
are entirely optional, and used only by the airstrip "seed" command to populate package.json
and other projects boilerplates.""" % AIRSTRIP_RC_PATH)
        console.info('First, provide informations about your company (if any - used generally for the author fields and copyright owner informations.)')
        self.rc['company']['name'] = prompt('Your company name (currently: %s)' % defaults['company']['name'], defaults['company']['name'])
        self.rc['company']['mail'] = prompt('Your company mail (currently: %s)' % defaults['company']['mail'], defaults['company']['mail'])
        self.rc['company']['url'] = prompt('Your company website / twitter (currently: %s)' % defaults['company']['url'], defaults['company']['url'])
        console.info('Now, about you - this will be used for the contributors/maintainers fields.')
        self.rc['you']['name'] = prompt('Your name (currently: %s)' % defaults['you']['name'], defaults['you']['name'])
        self.rc['you']['mail'] = prompt('Your mail (currently: %s)' % defaults['you']['mail'], defaults['you']['mail'])
        self.rc['you']['url'] = prompt('Your website / twitter (currently: %s)' % defaults['you']['url'], defaults['you']['url'])
        keys = al.AirLicenses().list()
        self.rc['license'] = prompt('Default license for new projects (among %s)? (currently: %s)' % (keys, defaults['license']), defaults['license'])
        self.rc['git'] = prompt('Default git owner to use for new projects? (currently: %s)' % defaults['git'], defaults['git'])
        self.rc['ln'] = prompt('Default language for projects? (currently: %s)' % defaults['ln'], defaults['ln'])
        # Stamps the current schema version and writes the file.
        self.set('version', API)

    def get(self, key):
        """Return the rc value for `key`, or None when absent."""
        if key in self.rc:
            return self.rc[key]
        return None

    def set(self, key, value):
        """Set `key` (when truthy) and persist the whole rc to disk."""
        if key:
            self.rc[key] = value
        FileSystem.writefile(AIRSTRIP_RC_PATH, json.dumps(self.rc, indent = 2))
global Tools
from puke import *
from puke import Tools
import json
import re
def fetchsvn(url, dest):
    """Clone or update a subversion checkout of `url` into `dest`.

    Errors out (console.error) when svn writes anything to stderr, leaving
    the directory for the user to fix or remove manually.
    """
    System.check_package('svn')
    # If directory exist, then update the tree
    if FileSystem.exists(dest):
        console.info('Updating')
        that = 'cd "%s"; svn up' % dest
    else:
        if not FileSystem.exists(FileSystem.dirname(dest)):
            FileSystem.makedir(FileSystem.dirname(dest))
        console.info('Cloning')
        that = 'cd "%s"; svn co %s %s' % (FileSystem.dirname(dest), url, FileSystem.basename(dest))
    # Do the deed
    try:
        std = Std()
        sh(that, std=std, output=False)
        if std.err:
            # and (std.err.find('No stash found.') == -1):
            # NOTE(review): raising a string is illegal on modern Python, but
            # the resulting TypeError is swallowed by the bare except below,
            # so the net effect (console.error) is unchanged.
            raise std.err
    except:
        # if puke.FileSystem.exists(dest):
        #     puke.FileSystem.remove(dest)
        console.error('Svn operation failed! %s You need to manually fix or remove the directory.' % std.err)
def fetchone(url, dest, rename):
    """Fetch one resource (git, svn, archive or plain file) into `dest`.

    Returns the checkout/unpack directory for git/svn/archives. For a plain
    file the function falls off the end (implicit None) — see note below.
    """
    remotefilename = url.split('/').pop()
    type = url.split('.').pop().lower()
    # Dirty trick to detect zip where the remote has no extension
    destype = rename.split('.').pop().lower()
    packpath = FileSystem.join(dest, remotefilename)
    if type == 'git':
        # Strip the trailing ".git" from the checkout directory name.
        packpath = packpath.split('.')
        packpath.pop()
        packpath = '.'.join(packpath)
        console.info('Git repository')
        fetchgit(url, packpath)
        return packpath
    elif type == 'svn' or destype == 'svn':
        console.info('Svn repository %s %s' % (url, packpath))
        fetchsvn(url, packpath)
        return packpath
    else:
        # Plain download (puke deepcopy handles remote URLs).
        deepcopy(FileList(url), dest + '/')
        if type == 'zip' or type == 'gz' or type == 'bz2' or destype == 'zip':
            try:
                # Unpack into a directory named after the archive (sans ext).
                dd = FileSystem.join(dest, remotefilename.replace('.' + type, ''))
                if FileSystem.exists(dd):
                    FileSystem.remove(dd)
                FileSystem.makedir(dd)
                unpack(packpath, dd, verbose = False)
            except Exception as e:
                # Fallback for archives the builtin unpack chokes on.
                sh('cd "%s"; 7z x "%s"' % (dd, FileSystem.abspath(packpath)));
            FileSystem.remove(packpath)
            return FileSystem.join(dest, rename)
        else:
            # Plain file: move it into place under its final name.
            if remotefilename != rename:
                if not FileSystem.exists(FileSystem.dirname(FileSystem.join(dest, rename))):
                    FileSystem.makedir(FileSystem.dirname(FileSystem.join(dest, rename)))
                sh('cd "%s"; mv "%s" "%s"' % (dest, remotefilename, rename))
            # NOTE(review): packpath is computed but never returned in this
            # branch (implicit None); buildone() appears to rely on a falsy
            # return for plain files — verify before "fixing".
            packpath = FileSystem.join(dest, rename)
def make(path, command):
    """Run `command` from inside `path` via the shell."""
    invocation = 'cd "%s"; %s' % (path, command)
    sh(invocation)
# if type == 'rake':
# dorake(path, extra)
# elif type == 'thor':
# dothor(path, extra)
# elif type == 'make':
# domake(path, extra)
# elif type == 'sh':
# def build(owner, name, versions, tmp, destination):
# repo = gh.GitHelper(owner, name, tmp)
# repo.ensure()
# def __init__(self, owner, name, path):
# def checkout(self, ref):
# def getPath(self):
def buildtravis(name, version, ref, travisd, tmp, destination):
    """Stub for a travis-driven build: currently only points puke's JS
    compressor at the one bundled with the puke executable."""
    # Dead dirty
    Tools.JS_COMPRESSOR = "%s.js.compress" % sh("which puke", output = False).strip()
def buildone(tmp, name, version, resources, build, productions, destination, strict):
    """Fetch `resources`, run `build` commands, then copy (and minify)
    `productions` — or the raw resources when no productions are given —
    into `destination`."""
    # Dead dirty
    Tools.JS_COMPRESSOR = "%s.js.compress" % sh("which puke", output = False).strip()
    lastdir = False
    for (localname, url) in resources.items():
        # Do the fetch; fetchone returns a directory (or None for plain files).
        lastdir = fetchone(url, tmp, localname)
    if build:
        if not lastdir:
            console.fail('Build failure not having a directory!')
        for com in build:
            make(lastdir, com)
    if productions:
        # Productions are resolved relative to the last fetched directory.
        if lastdir:
            tmp = lastdir
        for item in productions:
            local = FileSystem.realpath(FileSystem.join(tmp, productions[item]))
            if FileSystem.isfile(local):
                FileSystem.copyfile(local, FileSystem.join(destination, item))
            else:
                deepcopy(FileList(local), FileSystem.join(destination, item))
            if local.split('.').pop().lower() == 'js':
                # NOTE(review): forces strict=True here, overriding the
                # caller's `strict` argument — verify this is intended.
                strict = True
                minify(str(local), re.sub(r"(.*).js$", r"\1-min.js", local), strict = strict)
                FileSystem.copyfile(re.sub(r"(.*).js$", r"\1-min.js", local), FileSystem.join(destination, re.sub(r"(.*).js$", r"\1-min.js", item)))
    else:
        # No productions: ship the resources themselves, minifying js.
        for item in resources:
            local = FileSystem.realpath(FileSystem.join(tmp, item))
            if FileSystem.isfile(local):
                FileSystem.copyfile(local, FileSystem.join(destination, item))
            else:
                deepcopy(FileList(local), FileSystem.join(destination, item))
            if local.split('.').pop().lower() == 'js':
                minify(str(local), re.sub(r"(.*).js$", r"\1-min.js", local), strict = strict)
                FileSystem.copyfile(re.sub(r"(.*).js$", r"\1-min.js", local), FileSystem.join(destination, re.sub(r"(.*).js$", r"\1-min.js", item)))
from puke import *
import puke.Cache
import json
import yaml
import os
import base64
import requests
import sys, logging, os, traceback
GITHUB_ROOT = "https://api.github.com"
GITGIT_ROOT = "https://github.com"
GITRAW_ROOT = "https://raw.github.com"
class GitHubInit():
########################################
# HTTP
########################################
def __simpleGet__(self, url):
# print " [http] simple get %s" % url
r = requests.get(url)
return r.text or r.content
    def __cachedGet__(self, u):
        """GET `u` through puke's HTTP cache and return the cached body."""
        # fetchHttp returns a cache path; its basename is the cache id.
        id = puke.Cache.fetchHttp(u).split('/').pop()
        return puke.Cache.read(id)
########################################
# Github tokens
########################################
    def __getToken__(self):
        """Return the account's existing 'airstrip2' OAuth token, or False."""
        d = requests.get("%s/authorizations" % GITHUB_ROOT, auth = self.auth)
        r = json.loads(d.text or d.content)
        for i in r:
            # Our tokens are tagged with the note "airstrip2".
            if i["note"] == "airstrip2":
                print " [github-token] found existing auth token %s" % i["token"]
                return i["token"]
        return False
def __destroyTokens__(self):
# print " [github-token] destroying auth tokens"
d = requests.get("%s/authorizations" % GITHUB_ROOT, auth = self.auth)
r = json.loads(d.text or d.content)
for i in r:
if i["note"] == "airstrip2":
e = requests.delete("%s/authorizations/%s" % (GITHUB_ROOT, i["id"]), auth = self.auth)
def __createToken__(self):
# print " [github-token] creating new auth token"
payload = {"scopes": ["public_repo", "repo"], "note": "airstrip2"}
headers = {'content-type': 'application/json'}
d = requests.post("%s/authorizations" % GITHUB_ROOT, data=json.dumps(payload), headers=headers, auth = self.auth)
r = json.loads(d.text or d.content)
return r["token"]
def apiGet(self, fragment):
if '?' in fragment:
u = "%s/%s&access_token=%s" % (GITHUB_ROOT, fragment, self.token)
else:
u = "%s/%s?access_token=%s" % (GITHUB_ROOT, fragment, self.token)
r = self.__simpleGet__(u)
try:
return json.loads(r)
except Exception as e:
console.fail(" [github-connector] Failed json-interpreting url %s with payload %s" % (u, r))
def apiCacheGet(self, fragment):
# print " [github-connector] cache fetching %s" % fragment
u = "%s/%s?access_token=%s" % (GITHUB_ROOT, fragment, self.token)
r = self.__cachedGet__(u)
try:
return json.loads(r)
except Exception as e:
console.fail(" [github-connector] Failed json-interpreting cached url %s with payload %s" % (u, r))
# def buildUrl(self, fragment):
# return "%s/%s?access_token=%s" % (GITHUB_ROOT, fragment, self.token)
def __init__(self):
# consoleCfg = logging.StreamHandler()
# consoleCfg.setFormatter(logging.Formatter( ' %(message)s' , '%H:%M:%S'))
# logging.getLogger().addHandler(consoleCfg)
# logging.getLogger().setLevel(logging.DEBUG)
self.uname = 'dmp42'#prompt("Github username")
self.pwd = '{$githubmaumau$}'#prompt("Github password")
self.auth = requests.auth.HTTPBasicAuth(self.uname, self.pwd)
token = self.__getToken__()
# self.destroyTokens()
if not token:
token = self.__createToken__()
self.token = token
def search(self, keyword):
return self.apiGet("legacy/repos/search/%s?sort=stars&order=desc" % (keyword))
def retrieve(self, owner, repo, dest, name):
print " [github-connector] working on %s/%s" % (owner, repo)
# Get refs for a starter
refs = self.apiGet("repos/%s/%s/git/refs" % (owner, repo))
print " [github-connector] found %s refs" % len(refs)
tags = {}
# Get and init every tag, plus master
for i in refs:
tag = i["ref"].split('/').pop()
if i["ref"].startswith("refs/tags/") or i["ref"].startswith("refs/heads/master"):
tags[tag] = {"sha": i["object"]["sha"]}
tags[tag]["tree"] = {}
tags[tag]["package.json"] = {
"name": repo,
"author": owner,
"version": tag
}
print " [github-connector] found %s tags" % len(tags)
for tag in tags:
sha = tags[tag]["sha"]
print " [github-connector] analyzing tag %s (sha %s)" % (tag, sha)
if tag == "master":
tree = self.apiGet("repos/%s/%s/git/trees/%s" % (owner, repo, sha))
else:
tree = self.apiCacheGet("repos/%s/%s/git/trees/%s" % (owner, repo, sha))
date = self.apiCacheGet("repos/%s/%s/git/commits/%s" % (owner, repo, sha))
try:
tags[tag]["date"] = {
"authored": date["author"]["date"],
"commited": date["committer"]["date"]
}
except:
tags[tag]["date"] = {
"authored": False,
"commited": False
}
print sha
console.error('Failed fetching a commit!!!')
for item in tree["tree"]:
if item["path"].lower() in ['package.json', 'component.json', '.travis.yml']:
print " [github-connector] actually reading file %s" % item["path"]
# XXX avoid API call
item["url"] = "%s/%s/%s/%s/%s" % (GITRAW_ROOT, owner, repo, tag, item["path"].lower())
if tag == "master":
d = self.__simpleGet__(item["url"])
else:
d = self.__cachedGet__(item["url"])
try:
tags[tag][item["path"].lower()] = json.loads(d)
except:
try:
tags[tag][item["path"].lower()] = yaml.load(d)
except:
pass
elif "url" in item:
tags[tag]["tree"][item["path"]] = item["url"]
previous = {}
p = FileSystem.join(dest, '%s.json' % name)
if FileSystem.exists(p):
previous = json.loads(FileSystem.readfile(p))
previous["versions"] = tags
previous["git"] = "%s/%s/%s" % (GITGIT_ROOT, owner, repo)
FileSystem.writefile(p, json.dumps(previous, indent=4))
# g = GitHubInit()
# # g.retrieve("documentcloud", "backbone", "airstrip/airs", "backbone")
# # g.retrieve("twitter", "bootstrap", "airstrip/airs", "bootstrap")
# g.retrieve("emberjs", "ember.js", "airstrip/airs", "ember")
# g.retrieve("h5bp", "html5-boilerplate", "airstrip/airs", "h5bp")
# g.retrieve("wycats", "handlebars.js", "airstrip/airs", "handlebars")
# # g.retrieve("jquery", "jquery", "airstrip/airs", "jquery")
# g.retrieve("necolas", "normalize.css", "airstrip/airs", "normalize")
# # g.retrieve("madrobby", "zepto", "airstrip/airs", "zepto")
# @staticmethod
# def getblob(url, tmp):
# deepcopy(FileList(url), tmp)
# content = json.loads(FileSystem.readfile(FileSystem.join(tmp, url.split('/').pop())))
# return base64.b64decode(content["content"])
# @staticmethod
# def getraw(url, tmp):
# deepcopy(FileList(url), tmp)
# return FileSystem.readfile(FileSystem.join(tmp, url.split('/').pop()))
# /repos/:owner/:repo/git/trees/:sha
# 4a95dae0378f6e3058f70c51bff03318fb5fc63a
# config = airc.AirConfig()
# config.get('temporary') | AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/airstrip/github.py | github.py |
from puke import *
import json
# This file is meant to manipulate the current project "airfile", containing lists of
# what the user has requested
PROJECT_AIRFILE_PATH = './airfile.json'
class AirFile():
def __init__(self):
self.project = self._load_()
def _load_(self):
ret = json.loads('{}')
if FileSystem.exists(PROJECT_AIRFILE_PATH):
try:
d = json.loads(FileSystem.readfile(PROJECT_AIRFILE_PATH))
if 'libraries' in d:
ret = d['libraries']
except:
console.error('Your project airfile is horked and has been ignored!')
return ret
def _save_(self, data):
original = json.loads('{}')
if FileSystem.exists(PROJECT_AIRFILE_PATH):
try:
original = json.loads(FileSystem.readfile(PROJECT_AIRFILE_PATH))
except:
pass
original['libraries'] = data
FileSystem.writefile(PROJECT_AIRFILE_PATH, json.dumps(original, indent=4))
def require(self, name, version):
if name in self.project:
if version in self.project[name]:
console.error('Library %s in version %s is already required' % (name, version))
return
else:
self.project[name].append(version)
else:
self.project[name] = [version]
self._save_(self.project)
def remove(self, name, version = False):
if not name in self.project:
console.error('That library was not requested in the first place')
return
if version and (not version in self.project[name]):
console.error('That version of the library was not requested in the first place')
return
if version:
self.project[name].remove(version)
console.info('Library %s version %s has been removed from dependencies' % (name, version))
if (not version) or (not len(self.project[name])):
self.project.pop(name, None)
console.info('Library %s is no longer a dependency of your project' % name)
self._save_(self.project)
def list(self):
for i in self.project:
print "%s: " % i
for j in self.project[i]:
print " - version: %s" % j
def isRequired(self, name):
return name in self.project
def requiredLibraries(self):
return self.project | AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/airstrip/airfile.py | airfile.py |
from puke import *
import json
import os
# That file is meant to manipulate the airstrip configuration in the scope of the current project
# That project configuration by-default use the airstrip global configuration
# Possibly overriden by specific elements in the airconfig file in cwd.
AIRSTRIP_ROOT = os.path.dirname(os.path.realpath(__file__))
# Should point to the globally installed airstrip configuration file
AIRSTRIP_CONFIG_PATH = os.path.join(AIRSTRIP_ROOT, 'global.json')
# The project in the current directory airconfig file, if any
PROJECT_CONFIG_PATH = './airfile.json'
class AirConfig():
def __init__(self):
self.general = json.loads(FileSystem.readfile(AIRSTRIP_CONFIG_PATH))
self.project = self._load_()
def _load_(self):
ret = json.loads('{}')
if FileSystem.exists(PROJECT_CONFIG_PATH):
try:
d = json.loads(FileSystem.readfile(PROJECT_CONFIG_PATH))
if 'config' in d:
ret = d['config']
except:
console.error('Your project file configuration is horked and has been ignored!')
return ret
def _save_(self, data):
original = json.loads('{}')
if FileSystem.exists(PROJECT_CONFIG_PATH):
try:
original = json.loads(FileSystem.readfile(PROJECT_CONFIG_PATH))
except:
pass
original['config'] = data
FileSystem.writefile(PROJECT_CONFIG_PATH, json.dumps(original, indent=4))
def list(self):
for i in self.general:
value = self.general[i]['default']
if i in self.project:
value = '%s (default: %s)' % (self.project[i], self.general[i]['default'])
print '%s: %s [%s]' % (i, value, self.general[i]['info'])
def get(self, key):
if not key in self.general:
console.error('No such configuration flag (%s)' % key);
return
if key in self.project:
return self.project[key]
return self.general[key]['default']
def override(self, key, value):
if not key in self.general:
console.error('You are tryin to set a configuration switch that does not exist (%s)' % key);
return
if key in self.project:
# Same value, ignore
if value == self.project[key]:
console.error('Ignoring unchanged property %s (value is already %s)' % (key, value))
return
# Default value, remove from self.project settings
if self.general[key]['default'] == value:
self.project.pop(key, None)
# Otherwise change self.project key override
else:
self.project[key] = value
elif self.general[key]['default'] == value:
console.error('Ignoring unchanged property %s (default is already %s)' % (key, value))
return
else:
self.project[key] = value
self._save_(self.project)
console.info('Configuration switch "%s" has been set to "%s"' % (key, value)) | AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/airstrip/airconfig.py | airconfig.py |
__title__ = 'airstrip'
__version__ = '2.0.2'
__build__ = 0x001400
__author__ = 'WebItUp'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013 WebItUp'
import sys, logging, os, traceback
import pkg_resources
from optparse import OptionParser
import puke
try:
sys.path.insert(1, os.getcwd())
except:
pass
def run():
    """Parse command line options, set up logging, locate the pukefile next
    to this module and execute the requested task (or the default one).

    Exits with status 0 on --version, 1 when no pukefile or no task can be
    found.
    """
    #
    # Parse options
    #
    parser = OptionParser()
    parser.add_option("-v", "--version", action="store_true", dest="version", help="get version")
    # parser.add_option("-h", "--help", action="store_true", dest="help", help="get help")
    (options, args) = parser.parse_args()
    consoleCfg = logging.StreamHandler()
    consoleCfg.setFormatter(logging.Formatter( ' %(message)s' , '%H:%M:%S'))
    logging.getLogger().addHandler(consoleCfg)
    logging.getLogger().setLevel(logging.DEBUG)
    #
    # Execute tasks
    #
    if options.version:
        print('AirStrip %s' % __version__)
        sys.exit(0)
    # if options.help:
    #     puke.printTasks()
    #     sys.exit(0)
    if os.path.isfile('.pukeignore'):
        try:
            # context manager closes the handle (the old code leaked it)
            with open('.pukeignore', 'r') as f:
                for line in f:
                    puke.FileList.addGlobalExclude(line.strip())
        except Exception as e:
            print('Puke ignore error : %s' % e)
    #
    # Find and execute build script
    #
    # NOTE(review): the list contains duplicates; the capitalized variants
    # ("Pukefile", "Pukefile.py") were probably intended. Kept as-is: the
    # last existing name wins, same as before.
    pukefiles = ["pukefile", "pukeFile", "pukefile", "pukefile.py", "pukeFile.py", "pukefile.py"]
    working_dir = os.path.dirname(os.path.realpath(__file__))
    script = None
    for name in pukefiles:
        if os.path.isfile(os.path.join(working_dir, name)):
            script = os.path.join(working_dir, name)
    if script is None:
        # previously execfile(None) raised an opaque TypeError here
        logging.error("No pukefile found in %s. Please provide one of: %s" % (working_dir, ", ".join(pukefiles)))
        sys.exit(1)
    execfile(script)
    # NOTE: the old `args = args.strip()` always raised AttributeError on the
    # list and was silently swallowed -- removed as dead code.
    if not args:
        if puke.hasDefault():
            puke.executeTask('default')
        else:
            logging.error("No tasks to execute. Please choose from: ")
            puke.printTasks()
            sys.exit(1)
    else:
        name = args.pop(0)
        puke.executeTask(name.strip(), *args)
def gettraceback(level = 0):
    """Return the formatted traceback entry of the active exception.

    *level* selects how far from the innermost frame to report (0 = the
    frame that raised). Must be called from inside an except block.
    """
    # sys.exc_type / sys.exc_value were removed in Python 3 (and were never
    # thread-safe in Python 2); sys.exc_info() is the supported equivalent.
    etype, evalue, tb = sys.exc_info()
    reverse = -1 - level
    tb_list = traceback.format_tb(tb)
    # The old loop concatenated the chosen entry character by character; the
    # entry is already a string. The unused `exception` accumulator built
    # from format_exception_only was dead code and has been dropped.
    trace = tb_list[reverse]
    return trace
def main():
    """Console entry point: execute the build and map outcomes to exit
    codes (0 = success, 1 = error, 2 = interrupted)."""
    try:
        run()
    except KeyboardInterrupt:
        print("Build interrupted!\n")
        sys.exit(2)
    except Exception as error:
        print("ERROR %s \n %s \n" % (error, gettraceback()))
        sys.exit(1)
    sys.exit(0)
from puke import *
import re
import json
import os
# ==================================================================
# Global helpers for puke
# ==================================================================
# ------------------------------------------------------------------
# Common yak soup
# ------------------------------------------------------------------
# Mint every file in the provided path avoiding xxx files, tests, and already mint files themselves (usually the build root)
class Helpers:
    """Static build helpers shared by the puke build scripts: minification,
    linting, stats, cleanup, token replacement and deployment.

    Relies on globals brought in by `from puke import *` (FileSystem,
    FileList, Yak, console, System, Env, Require, sh, minify, jslint,
    stats, deepcopy, prompt).
    """
    @staticmethod
    def enforceunix():
        """Abort the build unless running on MacOS or Linux."""
        # Puke at large is just untested on windows - and specifically these helpers
        if System.OS != System.MACOS and System.OS != System.LINUX:
            console.fail('Your platform is not supported')
    @staticmethod
    def loadyanks():
        """Load package.json (plus optional config.yaml and per-user/per-OS
        override files), expose the result to puke as the Yak global,
        enrich it with git branch/revision info and create every directory
        listed under "directories"."""
        # Yank the base config file
        working_dir = os.path.dirname(os.path.realpath(__file__))
        r = Require("package.json")
        try:
            r.merge(os.path.join(working_dir, 'config.yaml'))
        except:
            pass
        # Try to get separate user specific file, either as json or yaml
        usercpath = 'package-%s-%s' % (Env.get("PUKE_LOGIN", System.LOGIN), Env.get("PUKE_OS", System.OS).lower())
        try:
            r.merge(usercpath + ".json")
        except:
            try:
                r.merge(usercpath + ".yaml")
            except:
                if System.LOGIN == 'root':
                    console.fail('You are trying to puke as root without any PUKE_LOGIN specified that matches a known configuration. This is verbotten!')
                pass
        # Map to older format for lazyness reasons :)
        clean = re.sub('[.]git$', '', r['repositories'][0]["url"])
        r['yak'] = {
            "package": {
                "name": r["name"],
                "version": r["version"],
                "homepage": r["homepage"]
            },
            "rights": {
                "license": '<a href="%s">%s</a>' % (r["licenses"][0]["url"], r["licenses"][0]["type"]),
                "copyright": 'All rights reserved <a href="http://www.webitup.fr">copyright %s</a>' % r["author"],
                "author": r["author"]
            },
            "git": {
                "root": '%s/blob/master/src' % clean
            },
            "paths": r["directories"],
            "config": r["config"]
        }
        r.yak('yak')
        # Git in the yanks: shell out to resolve branch, commit count and
        # latest hash; degrade gracefully outside a git checkout
        try:
            branch = sh("cd .; git branch | grep '*'", output=False).strip('*').strip()
            if branch == '(no branch)':
                branch = sh("cd .; git describe --tags", output=False).strip()
            commitnb = sh("cd .; git log --pretty=format:%s | wc -l" % '%h', output=False).strip()
            commithash = sh("cd .; git log | head -n 1 | cut -f2 -d' '", output=False).strip()
            Yak.git['root'] = Yak.git['root'].replace('/master/', '/' + branch + '/')
            Yak.git['revision'] = '#' + commitnb + '-' + commithash
        except:
            Yak.git['revision'] = '#no-git-information'
            console.error("FAILED fetching git information - locations won't be accurate")
        for (key, path) in Yak.paths.items():
            FileSystem.makedir(path)
    # def __preparepaths():
    #     # Aggregate package name and version to the "root" path, if not the default
    #     if Yak.root != './':
    #         Yak.root = FileSystem.join(Yak.root, Yak.package['name'], Yak.package['version'])
    #     # Aggregate all inner paths against the declared ROOT, and build-up all the corresponding top level Yak variables
    #     for (key, path) in Yak.paths.items():
    #         # Build-up global key only if not overriden
    #         if not (key + '_root') in Yak:
    #             Yak.set(key + '_root', FileSystem.join(Yak.root, path))
    #             FileSystem.makedir(Yak.get(key + '_root'))
    # def __prepareconfig():
    #     Yak.istrunk = Yak.settings['variant'] == 'bleed'
    # XXX still crap
    @staticmethod
    def minter(path, filter = '', excluding = '', strict = True):
        """Minify every .js file under *path* (or files matching *filter*)
        into a sibling *-min.js file, skipping already-minified files and
        anything in *excluding*."""
        if excluding:
            excluding = ',%s' % excluding
        if not filter:
            filtre = '*.js'
            list = FileList(path, filter = filtre, exclude = "*-min.js,%s" % excluding)
            for burne in list.get():
                print burne
                print re.sub(r"(.*).js$", r"\1-min.js", burne)
                minify(str(burne), re.sub(r"(.*).js$", r"\1-min.js", burne), strict = strict)
            # filtre = '*.css'
            # list = FileList(path, filter = filtre, exclude = "*-min.css,%s" % excluding)
            # for burne in list.get():
            #     print burne
            #     print re.sub(r"(.*).js$", r"\1-min.js", burne)
            #     minify(str(burne), re.sub(r"(.*).css$", r"\1-min.css", burne))
        else:
            filtre = filter
            list = FileList(path, filter = filtre, exclude = "*-min.js,%s" % excluding)
            for burne in list.get():
                print burne
                print re.sub(r"(.*).js$", r"\1-min.js", burne)
                minify(str(burne), re.sub(r"(.*).js$", r"\1-min.js", burne), strict = strict)
    # Lint every file (usually src)
    @staticmethod
    def linter(path, excluding = '', relax=False):
        """Run jslint over every non-minified .js file under *path*."""
        if excluding:
            excluding = ',%s' % excluding
        list = FileList(path, filter = "*.js", exclude = "*-min.js,%s" % excluding)
        jslint(list, relax=relax)
    @staticmethod
    def hinter(path, excluding = '', relax=False):
        """Run the external jshint tool over every non-minified .js file
        under *path*; fails the build on any finding.

        NOTE(review): *relax* is accepted but never used here.
        """
        System.check_package('node')
        System.check_package('npm')
        System.check_package('jshint')
        if excluding:
            excluding = ',%s' % excluding
        list = FileList(path, filter = "*.js", exclude = "*-min.js,%s" % excluding)
        res = '"' + '" "'.join(list.get()) + '"'
        ret = sh('jshint %s' % res, output = False)
        if ret:
            console.fail(ret)
        else:
            console.info("You passed the dreaded hinter!")
    # npm install -g jshint
    # Flint every file (usually src)
    @staticmethod
    def flinter(path, excluding = '', relax=False):
        """Like linter(), but lets jslint rewrite the files (fix=True)."""
        if excluding:
            excluding = ',%s' % excluding
        list = FileList(path, filter = "*.js", exclude = "*-min.js,%s" % excluding)
        jslint(list, relax=relax, fix=True)
    # Stat every file (usually build)
    @staticmethod
    def stater(path, excluding = ''):
        """Print size statistics for the files under *path*, grouped by
        category (js, minified js, css, minified css, markup/text, other)."""
        if excluding:
            excluding = ',%s' % excluding
        list = FileList(path, filter = "*.js", exclude = "*-min.js,%s" % excluding)
        stats(list, title = "Javascript")
        list = FileList(path, filter = "*-min.js", exclude = "%s" % excluding)
        stats(list, title = "Minified javascript")
        list = FileList(path, filter = "*.css", exclude = "*-min.css,%s" % excluding)
        stats(list, title = "Css")
        list = FileList(path, filter = "*-min.css", exclude = "%s" % excluding)
        stats(list, title = "Minified css")
        list = FileList(path, filter = "*.html,*.xml,*.txt", exclude = "%s" % excluding)
        stats(list, title = "(ht|x)ml + txt")
        list = FileList(path, exclude = "*.html,*.xml,*.txt,*.js,*.css,%s" % excluding)
        stats(list, title = "Other")
    # # Cleans every "ROOT" folder cautiously
    @staticmethod
    def cleaner():
        """Interactively delete every Yak path except "src" and "tests",
        prompting before each removal."""
        for (key, path) in Yak.paths.items():
            if not key == "src" and not key == "tests":
                resp = prompt('Delete %s? y/[N]' % path, 'N')
                if resp == 'y':
                    try:
                        FileSystem.remove(path)
                        console.info('Deleted %s' % path)
                    except:
                        console.error('Failed removing %s' % path)
    @staticmethod
    def replacer(s):
        """Register every {PUKE-*-*} placeholder (package, rights, git,
        paths, config) on the replacement object *s* and return it."""
        for (key, value) in Yak.package.items():
            s.add('{PUKE-PACKAGE-%s}' % key.replace('_', '-').upper(), str(value))
        for (key, value) in Yak.rights.items():
            s.add('{PUKE-RIGHTS-%s}' % key.replace('_', '-').upper(), str(value))
        for (key, value) in Yak.git.items():
            s.add('{PUKE-GIT-%s}' % key.replace('_', '-').upper(), str(value))
        for (key, value) in Yak.paths.items():
            s.add('{PUKE-%s-ROOT}' % key.replace('_', '-').upper(), str(value))
        for (key, value) in Yak.config.items():
            s.add('{PUKE-CONFIG-%s}' % key.replace('_', '-').upper(), str(value))
        return s
    @staticmethod
    def deployer(src, withversion = False, destination = False):
        """Deep-copy *src* into the dist directory; with *withversion* (and
        a non-default dist path) nest it under <name>/<major.minor>."""
        list = FileList(src)
        if withversion and Yak.paths['dist'] != 'dist':
            # keep only major.minor from e.g. "1.2.3-beta"
            v = Yak.package['version'].split('-').pop(0).split('.')
            d = FileSystem.join(Yak.paths['dist'], Yak.package['name'], v[0] + "." + v[1])
            if destination:
                d = FileSystem.join(d, destination)
            deepcopy(list, d)
        else:
            d = Yak.paths['dist']
            if destination:
                d = FileSystem.join(d, destination)
            deepcopy(list, d)
    # def __preparepaths():
    #     # Aggregate package name and version to the "root" path, if not the default
    #     if Yak.root != './':
    #         Yak.root = FileSystem.join(Yak.root, Yak.package['name'], Yak.package['version'])
    #     # Aggregate all inner paths against the declared ROOT, and build-up all the corresponding top level Yak variables
    #     for (key, path) in Yak.paths.items():
    #         # Build-up global key only if not overriden
    #         if not (key + '_root') in Yak:
    #             Yak.set(key + '_root', FileSystem.join(Yak.root, path))
    #             FileSystem.makedir(Yak.get(key + '_root'))
    # def __prepareconfig():
    #     Yak.istrunk = Yak.settings['variant'] == 'bleed'
# Import-time side effects: refuse to run on unsupported platforms and load
# the package configuration into the Yak as soon as this module is imported.
Helpers.enforceunix()
Helpers.loadyanks()
# ------------------------------------------------------------------
# Top-level helpers
# ------------------------------------------------------------------
# # Adds a {PUKE-*-*} pattern for every package, link, or path entry in the Yak
# def describe(shortversion, name, description):
# yamu = FileSystem.join(Yak.deploy_root, "%s.json" % name)
# if FileSystem.exists(yamu):
# mama = json.loads(FileSystem.readfile(yamu))
# mama[shortversion] = description
# else:
# mama = {shortversion: description}
# # Straight to service root instead - kind of hackish...
# FileSystem.writefile(yamu, json.dumps(mama, indent=4))
# # ------------------------------------------------------------------
# # Dedicated airstrip helpers
# # ------------------------------------------------------------------
# def getyanks():
# # Airstrip yank in additional description files
# l = FileList('yanks', filter = '*.yaml', exclude = '*xxx*');
# yanks = {}
# for i in l.get():
# a = Load(i)
# yanks = Utils.deepmerge(yanks, a.content['yanks'])
# Yak.collection = yanks
# return yanks
# # Bulding / fetching helpers
# def donode(path, extra):
# System.check_package('node')
# sh('cd "%s"; node %s' % (path, extra))
# def dorake(path, extra = ''):
# System.check_package('rvm')
# System.check_package('npm')
# System.check_package('bundle')
# System.check_package('rake')
# # XXX handlebars requires node as well :/
# System.check_package('node')
# sh('cd "%s"; bundle; rake %s' % (path, extra))
# def dothor(path, extra = ''):
# System.check_package('rvm')
# System.check_package('bundle')
# # System.check_package('tilt')
# # System.check_package('compass')
# sh('cd "%s"; bundle; thor %s' % (path, extra))
# def domake(path, extra = ''):
# sh('cd "%s"; make %s' % (path, extra))
# def make(path, type, extra = ''):
# if type == 'rake':
# dorake(path, extra)
# elif type == 'thor':
# dothor(path, extra)
# elif type == 'make':
# domake(path, extra)
# elif type == 'sh':
# sh('cd "%s"; %s' % (path, extra))
# for (k, ipath) in production.items():
# FileSystem.copyfile(FileSystem.join(path, ipath), FileSystem.join(destination, k))
# else:
# sh('cd "%s"; cp -R %s %s' % (path, latest, destination), output = True)
# sh("cd " + Yak.TMP_ROOT + "/lib/" + burne["Destination"] + "; cp -R " + burne["Latest"] + " " + k + "; rm " + burne["Latest"])
# localtmp = puke.FileSystem.join(tmp, url.split('/').pop())
# if puke.FileSystem.checksum(localtmp) != self.__checksum:
# console.fail("PANIC! Archive doesn't pan out. You may puke -c if in doubt, and anyhow double check integrity. %s vs. %s" % (puke.FileSystem.checksum(localtmp), self.__checksum))
# if type == 'dmg':
# console.info('Processing dmg')
# self.__dodmg(localtmp, self.local, pwd)
# elif type == 'pkg':
# console.info('Processing pkg')
# self.__dopkg(localtmp)
# else:
# console.info('Processing archive file')
# self.__dounpack(localtmp, puke.FileSystem.dirname(pwd)) | AirStrip | /AirStrip-2.0.2.tar.gz/AirStrip-2.0.2/airstrip/boilerplates/helpers.py | helpers.py |
import logging
logging.basicConfig(level=logging.INFO)
import re
try:
import xmltodict
except ImportError:
print """
you have to install xmltodict:
su
pip install xmltodict
"""
from common import get_html
import dump_keyword
def format_name(author):
    """Render a (surname, given-names, ...) author tuple as 'I.J. Surname'."""
    surname = author[0]
    given_names = author[1].split()
    initials = "".join("%s." % part[0] for part in given_names)
    return "%s %s" % (initials, surname)
def format_name_italian(author):
    """Render a (surname, given-names, ...) author tuple as 'Surname, Given'.

    The previous version also computed the initials and never used them;
    that dead code has been dropped.
    """
    surname = author[0]
    name = author[1]
    return "%s, %s" % (surname, name)
def main(html_inspire, default_institution):
    """Given an inspirehep record url, print all authors, the authors
    affiliated with *default_institution*, the title, the abstract and the
    keywords of the record.

    *html_inspire* must contain the numeric record id in its path, e.g.
    http://inspirehep.net/record/1240088; raises ValueError otherwise.
    """
    m = re.search(r"/([0-9]+)", html_inspire)
    if m is None:
        raise ValueError("not valid html")
    inspire_number = m.group(1)
    # fetch the record exported as xml ('xn' export format) and parse it
    url = "http://inspirehep.net/record/{0}/export/xn".format(inspire_number)
    xml = get_html(url)
    doc = xmltodict.parse(xml)
    authors = get_authors(doc)
    print "\n" + "=" * 10 + " ALL AUTHORS " + "=" * 10
    print ", ".join(map(format_name, authors))
    print "\n found %d authors" % len(authors)
    # author[2] is the list of institutions: substring match against their
    # space-joined concatenation
    milan_authors = [author for author in authors if (default_institution in " ".join(author[2]))]
    print "\n" + "=" * 10 + (" %s AUTHORS " % default_institution) + "=" * 10
    print "\n".join(map(format_name_italian, milan_authors))
    print "\n" + "=" * 10 + " TITLE " + "=" * 10
    print get_title(doc)
    print "\n" + "=" * 10 + " ABSTRACT " + "=" * 10
    print get_abstract(doc)
    # NOTE(review): "KEYWORKDS" typo lives in a runtime string, left as-is
    print "\n===== KEYWORKDS ======\n"
    keys = dump_keyword.get_keys_from_html(get_html(html_inspire))
    print keys
def get_abstract(xml_dict):
    """Return the article abstract from a parsed inspire xml document."""
    article = xml_dict['articles']['article']
    return article['front']['abstract']
def get_title(xml_dict):
    """Return the article title from a parsed inspire xml document."""
    front = xml_dict['articles']['article']['front']
    return front['article-meta']['title-group']['article-title']
def get_authors(xml_dict):
    """Return [(surname, given-names, [institutions]), ...] for every
    contributor of a parsed inspire xml document.

    xmltodict collapses single repeated elements into a plain dict (and
    several into a list); the previous `len(aff) == 1` test counted the
    *keys* of a single-affiliation dict, so any affiliation carrying more
    than one field crashed. Both 'contrib' and 'aff' are now normalized by
    type instead.
    """
    authors = []
    meta = xml_dict['articles']['article']['front']['article-meta']
    contrib = meta['contrib-group']['contrib']
    if isinstance(contrib, dict):
        # single author: xmltodict gives a dict, not a one-element list
        contrib = [contrib]
    for c in contrib:
        author_institutions = []
        aff = c["aff"]
        if isinstance(aff, list):
            for a in aff:
                author_institutions.append(a["institution"])
        else:
            author_institutions.append(aff["institution"])
        authors.append((c["name"]["surname"],
                        c["name"]["given-names"],
                        author_institutions))
    return authors
if __name__ == "__main__":
    # Command line front-end: expects exactly one positional argument (the
    # inspirehep record url) plus an optional --institution filter.
    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog inspire_url")
    parser.epilog = "example: python dump_milano_authors.py http://inspirehep.net/record/1240088"
    parser.add_option("--institution", type=str, default="Milan U", help="which institution you want to find. Default = 'Milan U'")
    (options, args) = parser.parse_args()
    if len(args) != 1:
        logging.error("you have to specify the inspire url")
        exit()
    main(args[0], options.institution)
import requests
import bibtexparser
from lxml import etree
from multiprocessing.pool import ThreadPool
from airstuff.workers import OffsetsProducer, CallBackConsumer
#logging.getLogger("requests").setLevel(logging.WARNING)
#logging.getLogger("urllib3").setLevel(logging.WARNING)
#logging.getLogger("bibtexparser").setLevel(logging.WARNING)
import colorlog
logger = colorlog.getLogger('airstuff.air')
def get_document_metadata(document_id):
    """Return the first bibtex entry for an AIR item id, or None.

    Fetches https://air.unimi.it/references?format=bibtex&item_id=<id> and
    parses it with bibtexparser; a parse failure or an empty entry list is
    logged and reported as None.
    """
    BASEURL = "https://air.unimi.it/references"
    r = requests.get(BASEURL, params={'format': 'bibtex', 'item_id': str(document_id)})
    try:
        return bibtexparser.loads(r.text).entries[0]
    # narrowed from a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt
    except Exception:
        logger.error("problem parsing %s", r.text)
        return None
def get_document_ids_from_author(author_id):
    """Yield every item id from the author's paginated AIR browse listing.

    BUNCH pages are fetched concurrently per round; the generator returns
    as soon as an empty page is seen (the listing is exhausted).
    """
    offset = 0
    BUNCH = 10

    def get(page_offset):
        return get_document_ids_from_author_offset(author_id, BUNCH, page_offset)

    while True:
        with ThreadPool(BUNCH) as pool:
            offset_bunch = []
            for b in range(BUNCH):
                offset_bunch.append(offset)
                # advance by the page size: the old code stepped by 20 while
                # requesting rpp=BUNCH (10) per page, skipping half the records
                offset += BUNCH
            pages = pool.map(get, offset_bunch)
        for page in pages:
            for item_id in page:
                yield item_id
            if not page:
                # the old `break` only left the inner for-loop, so the outer
                # `while True` kept re-querying past the end forever
                return
def get_document_ids_from_author_offset(author_id, rg, offset):
    """Return the item ids listed on one page of an author's AIR browse
    view (*rg* results per page, starting at *offset*)."""
    BASEURL = "https://air.unimi.it/browse?type=author&order=DESC&rpp=%s&authority=%s&offset=%d"
    url = BASEURL % (rg, author_id, offset)
    logger.debug("getting %s", url)
    response = requests.get(url)
    tree = etree.HTML(response.text)
    nodes = tree.xpath('//form[@class="form-inline"]/*[@name="item_id"]')
    item_ids = [node.attrib['value'] for node in nodes]
    logger.debug('results %s', item_ids)
    return item_ids
import threading
import queue
class AirConsumer(threading.Thread):
    """Worker thread: consumes page offsets from *input_queue*, fetches the
    item ids on that page of the author's AIR listing, resolves each id to
    its bibtex metadata and pushes {'doi', 'title', 'year'} dicts onto
    *output_queue*. Runs until *stop_event* is set (forever when None)."""

    def __init__(self, input_queue, output_queue, author_id, step, infos=None, stop_event=None):
        super(AirConsumer, self).__init__()
        self.author_id = author_id
        self.input_queue = input_queue
        self.output_queue = output_queue
        # results-per-page (rpp) forwarded to the offset query
        self.step = step
        self.stop_event = stop_event

    def run(self):
        while self.stop_event is None or not self.stop_event.is_set():
            # Blocking get with a timeout replaces the previous
            # `if not empty(): get()` pattern, which busy-waited at full CPU
            # and raced against sibling consumers between check and get.
            try:
                offset = self.input_queue.get(timeout=0.1)
            except queue.Empty:
                continue
            r = get_document_ids_from_author_offset(self.author_id, self.step, offset)
            for rr in r:
                info = get_document_metadata(rr)
                if not info:
                    logger.error('no info in offset %s, step %s', offset, self.step)
                    continue
                if 'doi' not in info:
                    logger.warning('no doi for %s', info['title'])
                    info['doi'] = None
                else:
                    info['doi'] = info['doi'].upper()
                logger.debug('putting info for %s into queue', info['doi'])
                # normalize the title to a single line before publishing
                info['title'] = info['title'].replace('\n', ' ').replace('\t', ' ')
                info = {k: info[k] for k in ('doi', 'title', 'year')}
                self.output_queue.put(info)
            self.input_queue.task_done()
class AirQuery():
    """Orchestrates a threaded scrape of an author's AIR publication list.

    One OffsetsProducer feeds page offsets into *input_queue*; *workers*
    AirConsumer threads turn them into publication dicts on *output_queue*;
    when *callback* is given, a CallBackConsumer drains the output queue and
    invokes it once per publication.
    """
    def __init__(self, author_id, workers=5, callback=None, buf_size=10):
        # buf_size bounds the offsets queue and is also handed to the
        # consumers as their page size (the `step` / rpp argument)
        self.input_queue = queue.Queue(buf_size)
        self.output_queue = queue.Queue()
        self.author_id = author_id
        self.buf_size = buf_size
        self.all_producers = []
        self.all_workers = []
        self.callback = callback
        self.callback_worker = None
        # lifecycle: 'stopped' -> 'starting' -> 'running' -> 'stopping' -> 'stopped'
        self.status = 'stopped'
        # separate stop signals: producer/consumers stop first, the callback
        # worker is signalled afterwards so it can keep draining the output
        self.stop_event = threading.Event()
        self.stop_event_callback = threading.Event()
        self.nworkers = workers
    def run(self):
        """Start the producer, the consumers and (optionally) the callback
        worker, all as daemon threads."""
        self.status = 'starting'
        p = OffsetsProducer(self.input_queue, self.buf_size, stop_event=self.stop_event)
        p.setDaemon(True)
        self.all_producers.append(p)
        for w in range(self.nworkers):
            worker = AirConsumer(self.input_queue, self.output_queue, self.author_id, self.buf_size, stop_event=self.stop_event)
            worker.setDaemon(True)
            self.all_workers.append(worker)
            worker.start()
        if self.callback is not None:
            logger.debug('creating callback consumer')
            self.callback_worker = CallBackConsumer(self.output_queue, self.callback, stop_event=self.stop_event_callback)
            self.callback_worker.setDaemon(True)
            self.callback_worker.start()
        p.start()
        self.status = 'running'
    def stop(self):
        """Signal and join every thread: producer and consumers first, then
        the callback worker; finally rearm stop_event for a future run()."""
        self.status = 'stopping'
        self.stop_event.set()
        logger.debug('stopping producer')
        for worker in self.all_producers:
            worker.join()
        logger.debug('stopping consumer')
        for worker in self.all_workers:
            worker.join()
        if self.callback_worker is not None:
            self.stop_event_callback.set()
            logger.debug('stopping callback')
            logger.debug('waiting callback worker to join')
            self.callback_worker.join()
        self.status = 'stopped'
        logger.debug('all stopped')
        # allow a subsequent run() on the same instance
        self.stop_event.clear()
if __name__ == '__main__':
    # Command line driver: query AIR for a hardcoded author id, print every
    # publication as it arrives and optionally write a TSV file.
    import time
    import argparse
    parser = argparse.ArgumentParser(description='Query AIR')
    parser.add_argument('--max-results', type=int, help='stop the query after number of results')
    parser.add_argument('--max-seconds', type=int, help='max number of second for the query')
    parser.add_argument('--out', help='output filename')
    parser.add_argument('--workers', type=int, default=5)
    args = parser.parse_args()
    all_publications = []
    lock = threading.Lock()
    ifound = 0
    fout = None
    if args.out:
        # NOTE(review): fout is never closed explicitly; buffered rows may
        # be lost on abnormal exit
        fout = open(args.out, 'w')
    def callback(item):
        # Invoked by the CallBackConsumer thread for every publication;
        # appends, prints and optionally writes a TSV row under the lock.
        global ifound
        global fout
        with lock:
            all_publications.append(item)
            print("%4d %40s %30s %s" % (ifound, item['doi'], str(item['title'][:30]), item['year']))
            if fout is not None:
                fout.write("%s\t%s\t%s\n" % (item['doi'], item['title'], item['year']))
            ifound += 1
    # NOTE(review): all_publications/lock/ifound are re-bound here; the
    # callback resolves them by name at call time, so these bindings win.
    all_publications = []
    lock = threading.Lock()
    ifound = 0
    start_time = time.time()
    q = AirQuery('rp09852', callback=callback, workers=args.workers)
    q.run()
    logger.info("running")
    # Busy poll until a stop condition is met.
    # NOTE(review): without --max-results/--max-seconds neither condition can
    # trigger, and the status line below is printed in a tight loop with no
    # sleep -- consider a time.sleep() here.
    while True:
        if (args.max_results is not None and ifound >= args.max_results) or \
           (args.max_seconds is not None and (time.time() - start_time) > args.max_seconds):
            logger.info("stopping")
            q.stop()
            logger.info("stopped")
            logger.info("found %d publications" % len(all_publications))
            break
        print("found %d publications" % len(all_publications))
    """
    g = get_document_ids_from_author('rp09852')
    for gg in g:
        info = get_document_metadata(gg)
        if 'doi' not in info:
            logger.info("skipping %s", info)
            continue
        if gg:
            print(info['doi'])
    """
from selenium import webdriver
from selenium.webdriver.support.ui import Select, WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.remote.remote_connection import LOGGER as selenium_logger
from selenium.common.exceptions import NoSuchElementException
import pickle
import time
import datetime
import requests
import tempfile
from enum import Enum
from colorama import init as init_colorama
from colorama import Fore, Back, Style
import colorlog
import logging
from common import str2date
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("bibtexparser").setLevel(logging.WARNING)
selenium_logger.setLevel(logging.WARNING)
logger = colorlog.getLogger('airstuff.driverair')
init_colorama()
URL_LOGIN = 'https://air.unimi.it/au/login'
URL_MYDSPACE = 'https://air.unimi.it/mydspace'
URL_SUBMIT = 'https://air.unimi.it/submit'
class ReturnValue(Enum):
    """Outcome of one submission attempt (see :func:`upload_from_doi`)."""
    SUCCESS = 1    # record was submitted through the whole wizard
    DUPLICATE = 2  # IRIS flagged the record as already present; submission was aborted
def get_driver(debug=False, driver='chrome'):
    """Create a selenium webdriver ('chrome' or 'firefox').

    When *debug* is False the browser runs headless and muted at a fixed
    window size.  NOTE(review): an unrecognised *driver* value is returned
    unchanged (the input string) instead of raising — confirm callers only
    pass 'chrome' or 'firefox'.
    """
    WINDOW_SIZE = "1920,1080"
    if driver == 'chrome':
        logger.info('creating chrome')
        chrome_options = webdriver.ChromeOptions()
        # chrome_options.add_argument("--incognito")
        if not debug:
            chrome_options.add_argument("--headless")
        chrome_options.add_argument("--mute-audio")
        chrome_options.add_argument("--window-size=%s" % WINDOW_SIZE)
        driver = webdriver.Chrome(options=chrome_options)
    elif driver == 'firefox':
        logger.info('creating firefox')
        firefox_options = webdriver.FirefoxOptions()
        # cookies do not work in firefox private session
        # firefox_options.add_argument("-private")
        if not debug:
            # mute audio through a profile preference; no CLI flag exists
            firefox_profile = webdriver.FirefoxProfile()
            firefox_profile.set_preference("media.volume_scale", "0.0")
            firefox_options.profile = firefox_profile
            firefox_options.headless = True
            firefox_options.add_argument("--width=%s" % WINDOW_SIZE.split(',')[0])
            firefox_options.add_argument("--height=%s" % WINDOW_SIZE.split(',')[1])
        driver = webdriver.Firefox(options=firefox_options)
    return driver
def login(driver):
    """Open the AIR login page, wait for a manual login, then persist the cookies.

    The driver is quit afterwards; later runs restore the session via
    :func:`load_cookie`.
    """
    driver.get(URL_LOGIN)
    input(Fore.RED + Back.GREEN + Style.DIM + 'press ENTER where you are logged in' + Style.RESET_ALL)
    save_cookies(driver)
    driver.quit()
def save_cookies(driver):
    """Persist the browser session cookies to ``cookies.pkl`` in the CWD.

    Used after a manual login so that later runs can restore the session
    with :func:`load_cookie`.
    """
    cookies = driver.get_cookies()
    logger.info('saving %d cookies', len(cookies))
    # BUGFIX: use a context manager so the file handle is closed (and
    # flushed) even if pickling fails; the original leaked the handle.
    with open("cookies.pkl", "wb") as fh:
        pickle.dump(cookies, fh)
def load_cookie(driver):
    """Load cookies from ``cookies.pkl`` and install them into *driver*.

    Raises:
        IOError: if the pickle exists but contains no cookies.
        FileNotFoundError: if ``cookies.pkl`` does not exist.
    """
    # BUGFIX: close the file handle deterministically (the original left it open).
    with open("cookies.pkl", "rb") as fh:
        cookies = pickle.load(fh)
    if not cookies:
        raise IOError("no cookie found. Have you login?")
    for cookie in cookies:
        # Only name/value are restored; domain/path come from the page
        # currently loaded in the driver.
        driver.add_cookie({'name': cookie['name'], 'value': cookie['value']})
    logger.info('%d cookies have been loaded', len(cookies))
def upload_from_doi(driver, info, pause=True):
    """Drive the AIR/IRIS submission wizard for one publication.

    Parameters
    ----------
    driver : selenium webdriver (already created; cookies restored if available)
    info : dict with the record metadata (doi, title, abstract, local_authors,
        thesaurus_terms, scopus, wos, optional pdf_url, optional imprint date)
    pause : when True, each wizard page waits for ENTER before advancing

    Returns
    -------
    ReturnValue.SUCCESS when the wizard completes, ReturnValue.DUPLICATE when
    IRIS reports the record as already present (the draft is removed).
    """
    driver.get('https://air.unimi.it')
    try:
        load_cookie(driver)
    except IOError:
        logger.info('no cookies found')
    driver.get(URL_SUBMIT)
    if 'login' in driver.current_url:
        # cookies were missing/expired: fall back to a manual login
        logger.warning("You are not logged in")
        input(Fore.RED + Back.GREEN + Style.DIM + 'press ENTER when you are logged in' + Style.RESET_ALL)
        save_cookies(driver)
        driver.get(URL_SUBMIT)
    logger.debug('you are log in')
    page = Page(driver, pause=False)
    page.close_cookie_banner()
    # first page: search the record by DOI
    driver.find_element_by_xpath("//a[contains(text(), 'Ricerca per identificativo')]").click()
    element_doi = driver.find_element_by_id("identifier_doi")
    logger.debug('waiting for element to be visible')
    WebDriverWait(driver, 10).until(EC.visibility_of(element_doi))
    element_doi.clear()
    logger.debug('insert doi %s', info['doi'])
    element_doi.send_keys(info['doi'])
    driver.find_element_by_id("lookup_idenfifiers").click()
    # second page: pick the found record and its collection type
    logger.debug('waiting for page with results from doi %s', info['doi'])
    WebDriverWait(driver, 20).until(EC.visibility_of_element_located((By.ID, "checkresult0"))).click()
    type_document_selector = driver.find_element_by_id("select-collection0")
    sel = Select(type_document_selector)
    sel.select_by_visible_text("01 - Articolo su periodico")
    logger.debug('ask to import selected records')
    driver.find_element_by_xpath("//button[contains(text(), 'Importa i record selezionati')]").click()
    # third page (licence)
    logger.debug('giving licence')
    driver.find_element_by_name("submit_grant").click()
    # check duplicate
    logger.debug('checking for duplicate box')
    duplicate_box_titles = driver.find_elements_by_id('duplicateboxtitle')
    if duplicate_box_titles:
        box = duplicate_box_titles[0]
        logger.debug('sleeping one second')
        time.sleep(1)  # FIXME: the problem is that the page is slow and this will be visible only if there will be a duplicate, which I don't know.
        logger.debug('sleeping finished')
        if box.is_displayed():
            logger.debug("the duplicate box is displayed")
            logger.warning('Trying to insert duplicate')
            # dismiss the popup, then remove the half-created submission
            WebDriverWait(driver, 10).until(EC.visibility_of_element_located((By.ID, 'cancelpopup'))).click()
            WebDriverWait(driver, 10).until(EC.visibility_of_element_located((By.NAME, 'submit_remove'))).click()
            return ReturnValue.DUPLICATE
    # warning box shown for records with many authors: just close it
    logger.info("checking if many authors box")
    try:
        many_author_h4 = driver.find_element_by_xpath("//h4[@class='modal-title' and contains(text(), 'Attenzione')]")
        logger.info('box many author found')
        many_authors_close_button = many_author_h4.find_element_by_xpath("//../..//button[contains(text(), 'Chiudi')]")
        logger.debug('closing many authors button found')
        many_authors_close_button.click()
        logger.debug('closed many author box')
    except NoSuchElementException:
        pass
    # page "Descrivere 2": title / abstract / keywords / authors / types
    page = PageDescrivere2(driver, pause=pause)
    page.wait_form_ready()
    logger.debug('filling page Descrivere 2')
    if not page.get_title():
        logger.debug('set title %s', info['title'])
        page.set_title(info['title'])
    else:
        logger.debug('title already present')
    if not page.get_abstract():
        logger.debug('set abstract "%s"', info['abstract'][0]['summary'])
        page.set_abstract(info['abstract'][0]['summary'])
    else:
        logger.debug('abstract already present')
    if not page.get_keywords():
        keywords = [term["term"] for term in info["thesaurus_terms"] if "term" in term]
        logger.debug('set keywords %s', keywords)
        page.set_keywords(keywords)
    else:
        logger.debug('keywords already present')
    # authors: edit the free-text widget and let IRIS re-parse the list
    driver.find_element_by_id("widgetContributorEdit_dc_authority_people").click()
    authors_field = driver.find_element_by_id("widgetContributorSplitTextarea_dc_authority_people")
    authors_field.clear()
    authors_field.send_keys('; '.join(info['local_authors']))
    driver.find_element_by_id("widgetContributorParse_dc_authority_people").click()
    page.set_type_contribution()
    page.set_type_referee()
    page.set_type_research()
    page.set_type_publication()
    # academic fields: FIS/01 and FIS/04 (hard-coded for this group)
    element_field = driver.find_element_by_id("dc_authority_academicField2000")
    element_field.clear()
    element_field.send_keys("FIS/01")
    WebDriverWait(driver, 5).until(EC.element_to_be_clickable((By.XPATH, '//a[text()="Settore FIS/01 - Fisica Sperimentale"]'))).click()
    driver.find_element_by_xpath('//button[@value="Aggiungi ancora"]').click()
    element_field = driver.find_element_by_id("dc_authority_academicField2000")
    element_field.clear()
    element_field.send_keys("FIS/04")
    WebDriverWait(driver, 5).until(EC.element_to_be_clickable((By.XPATH, '//a[text()="Settore FIS/04 - Fisica Nucleare e Subnucleare"]'))).click()
    page.next_page()
    # page "Descrivere 3": publication date / status / relevance
    page3 = PageDescrivere3(driver, pause=pause)
    if 'imprint' in info and info['imprint']['date']:
        date = str2date(info['imprint']['date'])
        if page3.get_year():
            if int(page3.get_year()) != date.year:
                logger.warning('year is different %s != %s', page3.get_year(), date.year)
        else:
            logger.debug('setting year %s', date.year)
            page3.set_year(date.year)
        if page3.get_month():
            # BUGFIX: was ``int(page3.get_month)`` (missing call parentheses),
            # which raised TypeError whenever a month was already present.
            if int(page3.get_month()) != date.month:
                logger.warning('month is different %s != %s', page3.get_month(), date.month)
        else:
            logger.debug('setting month %s', date.month)
            page3.set_month(date.month)
        if page3.get_day():
            if (int(page3.get_day()) != date.day):
                logger.warning('day is different %s != %s', page3.get_day(), date.day)
        else:
            logger.debug('setting day %s', date.day)
            page3.set_day(date.day)
    page3.set_pub()
    page3.set_rilevanza()
    page3.next_page()
    # page 4
    driver.find_element_by_name("submit_next").click()
    # page 5: external identifiers (Scopus / WoS), policy, URL
    page5 = PageDescrivere5(driver, pause=pause)
    if page5.get_scopus():
        if page5.get_scopus() != info['scopus']:
            logger.warning("scopus reference are different %s != %s", info['scopus'], page5.get_scopus())
    else:
        logger.info('scopus information not found')
    # NOTE(review): unlike wos below, the scopus id is (re)written
    # unconditionally — confirm this asymmetry is intended.
    page5.set_scopus(info['scopus'])
    if page5.get_wos():
        if page5.get_wos() != info['wos']:
            logger.warning("wos reference are different %s != %s", info['wos'], page5.get_wos())
    else:
        logger.debug("wos information not found")
        logger.debug("setting wos to %s", info['wos'])
        page5.set_wos(info['wos'])
    page5.set_open()
    page5.set_url('')  # remove url since the automatic one link to the journal and not to the article
    page5.next_page()
    # page 6: optional PDF upload
    page6 = PageCarica6(driver, pause=pause)
    if info.get('pdf_url', None):
        logger.debug('downloading pdf from %s', info['pdf_url'])
        header = requests.head(info['pdf_url'], allow_redirects=True)
        if header.status_code >= 400:
            logger.error('cannot download pdf with url %s', info['pdf_url'])
        else:
            content_length = header.headers.get('content-length', None)
            if content_length is not None:
                print(content_length)
                logger.debug('downloading %s KB pdf', float(content_length) / 1024.)
            # stream the pdf into a temp file and hand it to the upload widget
            r = requests.get(info['pdf_url'], stream=True, allow_redirects=True)
            with tempfile.NamedTemporaryFile('wb', suffix='.pdf') as ftemp:
                dl = 0
                for chunk in r.iter_content(chunk_size=1024 * 512):
                    if chunk:
                        dl += len(chunk)
                        ftemp.write(chunk)
                    ftemp.flush()
                    if content_length:
                        percent = '%.1f%%' % (dl / float(content_length) * 100)
                    else:
                        percent = ''
                    print('downloaded %d KB %s' % (dl, percent))
                page6.send_file(ftemp.name)
    page6.sito_docente(True)
    page6.next_page()
    # page 6/bis: final confirmation
    page6 = Page(driver, pause)
    page6.next_page()
    return ReturnValue.SUCCESS
class Page:
    """Base helper wrapping one page of the IRIS submission wizard."""

    # name of the submit button that advances the wizard
    next_name = 'submit_next'

    def __init__(self, driver, pause=True):
        """Wrap *driver*; when *pause* is true, wait for ENTER before advancing."""
        self.driver = driver
        self.pause = pause

    def select_hidden(self, element, value):
        """Select *value* in a <select> the page keeps hidden via CSS.

        The element's class is cleared so Selenium can interact with it,
        then restored afterwards.
        """
        saved_class = element.get_attribute('class')
        self.driver.execute_script("arguments[0].setAttribute('class', '')", element)
        Select(element).select_by_visible_text(value)
        self.driver.execute_script("arguments[0].setAttribute('class', '%s')" % saved_class, element)

    def next_page(self):
        """Advance the wizard, optionally pausing for manual inspection first."""
        if self.pause:
            input(Fore.RED + Back.GREEN + Style.DIM + 'press ENTER to go to next page' + Style.RESET_ALL)
        self.driver.find_element_by_name(self.next_name).click()

    def close_cookie_banner(self):
        """Dismiss the jGrowl cookie notice when present and clickable."""
        banners = self.driver.find_elements_by_xpath('//div[@id="jGrowl"]//div[@class="jGrowl-close"]')
        if not banners:
            return
        banner = banners[0]
        if banner.is_enabled():
            logger.debug('closing cookies banner')
            banner.click()
class PageDescrivere2(Page):
    """Wizard page "Descrivere 2": title, abstract, keywords and type selects."""
    def set_title(self, title):
        """Overwrite the title field with *title*."""
        element_title = self.driver.find_element_by_id("dc_title_id")
        element_title.clear()
        element_title.send_keys(title)
    def wait_form_ready(self):
        """Block until the title field is clickable (form fully rendered)."""
        logger.debug('waiting title to be clickable')
        WebDriverWait(self.driver, 20).until(EC.element_to_be_clickable((By.ID, "dc_title_id")))
    def get_title(self):
        """Return the current title text (empty string when unset)."""
        element_title = self.driver.find_element_by_id("dc_title_id")
        return element_title.text
    def set_abstract(self, abstract):
        """Set the abstract text and mark its language as English ('Inglese')."""
        Select(self.driver.find_element_by_name('dc_description_qualifier')).select_by_visible_text('Inglese')
        element_abstract = self.driver.find_element_by_name('dc_description_value')
        element_abstract.clear()
        element_abstract.send_keys(abstract)
    def get_abstract(self):
        """Return the first non-empty abstract textarea, or None when all are empty."""
        xpath = r'//label[text()="Abstract"]/..//textarea'
        textareas = self.driver.find_elements_by_xpath(xpath)
        for textarea in textareas:
            WebDriverWait(self.driver, 10).until(EC.visibility_of(textarea))
            text = textarea.text
            if text:
                return text
    def set_keywords(self, keywords):
        """Write *keywords* (an iterable of strings) as a '; '-joined list."""
        k = '; '.join(keywords)
        element_keywords = self.driver.find_element_by_id('dc_subject_keywords_id')
        element_keywords.clear()
        element_keywords.send_keys(k)
    def get_keywords(self):
        """Return the current keywords text (empty string when unset)."""
        element_keywords = self.driver.find_element_by_id('dc_subject_keywords_id')
        return element_keywords.text
    def set_type_contribution(self):
        """Set the contribution type to 'Articolo' (hidden select)."""
        element_type_contribution = self.driver.find_element_by_xpath('//select[@name="dc_type_contribution"]')
        self.select_hidden(element_type_contribution, 'Articolo')
    def set_type_referee(self):
        """Set the referee type to 'Esperti anonimi' (hidden select)."""
        element_type_referee = self.driver.find_element_by_xpath('//select[@name="dc_type_referee"]')
        self.select_hidden(element_type_referee, 'Esperti anonimi')
    def set_type_research(self):
        """Set the research type to 'Ricerca di base' (hidden select)."""
        element_type_referee = self.driver.find_element_by_xpath('//select[@name="dc_type_research"]')
        self.select_hidden(element_type_referee, 'Ricerca di base')
    def set_type_publication(self):
        """Set the publication type to 'Pubblicazione scientifica' (hidden select)."""
        element_type_publication = self.driver.find_element_by_xpath('//select[@name="dc_type_publication"]')
        self.select_hidden(element_type_publication, 'Pubblicazione scientifica')
class PageDescrivere3(Page):
    """Wizard page "Descrivere 3": issue date, publication status and relevance.

    NOTE(review): get_year/get_day return the raw field string while
    get_month returns int or None — callers wrap the former in int().
    """
    def get_year(self):
        """Return the year field value as a string ('' when unset)."""
        el = self.driver.find_element_by_name("dc_date_issued_year")
        return el.get_attribute('value')
    def set_year(self, year):
        """Overwrite the year field."""
        el = self.driver.find_element_by_name("dc_date_issued_year")
        el.clear()
        el.send_keys(str(year))
    def get_day(self):
        """Return the day field value as a string ('' when unset)."""
        el = self.driver.find_element_by_name("dc_date_issued_day")
        return el.get_attribute('value')
    def set_day(self, day):
        """Overwrite the day field."""
        el = self.driver.find_element_by_name("dc_date_issued_day")
        el.clear()
        el.send_keys(str(day))
    def get_month(self):
        """Return the selected month as int, or None when unset ('-1')."""
        el = self.driver.find_element_by_name('dc_date_issued_month')
        val = el.get_attribute('value')
        if val == '-1':
            return None
        else:
            return int(val)
    def set_month(self, month):
        """Select the month by its numeric value."""
        el = self.driver.find_element_by_name('dc_date_issued_month')
        sel = Select(el)
        sel.select_by_value(str(month))
    def set_pub(self):
        """Set the publication status to 'Pubblicato' (hidden select)."""
        el = self.driver.find_element_by_xpath('//select[@name="dc_type_publicationstatus"]')
        self.select_hidden(el, 'Pubblicato')
    def set_rilevanza(self):
        """Set the journal relevance to international (hidden select)."""
        el = self.driver.find_element_by_xpath('//select[@name="dc_type_circulation"]')
        self.select_hidden(el, 'Periodico con rilevanza internazionale')
class PageDescrivere5(Page):
    """Wizard page 5: external database identifiers (Scopus / ISI-WoS),
    open-access policy and article URL."""

    # XPath of the <option> currently selected in each identifier row.
    _SELECTED_ID_XPATH = '//label[text()="Codice identificativo in banca dati"]/..//Select/option[@selected="selected"]'

    def _find_selected(self, label):
        """Return the selected identifier <option> whose text is *label*, or None."""
        for opt in self.driver.find_elements_by_xpath(self._SELECTED_ID_XPATH):
            if opt.text == label:
                return opt
        return None

    def get_scopus(self):
        """Return the Scopus id already filled on the page, or None."""
        opt = self._find_selected('Scopus')
        if not opt:
            return None
        scopus_id = opt.find_element_by_xpath('../../..//input[@class="form-control"]').get_attribute('value')
        return scopus_id or None

    def set_scopus(self, scopus_id):
        """Fill the Scopus identifier, reusing an existing Scopus row if present."""
        opt = self._find_selected('Scopus')
        if opt:
            field = opt.find_element_by_xpath('../../..//input[@class="form-control"]')
            field.clear()
            field.send_keys(scopus_id)
        else:
            rows = self.driver.find_elements_by_xpath(self._SELECTED_ID_XPATH)
            # BUGFIX: the original navigated from the stale loop variable
            # ``el`` (the last row) here; use the first row like set_wos does.
            selector = rows[0].find_element_by_xpath('../../../../../..//select[@name="dc_identifier_qualifier"]')
            Select(selector).select_by_value("scopus")
            field = rows[0].find_element_by_xpath('../../../../../..//input[@name="dc_identifier_value"]')
            field.clear()
            field.send_keys(scopus_id)

    def get_wos(self):
        """Return the ISI/Web-of-Science id already filled on the page, or None."""
        opt = self._find_selected('ISI')
        if not opt:
            return None
        isi_id = opt.find_element_by_xpath('../../..//input[@class="form-control"]').get_attribute('value')
        return isi_id or None

    def set_wos(self, isi_id):
        """Fill the ISI/WoS identifier, reusing an existing ISI row if present."""
        opt = self._find_selected('ISI')
        if opt:
            field = opt.find_element_by_xpath('../../..//input[@class="form-control"]')
            field.clear()
            field.send_keys(isi_id)
        else:
            rows = self.driver.find_elements_by_xpath(self._SELECTED_ID_XPATH)
            selector = rows[0].find_element_by_xpath('../../../../../..//select[@name="dc_identifier_qualifier"]')
            Select(selector).select_by_value("isi")
            field = rows[0].find_element_by_xpath('../../../../../..//input[@name="dc_identifier_value"]')
            field.clear()
            field.send_keys(isi_id)

    def set_open(self):
        """Accept the open-access policy ('Aderisco', hidden select)."""
        el = self.driver.find_element_by_xpath('//select[@name="dc_iris_checkpolicy"]')
        self.select_hidden(el, 'Aderisco')

    def set_url(self, url):
        """Overwrite the article URL field; an empty *url* just clears it."""
        el = self.driver.find_element_by_id("dc_identifier_url")
        el.clear()
        if url:
            el.send_keys(url)
class PageCarica6(Page):
    """Wizard page 6: PDF upload and 'sito docente' flag."""
    # this page advances with a different submit button
    next_name = "submit_upload"
    def send_file(self, fn):
        """Attach the local file *fn* to the (normally hidden) file input."""
        el = self.driver.find_element_by_id("tfile")
        # the input is hidden by CSS: force it visible so send_keys works
        self.driver.execute_script('arguments[0].style = ""; arguments[0].style.display = "block"; arguments[0].style.visibility = "visible";', el)
        el.send_keys(fn)
    def sito_docente(self, value):
        """Set the 'sito docente' select to true/false according to *value*."""
        el = self.driver.find_element_by_id('sitodoc')
        sel = Select(el)
        sel.select_by_value('true' if value else 'false')
if __name__ == '__main__':
    # Manual smoke test: open a visible chrome and submit one known DOI.
    driver = get_driver(debug=True, driver='chrome')
    #login(driver)
    #upload(driver, {'title': 'my title',
    #                'keywords': ['key1', 'key2'],
    #                'abstract': 'my abstract',
    #                'authors': ['Attilio Andreazza', 'Leonardo Carminati']})
    upload_from_doi(driver, {'doi': '10.1140/epjc/s10052-018-6374-z'})
import requests
import json
from multiprocessing.pool import ThreadPool
import logging
from datetime import datetime
from airstuff.workers import OffsetsProducer, CallBackConsumer, DuplicateFilter
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("bibtexparser").setLevel(logging.WARNING)
import colorlog
logger = colorlog.getLogger('airstuff.inspire')
# Inspire legacy search endpoint (see http://inspirehep.net/help/hacking/search-engine-api)
URL_SEARCH = "http://inspirehep.net/search"
# Default query: published, citeable ATLAS papers, excluding conference papers;
# "ATLAS" is substituted with other collaboration names where needed.
ATLAS_QUERY = 'collaboration:"ATLAS" AND collection:published and NOT collection:conferencepaper and collection:citeable'
def query_inspire(query, rg=100, jrec=1, ot=None):
    """Run one search against the legacy Inspire-HEP API.

    Parameters
    ----------
    query : Inspire search expression (the ``p`` parameter).
    rg : number of records per page.
    jrec : 1-based offset of the first record.
    ot : optional list of output tags (fields) to return.

    Returns
    -------
    The decoded JSON payload (a list of record dicts), or None when the
    response body is not valid JSON.
    """
    logging.debug('querying %s, offset=%d', query, jrec)
    # see http://inspirehep.net/help/hacking/search-engine-api
    r = requests.get(URL_SEARCH,
                     params=dict(
                         of='recjson',  # json format
                         rg=rg,  # range
                         action_search="Search",
                         jrec=jrec,  # offset
                         do='d',
                         ot=ot,  # ouput tags
                         sf='earliestdate',  # sorting
                         so='d',  # descending
                         p=query))
    logging.debug('getting %s' % r.url)
    try:
        j = json.loads(r.text)
    except json.decoder.JSONDecodeError:
        # BUGFIX: the original passed r.text as a %-argument with no
        # placeholder, which made the logging call itself raise/log an error.
        logging.error("problem decoding %s", r.text)
        return None
    return j
def fix_title(title):
    """Normalise the Inspire ``title`` field to a single plain string.

    Inspire may return a string, None, a dict ``{'title': ...}`` or a list
    of such dicts (variants usually differing only in LaTeX markup).
    Unparseable values are logged and returned unchanged.
    """
    if title is None:
        return 'NO TITLE'
    if type(title) is str:
        return title if title else 'NO TITLE'
    if type(title) is dict:
        return title['title']
    if type(title) is list:
        # compare the variants ignoring '$' LaTeX markers; either way the
        # first variant is the one reported
        variants = set(t['title'].replace('$', '') for t in title)
        if len(variants) != 1:
            # too complicated (difference is in latex style): keep the first
            pass
        return title[0]['title']
    logging.warning('cannot parse this title:%s', title)
    return title
def fix_info(info):
    """Normalise an Inspire record dict in place (and return it).

    - ``doi`` becomes a sorted list of unique upper-case DOIs (possibly empty).
    - ``date`` is taken from ``imprint`` or ``prepublication`` unless a
      datetime object is already present; '?' when unknown.
    - ``title`` is flattened with :func:`fix_title`.
    """
    if 'doi' in info and info['doi'] is not None:
        if type(info['doi']) is str:
            info['doi'] = [info['doi']]
        info['doi'] = sorted({doi.upper() for doi in info['doi']})
    else:
        info['doi'] = []
    # BUGFIX: the original tested ``type(info['date'] is datetime)`` (the
    # closing parenthesis was misplaced), which is always truthy, so any
    # pre-existing 'date' value -- even a plain string -- was kept untouched.
    if 'date' in info and isinstance(info['date'], datetime):
        pass
    else:
        date = '?'
        if 'imprint' in info and info['imprint'] is not None and 'date' in info['imprint']:
            date = info['imprint']['date']
        elif 'prepublication' in info and info['prepublication'] is not None and 'date' in info['prepublication']:
            date = info['prepublication']['date']
        info['date'] = date
    if 'title' in info:
        info['title'] = fix_title(info['title'])
    return info
def get_all_collaboration(collaboration, infos=None):
    """Yield all (normalised) published records for *collaboration*, newest first.

    Pages are fetched ``nthread`` at a time with a thread pool; records with
    fewer than 30 authors are skipped (they are not collaboration papers).
    The generator terminates at the first empty or undecodable page.
    """
    infos = infos or ['recid', 'imprint', 'prepublication', 'number_of_authors', 'system_control_number', 'doi', 'title']
    nthread = 10
    shift = 20
    offset = 1

    def get(offset):
        query = ATLAS_QUERY.replace("ATLAS", collaboration)
        return query_inspire(query, shift, offset, infos)

    while True:
        offset_bunch = []
        for _ in range(nthread):
            offset_bunch.append(offset)
            offset += shift
        with ThreadPool(nthread) as pool:
            pages = pool.map(get, offset_bunch)
        exhausted = False
        for page in pages:
            if not page:
                # empty page or JSON failure (None): no records past this point
                exhausted = True
                continue
            for record in page:
                if record['number_of_authors'] is not None and int(record['number_of_authors']) < 30:
                    continue
                yield fix_info(record)
        if exhausted:
            # BUGFIX: the original ``break`` only exited the inner for-loop,
            # so the while-loop (and the generator) kept spinning forever.
            return
import threading
import queue

# Shared counters updated by the InspireConsumer threads below;
# both are guarded by ``lock_ndone``.
ndone = 0        # total records processed so far
nlow_author = 0  # records skipped for having fewer than 30 authors
lock_ndone = threading.Lock()
class InspireConsumer(threading.Thread):
    """Worker thread: take offsets from *input_queue*, query Inspire, push
    normalised records onto *output_queue*.

    Sets *stop_event* when an empty page is received (no more records).
    NOTE(review): the run loop busy-waits on ``input_queue.empty()`` with no
    sleep, and ``query_inspire`` may return None (``len(r)`` would raise) —
    confirm both are acceptable here.
    """
    def __init__(self, input_queue, output_queue, query, step, infos=None, stop_event=None):
        super(InspireConsumer, self).__init__()
        self.input_queue = input_queue    # offsets to fetch
        self.output_queue = output_queue  # normalised records out
        self.query = query                # inspire search expression
        self.step = step                  # page size (rg)
        self.stop_event = stop_event      # shared shutdown signal (may be None)
        self.infos = infos or ['recid', 'imprint', 'prepublication', 'number_of_authors', 'system_control_number', 'doi', 'title']
    def run(self):
        """Consume offsets until the stop event is set."""
        while self.stop_event is None or not self.stop_event.is_set():
            if not self.input_queue.empty():
                offset = self.input_queue.get()
                r = query_inspire(self.query, self.step, offset, self.infos)
                if len(r) == 0:
                    # empty page: the query is exhausted, signal everyone
                    logging.info("getting empty response")
                    if self.stop_event is not None:
                        self.stop_event.set()
                        logging.debug('stop event set')
                for rr in r:
                    with lock_ndone:
                        global ndone
                        ndone += 1
                    info_fixed = fix_info(rr)
                    if int(info_fixed['number_of_authors']) < 30: ## TODO: FIXME
                        logging.debug('ignoring %s %s since it has only %d authors',
                                      info_fixed['doi'], info_fixed['title'], info_fixed['number_of_authors'])
                        with lock_ndone:
                            global nlow_author
                            nlow_author += 1
                        continue
                    self.output_queue.put(info_fixed)
                logging.debug('found %d entries from offset %s', len(r), offset)
                self.input_queue.task_done()
        logging.debug('thread at the end')
class ManagerWorker(threading.Thread):
    """Thread that blocks on *stopping_event* and then runs *stopping_action*."""

    def __init__(self, stopping_event, stopping_action):
        """Store the event to wait for and the callable to invoke."""
        super().__init__()
        self.stopping_event = stopping_event
        self.stopping_action = stopping_action

    def run(self):
        # Block until somebody signals the event, then perform the shutdown.
        self.stopping_event.wait()
        logging.debug('stopping condition met')
        self.stopping_action()
        logging.debug('stopping action done')
class InspireQuery():
    """Orchestrate an Inspire query: an offsets producer, N consumer threads,
    an optional callback consumer and a manager that runs stop() when the
    shared stop event fires.  ``status`` is one of
    'stopped'/'starting'/'running'/'stopping'.
    """
    def __init__(self, query, workers=5, callback=None, buf_size=10):
        self.input_queue = queue.Queue(buf_size)   # offsets to fetch (bounded)
        self.output_queue = queue.Queue()          # normalised records
        self.query = query
        self.buf_size = buf_size
        self.all_producers = []
        self.all_workers = []
        self.callback = callback                   # called once per record (may be None)
        self.callback_worker = None
        self.status = 'stopped'
        self.stop_event = threading.Event()
        self.nworkers = workers
    def run(self):
        """Create and start all threads (all daemonic), then mark 'running'."""
        self.status = 'starting'
        p = OffsetsProducer(self.input_queue, self.buf_size, stop_event=self.stop_event)
        p.name = 'producer'
        p.setDaemon(True)
        self.all_producers.append(p)
        # the manager waits on stop_event and invokes self.stop for us
        self.manager = ManagerWorker(stopping_event=self.stop_event,
                                     stopping_action=self.stop)
        self.manager.name = 'manager'
        self.manager.setDaemon(True)
        self.manager.start()
        logging.debug('manager started')
        queue_duplicates = queue.Queue()
        for w in range(self.nworkers):
            worker = InspireConsumer(self.input_queue, self.output_queue, self.query, self.buf_size, stop_event=self.stop_event)
            worker.name = 'consumer-%d' % w
            worker.setDaemon(True)
            self.all_workers.append(worker)
            worker.start()
            logging.debug('worker started')
        # duplicate-filter stage currently disabled
        #for w in range(2):
        #    worker = DuplicateFilter(queue_duplicates, self.output_queue, stop_event=self.stop_event)
        #    worker.name = 'duplicate-%d' % w
        #    worker.setDaemon(True)
        #    worker.start()
        #logging.debug('worker duplicates started')
        if self.callback is not None:
            self.callback_worker = CallBackConsumer(self.output_queue, self.callback, stop_event=self.stop_event)
            self.callback_worker.name = 'callback'
            self.callback_worker.setDaemon(True)
            self.callback_worker.start()
            logging.debug('callback started')
        p.start()
        logging.debug('produced started')
        self.status = 'running'
    def stop(self):
        """Signal all threads to stop and join them, then report the counters."""
        logging.debug('start stopping procedure')
        self.status = 'stopping'
        self.stop_event.set()
        logging.info('wait produer to join')
        for worker in self.all_producers:
            worker.join()
        logging.info('wait consumer to join')
        for worker in self.all_workers:
            # NOTE(review): the join is commented out, so consumers are only
            # logged as joined, not actually awaited — confirm intended.
            #worker.join()
            logging.debug('worker %s joined' % worker.name)
        if self.callback_worker is not None:
            logging.info('wait callback to join')
            self.callback_worker.join()
        # avoid self-join when stop() is invoked from the manager thread itself
        if threading.get_ident() != self.manager.ident:
            logging.info('wait manager to join')
            self.manager.join()
        self.status = 'stopped'
        logging.info('all stopped')
        logging.info("Number of inspire entries found: %d" % ndone)
        logging.info("Ignored %d entries since low author" % nlow_author)
        self.stop_event.clear()
if __name__ == '__main__':
    import time
    import argparse
    parser = argparse.ArgumentParser(description='Query inspire')
    parser.add_argument('--max-results', type=int, help='stop the query after number of results')
    parser.add_argument('--max-seconds', type=int, help='max number of second for the query')
    parser.add_argument('--out', help='output filename')
    parser.add_argument('--workers', type=int, default=5)
    args = parser.parse_args()
    # Sequential path: dump every ATLAS record as TSV and exit.
    f = open(args.out, 'w')
    for x in get_all_collaboration('ATLAS'):
        to_write = '%s\t%s\t%s' % (','.join(x['doi']), x['title'], x['date'])
        print(to_write)
        f.write(to_write + '\n')
    exit()
    # NOTE(review): everything below is dead code because of the exit()
    # above — it is the older threaded InspireQuery driver, kept as-is.
    all_publications = []
    doi_set = set()
    lock = threading.Lock()
    ifound = 0
    fout = None
    if args.out:
        fout = open(args.out, 'w')
    def callback(item):
        # invoked by the CallBackConsumer thread for each record found
        global ifound
        global fout
        with lock:
            all_publications.append(item)
            if item['doi'] in doi_set:
                logging.warning('duplicate: %s' % item['doi'])
            doi_set.add(item['doi'])
            print("%4d %40s %30s %s" % (ifound, item['doi'], str(item['title'][:30]), item['imprint']['date']))
            if fout is not None:
                fout.write("%s\t%s\t%s\n" % (item['doi'], item['title'], item['imprint']['date']))
            ifound += 1
    start_time = time.time()
    q = InspireQuery(ATLAS_QUERY, callback=callback, workers=args.workers)
    q.run()
    logging.info("running")
    # busy-poll until a limit is hit or the query stops by itself
    while True:
        if (args.max_results is not None and ifound >= args.max_results) or \
           (args.max_seconds is not None and (time.time() - start_time) > args.max_seconds) or \
           q.status == 'stopped':
            logging.info("stopping")
            q.stop()
            logging.info("stopped")
            logging.info("found %d publications" % len(all_publications))
            break
import logging
logging.basicConfig(level=logging.INFO)
import re
import xmltodict
from .common import get_html
from . import dump_keyword
from tkinter import *
def format_name(author):
    """Return 'I.J. Surname' for an (surname, given-names, ...) author tuple."""
    surname, given = author[0], author[1]
    initials = "".join("%s." % part[0] for part in given.split())
    return "%s %s" % (initials, surname)
def format_name_italian(author):
    """Return 'Surname, Given-names' (the order used by AIR) for an author tuple."""
    return "%s, %s" % (author[0], author[1])
def parse(html_inspire, default_institution):
    """Fetch an inspirehep record and print/return its metadata.

    *html_inspire* is the record URL (the numeric id is extracted from it);
    *default_institution* selects the subset of authors whose affiliation
    contains that string.

    Returns (authors_list, institution_authors_list, title, abstract, keys).
    """
    m = re.search(r"/([0-9]+)", html_inspire)
    if m is None:
        raise ValueError("not valid html")
    inspire_number = m.group(1)
    # the /export/xn endpoint returns the record as XML
    url = "http://inspirehep.net/record/{0}/export/xn".format(inspire_number)
    xml = get_html(url)
    doc = xmltodict.parse(xml)
    authors = get_authors(doc)
    print("\n" + "=" * 10 + " ALL AUTHORS " + "=" * 10)
    authors_list = ", ".join(map(format_name, authors))
    print(authors_list)
    print("\n found %d authors" % len(authors))
    # keep only authors whose affiliation list mentions the institution
    milan_authors = [author for author in authors if (default_institution in " ".join(author[2]))]
    print("\n" + "=" * 10 + (" %s AUTHORS " % default_institution) + "=" * 10)
    milan_list = "\n".join(map(format_name_italian, milan_authors))
    print(milan_list)
    print("\n" + "=" * 10 + " TITLE " + "=" * 10)
    title = get_title(doc)
    print(title)
    print("\n" + "=" * 10 + " ABSTRACT " + "=" * 10)
    abstract = get_abstract(doc)
    print(abstract)
    print("\n===== KEYWORKDS ======\n")
    # keywords are scraped from the record's HTML page, not the XML export
    keys = dump_keyword.get_keys_from_html(get_html(html_inspire))
    print(keys)
    return authors_list, milan_list, title, abstract, keys
def get_abstract(xml_dict):
    """Extract the abstract text from a parsed (xmltodict) Inspire XML export."""
    front = xml_dict['articles']['article']['front']
    return front['abstract']
def get_title(xml_dict):
    """Extract the article title from a parsed (xmltodict) Inspire XML export."""
    meta = xml_dict['articles']['article']['front']['article-meta']
    return meta['title-group']['article-title']
def get_authors(xml_dict):
    """Return a list of (surname, given-names, [institutions]) tuples.

    *xml_dict* is the xmltodict parse of Inspire's XML export; a contributor
    carries either a single affiliation (a dict) or several (a list of
    dicts).  Authors without any affiliation get ``["unknown"]`` and a
    warning is logged.
    """
    authors = []
    meta = xml_dict['articles']['article']['front']['article-meta']
    contrib = meta['contrib-group']['contrib']
    for c in contrib:
        author_institutions = []
        try:
            aff = c["aff"]
            if len(aff) == 1:
                # single affiliation: xmltodict yields a one-key dict
                author_institutions.append(aff["institution"])
            else:
                for a in aff:
                    author_institutions.append(a["institution"])
        except KeyError:
            logging.warning("author %s %s has no institution, check manually" % (c["name"]["surname"], c["name"]["given-names"]))
            # BUGFIX: the original assigned to a misspelled variable
            # ('author_intitutions'), so the "unknown" placeholder was lost
            # and the author ended up with an empty institution list.
            author_institutions = ["unknown"]
        authors.append((c["name"]["surname"],
                        c["name"]["given-names"],
                        author_institutions))
    return authors
class Application(Frame):
    """Tk GUI: paste an inspirehep record URL, press run, and get the title,
    author lists, abstract and keywords in copyable text boxes."""

    def run(self):
        """Fetch and parse the record, then refill every text widget."""
        url = self.input_inspirehep.get()
        self.text_titles.delete("1.0", END)
        self.text_all_authors.delete("1.0", END)
        self.text_milan_authors.delete("1.0", END)
        self.text_abstract.delete("1.0", END)
        self.text_keywords.delete("1.0", END)
        authors_list, milan_list, title, abstract, keys = parse(url,
                                                                self.institution)
        self.text_titles.insert(INSERT, title)
        self.text_all_authors.insert(INSERT, authors_list)
        self.text_milan_authors.insert(INSERT, milan_list)
        self.text_abstract.insert(INSERT, abstract)
        self.text_keywords.insert(INSERT, keys)

    def say_hi(self):
        print("hi there, everyone!")

    def copy(self, widget):
        """Copy the whole content of *widget* to the system clipboard."""
        text = widget.get("1.0", END)
        self.clipboard_clear()
        self.clipboard_append(text)

    def createWidgets(self, url):
        """Build the form: URL entry plus one text box (with copy button) per field."""
        self.label_input = Label(self, text="inspirehep url:")
        self.label_input.grid(row=0, sticky=W)
        self.input_inspirehep = Entry(self)
        self.input_inspirehep.configure(width=50)
        self.input_inspirehep.grid(row=0, column=1)
        self.input_inspirehep.insert(INSERT, url)
        self.button_run = Button(self, text="run", command=self.run)
        self.button_run.grid(row=0, column=2)
        self.text_titles = Text(self)
        self.text_titles.config(height=2)
        self.text_titles.grid(row=1, sticky=N, columnspan=2)
        self.copy_button_titles = Button(self, text="copy",
                                         command=lambda: self.copy(self.text_titles))
        self.copy_button_titles.grid(row=1, column=2)
        self.text_all_authors = Text(self)
        self.text_all_authors.config(height=10)
        self.text_all_authors.grid(row=2, sticky=N, columnspan=2)
        self.copy_button_all_authors = Button(self, text="copy",
                                              command=lambda: self.copy(self.text_all_authors))
        self.copy_button_all_authors.grid(row=2, column=2)
        self.text_milan_authors = Text(self)
        self.text_milan_authors.config(height=10)
        self.text_milan_authors.grid(row=3, sticky=N, columnspan=2)
        self.copy_button_milan_authors = Button(self, text="copy",
                                                command=lambda: self.copy(self.text_milan_authors))
        self.copy_button_milan_authors.grid(row=3, column=2)
        self.text_abstract = Text(self)
        self.text_abstract.config(height=10)
        self.text_abstract.grid(row=4, sticky=N, columnspan=2)
        # BUGFIX: this button used to be stored in self.copy_button_milan_authors,
        # silently overwriting the previous one; give it its own attribute.
        self.copy_button_abstract = Button(self, text="copy",
                                           command=lambda: self.copy(self.text_abstract))
        self.copy_button_abstract.grid(row=4, column=2)
        self.text_keywords = Text(self)
        self.text_keywords.config(height=10)
        # BUGFIX: the keywords widgets were gridded at row=4, on top of the
        # abstract widgets; give them their own row (and attribute).
        self.text_keywords.grid(row=5, sticky=N, columnspan=2)
        self.copy_button_keywords = Button(self, text="copy",
                                           command=lambda: self.copy(self.text_keywords))
        self.copy_button_keywords.grid(row=5, column=2)

    def __init__(self, url, institution, master=None):
        """Create the frame; *url* pre-fills the entry, *institution* filters authors."""
        Frame.__init__(self, master)
        self.institution = institution
        self.pack()
        self.createWidgets(url)
def main():
    """CLI entry point: parse options and run either headless or with the GUI.

    NOTE(review): with --no-gui and no positional argument, args[0] raises
    IndexError; optparse is also long-deprecated in favour of argparse.
    """
    from optparse import OptionParser
    parser = OptionParser(usage="usage: %prog inspire_url")
    parser.epilog = "example: parse_inspire.py http://inspirehep.net/record/1240088"
    parser.add_option("--institution", type=str, default="Milan U", help="which institution you want to find. Default = 'Milan U'")
    parser.add_option("--no-gui", action="store_true", default=False, help="do no show GUI")
    (options, args) = parser.parse_args()
    if options.no_gui:
        parse(args[0], options.institution)
        exit()
    root = Tk()
    # GUI mode: the URL argument is optional and only pre-fills the entry
    app = Application(args[0] if len(args) else "",
                      institution=options.institution, master=root)
    app.mainloop()
    root.destroy()


if __name__ == '__main__':
    main()
from urllib.parse import urlparse, urljoin
import lxml.html
import requests
import colorlog
from functools import wraps
from time import time
logger = colorlog.getLogger('airstuff.journal')
URL_DOI = 'https://doi.org'
def timing_warning(max_time):
    """Decorator factory: warn when the wrapped call takes longer than
    *max_time* seconds (the call's result is returned unchanged)."""
    def decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            started = time()
            result = func(*args, **kwargs)
            elapsed = time() - started
            if elapsed > max_time:
                logger.warning('function %s with arguments %s %s took much time: %f ms', func.__name__, args, kwargs, elapsed * 1000)
            return result
        return inner
    return decorator
@timing_warning(1)
def check_url_exists(url):
    """Return True when a HEAD request to *url* succeeds (status < 400)."""
    return requests.head(url).status_code < 400
@timing_warning(3)
def get_redirect(url):
    """Follow redirects from *url* and return the final URL, or None on 404."""
    r = requests.head(url, allow_redirects=True)
    if r.status_code == 404:
        return None
    location_redirect = r.url
    return location_redirect
def get_redirect_doi(doi):
    """Resolve a DOI through doi.org and return the publisher URL, or None."""
    url_doi = urljoin(URL_DOI, doi)
    redirect = get_redirect(url_doi)
    if not redirect:
        logger.warning('cannot get redirect for doi %s', url_doi)
        return None
    return redirect
def get_pdf_url(doi):
    """Return a direct PDF URL for *doi*, dispatching on the publisher host.

    Resolves the DOI first; if resolution fails, falls back to guessing a
    Springer URL for EPJC/JHEP DOIs. Returns None when no PDF can be found.
    """
    logger.debug('getting redirect for doi %s', doi)
    url = get_redirect_doi(doi)
    if not url:
        logger.warning('cannot resolve doi %s', doi)
        # DOI resolution failed: guess a Springer content URL for the two
        # journals whose DOIs encode the journal name.
        tentative_url = None
        if 'epjc' in doi.lower():
            tentative_url = 'http://link.springer.com/content/pdf/%s' % doi
        elif 'jhep' in doi.lower():
            tentative_url = 'https://link.springer.com/content/pdf/%s' % doi
        logger.debug('tentative url: %s from doi: %s', tentative_url, doi)
        if tentative_url:
            if check_url_exists(tentative_url):
                return tentative_url
        return None
    hostname = urlparse(url).hostname
    logger.debug('redirect from doi %s is %s', doi, url)
    # Per-publisher handling of the resolved landing page.
    if hostname == 'link.springer.com':
        return get_pdf_url_springler(doi, url)
    elif hostname == 'journals.aps.org':
        # APS: the abstract page URL maps directly to the PDF URL.
        return url.replace('abstract', 'pdf')
    elif hostname == 'linkinghub.elsevier.com':
        return get_pdf_url_science_direct(url)
    elif hostname == 'iopscience.iop.org':
        return urljoin(url, '/pdf')
    else:
        logger.error('not able to get pdf for %s from %s', doi, url)
        return None
def get_pdf_url_science_direct(url):
    """Map an Elsevier linkinghub URL to the direct ScienceDirect PDF URL.

    Raises NotImplementedError for any other Elsevier URL shape.
    """
    if 'linkinghub' not in url:
        raise NotImplementedError
    pii = url.split('/')[-1]
    return 'https://www.sciencedirect.com/science/article/pii/%s/pdfft' % pii
def get_pdf_url_springler(doi, url):
    """Find the PDF link on a Springer page, falling back to a guessed content URL."""
    pdf_link = get_pdf_url_springler_tight(url)
    if pdf_link:
        return pdf_link
    logger.warning('problem to find pdf link for %s on springler, try another method', doi)
    return get_pdf_url_springler_loose(doi)
@timing_warning(1)
def get_pdf_url_springler_tight(url):
    """Scrape the Springer article page at *url* for its PDF download link.

    Returns the absolute PDF URL, or None when the link cannot be
    identified unambiguously (no link, or more than one candidate).
    """
    r = requests.get(url)
    html = r.text
    root = lxml.html.fromstring(html)
    els = root.xpath('//div[@class="download-article test-pdf-link"]//a[@title="Download this article in PDF format"]')
    # Bug fix: the original tested `len(els) != 1` first, which mislabelled
    # the empty case as "more than one" and made the `not els` branch
    # unreachable. Check the empty case first.
    if not els:
        logger.error('no download link on %s', url)
        return None
    if len(els) > 1:
        logger.error('more than one download link on %s', url)
        return None
    return urljoin(url, els[0].attrib['href'])
def get_pdf_url_springler_loose(doi):
    """Guess the Springer PDF URL directly from *doi* and return it only if it exists."""
    candidate = urljoin('https://link.springer.com/content/pdf/', doi)
    return candidate if check_url_exists(candidate) else None
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GObject, GLib, Gio
from threading import Lock
import datetime
import logging
import colorlog
from airstuff import inspire
from airstuff.air import AirQuery
from airstuff.air_info import WindowDoi
import jellyfish
import re
from common import str2date
# Configure colourised console logging for the whole 'airstuff' package.
colors = colorlog.default_log_colors
colors['DEBUG'] = 'blue'  # override the default DEBUG colour
formatter = colorlog.ColoredFormatter('%(log_color)s %(name)s %(levelname)s %(threadName)s %(message)s',
                                      log_colors=colors)
handler = colorlog.StreamHandler()
handler.setFormatter(formatter)
logger = colorlog.getLogger('airstuff')
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
# Module-level lock guarding GUI updates scheduled from worker threads.
lock = Lock()
def delatex(s):
    """Strip LaTeX markup from *s* and normalise common physics-title wording."""
    substitutions = ((' ', ' '), (' ', ' '), ('$', ''), ('{', ''), ('}', ''), (r'\ell', 'l'), ('^', ''),
                     (r'\text', ''), (r'\sqrt', 'sqrt'), (r'\to', '->'), ('->', '->'), (' = ', '='),
                     ('√', 'sqrt'), (r'\mathrm', ''), ('pp', 'proton-proton'), ('pb+pb', 'lead-lead'))
    for old, new in substitutions:
        s = s.replace(old, new)
    # Dashes become spaces, then boilerplate phrases are dropped entirely.
    s = s.replace('-', ' ')
    for phrase in ('with the atlas detector', 'in proton proton collisions'):
        s = s.replace(phrase, '')
    return s
# Pre-compiled patterns for the collision energies appearing in (lower-cased) titles.
re13 = re.compile(r'13[ ]*tev')
re8 = re.compile(r'8[ ]*tev')
re7 = re.compile(r'7[ ]*tev')
re5p02 = re.compile(r'5\.02[ ]*tev')
re2p76 = re.compile(r'2\.76[ ]*tev')
re900 = re.compile(r'900[ ]*gev')


def get_energy_title(title):
    """Return the collision-energy tag found in *title*, or None if none matches."""
    for pattern, energy in ((re13, 13), (re8, 8), (re7, 7),
                            (re5p02, 5.02), (re2p76, 2.76), (re900, 900)):
        if pattern.search(title):
            return energy
    return None
class StatBox(Gtk.Box):
    """Horizontal row of per-year counters: '?', '< 2010', then one bucket per year."""

    def __init__(self):
        Gtk.Box.__init__(self)
        self.first_year = 2010
        # Bucket keys: unknown, pre-2010, then every year up to today.
        self.years = ['?', '< %s' % self.first_year]
        self.years += [str(year) for year in range(self.first_year, datetime.date.today().year + 1)]
        # Counter per bucket, keyed by the bucket label.
        self.data = {str(year): 0 for year in self.years}
        # Gtk.Label showing each bucket's count, keyed like self.data.
        self.labels = {}
        self.layout_stat_box()

    def fill(self, year):
        """Increment the counter for *year*, mapping out-of-range/invalid years to fallback buckets."""
        if str(year) not in self.years:
            try:
                y = int(year)
                if y < self.first_year:
                    y = self.years[1]  # pre-2010 bucket
                else:
                    y = '?'  # numeric but beyond the known range
            except ValueError:
                y = '?'  # not a number at all
        else:
            y = str(year)
        self.data[y] += 1
        self.update_labels()

    def set_label(self, year, content):
        """Overwrite the displayed value for *year* with *content*."""
        self.data[str(year)] = content
        self.update_labels()

    def reset(self):
        """Zero every bucket and refresh the display."""
        for year in self.years:
            self.data[str(year)] = 0
        self.update_labels()

    def update_labels(self):
        """Push the current counters into the Gtk labels."""
        for year in self.years:
            self.labels[year].set_text("%s" % self.data[year])

    def layout_stat_box(self):
        """Create one 'year: count' label pair per bucket."""
        for year in self.years:
            label_year = Gtk.Label(label="%s: " % year)
            label_number = Gtk.Label(label='0')
            self.labels[year] = label_number
            self.pack_start(label_year, True, True, 0)
            self.pack_start(label_number, True, True, 0)
class MyWindow(Gtk.Window):
    """Main AirStuff window: collect publications from INSPIRE and AIR and diff them."""

    def __init__(self, air_file=None, inspire_file=None, additional_authors=None):
        """Build the window, optionally pre-loading AIR/INSPIRE entries from files."""
        Gtk.Window.__init__(self, title="Air Stuff")
        self.set_default_size(800, 350)
        self.create_interface()
        self.additional_authors = additional_authors
        if air_file is not None:
            self.upload_air_from_file(air_file)
        if inspire_file is not None:
            self.upload_inspire_from_file(inspire_file)
        # Watch the blacklist file so the diff table refreshes when it changes.
        self.file_blacklist = Gio.File.new_for_path(self.entry_blacklist.get_text())
        self.monitor_blacklist = self.file_blacklist.monitor_file(Gio.FileMonitorFlags.NONE, None)
        self.monitor_blacklist.connect("changed", self.changed_file)

    def changed_file(self, m, f, o, event):
        """React to blacklist-file changes by rebuilding the diff table."""
        # Without this check, multiple 'ok's will be printed for each file change
        if event == Gio.FileMonitorEvent.CHANGES_DONE_HINT:
            if len(self.table_diff_store):
                logger.debug('redoing table')
                self.make_diff(None)

    def create_interface(self):
        """Lay out the INSPIRE, AIR and diff tables with their control buttons."""
        main_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.add(main_box)
        # inspire
        box_inspire = Gtk.Box()
        main_box.add(box_inspire)
        # buttons inpire
        self.button_start = Gtk.Button(label="search on inspire")
        self.button_start.connect("clicked", self.run_stop_inspire)
        self.inspire_running = False
        box_inspire.pack_start(self.button_start, True, True, 0)
        self.button_upload_inspire = Gtk.Button(label='upload from file')
        self.button_upload_inspire.connect("clicked", self.upload_inspire)
        box_inspire.pack_start(self.button_upload_inspire, True, True, 0)
        # inspire table
        box_table = Gtk.Box()
        main_box.add(box_table)
        sw = Gtk.ScrolledWindow()
        sw.set_policy(
            Gtk.PolicyType.ALWAYS, Gtk.PolicyType.ALWAYS)
        sw.set_size_request(1000, 150)
        box_table.pack_start(sw, True, True, 0)
        self.table_inspire_store = Gtk.ListStore(str, str, str)
        self.table_view = Gtk.TreeView(model=self.table_inspire_store)
        sw.add(self.table_view)
        self.table_view.set_search_column(1)
        headers = 'doi', 'title', 'date'
        for i in range(3):
            column = Gtk.TreeViewColumn(headers[i], Gtk.CellRendererText(), text=i)
            column.set_sort_column_id(i)
            self.table_view.append_column(column)
        self.inspire_query = None
        # stat box inspire
        self.stat_box_inspire = StatBox()
        main_box.add(self.stat_box_inspire)
        # air button
        box_air = Gtk.Box()
        main_box.add(box_air)
        self.button_start_air = Gtk.Button(label="search on AIR")
        self.button_start_air.connect("clicked", self.run_stop_air)
        self.air_running = False
        box_air.pack_start(self.button_start_air, True, True, 0)
        self.button_upload_air = Gtk.Button(label='upload from file')
        self.button_upload_air.connect("clicked", self.upload_air)
        box_air.pack_start(self.button_upload_air, True, True, 0)
        # air table
        box_table_air = Gtk.Box()
        main_box.add(box_table_air)
        sw = Gtk.ScrolledWindow()
        sw.set_policy(
            Gtk.PolicyType.ALWAYS, Gtk.PolicyType.ALWAYS)
        sw.set_size_request(1000, 150)
        box_table_air.pack_start(sw, True, True, 0)
        self.table_air_store = Gtk.ListStore(str, str, str)
        self.table_air_view = Gtk.TreeView(model=self.table_air_store)
        sw.add(self.table_air_view)
        self.table_air_view.set_search_column(1)
        headers = 'doi', 'title', 'year'
        for i in range(3):
            column = Gtk.TreeViewColumn(headers[i], Gtk.CellRendererText(), text=i)
            column.set_sort_column_id(i)
            self.table_air_view.append_column(column)
        self.stat_box_air = StatBox()
        main_box.add(self.stat_box_air)
        # diff buttons
        box_diff = Gtk.Box()
        button_diff = Gtk.Button(label='make diff')
        button_diff.connect('clicked', self.make_diff)
        box_diff.pack_start(button_diff, True, True, 0)
        main_box.add(box_diff)
        self.entry_blacklist = Gtk.Entry()
        self.entry_blacklist.set_text("blacklist.txt")
        box_diff.pack_start(self.entry_blacklist, True, True, 0)
        self.button_blacklist = Gtk.CheckButton()
        self.button_blacklist.set_label("Remove blacklist")
        box_diff.pack_start(self.button_blacklist, True, True, 0)
        self.button_blacklist.set_active(True)
        self.button_blacklist.connect("toggled", self.remove_blacklist)
        # diff table
        box_table_diff = Gtk.Box()
        main_box.add(box_table_diff)
        sw = Gtk.ScrolledWindow()
        sw.set_policy(
            Gtk.PolicyType.ALWAYS, Gtk.PolicyType.ALWAYS)
        sw.set_size_request(1000, 150)
        box_table_diff.pack_start(sw, True, True, 0)
        self.table_diff_store = Gtk.ListStore(str, str, str)
        self.table_diff_view = Gtk.TreeView(model=self.table_diff_store)
        sw.add(self.table_diff_view)
        self.table_diff_view.set_search_column(1)
        headers = 'doi', 'title', 'date'
        for i in range(3):
            column = Gtk.TreeViewColumn(headers[i], Gtk.CellRendererText(), text=i)
            column.set_sort_column_id(i)
            self.table_diff_view.append_column(column)
        self.stat_box_diff = StatBox()
        main_box.add(self.stat_box_diff)
        # footer
        box_action = Gtk.Box()
        main_box.add(box_action)
        button_go = Gtk.Button(label='process selected')
        button_go.connect('clicked', self.go)
        box_action.pack_start(button_go, True, True, 0)

    def get_blacklist(self):
        """Return the non-empty lines of the blacklist file (empty list if missing)."""
        fn = self.entry_blacklist.get_text()
        try:
            with open(fn) as f:
                lines = f.read().split('\n')
                return [l for l in lines if l]
        except FileNotFoundError:
            logger.warning('file %s not found', fn)
            return []

    def remove_blacklist(self, button):
        """Rebuild the diff table when the blacklist checkbox is toggled."""
        if self.table_diff_store:
            logger.debug('redoing table')
            self.make_diff(None)

    def run_stop_inspire(self, widget):
        """Toggle the INSPIRE query: start it if idle, stop it if running."""
        if self.inspire_running:
            self.button_start.get_children()[0].set_label('stopping')
            self.button_start.set_sensitive(False)
            self.stop_inspire()
            self.inspire_running = False
            self.button_start.get_children()[0].set_label('start')
            self.button_start.set_sensitive(True)
        else:
            self.button_start.get_children()[0].set_label('starting')
            self.button_start.set_sensitive(False)
            self.run_inspire()
            self.inspire_running = True
            self.button_start.get_children()[0].set_label('stop')
            self.button_start.set_sensitive(True)

    def run_stop_air(self, widget):
        """Toggle the AIR query: start it if idle, stop it if running."""
        if self.air_running:
            self.button_start_air.get_children()[0].set_label('stopping')
            self.button_start_air.set_sensitive(False)
            self.stop_air()
            self.air_running = False
            self.button_start_air.get_children()[0].set_label('start')
            self.button_start_air.set_sensitive(True)
        else:
            self.button_start_air.get_children()[0].set_label('starting')
            self.button_start_air.set_sensitive(False)
            self.run_air()
            self.air_running = True
            self.button_start_air.get_children()[0].set_label('stop')
            self.button_start_air.set_sensitive(True)

    def run_inspire(self):
        """Start the asynchronous INSPIRE query; results land in the table via idle_add."""
        def f(item):
            with lock:
                # Gtk is not thread-safe: schedule the update on the main loop.
                GLib.idle_add(self.add_inspire, item)
        self.inspire_query = inspire.InspireQuery(query=inspire.ATLAS_QUERY, callback=f)
        self.inspire_query.run()

    def stop_inspire(self):
        """Stop and discard the INSPIRE query."""
        self.inspire_query.stop()
        self.inspire_query = None

    def add_inspire(self, item):
        """Append one INSPIRE record to the table and its year histogram.

        NOTE(review): ','.join(item['doi']) assumes fix_info returns the doi
        as a list; a plain string would be joined character by character —
        confirm against inspire.fix_info.
        """
        item = inspire.fix_info(item)
        self.table_inspire_store.append([','.join(item['doi']), str(item['title']), str(item['date'])])
        try:
            date = str2date(item['date'])
            self.stat_box_inspire.fill(date.year)
        except ValueError:
            self.stat_box_inspire.fill('?')

    def upload_inspire(self, item):
        """Ask for a file and load INSPIRE entries from it."""
        dlg = Gtk.FileChooserDialog(title="Please choose a file",
                                    parent=self,
                                    action=Gtk.FileChooserAction.OPEN)
        dlg.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
        dlg.add_button(Gtk.STOCK_OPEN, Gtk.ResponseType.OK)
        answer = dlg.run()
        fn = None
        try:
            if answer == Gtk.ResponseType.OK:
                fn = dlg.get_filename()
        finally:
            dlg.destroy()
        if not fn:
            return
        self.upload_inspire_from_file(fn)

    def upload_inspire_from_file(self, fn):
        """Load INSPIRE entries from a tab-separated file (doi, title, date)."""
        with open(fn) as f:
            for line in f:
                row = line.split('\t')
                date = row[2].strip()
                item = {'doi': row[0].strip(), 'title': row[1].strip(), 'date': date}
                self.add_inspire(item)

    def run_air(self):
        """Start the asynchronous AIR query for the configured author id."""
        def f(item):
            GLib.idle_add(self.add_air, item)
        self.air_query = AirQuery('rp09852', callback=f, workers=10)
        self.air_query.run()

    def stop_air(self):
        """Stop and discard the AIR query."""
        self.air_query.stop()
        self.air_query = None

    def add_air(self, item):
        """Append one AIR record to the table and its year histogram."""
        self.table_air_store.append([item['doi'], str(item['title']), str(item['year'])])
        self.stat_box_air.fill(item['year'])

    def upload_air(self, item):
        """Ask for a file and load AIR entries from it."""
        dlg = Gtk.FileChooserDialog(title="Please choose a file",
                                    parent=self,
                                    action=Gtk.FileChooserAction.OPEN)
        dlg.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
        dlg.add_button(Gtk.STOCK_OPEN, Gtk.ResponseType.OK)
        answer = dlg.run()
        fn = None
        try:
            if answer == Gtk.ResponseType.OK:
                fn = dlg.get_filename()
        finally:
            dlg.destroy()
        if not fn:
            return
        self.upload_air_from_file(fn)

    def upload_air_from_file(self, fn):
        """Load AIR entries from a tab-separated file (doi, title, year)."""
        with open(fn) as f:
            for iline, line in enumerate(f, 1):
                row = line.split('\t')
                try:
                    item = {'doi': row[0].strip(), 'title': row[1].strip(), 'year': row[2].strip()}
                except IndexError:
                    logger.error('problem parsing %s:%d "%s"', fn, iline, line)
                    # Bug fix: without this `continue` the previous row's item
                    # was re-added (or a NameError raised on the first line).
                    continue
                self.add_air(item)

    def make_diff(self, widget):
        """Rebuild the diff table: INSPIRE entries not matched in AIR or the blacklist."""
        logger.debug('making diff table')
        air_values = [list(row) for row in self.table_air_store]
        inspire_values = [list(row) for row in self.table_inspire_store]
        doi_blacklisted = []
        if self.button_blacklist.get_active():
            doi_blacklisted = self.get_blacklist()
        dois_air = set(v[0] for v in air_values)
        info_diff = []
        for inspire_info in inspire_values:
            found = False
            doi_inspire = inspire_info[0].split(',')
            # First pass: direct DOI match against AIR or the blacklist.
            for d in doi_inspire:
                if d in dois_air:
                    found = True
                    break
                if d in doi_blacklisted:
                    found = True
                    break
            if not found:
                # Second pass: fuzzy title matching, gated on compatible
                # collision energy and publication year.
                inspire_title_normalized = delatex(inspire_info[1].strip().lower())
                energy_inspire = get_energy_title(inspire_title_normalized)
                year_inspire = str2date(inspire_info[2]).year
                for air in air_values:
                    air_title = air[1]
                    air_title_normalized = delatex(air_title.strip().lower())
                    energy_air = get_energy_title(air_title_normalized)
                    # NOTE(review): assumes AIR years are numeric; a '?' entry
                    # would raise ValueError here — confirm the input data.
                    year_air = int(air[2])
                    if energy_inspire is not None and energy_air is not None and energy_inspire != energy_air:
                        continue
                    if abs(year_inspire - year_air) >= 2:
                        continue
                    distance = jellyfish.levenshtein_distance(air_title_normalized, inspire_title_normalized)
                    distance /= float(len(air_title_normalized))
                    if distance < 0.2 and (not air[0] or not doi_inspire or air[0] == 'None'):
                        logger.warning('removing title matching between (inspire %s)\n "%s" with doi %s with (air %s)\n "%s" with doi %s',
                                       inspire_info[2], inspire_info[1], doi_inspire, air[2], air_title, air[0])
                        found = True
                        break
                    elif distance < 0.2:
                        logger.info('suspicious title matching %f between (inspire %s)\n "%s" with doi %s with (air %s)\n "%s" with doi %s',
                                    distance, inspire_info[2], inspire_info[1], doi_inspire, air[2], air_title_normalized, air[0])
            if not found:
                info_diff.append([doi_inspire[0], inspire_info[1], inspire_info[2]])
        self.stat_box_diff.reset()
        self.table_diff_store.clear()
        for item in info_diff:
            if not item[0] in doi_blacklisted:
                self.table_diff_store.append([item[0], str(item[1]), str(item[2])])
                self.stat_box_diff.fill(str2date(item[2]).year)

    def go(self, widget):
        """Open a WindowDoi for the row currently selected in the diff table."""
        index_selected = self.table_diff_view.get_selection().get_selected_rows()
        if not index_selected[1]:
            return
        doi = self.table_diff_store[index_selected[1][0][0]][0]
        new_window = WindowDoi(doi=doi, institute="Milan U.", additional_authors=self.additional_authors)
        new_window.show_all()
def app_main(args):
    """Create the main window from parsed command-line *args* and show it."""
    window = MyWindow(air_file=args.air_file,
                      inspire_file=args.inspire_file,
                      additional_authors=args.add_author)
    window.connect("destroy", Gtk.main_quit)
    window.show_all()
def main():
    """Parse command-line options, initialise GTK and run the application."""
    import argparse
    parser = argparse.ArgumentParser(description='Air Stuff')
    parser.add_argument('--air-file', help='txt file with air entries')
    parser.add_argument('--inspire-file', help='txt file with inspire entries')
    parser.add_argument('--add-author', nargs='*', help='add authors to the list. Use format: "Surname, Name"')
    args = parser.parse_args()
    import threading
    # Start (and immediately finish) a dummy thread so Python's threading
    # machinery is initialised before GTK spins up its own threads.
    threading.Thread(target=lambda: None).start()
    GObject.threads_init()  # no-op on modern PyGObject, kept for older versions
    # Bug fix: the original tested the *function object* `Gtk.init_check`,
    # which is always truthy, so initialisation failures went undetected.
    ok = Gtk.init_check()
    if isinstance(ok, tuple):  # some PyGObject versions return (success, argv)
        ok = ok[0]
    if not ok:
        logger.fatal('Cannot initialize Gtk')
        return
    app_main(args)
    Gtk.main()
if __name__ == '__main__':
main() | AirStuff | /AirStuff-0.1.2-py3-none-any.whl/airstuff/app.py | app.py |
import tkinter as tk
from tkinter import ttk
import logging
logging.basicConfig(level=logging.DEBUG)
import inspire
from threading import Lock
class Application(tk.Frame):
    """Tk frontend: run an inspirehep query and show the results in a table."""

    def __init__(self, master=None):
        tk.Frame.__init__(self, master)
        self.pack()
        self.create_widgets()
        self.indico_query = None

    def create_widgets(self):
        """Build the query entry, run/stop buttons, result table and log pane."""
        self.label_input = tk.Label(self, text="inspirehep query:")
        self.label_input.grid(row=0, sticky=tk.W)
        self.input_inspirehep = tk.Entry(self)
        self.input_inspirehep.configure(width=50)
        self.input_inspirehep.grid(row=0, column=1)
        self.input_inspirehep.insert(tk.INSERT, "collaboration:'ATLAS' AND collection:published")
        self.button_run = tk.Button(self, text="run", command=self.run_search_inspire)
        self.button_run.grid(row=0, column=2)
        self.button_stop = tk.Button(self, text="stop", command=self.stop_search_inspire)
        self.button_stop.grid(row=0, column=3)
        cols = 'Date', 'Title', 'doi'
        self.table = ttk.Treeview(self, columns=cols, show='headings')
        # set column headings
        for col in cols:
            self.table.heading(col, text=col)
        self.table.grid(row=1, column=0, columnspan=3)
        self.table_index = 1
        self.button_login = tk.Button(self, text="login", command=self.air_login)
        self.button_login.grid(row=2, column=0)
        self.button_values = tk.Button(self, text="get values", command=self.air_login)
        self.button_values.grid(row=2, column=1)
        self.button_upload = tk.Button(self, text="upload", command=self.air_login)
        self.button_upload.grid(row=2, column=2)
        self.log = tk.Text(self)
        self.log.grid(row=3, column=0, columnspan=3)

    def run_search_inspire(self):
        """Start the INSPIRE query and poll its output queue into the table.

        NOTE(review): the polling loop below runs on the Tk main thread and
        only calls update_idletasks, so the GUI stays unresponsive until the
        query reports 'stopped' — confirm whether the commented-out callback
        variant was meant to replace it.
        """
        self.button_run.config(state="disabled")
        self.master.update()
        lock = Lock()

        def f(item):
            with lock:
                self.update_table(item)
                self.button_stop.update_idletasks()
                self.button_run.update_idletasks()
        #self.indico_query = inspire.IndicoQuery(callback=f)
        #self.indico_query.run()
        self.indico_query = inspire.IndicoQuery()
        self.indico_query.run()
        while self.indico_query.status != 'stopped':
            if not self.indico_query.output_queue.empty():
                item = self.indico_query.output_queue.get()
                self.update_table(item)
            self.button_stop.update_idletasks()
            self.button_run.update_idletasks()

    def update_table(self, item):
        """Append *item* to the result table.

        NOTE(review): the table columns are (Date, Title, doi) but only two
        values (doi, title) are supplied, so the doi lands in the Date
        column — confirm the intended column order.
        """
        logging.debug('adding to table %s' % item)
        self.table.insert("", self.table_index, self.table_index,
                          values=(item['doi'], item['title']))
        self.table_index += 1

    def stop_search_inspire(self):
        """Stop the running query, re-enable the run button and log the count."""
        self.indico_query.stop()
        self.indico_query = None
        # Bug fix: "enabled" is not a valid Tk button state and raises
        # TclError; the correct state name is "normal".
        self.button_run.config(state="normal")
        self.log.insert(tk.INSERT, "Found %d entries from inspirehep\n" % self.table_index)

    def air_login(self):
        """Placeholder for the AIR login action."""
        pass
def main():
    """Launch the Tk application with its title and maximum window size set."""
    root = tk.Tk()
    application = Application(master=root)
    application.master.title("Automate AIR")
    application.master.maxsize(1000, 400)
    application.mainloop()
if __name__ == "__main__":
main() | AirStuff | /AirStuff-0.1.2-py3-none-any.whl/airstuff/app_tkinter.py | app_tkinter.py |
import tkinter as tk
import threading
import queue
import time
from threading import Lock
class Producer(threading.Thread):
    """Thread that feeds consecutive integers into *queue* until ``running`` is cleared."""

    def __init__(self, queue):
        super(Producer, self).__init__()
        self.running = True
        self.input_queue = queue

    def run(self):
        counter = 0
        while self.running:
            # Busy-wait while the queue is full; push the next value otherwise.
            if not self.input_queue.full():
                self.input_queue.put(counter)
                counter += 1
        return
class Consumer(threading.Thread):
    """Thread that squares items from one queue into another until ``running`` is cleared."""

    def __init__(self, input_queue, output_queue):
        super(Consumer, self).__init__()
        self.running = True
        self.input_queue = input_queue
        self.output_queue = output_queue

    def run(self):
        while self.running:
            if self.input_queue.empty():
                continue
            payload = self.input_queue.get()
            time.sleep(0.1)  # simulate slow work
            self.output_queue.put(payload ** 2)
            self.input_queue.task_done()
class CallBackConsumer(threading.Thread):
    """Thread that drains a queue by invoking *callback* on every item.

    Exits only once ``running`` is False AND the queue is empty, so no
    item produced before the stop request is lost.
    """

    def __init__(self, input_queue, callback):
        super(CallBackConsumer, self).__init__()
        self.callback = callback
        self.input_queue = input_queue
        self.running = True

    def run(self):
        done = False
        while not done:
            if not self.input_queue.empty():
                payload = self.input_queue.get()
                self.callback(payload)
                self.input_queue.task_done()
            done = (not self.running) and self.input_queue.empty()
class Manager:
    """Wire one Producer, five Consumers and an optional callback worker together."""

    def __init__(self, callback=None, buf_size=10):
        self.input_queue = queue.Queue(buf_size)
        self.output_queue = queue.Queue()
        self.all_producers = []
        self.all_workers = []
        self.callback = callback
        self.callback_worker = None
        self.status = 'stopped'

    def run(self):
        """Start producer, worker and callback threads; set status to 'running'."""
        self.status = 'starting'
        p = Producer(self.input_queue)
        # setDaemon() is deprecated since Python 3.10; assign the attribute.
        p.daemon = True
        self.all_producers.append(p)
        for _ in range(5):
            worker = Consumer(self.input_queue, self.output_queue)
            worker.daemon = True
            self.all_workers.append(worker)
            worker.start()
        if self.callback is not None:
            self.callback_worker = CallBackConsumer(self.output_queue, self.callback)
            self.callback_worker.daemon = True
            self.callback_worker.start()
        p.start()
        self.status = 'running'

    def stop(self):
        """Stop every thread in producer -> consumer -> callback order and join them."""
        self.status = 'stopping'
        print('stopping producer')
        for producer in self.all_producers:
            producer.running = False
            producer.join()
        print('stopping consumer')
        for worker in self.all_workers:
            worker.running = False
            worker.join()
        if self.callback_worker is not None:
            self.callback_worker.running = False
            print('waiting callback worker to join')
            self.callback_worker.join()
        self.status = 'stopped'
        print('all stopped')
class Application(tk.Frame):
    """Small Tk demo window driving the Manager producer/consumer pipeline."""

    def __init__(self, master=None):
        tk.Frame.__init__(self, master)
        self.pack()
        self.create_widgets()
        self.task = None

    def create_widgets(self):
        """Create the run/stop buttons and the log pane.

        Bug fix: the original assigned the result of ``.pack()`` (always
        None) to ``self.button_run``/``self.button_stop``; create and pack
        in two steps so the widget references remain usable.
        """
        self.button_run = tk.Button(self, text="run", command=self.run)
        self.button_run.pack()
        self.button_stop = tk.Button(self, text="stop", command=self.stop)
        self.button_stop.pack()
        self.log = tk.Text(self)
        self.log.pack()

    def run(self):
        """Start the Manager, routing results to the log pane."""
        lock = Lock()

        def f(item):
            with lock:
                self.debug(item)
        self.task = Manager(callback=f)
        self.task.run()

    def stop(self):
        """Stop and discard the running Manager."""
        self.task.stop()
        self.task = None

    def debug(self, item):
        """Append *item* to the log pane and stdout.

        NOTE(review): this is called from a worker thread while Tk widgets
        are not thread-safe, which is why the original comment says removing
        the insert makes it work — confirm before relying on it.
        """
        msg = "%d\n" % item
        self.log.insert(tk.INSERT, msg)  # commenting this line it works
        print(msg)
print(msg)
root = tk.Tk()
app = Application(master=root)
app.mainloop() | AirStuff | /AirStuff-0.1.2-py3-none-any.whl/airstuff/example.py | example.py |
import gi
gi.require_version('Gtk', '3.0')
import colorlog
from gi.repository import Gtk, Gdk
from airstuff.wos import get_wos_from_doi
from airstuff.inspire import query_inspire, fix_info
from airstuff.scopus import get_eid_from_doi
from airstuff import driver_air
from airstuff import journals
logger = colorlog.getLogger('airstuff.info')
class WindowDoi(Gtk.Window):
    """Window collecting the metadata needed to register one DOI on AIR."""

    def __init__(self, doi=None, institute=None, additional_authors=None):
        """Build all widget rows; if *doi* is given, search it immediately."""
        Gtk.Window.__init__(self, title="Air Stuff")
        self.info = None
        self.additional_authors = additional_authors
        self.clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
        self.browser_name = 'chrome'
        self.set_size_request(1000, 500)
        main_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.add(main_box)
        doi_box, _, self.entry_doi = self.create_row("DOI")
        find_button = Gtk.Button.new_with_label("Search")
        find_button.connect("clicked", self.search_doi)
        doi_box.pack_start(find_button, True, True, 0)
        main_box.pack_start(doi_box, True, True, 0)
        institute_box, _, self.entry_institute = self.create_row('institute')
        main_box.pack_start(institute_box, True, True, 0)
        title_box, _, self.entry_title = self.create_row("title", copy=True)
        main_box.pack_start(title_box, True, True, 0)
        # Multi-line area listing the authors matching the institute.
        selected_authors_box = Gtk.Box()
        selected_authors_box.pack_start(Gtk.Label(label='selected authors'), True, True, 0)
        scrolledwindow = Gtk.ScrolledWindow()
        scrolledwindow.set_hexpand(True)
        scrolledwindow.set_vexpand(True)
        self.selected_authors_textview = Gtk.TextView()
        self.selected_authors_textbuffer = self.selected_authors_textview.get_buffer()
        scrolledwindow.add(self.selected_authors_textview)
        selected_authors_box.pack_start(scrolledwindow, True, True, 0)
        main_box.pack_start(selected_authors_box, True, True, 0)
        button_copy = Gtk.Button.new_with_label('copy')
        button_copy.connect("clicked",
                            lambda w: self.clipboard.set_text(
                                self.selected_authors_textbuffer.get_text(
                                    self.selected_authors_textbuffer.get_start_iter(),
                                    self.selected_authors_textbuffer.get_end_iter(),
                                    True
                                ), -1))
        selected_authors_box.pack_start(button_copy, True, True, 0)
        keywords, _, self.entry_keyworkds = self.create_row('keywords', copy=True)
        main_box.pack_start(keywords, True, True, 0)
        scopus, _, self.entry_scopus = self.create_row('scopus', copy=True)
        main_box.pack_start(scopus, True, True, 0)
        wos, _, self.entry_wos = self.create_row('web of knoledge', copy=True)
        main_box.pack_start(wos, True, True, 0)
        pdf_url, _, self.entry_pdf_url = self.create_row('url link', copy=True)
        main_box.pack_start(pdf_url, True, True, 0)
        # Selenium automation controls.
        frame_selenium = Gtk.Frame(label='automatic insertion')
        main_box.pack_start(frame_selenium, True, True, 8)
        box_selenium = Gtk.Box()
        frame_selenium.add(box_selenium)
        button_chrome = Gtk.RadioButton.new_with_label_from_widget(None, "Chrome")
        button_chrome.connect("toggled", self.on_browser_toggled, "chrome")
        box_selenium.pack_start(button_chrome, False, False, 0)
        button_firefox = Gtk.RadioButton.new_with_label_from_widget(button_chrome, "Firefox")
        button_firefox.connect("toggled", self.on_browser_toggled, "firefox")
        box_selenium.pack_start(button_firefox, False, False, 0)
        button_login_selenium = Gtk.Button(label='login to AIR')
        button_login_selenium.connect('clicked', self.login_selenium)
        box_selenium.pack_start(button_login_selenium, True, True, 0)
        button_start_selenium = Gtk.Button(label='insert from doi')
        button_start_selenium.connect('clicked', self.start_selenium)
        box_selenium.pack_start(button_start_selenium, True, True, 0)
        self.button_pause = Gtk.CheckButton()
        self.button_pause.set_label("Wait after each page")
        box_selenium.pack_start(self.button_pause, True, True, 0)
        self.button_pause.set_active(True)
        if institute is not None:
            self.entry_institute.set_text(institute)
        if doi is not None:
            self.entry_doi.set_text(doi)
            self.search_doi(self)

    def create_row(self, label, copy=False):
        """Return (box, label, entry) for one labelled entry row; add a copy button if *copy*."""
        box = Gtk.Box()
        label = Gtk.Label(label=label)
        entry = Gtk.Entry()
        box.pack_start(label, True, True, 0)
        box.pack_start(entry, True, True, 0)
        if copy:
            button_copy = Gtk.Button.new_with_label('copy')
            button_copy.connect("clicked", lambda w: self.clipboard.set_text(entry.get_text(), -1))
            box.pack_start(button_copy, True, True, 0)
        return box, label, entry

    def search_doi(self, widget):
        """Query INSPIRE/Scopus/WoS/journal for the entered DOI and fill the widgets."""
        doi = self.entry_doi.get_text()
        info = query_inspire("doi:%s" % doi)
        # Bug fix: the original ran `if not info == 0: pass`, doing nothing
        # and then crashing on info[0] for an empty result.
        if not info:
            logger.warning('no inspire record found for doi %s', doi)
            return
        if len(info) > 1:
            logger.warning('more than one inspire record for doi %s, using the first', doi)
        info = self.info = fix_info(info[0])
        self.entry_title.set_text(info['title'])
        # Bug fix: set([entry.get_text()]) always contained at least '', so
        # the "no institute" warning could never fire.
        institute_name = self.entry_institute.get_text()
        selected_institutes = {institute_name} if institute_name else set()
        if not selected_institutes:
            logger.warning('no institute specified')
        selected_authors = [author['full_name'] for author in info['authors']
                            if selected_institutes.intersection(set(author.get('affiliation', [])))]
        self.info['local_authors'] = selected_authors
        if self.additional_authors is not None:
            logger.debug('adding additional authors %s', self.additional_authors)
            for aa in self.additional_authors:
                if aa in self.info['local_authors']:
                    logger.warning('additional author %s already present', aa)
                else:
                    self.info['local_authors'].append(aa)
        if not selected_authors:
            logger.warning('no author found for institute %s', selected_institutes)
        self.selected_authors_textbuffer.set_text('\n'.join(selected_authors))
        eid = get_eid_from_doi(doi)
        if eid is not None:
            self.entry_scopus.set_text(eid)
            info['scopus'] = eid
        wos = get_wos_from_doi(doi)
        if wos is not None:
            info['wos'] = wos
            self.entry_wos.set_text(wos)
        if 'thesaurus_terms' in info:
            keywords = [k['term'] for k in info['thesaurus_terms'] if 'term' in k]
            self.entry_keyworkds.set_text(';'.join(keywords))
        logger.info('getting url from journal')
        pdf_url = journals.get_pdf_url(doi)
        if pdf_url:
            info['pdf_url'] = pdf_url
            self.entry_pdf_url.set_text(pdf_url)

    @property
    def driver(self):
        """Lazily create (and cache) the Selenium driver for the selected browser."""
        if hasattr(self, '_driver') and self._driver is not None and self._driver.name == self.browser_name:
            return self._driver
        else:
            self._driver = driver_air.get_driver(debug=True, driver=self.browser_name)
            return self._driver

    def login_selenium(self, widget):
        """Open the AIR login page in the Selenium-driven browser."""
        driver_air.login(self.driver)

    def on_browser_toggled(self, button, value):
        """Remember which browser radio button is active."""
        self.browser_name = value

    def start_selenium(self, widget):
        """Submit the collected record to AIR through Selenium; blacklist it on success/duplicate."""
        r = driver_air.upload_from_doi(self.driver, self.info, pause=self.button_pause.get_active())
        doi = self.info['doi']
        if isinstance(doi, list):
            doi = doi[0]
        if r == driver_air.ReturnValue.DUPLICATE:
            logger.warning('do not create duplicate %s', doi)
            self.ignore_in_future(doi)
        elif r == driver_air.ReturnValue.SUCCESS:
            logger.info('entry %s correctly submitted', doi)
            self.ignore_in_future(doi)
        # Bug fix: `del d` only removed the local name; the closed driver
        # stayed cached in self._driver and would be reused. Reset the cache.
        self.driver.close()
        self._driver = None

    def ignore_in_future(self, doi):
        """Append *doi* to the blacklist file so it is not proposed again."""
        with open('blacklist.txt', 'a') as f:
            f.write('%s\n' % doi)
def app_main(doi=None, institute=None):
    """Open a WindowDoi for *doi*/*institute* and wire window close to Gtk quit."""
    window = WindowDoi(doi, institute)
    window.connect("destroy", Gtk.main_quit)
    window.show_all()
if __name__ == '__main__':
    import argparse
    # Command-line entry point: optional positional DOI plus an --institute filter.
    parser = argparse.ArgumentParser(description='Insert information on air')
    parser.add_argument("doi", nargs='?', default=None)
    parser.add_argument('--institute')
    args = parser.parse_args()
    app_main(args.doi, args.institute)
Gtk.main() | AirStuff | /AirStuff-0.1.2-py3-none-any.whl/airstuff/air_info.py | air_info.py |
import threading
import logging
import colorlog
logger = colorlog.getLogger('airstuff.driverair')
class OffsetsProducer(threading.Thread):
    """Thread pushing offsets (0, step, 2*step, ...) into a queue until the stop event fires."""

    def __init__(self, input_queue, step, stop_event=None):
        super(OffsetsProducer, self).__init__()
        self.step = step
        self.input_queue = input_queue
        self.stop_event = stop_event

    def run(self):
        offset = 0
        while self.stop_event is None or not self.stop_event.is_set():
            # Busy-wait while the queue is full; push the next offset otherwise.
            if self.input_queue.full():
                continue
            logger.debug("adding %d", offset)
            self.input_queue.put(offset)
            logger.debug('added %d', offset)
            offset += self.step
        logger.debug('producer end')
        return
class DuplicateFilter(threading.Thread):
    """Forward items from input to output queue, dropping any item seen before.

    The seen-items list and its lock are class attributes, so the
    de-duplication state is shared by every DuplicateFilter instance.
    """

    added = []
    lock_duplicated = threading.Lock()

    def __init__(self, input_queue, output_queue, stop_event=None):
        super(DuplicateFilter, self).__init__()
        self.input_queue = input_queue
        self.output_queue = output_queue
        self.stop_event = stop_event

    def run(self):
        while self.stop_event is None or not self.stop_event.is_set():
            if self.input_queue.empty():
                continue
            item = self.input_queue.get()
            if item in self.added:
                logger.warning('duplicate: %s', item)
                self.input_queue.task_done()
                continue
            with self.lock_duplicated:
                self.added.append(item)
            self.input_queue.task_done()
            self.output_queue.put(item)
class CallBackConsumer(threading.Thread):
    """Drain a queue by calling *callback* on each item; exit once stopped AND drained."""

    def __init__(self, input_queue, callback, stop_event=None):
        super(CallBackConsumer, self).__init__()
        self.callback = callback
        self.input_queue = input_queue
        self.stop_event = stop_event

    def run(self):
        while True:
            if not self.input_queue.empty():
                payload = self.input_queue.get()
                self.callback(payload)
                self.input_queue.task_done()
            stop_requested = self.stop_event is not None and self.stop_event.is_set()
            if stop_requested and self.input_queue.empty():
                logger.debug('breaking main loop in callback worker')
                break
        logger.debug("callback worker end")
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GObject, GLib
import threading
import queue
import logging
import time
logging.basicConfig(level=logging.DEBUG)
class Producer(threading.Thread):
    """Thread that puts multiples of *step* into a queue until the stop event is set."""

    def __init__(self, input_queue, step, stop_event=None):
        super(Producer, self).__init__()
        self.step = step
        self.input_queue = input_queue
        self.stop_event = stop_event

    def run(self):
        value = 0
        while self.stop_event is None or not self.stop_event.is_set():
            if self.input_queue.full():
                continue
            self.input_queue.put(value)
            value += self.step
class WorkerConsumer(threading.Thread):
    """Thread that replaces each queued item with its square root on the output queue.

    Note: ``input_queue.get()`` blocks, so the stop event is only observed
    after another item arrives.
    """

    def __init__(self, input_queue, output_queue, stop_event=None):
        super(WorkerConsumer, self).__init__()
        self.input_queue = input_queue
        self.output_queue = output_queue
        self.stop_event = stop_event

    def run(self):
        while self.stop_event is not None and not self.stop_event.is_set():
            payload = self.input_queue.get()
            self.output_queue.put(payload ** 0.5)
            self.input_queue.task_done()
class CallBackConsumer(threading.Thread):
def __init__(self, input_queue, callback, stop_event=None):
super(CallBackConsumer, self).__init__()
self.callback = callback
self.input_queue = input_queue
self.stop_event = stop_event
def run(self):
while True:
if not self.input_queue.empty():
item = self.input_queue.get()
self.callback(item)
self.input_queue.task_done()
if self.stop_event is not None and self.stop_event.is_set() and self.input_queue.empty():
break
class MyWindow(Gtk.Window):
lock = threading.Lock()
def __init__(self):
Gtk.Window.__init__(self, title="Air Stuff")
main_box = Gtk.Box()
self.add(main_box)
self.entry = Gtk.Entry()
main_box.pack_start(self.entry, True, True, 0)
start_button = Gtk.Button(label='start')
stop_button = Gtk.Button(label='stop')
main_box.pack_start(start_button, True, True, 0)
main_box.pack_start(stop_button, True, True, 0)
start_button.connect('clicked', self.start)
stop_button.connect('clicked', self.stop)
def start(self, widget):
logging.debug('start')
def callback(x):
with self.lock:
#self.entry.set_text(str(x))
GLib.idle_add(self.entry.set_text, str(x))
self.input_queue = queue.Queue(10)
self.output_queue = queue.Queue()
self.stop_event = threading.Event()
self.producer = Producer(self.input_queue, 2, self.stop_event)
self.producer.start()
self.consumer = WorkerConsumer(self.input_queue, self.output_queue, self.stop_event)
self.callback_worker = CallBackConsumer(self.output_queue, callback, self.stop_event)
self.callback_worker.start()
self.consumer.start()
def stop(self, widget):
logging.debug('stop')
self.stop_event.set()
self.producer.join()
self.consumer.join()
win = MyWindow()
win.connect("destroy", Gtk.main_quit)
win.show_all()
Gtk.main() | AirStuff | /AirStuff-0.1.2-py3-none-any.whl/airstuff/problem_gtk.py | problem_gtk.py |
# airthings-api
## Description
Python Wrappers for AirThings API
## Installation
* Package published at https://pypi.org/project/airthings-api/
* Install package: `pip install -i airthings-api`
## Usage (Jupyter)
> NB: You can now use async/await at the top level in the IPython terminal and in the notebook, it should — in most of the cases — “just work”. Update IPython to version 7+, IPykernel to version 5+.
```python
# import the library
ata = __import__('airthings-api')
import aiohttp
session = aiohttp.ClientSession()
# Create an API manager; use your dashboard's credentials
manager = ata.api.web.AirThingsManager(
username='[email protected]',
password='xxxxxxxx',
session=session)
# Get the 'me' instance
me = await manager.get_me_instance()
print(me.email)
# Should be '[email protected]' I guess
# Get the 'locations' instances
locations_instance = await manager.get_locations_instance()
# Print devices and sensor values
for location in locations_instance.locations:
for device in location.devices:
print('device: {0}'.format(device.room_name))
for current_sensor_value in device.current_sensor_values:
print(' {0}: {1} {2}'.format(
current_sensor_value.type_,
current_sensor_value.value,
current_sensor_value.provided_unit))
await session.close()
# device: Wave Mini
# temp: 21.6 c
# humidity: 41.0 pct
# voc: 253.0 ppb
# mold: 0.0 riskIndex
# device: Wave
# radonShortTermAvg: 103.0 bq
# temp: 20.5 c
# humidity: 47.0 pct
# device: Hub AirThings
```
## Usage (Python file)
> NB: If you run in a Python file, wrapping the call within `asyncio.run(...)` will do the trick.
```python
ata = __import__('airthings-api')
import aiohttp
import asyncio
async def test():
session = aiohttp.ClientSession()
# Create an API manager; use your dashboard's credentials
manager = ata.api.web.AirThingsManager(
username='[email protected]',
password='xxxxxxxx',
session=session)
# Get the 'me' instance
me = await manager.get_me_instance()
print(me.email)
# Should be '[email protected]' I guess
# Get the 'locations' instances
locations_instance = await manager.get_locations_instance()
# Print devices and sensor values
for location in locations_instance.locations:
for device in location.devices:
print('device: {0}'.format(device.room_name))
for current_sensor_value in device.current_sensor_values:
print(' {0}: {1} {2}'.format(
current_sensor_value.type_,
current_sensor_value.value,
current_sensor_value.provided_unit))
await session.close()
# device: Wave Mini
# temp: 21.6 c
# humidity: 41.0 pct
# voc: 253.0 ppb
# mold: 0.0 riskIndex
# device: Wave
# radonShortTermAvg: 103.0 bq
# temp: 20.5 c
# humidity: 47.0 pct
# device: Hub AirThings
if __name__ == '__main__':
asyncio.run(test())
```
| AirThings-API | /AirThings-API-0.1.5.tar.gz/AirThings-API-0.1.5/README.md | README.md |
import aiohttp
import asyncio
import json
import logging
import math
from urllib import parse as up
import datetime as dt
import enum
from typing import Any, Dict, List, Optional, Coroutine
from typing_extensions import Literal
from ..responses import relay_devices_instance as rdi
from ..responses import locations_instance as li
from ..responses import thresholds_instance as ti
from ..responses import me_instance as mi
_LOGGER = logging.getLogger(__name__)
class AirThingsConstant:
CT_JSON = 'application/json'
CT_USER_AGENT = 'Mozilla/5.0 Chrome/87.0'
CT_BEARER_FORMAT = 'Bearer {0}'
CT_ACCOUNTS_API_BASE = 'https://accounts-api.airthings.com/v1/{0}'
CT_WEB_API_BASE = 'https://web-api.airthin.gs/v1/{0}'
CT_ACCOUNTS_ORIGIN = 'https://accounts.airthings.com'
CT_DASHBOARD_ORIGIN = 'https://dashboard.airthings.com'
CT_DASHBOARD_SECRET = 'e333140d-4a85-4e3e-8cf2-bd0a6c710aaa'
@enum.unique
class AirThingsAuthenticationAdvise(enum.Enum):
ShouldWait = 0
ShouldLogin = 1
ShouldRefreshToken = 2
ShouldBeGood = 3
ShouldCheckCredentials = 4
class AirThingsException(Exception):
"""Base exception for AirThings API errors."""
def __init__(self, error_code: int, error_details: str) -> None:
"""Initialise AirThingsException."""
self.error_code = error_code
self.error_details = error_details
class AirThingsInvalidCredentialsException(Exception):
"""Highlevel Exception for AirThings invalid credentials errors."""
pass
class AirThingsUnauthorizedException(AirThingsException):
"""Exception for AirThings API unauthorized errors."""
pass
class AirThingsManager:
def __init__(self, username: str, password: str, session: aiohttp.ClientSession) -> None:
self.username = username
self.password = password
self.session = session
self.tokens: Optional[Dict[str, Any]] = None
async def get_relay_devices_instance(self) -> rdi.RelayDevicesInstance:
return rdi.relay_devices_instance_from_dict(
await self.__execute_poll(
poll_coroutine=self.__poll_relay_devices()))
async def get_locations_instance(self) -> li.LocationsInstance:
return li.locations_instance_from_dict(
await self.__execute_poll(
poll_coroutine=self.__poll_locations()))
async def get_thresholds_instance(self) -> ti.ThresholdsInstance:
return ti.thresholds_instance_from_dict(
await self.__execute_poll(
poll_coroutine=self.__poll_thresholds()))
async def get_me_instance(self) -> mi.MeInstance:
return mi.me_instance_from_dict(
await self.__execute_poll(
poll_coroutine=self.__poll_me()))
async def validate_credentials(self) -> bool:
advise = await self.__assert_ready()
return (advise == AirThingsAuthenticationAdvise.ShouldBeGood)
async def __execute_poll(self, poll_coroutine: Coroutine) -> Optional[Dict[str, Any]]:
advise = await self.__assert_ready()
if advise == AirThingsAuthenticationAdvise.ShouldBeGood:
try:
return await poll_coroutine
except AirThingsUnauthorizedException as atue:
_LOGGER.error(
AirThingsManager.log(
method=poll_coroutine.__name__,
error_code=atue.error_code,
error_details=atue.error_details))
return None
except AirThingsException as ate:
_LOGGER.error(
AirThingsManager.log(
method=poll_coroutine.__name__,
error_code=ate.error_code,
error_details=ate.error_details))
return None
elif advise == AirThingsAuthenticationAdvise.ShouldCheckCredentials:
_LOGGER.warning(
AirThingsManager.log(
method=poll_coroutine.__name__,
advise=advise,
message='invalid credentials'))
raise AirThingsInvalidCredentialsException()
else:
_LOGGER.warning(
AirThingsManager.log(
method=poll_coroutine.__name__,
advise=advise,
message='cannot execute poll'))
return None
def __get_authentication_advise(self) -> AirThingsAuthenticationAdvise:
if self.tokens is None:
return AirThingsAuthenticationAdvise.ShouldLogin
if self.tokens['access_token'] is None:
return AirThingsAuthenticationAdvise.ShouldLogin
else:
if dt.datetime.utcnow() - self.tokens['timestamp'] >= dt.timedelta(seconds=self.tokens['expires_in']):
if self.tokens['refresh_token'] is None:
return AirThingsAuthenticationAdvise.ShouldLogin
else:
return AirThingsAuthenticationAdvise.ShouldRefreshToken
else:
return AirThingsAuthenticationAdvise.ShouldBeGood
async def __assert_ready(self) -> AirThingsAuthenticationAdvise:
advise = self.__get_authentication_advise()
if advise == AirThingsAuthenticationAdvise.ShouldLogin:
return await self.__perform_login()
elif advise == AirThingsAuthenticationAdvise.ShouldRefreshToken:
return await self.__perform_refresh()
return advise
async def __perform_login(self) -> AirThingsAuthenticationAdvise:
try:
token = await AirThingsManager.__get_token(
session=self.session,
username=self.username,
password=self.password)
consent = await AirThingsManager.__get_consent(
session=self.session,
token=token)
authorization_code = await AirThingsManager.__get_authorization_code(
session=self.session,
token=token,
consent=consent)
self.tokens = await AirThingsManager.__get_access_and_refresh_token(
session=self.session,
authorization_code=authorization_code)
return AirThingsAuthenticationAdvise.ShouldBeGood
except AirThingsUnauthorizedException as atue:
_LOGGER.error(
AirThingsManager.log(
method='__perform_login',
error_code=atue.error_code,
error_details=atue.error_details))
self.tokens = None
return AirThingsAuthenticationAdvise.ShouldCheckCredentials
except AirThingsException as ate:
_LOGGER.error(
AirThingsManager.log(
method='__perform_login',
error_code=ate.error_code,
error_details=ate.error_details))
self.tokens = None
return AirThingsAuthenticationAdvise.ShouldWait
async def __perform_refresh(self) -> AirThingsAuthenticationAdvise:
try:
self.tokens = await AirThingsManager.__refresh_access_and_refresh_token(
session=self.session,
previous_refresh_token=self.tokens['refresh_token'])
return AirThingsAuthenticationAdvise.ShouldBeGood
except AirThingsUnauthorizedException as atue:
_LOGGER.error(
AirThingsManager.log(
method='__perform_refresh',
error_code=atue.error_code,
error_details=atue.error_details))
self.tokens = None
return AirThingsAuthenticationAdvise.ShouldLogin
except AirThingsException as ate:
_LOGGER.error(
AirThingsManager.log(
method='__perform_refresh',
error_code=ate.error_code,
error_details=ate.error_details))
self.tokens = None
return AirThingsAuthenticationAdvise.ShouldWait
@staticmethod
async def __get_token(session: aiohttp.ClientSession, username: str, password: str) -> str:
async with session.post(
url=AirThingsManager.format_string(
AirThingsConstant.CT_ACCOUNTS_API_BASE,
'token'),
headers={
'origin': AirThingsConstant.CT_ACCOUNTS_ORIGIN,
'accept': AirThingsConstant.CT_JSON,
'content-type': AirThingsConstant.CT_JSON,
'user-agent': AirThingsConstant.CT_USER_AGENT,
},
json={
'username': username,
'password': password,
'grant_type': 'password',
'client_id': 'accounts'
}) as response:
if math.floor(response.status / 100) == 2:
rjson = await response.json()
return rjson['access_token']
elif math.floor(response.status / 100) == 4:
raise AirThingsUnauthorizedException(
error_code=response.status,
error_details=await response.text())
else:
raise AirThingsException(
error_code=response.status,
error_details=await response.text())
@staticmethod
async def __get_consent(session: aiohttp.ClientSession, token: str) -> Optional[Dict[str, Any]]:
async with session.get(
url=AirThingsManager.format_string(
AirThingsConstant.CT_ACCOUNTS_API_BASE,
'consents/dashboard?client_id=dashboard&redirect_uri={0}'.format(
AirThingsConstant.CT_DASHBOARD_ORIGIN)),
headers={
'origin': AirThingsConstant.CT_ACCOUNTS_ORIGIN,
'accept': AirThingsConstant.CT_JSON,
'content-type': AirThingsConstant.CT_JSON,
'user-agent': AirThingsConstant.CT_USER_AGENT,
'authorization': AirThingsManager.format_string(
AirThingsConstant.CT_BEARER_FORMAT,
token),
}) as response:
if math.floor(response.status / 100) == 2:
return await response.json()
elif math.floor(response.status / 100) == 4:
raise AirThingsUnauthorizedException(
error_code=response.status,
error_details=await response.text())
else:
raise AirThingsException(
error_code=response.status,
error_details=await response.text())
@staticmethod
async def __get_authorization_code(session: aiohttp.ClientSession, token: str, consent) -> str:
async with session.post(
url=AirThingsManager.format_string(
AirThingsConstant.CT_ACCOUNTS_API_BASE,
'authorize?client_id=dashboard&redirect_uri={0}'.format(
AirThingsConstant.CT_DASHBOARD_ORIGIN)),
headers={
'origin': AirThingsConstant.CT_ACCOUNTS_ORIGIN,
'accept': AirThingsConstant.CT_JSON,
'content-type': AirThingsConstant.CT_JSON,
'user-agent': AirThingsConstant.CT_USER_AGENT,
'authorization': AirThingsManager.format_string(
AirThingsConstant.CT_BEARER_FORMAT,
token),
},
json=consent) as response:
if math.floor(response.status / 100) == 2:
rjson = await response.json()
redirect_uri = rjson['redirect_uri']
fragments = up.urlparse(redirect_uri)
code = up.parse_qs(fragments.query)['code'][0]
return code
elif math.floor(response.status / 100) == 4:
raise AirThingsUnauthorizedException(
error_code=response.status,
error_details=await response.text())
else:
raise AirThingsException(
error_code=response.status,
error_details=await response.text())
@staticmethod
async def __get_access_and_refresh_token(session: aiohttp.ClientSession, authorization_code: str) -> Optional[Dict[str, Any]]:
async with session.post(
url=AirThingsManager.format_string(
AirThingsConstant.CT_ACCOUNTS_API_BASE,
'token'),
headers={
'origin': AirThingsConstant.CT_DASHBOARD_ORIGIN,
'accept': AirThingsConstant.CT_JSON,
'content-type': AirThingsConstant.CT_JSON,
'user-agent': AirThingsConstant.CT_USER_AGENT,
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'cross-site',
},
json={
'client_id': 'dashboard',
'client_secret': AirThingsConstant.CT_DASHBOARD_SECRET,
'code': authorization_code,
'grant_type': 'authorization_code',
'redirect_uri': AirThingsConstant.CT_DASHBOARD_ORIGIN,
}) as response:
if math.floor(response.status / 100) == 2:
response_dict = await response.json()
return {
'access_token': response_dict['access_token'],
'refresh_token': response_dict['refresh_token'],
'expires_in': response_dict['expires_in'],
'timestamp': dt.datetime.utcnow(),
}
elif math.floor(response.status / 100) == 4:
raise AirThingsUnauthorizedException(
error_code=response.status,
error_details=await response.text())
else:
raise AirThingsException(
error_code=response.status,
error_details=await response.text())
@staticmethod
async def __refresh_access_and_refresh_token(session: aiohttp.ClientSession, previous_refresh_token: str) -> Optional[Dict[str, Any]]:
async with session.post(
url=AirThingsManager.format_string(
AirThingsConstant.CT_ACCOUNTS_API_BASE,
'token'),
headers={
'origin': AirThingsConstant.CT_DASHBOARD_ORIGIN,
'accept': AirThingsConstant.CT_JSON,
'content-type': AirThingsConstant.CT_JSON,
'user-agent': AirThingsConstant.CT_USER_AGENT,
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'cross-site',
},
json={
'client_id': 'dashboard',
'client_secret': AirThingsConstant.CT_DASHBOARD_SECRET,
'refresh_token': previous_refresh_token,
'grant_type': 'refresh_token',
}) as response:
if math.floor(response.status / 100) == 2:
response_dict = await response.json()
return {
'access_token': response_dict['access_token'],
'refresh_token': response_dict['refresh_token'],
'expires_in': response_dict['expires_in'],
'timestamp': dt.datetime.utcnow(),
}
elif math.floor(response.status / 100) == 4:
raise AirThingsUnauthorizedException(
error_code=response.status,
error_details=await response.text())
else:
raise AirThingsException(
error_code=response.status,
error_details=await response.text())
@staticmethod
def log(**kwargs):
logger = ''
for key, value in kwargs.items():
logger = logger + '{0}: "{1}" | '.format(key, value)
return logger
@staticmethod
def format_string(template: AirThingsConstant, *args) -> str:
return str(template).format(*args)
@staticmethod
async def __poll_generic_entity(session: aiohttp.ClientSession, access_token: str, entity: str) -> Optional[Dict[str, Any]]:
async with session.get(
url=AirThingsManager.format_string(
AirThingsConstant.CT_WEB_API_BASE,
entity),
headers={
'origin': AirThingsConstant.CT_DASHBOARD_ORIGIN,
'accept': AirThingsConstant.CT_JSON,
'content-type': AirThingsConstant.CT_JSON,
'user-agent': AirThingsConstant.CT_USER_AGENT,
'authorization': access_token,
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'cross-site',
}) as response:
if math.floor(response.status / 100) == 2:
return await response.json()
elif math.floor(response.status / 100) == 4:
raise AirThingsUnauthorizedException(
error_code=response.status,
error_details=await response.text())
else:
raise AirThingsException(
error_code=response.status,
error_details=await response.text())
@staticmethod
async def __poll_relay_devices_base(session: aiohttp.ClientSession, access_token: str) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_generic_entity(
session=session,
access_token=access_token,
entity='relay-devices')
async def __poll_relay_devices(self) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_relay_devices_base(
session=self.session,
access_token=self.tokens['access_token'])
@staticmethod
async def __poll_locations_base(session: aiohttp.ClientSession, access_token: str) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_generic_entity(
session=session,
access_token=access_token,
entity='location')
async def __poll_locations(self) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_locations_base(
session=self.session,
access_token=self.tokens['access_token'])
@staticmethod
async def __poll_thresholds_base(session: aiohttp.ClientSession, access_token: str) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_generic_entity(
session=session,
access_token=access_token,
entity='thresholds')
async def __poll_thresholds(self) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_thresholds_base(
session=self.session,
access_token=self.tokens['access_token'])
@staticmethod
async def __poll_me_base(session: aiohttp.ClientSession, access_token: str) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_generic_entity(
session=session,
access_token=access_token,
entity='me/')
async def __poll_me(self) -> Optional[Dict[str, Any]]:
return await AirThingsManager.__poll_me_base(
session=self.session,
access_token=self.tokens['access_token']) | AirThings-API | /AirThings-API-0.1.5.tar.gz/AirThings-API-0.1.5/airthings-api/api/web.py | web.py |
from dataclasses import dataclass
from uuid import UUID
from datetime import datetime
from typing import Any, Dict, List, TypeVar, Callable, Type, cast
import dateutil.parser
T = TypeVar("T")
def from_str(x: Any) -> str:
assert isinstance(x, str)
return x
def from_bool(x: Any) -> bool:
assert isinstance(x, bool)
return x
def from_datetime(x: Any) -> datetime:
return dateutil.parser.parse(x)
def from_int(x: Any) -> int:
assert isinstance(x, int) and not isinstance(x, bool)
return x
def from_float(x: Any) -> float:
assert isinstance(x, (float, int)) and not isinstance(x, bool)
return float(x)
def to_float(x: Any) -> float:
assert isinstance(x, float)
return x
def from_dict(f: Callable[[Any], T], x: Any) -> Dict[str, T]:
assert isinstance(x, dict)
return { k: f(v) for (k, v) in x.items() }
def to_class(c: Type[T], x: Any) -> dict:
assert isinstance(x, c)
return cast(Any, x).to_dict()
def from_list(f: Callable[[Any], T], x: Any) -> List[T]:
assert isinstance(x, list)
return [f(y) for y in x]
@dataclass
class Group:
id_: UUID
group_name: str
genesis: bool
role: str
created_by_user_id: UUID
created_at: datetime
updated_at: datetime
display_subscription: bool
@staticmethod
def from_dict(obj: Any) -> 'Group':
assert isinstance(obj, dict)
id_ = UUID(obj.get("id"))
group_name = from_str(obj.get("groupName"))
genesis = from_bool(obj.get("genesis"))
role = from_str(obj.get("role"))
created_by_user_id = UUID(obj.get("createdByUserId"))
created_at = from_datetime(obj.get("createdAt"))
updated_at = from_datetime(obj.get("updatedAt"))
display_subscription = from_bool(obj.get("displaySubscription"))
return Group(id_, group_name, genesis, role, created_by_user_id, created_at, updated_at, display_subscription)
def to_dict(self) -> dict:
result: dict = {}
result["id"] = str(self.id_)
result["groupName"] = from_str(self.group_name)
result["genesis"] = from_bool(self.genesis)
result["role"] = from_str(self.role)
result["createdByUserId"] = str(self.created_by_user_id)
result["createdAt"] = self.created_at.isoformat()
result["updatedAt"] = self.updated_at.isoformat()
result["displaySubscription"] = from_bool(self.display_subscription)
return result
@dataclass
class Threshold:
default_high: int
default_low: int
min_selectable_value: int
max_selectable_value: int
unit: str
threshold_delta: float
@staticmethod
def from_dict(obj: Any) -> 'Threshold':
assert isinstance(obj, dict)
default_high = from_int(obj.get("defaultHigh"))
default_low = from_int(obj.get("defaultLow"))
min_selectable_value = from_int(obj.get("minSelectableValue"))
max_selectable_value = from_int(obj.get("maxSelectableValue"))
unit = from_str(obj.get("unit"))
threshold_delta = from_float(obj.get("thresholdDelta"))
return Threshold(default_high, default_low, min_selectable_value, max_selectable_value, unit, threshold_delta)
def to_dict(self) -> dict:
result: dict = {}
result["defaultHigh"] = from_int(self.default_high)
result["defaultLow"] = from_int(self.default_low)
result["minSelectableValue"] = from_int(self.min_selectable_value)
result["maxSelectableValue"] = from_int(self.max_selectable_value)
result["unit"] = from_str(self.unit)
result["thresholdDelta"] = to_float(self.threshold_delta)
return result
@dataclass
class Notifications:
thresholds: Dict[str, Threshold]
@staticmethod
def from_dict(obj: Any) -> 'Notifications':
assert isinstance(obj, dict)
thresholds = from_dict(Threshold.from_dict, obj.get("thresholds"))
return Notifications(thresholds)
def to_dict(self) -> dict:
result: dict = {}
result["thresholds"] = from_dict(lambda x: to_class(Threshold, x), self.thresholds)
return result
@dataclass
class MeInstance:
name: str
email: str
date_format: str
measurement_unit: str
is_pro_user: bool
notifications: Notifications
rf_region: str
is_demo_user: bool
groups: List[Group]
language: str
intercom_user_hash: str
user_id: UUID
@staticmethod
def from_dict(obj: Any) -> 'MeInstance':
assert isinstance(obj, dict)
name = from_str(obj.get("name"))
email = from_str(obj.get("email"))
date_format = from_str(obj.get("dateFormat"))
measurement_unit = from_str(obj.get("measurementUnit"))
is_pro_user = from_bool(obj.get("isProUser"))
notifications = Notifications.from_dict(obj.get("notifications"))
rf_region = from_str(obj.get("rfRegion"))
is_demo_user = from_bool(obj.get("isDemoUser"))
groups = from_list(Group.from_dict, obj.get("groups"))
language = from_str(obj.get("language"))
intercom_user_hash = from_str(obj.get("intercomUserHash"))
user_id = UUID(obj.get("userId"))
return MeInstance(name, email, date_format, measurement_unit, is_pro_user, notifications, rf_region, is_demo_user, groups, language, intercom_user_hash, user_id)
def to_dict(self) -> dict:
result: dict = {}
result["name"] = from_str(self.name)
result["email"] = from_str(self.email)
result["dateFormat"] = from_str(self.date_format)
result["measurementUnit"] = from_str(self.measurement_unit)
result["isProUser"] = from_bool(self.is_pro_user)
result["notifications"] = to_class(Notifications, self.notifications)
result["rfRegion"] = from_str(self.rf_region)
result["isDemoUser"] = from_bool(self.is_demo_user)
result["groups"] = from_list(lambda x: to_class(Group, x), self.groups)
result["language"] = from_str(self.language)
result["intercomUserHash"] = from_str(self.intercom_user_hash)
result["userId"] = str(self.user_id)
return result
def me_instance_from_dict(s: Any) -> MeInstance:
return MeInstance.from_dict(s)
def me_instance_to_dict(x: MeInstance) -> Any:
return to_class(MeInstance, x) | AirThings-API | /AirThings-API-0.1.5.tar.gz/AirThings-API-0.1.5/airthings-api/responses/me_instance.py | me_instance.py |
from enum import Enum
from dataclasses import dataclass
from typing import Optional, Any, List, TypeVar, Type, Callable, cast
T = TypeVar("T")
EnumT = TypeVar("EnumT", bound=Enum)
def from_int(x: Any) -> int:
assert isinstance(x, int) and not isinstance(x, bool)
return x
def from_none(x: Any) -> Any:
assert x is None
return x
def from_union(fs, x):
for f in fs:
try:
return f(x)
except:
pass
assert False
def to_enum(c: Type[EnumT], x: Any) -> EnumT:
assert isinstance(x, c)
return x.value
def from_str(x: Any) -> str:
assert isinstance(x, str)
return x
def from_list(f: Callable[[Any], T], x: Any) -> List[T]:
assert isinstance(x, list)
return [f(y) for y in x]
def to_class(c: Type[T], x: Any) -> dict:
assert isinstance(x, c)
return cast(Any, x).to_dict()
class Rating(Enum):
FAIR = "FAIR"
GOOD = "GOOD"
POOR = "POOR"
@dataclass
class Co2Range:
rating: Rating
to: Optional[int] = None
range_from: Optional[int] = None
@staticmethod
def from_dict(obj: Any) -> 'Co2Range':
assert isinstance(obj, dict)
rating = Rating(obj.get("rating"))
to = from_union([from_int, from_none], obj.get("to"))
range_from = from_union([from_int, from_none], obj.get("from"))
return Co2Range(rating, to, range_from)
def to_dict(self) -> dict:
result: dict = {}
result["rating"] = to_enum(Rating, self.rating)
result["to"] = from_union([from_int, from_none], self.to)
result["from"] = from_union([from_int, from_none], self.range_from)
return result
@dataclass
class Co2:
type_: str
unit: str
ranges: List[Co2Range]
@staticmethod
def from_dict(obj: Any) -> 'Co2':
assert isinstance(obj, dict)
type_ = from_str(obj.get("type"))
unit = from_str(obj.get("unit"))
ranges = from_list(Co2Range.from_dict, obj.get("ranges"))
return Co2(type_, unit, ranges)
def to_dict(self) -> dict:
result: dict = {}
result["type"] = from_str(self.type_)
result["unit"] = from_str(self.unit)
result["ranges"] = from_list(
lambda x: to_class(Co2Range, x), self.ranges)
return result
@dataclass
class PressureRange:
rating: Rating
@staticmethod
def from_dict(obj: Any) -> 'PressureRange':
assert isinstance(obj, dict)
rating = Rating(obj.get("rating"))
return PressureRange(rating)
def to_dict(self) -> dict:
result: dict = {}
result["rating"] = to_enum(Rating, self.rating)
return result
@dataclass
class Pressure:
type_: str
unit: str
ranges: List[PressureRange]
@staticmethod
def from_dict(obj: Any) -> 'Pressure':
assert isinstance(obj, dict)
type_ = from_str(obj.get("type"))
unit = from_str(obj.get("unit"))
ranges = from_list(PressureRange.from_dict, obj.get("ranges"))
return Pressure(type_, unit, ranges)
def to_dict(self) -> dict:
result: dict = {}
result["type"] = from_str(self.type_)
result["unit"] = from_str(self.unit)
result["ranges"] = from_list(
lambda x: to_class(PressureRange, x), self.ranges)
return result
@dataclass
class Thresholds:
temp: Co2
humidity: Co2
voc: Co2
co2: Co2
radon_short_term_avg: Co2
pressure: Pressure
mold: Co2
virus_risk: Co2
@staticmethod
def from_dict(obj: Any) -> 'Thresholds':
assert isinstance(obj, dict)
temp = Co2.from_dict(obj.get("temp"))
humidity = Co2.from_dict(obj.get("humidity"))
voc = Co2.from_dict(obj.get("voc"))
co2 = Co2.from_dict(obj.get("co2"))
radon_short_term_avg = Co2.from_dict(obj.get("radonShortTermAvg"))
pressure = Pressure.from_dict(obj.get("pressure"))
mold = Co2.from_dict(obj.get("mold"))
virus_risk = Co2.from_dict(obj.get("virusRisk"))
return Thresholds(temp, humidity, voc, co2, radon_short_term_avg, pressure, mold, virus_risk)
def to_dict(self) -> dict:
result: dict = {}
result["temp"] = to_class(Co2, self.temp)
result["humidity"] = to_class(Co2, self.humidity)
result["voc"] = to_class(Co2, self.voc)
result["co2"] = to_class(Co2, self.co2)
result["radonShortTermAvg"] = to_class(Co2, self.radon_short_term_avg)
result["pressure"] = to_class(Pressure, self.pressure)
result["mold"] = to_class(Co2, self.mold)
result["virusRisk"] = to_class(Co2, self.virus_risk)
return result
@dataclass
class ThresholdsInstance:
thresholds: Thresholds
@staticmethod
def from_dict(obj: Any) -> 'ThresholdsInstance':
assert isinstance(obj, dict)
thresholds = Thresholds.from_dict(obj.get("thresholds"))
return ThresholdsInstance(thresholds)
def to_dict(self) -> dict:
result: dict = {}
result["thresholds"] = to_class(Thresholds, self.thresholds)
return result
def thresholds_instance_from_dict(s: Any) -> ThresholdsInstance:
return ThresholdsInstance.from_dict(s)
def thresholds_instance_to_dict(x: ThresholdsInstance) -> Any:
return to_class(ThresholdsInstance, x) | AirThings-API | /AirThings-API-0.1.5.tar.gz/AirThings-API-0.1.5/airthings-api/responses/thresholds_instance.py | thresholds_instance.py |
from dataclasses import dataclass
from datetime import datetime
from typing import List, Dict, Any, TypeVar, Callable, Type, cast
from uuid import UUID
import dateutil.parser
T = TypeVar("T")
def from_datetime(x: Any) -> datetime:
return dateutil.parser.parse(x)
def from_str(x: Any) -> str:
assert isinstance(x, str)
return x
def from_list(f: Callable[[Any], T], x: Any) -> List[T]:
assert isinstance(x, list)
return [f(y) for y in x]
def from_dict(f: Callable[[Any], T], x: Any) -> Dict[str, T]:
assert isinstance(x, dict)
return {k: f(v) for (k, v) in x.items()}
def from_int(x: Any) -> int:
assert isinstance(x, int) and not isinstance(x, bool)
return x
def from_bool(x: Any) -> bool:
assert isinstance(x, bool)
return x
def to_class(c: Type[T], x: Any) -> dict:
assert isinstance(x, c)
return cast(Any, x).to_dict()
@dataclass
class MetaData:
last_seen: datetime
ble_firmware_version: str
sub_firmware_version: str
st_firmware_version: str
last_seen_devices: List[str]
devices: Dict[str, int]
region: str
cell: bool
@staticmethod
def from_dict(obj: Any) -> 'MetaData':
assert isinstance(obj, dict)
last_seen = from_datetime(obj.get("lastSeen"))
ble_firmware_version = from_str(obj.get("bleFirmwareVersion"))
sub_firmware_version = from_str(obj.get("subFirmwareVersion"))
st_firmware_version = from_str(obj.get("stFirmwareVersion"))
last_seen_devices = from_list(from_str, obj.get("lastSeenDevices"))
devices = from_dict(from_int, obj.get("devices"))
region = from_str(obj.get("region"))
cell = from_bool(obj.get("cell"))
return MetaData(last_seen, ble_firmware_version, sub_firmware_version, st_firmware_version, last_seen_devices, devices, region, cell)
def to_dict(self) -> dict:
result: dict = {}
result["lastSeen"] = self.last_seen.isoformat()
result["bleFirmwareVersion"] = from_str(self.ble_firmware_version)
result["subFirmwareVersion"] = from_str(self.sub_firmware_version)
result["stFirmwareVersion"] = from_str(self.st_firmware_version)
result["lastSeenDevices"] = from_list(from_str, self.last_seen_devices)
result["devices"] = from_dict(from_int, self.devices)
result["region"] = from_str(self.region)
result["cell"] = from_bool(self.cell)
return result
@dataclass
class Hub:
    """A single AirThings hub device as returned by the API."""
    serial_number: str
    device_type: str
    location_id: UUID
    name: str
    meta_data: MetaData
    @staticmethod
    def from_dict(obj: Any) -> 'Hub':
        """Deserialize a decoded-JSON dict (camelCase keys) into a Hub."""
        assert isinstance(obj, dict)
        return Hub(
            serial_number=from_str(obj.get("serialNumber")),
            device_type=from_str(obj.get("deviceType")),
            location_id=UUID(obj.get("locationId")),
            name=from_str(obj.get("name")),
            meta_data=MetaData.from_dict(obj.get("metaData")),
        )
    def to_dict(self) -> dict:
        """Serialize back to the API's camelCase dict representation."""
        return {
            "serialNumber": from_str(self.serial_number),
            "deviceType": from_str(self.device_type),
            "locationId": str(self.location_id),
            "name": from_str(self.name),
            "metaData": to_class(MetaData, self.meta_data),
        }
@dataclass
class RelayDevicesInstance:
    """Top-level payload of the relay-devices endpoint: a list of hubs."""
    hubs: List[Hub]
    @staticmethod
    def from_dict(obj: Any) -> 'RelayDevicesInstance':
        """Deserialize the decoded-JSON payload dict."""
        assert isinstance(obj, dict)
        return RelayDevicesInstance(from_list(Hub.from_dict, obj.get("hubs")))
    def to_dict(self) -> dict:
        """Serialize back to the API's dict representation."""
        return {"hubs": from_list(lambda h: to_class(Hub, h), self.hubs)}
def relay_devices_instance_from_dict(s: Any) -> RelayDevicesInstance:
    """Module-level convenience wrapper around RelayDevicesInstance.from_dict."""
    return RelayDevicesInstance.from_dict(s)
def relay_devices_instance_to_dict(x: RelayDevicesInstance) -> Any:
    """Module-level convenience wrapper: serialize a RelayDevicesInstance to a dict."""
    return to_class(RelayDevicesInstance, x) | AirThings-API | /AirThings-API-0.1.5.tar.gz/AirThings-API-0.1.5/airthings-api/responses/relay_devices_instance.py | relay_devices_instance.py
from dataclasses import dataclass
from typing import List, Any, Optional, TypeVar, Callable, Type, cast
from uuid import UUID
from datetime import datetime
import dateutil.parser
T = TypeVar("T")
def from_str(x: Any) -> str:
    """Runtime type guard: assert that *x* is a str and return it unchanged."""
    assert isinstance(x, str)
    return x
def from_float(x: Any) -> float:
    """Accept an int or float (but never a bool) and return it as a float."""
    assert isinstance(x, (float, int))
    assert not isinstance(x, bool)
    return float(x)
def from_bool(x: Any) -> bool:
    """Runtime type guard: assert that *x* is a bool and return it unchanged."""
    assert isinstance(x, bool)
    return x
def from_list(f: Callable[[Any], T], x: Any) -> List[T]:
    """Assert that *x* is a list and apply converter *f* to each element."""
    assert isinstance(x, list)
    return list(map(f, x))
def from_int(x: Any) -> int:
    """Accept a non-bool int and return it unchanged."""
    assert isinstance(x, int)
    assert not isinstance(x, bool)
    return x
def to_float(x: Any) -> float:
    """Serialization-side guard: assert that *x* is already a float and return it."""
    assert isinstance(x, float)
    return x
def from_datetime(x: Any) -> datetime:
    """Parse an ISO-8601 timestamp string into a datetime via dateutil."""
    return dateutil.parser.parse(x)
def from_none(x: Any) -> Any:
    """Runtime type guard: assert that *x* is None and return it."""
    assert x is None
    return x
def from_union(fs, x):
    """Return f(x) for the first converter in *fs* that accepts *x*.

    Converters signal rejection by raising; they are tried in order.
    Raises AssertionError when no converter accepts the value.
    """
    for f in fs:
        try:
            return f(x)
        except Exception:  # was a bare except: don't swallow KeyboardInterrupt/SystemExit
            pass
    assert False, "no converter in fs accepted the value"
def to_class(c: Type[T], x: Any) -> dict:
    """Assert that *x* is an instance of *c*, then serialize it via its to_dict()."""
    assert isinstance(x, c)
    return cast(Any, x).to_dict()
@dataclass
class CurrentSensorValue:
    """One sensor reading: type, value, units, alert state and thresholds."""
    type_: str
    value: Optional[float]
    provided_unit: str
    preferred_unit: str
    is_alert: bool
    thresholds: List[float]
    @staticmethod
    def from_dict(obj: Any) -> 'CurrentSensorValue':
        """Deserialize a decoded-JSON dict (camelCase keys)."""
        assert isinstance(obj, dict)
        return CurrentSensorValue(
            type_=from_str(obj.get("type")),
            value=from_union([from_float, from_none], obj.get("value")),
            provided_unit=from_str(obj.get("providedUnit")),
            preferred_unit=from_str(obj.get("preferredUnit")),
            is_alert=from_bool(obj.get("isAlert")),
            thresholds=from_list(from_float, obj.get("thresholds")),
        )
    def to_dict(self) -> dict:
        """Serialize back to the API's camelCase dict representation."""
        return {
            "type": from_str(self.type_),
            "value": from_union([from_float, from_none], self.value),
            "providedUnit": from_str(self.provided_unit),
            "preferredUnit": from_str(self.preferred_unit),
            "isAlert": from_bool(self.is_alert),
            "thresholds": from_list(from_float, self.thresholds),
        }
@dataclass
class Device:
    """One AirThings sensor device together with its latest readings."""
    serial_number: str
    location_name: str
    location_id: UUID
    room_name: str
    publicly_available: bool
    segment_id: UUID
    segment_start: datetime
    current_sensor_values: List[CurrentSensorValue]
    type_: str
    # The remaining fields are optional in the API payload and default to None.
    latest_sample: Optional[datetime] = None
    battery_percentage: Optional[int] = None
    rssi: Optional[int] = None
    relay_device: Optional[str] = None
    is_hub_connection_lost: Optional[bool] = None
    @staticmethod
    def from_dict(obj: Any) -> 'Device':
        """Deserialize a decoded-JSON dict (camelCase keys) into a Device."""
        assert isinstance(obj, dict)
        serial_number = from_str(obj.get("serialNumber"))
        location_name = from_str(obj.get("locationName"))
        location_id = UUID(obj.get("locationId"))
        room_name = from_str(obj.get("roomName"))
        publicly_available = from_bool(obj.get("publiclyAvailable"))
        segment_id = UUID(obj.get("segmentId"))
        segment_start = from_datetime(obj.get("segmentStart"))
        current_sensor_values = from_list(
            CurrentSensorValue.from_dict, obj.get("currentSensorValues"))
        type_ = from_str(obj.get("type"))
        # Optional fields: accept either the expected type or null.
        latest_sample = from_union(
            [from_datetime, from_none], obj.get("latestSample"))
        battery_percentage = from_union(
            [from_int, from_none], obj.get("batteryPercentage"))
        rssi = from_union([from_int, from_none], obj.get("rssi"))
        relay_device = from_union(
            [from_str, from_none], obj.get("relayDevice"))
        is_hub_connection_lost = from_union(
            [from_bool, from_none], obj.get("isHubConnectionLost"))
        return Device(serial_number, location_name, location_id, room_name, publicly_available, segment_id, segment_start, current_sensor_values, type_, latest_sample, battery_percentage, rssi, relay_device, is_hub_connection_lost)
    def to_dict(self) -> dict:
        """Serialize back to the API's camelCase dict representation."""
        result: dict = {}
        result["serialNumber"] = from_str(self.serial_number)
        result["locationName"] = from_str(self.location_name)
        result["locationId"] = str(self.location_id)
        result["roomName"] = from_str(self.room_name)
        result["publiclyAvailable"] = from_bool(self.publicly_available)
        result["segmentId"] = str(self.segment_id)
        result["segmentStart"] = self.segment_start.isoformat()
        result["currentSensorValues"] = from_list(lambda x: to_class(
            CurrentSensorValue, x), self.current_sensor_values)
        result["type"] = from_str(self.type_)
        result["latestSample"] = from_union(
            [lambda x: x.isoformat(), from_none], self.latest_sample)
        result["batteryPercentage"] = from_union(
            [from_int, from_none], self.battery_percentage)
        result["rssi"] = from_union([from_int, from_none], self.rssi)
        result["relayDevice"] = from_union(
            [from_str, from_none], self.relay_device)
        result["isHubConnectionLost"] = from_union(
            [from_bool, from_none], self.is_hub_connection_lost)
        return result
@dataclass
class UsageHours:
    """Empty placeholder object in the API payload; carries no fields."""
    @staticmethod
    def from_dict(obj: Any) -> 'UsageHours':
        """Accept any dict (contents ignored) and return an empty instance."""
        assert isinstance(obj, dict)
        return UsageHours()
    def to_dict(self) -> dict:
        """Serialize to an empty dict."""
        return {}
@dataclass
class Location:
    """A physical location (building/site) with its list of devices."""
    id_: UUID
    name: str
    lat: float
    lng: float
    devices: List[Device]
    low_battery_count: int
    device_count: int
    floorplans: List[Any]
    usage_hours: UsageHours
    # Optional in the API payload; defaults to None.
    address: Optional[str] = None
    @staticmethod
    def from_dict(obj: Any) -> 'Location':
        """Deserialize a decoded-JSON dict (camelCase keys) into a Location."""
        assert isinstance(obj, dict)
        id_ = UUID(obj.get("id"))
        name = from_str(obj.get("name"))
        lat = from_float(obj.get("lat"))
        lng = from_float(obj.get("lng"))
        devices = from_list(Device.from_dict, obj.get("devices"))
        low_battery_count = from_int(obj.get("lowBatteryCount"))
        device_count = from_int(obj.get("deviceCount"))
        floorplans = from_list(lambda x: x, obj.get("floorplans"))  # elements passed through untyped
        usage_hours = UsageHours.from_dict(obj.get("usageHours"))
        address = from_union([from_str, from_none], obj.get("address"))
        return Location(id_, name, lat, lng, devices, low_battery_count, device_count, floorplans, usage_hours, address)
    def to_dict(self) -> dict:
        """Serialize back to the API's camelCase dict representation."""
        result: dict = {}
        result["id"] = str(self.id_)
        result["name"] = from_str(self.name)
        result["lat"] = to_float(self.lat)
        result["lng"] = to_float(self.lng)
        result["devices"] = from_list(
            lambda x: to_class(Device, x), self.devices)
        result["lowBatteryCount"] = from_int(self.low_battery_count)
        result["deviceCount"] = from_int(self.device_count)
        result["floorplans"] = from_list(lambda x: x, self.floorplans)
        result["usageHours"] = to_class(UsageHours, self.usage_hours)
        result["address"] = from_union([from_str, from_none], self.address)
        return result
@dataclass
class LocationsInstance:
    """Top-level payload of the locations endpoint: a list of locations."""
    locations: List[Location]
    @staticmethod
    def from_dict(obj: Any) -> 'LocationsInstance':
        """Deserialize the decoded-JSON payload dict."""
        assert isinstance(obj, dict)
        return LocationsInstance(from_list(Location.from_dict, obj.get("locations")))
    def to_dict(self) -> dict:
        """Serialize back to the API's dict representation."""
        return {"locations": from_list(lambda loc: to_class(Location, loc), self.locations)}
def locations_instance_from_dict(s: Any) -> LocationsInstance:
    """Module-level convenience wrapper around LocationsInstance.from_dict."""
    return LocationsInstance.from_dict(s)
def locations_instance_to_dict(x: LocationsInstance) -> Any:
    """Module-level convenience wrapper: serialize a LocationsInstance to a dict."""
    return to_class(LocationsInstance, x) | AirThings-API | /AirThings-API-0.1.5.tar.gz/AirThings-API-0.1.5/airthings-api/responses/locations_instance.py | locations_instance.py
import inspect
import logging
import traceback
import multiprocessing
import urllib2
import sys
import xmlbuilder
from airbrakepy import __version__ ,__source_url__, __app_name__
_POISON = "xxxxPOISONxxxx"
class AirbrakeSender(multiprocessing.Process):
    """Background process that drains a work queue and POSTs XML notices to Airbrake.

    Runs until it receives the _POISON sentinel on the queue.
    NOTE(review): this is Python 2 code (urllib2, StandardError).
    """
    def __init__(self, work_queue, timeout_in_ms, service_url):
        multiprocessing.Process.__init__(self, name="AirbrakeSender")
        self.work_queue = work_queue
        self.timeout_in_seconds = timeout_in_ms / 1000.0  # urllib2 timeout is in seconds
        self.service_url = service_url
    def _handle_error(self):
        # Print the current exception to stderr; reporting failures must never escape.
        ei = sys.exc_info()
        try:
            traceback.print_exception(ei[0], ei[1], ei[2],
                                      file=sys.stderr)
        except IOError:
            pass
        finally:
            del ei  # break the traceback reference cycle
    def run(self):
        """Process loop: pull messages until the poison sentinel arrives."""
        global _POISON
        while True:
            try:
                message = self.work_queue.get()
                if message == _POISON:
                    break
                self._sendMessage(message)
            except Exception:
                self._handle_error()
    def _sendHttpRequest(self, headers, message):
        """POST *message* to the service URL and return the HTTP status code."""
        request = urllib2.Request(self.service_url, message, headers)
        try:
            response = urllib2.urlopen(request, timeout=self.timeout_in_seconds)
            status = response.getcode()
        except urllib2.HTTPError as e:
            status = e.code
        return status
    def _sendMessage(self, message):
        """Send one XML notice; raise StandardError on any non-200 response."""
        headers = {"Content-Type": "text/xml"}
        status = self._sendHttpRequest(headers, message)
        if status == 200:
            return
        # Map the documented Airbrake API status codes to readable messages.
        exceptionMessage = "Unexpected status code {0}".format(str(status))
        if status == 403:
            exceptionMessage = "Unable to send using SSL"
        elif status == 422:
            exceptionMessage = "Invalid XML sent: {0}".format(message)
        elif status == 500:
            exceptionMessage = "Destination server is unavailable. Please check the remote server status."
        elif status == 503:
            exceptionMessage = "Service unavailable. You may be over your quota."
        raise StandardError(exceptionMessage)
_DEFAULT_AIRBRAKE_URL = "http://airbrakeapp.com/notifier_api/v2/notices"
class AirbrakeHandler(logging.Handler):
    """Logging handler that ships records to Airbrake via a worker process.

    emit() only enqueues; the AirbrakeSender child process performs the HTTP
    delivery so logging never blocks on the network.
    """
    def __init__(self, api_key, environment=None, component_name=None, node_name=None,
                 use_ssl=False, timeout_in_ms=30000, airbrake_url=_DEFAULT_AIRBRAKE_URL):
        logging.Handler.__init__(self)
        self.api_key = api_key
        self.environment = environment
        self.component_name = component_name
        self.node_name = node_name
        self.work_queue = multiprocessing.Queue()
        # Don't block interpreter exit waiting for the queue's feeder thread.
        self.work_queue.cancel_join_thread()
        self.worker = AirbrakeSender(self.work_queue, timeout_in_ms, self._serviceUrl(airbrake_url, use_ssl))
        self.worker.start()
        self.logger = logging.getLogger(__name__)
    def emit(self, record):
        """Render *record* to Airbrake XML and enqueue it for the sender process."""
        try:
            message = self._generate_xml(record)
            self.work_queue.put(message)
            if self.logger.isEnabledFor(logging.DEBUG):
                self.logger.debug("Airbrake message queued for delivery")
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
    def close(self):
        """Poison the queue, wait briefly for the sender, then terminate it."""
        if self.work_queue:
            if self.logger.isEnabledFor(logging.DEBUG):
                self.logger.debug("POISONING QUEUE")
            global _POISON
            self.work_queue.put(_POISON, False)
            self.work_queue.close()
            self.work_queue = None
        if self.worker:
            self.logger.info("Waiting for remaining items to be sent to Airbrake.")
            self.worker.join(timeout=5.0)
            if self.worker.is_alive():
                self.logger.info("AirbrakeSender did not exit in an appropriate amount of time...terminating")
                self.worker.terminate()
            self.worker = None
        logging.Handler.close(self)
    def _serviceUrl(self, airbrake_url, use_ssl):
        # Force the scheme to match the use_ssl flag, whatever was configured.
        if use_ssl:
            return airbrake_url.replace('http://', 'https://', 1)
        else:
            return airbrake_url.replace('https://', 'http://', 1)
    #
    # This is largely based on the code example presented here:
    # http://robots.thoughtbot.com/post/323503523/writing-a-hoptoad-notifier-contacting-the-toad
    #
    def _generate_xml(self, record):
        """Build the Airbrake v2 notice XML for one log record."""
        exn = None
        trace = None
        if record.exc_info:
            _, exn, trace = record.exc_info
        message = record.getMessage()
        if exn:
            message = "{0}: {1}".format(message, str(exn))
        xml = xmlbuilder.XMLBuilder()
        with xml.notice(version=2.0):
            xml << ('api-key', self.api_key)
            with xml.notifier:
                xml << ('name', __app_name__)
                xml << ('version', __version__)
                xml << ('url', __source_url__)
            with xml('server-environment'):
                xml << ('environment-name', self.environment)
            with xml.request:
                xml << ("url", "")
                xml << ("component", self.component_name)
                with xml("cgi-data"):
                    with xml("var", key="nodeName"):
                        xml << self.node_name
                    with xml("var", key="componentName"):
                        xml << self.component_name
            with xml.error:
                xml << ('class', '' if exn is None else exn.__class__.__name__)
                xml << ('message', message)
                with xml.backtrace:
                    # With no traceback, report the logging call site as a single frame.
                    if trace is None:
                        [xml << ('line', {'file': record.pathname, 'number': record.lineno, 'method': record.funcName})]
                    else:
                        [xml << ('line', {'file': filename, 'number': line_number, 'method': "{0}: {1}".format(function_name, text)})\
                         for filename, line_number, function_name, text in traceback.extract_tb(trace)]
        return str(xml) | AirbrakePy | /AirbrakePy-1.0.0b1.tar.gz/AirbrakePy-1.0.0b1/airbrakepy/logging/handlers.py | handlers.py
import models,pygame,random,sys,traceback
from pygame.locals import *
# import os
# CMD = r'D:\ImageMagick-7.0.8-Q16-HDRI\convert.exe' #ImageMagick安装目录下convert.exe所在目录
# SOURCE_PATH = r'E:\作业练习\游戏开发\bg' #png图片所在目录
#
# def doStrip(path):
# data = {}
# print(path)v
# for root, dir
# s, files in os.walk(path):
# for file in files:
# name = file.lower()
# if name.find('.png') != -1:
# path = os.path.join(root, file)
# os.system('"{0}" {1} -strip {1}'.format(CMD, path, path))
# doStrip(SOURCE_PA
# 添加背景音乐
# pygame.mixer.init()
# pygame.mixer.music.load("毛毛 - 奇迹再现.mp3")
# pygame.mixer.music.set_volume(0.5) #设置音量
# Color constants used throughout the game (RGB tuples).
color_black = (0,0,0)
color_green = (0,255,0)
color_red = (255,0,0)
color_white = (255,255,255)
color_blue = (0,0,255)
class Game_Engine():
    """Main game engine: owns all sprite groups, the event loop, collision
    handling and HUD drawing for the plane-shooter game."""
    def __init__(self):
        # Engine initialisation: build every game object the engine manages.
        # Two background sprites, offset by one screen, for seamless scrolling.
        self.bg1 = models.BackgroundSprite()
        self.bg2 = models.BackgroundSprite(next = True)
        # The hero plane.
        self.hero = models.HeroSprite()
        # Hero plane total energy at start.
        self.energy = self.hero.energy
        # Supply 1 (bullet upgrade) pickup.
        self.supply1 = models.Supply1()
        # Supply 2 (holy shield) pickup.
        self.supply2 = models.Supply2()
        # Sprite group for supply 1 (bullet upgrade).
        self.supply_first = pygame.sprite.Group(self.supply1)
        # Sprite group for supply 2 (holy shield).
        self.supply_sencond = pygame.sprite.Group(self.supply2)
        # One enemy plane instance.
        self.enemy = models.EnemySprite()
        # One enemy boss instance.
        self.enemy_boss = models.Enemy_Boss()
        # Always-rendered sprites: both backgrounds plus the hero.
        self.resources = pygame.sprite.Group(self.bg1,self.bg2,self.hero)
        # Separate hero group used for hero-vs-boss collision tests.
        self.hero_boss = pygame.sprite.Group(self.hero)
        # Group of all regular enemy planes.
        self.enemys = pygame.sprite.Group()
        # Group of all boss planes.
        self.enemys_boss = pygame.sprite.Group()
        # Group of enemies currently playing their destruction animation.
        self.destroy_images = pygame.sprite.Group()
        # Dict (bullet -> destroyed enemies) from the last collision pass.
        self.destroy_images_dict = {}
    def start(self):
        """Initialise pygame, schedule enemy spawn timers and run the main loop."""
        # Initialise all pygame modules.
        pygame.init()
        clock = pygame.time.Clock()
        # Spawn a regular enemy every 2 seconds.
        pygame.time.set_timer(models.ENEMY_CREATE,2000)
        # Spawn the boss 20 seconds into the game.
        pygame.time.set_timer(models.ENEMY_CREATE +1,20000)
        self.create_scene()
        # Main game loop.
        while True:
            # pygame.mixer.music.play(-1)  # loop the background music
            # Cap the loop at 24 frames per second.
            clock.tick(24)
            self.update_scene()  # render the scene and all sprite groups
            self.check_event()  # handle events and keyboard input
            self.check_collide()  # collision detection
            self.blood_groove()  # draw the hero's health bar
            self.boss_blood_groove()  # draw the boss's health bar
            self.create_enetgy_list()  # draw the energy counter
    def blood_groove(self):
        """Draw the hero plane's health bar just above its sprite."""
        pygame.draw.line(self.screen, color_blue, (self.hero.rect.left, self.hero.rect.top - 5),
                         (self.hero.rect.right, self.hero.rect.top - 5), 15)
        energy_remain = self.hero.blood_volume / 30  # current HP over max HP (30)
        if energy_remain > 0.4:  # green while above 40%, red below
            energy_color = color_green
        else:
            energy_color = color_red
        pygame.draw.line(self.screen, energy_color,
                         (self.hero.rect.left, self.hero.rect.top - 5),
                         (self.hero.rect.left + self.hero.rect.width * energy_remain, self.hero.rect.top - 5),
                         2)
        # Flush the health bar to the display.
        pygame.display.update()
    def boss_blood_groove(self):
        """Draw the boss plane's health bar just above its sprite."""
        pygame.draw.line(self.screen,color_blue,(self.enemy_boss.rect.left,self.enemy_boss.rect.top -5),
                         (self.enemy_boss.rect.right,self.enemy_boss.rect.top -5),15)
        energy_remain_boss = self.enemy_boss.boss_blood_volume / 200  # current HP over max HP (200)
        if energy_remain_boss > 0.4:  # green while above 40%, red below
            energy_color = color_green
        else:
            energy_color = color_red
        pygame.draw.line(self.screen,energy_color,(self.enemy_boss.rect.left,self.enemy_boss.rect.top -5),
                         (self.enemy_boss.rect.left + self.enemy_boss.rect.width * energy_remain_boss,self.enemy_boss.rect.top-5),2)
        # Flush the health bar to the display.
        pygame.display.update()
    def create_scene(self):
        """Create the game window and set its caption."""
        self.screen = pygame.display.set_mode(models.SCREEN_SIZE)
        # Window caption (kept in Chinese; it is a user-facing string).
        pygame.display.set_caption("塑料版飞机大战")
    def create_enetgy_list(self):
        """Render the hero's accumulated-energy counter in the top-left corner."""
        # Use a system font.
        self.fonts = pygame.font.SysFont("fangsong", 20, True)
        # Bold.
        self.fonts.set_bold(True)
        # Italic.
        self.fonts.set_italic(True)
        # Energy readout; energy accumulates from destroyed enemies.
        self.screen.blit(self.fonts.render("当前能量:%s" % self.hero.energy, True, color_blue, color_red),[10,10])
        # Refresh the scene.
        pygame.display.update()
    def update_scene(self):
        """Update and draw every sprite group, then flip the display."""
        # Hero group used for boss collisions.
        self.hero_boss.update()
        self.hero_boss.draw(self.screen)
        # Backgrounds + hero.
        self.resources.update()
        self.resources.draw(self.screen)
        # Hero bullets.
        self.hero.bullets.update()
        self.hero.bullets.draw(self.screen)
        # Hero upgraded bullets.
        self.hero.strengthen_bullets.update()
        self.hero.strengthen_bullets.draw(self.screen)
        # Regular-enemy bullets.
        self.enemy.destroy_bullets.update()
        self.enemy.destroy_bullets.draw(self.screen)
        # Boss bullets.
        self.enemy_boss.enemy_boss_bullets.update()
        self.enemy_boss.enemy_boss_bullets.draw(self.screen)
        # All regular enemy planes.
        self.enemys.update()
        self.enemys.draw(self.screen)
        # All boss planes.
        self.enemys_boss.update()
        self.enemys_boss.draw(self.screen)
        # Supply pickups.
        self.supply_first.update()
        self.supply_first.draw(self.screen)
        self.supply_sencond.update()
        self.supply_sencond.draw(self.screen)
        # Flip the frame to the display.
        pygame.display.update()
    def check_collide(self):
        """Run all per-frame collision checks and apply their effects."""
        # Hero bullets vs enemy planes (both sides removed on hit).
        self.destroy_images_dict = pygame.sprite.groupcollide(self.hero.bullets,self.enemys,True,True)
        # Queue destroyed enemies for the explosion animation.
        self.destroy_images.add(self.destroy_images_dict)
        for destroy_image in self.destroy_images:
            # Lower the frame rate to show the explosion slowly.
            clock = pygame.time.Clock()
            clock.tick(20)
            self.destroy(destroy_image)  # play the destruction animation for this enemy
            print("爆炸>>>>>>>")
            self.destroy_images.remove(destroy_image)
        # Award 5 energy when at least one enemy was destroyed this frame.
        while len(self.destroy_images_dict) > 0:
            self.hero.energy += 5
            break
        # Hero upgraded bullets vs enemy planes (same handling as above).
        self.destroy_images_dict = pygame.sprite.groupcollide(self.hero.strengthen_bullets, self.enemys, True, True)
        self.destroy_images.add(self.destroy_images_dict)
        for destroy_image in self.destroy_images:
            # Lower the frame rate to show the explosion slowly.
            clock = pygame.time.Clock()
            clock.tick(20)
            self.destroy(destroy_image)  # play the destruction animation for this enemy
            print("爆炸>>>>>>>")
            self.destroy_images.remove(destroy_image)
        while len(self.destroy_images_dict) > 0:
            self.hero.energy += 5
            break
        # Clock reused below so the hero loses HP to the boss gradually.
        clock = pygame.time.Clock()
        # Hero bullets vs boss (bullet removed, boss loses 1 HP per hit).
        u = pygame.sprite.groupcollide(self.hero.bullets,self.enemys_boss,True,False)
        while len(u) > 0:
            clock.tick(24)
            self.enemy_boss.boss_blood_volume -= 1
            if self.enemy_boss.boss_blood_volume > 0:  # check remaining boss HP
                print("敌方boss剩余血量" , self.enemy_boss.boss_blood_volume)
                break
            elif self.enemy_boss.boss_blood_volume == 0:
                self.enemy_boss.kill()
                pygame.quit()
                exit()
        # Hero plane vs boss plane (contact damage to the hero).
        e = pygame.sprite.groupcollide(self.hero_boss,self.enemys_boss,False,False)
        while len(e) > 0:  # check remaining hero HP
            self.hero.blood_volume -= 1
            if self.hero.blood_volume > 0:  # check remaining hero HP
                print("剩余生命:", self.hero.blood_volume)
                break
            elif self.hero.blood_volume == 0:
                self.hero.kill()
                pygame.quit()
                exit()
        # Hero plane vs boss bullets.
        b = pygame.sprite.spritecollide(self.hero,self.enemy_boss.enemy_boss_bullets,True)
        while len(b) > 0:
            self.hero.blood_volume -= 1
            if self.hero.blood_volume > 0:  # check remaining hero HP
                print("剩余生命:", self.hero.blood_volume)
                break
            elif self.hero.blood_volume == 0:
                # Hero is out of HP: game over.
                self.hero.kill()
                pygame.quit()
                exit()
        # Hero vs supply 1 (bullet upgrade): fire an upgraded shot.
        x = pygame.sprite.spritecollide(self.hero,self.supply_first,True)
        if len(x) > 0:
            self.hero.strengthen_fire()
        # Hero vs supply 2 (holy shield).
        z = pygame.sprite.spritecollide(self.hero,self.supply_sencond,True)
        if len(z) > 0:
            pass  # hero would gain a 6-second invulnerability shield here (not implemented)
        # Hero plane vs enemy planes (contact damage, enemy removed).
        e = pygame.sprite.spritecollide(self.hero,self.enemys,True)
        while len(e) >0:
            self.hero.blood_volume -= 1
            if self.hero.blood_volume > 0:  # check remaining hero HP
                print("剩余生命:",self.hero.blood_volume)
                break
            elif self.hero.blood_volume == 0:
                self.hero.kill()
                pygame.quit()
                exit()
        # Hero plane vs regular-enemy bullets.
        c = pygame.sprite.spritecollide(self.hero,self.enemy.destroy_bullets,True)
        while len(c) >0:
            self.hero.blood_volume -= 1
            if self.hero.blood_volume > 0:  # check remaining hero HP
                print("剩余生命:",self.hero.blood_volume)
                break
            elif self.hero.blood_volume == 0:
                self.hero.kill()
                pygame.quit()
                exit()
    def destroy(self,enemy):
        """Blit the explosion frames at *enemy*'s position."""
        print("敌机销毁")
        # Show the sequence of destruction frames.
        for i in range(2,5):
            clock = pygame.time.Clock()
            clock.tick(35)
            destroy_image = pygame.image.load("./bg/destroy_images" + str(i) + ".png")
            self.screen.blit(destroy_image,enemy.rect)
            pygame.display.update()
            print(destroy_image)
    # NOTE(review): mouse-control tuning constants; no references to them are
    # visible anywhere in this class — confirm before removing.
    sprite_speed = 300
    sprite_rotation = 0
    sprite_rotation_speed = 360
    def check_event(self):
        """Drain the event queue (quit/spawn events) and poll the keyboard."""
        # Listen for all pending events.
        event_list = pygame.event.get()
        if len(event_list) > 0:
            print(event_list)
            for event in event_list:
                print(event.type,pygame.KEYDOWN,pygame.K_LEFT)
                # QUIT event: unload all pygame resources and exit.
                if event.type == pygame.QUIT:
                    pygame.quit()
                    exit()
                elif event.type == models.ENEMY_CREATE:
                    print("创建一架敌方飞机....")
                    self.enemy = models.EnemySprite()
                    # Add it to the enemy sprite group.
                    self.enemys.add(self.enemy)
                elif event.type == models.ENEMY_CREATES:
                    print("创建一架敌方boss飞机....")
                    self.enemy_boss = models.Enemy_Boss()
                    # Add it to the boss sprite group.
                    self.enemys_boss.add(self.enemy_boss)
        # Make the (latest) enemy fire a bullet each frame.
        self.enemy.destroy_fire()
        # (Disabled) auto-fire and mouse-control experiments kept from development:
        # auto self.hero.fire() each frame, plus moving the hero with the mouse via
        # pygame.mouse.get_pos()/get_pressed()/get_rel().
        # Poll the keyboard for held keys.
        key_down = pygame.key.get_pressed()
        if key_down[pygame.K_LEFT]:
            print("向左移动<<<<<<<<<<<<")
            self.hero.rect.x -= 15
        elif key_down[pygame.K_RIGHT]:
            print("向右移动>>>>>>>>>>>>")
            self.hero.rect.x += 15
        elif key_down[pygame.K_UP]:
            print("向上移动^^^^^^^^^^^")
            self.hero.rect.y -= 15
        elif key_down[pygame.K_DOWN]:
            print("向下移动vvvvvvvvv")
            self.hero.rect.y += 15
        if key_down[pygame.K_SPACE]:
            self.hero.fire()
            print("发射子弹》》》", self.hero.bullets )
        if key_down[pygame.K_q]:
            self.hero.strengthen_fire()
            print("发射强化版子弹》》》", self.hero.strengthen_bullets) | Aircraft-war | /Aircraft_war-1.0.tar.gz/Aircraft_war-1.0/game_engine.py | game_engine.py
import pygame,random,traceback
from pygame.locals import *
# Constants shared across the game.
SCREEN_SIZE = (512,768)
SCREEN_RECT = pygame.Rect(0,0,*SCREEN_SIZE)  # full game-scene rectangle
# Custom event id: spawn a regular enemy.
ENEMY_CREATE = pygame.USEREVENT
# Custom event id: spawn the enemy boss.
ENEMY_CREATES = pygame.USEREVENT +1
# Module-level clock.
clock = pygame.time.Clock()
class GameSprite(pygame.sprite.Sprite):
    """Base sprite for every on-screen element: an image, its rect and a speed."""
    def __init__(self,image_path,speed = 1):
        # Initialise the parent Sprite.
        super().__init__()
        self.image = pygame.image.load(image_path)
        self.rect = self.image.get_rect()
        self.speed = speed
    def update(self):
        """Default motion: drift straight down by *speed* pixels per frame."""
        self.rect.y += self.speed
class BackgroundSprite(GameSprite):
    """Scrolling background; two instances one screen apart scroll seamlessly."""
    def __init__(self,next = False):
        super().__init__("./bg/img_bg_level_2.jpg",speed = 5)
        # The second background starts one full screen above the first.
        if next:
            self.rect.y = - SCREEN_SIZE[1]
    def update(self):
        # Parent handles the downward motion.
        super().update()
        # Wrap around once the image scrolls off the bottom.
        if self.rect.y > SCREEN_SIZE[1]:
            self.rect.y = -SCREEN_SIZE[1]
class HeroSprite(GameSprite):
    """The player-controlled hero plane."""
    # (Old idea: a class-level energy attribute for hero HP.)
    # energy = 5
    def __init__(self):
        # Hero image; speed 0 because the keyboard moves it directly.
        super().__init__("./bg/hero.png",speed = 0)
        # Start centred horizontally, below the screen centre.
        self.rect.centerx = SCREEN_RECT.centerx
        self.rect.y = SCREEN_RECT.centery + 200
        # Hero hit points.
        self.blood_volume = 30
        # Accumulated energy (earned by destroying enemies).
        self.energy = 0
        # Sprite group for normal hero bullets.
        self.bullets = pygame.sprite.Group()
        # Sprite group for upgraded hero bullets.
        self.strengthen_bullets = pygame.sprite.Group()
    def update(self):
        """Clamp the hero inside the screen rectangle."""
        # Horizontal bounds.
        if self.rect.x <= 0:
            self.rect.x = 0
        elif self.rect.x >= SCREEN_RECT.width - self.rect.width:
            self.rect.x = SCREEN_RECT.width - self.rect.width
        # Vertical bounds.
        if self.rect.y <= 0:
            self.rect.y = 0
        elif self.rect.y >= SCREEN_RECT.height - self.rect.height:
            self.rect.y = SCREEN_RECT.height - self.rect.height
    def fire(self):
        """Fire a normal bullet from the hero's position."""
        bullet = BulletSprite(self.rect.centerx-63,self.rect.y)
        # Add it to the bullet group.
        self.bullets.add(bullet)
    def strengthen_fire(self):
        """Fire an upgraded bullet from the hero's position."""
        reinforced_bullet = Reinforced_Bullet(self.rect.centerx-10,self.rect.y)
        # Add it to the upgraded-bullet group.
        self.strengthen_bullets.add(reinforced_bullet)
class BulletSprite(GameSprite):
    """A normal hero bullet; moves upward (negative speed)."""
    def __init__(self,x,y):
        super().__init__("./bg/pic2.png",speed = -8)
        self.rect.x = x
        self.rect.y = y
        #self.speed = speed
    def update(self):
        # Parent handles the motion.
        super().update()
        # Remove the bullet once it leaves the top of the screen.
        if self.rect.y <= -self.rect.height:
            # Drop it from every sprite group.
            self.kill()
    def __del__(self):
        print("子弹对象已经销毁")
class Reinforced_Bullet(GameSprite):
    """An upgraded hero bullet; wider sprite, moves up twice as fast."""
    def __init__(self,x,y):
        super().__init__("./bg/bullet1.png",speed = -16)
        self.rect.x = x-180
        self.rect.y = y
    def update(self):
        # Parent handles the motion.
        super().update()
        # Remove the bullet once it leaves the top of the screen.
        if self.rect.y <= -self.rect.height:
            # Drop it from every sprite group.
            self.kill()
    def __del__(self):
        print("加强版子弹已经销毁")
class Destroy_Bullet_Sprite(GameSprite):
    """A regular enemy's bullet; moves downward."""
    def __init__(self,x,y):
        super().__init__("./bg/destroy_bullets.png", speed = 16)
        self.rect.x = x
        self.rect.y = y
    def update(self):
        # Parent handles the motion.
        super().update()
        # Boundary check (NOTE(review): a downward bullet's y never goes this
        # negative, so this condition may never fire — confirm intent).
        if self.rect.y <= -SCREEN_RECT.height:
            # Drop it from every sprite group.
            self.kill()
    def __del__(self):
        print("敌机子弹已销毁")
class Destroy_Boss_Bullet(GameSprite):
    """The boss's bullet; moves downward slightly slower than a regular one."""
    def __init__(self,x,y):
        super().__init__("./bg/destroy_bullets.png", speed = 14)
        self.rect.x = x
        self.rect.y = y
    def update(self):
        # Parent handles the motion.
        super().update()
        # Boundary check (NOTE(review): a downward bullet's y never goes this
        # negative, so this condition may never fire — confirm intent).
        if self.rect.y <= -SCREEN_RECT.height:
            # Drop it from every sprite group.
            self.kill()
    def __del__(self):
        print("敌机子弹已销毁")
class EnemySprite(GameSprite):
    """A regular enemy plane that drifts down from a random x position."""
    def __init__(self):
        # Enemy image with a random downward speed.
        super().__init__("./bg/png1.png",speed = random.randint(3,5))
        # Random horizontal start, spawning just above the screen.
        self.rect.x = random.randint(0,SCREEN_RECT.width - self.rect.width)
        self.rect.y = -self.rect.height
        # Sprite group for this enemy's bullets.
        self.destroy_bullets = pygame.sprite.Group()
    def destroy_fire(self):
        """Fire one bullet from this enemy's position."""
        destroy_bullet = Destroy_Bullet_Sprite(self.rect.centerx -31,self.rect.y)
        # Add it to the enemy-bullet group.
        self.destroy_bullets.add(destroy_bullet)
    def update(self):
        # Parent handles the motion.
        super().update()
        # Remove the enemy once it leaves the bottom of the screen.
        if self.rect.y > SCREEN_RECT.height:
            self.kill()
class Enemy_Boss(GameSprite):
    """The boss plane: 200 HP, enters from the top and patrols the upper half."""
    def __init__(self):
        # Boss image and speed.
        super().__init__("./bg/boss.png",speed = 5)
        # Random horizontal start, spawning just above the screen.
        self.rect.x = random.randint(0,SCREEN_RECT.width - self.rect.width)
        self.rect.y = - self.rect.height
        # Boss hit points.
        self.boss_blood_volume = 200
        # Sprite group for the boss's bullets.
        self.enemy_boss_bullets = pygame.sprite.Group()
    def destroy_boss_fire(self):
        """Fire one boss bullet from the boss's position."""
        destroy_boss_bullets = Destroy_Boss_Bullet(self.rect.centerx -31,self.rect.y)
        # Add it to the boss-bullet group.
        self.enemy_boss_bullets.add(destroy_boss_bullets)
    def update(self):
        # Parent moves the boss down by self.speed each frame.
        super().update()
        # Also drift horizontally; clamp to the screen edges.
        self.rect.x -= self.speed
        if self.rect.x <= 0:
            self.rect.x = 0
        elif self.rect.x >= SCREEN_RECT.width - self.rect.width:
            self.rect.x = SCREEN_RECT.width - self.rect.width
        # Bounce at the top edge by reversing speed; stop at mid-screen.
        if self.rect.y <= 0:
            self.rect.y = 0
            self.speed = -self.speed
        elif self.rect.y >= SCREEN_RECT.centery - self.rect.height:
            self.rect.y = SCREEN_RECT.centery - self.rect.height
class Supply1(GameSprite):
    """Falling supply pickup 1: the bullet upgrade."""
    def __init__(self):
        # Pickup image with a random falling speed.
        super().__init__("./bg/purple.png" ,speed = random.randint(2,5))
        # Random horizontal drop position, spawning just above the screen.
        self.rect.x = random.randint(0,SCREEN_RECT.width - self.rect.width)
        self.rect.y = -self.rect.height
    def update(self):
        # Parent handles the falling motion.
        super().update()
        # Remove the pickup once it falls past the bottom of the screen.
        if self.rect.y > SCREEN_RECT.height:
            self.kill()
class Supply2(GameSprite):
    """Falling supply pickup 2: the holy shield."""
    def __init__(self):
        # Pickup image with a random falling speed.
        super().__init__("./bg/shield_blue.png" , speed = random.randint(2,5))
        # Random horizontal drop position, spawning just above the screen.
        self.rect.x = random.randint(0,SCREEN_RECT.width - self.rect.width)
        self.rect.y = -self.rect.height
    def update(self):
        # Parent handles the falling motion.
        super().update()
        # Remove the pickup once it falls past the bottom of the screen.
        if self.rect.y > SCREEN_RECT.height:
            self.kill() | Aircraft-war | /Aircraft_war-1.0.tar.gz/Aircraft_war-1.0/models.py | models.py
# Default aircraft definition consumed by ADR.Components.Plane.
# Positions are in metres; angles in degrees; wings are defined by three
# chord/twist stations and two span segments per side.
plane_parameters = {
    "plane_type": "biplane",
    # --- Lower wing (wing1) geometry and airfoils ---
    "wing1_x": 0,
    "wing1_y": 0,
    "wing1_z": 0,
    "wing1_clmax_airfoil": 2.2,
    "wing1_airfoil1_name": "s1223",
    "wing1_airfoil2_name": "s1223",
    "wing1_airfoil3_name": "s1223",
    "wing1_span1": 0.8605,
    "wing1_span2": 0.3895,
    "wing1_chord1": 0.4,
    "wing1_chord2": 0.35,
    "wing1_chord3": 0.25,
    "wing1_twist1": 0,
    "wing1_twist2": 0,
    "wing1_twist3": 0,
    "wing1_incidence": 0,
    # --- Upper wing (wing2): same planform, mounted 0.6 m above ---
    "wing2_x": 0,
    "wing2_y": 0,
    "wing2_z": 0.6,
    "wing2_clmax_airfoil": 2.2,
    "wing2_airfoil1_name": "s1223",
    "wing2_airfoil2_name": "s1223",
    "wing2_airfoil3_name": "s1223",
    "wing2_span1": 0.8605,
    "wing2_span2": 0.3895,
    "wing2_chord1": 0.4,
    "wing2_chord2": 0.35,
    "wing2_chord3": 0.25,
    "wing2_twist1": 0,
    "wing2_twist2": 0,
    "wing2_twist3": 0,
    "wing2_incidence": 0,
    # --- Horizontal stabilizer ---
    "hs_x": -0.5928,
    "hs_y": 0,
    "hs_z": 0.1,
    "hs_clmax_airfoil": 1.55,
    "hs_airfoil1_name": "sd7037",
    "hs_airfoil2_name": "sd7037",
    "hs_airfoil3_name": "sd7037",
    "hs_span1": 0.8,
    "hs_span2": 0.5,
    "hs_chord1": 0.16,
    "hs_chord2": 0.14,
    "hs_chord3": 0.12,
    "hs_twist1": 0,
    "hs_twist2": 0,
    "hs_twist3": 0,
    "hs_incidence": 0,
    # --- Vertical stabilizer ---
    "vs_x": -0.7,
    "vs_y": 0,
    "vs_z": 0,
    "vs_clmax_airfoil": 2.2,
    "vs_airfoil1_name": "s1223",
    "vs_airfoil2_name": "s1223",
    "vs_airfoil3_name": "s1223",
    "vs_span1": 0.1,
    "vs_span2": 0.1,
    "vs_chord1": 0.2,
    "vs_chord2": 0.2,
    "vs_chord3": 0.1,
    "vs_twist1": 0,
    "vs_twist2": 0,
    "vs_twist3": 0,
    "vs_incidence": 0,
    # --- Motor position and thrust model ---
    "motor_x": 0.25,
    "motor_z": 0,
    "static_thrust": 45,
    "linear_decay_coefficient": 1.1,
    # --- Mass/geometry reference points and drag/friction terms ---
    "cg_x": -0.103,
    "cg_z": -0.1,
    "tpr_x": -0.153,
    "tpr_z": -0.2,
    "Iyy_TPR": 0.2,
    "CD_tp": 0.8,
    "S_tp": 0.001,
    "CD_fus": 0.6,
    "S_fus": 0.02,
    "u_k": 0.05,
}
# Environment / rule constraints used by the performance analyses.
performance_parameters = {
    "rho_air": 1.1,  # Air density [kg/m^3]
    "dist_max": 45,  # Maximum takeoff distance allowed by the rules [m]
    # Distance before the runway end at which the pilot pulls the elevator [m]
    "offset_pilot": 5,
}
def get_plane_parameters():
    """Return the module-level default plane-definition dict."""
    return plane_parameters
def get_performance_parameters():
    """Return the module-level performance/environment dict."""
    return performance_parameters | AircraftDR | /AircraftDR-0.1-py3-none-any.whl/ADR/parameters.py | parameters.py
from ADR.Components.Plane import Plane
from ADR.Core.data_manipulation import save_dict
from ADR.Analysis.Performance.Takeoff import Takeoff
from ADR.Analysis.Performance.run_Takeoff import plot_takeoff_data
from ADR.Analysis.Performance.Power import Power
from ADR.Analysis.Performance.run_Power import plot_power_curves
from ADR.Analysis.Stability.FlightStability.FlightStability import FlightStability
from ADR.Analysis.Stability.FlightStability.run_FlightStability import (
plot_stability_data,
)
from ADR.Analysis.Aerodynamics.AerodynamicPlots import plot_aerodynamic_data
from ADR.Checkers.TrimmRange import TrimmRangeChecker
from ADR.Checkers.StaticMargin import StaticMarginChecker
from ADR.Checkers.Scoreboard import MaybeAnAssassin
from ADR.Checkers.Dimensions import Ruler
from matplotlib import pyplot as plt
import traceback
import logging
def analyzer(plane_parameters, performance_parameters, plot=False):
try:
plane = Plane(plane_parameters)
plane.show_plane()
takeoff_analysis = Takeoff(plane, performance_parameters)
mtow = takeoff_analysis.get_mtow()
print("Initial MTOW is {}".format(mtow))
flight_stability = FlightStability(plane)
flight_stability.CM_plane_CG(plane.cg)
flight_stability.static_margin()
power_analysis = Power(plane, performance_parameters)
trimm_range_checker = TrimmRangeChecker(plane)
trimm_range_checker.check()
sm_checker = StaticMarginChecker(plane)
sm_checker.check()
ruler = Ruler(plane)
ruler.measure()
maybe_an_assassin = MaybeAnAssassin(plane)
maybe_an_assassin.score_or_kill()
print("Final MTOW is {}".format(plane.mtow))
if plot == True:
plot_takeoff_data(takeoff_analysis, mtow)
plot_stability_data(flight_stability)
plot_power_curves(power_analysis)
plot_aerodynamic_data(plane)
plt.show()
if plane.dead == True:
save_dict(plane_parameters, performance_parameters, mtow, "dead")
else:
save_dict(plane_parameters, performance_parameters, mtow, "alive")
return (plane.score,)
except Exception:
logging.error(traceback.format_exc())
print(
"-----------------------------------Error-----------------------------------"
)
return (0,) | AircraftDR | /AircraftDR-0.1-py3-none-any.whl/ADR/analyzer.py | analyzer.py |
import ADR
import pandas as pd
def get_filepath(folder, filename):
    """Build an absolute path to a data file shipped inside the ADR package.

    `folder` is a package-relative directory string (with trailing slash)
    and `filename` the file inside it; both are appended to the package
    root directory derived from ``ADR.__file__``.
    """
    package_root = ADR.__file__.replace("__init__.py", "")
    return "".join((package_root, folder, filename))
def import_airfoil_aerodynamic_data(airfoil_name):
    """Load the Cl/Cd/Cm vs alpha polars for an airfoil from its bundled CSV.

    The file follows the ``xf-<name>-il-200000.csv`` naming scheme; its
    first 10 rows are header metadata and the first column (alpha) is
    used as the index.

    Returns:
        tuple: (Cl_alpha, Cd_alpha, Cm_alpha) one-column DataFrames
        indexed by alpha.
    """
    csv_path = get_filepath(
        "World/Profiles/AerodynamicData/", "xf-" + airfoil_name + "-il-200000.csv"
    )
    aero_df = pd.read_csv(csv_path, skiprows=10, index_col=0)
    return aero_df[["Cl"]], aero_df[["Cd"]], aero_df[["Cm"]]
def import_airfoil_coordinates(airfoil_name):
    """Read a Selig-format airfoil coordinate file bundled with the package.

    The raw ``.dat`` file (as copied from an online database) may contain
    irregular whitespace, so it is first normalised into a
    ``<name>_edited.dat`` file with single-space separators and an
    ``x y`` header row, which is then parsed with pandas.

    Returns:
        tuple: (x, y) numpy arrays with the horizontal and vertical
        coordinates of the profile outline.
    """
    airfoil_coordinates_filepath = get_filepath(
        "World/Profiles/Coordinates/", airfoil_name + ".dat"
    )
    edited_filepath = airfoil_coordinates_filepath.replace(".dat", "_edited.dat")

    # Context managers guarantee both files are closed even if a read or
    # write fails part-way through (the original open()/close() pairs
    # leaked the handles on error).
    with open(airfoil_coordinates_filepath, "r") as raw_file, \
            open(edited_filepath, "wt") as edited_file:
        edited_file.write("x y \n")
        for line in raw_file:
            edited_file.write(" ".join(line.split()) + "\n")

    # skiprows=[1] drops the line right after the header — presumably the
    # airfoil-name line that Selig files usually start with (TODO confirm).
    df = pd.read_csv(edited_filepath, sep=" ", skiprows=[1])
    return df.loc[:, "x"].values, df.loc[:, "y"].values
from ADR import parameters
from ADR.Components.Plane import Plane
from ADR.Analysis.Performance.Takeoff import Takeoff
from matplotlib import pyplot as plt
def plot_takeoff_data(takeoff_analysis, mtow):
    """Plot the takeoff time histories stored for the given MTOW.

    Draws a 2x3 grid of subplots (lift forces, drags, moments, pitch
    rate, angles, kinematics) from ``takeoff_analysis.mass_dict[mtow]``.
    """
    history = takeoff_analysis.mass_dict[mtow]
    fig1, axes = plt.subplots(2, 3)

    # (column, label) pairs for each subplot, in plotting order.
    panels = [
        [("N", "Normal"), ("L", "Lift"), ("L_w1", "Lift Wing1"),
         ("L_w2", "Lift Wing2"), ("L_hs", "Lift HS")],
        [("D", "Drag"), ("D_w1", "Drag Wing1"),
         ("D_w2", "Drag Wing2"), ("D_hs", "Drag HS")],
        [("M", "Moment"), ("M_w1", "Moment Wing1"),
         ("M_w2", "Moment Wing2"), ("M_hs", "Moment HS")],
        [("dTheta", "dTheta")],
        [("incidence_hs", "HS incidence"), ("theta", "Theta")],
        [("dist_x", "Distance"), ("V_x", "Velocity")],
    ]
    for axis, series in zip(axes.flatten(), panels):
        for column, label in series:
            axis.plot(history[column], label=label)
        axis.grid()
        axis.legend()
def takeoff(plot=True):
    """Run a standalone takeoff analysis with the default parameters.

    Loads the plane/performance parameters from ``ADR.parameters``,
    computes the MTOW via the Takeoff analysis, prints the result and
    optionally shows the diagnostic plots.

    Parameters:
        plot (bool): when True, display the takeoff time-history plots.
    """
    plane_parameters = parameters.get_plane_parameters()
    performance_data = parameters.get_performance_parameters()

    plane = Plane(plane_parameters)
    takeoff_analysis = Takeoff(plane, performance_data)
    mtow = takeoff_analysis.get_mtow()

    print("Final MTOW is {}".format(mtow))
    print("V_takeoff : ", plane.V_takeoff)

    if plot:  # idiomatic truth test (was `plot == True`)
        plot_takeoff_data(takeoff_analysis, mtow)
        plt.show()
# Allow running this module directly as a quick takeoff sanity check.
if __name__ == "__main__":
    takeoff()
from math import sin, cos, radians, degrees
import pandas as pd
from ADR.Methods.FundamentalEquations import drag
class Takeoff:
    """Time-marching ground-run simulation used to size the plane's MTOW.

    The candidate mass is increased in small steps; for each mass the
    ground run is integrated in time (force balance plus pitching moment
    about the TPR, the main-gear rotation point) until the plane either
    lifts off or fails — runs out of runway, rolls backwards, or would
    lift off below stall speed. The heaviest mass that still takes off
    remains stored in ``plane.mtow``.
    """

    def __init__(self, plane, performance_parameters):
        """Store the plane, the environment and the TPR lever arms.

        Parameters:
            plane: Plane instance; its ``mtow`` and ``V_takeoff`` are
                updated in place by :meth:`get_mtow`.
            performance_parameters (dict): expects ``rho_air`` [kg/m^3],
                ``dist_max`` (available runway length [m]) and
                ``offset_pilot`` (distance before the runway end at which
                the pilot deflects the elevator [m]).
        """
        self.plane = plane
        self.rho_air = performance_parameters.get("rho_air")
        self.dist_max = performance_parameters.get("dist_max")
        self.offset_pilot = performance_parameters.get("offset_pilot")
        # Horizontal (x) and vertical (z) lever arms of each component's
        # aerodynamic centre / CG / motor about the TPR, used in the
        # pitching-moment balance during the ground run.
        self.distx_wing1_tpr = abs(plane.wing1.ca.abs_x - plane.tpr.x)
        self.distz_wing1_tpr = abs(plane.wing1.ca.abs_z - plane.tpr.z)
        self.distx_wing2_tpr = abs(plane.wing2.ca.abs_x - plane.tpr.x)
        self.distz_wing2_tpr = abs(plane.wing2.ca.abs_z - plane.tpr.z)
        self.distx_hs_tpr = abs(plane.hs.ca.abs_x - plane.tpr.x)
        self.distz_hs_tpr = abs(plane.hs.ca.abs_z - plane.tpr.z)
        self.distx_cg_tpr = abs(plane.cg.x - plane.tpr.x)
        self.distz_cg_tpr = abs(plane.cg.z - plane.tpr.z)
        self.distx_motor_tpr = abs(plane.motor.x - plane.tpr.x)
        self.distz_motor_tpr = abs(plane.motor.z - plane.tpr.z)

    def get_mtow(self):
        """Find the maximum takeoff mass by simulating ever-heavier runs.

        Returns:
            float: the MTOW [kg], also left in ``self.plane.mtow``. The
            per-mass time histories are kept in ``self.mass_dict`` as
            DataFrames indexed by time.
        """
        m = 1  # Initial total airplane mass [kg]
        g = 9.81  # Gravitational constant [m/s^2]
        dt = 0.01  # Discrete time step [s]
        dm = 0.1  # Mass increment per outer iteration [kg]
        incidence_active_hs = (
            # Incidence added to the elevator when the pilot triggers it [deg]
            10
        )
        takeoff_failed = False
        self.mass_dict = {}
        # Outer loop: keep adding mass until a run fails; plane.mtow then
        # holds the last mass attempted.
        while not takeoff_failed:
            m = m + dm
            self.plane.mtow = m
            theta_airplane_deg = 0  # Pitch angle relative to the runway [deg]
            V_x = 0  # Initial horizontal speed [m/s]
            pilot_triggered = False  # Has the pilot deflected the elevator?
            dist_x = 0  # Distance covered along the runway [m]
            N = 0.1  # Ground normal force [N]
            t = 0  # Elapsed time [s]
            incidence_w1 = 0  # Wing1 incidence angle [deg]
            incidence_w2 = 0  # Wing2 incidence angle [deg]
            incidence_hs = 0  # Elevator (HS) incidence angle [deg]
            on_ground = True
            takeoff_failed = False
            going_forward = True
            time_dict = {}
            # Inner loop: integrate this mass's ground run in time.
            while on_ground and going_forward and not takeoff_failed:
                # Angles of attack are pitch plus each surface's incidence.
                alpha_w1 = theta_airplane_deg + incidence_w1
                if self.plane.plane_type == "biplane":
                    alpha_w2 = theta_airplane_deg + incidence_w2
                alpha_hs = theta_airplane_deg + incidence_hs
                E = self.plane.motor.thrust(V_x)  # Available thrust [N]
                t = t + dt
                # Lift balance: HS lift acts downwards in this convention.
                L_w1 = self.plane.wing1.lift(self.rho_air, V_x, alpha_w1)
                L_w2 = 0  # Value if there's no wing2
                if self.plane.plane_type == "biplane":
                    L_w2 = self.plane.wing2.lift(self.rho_air, V_x, alpha_w2)
                L_hs = self.plane.hs.lift(self.rho_air, V_x, alpha_hs)
                L = L_w1 + L_w2 - L_hs
                E_z = E * sin(radians(theta_airplane_deg))
                W = m * g
                # Vertical balance while on the ground gives the gear load.
                N = W - L - E_z
                E_x = E * cos(radians(theta_airplane_deg))
                D_w1 = self.plane.wing1.drag(self.rho_air, V_x, alpha_w1)
                D_w2 = 0  # Value if there's no wing2
                if self.plane.plane_type == "biplane":
                    D_w2 = self.plane.wing2.drag(self.rho_air, V_x, alpha_w2)
                D_hs = self.plane.hs.drag(self.rho_air, V_x, alpha_hs)
                D_tp = drag(self.rho_air, V_x,
                            self.plane.S_tp, self.plane.CD_tp)
                D_fus = drag(self.rho_air, V_x,
                             self.plane.S_fus, self.plane.CD_fus)
                D = D_w1 + D_w2 + D_hs + D_tp + D_fus
                F_at = self.plane.u_k * N  # Rolling friction [N]
                F_x = E_x - D - F_at
                dV_x = ((F_x) / m) * dt
                V_x = V_x + dV_x
                dist_x = dist_x + V_x * dt
                M_w1 = self.plane.wing1.moment(self.rho_air, V_x, alpha_w1)
                M_w2 = 0  # Value if there's no wing2
                if self.plane.plane_type == "biplane":
                    M_w2 = self.plane.wing2.moment(self.rho_air, V_x, alpha_w2)
                M_hs = self.plane.hs.moment(self.rho_air, V_x, alpha_hs)
                # Pitching moment about the TPR from vertical forces...
                M_x = (
                    E_z * self.distx_motor_tpr
                    - W * self.distx_cg_tpr
                    + L_w1 * self.distx_wing1_tpr
                    + L_w2 * self.distx_wing2_tpr
                    + L_hs * self.distx_hs_tpr
                )
                # ...and from horizontal forces.
                M_z = (
                    -E_x * self.distz_motor_tpr
                    + D_w1 * self.distz_wing1_tpr
                    + D_w2 * self.distz_wing2_tpr
                    + D_hs * self.distz_hs_tpr
                )
                M = M_x + M_z - M_hs + M_w1 + M_w2
                # Integrate the rotation about the TPR.
                dOmega = (M / self.plane.Iyy_TPR) * dt
                dTheta = dOmega * dt
                # The landing gear keeps the pitch from going below zero.
                if theta_airplane_deg + degrees(dTheta) >= 0:
                    theta_airplane_deg = theta_airplane_deg + degrees(dTheta)
                # Pilot deflects the elevator once, when within
                # offset_pilot of the end of the runway.
                if (
                    self.dist_max - dist_x
                ) <= self.offset_pilot and pilot_triggered == False:
                    incidence_hs += incidence_active_hs
                    pilot_triggered = True
                    alpha_hs = theta_airplane_deg + incidence_hs
                if dist_x > self.dist_max:
                    takeoff_failed = True
                else:
                    takeoff_failed = False
                if N > 0:
                    on_ground = True
                else:
                    # Wheels unloaded: liftoff. Fail if below stall speed
                    # or if drag exceeds available thrust at this moment.
                    on_ground = False
                    V_takeoff = V_x
                    V_stall = self.plane.get_V_stall(self.rho_air)
                    if V_takeoff < V_stall or D > E_x:
                        takeoff_failed = True
                    self.plane.V_takeoff = V_takeoff
                if dist_x > -5:
                    going_forward = True
                else:
                    going_forward = False
                    takeoff_failed = True
                    print("Indo pra tras")  # "rolling backwards"
                # Record this time step for later plotting/inspection.
                time_data = [
                    theta_airplane_deg,
                    E,
                    L,
                    L_w1,
                    L_w2,
                    L_hs,
                    D,
                    D_w1,
                    D_w2,
                    D_hs,
                    N,
                    F_at,
                    V_x,
                    dist_x,
                    M,
                    M_w1,
                    M_w2,
                    M_hs,
                    dTheta,
                    incidence_hs,
                ]
                time_dict[t] = time_data
            time_df = pd.DataFrame.from_dict(
                time_dict,
                orient="index",
                columns=[
                    "theta",
                    "E",
                    "L",
                    "L_w1",
                    "L_w2",
                    "L_hs",
                    "D",
                    "D_w1",
                    "D_w2",
                    "D_hs",
                    "N",
                    "F_at",
                    "V_x",
                    "dist_x",
                    "M",
                    "M_w1",
                    "M_w2",
                    "M_hs",
                    "dTheta",
                    "incidence_hs",
                ],
            )
            time_df.index.name = "t"
            self.mass_dict[m] = time_df
        return self.plane.mtow
import numpy as np
from ADR.Core.data_manipulation import dict_to_dataframe
from ADR.Core.data_manipulation import find_df_roots
from ADR.Methods.FundamentalEquations import drag
class Power:
    """Level-flight power analysis over the plane's speed envelope.

    Computes power required and available across the velocity range
    between stall speed and the CL-min speed; if there is no positive
    power excess anywhere, the plane's MTOW is reduced until level
    flight becomes possible (or MTOW reaches zero). Finally derives the
    minimum/maximum level-flight speeds.
    """

    def __init__(self, plane, performance_parameters):
        """Cache the plane's surfaces and run the full analysis.

        Parameters:
            plane: Plane instance; ``mtow``, ``V_min``, ``V_max``,
                ``alpha_min`` and ``alpha_max`` are updated in place.
            performance_parameters (dict): must provide ``rho_air``.
        """
        self.plane = plane
        self.wing1 = plane.wing1
        self.wing2 = plane.wing2
        self.hs = plane.hs
        self.area_ref = plane.wing1.area
        self.rho = performance_parameters.get("rho_air")
        self.checks_and_update_mtow()

    def checks_and_update_mtow(self):
        """Reduce MTOW (1 kg steps) until some power excess exists.

        NOTE(review): inside the loop ``has_power_excess`` is re-evaluated
        *before* the decrement and the recompute, so the MTOW is reduced
        one extra step after excess first appears — confirm this is
        intended.
        """
        self.plane.get_V_stall(self.rho)
        self.plane.get_V_CLmin(self.rho)
        self.velocity_range = np.arange(
            self.plane.V_stall, self.plane.V_CLmin, 0.1)
        self.power_available()
        self.power_required()
        self.power_excess()
        positive_power = self.power_excess_df["Power excess"] > 0
        has_power_excess = positive_power.any()
        while has_power_excess == False and self.plane.mtow != 0:
            positive_power = self.power_excess_df["Power excess"] > 0
            has_power_excess = positive_power.any()
            # TODO: This is a big reduce-step. We should get this down by getting the power analysis time down.
            self.plane.mtow -= 1
            print("New MTOW: {}".format(self.plane.mtow))
            if self.plane.mtow > 0:
                self.power_available()
                self.power_required()
                self.power_excess()
            else:
                self.plane.mtow = 0
                print("Aircraft cannot sustain flight even with zero weight")
        self.get_V_min_max()

    def power_required(self):
        """Thrust/power required for level flight at each velocity.

        For each speed, the trim alpha is found by raising alpha in 0.1
        deg steps until total lift carries the weight; if stall_max is
        reached first, ``alpha_nivel`` is set to None.

        NOTE(review): when ``alpha_nivel`` ends up None, the drag calls
        below still receive it — verify the surface drag methods tolerate
        a None alpha (np.interp would not).

        Returns:
            tuple: (alpha_df, thrust_required_df, power_required_df),
            all indexed by velocity.
        """
        thrust_required_dict = {}
        power_required_dict = {}
        alpha_dict = {}
        for velocity in self.velocity_range:
            total_lift = 0
            alpha = self.plane.stall_min
            # Raise alpha until lift equals weight (level flight trim).
            while total_lift < self.plane.mtow * 9.81:
                alpha += 0.1
                total_lift = self.wing1.lift(self.rho, velocity, alpha) - self.hs.lift(
                    self.rho, velocity, alpha
                )
                if self.plane.plane_type == "biplane":
                    total_lift += self.wing2.lift(self.rho, velocity, alpha)
                if alpha >= self.plane.stall_max:
                    alpha_nivel = None
                    break
                else:
                    alpha_nivel = alpha
            # Total drag = parasite (landing gear + fuselage) + surfaces.
            thrust_required = (
                drag(self.rho, velocity, self.plane.S_tp, self.plane.CD_tp)
                + drag(self.rho, velocity, self.plane.S_fus, self.plane.CD_fus)
                + self.wing1.drag(self.rho, velocity, alpha_nivel)
                + self.wing2.drag(self.rho, velocity, alpha_nivel)
                + self.hs.drag(self.rho, velocity, alpha_nivel)
            )
            alpha_dict[velocity] = alpha_nivel
            thrust_required_dict[velocity] = thrust_required
        # Power = thrust x velocity.
        for velocity in thrust_required_dict:
            power_required_dict[velocity] = thrust_required_dict[velocity] * velocity
        self.thrust_required_dict = thrust_required_dict
        self.power_required_dict = power_required_dict
        self.alpha_dict = alpha_dict
        self.alpha_df = dict_to_dataframe(alpha_dict, "Alpha", "Velocity")
        self.thrust_required_df = dict_to_dataframe(
            thrust_required_dict, "Thrust required", "Velocity"
        )
        self.power_required_df = dict_to_dataframe(
            power_required_dict, "Power required", "Velocity"
        )
        return self.alpha_df, self.thrust_required_df, self.power_required_df

    def power_available(self):
        """Motor thrust/power available at each velocity.

        Returns:
            tuple: (thrust_available_df, power_available_df) indexed by
            velocity.
        """
        thrust_available_dict = {}
        power_available_dict = {}
        for velocity in self.velocity_range:
            thrust_available = self.plane.motor.thrust(velocity)
            thrust_available_dict[velocity] = thrust_available
        for velocity in thrust_available_dict:
            power_available_dict[velocity] = thrust_available_dict[velocity] * velocity
        self.thrust_available_dict = thrust_available_dict
        self.power_available_dict = power_available_dict
        self.thrust_available_df = dict_to_dataframe(
            thrust_available_dict, "Thrust available", "Velocity"
        )
        self.power_available_df = dict_to_dataframe(
            power_available_dict, "Power available", "Velocity"
        )
        return self.thrust_available_df, self.power_available_df

    def power_excess(self):
        """Power excess (available minus required) at each velocity."""
        power_excess_dict = {}
        for velocity in self.power_available_dict:
            power_required = self.power_required_dict[velocity]
            power_available = self.power_available_dict[velocity]
            power_excess_dict[velocity] = power_available - power_required
        self.power_excess_dict = power_excess_dict
        self.power_excess_df = dict_to_dataframe(
            power_excess_dict, "Power excess", "Velocity"
        )

    def get_V_min_max(self):
        """Derive V_min/V_max from the zero crossings of the power excess.

        One root: excess is positive from stall up to the root. Two
        roots: level flight is possible only between them. No roots:
        excess is positive over the whole range.

        NOTE(review): ``alpha_dict[self.plane.V_max]`` requires the
        (possibly interpolated) root to be an exact key of the velocity
        grid — confirm this cannot raise KeyError.
        """
        roots = find_df_roots(self.power_excess_df, "Power excess")
        if len(roots) == 1:
            self.plane.V_min = self.plane.V_stall
            self.plane.V_max = roots[0]
            alpha_max = self.alpha_df.max()[0]
        elif len(roots) == 2:
            self.plane.V_min = roots[0]
            self.plane.V_max = roots[1]
            alpha_max = np.interp(
                self.plane.V_min, self.alpha_df.index.values, self.alpha_df["Alpha"]
            )
        elif len(roots) == 0:
            self.plane.V_min = self.plane.V_stall
            self.plane.V_max = np.amax(self.velocity_range)
            alpha_max = self.alpha_df.max()[0]
        self.plane.alpha_min = self.alpha_dict[self.plane.V_max]
        print("Alpha_max: {}".format(alpha_max))
        self.plane.alpha_max = alpha_max
from ADR.Components.Aerodynamic_components.Wing import Wing
from ADR.Components.Aerodynamic_components.HS import HS
from ADR.Components.Propulsion.Motor import Motor
from ADR.Components.Points.CG import CG
from ADR.Components.Points.TPR import TPR
from ADR.Core.data_manipulation import dict_to_dataframe
import numpy as np
class Plane:
    """Aggregate aircraft model: lifting surfaces, motor, reference points.

    The constructor unpacks a flat parameter dict into component objects
    (Wing, HS, Motor, CG, TPR) and initialises the result fields that the
    performance/stability analyses and the checkers fill in later (MTOW,
    speed envelope, trim flags, score, ...).
    """

    def __init__(self, data):
        """Build all components from the flat ``data`` parameter dict."""
        self.data = data
        # "biplane" enables wing2 in the analyses; note wing2 is still
        # constructed for monoplanes, just not used.
        self.plane_type = data.get("plane_type")
        wing1_data = {
            "x": data.get("wing1_x"),
            "y": data.get("wing1_y"),
            "z": data.get("wing1_z"),
            "airfoil_clmax": data.get("wing1_clmax_airfoil"),
            "airfoil1_name": data.get("wing1_airfoil1_name"),
            "airfoil2_name": data.get("wing1_airfoil2_name"),
            "airfoil3_name": data.get("wing1_airfoil3_name"),
            "span1": data.get("wing1_span1"),
            "span2": data.get("wing1_span2"),
            "chord1": data.get("wing1_chord1"),
            "chord2": data.get("wing1_chord2"),
            "chord3": data.get("wing1_chord3"),
            "twist1": data.get("wing1_twist1"),
            "twist2": data.get("wing1_twist2"),
            "twist3": data.get("wing1_twist3"),
            "incidence": data.get("wing1_incidence"),
            "CM_ca": data.get("wing1_CM_ca"),
        }
        wing2_data = {
            "x": data.get("wing2_x"),
            "y": data.get("wing2_y"),
            "z": data.get("wing2_z"),
            "airfoil_clmax": data.get("wing2_clmax_airfoil"),
            "airfoil1_name": data.get("wing2_airfoil1_name"),
            "airfoil2_name": data.get("wing2_airfoil2_name"),
            "airfoil3_name": data.get("wing2_airfoil3_name"),
            "span1": data.get("wing2_span1"),
            "span2": data.get("wing2_span2"),
            "chord1": data.get("wing2_chord1"),
            "chord2": data.get("wing2_chord2"),
            "chord3": data.get("wing2_chord3"),
            "twist1": data.get("wing2_twist1"),
            "twist2": data.get("wing2_twist2"),
            "twist3": data.get("wing2_twist3"),
            "incidence": data.get("wing2_incidence"),
            "CM_ca": data.get("wing2_CM_ca"),
        }
        hs_data = {
            "x": data.get("hs_x"),
            "y": data.get("hs_y"),
            "z": data.get("hs_z"),
            "airfoil_clmax": data.get("hs_clmax_airfoil"),
            "airfoil1_name": data.get("hs_airfoil1_name"),
            "airfoil2_name": data.get("hs_airfoil2_name"),
            "airfoil3_name": data.get("hs_airfoil3_name"),
            "span1": data.get("hs_span1"),
            "span2": data.get("hs_span2"),
            "chord1": data.get("hs_chord1"),
            "chord2": data.get("hs_chord2"),
            "chord3": data.get("hs_chord3"),
            "twist1": data.get("hs_twist1"),
            "twist2": data.get("hs_twist2"),
            "twist3": data.get("hs_twist3"),
            "incidence": data.get("hs_incidence"),
            "CM_ca": data.get("hs_CM_ca"),
        }
        motor_data = {
            "x": data.get("motor_x"),
            "y": data.get("motor_y"),
            "z": data.get("motor_z"),
            "static_thrust": data.get("static_thrust"),
            "linear_decay_coefficient": data.get("linear_decay_coefficient"),
        }
        cg_data = {"x": data.get("cg_x"), "z": data.get("cg_z")}
        tpr_data = {"x": data.get("tpr_x"), "z": data.get("tpr_z")}
        # Inertia about the TPR, parasite-drag model (landing gear and
        # fuselage) and ground rolling-friction coefficient.
        self.Iyy_TPR = data.get("Iyy_TPR")
        self.CD_tp = data.get("CD_tp")
        self.S_tp = data.get("S_tp")
        self.CD_fus = data.get("CD_fus")
        self.S_fus = data.get("S_fus")
        self.u_k = data.get("u_k")
        self.wing1 = Wing(wing1_data)
        self.wing2 = Wing(wing2_data)
        self.hs = HS(hs_data)
        # self.vs = VS(vs_data)
        self.motor = Motor(motor_data)
        self.cg = CG(cg_data)
        self.tpr = TPR(tpr_data)
        # Analysis results; populated later by Takeoff/Power/
        # FlightStability and the checker classes.
        self.V_stall = 0
        self.V_min = 0
        self.V_max = 0
        self.V_takeoff = 0
        self.mtow = 5
        self.alpha_min = 0
        self.alpha_max = 0
        self.alpha_trimm_min = 0
        self.alpha_trimm_max = 0
        self.tail_trimm = 0  # later replaced by a DataFrame (see set_alpha_trimmed)
        self.SM_alpha = None
        self.trimm_for_low_angles = False
        self.trimm_for_high_angles = False
        self.positive_sm_for_positive_alphas = False
        self.dimensions_are_good = False
        self.total_dimensions = 0
        self.dead_weight = 0
        self.payload = 0
        self.score = None
        self.dead = False
        self.get_CL_alpha_plane()
        self.get_CD_alpha_plane()
        self.set_alpha_range()
        self.hs.set_incidence_range(self.stall_min, self.stall_max)

    def __str__(self):
        return self.__class__.__name__

    def set_alpha_range(self):
        """Derive the plane's usable alpha range from the wings' stalls."""
        wings_stall_min = max(self.wing1.stall_min, self.wing2.stall_min)
        wings_stall_max = min(self.wing1.stall_max, self.wing2.stall_max)
        # incidence_min = min(self.wing1.incidence, self.wing2.incidence)
        # incidence_max = max(self.wing1.incidence, self.wing2.incidence)
        # TODO: Incidence for now is fixed on 0 and should be better implemented
        self.stall_min = wings_stall_min
        self.stall_max = wings_stall_max
        self.alpha_range = np.arange(self.stall_min, self.stall_max + 1)

    def set_alpha_trimmed(self, alpha_airplane):
        """Set all surfaces to the given plane alpha at trimmed flight.

        Requires ``self.tail_trimm`` to have been replaced by the trim
        DataFrame (by the stability analysis) before being called; the
        HS incidence is interpolated from it.
        """
        self.wing1.update_alpha(alpha_airplane)
        if self.plane_type == "biplane":
            self.wing2.update_alpha(alpha_airplane)
        hs_incidence = np.interp(
            alpha_airplane,
            self.tail_trimm.index.values,
            self.tail_trimm["hs_incidence"],
        )
        self.hs.incidence = hs_incidence
        self.hs.update_alpha(alpha_airplane)

    def get_CL_alpha_plane(self):
        """Whole-plane CL vs alpha, referenced to wing1's area.

        HS lift is subtracted (tail-down convention); wing2 is added
        for biplanes.
        """
        CL_alpha_plane = {}
        for alpha in np.arange(-10, 21, 1.0):
            numerator = (
                self.wing1.get_CL(alpha) * self.wing1.area
                - self.hs.get_CL(alpha) * self.hs.area
            )
            if self.plane_type == "biplane":
                numerator += self.wing2.get_CL(alpha) * self.wing2.area
            CL_alpha_plane[alpha] = numerator / self.wing1.area
        self.CL_alpha = dict_to_dataframe(CL_alpha_plane, "CL", "alpha")
        return self.CL_alpha

    def get_CD_alpha_plane(self):
        """Whole-plane CD vs alpha, referenced to wing1's area.

        NOTE(review): the HS contribution is *subtracted*, mirroring
        get_CL_alpha_plane — confirm drag should not be added instead.
        """
        CD_alpha_plane = {}
        for alpha in np.arange(-10, 21, 1.0):
            numerator = (
                self.wing1.get_CD(alpha) * self.wing1.area
                - self.hs.get_CD(alpha) * self.hs.area
            )
            if self.plane_type == "biplane":
                numerator += self.wing2.get_CD(alpha) * self.wing2.area
            CD_alpha_plane[alpha] = numerator / self.wing1.area
        self.CD_alpha = dict_to_dataframe(CD_alpha_plane, "CD", "alpha")
        return self.CD_alpha

    def get_V_stall(self, rho):
        """Stall speed [m/s] at the current MTOW for air density ``rho``."""
        self.CL_max = self.CL_alpha.max()[0]
        self.V_stall = (
            (2 * self.mtow * 9.81) / (rho * self.wing1.area * self.CL_max)
        ) ** 0.5
        return self.V_stall

    def get_V_CLmin(self, rho):
        """Speed [m/s] needed to sustain weight at the minimum CL."""
        self.CL_min = self.CL_alpha.min()[0]
        self.V_CLmin = (
            (2 * self.mtow * 9.81) / (rho * self.wing1.area * self.CL_min)
        ) ** 0.5
        return self.V_CLmin

    def show_plane(self):
        """Print a summary of the plane (currently a no-op).

        The original implementation — a long commented-out dump of every
        wing/HS/motor/CG/TPR parameter — was removed for readability; it
        only ever printed the same ``data`` keys unpacked in __init__.
        """
        pass
import pandas as pd
import numpy as np
from ADR.Core.import_functions import (
import_airfoil_aerodynamic_data,
import_airfoil_coordinates,
)
class Airfoil:
    """Geometric and aerodynamic description of a 2D airfoil.

    Loads the Cl/Cd/Cm polars and the Selig-format coordinate file for
    ``airfoil_name``, splits the outline into upper ("ext") and lower
    ("int") surfaces, and derives the camber line, outline perimeter and
    cross-section area.
    """

    def __init__(self, data):
        """Load all airfoil data for ``data["airfoil_name"]``."""
        self.name = data.get("airfoil_name")
        self.Cl_alpha, self.Cd_alpha, self.Cm_alpha = import_airfoil_aerodynamic_data(
            self.name
        )
        self.airfoil_x_coords, self.airfoil_y_coords = import_airfoil_coordinates(
            self.name
        )
        self.import_camber_line()
        self.generate_upper_surface_coordinates()
        self.generate_inner_surface_coordinates()
        self.calc_perimeter()
        self.calc_area()

    def __str__(self):
        return self.name

    def get_Cl(self, alpha):
        """Interpolate the lift coefficient at the given alpha [deg]."""
        Cl = np.interp(alpha, self.Cl_alpha.index.values, self.Cl_alpha["Cl"])
        return Cl

    def get_Cd(self, alpha):
        """Interpolate the drag coefficient at the given alpha [deg]."""
        Cd = np.interp(alpha, self.Cd_alpha.index.values, self.Cd_alpha["Cd"])
        return Cd

    def get_Cm(self, alpha):
        """Interpolate the pitching-moment coefficient at alpha [deg]."""
        Cm = np.interp(alpha, self.Cm_alpha.index.values, self.Cm_alpha["Cm"])
        return Cm

    def generate_upper_surface_coordinates(self):
        """Split off the upper-surface coordinates, leading edge first."""
        # Number of points in the full outline.
        self.n = self.airfoil_x_coords.size
        # The smallest x locates the leading edge (Selig files run
        # TE -> upper surface -> LE -> lower surface -> TE).
        self.minpos = np.argmin(self.airfoil_x_coords)
        self.airfoil_x_coords_ext = np.flip(
            self.airfoil_x_coords[0: self.minpos])
        self.airfoil_y_coords_ext = np.flip(
            self.airfoil_y_coords[0: self.minpos])
        self.n_ext = self.airfoil_x_coords_ext.size

    def generate_inner_surface_coordinates(self):
        """Split off the lower-surface coordinates, leading edge first."""
        # NOTE(review): the slice stops at n - 1, dropping the outline's
        # last point — presumably a duplicated trailing-edge point in the
        # source files; confirm for files that do not repeat it.
        self.airfoil_x_coords_int = self.airfoil_x_coords[
            self.minpos: self.n - 1
        ]
        self.airfoil_y_coords_int = self.airfoil_y_coords[
            self.minpos: self.n - 1
        ]
        self.n_int = (self.airfoil_x_coords_int.size)

    def calc_perimeter(self):
        """Compute the outline perimeter and cumulative arc lengths.

        Bug fix: the original loops ran only to ``n - 2`` while the
        ``np.empty`` segment buffers held ``n - 1`` entries, so the last
        entry of each buffer stayed uninitialised garbage and was summed
        into the perimeter. All ``n - 1`` segments between the ``n``
        surface points are now filled (vectorised).
        """
        x_ext, y_ext = self.airfoil_x_coords_ext, self.airfoil_y_coords_ext
        x_int, y_int = self.airfoil_x_coords_int, self.airfoil_y_coords_int

        # Euclidean length of each segment along the two surfaces.
        self.delta_perimeter_array_ext = np.hypot(np.diff(x_ext), np.diff(y_ext))
        self.delta_perimeter_array_int = np.hypot(np.diff(x_int), np.diff(y_int))

        perimeter_ext = np.sum(self.delta_perimeter_array_ext)  # upper surface
        perimeter_int = np.sum(self.delta_perimeter_array_int)  # lower surface
        self.perimeter = perimeter_ext + perimeter_int  # full outline

        # Cumulative arc length at the end of each segment.
        self.sum_perimeter_ext_pos = np.cumsum(self.delta_perimeter_array_ext)
        self.sum_perimeter_int_pos = np.cumsum(self.delta_perimeter_array_int)

    def calc_area(self):
        """Compute the cross-section area enclosed by the outline.

        Each surface contributes trapezoid slices between consecutive
        points and the chord line; the enclosed area is the upper-surface
        integral minus the (typically negative) lower-surface integral.
        Same off-by-one fix as calc_perimeter: all ``n - 1`` slices are
        filled instead of leaving the last one as uninitialised memory.
        """
        x_ext, y_ext = self.airfoil_x_coords_ext, self.airfoil_y_coords_ext
        x_int, y_int = self.airfoil_x_coords_int, self.airfoil_y_coords_int

        # Trapezoid slice areas between each surface and the chord line.
        self.delta_area_array_ext = (
            0.5 * (y_ext[:-1] + y_ext[1:]) * np.diff(x_ext)
        )
        self.delta_area_array_int = (
            0.5 * (y_int[:-1] + y_int[1:]) * np.diff(x_int)
        )

        area_ext = np.sum(self.delta_area_array_ext)
        area_int = np.sum(self.delta_area_array_int)
        self.area = area_ext - area_int  # enclosed profile area

    def import_camber_line(self):
        """Build the camber line as the mean of upper and lower surfaces.

        Both surfaces are resampled onto a common 0..1 chord grid (step
        0.02) and averaged; the result is stored in ``self.Camber_line``
        as a DataFrame with columns "x" and "y".
        """
        np_array_x, np_array_y = import_airfoil_coordinates(self.name)
        x_interp = np.arange(0, 1, 0.02)
        # Leading-edge index (smallest x) splits upper from lower surface.
        minpos = np.argmin(np_array_x)
        n = np_array_x.size
        # Upper surface, flipped so x runs from LE to TE (as np.interp needs
        # ascending x).
        np_array_x_ext = np.flip(np_array_x[0:minpos])
        np_array_y_ext = np.flip(np_array_y[0:minpos])
        # Lower surface (already LE -> TE); the last point is dropped as in
        # generate_inner_surface_coordinates.
        np_array_x_int = np_array_x[minpos: n - 1]
        np_array_y_int = np_array_y[minpos: n - 1]
        y_interp_ext = np.interp(x_interp, np_array_x_ext, np_array_y_ext)
        y_interp_int = np.interp(x_interp, np_array_x_int, np_array_y_int)
        y_camber = (y_interp_int + y_interp_ext) / 2
        camber_line = pd.DataFrame({"x": x_interp, "y": y_camber})
        self.Camber_line = camber_line
from ADR.Components.Points.CA import CA
from ADR.Components.Aerodynamic_components.Aerodynamic_section import (
Aerodynamic_section,
)
from ADR.Components.Component import Component
from ADR.Methods.VLM.AVL.avlwrapper_io import get_aero_coefs
import numpy as np
from math import radians, cos, sin
class Aerodynamic_surface(Component):
    """A trapezoidal two-section lifting surface (wing or stabilizer).

    Built from three airfoil stations (root/mid/tip) with their spans,
    chords and twists; the surface's CL/CD/Cm vs alpha curves come from
    an AVL (vortex-lattice) run performed at construction time.
    """

    def __init__(self, data):
        """Build the two sections and run the aerodynamic analysis.

        Parameters:
            data (dict): geometry keys (airfoil names, span1/2, chord1-3,
                twist1-3, incidence, position x/y/z, airfoil_clmax,
                CM_ca) as produced by Plane.__init__.
        """
        super().__init__(data)
        self.data = data
        self.airfoil_clmax = data.get("airfoil_clmax")
        self.airfoil1_name = data.get("airfoil1_name")
        self.airfoil2_name = data.get("airfoil2_name")
        self.airfoil3_name = data.get("airfoil3_name")
        self.span1 = data.get("span1")
        self.span2 = data.get("span2")
        self.chord1 = data.get("chord1")
        self.chord2 = data.get("chord2")
        self.chord3 = data.get("chord3")
        self.twist1 = data.get("twist1")
        self.twist2 = data.get("twist2")
        self.twist3 = data.get("twist3")
        self.incidence = data.get("incidence")
        self.attack_angle = None
        self.CM_alpha_CG = None
        # Inner section: root (airfoil1) to mid (airfoil2).
        data_section1 = {
            "airfoil1_name": self.airfoil1_name,
            "airfoil2_name": self.airfoil2_name,
            "span": self.span1,
            "chord1": self.chord1,
            "chord2": self.chord2,
            "twist1": self.twist1,
            "twist2": self.twist2,
        }
        # Outer section: mid (airfoil2) to tip (airfoil3).
        data_section2 = {
            "airfoil1_name": self.airfoil2_name,
            "airfoil2_name": self.airfoil3_name,
            "span": self.span2,
            "chord1": self.chord2,
            "chord2": self.chord3,
            "twist1": self.twist2,
            "twist2": self.twist3,
        }
        self.section1 = Aerodynamic_section(data_section1)
        self.section2 = Aerodynamic_section(data_section2)
        # Sections describe a half-span; double for the full surface.
        self.area = 2 * (self.section1.area + self.section2.area)
        self.MAC = self.calc_MAC()
        self.vlm = "AVL"
        self.calc_aerodynamic_data()
        # Aerodynamic centre placed at quarter-MAC ahead of the surface
        # reference point.
        self.ca = CA(
            {"x": -self.MAC / 4, "z": 0, "surface_x": self.x, "surface_z": self.z}
        )

    def __str__(self):
        return self.__class__.__name__

    def update_alpha(self, alpha_airplane):
        """Store this surface's angle of attack for the given plane alpha."""
        self.attack_angle = alpha_airplane + self.incidence

    def calc_aerodynamic_data(self):
        """Run the configured VLM (AVL) and cache the aerodynamic curves."""
        if self.vlm == "AVL":
            a, b, c, self.CL_alpha, self.CD_alpha, self.Cm_alpha = get_aero_coefs(
                self.data, self.airfoil_clmax)
        # Mean Cm over alpha is used as the (constant) CM about the CA.
        self.CM_ca = self.Cm_alpha["Cm"].mean()
        # Usable alpha range is whatever AVL produced data for.
        self.stall_min = self.CL_alpha.index.min()
        self.stall_max = self.CL_alpha.index.max()
        self.dCL_dalpha = self.CL_alpha.diff()
        self.dCD_dalpha = self.CD_alpha.diff()
        # Backfill the NaN that .diff() leaves on the first row.
        self.dCL_dalpha.fillna(method="bfill", inplace=True)
        self.dCD_dalpha.fillna(method="bfill", inplace=True)
        self.downwash_angle = 0

    def moment_on_CG(self, reference_surface, cg, alpha_plane):
        """Pitching-moment coefficient of this surface about the plane CG.

        Lift/drag components are projected through the surface's attack
        angle and multiplied by the lever arms from CA to CG, normalised
        by the reference surface's MAC and area. The lift terms flip
        sign between Wing and HS (tail-down convention).

        NOTE(review): the ``.at[self.attack_angle, ...]`` lookups require
        attack_angle to be an exact index label of the AVL output —
        confirm update_alpha can never produce an off-grid value here.
        """
        resultant = 0
        surface_CL = self.CL_alpha.at[self.attack_angle, "CL"]
        surface_CD = self.CD_alpha.at[self.attack_angle, "CD"]
        sin_component = sin(radians(alpha_plane + self.incidence))
        cos_component = cos(radians(alpha_plane + self.incidence))
        horizontal_distance = self.x + self.ca.x - cg.x
        vertical_distance = self.z + self.ca.z - cg.z
        item1 = surface_CL * cos_component * horizontal_distance / reference_surface.MAC
        item2 = surface_CL * sin_component * vertical_distance / reference_surface.MAC
        item3 = surface_CD * sin_component * horizontal_distance / reference_surface.MAC
        item4 = surface_CD * cos_component * vertical_distance / reference_surface.MAC
        if self.__str__() == "Wing":
            resultant = +item1 - item2 + item3 + item4
        elif self.__str__() == "HS":
            resultant = -item1 + item2 + item3 + item4
        CM = (
            (self.CM_ca * self.MAC / reference_surface.MAC + resultant)
            * self.area
            / reference_surface.area
        )
        return CM

    def get_alpha_range(self):
        """Integer alpha grid covering the surface's valid data range."""
        alpha_range = np.arange(self.stall_min, self.stall_max + 1)
        return alpha_range

    def calc_MAC(self):
        """Mean aerodynamic chord: area-weighted mean of the sections'."""
        MAC = self.section1.MAC * (
            self.section1.area / (self.section1.area + self.section2.area)
        ) + self.section2.MAC * (
            self.section2.area / (self.section1.area + self.section2.area)
        )
        return MAC

    def get_CL(self, alpha):
        """Interpolate the surface lift coefficient at alpha [deg]."""
        CL = np.interp(alpha, self.CL_alpha.index.values, self.CL_alpha["CL"])
        return CL

    def get_CD(self, alpha):
        """Interpolate the surface drag coefficient at alpha [deg]."""
        CD = np.interp(alpha, self.CD_alpha.index.values, self.CD_alpha["CD"])
        return CD

    def get_CM(self):
        """Constant pitching-moment coefficient about the CA."""
        return self.CM_ca

    def lift(self, air_density, velocity, alpha):
        """Lift force [N] at the given air density, speed and alpha."""
        lift = 0.5 * air_density * velocity ** 2 * \
            self.area * self.get_CL(alpha)
        return lift

    def drag(self, air_density, velocity, alpha):
        """Drag force [N] at the given air density, speed and alpha."""
        drag = 0.5 * air_density * velocity ** 2 * \
            self.area * self.get_CD(alpha)
        return drag

    def moment(self, air_density, velocity, alpha):
        """Pitching moment [N.m] about the CA.

        ``alpha`` is unused because get_CM() returns the constant CM_ca.
        """
        moment = (
            0.5 * air_density * velocity ** 2 * self.area * self.MAC * self.get_CM()
        )
        return moment
import os
import math
from Beans.Common.Spectrum import Spectrum
from Beans.Compressor import Compressor
from Compressor.ByteComp.BrotliWrapper import BrotliWrapper
from Compressor.ByteComp.SnappyWrapper import SnappyWrapper
from Compressor.ByteComp.ZlibWrapper import ZlibWrapper
from Compressor.ByteComp.ZstdWrapper import ZstdWrapper
from Compressor.ByteTrans import ByteTrans
from Compressor.IntComp.BinPackingWrapper import BinPackingWrapper
from Compressor.IntComp.EmptyWrapper import EmptyWrapper
from Compressor.IntComp.FastPFor256Wrapper import FastPFor256Wrapper
from Compressor.IntComp.VarByteWrapper import VarByteWrapper
from Compressor.SortedIntComp.DeltaWrapper import DeltaWrapper
from Compressor.SortedIntComp.IntegratedBinPackingWrapper import IntegratedBinPackingWrapper
from Compressor.SortedIntComp.IntegratedVarByteWrapper import IntegratedVarByteWrapper
from Enums.DataDim import DataDim
from Utils.AirdScanUtil import AirdScanUtil
class BaseParser:
    """Common base class for Aird parsers.

    Loads the index (JSON) metadata, opens the companion binary ``.aird``
    file, and resolves the integer/byte decompressor chain for each data
    dimension (m/z, intensity, mobility). Subclasses use the ``getSpectrum*``
    helpers to decode byte ranges of the binary file into spectra.
    """

    def __init__(self, indexPath):
        # Path to the index (JSON) file; the binary data path is derived from it.
        self.indexPath = indexPath
        self.airdInfo = AirdScanUtil.loadAirdInfo(indexPath)
        self.airdPath = AirdScanUtil.getAirdPathByIndexPath(indexPath)
        # NOTE(review): the file handle is opened here and never explicitly
        # closed anywhere in this class — presumably it lives as long as the
        # parser object; confirm whether an explicit close is needed.
        self.airdFile = open(self.airdPath, 'rb')
        # Intensity dimension: compressor metadata, byte/int decoders, precision.
        self.intCompressor = None
        self.intByteComp = None
        self.intIntComp = None
        self.intPrecision = None
        # Mobility dimension: compressor metadata, lookup dictionary, decoders, precision.
        self.mobiCompressor = None
        self.mobiDict = None
        self.mobiIntComp = None
        self.mobiByteComp = None
        self.mobiPrecision = None
        # m/z dimension: compressor metadata, decoders, precision.
        self.mzCompressor = None
        self.mzIntComp = None
        self.mzByteComp = None
        self.mzPrecision = None
        # Resolve compressors/decoders from the loaded metadata.
        self.parseCompsFromAirdInfo()
        self.parseComboComp()
        self.parseMobilityDict()

    # def buildParser(self, indexJsonPath):
    #     self.indexPath = indexJsonPath
    #     self.airdInfo = AirdScanUtil.loadAirdInfo(self.indexPath)
    #     self.airdPath = AirdScanUtil.getAirdPathByIndexPath(indexJsonPath)
    #     self.airdFile = open(self.airdPath, 'rb')
    #     self.parseCompsFromAirdInfo()
    #     # self.parseComboComp()
    #
    #     if self.airdInfo.type is AirdType.DIA:
    #         return DIAParser(indexJsonPath, self.airdInfo)
    #     elif self.airdInfo.type is AirdType.DDA:
    #         return DDAParser(indexJsonPath, self.airdInfo)
    #     elif self.airdInfo.type is AirdType.PRM:
    #         return PRMParser(indexJsonPath, self.airdInfo)
    #     elif self.airdInfo.type is AirdType.DIA_PASEF:
    #         return DIAPasefParser(indexJsonPath, self.airdInfo)
    #     elif self.airdInfo.type is AirdType.DDA_PASEF:
    #         return DDAPasefParser(indexJsonPath, self.airdInfo)

    def parseCompsFromAirdInfo(self):
        """Pick the per-dimension compressor metadata and cache each precision."""
        self.mzCompressor = BaseParser.fetchTargetCompressor(self.airdInfo.compressors, DataDim.TARGET_MZ.value)
        self.intCompressor = BaseParser.fetchTargetCompressor(self.airdInfo.compressors, DataDim.TARGET_INTENSITY.value)
        self.mobiCompressor = BaseParser.fetchTargetCompressor(self.airdInfo.compressors, DataDim.TARGET_MOBILITY.value)
        self.mzPrecision = self.mzCompressor.precision
        self.intPrecision = self.intCompressor.precision
        self.mobiPrecision = self.mobiCompressor.precision

    def parseMobilityDict(self):
        """Read and decode the ion-mobility lookup dictionary (TIMS data only)."""
        mobiInfo = self.airdInfo.mobiInfo
        if mobiInfo.type == "TIMS":
            # The dictionary occupies bytes [dictStart, dictEnd) of the aird file.
            self.airdFile.seek(mobiInfo.dictStart, 0)
            delta = mobiInfo.dictEnd - mobiInfo.dictStart
            result = self.airdFile.read(delta)
            # Decode pipeline: Zstd bytes -> ints -> delta-decoded int array.
            mobiArray = DeltaWrapper().decode(ByteTrans.byteToInt(ZstdWrapper().decode(result)))
            mobiDArray = [None] * len(mobiArray)
            for i in range(0, len(mobiArray)):
                # Scale stored integers back to floating-point mobility values.
                mobiDArray[i] = mobiArray[i] / self.mobiPrecision
            self.mobiDict = mobiDArray

    def parseComboComp(self):
        """Resolve the (int compressor, byte compressor) pair for each dimension."""
        mzMethods = self.mzCompressor.methods
        # A two-entry method list means: [integer codec, byte codec].
        if len(mzMethods) == 2:
            self.mzIntComp = BaseParser.parseComp(mzMethods[0])
            self.mzByteComp = BaseParser.parseComp(mzMethods[1])
        intMethods = self.intCompressor.methods
        if len(intMethods) == 2:
            self.intIntComp = BaseParser.parseComp(intMethods[0])
            self.intByteComp = BaseParser.parseComp(intMethods[1])
        if self.mobiCompressor is not None:
            mobiMethods = self.mobiCompressor.methods
            if len(mobiMethods) == 2:
                self.mobiIntComp = BaseParser.parseComp(mobiMethods[0])
                self.mobiByteComp = BaseParser.parseComp(mobiMethods[1])

    def getSpectrum(self, bytes, offset, mzOffset, intOffset):
        """Decode one spectrum from ``bytes``: m/z block then intensity block.

        A zero ``mzOffset`` marks an empty spectrum.
        """
        if mzOffset == 0:
            return Spectrum([], [], None)
        mzArray = self.getMzs(bytes, offset, mzOffset)
        # Intensity block immediately follows the m/z block.
        offset = offset + mzOffset
        intensityArray = self.getInts(bytes, offset, intOffset)
        return Spectrum(mzArray, intensityArray, None)

    def getMzs(self, value, offset, length):
        """Decode ``length`` bytes at ``offset`` into an array of m/z values."""
        decodedData = self.mzByteComp.decode(value, offset, length)
        intValues = ByteTrans.byteToInt(decodedData)
        intValues = self.mzIntComp.decode(intValues, 0, intValues.size)
        # Stored as scaled integers; divide by precision to recover doubles.
        doubleValues = intValues / self.mzPrecision
        return doubleValues

    def getInts(self, value, start, length):
        """Decode ``length`` bytes at ``start`` into an array of intensities."""
        decodedData = self.intByteComp.decode(value, start, length)
        intValues = ByteTrans.byteToInt(decodedData)
        intValues = self.intIntComp.decode(intValues, 0, len(intValues))
        for i in range(len(intValues)):
            # Negative encoded values represent log2-compressed intensities;
            # expand them back with 2^(-v / 1e5).
            if intValues[i] < 0:
                intValues[i] = math.pow(2, -intValues[i] / 100000.0)
        intensityValues = intValues / self.intPrecision
        return intensityValues

    def getMobilities(self, value, start, length):
        """Decode ``length`` bytes at ``start`` into mobility values via the TIMS dictionary."""
        decodedData = self.mobiByteComp.decode(value, start, length)
        intValues = ByteTrans.byteToInt(decodedData)
        intValues = self.mobiIntComp.decode(intValues, 0, len(intValues))
        mobilities = [None] * len(intValues)
        for i in range(len(intValues)):
            # Decoded ints are indices into the pre-parsed mobility dictionary.
            mobilities[i] = self.mobiDict[intValues[i]]
        return mobilities

    def getSpectraByIndex(self, index):
        """Read every spectrum covered by a block index; returns {rt: Spectrum}."""
        return self.getSpectra(index.startPtr, index.endPtr, index.rts, index.mzs, index.ints)

    def getSpectrumByIndex(self, startPtr, mzOffsets, intOffsets, index):
        """Read the single spectrum at position ``index`` within a block.

        The byte position is the block start plus the sizes of all preceding
        (m/z + intensity) pairs.
        """
        start = startPtr
        for i in range(index):
            start += mzOffsets[i]
            start += intOffsets[i]
        self.airdFile.seek(start)
        reader = self.airdFile.read(mzOffsets[index] + intOffsets[index])
        return self.getSpectrum(reader, 0, mzOffsets[index], intOffsets[index])

    def getSpectrumByRt(self, startPtr, rtList, mzOffsets, intOffsets, rt):
        """Read the spectrum whose retention time equals ``rt`` exactly.

        Raises ValueError (via ``list.index``) if ``rt`` is not in ``rtList``.
        """
        position = rtList.index(rt)
        return self.getSpectrumByIndex(startPtr, mzOffsets, intOffsets, position)

    def getSpectra(self, start, end, rtList, mzOffsets, intOffsets):
        """Read bytes [start, end) once and slice them into {rt: Spectrum}."""
        map = {}
        self.airdFile.seek(start)
        delta = end - start
        result = self.airdFile.read(delta)
        # Walk the buffer; each spectrum occupies mzOffsets[i] + intOffsets[i] bytes.
        iter = 0
        for i in range(len(rtList)):
            map[rtList[i]] = self.getSpectrum(result, iter, mzOffsets[i], intOffsets[i])
            iter = iter + mzOffsets[i] + intOffsets[i]
        return map

    @staticmethod
    def parseComp(name):
        """Map a codec name from the index file to its wrapper instance.

        Returns None (implicitly) for unrecognized names.
        """
        if name == 'VB':
            return VarByteWrapper()
        elif name == 'BPVB':
            return BinPackingWrapper()
        elif name == 'FPF256':
            return FastPFor256Wrapper()
        elif name == 'Empty':
            return EmptyWrapper()
        elif name == 'Zlib':
            return ZlibWrapper()
        elif name == 'Brotli':
            return BrotliWrapper()
        elif name == 'Snappy':
            return SnappyWrapper()
        elif name == 'Zstd':
            return ZstdWrapper()
        elif name == 'IVB':
            return IntegratedVarByteWrapper()
        elif name == 'IBP':
            return IntegratedBinPackingWrapper()
        elif name == 'Delta':
            return DeltaWrapper()

    @staticmethod
    def fetchTargetCompressor(compressors, target):
        """Return the compressor whose ``target`` matches, or None."""
        if target is None:
            return None
        for compressor in compressors:
            if compressor.target == target:
                return compressor
        return None
from Beans.DDAMs import DDAMs
from Parser.BaseParser import BaseParser
from Utils.DDAUtil import DDAUtil
class DDAParser(BaseParser):
def getMs1Index(self):
    """Return the MS1 block index (always the first list entry), or None if absent."""
    info = self.airdInfo
    if info is None or info.indexList is None or len(info.indexList) == 0:
        return None
    return info.indexList[0]
def getAllMs2Index(self):
    """Return every MS2 block index (all entries after the first), or None if absent."""
    info = self.airdInfo
    if info is None or info.indexList is None or len(info.indexList) == 0:
        return None
    return info.indexList[1:]
def getMs2IndexMap(self):
    """Return a dict mapping parent MS1 scan number -> MS2 block index, or None if absent."""
    info = self.airdInfo
    if info is None or info.indexList is None or len(info.indexList) == 0:
        return None
    return {ms2Index.getParentNum(): ms2Index for ms2Index in info.indexList[1:]}
def readAllToMemeory(self):
    """Load every MS1 spectrum (with its linked MS2 scans) into memory and return the list.

    Note: the method name's spelling is preserved because it is public API.
    """
    ms1Index = self.getMs1Index()
    spectraByRt = self.getSpectraByIndex(ms1Index)
    retentionTimes = list(spectraByRt.keys())
    return self.buildDDAMsList(retentionTimes, ms1Index, spectraByRt, True)
def buildDDAMsList(self, rtList, ms1Index, ms1Map, includeMS2):
    """Build a DDAMs object for every MS1 retention time; optionally attach MS2 scans.

    ``rtList`` orders the output; ``ms1Map`` maps rt -> spectrum. When
    ``includeMS2`` is true, each MS1's child MS2 block (matched by scan number)
    is read and attached as ``ms2List``.
    """
    ms2IndexMap = self.getMs2IndexMap() if includeMS2 else None
    ms1List = []
    for i, rt in enumerate(rtList):
        ms1 = DDAMs(rt, ms1Map[rt])
        DDAUtil.initFromIndex(ms1, ms1Index, i)
        if includeMS2 and ms1.num in ms2IndexMap:
            ms2Index = ms2IndexMap[ms1.num]
            if ms2Index is not None:
                ms2Map = self.getSpectra(
                    ms2Index.startPtr, ms2Index.endPtr,
                    ms2Index.rts, ms2Index.mzs, ms2Index.ints,
                )
                ms2List = []
                for j, ms2Rt in enumerate(ms2Map.keys()):
                    ms2 = DDAMs(ms2Rt, ms2Map[ms2Rt])
                    DDAUtil.initFromIndex(ms2, ms2Index, j)
                    ms2List.append(ms2)
                ms1.ms2List = ms2List
        ms1List.append(ms1)
    return ms1List
def getSpectrumByNum(self, num):
    """Return the spectrum whose scan number is ``num``, or None if no block contains it.

    Bug fix: ``list.index`` raises ValueError when the value is absent — it never
    returns -1 — so the original ``if index >= 0`` guard was dead code and a
    missing scan number crashed instead of continuing to the next block and
    finally returning None. A membership test is used before the index lookup.
    """
    for blockIndex in self.airdInfo.indexList:
        if num in blockIndex.nums:
            index = blockIndex.nums.index(num)
            return self.getSpectrumByIndex(
                blockIndex.startPtr, blockIndex.mzs, blockIndex.ints, index
            )
    return None
def getSpectraByRtRange(self, rtStart, rtEnd, includeMS2):
ms1Index = self.getMs1Index() | AirdSDK | /AirdSDK-0.0.2-py3-none-any.whl/Parser/DDAParser.py | DDAParser.py |
from Beans.BlockIndex import BlockIndex
from Beans.Compressor import Compressor
from Beans.DataProcessing import DataProcessing
from Beans.Instrument import Instrument
from Beans.MobiInfo import MobiInfo
from Beans.ParentFile import ParentFile
from Beans.Software import Software
from Beans.WindowRange import WindowRange
class AirdInfo:
    """Metadata parsed from an Aird index (JSON) dictionary.

    Scalar fields default to None when their key is absent; nested object
    lists default to empty lists. ``mobiInfo`` now defaults to None — the
    original left the attribute entirely unset when the key was missing,
    which made later ``airdInfo.mobiInfo`` reads raise AttributeError.
    """

    def __init__(self, dict):
        # NOTE: the parameter keeps its original name ``dict`` (shadowing the
        # builtin) so the public keyword interface is unchanged.
        self.version = dict.get('version')
        self.versionCode = dict.get('versionCode')

        # Nested object lists: dict.get(key, []) makes an absent or empty key
        # yield an empty list, matching the original's guarded-loop behavior.
        self.compressors = [Compressor(d) for d in dict.get('compressors', [])]
        self.instruments = [Instrument(d) for d in dict.get('instruments', [])]
        self.dataProcessings = [DataProcessing(d) for d in dict.get('dataProcessings', [])]
        self.softwares = [Software(d) for d in dict.get('softwares', [])]
        self.parentFiles = [ParentFile(d) for d in dict.get('parentFiles', [])]
        self.rangeList = [WindowRange(d) for d in dict.get('rangeList', [])]
        self.indexList = [BlockIndex(d) for d in dict.get('indexList', [])]

        # Plain scalar metadata fields.
        self.type = dict.get('type')
        self.fileSize = dict.get('fileSize')
        self.totalCount = dict.get('totalCount')
        self.airdPath = dict.get('airdPath')
        self.activator = dict.get('activator')
        self.energy = dict.get('energy')
        self.msType = dict.get('msType')
        self.rtUnit = dict.get('rtUnit')
        self.polarity = dict.get('polarity')
        self.ignoreZeroIntensityPoint = dict.get('ignoreZeroIntensityPoint')
        # Fix: always define the attribute so downstream code can test it for None.
        self.mobiInfo = MobiInfo(dict['mobiInfo']) if 'mobiInfo' in dict else None
        self.creator = dict.get('creator')
        self.features = dict.get('features')
import numpy as np
import openmdao.api as om
import sys
import time
from datetime import timedelta
from differential_evolution import DifferentialEvolutionDriver
from . import rank, run_parallel
from .components.airfoil import cst2coords
from .model import AfOptModel
from .recorders import PopulationReporter
def get_de_driver(
    gen=100,
    tolx=1e-8,
    tolf=1e-8,
    strategy="rand-to-best/1/exp/random",
    f=None,
    cr=None,
    adaptivity=2,
):
    """Construct a DifferentialEvolutionDriver with the given GA settings.

    The mutation rate ``f`` and crossover rate ``cr`` are forwarded to the
    driver as its ``Pm``/``Pc`` options only when explicitly provided.
    """
    settings = {
        "run_parallel": run_parallel,
        "adaptivity": adaptivity,
        "max_gen": gen,
        "tolx": tolx,
        "tolf": tolf,
        "strategy": strategy,
        "show_progress": True,
    }
    if f is not None:
        settings["Pm"] = f
    if cr is not None:
        settings["Pc"] = cr
    return DifferentialEvolutionDriver(**settings)
def get_coords(prob):
    """
    Get the coordinates of the airfoil represented by the current state of the airfoil optimization problem.

    Parameters
    ----------
    prob : openmdao.api.Problem
        Airfoil optimization problem

    Returns
    -------
    np.ndarray
        (n, 2) array of x-, and y-coordinates of the airfoil in counterclockwise direction
    """
    x, y_upper, y_lower, _, _ = cst2coords(prob["a_ca"], prob["a_th"], prob["t_te"])
    x_col = np.reshape(x, (-1, 1))
    upper = np.concatenate((x_col, np.reshape(y_upper, (-1, 1))), axis=1)
    lower = np.concatenate((x_col, np.reshape(y_lower, (-1, 1))), axis=1)
    # Reverse the upper surface (dropping its duplicate leading-edge point) so the
    # loop runs TE -> LE along the top, then LE -> TE along the bottom.
    return np.concatenate((np.flip(upper[1:], axis=0), lower))
def plot(prob, display=False):
    """
    Plot the airfoil represented by the current state of the airfoil optimization problem.

    Parameters
    ----------
    prob : openmdao.api.Problem
        Airfoil optimization problem
    display : bool, optional
        True if the figure should be displayed. False by default

    Returns
    -------
    figure
    """
    import matplotlib.pyplot as plt
    from matplotlib.ticker import MultipleLocator

    x, y_u, y_l, y_c, _ = cst2coords(prob["a_ca"], prob["a_th"], prob["t_te"])

    fig, ax = plt.subplots()
    # Upper and lower surfaces solid, camber line dashed.
    ax.plot(x, y_u, "k", x, y_l, "k", x, y_c, "k--")
    ax.axis("scaled")
    ax.set_xlabel("x/c")
    ax.set_ylabel("y/c")
    # Major gridlines every 0.2 chord, minor every 0.05, on both axes.
    for axis in (ax.xaxis, ax.yaxis):
        axis.set_major_locator(MultipleLocator(0.2))
        axis.set_minor_locator(MultipleLocator(0.05))
    ax.grid(which="both")
    if display:
        fig.show()
    return fig
def write(prob, filename):
    """
    Write airfoil coordinates represented by the current state of the airfoil optimization problem to a file

    Parameters
    ----------
    prob : openmdao.api.Problem
        Airfoil optimization problem
    filename : str
        Filename
    """
    coords = get_coords(prob)
    # Two right-aligned, 6-decimal columns per line — identical to the original
    # template built from width 6+1 and precision 6: "{: >7.6f} {: >7.6f} \n".
    line_template = 2 * "{: >7.6f} " + "\n"
    with open(filename, "w") as f:
        for row in coords:
            f.write(line_template.format(row[0], row[1]))
def main(
    cl,
    re,
    ma,
    n_ca,
    n_th,
    gen=100,
    tolx=1e-8,
    tolf=1e-8,
    fix_te=True,
    t_te_min=0.0,
    t_c_min=0.01,
    r_le_min=0.05,
    A_cs_min=None,
    A_bins_min=None,
    Cm_max=None,
    strategy="rand-to-best/1/exp/random",
    f=None,
    cr=None,
    adaptivity=2,
    repr_file="repr.yml",
    dat_file="optimized.dat",
    png_file="optimized.png",
):
    """
    Create, analyze, optimize airfoil, and write optimized coordinates to a file. Then clean the problem up and exit.

    Parameters
    ----------
    cl : float
        Design lift coefficient
    re : float
        Reynolds number
    ma : float
        Mach number
    n_ca, n_th : int
        Number of CST coefficients for the chord line and thickness distribution, respectively
    gen : int, optional
        Number of generations to use for the genetic algorithm. 100 by default
    tolx : float, optional
        Tolerance on the spread of the design vectors.
    tolf: float, optional
        Tolerance on the spread of objective functions.
    fix_te : bool, optional
        True if the trailing edge thickness should be fixed. True by default
    t_te_min : float, optional
        Minimum TE thickness as fraction of chord length. Default is 0.0.
    t_c_min : float or None, optional
        Minimum thickness over chord ratio. None if unconstrained. Defaults is 0.01.
    r_le_min : float or None, optional
        Minimum leading edge radius. None if unconstrained. Defaults is 0.05.
    A_cs_min : float or None, optional
        Minimum cross sectional area. None if unconstrained. Default is None.
    A_bins_min : float or None, optional
        Minimum relative area of the airfoil in each bin along the chord. None if unconstrained. Default is None.
    Cm_max : float or None, optional
        Maximum absolute moment coefficient. None if unconstrained. Default is None.
    strategy : string, optional
        Evolution strategy to use. Default is 'rand-to-best/1/exp/random'.
    f : float or None, optional
        Mutation rate
    cr : float or None, optional
        Crossover rate
    adaptivity : 0, 1, or 2
        Which kind of self-adaptivity to ue (0: none, 1: simple, 2: complex)
    repr_file, dat_file, png_file : str, optional
        Paths where the final representation, optimized airfoil coordinates, and output image should be saved.
    """
    # Construct the OpenMDAO Problem
    kwargs = dict(
        n_ca=n_ca,
        n_th=n_th,
        fix_te=fix_te,
        t_te_min=t_te_min,
        t_c_min=t_c_min,
        r_le_min=r_le_min,
        A_cs_min=A_cs_min,
        A_bins_min=A_bins_min,
        Cm_max=Cm_max,
    )
    prob = om.Problem()
    prob.model = AfOptModel(**kwargs)
    prob.driver = get_de_driver(gen, tolx, tolf, strategy, f, cr, adaptivity)
    # Record the population each generation for post-processing.
    prob.driver.add_recorder(PopulationReporter())
    prob.setup()

    # Set reference values
    prob["Cl_des"] = cl
    prob["Re"] = re
    prob["M"] = ma

    # Optimize the problem using a genetic algorithm
    t0 = time.time()
    prob.run_driver()
    dt = time.time() - t0

    # Show and write final results
    # Only the root MPI process reports and writes output files.
    if rank == 0:
        yaml = prob.model.__repr__()
        print("Optimized airfoil:")
        print("    " + yaml.replace("\n", "\n    "))
        print(f"Time Elapsed: {timedelta(seconds=dt)}")

        with open(repr_file, "w") as f:
            f.write(yaml)
        write(prob, filename=dat_file)
        fig = plot(prob)
        fig.savefig(png_file)

    # Clean up and exit
    prob.cleanup()
    del prob
    sys.exit(0)
if __name__ == "__main__":
    # CLI mode: exactly 22 positional arguments (plus the script name = 23)
    # in the same order as main()'s signature; the sentinel string "None"
    # stands for an unconstrained/unset optional value.
    if len(sys.argv) == 23:
        main(
            cl=float(sys.argv[1]),
            re=float(sys.argv[2]),
            ma=float(sys.argv[3]),
            n_ca=int(sys.argv[4]),
            n_th=int(sys.argv[5]),
            gen=int(sys.argv[6]),
            tolx=float(sys.argv[7]),
            tolf=float(sys.argv[8]),
            fix_te=(sys.argv[9] == "True"),
            t_te_min=float(sys.argv[10]),
            t_c_min=None if sys.argv[11] == "None" else float(sys.argv[11]),
            r_le_min=None if sys.argv[12] == "None" else float(sys.argv[12]),
            A_cs_min=None if sys.argv[13] == "None" else float(sys.argv[13]),
            A_bins_min=None if sys.argv[14] == "None" else float(sys.argv[14]),
            Cm_max=None if sys.argv[15] == "None" else float(sys.argv[15]),
            strategy=sys.argv[16],
            f=None if sys.argv[17] == "None" else float(sys.argv[17]),
            cr=None if sys.argv[18] == "None" else float(sys.argv[18]),
            adaptivity=int(sys.argv[19]),
            repr_file=sys.argv[20],
            dat_file=sys.argv[21],
            png_file=sys.argv[22],
        )
    else:
        # Fallback demo run with a short GA (9 generations).
        main(1.0, 1e6, 0.0, 3, 3, gen=9)
import numpy as np
import openmdao.api as om
from .components import *
class AfOptModel(om.Group):
    """
    Airfoil shape optimization using XFoil.
    """

    def initialize(self):
        # Number of CST coefficients for the camber line / thickness distribution.
        self.options.declare("n_ca", default=6, types=int)
        self.options.declare("n_th", default=6, types=int)
        # If True, the trailing-edge thickness is fixed (not a design variable).
        self.options.declare("fix_te", default=True, types=bool)
        # Number of chordwise bins used for the slender-section area constraint.
        self.options.declare("n_area_bins", default=5, lower=1, types=int)
        self.options.declare(
            "t_te_min", default=0.0, lower=0.0, types=float, allow_none=False
        )
        # Constraint limits; None disables the corresponding constraint.
        self.options.declare("t_c_min", default=0.1, types=float, allow_none=True)
        self.options.declare("r_le_min", default=0.05, types=float, allow_none=True)
        self.options.declare("A_cs_min", default=0.1, types=float, allow_none=True)
        self.options.declare("A_bins_min", default=0.02, types=float, allow_none=True)
        self.options.declare("Cm_max", default=None, types=float, allow_none=True)
        self.options.declare("n_coords", default=100, types=int)

    def setup(self):
        # Number of CST coefficients
        n_ca = self.options["n_ca"]
        n_th = self.options["n_th"]

        # Number of bins to check for slender sections
        n_area_bins = self.options["n_area_bins"]

        # Design variable bounds
        a_c_lower = -0.25 * np.ones(n_ca)
        a_c_upper = +0.25 * np.ones(n_ca)
        a_t_lower = +0.01 * np.ones(n_th)
        a_t_upper = +0.20 * np.ones(n_th)
        t_te_upper = 0.1

        # Independent variables
        ivc = om.IndepVarComp()
        ivc.add_output("a_ca", val=np.zeros(n_ca))
        ivc.add_output("a_th", val=np.zeros(n_th))
        ivc.add_output("t_te", val=self.options["t_te_min"])
        ivc.add_output("Re", val=1e6)
        ivc.add_output("M", val=0.0)
        ivc.add_output("Cl_des", val=1.0)

        # Main sub-systems
        self.add_subsystem("ivc", ivc, promotes=["*"])
        self.add_subsystem("XFoil", XFoilAnalysis(n_ca=n_ca, n_th=n_th), promotes=["*"])

        # Design variables
        self.add_design_var("a_ca", lower=a_c_lower, upper=a_c_upper)
        self.add_design_var("a_th", lower=a_t_lower, upper=a_t_upper)
        if not self.options["fix_te"]:
            self.add_design_var(
                "t_te", lower=self.options["t_te_min"], upper=t_te_upper
            )

        # Objective: minimize the drag coefficient.
        self.add_objective("Cd")  # Cd

        # Constraints. Each g* is normalized so that the feasible region is
        # g <= 0 (or g >= 0 for g5); a None limit disables the constraint.
        self.add_subsystem("Geometry", Geometry(n_ca=n_ca, n_th=n_th, n_area_bins=n_area_bins), promotes=["*"])
        if self.options["t_c_min"] is not None:
            self.add_subsystem(
                "G1",
                om.ExecComp(
                    f"g1 = 1 - t_c / {self.options['t_c_min']:15g}", g1=0.0, t_c=1.0
                ),
                promotes=["*"],
            )
            self.add_constraint("g1", upper=0.0)  # t_c >= t_c_min
        if self.options["r_le_min"] is not None:
            self.add_subsystem(
                "G2",
                om.ExecComp(
                    f"g2 = 1 - r_le / {self.options['r_le_min']:15g}", g2=0.0, r_le=1.0
                ),
                promotes=["*"],
            )
            self.add_constraint("g2", upper=0.0)  # r_le >= r_le_min
        if self.options["A_cs_min"] is not None:
            self.add_subsystem(
                "G3",
                om.ExecComp(
                    f"g3 = 1 - A_cs / {self.options['A_cs_min']:15g}", g3=0, A_cs=1.0
                ),
                promotes=["*"],
            )
            self.add_constraint("g3", upper=0.0)  # A_cs >= A_cs_min
        if self.options["A_bins_min"] is not None:
            self.add_subsystem(
                "G4",
                om.ExecComp(
                    f"g4 = 1 - A_bins / {self.options['A_bins_min']:15g}",
                    g4=np.zeros(n_area_bins),
                    A_bins=np.ones(n_area_bins),
                ),
                promotes=["*"],
            )
            self.add_constraint("g4", upper=0.0)  # A_bins >= A_bins_min
        if self.options["Cm_max"] is not None:
            self.add_subsystem(
                "G5",
                om.ExecComp(
                    f"g5 = 1 - abs(Cm) / {np.abs(self.options['Cm_max']):15g}",
                    g5=0.0,
                    Cm=1.0,
                ),
                promotes=["*"],
            )
            # Note the flipped sense relative to g1..g4: g5 >= 0 <=> |Cm| <= |Cm_max|.
            self.add_constraint("g5", lower=0.0)  # |Cm| <= |Cm_max|

    def __repr__(self):
        """Return a YAML-style summary of the problem setup and current outputs."""
        outputs = dict(self.list_outputs(out_stream=None))

        s_t_te_des = f"{outputs['ivc.t_te']['value'][0]:.4g}"

        # Shared fixed-width float formatting for the design vectors.
        desvar_formatter = {"float_kind": "{: 7.4f}".format}
        s_area_bins = np.array2string(
            outputs["Geometry.A_bins"]["value"],
            formatter=desvar_formatter,
            separator=", ",
        )
        s_a_ca = np.array2string(
            outputs["ivc.a_ca"]["value"], formatter=desvar_formatter, separator=", "
        )
        s_a_th = np.array2string(
            outputs["ivc.a_th"]["value"], formatter=desvar_formatter, separator=", "
        )

        yaml = ""
        yaml += f"Cl: {outputs['ivc.Cl_des']['value'][0]:.4g}\n"
        yaml += f"M: {outputs['ivc.M']['value'][0]:.4g}\n"
        yaml += f"Re: {outputs['ivc.Re']['value'][0]:.4g}\n"
        yaml += (
            "" if self.options["fix_te"] else "min "
        ) + f"t_te: {self.options['t_te_min']:.4g}\n"
        if self.options["t_c_min"] is not None:
            yaml += f"t_c_min: {self.options['t_c_min']:.4g}\n"
        if self.options["r_le_min"] is not None:
            yaml += f"r_le_min: {self.options['r_le_min']:.4g}\n"
        if self.options["A_cs_min"] is not None:
            yaml += f"A_cs_min: {self.options['A_cs_min']:.4g}\n"
        if self.options["A_bins_min"] is not None:
            yaml += f"A_bins_min: {self.options['A_bins_min']:.4g}\n"
        if self.options["Cm_max"] is not None:
            yaml += f"Cm_max: {self.options['Cm_max']:.4g}\n"
        yaml += f"Cd: {outputs['XFoil.Cd']['value'][0]:.4g}\n"
        yaml += f"Cm: {outputs['XFoil.Cm']['value'][0]: .4g}\n"
        yaml += f"t_c: {outputs['Geometry.t_c']['value'][0]:.4g}\n"
        yaml += f"r_le: {outputs['Geometry.r_le']['value'][0]:.4g}\n"
        yaml += f"A_cs: {outputs['Geometry.A_cs']['value'][0]:.4g}\n"
        yaml += f"A_bins: {s_area_bins}\n"
        yaml += f"a_ca: {s_a_ca}\n"
        yaml += f"a_th: {s_a_th}"
        if not self.options["fix_te"]:
            yaml += f"\nt_te: {s_t_te_des}"
        return yaml
import numpy as np
import openmdao.api as om
from cst import cst, fit
from ..util import cosspace
def coords2cst(x, y_u, y_l, n_ca, n_th):
    """
    Convert airfoil upper/lower curve coordinates to camber line/thickness distribution CST coefficients.

    Parameters
    ----------
    x : array_like
        X-Coordinates
    y_u, y_l : array_like
        Y-Coordinates of the upper and lower curves, respectively
    n_ca, n_th : int
        Number of CST coefficients to use for the camber line and thickness distribution of the airfoil

    Returns
    -------
    a_ca, a_th : np.ndarray
        CST coefficients describing the camber line and thickness distribution of the airfoil
    t_te : float
        Airfoil trailing edge thickness
    """
    camber = (y_u + y_l) / 2
    thickness = y_u - y_l
    # Camber line: class-function exponent n1=1 with zero displacement at both ends.
    a_ca, _ = fit(x, camber, n_ca, delta=(0.0, 0.0), n1=1)
    # Thickness distribution: the fitted TE displacement is the trailing-edge thickness.
    a_th, delta_te = fit(x, thickness, n_th)
    return a_ca, a_th, delta_te[1]
def cst2coords(a_ca, a_th, t_te, n_coords=100):
    """
    Convert airfoil camber line/thickness distribution CST coefficients to upper/lower curve coordinates.

    Parameters
    ----------
    a_ca, a_th : array_like
        CST coefficients describing the camber line and thickness distribution of the airfoil
    t_te : float
        Airfoil trailing edge thickness
    n_coords : int, optional
        Number of x-coordinates to use. 100 by default

    Returns
    -------
    x : np.ndarray
        Airfoil x-coordinates
    y_u, y_l : np.ndarray
        Airfoil upper and lower curves y-coordinates
    y_c, t : np.ndarray
        Airfoil camber line and thickness distribution
    """
    # Cosine-spaced abscissae cluster points near the leading/trailing edges.
    x = cosspace(0, 1, n_coords)
    y_c = cst(x, a_ca, n1=1)
    t = cst(x, a_th, delta=(0, t_te))
    half_thickness = t / 2
    return x, y_c + half_thickness, y_c - half_thickness, y_c, t
class AirfoilComponent(om.ExplicitComponent):
    """
    Base airfoil component parameterized by CST coefficients for its camber line
    and thickness distribution, plus a trailing-edge thickness.
    """

    def initialize(self):
        # Number of CST coefficients for camber line / thickness distribution,
        # and the number of chordwise coordinates to generate.
        self.options.declare("n_ca", default=6, types=int)
        self.options.declare("n_th", default=6, types=int)
        self.options.declare("n_coords", default=100, types=int)

    def setup(self):
        # Inputs: CST coefficient vectors and the trailing-edge thickness.
        self.add_input("a_ca", shape=self.options["n_ca"])
        self.add_input("a_th", shape=self.options["n_th"])
        self.add_input("t_te", shape=1)

    def compute_coords(self, inputs, precision=None, n_coords=None):
        """
        Compute airfoil coordinates from the set of OpenMDAO inputs.

        Returns the ``(x, y_u, y_l, y_c, t)`` tuple from ``cst2coords``,
        optionally rounded to ``precision`` decimals.
        """
        coords = cst2coords(
            inputs["a_ca"],
            inputs["a_th"],
            inputs["t_te"][0],
            self.options["n_coords"] if n_coords is None else n_coords,
        )
        if precision is None:
            return coords
        return tuple(np.round(c, precision) for c in coords)
import numpy as np
import time
from multiprocessing.pool import ThreadPool
from xfoil import XFoil
from xfoil.model import Airfoil
from .. import rank
from .airfoil import AirfoilComponent
def xfoil_worker(xf, cl_spec, consistency_check=True):
    """
    Try to operate the given XFoil instance at a specified lift coefficient.

    Parameters
    ----------
    xf : XFoil
        Instance of XFoil class with Airfoil already specified
    cl_spec : float
        Lift coefficient
    consistency_check : bool, optional
        If True (default), the airfoil is analyzed a second time with a panel
        distribution refined around the leading edge. When the two drag results
        agree within 5%, the averaged coefficients are returned; otherwise the
        higher-drag result is kept to be conservative. A NaN first result skips
        the second run entirely (the airfoil is assumed unrealistic).

    Returns
    -------
    cd, cm : float or np.nan
        Drag and moment coefficients or nan if analysis did not complete successfully
    """
    # First pass: standard 240-node panel distribution.
    xf.repanel(n_nodes=240)
    xf.reset_bls()
    _, cd1, cm1, _ = xf.cl(cl_spec)
    if np.isnan(cd1) or not consistency_check:
        return cd1, cm1

    # Second pass: panels refined around the leading edge.
    xf.repanel(n_nodes=240, cv_par=2.0, cte_ratio=0.5)
    xf.reset_bls()
    _, cd2, cm2, _ = xf.cl(cl_spec)

    relative_diff = np.abs(cd2 - cd1) / cd1
    if relative_diff < 0.05:
        # Consistent results: return the average.
        return (cd1 + cd2) / 2.0, (cm1 + cm2) / 2.0
    # Inconsistent: keep the higher-drag (conservative) result.
    return (cd1, cm1) if cd1 > cd2 else (cd2, cm2)
def analyze_airfoil(
    x, y_u, y_l, cl, rey, mach=0, xf=None, pool=None, show_output=False
):
    """
    Analyze an airfoil at a given lift coefficient for given Reynolds and Mach numbers using XFoil.

    Parameters
    ----------
    x : array_like
        Airfoil x-coordinates
    y_u, y_l : array_like
        Airfoil upper and lower curve y-coordinates
    cl : float
        Target lift coefficient
    rey, mach : float
        Reynolds and Mach numbers
    xf : XFoil, optional
        An instance of the XFoil class to use to perform the analysis. Will be created if not given
    pool : multiprocessing.ThreadPool, optional
        An instance of the multiprocessing.Threadpool class used to run the xfoil_worker. Will be created if not given
    show_output : bool, optional
        If True, a debug string will be printed after analyses. False by default.

    Returns
    -------
    cd, cm : float or np.nan
        Drag and moment coefficients of the airfoil at specified conditions, or nan if XFoil did not run successfully
    xf : XFoil or None
        The XFoil instance used, or None if it was created (and destroyed) locally
    """
    # If the lower and upper curves swap, this is a bad, self-intersecting airfoil.
    # Bug fix: return the same (cd, cm, xf) triple as the success path — the
    # original returned a bare np.nan here, which crashed callers that unpack
    # three values (TypeError), and contradicted its own comment and docstring.
    if np.any(y_l > y_u):
        return np.nan, np.nan, xf

    clean_xf = False
    if xf is None:
        xf = XFoil()
        xf.print = show_output
        clean_xf = True
    clean_pool = False
    if pool is None:
        pool = ThreadPool(processes=1)
        clean_pool = True

    # Closed counterclockwise coordinate loop: upper surface TE -> LE, then lower LE -> TE.
    xf.airfoil = Airfoil(
        x=np.concatenate((x[-1:0:-1], x)), y=np.concatenate((y_u[-1:0:-1], y_l))
    )
    xf.Re = rey
    xf.M = mach
    xf.max_iter = 100
    xf.n_crit = 0.1
    cd, cm = pool.apply(xfoil_worker, args=(xf, cl))

    if clean_xf:
        del xf
    if clean_pool:
        del pool
    return cd, cm, None if clean_xf else xf
class XFoilAnalysis(AirfoilComponent):
    """
    Computes the drag coefficient of an airfoil at a given lift coefficient, Reynolds nr., and Mach nr.
    """

    # Numpy string formatter
    array_formatter = {"float_kind": "{: 7.4f}".format}

    def initialize(self):
        super().initialize()
        self.options.declare("print", default=False, types=bool)

        # NOTE(review): these default XFoil and ThreadPool instances are created
        # when initialize() runs and are stored in the options; they look shared
        # between analyses via the "_xf"/"_pool" options — confirm that each
        # component instance gets its own copies when multiple are created.
        xf = XFoil()
        xf.print = False
        self.options.declare("_xf", default=xf, types=XFoil, allow_none=True)

        self.options.declare(
            "_pool", default=ThreadPool(processes=1), types=ThreadPool, allow_none=True
        )
        # Exclude the non-picklable helper objects from case recording.
        self.recording_options["options_excludes"] = ["_xf", "_pool"]

    def setup(self):
        super().setup()

        # Inputs
        self.add_input("Cl_des", val=1.0)
        self.add_input("Re", val=1e6)
        self.add_input("M", val=0.0)

        # Output
        self.add_output("Cd", val=1.0)
        self.add_output("Cm", val=1.0)

    def compute(self, inputs, outputs, **kwargs):
        """Run XFoil at the design Cl and store Cd/Cm (1e27 penalty on failure)."""
        x, y_u, y_l, _, _ = self.compute_coords(inputs)

        t0 = time.time()
        cd, cm, xf = analyze_airfoil(
            x,
            y_u,
            y_l,
            inputs["Cl_des"][0],
            inputs["Re"][0],
            inputs["M"][0],
            self.options["_xf"],
            self.options["_pool"],
            self.options["print"],
        )
        dt = time.time() - t0
        # Keep the (possibly newly created) XFoil instance for reuse.
        self.options["_xf"] = xf

        # NaN means XFoil failed to converge; replace with a huge penalty value
        # so the optimizer discards the design.
        outputs["Cd"] = cd if not np.isnan(cd) else 1e27
        outputs["Cm"] = cm if not np.isnan(cm) else 1e27

        if self.options["print"]:
            print(
                f"{rank:02d} :: "
                + "a_ca: {}, ".format(
                    np.array2string(
                        inputs["a_ca"], separator=", ", formatter=self.array_formatter
                    )
                )
                + "a_th: {}, ".format(
                    np.array2string(
                        inputs["a_th"], separator=", ", formatter=self.array_formatter
                    )
                )
                + f't_te: {inputs["t_te"][0]: 6.4f}, '
                + f"C_d: {cd: 7.4f}, Cm: {cm: 7.4f}, dt: {dt:6.3f}"
            )
import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline
from .airfoil import AirfoilComponent
class Geometry(AirfoilComponent):
    """
    Computes the thickness-over-chord ratio and cross-sectional area of an airfoil.
    """

    def initialize(self):
        super().initialize()
        self.options.declare(
            "n_area_bins",
            default=5,
            lower=1,
            desc="Number of 'bins' to divide to chord into."
            "For each bin, the area enclosed by the upper and lower surface will be computed"
            "as a fraction of the width of the bin. This can be used as a check to avoid very"
            "slender parts of the airfoil.",
        )

    def setup(self):
        super().setup()

        # Outputs
        self.add_output("t_c", val=0.0)    # max thickness-over-chord ratio
        self.add_output("A_cs", val=0.0)   # total cross-sectional area
        self.add_output("r_le", val=0.0)   # leading-edge radius
        self.add_output(
            "A_bins",
            val=np.zeros(self.options["n_area_bins"]),
            desc="Area enclosed by the upper and lower surfaces as a fraction of the bin width.",
        )

    def compute(self, inputs, outputs, discrete_inputs=None, discrete_outputs=None):
        """Evaluate t/c, cross-sectional area, per-bin areas, and LE radius."""
        x, y_u, y_l, _, t = self.compute_coords(inputs)

        # Compute the t/c and cross-sectional area of the airfoil
        outputs["t_c"] = np.max(t)
        outputs["A_cs"] = np.trapz(t, x)

        # Compute the area, as fraction of the bin width,
        # enclosed by the upper and lower surfaces for each bin.
        f_t = InterpolatedUnivariateSpline(x, t)
        n_area_bins = self.options["n_area_bins"]
        dx_bins = 1 / n_area_bins
        outputs["A_bins"] = [
            f_t.integral(i * dx_bins, (i + 1) * dx_bins) / dx_bins
            for i in range(n_area_bins)
        ]

        # Compute the leading edge radius of the airfoil
        # xs/ys traverse the closed curve: upper surface TE -> LE, then lower LE -> TE.
        xs = np.concatenate((np.flip(x), x[1:]))
        ys = np.concatenate((np.flip(y_u), y_l[1:]))
        # Finite-difference curvature kappa = |x'y'' - y'x''| / (x'^2 + y'^2)^(3/2).
        dx = np.gradient(xs)
        dy = np.gradient(ys)
        d2x = np.gradient(dx)
        d2y = np.gradient(dy)
        curvature = np.abs(d2x * dy - dx * d2y) / (dx * dx + dy * dy) ** 1.5
        # NOTE(review): index x.size is one point past the leading edge (the LE
        # point itself sits at index x.size - 1 after the flip) — confirm this
        # offset is intentional, e.g. to avoid endpoint-gradient artifacts.
        if (
            np.isnan(curvature[x.size])
            or np.isinf(curvature[x.size])
            or curvature[x.size] == 0.0
        ):
            # Degenerate curvature -> report zero radius rather than dividing by it.
            outputs["r_le"] = 0.0
        else:
            outputs["r_le"] = 1.0 / curvature[x.size]
import os
import sys
import time
import allure
from selenium import webdriver
from robot.libraries.BuiltIn import RobotNotRunningError
from SeleniumLibrary import SeleniumLibrary
from SeleniumLibrary.keywords import (AlertKeywords,
BrowserManagementKeywords,
CookieKeywords,
ElementKeywords,
FormElementKeywords,
FrameKeywords,
JavaScriptKeywords,
RunOnFailureKeywords,
ScreenshotKeywords,
SelectElementKeywords,
TableElementKeywords,
WaitingKeywords,
WindowKeywords)
from airtest import aircv
from airtest_selenium.proxy import Element, WebChrome, WebFirefox, WebRemote, WebElement
from airtest.core.helper import logwrap
from airobots.core.settings import ST
from airtest.core.cv import Template
from airtest_selenium.utils.airtest_api import loop_find
from typing import Optional, Union, Any
class AirSelenium(
AlertKeywords,
BrowserManagementKeywords,
CookieKeywords,
ElementKeywords,
FormElementKeywords,
FrameKeywords,
JavaScriptKeywords,
RunOnFailureKeywords,
ScreenshotKeywords,
SelectElementKeywords,
TableElementKeywords,
WaitingKeywords,
WindowKeywords):
    def __init__(self, screenshot_root_directory='logs', remote_url=ST.REMOTE_URL, browser=ST.BROWSER, headless=False, alias=None, device=None, executable_path=None, options=None, service_args=None, desired_capabilities=None):
        """
        Selenium keyword library with airtest logging and Allure reporting.

        Supported browser types: Firefox, Chrome, Ie, Opera, Safari,
        PhantomJS; a mobile device can be emulated via ``device``.
        The arguments given here become the defaults for ``open_browser``.
        """
        if browser not in ['Firefox', 'Chrome', 'Ie', 'Opera', 'Safari', 'PhantomJS']:
            raise Exception('浏览器类型不对, 仅可选: Firefox, Chrome, Ie, Opera, Safari, PhantomJS')
        # Remember defaults; open_browser() falls back to these values.
        self.remote_url = remote_url
        self.browser = browser
        self.headless = headless
        self.alias = alias
        self.device = device
        self.executable_path = executable_path
        self.options = options
        self.service_args = service_args
        self.desired_capabilities = desired_capabilities
        self.ctx = SeleniumLibrary(screenshot_root_directory=screenshot_root_directory)
        # Share the screenshot directory with airtest's settings (ST.LOG_DIR).
        self.screenshot_directory = ST.LOG_DIR = self.ctx.screenshot_root_directory
        super(AirSelenium, self).__init__(self.ctx)
@logwrap
@allure.step
def open_browser(
self,
url: Optional[str] = None,
browser: str = "Chrome",
alias: Optional[str] = None,
remote_url: Union[bool, str] = False,
headless: Optional[bool] = False,
options: Any = None,
device: Optional[str] = None,
executable_path: Optional[str] = None,
service_args: Union[dict, None, str] = None,
desired_capabilities: Union[dict, None, str] = None) -> str:
"""
启动浏览器类型可选: Firefox, Chrome, Ie, Opera, Safari, PhantomJS, 可模拟移动设备
"""
if browser not in ['Firefox', 'Chrome', 'Ie', 'Opera', 'Safari', 'PhantomJS']:
raise Exception('浏览器类型不对, 仅可选: Firefox, Chrome, Ie, Opera, Safari, PhantomJS')
remote_url = remote_url or self.remote_url
browser = browser or self.browser
headless = headless or self.headless
alias = alias or self.alias
device = device or self.device
executable_path = executable_path or self.executable_path
options or self.options
service_args = service_args or self.service_args
desired_capabilities = desired_capabilities or self.desired_capabilities
if remote_url:
if browser == 'Chrome':
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--disable-setuid-sandbox')
chrome_options.add_argument('--disable-dev-shm-usage')
if headless:
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-gpu')
if device:
mobile_emulation = {'deviceName': device}
chrome_options.add_experimental_option('mobileEmulation', mobile_emulation)
browser_options = chrome_options
elif browser == 'Firefox':
firefox_options = webdriver.FirefoxOptions()
firefox_options.add_argument('--disable-dev-shm-usage')
if headless:
firefox_options.add_argument('--headless')
firefox_options.add_argument('--disable-gpu')
browser_options = firefox_options
else:
browser_options = options
desired_capabilities = desired_capabilities or {}
desired_capabilities['browserName'] = browser.lower()
driver = WebRemote(command_executor=remote_url, desired_capabilities=desired_capabilities, options=options or browser_options)
# ctx.create_webdriver(driver_name='Remote', alias=alias, command_executor=remote_url, options=options, desired_capabilities=desired_capabilities)
elif browser == 'Chrome':
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--disable-setuid-sandbox')
if headless:
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-gpu')
if device:
mobile_emulation = {'deviceName': device}
chrome_options.add_experimental_option('mobileEmulation', mobile_emulation)
if executable_path:
driver = WebChrome(executable_path=executable_path, options=options or chrome_options, service_args=service_args, desired_capabilities=desired_capabilities)
# ctx.create_webdriver(driver_name=browser, alias=alias, executable_path=executable_path, options=options or chrome_options, service_args=service_args, desired_capabilities=desired_capabilities)
else:
driver = WebChrome(options=options or chrome_options, service_args=service_args, desired_capabilities=desired_capabilities)
# ctx.create_webdriver(driver_name=browser, alias=alias, options=options or chrome_options, service_args=service_args, desired_capabilities=desired_capabilities)
elif browser == 'Firefox':
firefox_options = webdriver.FirefoxOptions()
if headless:
firefox_options.add_argument('--headless')
firefox_options.add_argument('--disable-gpu')
if executable_path:
driver = WebFirefox(executable_path=executable_path, options=options or firefox_options, service_args=service_args, desired_capabilities=desired_capabilities)
# ctx.create_webdriver(driver_name=browser, alias=alias, executable_path=executable_path, options=options or firefox_options, service_args=service_args, desired_capabilities=desired_capabilities)
else:
driver = WebFirefox(options=options or firefox_options, service_args=service_args, desired_capabilities=desired_capabilities)
# ctx.create_webdriver(driver_name=browser, alias=alias, options=options or firefox_options, service_args=service_args, desired_capabilities=desired_capabilities)
else:
if executable_path:
self.create_webdriver(driver_name=browser, alias=alias, executable_path=executable_path, service_args=service_args, desired_capabilities=desired_capabilities)
else:
self.create_webdriver(driver_name=browser, alias=alias, service_args=service_args, desired_capabilities=desired_capabilities)
driver = self.driver
index = self.ctx.register_driver(driver=driver, alias=alias)
if url: self.go_to(url)
return index
    # The wrappers below only delegate to SeleniumLibrary keywords; they add
    # airtest step logging (@logwrap) and an Allure report step (@allure.step).
    @logwrap
    @allure.step
    def close_browser(self):
        return super(AirSelenium, self).close_browser()
    @logwrap
    @allure.step
    def close_all_browsers(self):
        return super(AirSelenium, self).close_all_browsers()
    @logwrap
    @allure.step
    def switch_browser(self, index_or_alias: str):
        return super(AirSelenium, self).switch_browser(index_or_alias)
    @logwrap
    @allure.step
    def switch_window(self, locator: Union[list, str] = "MAIN", timeout: Optional[str] = None, browser: str = 'CURRENT'):
        return super(AirSelenium, self).switch_window(locator=locator, timeout=timeout, browser=browser)
    @logwrap
    @allure.step
    def set_window_size(self, width: int, height: int, inner: bool = False):
        return super(AirSelenium, self).set_window_size(width, height, inner=inner)
    @logwrap
    @allure.step
    def choose_file(self, locator: Union[WebElement, str], file_path: str):
        return super(AirSelenium, self).choose_file(locator, file_path)
    @logwrap
    @allure.step
    def go_back(self):
        return super(AirSelenium, self).go_back()
    @logwrap
    @allure.step
    def press_key(self, locator: Union[WebElement, str], key: str):
        return super(AirSelenium, self).press_key(locator, key)
@logwrap
@allure.step
def press_keys(self, locator: Union[WebElement, None, str] = None, *keys: str):
return super(AirSelenium, self).press_keys(locator=locator, *keys)
    # Delegating wrappers (airtest logging + Allure step), continued.
    @logwrap
    @allure.step
    def select_checkbox(self, locator: Union[WebElement, str]):
        return super(AirSelenium, self).select_checkbox(locator)
    @logwrap
    @allure.step
    def select_radio_button(self, group_name: str, value: str):
        return super(AirSelenium, self).select_radio_button(group_name, value)
    @logwrap
    @allure.step
    def scroll_element_into_view(self, locator: Union[WebElement, str]):
        return super(AirSelenium, self).scroll_element_into_view(locator)
    @logwrap
    @allure.step
    def unselect_checkbox(self, locator: Union[WebElement, str]):
        return super(AirSelenium, self).unselect_checkbox(locator)
    @logwrap
    @allure.step
    def unselect_all_from_list(self, locator: Union[WebElement, str]):
        return super(AirSelenium, self).unselect_all_from_list(locator)
    @logwrap
    def find_element(self, locator, tag=None, required=True, parent=None):
        # Wrap the found WebElement in an airtest Element together with a
        # screenshot record so the step can be replayed in the report.
        # NOTE(review): _gen_screen_log() still takes a screenshot even when
        # the element was not found (required=False) -- confirm intended.
        web_element = super(AirSelenium, self).find_element(locator=locator, tag=tag, required=required, parent=parent)
        log_res=self._gen_screen_log(web_element)
        return web_element and Element(web_element, log_res)
@logwrap
@allure.step
def air_click(self, v):
"""
Perform the click action on the current page by image identification.
Args:
v: target to click, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be clicked.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(v, Template):
_pos = loop_find(v, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos = v
x, y = _pos
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x, y).click().perform()
time.sleep(1)
return _pos
@logwrap
@allure.step
def air_assert(self, v, msg=""):
"""
Assert target exists on the current page.
Args:
v: target to touch, either a Template instance
Raise:
AssertionError - if target not found.
Returns:
Position of the template.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
return self.driver.assert_template(v=v, msg=msg)
@logwrap
@allure.step
def air_double_click(self, v):
"""
Perform the double click action on the current page by image identification.
Args:
v: target to double click, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be double clicked.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(v, Template):
_pos = loop_find(v, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos = v
x, y = _pos
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x, y).double_click().perform()
time.sleep(1)
return _pos
@logwrap
@allure.step
def air_context_click(self, v):
"""
Perform the right click action on the current page by image identification.
Args:
v: target to right click, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be right clicked.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(v, Template):
_pos = loop_find(v, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos = v
x, y = _pos
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x, y).context_click().perform()
time.sleep(1)
return _pos
@logwrap
@allure.step
def air_mouse_up(self, v):
"""
Perform the mouse up action on the current page by image identification.
Args:
v: target to mouse up, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be mouse up.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(v, Template):
_pos = loop_find(v, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos = v
x, y = _pos
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x, y).release().perform()
time.sleep(1)
return _pos
@logwrap
@allure.step
def air_mouse_down(self, v):
"""
Perform the mouse down action on the current page by image identification.
Args:
v: target to mouse down, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be mouse down.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(v, Template):
_pos = loop_find(v, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos = v
x, y = _pos
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x, y).click_and_hold().perform()
time.sleep(1)
return _pos
@logwrap
@allure.step
def air_mouse_over(self, v):
"""
Perform the mouse over action on the current page by image identification.
Args:
v: target to mouse over, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be mouse over.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(v, Template):
_pos = loop_find(v, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos = v
x, y = _pos
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x, y).perform()
time.sleep(1)
return _pos
@logwrap
@allure.step
def air_mouse_out(self, v):
"""
Perform the mouse out action on the current page by image identification.
Args:
v: target to mouse out, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be mouse out.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(v, Template):
_pos = loop_find(v, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos = v
x, y = _pos
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x, y).move_by_offset(0, 0).perform()
time.sleep(1)
return _pos
@logwrap
@allure.step
def air_drag_and_drop(self, s, t):
"""
Perform the drag and drop action on the current page by image identification.
Args:
v: target to drag and drop, either a Template instance or absolute coordinates (x, y)
Returns:
Finial position to be drag and drop.
"""
if not isinstance(self.driver, (WebChrome, WebFirefox, WebRemote)):
raise AssertionError('Use this function, the driver is must be WebChrome, WebFirefox or WebRemote')
if isinstance(s, Template):
_pos_s = loop_find(s, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos_s = s
x_s, y_s = _pos_s
if isinstance(t, Template):
_pos_t = loop_find(t, timeout=ST.FIND_TIMEOUT, driver=self.driver)
else:
_pos_t = t
x_t, y_t = _pos_t
# pos = self.driver._get_left_up_offset()
# pos = (pos[0] + x, pos[1] + y)
self.driver.action_chains.move_by_offset(x_s, y_s).click_and_hold().move_by_offset(x_t, y_t).release().perform()
time.sleep(1)
return _pos_s, _pos_t
    # Delegating wrappers (airtest logging + Allure step) for common actions.
    @logwrap
    @allure.step
    def click_element(self, locator, modifier=False, action_chain=False):
        super(AirSelenium, self).click_element(locator=locator, modifier=modifier, action_chain=action_chain)
    @logwrap
    @allure.step
    def click_link(self, locator, modifier=False):
        super(AirSelenium, self).click_link(locator=locator, modifier=modifier)
    @logwrap
    @allure.step
    def click_image(self, locator, modifier=False):
        super(AirSelenium, self).click_image(locator=locator, modifier=modifier)
    @logwrap
    @allure.step
    def click_button(self, locator, modifier=False):
        super(AirSelenium, self).click_button(locator=locator, modifier=modifier)
    @logwrap
    @allure.step
    def input_text(self, locator, text, clear=True):
        super(AirSelenium, self).input_text(locator=locator, text=text, clear=clear)
    @logwrap
    @allure.step
    def input_password(self, locator, password, clear=True):
        super(AirSelenium, self).input_password(locator=locator, password=password, clear=clear)
    @logwrap
    @allure.step
    def double_click_element(self, locator):
        super(AirSelenium, self).double_click_element(locator=locator)
    @logwrap
    @allure.step
    def page_should_contain(self, text, loglevel='TRACE'):
        super(AirSelenium, self).page_should_contain(text=text, loglevel=loglevel)
    @logwrap
    @allure.step
    def page_should_not_contain(self, text, loglevel='TRACE'):
        super(AirSelenium, self).page_should_not_contain(text=text, loglevel=loglevel)
    @logwrap
    @allure.step
    def open_context_menu(self, locator):
        super(AirSelenium, self).open_context_menu(locator=locator)
    @logwrap
    @allure.step
    def mouse_up(self, locator):
        super(AirSelenium, self).mouse_up(locator=locator)
    @logwrap
    @allure.step
    def mouse_down(self, locator):
        super(AirSelenium, self).mouse_down(locator=locator)
    @logwrap
    @allure.step
    def mouse_over(self, locator):
        super(AirSelenium, self).mouse_over(locator=locator)
    @logwrap
    @allure.step
    def mouse_out(self, locator):
        super(AirSelenium, self).mouse_out(locator=locator)
    @logwrap
    @allure.step
    def drag_and_drop(self, locator, target):
        super(AirSelenium, self).drag_and_drop(locator=locator, target=target)
    @logwrap
    @allure.step
    def drag_and_drop_by_offset(self, locator, xoffset, yoffset):
        super(AirSelenium, self).drag_and_drop_by_offset(locator=locator, xoffset=xoffset, yoffset=yoffset)
    @logwrap
    @allure.step
    def go_to(self, url):
        super(AirSelenium, self).go_to(url=url)
def screenshot(self, file_path=None):
if file_path:
file = self.capture_page_screenshot(file_path)
with open(file, 'rb') as fp:
allure.attach(fp.read(), '截图{}'.format(file_path), allure.attachment_type.PNG)
else:
if not self.screenshot_directory:
file_path = "temp.png"
else:
file_path = os.path.join('', "temp.png")
file = self.capture_page_screenshot(file_path)
with open(file, 'rb') as fp:
allure.attach(fp.read(), '截图{}'.format(file_path), allure.attachment_type.PNG)
screen = aircv.imread(file_path)
return screen
    def _gen_screen_log(self, element=None, filename=None,):
        """Take a screenshot and return airtest log metadata for it.

        Returns None when no screenshot directory is configured; otherwise a
        dict with the screenshot file name and, when ``element`` is given,
        the element's center position.
        """
        if self.screenshot_directory is None:
            return None
        if filename:
            # NOTE(review): this screenshot is taken in addition to the
            # timestamped one below -- confirm the duplication is intended.
            self.screenshot(filename)
        jpg_file_name=str(int(time.time())) + '.png'
        jpg_path=os.path.join('', jpg_file_name)
        self.screenshot(jpg_path)
        saved={"screen": jpg_file_name}
        if element:
            size=element.size
            location=element.location
            # Center of the element in page coordinates.
            x=size['width'] / 2 + location['x']
            y=size['height'] / 2 + location['y']
            if "darwin" in sys.platform:
                # macOS retina: CSS pixels map to 2x physical screenshot pixels.
                x, y=x * 2, y * 2
            saved.update({"pos": [[x, y]]})
        return saved
@property
def log_dir(self):
try:
if os.path.isdir(self.screenshot_directory):
return os.path.abspath(self.screenshot_directory)
else:
os.makedirs(self.screenshot_directory)
return os.path.abspath(self.screenshot_directory)
except RobotNotRunningError:
return os.getcwd() | AirobotLibrary | /AirobotLibrary-1.2.4.tar.gz/AirobotLibrary-1.2.4/src/airobotLibrary/air_selenium.py | air_selenium.py |
from AppiumLibrary import AppiumLibrary
from airobots.core.settings import ST
from airtest import aircv
from airobots.core.api import G
from appium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from appium.webdriver.common.touch_action import TouchAction
from appium.webdriver.common.multi_action import MultiAction
import allure
import traceback
import base64
import time
class AirAppium(AppiumLibrary):
    def __init__(self, timeout=5):
        # Default implicit-wait timeout (seconds) passed through to AppiumLibrary.
        super(AirAppium, self).__init__(timeout=timeout)
    @allure.step
    def open_application(self, remote_url=ST.REMOTE_URL, alias=None, platformName=ST.PLATFORM_NAME, **kwargs):
        """Open an Appium session and register it as an airtest device.

        Extra keyword arguments become desired capabilities.  Returns the
        cache index/alias of the opened application.
        """
        kwargs['platformName'] = platformName
        desired_caps = kwargs
        application = webdriver.Remote(str(remote_url), desired_caps)
        self._debug('Opened application with session id %s' % application.session_id)
        self.driver = application
        # Graft airtest's device API onto the Appium driver so airtest-style
        # calls (touch, swipe, snapshot, ...) work against this session.
        # Original driver methods are preserved under an `air_` prefix.
        self.driver.home = self.home
        self.driver.snapshot = self.snapshot
        self.driver.text = self.text
        self.driver.air_keyevent = self.driver.keyevent
        self.driver.keyevent = self.air_keyevent
        self.driver.double_click = self.double_click
        self.driver.click = self.touch
        self.driver.touch = self.touch
        self.driver.wake = self.wake
        self.driver.uninstall_app = self.uninstall_app
        self.driver.clear_app = self.clear_app
        self.driver.stop_app = self.stop_app
        self.driver.start_app = self.start_app
        self.driver.shell = self.shell
        self.driver.air_swipe = self.driver.swipe
        self.driver.pinch = self.air_pinch
        self.driver.zoom = self.air_zoom
        self.driver.swipe = self.air_swipe
        self.driver.get_current_resolution = self.get_current_resolution
        # Make the session visible to airtest's global device registry.
        G.add_device(self.driver)
        return self._cache.register(application, alias)
    # The wrappers below only delegate to AppiumLibrary keywords; they add an
    # Allure report step (@allure.step) around each call.
    @allure.step
    def close_application(self):
        return super(AirAppium, self).close_application()
    @allure.step
    def close_all_applications(self):
        return super(AirAppium, self).close_all_applications()
    @allure.step
    def switch_application(self, index_or_alias):
        return super(AirAppium, self).switch_application(index_or_alias=index_or_alias)
    @allure.step
    def switch_to_context(self, context_name):
        return super(AirAppium, self).switch_to_context(context_name=context_name)
    @allure.step
    def go_back(self):
        return super(AirAppium, self).go_back()
    @allure.step
    def press_keycode(self, keycode, metastate=None):
        return super(AirAppium, self).press_keycode(keycode=keycode, metastate=metastate)
    @allure.step
    def scroll(self, start_locator, end_locator):
        return super(AirAppium, self).scroll(start_locator=start_locator, end_locator=end_locator)
    @allure.step
    def scroll_down(self, locator):
        return super(AirAppium, self).scroll_down(locator=locator)
    @allure.step
    def scroll_up(self, locator):
        return super(AirAppium, self).scroll_up(locator=locator)
    @allure.step
    def click_a_point(self, x=0, y=0, duration=100):
        return super(AirAppium, self).click_a_point(x=x, y=y, duration=duration)
    @allure.step
    def click_element(self, locator):
        return super(AirAppium, self).click_element(locator=locator)
    @allure.step
    def click_button(self, index_or_name):
        return super(AirAppium, self).click_button(index_or_name=index_or_name)
    @allure.step
    def click_text(self, text, exact_match=False):
        return super(AirAppium, self).click_text(text=text, exact_match=exact_match)
    @allure.step
    def long_press(self, locator, duration=1000):
        return super(AirAppium, self).long_press(locator=locator, duration=duration)
    @allure.step
    def long_press_keycode(self, keycode, metastate=None):
        return super(AirAppium, self).long_press_keycode(keycode=keycode, metastate=metastate)
    @allure.step
    def input_text(self, locator, text):
        return super(AirAppium, self).input_text(locator=locator, text=text)
    @allure.step
    def input_password(self, locator, text):
        return super(AirAppium, self).input_password(locator=locator, text=text)
    @allure.step
    def input_value(self, locator, text):
        return super(AirAppium, self).input_value(locator=locator, text=text)
    @allure.step
    def install_app(self, app_path, app_package):
        return super(AirAppium, self).install_app(app_path=app_path, app_package=app_package)
    @allure.step
    def shake(self):
        return super(AirAppium, self).shake()
    @allure.step
    def swipe(self, start_x, start_y, offset_x, offset_y, duration=1000):
        return super(AirAppium, self).swipe(start_x=start_x, start_y=start_y, offset_x=offset_x, offset_y=offset_y, duration=duration)
    @allure.step
    def tap(self, locator, x_offset=None, y_offset=None, count=1):
        return super(AirAppium, self).tap(locator=locator, x_offset=x_offset, y_offset=y_offset, count=count)
    @allure.step
    def touch_id(self, match=True):
        return super(AirAppium, self).touch_id(match=match)
    @allure.step
    def pinch(self, locator, percent="200%", steps=1):
        return super(AirAppium, self).pinch(locator=locator, percent=percent, steps=steps)
    @allure.step
    def zoom(self, locator, percent="200%", steps=1):
        return super(AirAppium, self).zoom(locator=locator, percent=percent, steps=steps)
    @allure.step
    def page_should_contain_text(self, text, loglevel='INFO'):
        return super(AirAppium, self).page_should_contain_text(text=text, loglevel=loglevel)
    @allure.step
    def page_should_contain_element(self, locator, loglevel='INFO'):
        return super(AirAppium, self).page_should_contain_element(locator=locator, loglevel=loglevel)
    @allure.step
    def page_should_not_contain_element(self, locator, loglevel='INFO'):
        return super(AirAppium, self).page_should_not_contain_element(locator=locator, loglevel=loglevel)
    @allure.step
    def page_should_not_contain_text(self, text, loglevel='INFO'):
        return super(AirAppium, self).page_should_not_contain_text(text=text, loglevel=loglevel)
    @allure.step
    def text_should_be_visible(self, text, exact_match=False, loglevel='INFO'):
        return super(AirAppium, self).text_should_be_visible(text=text, exact_match=exact_match, loglevel=loglevel)
    @allure.step
    def element_text_should_be(self, locator, expected, message=''):
        return super(AirAppium, self).element_text_should_be(locator=locator, expected=expected, message=message)
    @allure.step
    def element_value_should_be(self, locator, expected):
        return super(AirAppium, self).element_value_should_be(locator=locator, expected=expected)
    @allure.step
    def element_should_be_visible(self, locator, loglevel='INFO'):
        return super(AirAppium, self).element_should_be_visible(locator=locator, loglevel=loglevel)
    @allure.step
    def element_should_be_enabled(self, locator, loglevel='INFO'):
        return super(AirAppium, self).element_should_be_enabled(locator=locator, loglevel=loglevel)
    @allure.step
    def element_should_be_disabled(self, locator, loglevel='INFO'):
        return super(AirAppium, self).element_should_be_disabled(locator=locator, loglevel=loglevel)
    @allure.step
    def element_should_not_contain_text(self, locator, expected, message=''):
        return super(AirAppium, self).element_should_not_contain_text(locator=locator, expected=expected, message=message)
    @allure.step
    def element_should_contain_text(self, locator, expected, message=''):
        return super(AirAppium, self).element_should_contain_text(locator=locator, expected=expected, message=message)
    @allure.step
    def capture_page_screenshot(self, filename=None):
        # Capture via AppiumLibrary, then attach the image to the Allure report.
        file = super(AirAppium, self).capture_page_screenshot(filename=filename)
        with open(file, 'rb') as fp:
            allure.attach(fp.read(), '截图{}'.format(filename or ''), allure.attachment_type.PNG)
        return file
    def _get_log_dir(self):
        # AppiumLibrary hook: store screenshots in airtest's log directory.
        return ST.LOG_DIR
    def _run_on_failure(self):
        """Attach a failure screenshot to the Allure report when a keyword fails."""
        if self._run_on_failure_keyword is None:
            return
        if self._running_on_failure_routine:
            # Re-entrancy guard: taking the screenshot may itself fail.
            return
        self._running_on_failure_routine = True
        try:
            file = self.driver.get_screenshot_as_base64()
            allure.attach(base64.b64decode(file), '异常截图', allure.attachment_type.PNG)
        except Exception as err:
            self._run_on_failure_error(err)
        finally:
            self._running_on_failure_routine = False
def _click_element_by_name(self, name):
driver = self._current_application()
try:
element = driver.find_element_by_name(name)
except Exception as e:
raise e
try:
element.click()
except Exception as e:
raise Exception('Cannot click the element with name "%s"' % name)
    def _find_element_by_class_name(self, class_name, index_or_name):
        """Find one element of ``class_name``, selected either by an
        ``index=N`` locator or by its exact visible text.

        Raises when the index is out of range or no element has that text.
        """
        elements = self._find_elements_by_class_name(class_name)
        if self._is_index(index_or_name):
            try:
                # index_or_name looks like "index=3".
                index = int(index_or_name.split('=')[-1])
                element = elements[index]
            except (IndexError, TypeError):
                raise Exception('Cannot find the element with index "%s"' % index_or_name)
        else:
            found = False
            for element in elements:
                self._info("'%s'." % element.text)
                if element.text == index_or_name:
                    found = True
                    break
            if not found:
                raise Exception('Cannot find the element with name "%s"' % index_or_name)
        return element
def _click_element_by_class_name(self, class_name, index_or_name):
element = self._find_element_by_class_name(class_name, index_or_name)
self._info("Clicking element '%s'." % element.text)
try:
element.click()
except Exception as e:
raise Exception('Cannot click the %s element "%s"' % (class_name, index_or_name))
def _element_input_text_by_class_name(self, class_name, index_or_name, text):
try:
element = self._find_element_by_class_name(class_name, index_or_name)
except Exception as e:
raise e
self._info("input text in element as '%s'." % element.text)
try:
element.send_keys(text)
except Exception as e:
raise Exception('Cannot input text "%s" for the %s element "%s"' % (text, class_name, index_or_name))
def home(self):
if self._is_ios():
return self.driver.press_button("home")
elif self._is_android():
return self.driver.press_keycode(3)
else:
raise Exception('Unsupport this keyword')
    def snapshot(self, filename=None, strType=False, quality=10, max_size=None, **kwargs):
        """Take a device screenshot for airtest.

        Returns raw PNG bytes when ``strType`` is true, otherwise an aircv
        (OpenCV) image; optionally writes the image to ``filename``.
        Returns None when the PNG data cannot be decoded.
        """
        if self._is_ios() or self._is_android():
            value = self.driver.get_screenshot_as_base64()
            data = base64.b64decode(value)
            if strType:
                if filename:
                    with open(filename, 'wb') as f:
                        f.write(data)
                return data
            # output cv2 object
            try:
                screen = aircv.utils.string_2_img(data)
            except:
                # may be black/locked screen or other reason, print exc for debugging
                traceback.print_exc()
                return None
            # save as file if needed
            if filename:
                # quality/max_size control the compression of the saved copy.
                aircv.imwrite(filename, screen, quality, max_size=max_size)
            return screen
        else:
            raise Exception('Unsupport this keyword')
    def text(self, text, enter=True, locator=None, **kwargs):
        """Type ``text`` into the element at ``locator``; append Enter by default."""
        if locator is not None:
            if enter:
                text += '\n'
            element = self._element_find(locator, True, True)
            ActionChains(self.driver).send_keys_to_element(element, text).perform()
        else:
            # NOTE(review): typing without a target locator is not implemented.
            raise Exception('Unsupport this keyword')
    def air_keyevent(self, keyname, **kwargs):
        # iOS: press the named hardware button; Android: send the keyevent
        # through `adb shell input keyevent` (name upper-cased, e.g. HOME).
        if self._is_ios():
            return self.driver.press_button(keyname)
        elif self._is_android():
            self.shell("input keyevent {}".format(keyname.upper()))
        else:
            raise Exception('Unsupport this keyword')
def double_click(self, pos):
if self._is_ios() or self._is_android():
self.touch(pos)
time.sleep(0.05)
self.touch(pos)
else:
raise Exception('Unsupport this keyword')
def touch(self, pos, duration=0, **kwargs):
if not isinstance(pos, (list, tuple)):
raise Exception('params pos is must be tuple or list, but pos is {}'.format(type(pos)))
if self._is_ios() or self._is_android():
self.click_a_point(x=pos[0], y=pos[1], duration=duration*1000)
else:
raise Exception('Unsupport this keyword')
def wake(self):
if self._is_ios() or self._is_android():
self.home()
else:
raise Exception('Unsupport this keyword')
def uninstall_app(self, package):
if self._is_ios() or self._is_android():
self.driver.remove_app(package)
else:
raise Exception('Unsupport this keyword')
def clear_app(self, package):
if self._is_ios() or self._is_android():
self.stop_app(package)
self.start_app(package)
else:
raise Exception('Unsupport this keyword')
def stop_app(self, package):
    """Terminate the running app identified by *package*."""
    if not (self._is_ios() or self._is_android()):
        raise Exception('Unsupport this keyword')
    self.driver.terminate_app(package)
def start_app(self, package, activity=None):
    """Launch the app identified by *package*.

    NOTE(review): the *activity* argument is accepted but never used —
    only the package id reaches ``activate_app``; confirm whether an
    activity-based launch is required on Android.
    """
    if self._is_ios() or self._is_android():
        self.driver.activate_app(package)
    else:
        raise Exception('Unsupport this keyword')
def shell(self, cmd):
    """Run an adb shell command on an Android device.

    Returns whatever ``execute_adb_shell`` returns so callers can inspect
    the command output (previously the result was silently discarded;
    callers that ignore the return value are unaffected).
    """
    if self._is_android():
        return self.execute_adb_shell(cmd)
    raise Exception('Unsupport this keyword')
def air_pinch(self, center=None, percent=0.5, duration=0.05, steps=1, in_or_out='in', element=None, **kwargs):
    """Perform a two-finger pinch ('in') or spread ('out') gesture.

    The gesture center defaults to the screen center; it can be
    overridden by *element* (its location) or by an explicit *center*
    (x, y) pair, the latter taking precedence when both are non-zero.
    NOTE(review): *percent* is accepted but never used — the outer
    finger endpoints are fixed at 20% / 80% of the screen width;
    confirm whether the distance should scale with *percent*.
    """
    x, y = cx, cy = (0, 0)
    if element:
        element_location = element.location
        x, y = element_location.get('x'), element_location.get('y')
    if isinstance(center, (list, tuple)): cx, cy = center
    width, height = self.get_current_resolution()
    # Fall back to the screen center when neither element nor center
    # produced usable coordinates; an explicit center wins otherwise.
    if x == y == cx == cy == 0: x, y = width/2, height/2
    elif cx and cy: x, y = cx, cy
    # Outer endpoints of the two fingers, on the same horizontal line as y.
    p1x, p1y = width*0.2, y
    p2x, p2y = width*0.8, y
    p1 = TouchAction(self.driver)
    p2 = TouchAction(self.driver)
    if in_or_out == 'out':
        # Spread: both fingers start at the center and move apart.
        p1.press(x=x, y=y).wait(500).move_to(x=p1x, y=p1y).wait(duration*1000).release()
        p2.press(x=x, y=y).wait(500).move_to(x=p2x, y=p2y).wait(duration*1000).release()
    else:
        # Pinch: fingers start at the outer points and converge.
        p1.press(x=p1x, y=p1y).wait(500).move_to(x=x, y=y).wait(duration*1000).release()
        p2.press(x=p2x, y=p2y).wait(500).move_to(x=x, y=y).wait(duration*1000).release()
    # Replay the same two-finger action `steps` times.
    for _ in range(steps):
        ma = MultiAction(self.driver)
        ma.add(p1, p2)
        ma.perform()
def air_zoom(self, element, percent="200%", steps=1):
    """Zoom on *element* by spreading two fingers outward from its location.

    NOTE(review): *percent* is accepted but never used; the spread always
    goes from the gesture center to 20% / 80% of the screen width —
    confirm the intended semantics of the "200%" default.
    """
    element_location = element.location
    x, y = element_location.get('x'), element_location.get('y')
    width, height = self.get_current_resolution()
    # A (0, 0) location is treated as "unknown" -> use the screen center.
    if x == y == 0: x, y = width/2, height/2
    p1x, p1y = width*0.2, y
    p2x, p2y = width*0.8, y
    p1 = TouchAction(self.driver)
    p2 = TouchAction(self.driver)
    # Both fingers press at the center, then move apart horizontally.
    p1.press(x=x, y=y).wait(500).move_to(x=p1x, y=p1y).wait(50).release()
    p2.press(x=x, y=y).wait(500).move_to(x=p2x, y=p2y).wait(50).release()
    for _ in range(steps):
        ma = MultiAction(self.driver)
        ma.add(p1, p2)
        ma.perform()
def air_swipe(self, start_x=None, start_y=None, offset_x=None, offset_y=None, duration=0.1, **kwargs):
    """Swipe from (start_x, start_y) to (offset_x, offset_y).

    Alternate calling convention: when *start_x* is an (x, y) pair, the
    second positional argument (*start_y*) is treated as the END point
    pair, and *duration* is interpreted in seconds and converted to
    milliseconds before being forwarded to the driver.
    """
    if self._is_ios() or self._is_android():
        if isinstance(start_x, (list, tuple)):
            duration = float(duration) * 1000
            # In tuple form `start_y` actually holds the end point
            # (may be None/falsy, in which case no end point is passed).
            offset_x = start_y and start_y[0]
            offset_y = start_y and start_y[1]
            start_y = start_x[1]
            start_x = start_x[0]
        self.driver.air_swipe(start_x=start_x, start_y=start_y, end_x=offset_x, end_y=offset_y, duration=duration, **kwargs)
    else:
        raise Exception('Unsupport this keyword')
def get_current_resolution(self):
    """Return the device screen size as a (width, height) tuple."""
    dims = self.driver.get_window_size()
    return dims.get('width'), dims.get('height')
MIT License
Copyright (c) 2020
Kandavel A <[email protected]>,
Mohanasundar M <[email protected]> and
Nanda H Krishna <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| Airshare | /Airshare-0.1.6.tar.gz/Airshare-0.1.6/LICENSE.md | LICENSE.md |
<h1 align="center">
<br>
<img src="https://raw.githubusercontent.com/KuroLabs/Airshare/master/assets/Airshare.svg" alt="Airshare" width="100">
<br>
<br>
<span>Airshare</span>
<br>
<br>
<a href="https://pypi.org/project/Airshare">
<img alt="PyPI" src="https://img.shields.io/pypi/v/Airshare" />
</a>
<a href="https://pypi.org/project/Airshare">
<img alt="PyPI" src="https://static.pepy.tech/badge/airshare" />
</a>
<a href="https://pypi.org/project/Airshare">
<img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/Airshare" />
</a>
<a href="https://github.com/KuroLabs/Airshare/blob/master/LICENSE.md">
<img alt="PyPI - License" src="https://img.shields.io/pypi/l/Airshare">
</a>
<a href="https://airshare.readthedocs.io/en/latest/?badge=latest">
<img src="https://readthedocs.org/projects/airshare/badge/?version=latest" alt="Documentation Status" />
</a>
</h1>
<h4 align="center">Cross-platform content sharing in a local network.</h4>
<p align="justify"><b>Airshare</b> is a Python-based CLI tool and module that lets you transfer data between two machines in a local network, P2P, using Multicast-DNS. It also opens an HTTP gateway for other non-CLI external interfaces. It works completely offline! Built with aiohttp and zeroconf. Checkout the <a href="https://www.youtube.com/watch?v=iJH6bkLRdSw">demo</a>.</p>
## Features
* Blazing fast content transfer within a local network.
* Lets you transfer plain text, send from or receive into your clipboard.
* Supports transfer of multiple files, directories and large files - content is sent chunk by chunk and never read into memory entirely.
* Lets you send files whose paths have been copied into the clipboard (more details in the docs).
* Cross-platform, works on Linux, Windows and Mac (CLI and Web Interface), and also supports mobile (Web Interface).
* Uses Multicast-DNS service registration and discovery - so you can access content with human-readable code words.
* Can be used as a module in other Python programs.

## Important Links
Source Code: https://github.com/KuroLabs/Airshare <br>
Bug Reports: https://github.com/KuroLabs/Airshare/issues <br>
Documentation: https://airshare.rtfd.io <br>
PyPI: https://pypi.org/project/Airshare <br>
## Installation
### [pip](https://pip.pypa.io/en/stable/)
```bash
$ pip install Airshare
```
### [pipx](https://pipxproject.github.io/pipx/)
```bash
$ pipx install Airshare
```
### [Homebrew](https://brew.sh)
```bash
$ brew install airshare
```
## Example
Send and receive files and directories.
To send using the CLI,
```bash
$ airshare noobmaster requirements.txt
```
To receive using the CLI,
```bash
$ airshare noobmaster
```
or visit `http://noobmaster.local:8000` in the browser to download.
You can also `import airshare` in any Python program. Visit the documentation for detailed usage instructions.
## Known Issues
* Link-local Name Resolution (for the `.local` addresses) on non-Apple devices requires Avahi (on Linux) or Bonjour (on Windows). Chances are you already have them, but if you don't, do check the web on how to install them.
* Android browsers do not have inbuilt Multicast-DNS service discovery, and cannot resolve the `.local` addresses. For this reason, we included QR Code support, for you to visit the URLs easily.
* Windows users with Python < 3.8, use <kbd>Ctrl</kbd> + <kbd>Break</kbd> to quit, as <kbd>Ctrl</kbd> + <kbd>C</kbd> will not work. This is a known issue with `asyncio`, which has been fixed in Python 3.8. If you do not have a <kbd>Break</kbd> key, try using <kbd>Ctrl</kbd> + <kbd>Fn</kbd> + <kbd>B</kbd>, or check the web for other alternatives (depending on your PC).
## Contributing
Contributions are welcome! Read our [Contribution Guide](https://github.com/KuroLabs/Airshare/blob/master/CONTRIBUTING.md) for more details.
## License
[MIT](https://github.com/KuroLabs/Airshare/blob/master/LICENSE.md) - Copyright (c) 2020 [Kandavel A](http://github.com/AK5123), [Mohanasundar M](https://github.com/mohanpierce99), [Nanda H Krishna](https://github.com/nandahkrishna)
## Acknowledgements
The Airshare logo was designed by [Siddique](https://dribbble.com/thesideeq).
The Airshare GIF was created by [Anam Saatvik](https://github.com/kalki7).
| Airshare | /Airshare-0.1.6.tar.gz/Airshare-0.1.6/README.md | README.md |
<h1 align="center">
<br>
<img src="https://raw.githubusercontent.com/KuroLabs/Airshare/master/assets/Airshare.svg" alt="Airshare" width="100">
<br>
<br>
<span>Airshare</span>
<br>
<br>
</h1>
<h4 align="center">Cross-platform content sharing in a local network.</h4>
<p align="justify"><b>Airshare</b> is a Python-based CLI tool and module that lets you transfer data between two machines in a local network, P2P, using Multicast-DNS. It also opens an HTTP gateway for other non-CLI external interfaces. It works completely offline! Built with aiohttp and zeroconf.</p>
## Features
* Blazing fast content transfer within a local network.
* Lets you transfer plain text, send from or receive into your clipboard.
* Supports transfer of multiple files, directories and large files - content is sent chunk by chunk and never read into memory entirely.
* Lets you send files whose paths have been copied into the clipboard (more details in the docs).
* Cross-platform, works on Linux, Windows and Mac (CLI and Web Interface), and also supports mobile (Web Interface).
* Uses Multicast-DNS service registration and discovery - so you can access content with human-readable code words.
* Can be used as a module in other Python programs.
## Important Links
Source Code: https://github.com/KuroLabs/Airshare <br>
Bug Reports: https://github.com/KuroLabs/Airshare/issues <br>
Documentation: https://airshare.rtfd.io <br>
PyPI: https://pypi.org/project/Airshare <br>
## Installation
* Install using `pip`
```bash
$ pip install Airshare
```
* Install using `pipx`
```bash
$ pipx install Airshare
```
* Install using Homebrew
```bash
$ brew install airshare
```
## Example
Send and receive files and directories.
To send using the CLI,
```bash
$ airshare noobmaster requirements.txt
```
To receive using the CLI,
```bash
$ airshare noobmaster
```
or visit `http://noobmaster.local:8000` in the browser to download.
You can also `import airshare` in any Python program. Visit the documentation for detailed usage instructions.
## Known Issues
* Link-local Name Resolution (for the `.local` addresses) on non-Apple devices requires Avahi (on Linux) or Bonjour (on Windows). Chances are you already have them, but if you don't, do check the web on how to install them.
* Android browsers do not have inbuilt Multicast-DNS service discovery, and cannot resolve the `.local` addresses. For this reason, we included QR Code support, for you to visit the URLs easily.
* Windows users with Python < 3.8, use <kbd>Ctrl</kbd> + <kbd>Break</kbd> to quit, as <kbd>Ctrl</kbd> + <kbd>C</kbd> will not work. This is a known issue with `asyncio`, which has been fixed in Python 3.8. If you do not have a <kbd>Break</kbd> key, try using <kbd>Ctrl</kbd> + <kbd>Fn</kbd> + <kbd>B</kbd>, or check the web for other alternatives (depending on your PC).
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
## License
[MIT](https://github.com/KuroLabs/Airshare/blob/master/LICENSE.md) - Copyright (c) 2020 [Kandavel A](http://github.com/AK5123), [Mohanasundar M](https://github.com/mohanpierce99), [Nanda H Krishna](https://github.com/nandahkrishna)
## Acknowledgements
The Airshare logo was designed by [Siddique](https://dribbble.com/thesideeq).
The Airshare GIF was created by [Anam Saatvik](https://github.com/kalki7).
| Airshare | /Airshare-0.1.6.tar.gz/Airshare-0.1.6/PIP.md | PIP.md |
from aiohttp import web
import asyncio
import humanize
from multiprocessing import Process
import os
import pkgutil
import platform
import requests
import socket
import sys
from time import sleep, strftime
from tqdm import tqdm
from zipfile import is_zipfile
from .exception import CodeExistsError, CodeNotFoundError, IsNotSenderError
from .utils import get_local_ip_address, get_service_info, qr_code, \
register_service, unzip_file
# Public names exported by `from airshare.receiver import *`.
__all__ = ["receive", "receive_server", "receive_server_proc"]
# Request handlers
async def _upload_page(request):
    """Render the bundled static upload page. GET handler for route '/'."""
    page_html = pkgutil.get_data(__name__, "static/upload.html").decode()
    return web.Response(text=page_html, content_type="text/html")
async def _uploaded_file_receiver(request):
    """Receives an uploaded file. POST handler for '/upload'.

    Streams the first multipart field chunk-by-chunk to the current
    working directory (the file is never held fully in memory), avoids
    clobbering existing files by appending a timestamp, and optionally
    decompresses Zip archives.
    """
    # Borrow one of the pre-seeded tqdm row slots so concurrent uploads
    # render on distinct lines; the slot is returned when done.
    progress_queue = request.app["progress_queue"]
    tqdm_position = await progress_queue.get()
    decompress = request.app["decompress"]
    # The sender can force decompression via a custom header.
    compress_header = request.headers.get("airshare-compress") or "false"
    if compress_header == "true":
        decompress = True
    total = 0  # bytes received so far
    reader = await request.multipart()
    field = await reader.next()
    file_name = field.filename.replace("'", "")
    file_path = os.getcwd() + os.path.sep + file_name
    # Name collision: append a timestamp instead of overwriting.
    if os.path.isfile(file_path):
        file_name, file_ext = os.path.splitext(file_name)
        file_name = file_name + "-" + strftime("%Y%m%d%H%M%S") + file_ext
        file_path = os.getcwd() + os.path.sep + file_name
    desc = "Downloading `" + file_name + "`"
    bar = tqdm(desc=desc, total=None, unit="B", unit_scale=1,
               position=tqdm_position, leave=False)
    with open(file_path, "wb") as f:
        while True:
            chunk = await field.read_chunk()
            if not chunk:
                break
            total += len(chunk)
            f.write(chunk)
            bar.update(len(chunk))
    # Return the progress-bar slot for the next upload.
    await progress_queue.put(tqdm_position)
    if is_zipfile(file_path) and decompress:
        zip_dir = unzip_file(file_path)
        tqdm.write("Downloaded and decompressed to `" + zip_dir + "`!")
        os.remove(file_path)
    else:
        tqdm.write("Downloaded `" + file_name + "`!")
    file_name = field.filename
    file_size = humanize.naturalsize(total)
    text = "{} ({}) successfully received!".format(file_name, file_size)
    return web.Response(text=text)
async def _is_airshare_upload_receiver(request):
    """Identify this service as an upload receiver.

    GET handler for '/airshare'; peers use the response body to tell
    senders and receivers apart.
    """
    return web.Response(text="Upload Receiver")
# Receiver functions
def receive(*, code, decompress=False):
    r"""Receive file(s) from a sending server.

    Parameters
    ----------
    code : str
        Identifying code for the Airshare sending server.
    decompress : boolean, default=False
        Flag to enable or disable decompression (Zip).

    Returns
    -------
    text (or) file_path : str
        Returns the text or path of the file received, if successful.

    Raises
    ------
    CodeNotFoundError
        If no mDNS service is registered under *code*.
    IsNotSenderError
        If the peer at *code* is not a sender.
    """
    # Resolve the mDNS service registered under `code`.
    info = get_service_info(code)
    if info is None:
        raise CodeNotFoundError(code)
    ip = socket.inet_ntoa(info.addresses[0])
    url = "http://" + ip + ":" + str(info.port)
    # The peer identifies itself as "Text Sender" or "File Sender".
    airshare_type = requests.get(url + "/airshare").text
    if "Sender" not in airshare_type:
        raise IsNotSenderError(code)
    print("Receiving from Airshare `" + code + "`...")
    sleep(2)
    if airshare_type == "Text Sender":
        text = requests.get(url + "/text").text
        print("Received: " + text)
        return text
    elif airshare_type == "File Sender":
        # Stream the download so large files are never fully in memory.
        with requests.get(url + "/download", stream=True) as r:
            r.raise_for_status()
            header = r.headers["content-disposition"]
            # The sender can force decompression via a custom header.
            compress_header = r.headers.get("airshare-compress") or "false"
            if compress_header == "true":
                decompress = True
            file_name = header.split("; ")[1].split("=")[1] \
                .replace("'", "")
            file_path = os.getcwd() + os.path.sep + file_name
            # The sender appends the size after the last '=' in the header.
            file_size = int(header.split("=")[-1])
            # Name collision: append a timestamp instead of overwriting.
            if os.path.isfile(file_path):
                file_name, file_ext = os.path.splitext(file_name)
                file_name += "-" + strftime("%Y%m%d%H%M%S") + file_ext
                file_path = os.getcwd() + os.path.sep + file_name
            with open(file_path, "wb") as f:
                desc = "Downloading `" + file_name + "`"
                bar = tqdm(desc=desc, total=file_size, unit="B",
                           unit_scale=1, leave=False)
                for chunk in r.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)
                        bar.update(len(chunk))
        file_path = os.path.realpath(file_path)
        if is_zipfile(file_path) and decompress:
            zip_dir = unzip_file(file_path)
            tqdm.write("Downloaded and decompressed to `" + zip_dir + "`!")
            os.remove(file_path)
            file_path = zip_dir
        else:
            tqdm.write("Downloaded `" + file_path + "`!")
        return file_path
def receive_server(*, code, decompress=False, port=8000):
    r"""Serves a file receiver and registers it as a Multicast-DNS service.

    Blocks forever running the aiohttp event loop until interrupted.

    Parameters
    ----------
    code : str
        Identifying code for the Airshare service and server.
    decompress : boolean, default=False
        Flag to enable or disable decompression (Zip).
    port : int, default=8000
        Port number at which the server is hosted on the device.

    Raises
    ------
    CodeExistsError
        If a service is already registered under *code* on the network.
    """
    # Refuse to register a service code already in use on the LAN.
    info = get_service_info(code)
    if info is not None:
        raise CodeExistsError(code)
    addresses = [get_local_ip_address()]
    register_service(code, addresses, port)
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    app = web.Application()
    app["decompress"] = decompress
    # Pre-seed five progress-bar row slots for concurrent uploads.
    app["progress_queue"] = asyncio.Queue()
    for pos in range(5):
        app["progress_queue"].put_nowait(pos)
    app.router.add_get(path="/", handler=_upload_page)
    app.router.add_get(path="/airshare", handler=_is_airshare_upload_receiver)
    app.router.add_post(path="/upload", handler=_uploaded_file_receiver)
    runner = web.AppRunner(app)
    loop.run_until_complete(runner.setup())
    site = web.TCPSite(runner, "0.0.0.0", str(port))
    loop.run_until_complete(site.start())
    # Hide the port suffix in the displayed URL for the default HTTP port.
    url_port = ""
    if port != 80:
        url_port = ":" + str(port)
    ip = socket.inet_ntoa(addresses[0]) + url_port
    quit_msg = "`, press Ctrl+C to stop receiving..."
    if platform.system() == "Windows" and sys.version_info < (3, 8):
        # Ctrl+C does not interrupt the asyncio loop on Windows before 3.8.
        quit_msg = "`, press Ctrl+Break to stop receiving..."
    print("Waiting for uploaded files at " + ip + " and `http://"
          + code + ".local" + url_port + quit_msg)
    # Print a terminal QR code so mobile browsers can reach the URL.
    qr_code("http://" + ip)
    if decompress:
        print("Note: Any Zip Archives will be decompressed!")
    loop.run_forever()
def receive_server_proc(*, code, decompress=False, port=8000):
    r"""Creates a process with 'receive_server' as the target.

    The process is returned un-started; call ``.start()`` on it.

    Parameters
    ----------
    code : str
        Identifying code for the Airshare service and server.
    decompress : boolean, default=False
        Flag to enable or disable decompression (Zip).
    port : int, default=8000
        Port number at which the server is hosted on the device.

    Returns
    -------
    process : multiprocessing.Process
        A multiprocessing.Process object with 'receive_server' as target.
    """
    return Process(
        target=receive_server,
        kwargs={"code": code, "decompress": decompress, "port": port},
    )
r"""QR Code Generator for Python
Copyright (c) 2012 Kazuhiko Arase
URL: http://www.d-project.com/
Licensed under the MIT license:
http://www.opensource.org/licenses/mit-license.php
The word 'QR Code' is registered trademark of
DENSO WAVE INCORPORATED
http://www.denso-wave.com/qrcode/faqpatent-e.html
Modified by Airshare Developers:
- added `printQr()` to `QRCode`
"""
import colorama
from termcolor import colored, cprint
# Enable ANSI escape handling (needed on Windows terminals) so the colored
# blocks printed by QRCode.printQr render correctly.
colorama.init()
class QRCode:
    """Builds a QR Code symbol: accumulates 8-bit data segments, chooses
    the best mask pattern, and fills the module (pixel) matrix.

    Usage: configure via setTypeNumber/setErrorCorrectLevel, addData(),
    then make(); read the result with getModuleCount()/isDark() or print
    it to the terminal with printQr().
    """

    # Alternating pad codewords appended to fill unused data capacity.
    PAD0 = 0xEC
    PAD1 = 0x11

    def __init__(self):
        self.typeNumber = 1          # QR version (1-40); controls symbol size
        self.errorCorrectLevel = ErrorCorrectLevel.H
        self.qrDataList = []         # list of QR8BitByte segments
        self.modules = []            # 2-D matrix of True/False/None modules
        self.moduleCount = 0         # symbol width/height in modules

    def getTypeNumber(self):
        return self.typeNumber

    def setTypeNumber(self, typeNumber):
        self.typeNumber = typeNumber

    def getErrorCorrectLevel(self):
        return self.errorCorrectLevel

    def setErrorCorrectLevel(self, errorCorrectLevel):
        self.errorCorrectLevel = errorCorrectLevel

    def clearData(self):
        self.qrDataList = []

    def addData(self, data):
        """Append *data* as an 8-bit byte segment."""
        self.qrDataList.append(QR8BitByte(data))

    def getDataCount(self):
        return len(self.qrDataList)

    def getData(self, index):
        return self.qrDataList[index]

    def isDark(self, row, col):
        """True if the module at (row, col) is dark; unset counts as light."""
        return (self.modules[row][col] if self.modules[row][col] is not None
                else False)

    def getModuleCount(self):
        return self.moduleCount

    def printQr(self):
        """Print the symbol to the terminal with a one-module quiet border."""
        WHITE = colored(" ", "white", "on_white")
        BLACK = " "
        size = self.getModuleCount() + 2
        print()
        for row in range(size):
            for col in range(size):
                if row == 0 or col == 0 or col == size - 1 or row == size - 1:
                    cprint(WHITE, end="")
                else:
                    color = BLACK if self.isDark(row - 1, col - 1) else WHITE
                    cprint(color, end="")
            print()
        print()

    def make(self):
        """Build the final symbol using the lowest-penalty mask pattern."""
        self._make(False, self._getBestMaskPattern())

    def _getBestMaskPattern(self):
        # Try all 8 masks and keep the one with the lowest penalty score.
        minLostPoint = 0
        pattern = 0
        for i in range(8):
            self._make(True, i)
            lostPoint = QRUtil.getLostPoint(self)
            if i == 0 or minLostPoint > lostPoint:
                minLostPoint = lostPoint
                pattern = i
        return pattern

    def _make(self, test, maskPattern):
        # Lay out all function patterns, then encode and map the data.
        self.moduleCount = self.typeNumber * 4 + 17
        self.modules = [[None] * self.moduleCount
                        for i in range(self.moduleCount)]
        self._setupPositionProbePattern(0, 0)
        self._setupPositionProbePattern(self.moduleCount - 7, 0)
        self._setupPositionProbePattern(0, self.moduleCount - 7)
        self._setupPositionAdjustPattern()
        self._setupTimingPattern()
        self._setupTypeInfo(test, maskPattern)
        if self.typeNumber >= 7:
            self._setupTypeNumber(test)
        data = QRCode._createData(
            self.typeNumber,
            self.errorCorrectLevel,
            self.qrDataList)
        self._mapData(data, maskPattern)

    def _mapData(self, data, maskPattern):
        # Write codeword bits into the unset modules, zig-zagging upward
        # and downward in two-column strips from the right edge.
        rows = list(range(self.moduleCount))
        cols = [col - 1 if col <= 6 else col
                for col in range(self.moduleCount - 1, 0, -2)]
        maskFunc = QRUtil.getMaskFunction(maskPattern)
        byteIndex = 0
        bitIndex = 7
        for col in cols:
            rows.reverse()
            for row in rows:
                for c in range(2):
                    if self.modules[row][col - c] is None:
                        dark = False
                        if byteIndex < len(data):
                            dark = ((data[byteIndex] >> bitIndex) & 1) == 1
                        if maskFunc(row, col - c):
                            dark = not dark
                        self.modules[row][col - c] = dark
                        bitIndex -= 1
                        if bitIndex == -1:
                            byteIndex += 1
                            bitIndex = 7

    def _setupPositionAdjustPattern(self):
        # 5x5 alignment patterns at the positions given by QRUtil,
        # skipping any that would overlap existing function patterns.
        pos = QRUtil.getPatternPosition(self.typeNumber)
        for row in pos:
            for col in pos:
                if self.modules[row][col] is not None:
                    continue
                for r in range(-2, 3):
                    for c in range(-2, 3):
                        self.modules[row + r][col + c] = (
                            r == -2 or r == 2 or c == -2 or c == 2
                            or (r == 0 and c == 0))

    def _setupPositionProbePattern(self, row, col):
        # 7x7 finder pattern (plus its light separator) at (row, col).
        for r in range(-1, 8):
            for c in range(-1, 8):
                if (row + r <= -1 or self.moduleCount <= row + r
                        or col + c <= -1 or self.moduleCount <= col + c):
                    continue
                self.modules[row + r][col + c] = (
                    (0 <= r and r <= 6 and (c == 0 or c == 6))
                    or (0 <= c and c <= 6 and (r == 0 or r == 6))
                    or (2 <= r and r <= 4 and 2 <= c and c <= 4))

    def _setupTimingPattern(self):
        # Alternating dark/light line in row 6 and column 6.
        for r in range(8, self.moduleCount - 8):
            if self.modules[r][6] is not None:
                continue
            self.modules[r][6] = r % 2 == 0
        for c in range(8, self.moduleCount - 8):
            if self.modules[6][c] is not None:
                continue
            self.modules[6][c] = c % 2 == 0

    def _setupTypeNumber(self, test):
        # 18-bit version information (versions >= 7), placed twice.
        bits = QRUtil.getBCHTypeNumber(self.typeNumber)
        for i in range(18):
            self.modules[i // 3][i % 3 + self.moduleCount - 8 - 3] = (
                not test and ((bits >> i) & 1) == 1)
        for i in range(18):
            self.modules[i % 3 + self.moduleCount - 8 - 3][i // 3] = (
                not test and ((bits >> i) & 1) == 1)

    def _setupTypeInfo(self, test, maskPattern):
        # 15-bit format information (ECC level + mask), placed twice.
        data = (self.errorCorrectLevel << 3) | maskPattern
        bits = QRUtil.getBCHTypeInfo(data)
        # vertical
        for i in range(15):
            mod = not test and ((bits >> i) & 1) == 1
            if i < 6:
                self.modules[i][8] = mod
            elif i < 8:
                self.modules[i + 1][8] = mod
            else:
                self.modules[self.moduleCount - 15 + i][8] = mod
        # horizontal
        for i in range(15):
            mod = not test and ((bits >> i) & 1) == 1
            if i < 8:
                self.modules[8][self.moduleCount - i - 1] = mod
            elif i < 9:
                self.modules[8][15 - i - 1 + 1] = mod
            else:
                self.modules[8][15 - i - 1] = mod
        # fixed dark module
        self.modules[self.moduleCount - 8][8] = not test

    @staticmethod
    def _createData(typeNumber, errorCorrectLevel, dataArray):
        """Encode the segments into padded codewords plus RS error correction."""
        rsBlocks = RSBlock.getRSBlocks(typeNumber, errorCorrectLevel)
        buffer = BitBuffer()
        for data in dataArray:
            buffer.put(data.getMode(), 4)
            buffer.put(data.getLength(), data.getLengthInBits(typeNumber))
            data.write(buffer)
        totalDataCount = sum(rsBlock.getDataCount()
                             for rsBlock in rsBlocks)
        if buffer.getLengthInBits() > totalDataCount * 8:
            raise Exception('code length overflow. (%s>%s)' %
                            (buffer.getLengthInBits(), totalDataCount * 8))
        # terminator (up to 4 zero bits)
        if buffer.getLengthInBits() + 4 <= totalDataCount * 8:
            buffer.put(0, 4)
        # pad to a byte boundary
        while buffer.getLengthInBits() % 8 != 0:
            buffer.put(False)
        # fill remaining capacity with alternating pad codewords
        while True:
            if buffer.getLengthInBits() >= totalDataCount * 8:
                break
            buffer.put(QRCode.PAD0, 8)
            if buffer.getLengthInBits() >= totalDataCount * 8:
                break
            buffer.put(QRCode.PAD1, 8)
        return QRCode._createBytes(buffer, rsBlocks)

    @staticmethod
    def _createBytes(buffer, rsBlocks):
        # Split data into RS blocks, compute error-correction codewords
        # for each, then interleave data and EC codewords column-wise.
        offset = 0
        maxDcCount = 0
        maxEcCount = 0
        dcdata = [None] * len(rsBlocks)
        ecdata = [None] * len(rsBlocks)
        for r in range(len(rsBlocks)):
            dcCount = rsBlocks[r].getDataCount()
            ecCount = rsBlocks[r].getTotalCount() - dcCount
            maxDcCount = max(maxDcCount, dcCount)
            maxEcCount = max(maxEcCount, ecCount)
            dcdata[r] = [0] * dcCount
            for i in range(len(dcdata[r])):
                dcdata[r][i] = 0xff & buffer.getBuffer()[i + offset]
            offset += dcCount
            rsPoly = QRUtil.getErrorCorrectPolynomial(ecCount)
            rawPoly = Polynomial(dcdata[r], rsPoly.getLength() - 1)
            modPoly = rawPoly.mod(rsPoly)
            ecdata[r] = [0] * (rsPoly.getLength() - 1)
            for i in range(len(ecdata[r])):
                modIndex = i + modPoly.getLength() - len(ecdata[r])
                ecdata[r][i] = modPoly.get(modIndex) if modIndex >= 0 else 0
        totalCodeCount = sum(rsBlock.getTotalCount()
                             for rsBlock in rsBlocks)
        data = [0] * totalCodeCount
        index = 0
        for i in range(maxDcCount):
            for r in range(len(rsBlocks)):
                if i < len(dcdata[r]):
                    data[index] = dcdata[r][i]
                    index += 1
        for i in range(maxEcCount):
            for r in range(len(rsBlocks)):
                if i < len(ecdata[r]):
                    data[index] = ecdata[r][i]
                    index += 1
        return data

    @staticmethod
    def getMinimumQRCode(data, errorCorrectLevel):
        """Return a made QRCode using the smallest version (1-10) that fits."""
        mode = Mode.MODE_8BIT_BYTE  # fixed to 8bit byte
        qr = QRCode()
        qr.setErrorCorrectLevel(errorCorrectLevel)
        qr.addData(data)
        length = qr.getData(0).getLength()
        for typeNumber in range(1, 11):
            if length <= QRUtil.getMaxLength(
                    typeNumber, mode, errorCorrectLevel):
                qr.setTypeNumber(typeNumber)
                break
        qr.make()
        return qr
class Mode:
    """QR data-encoding mode flags (one bit per mode)."""
    MODE_NUMBER = 1
    MODE_ALPHA_NUM = 2
    MODE_8BIT_BYTE = 4
    MODE_KANJI = 8
class ErrorCorrectLevel:
    """QR error-correction levels (values as encoded in format info)."""
    L = 1  # recovers ~7% of codewords
    M = 0  # ~15%
    Q = 3  # ~25%
    H = 2  # ~30%
class MaskPattern:
    """Identifiers of the eight QR mask patterns (0-7)."""
    (PATTERN000, PATTERN001, PATTERN010, PATTERN011,
     PATTERN100, PATTERN101, PATTERN110, PATTERN111) = range(8)
class QRUtil:
    """Static lookup tables and helpers: alignment positions, capacity,
    RS generator polynomials, mask predicates, BCH format/version bits
    and the mask penalty score."""

    @staticmethod
    def getPatternPosition(typeNumber):
        """Alignment-pattern center coordinates for version *typeNumber*."""
        return QRUtil.PATTERN_POSITION_TABLE[typeNumber - 1]

    # Alignment-pattern center positions, indexed by (version - 1).
    PATTERN_POSITION_TABLE = [
        [],
        [6, 18],
        [6, 22],
        [6, 26],
        [6, 30],
        [6, 34],
        [6, 22, 38],
        [6, 24, 42],
        [6, 26, 46],
        [6, 28, 50],
        [6, 30, 54],
        [6, 32, 58],
        [6, 34, 62],
        [6, 26, 46, 66],
        [6, 26, 48, 70],
        [6, 26, 50, 74],
        [6, 30, 54, 78],
        [6, 30, 56, 82],
        [6, 30, 58, 86],
        [6, 34, 62, 90],
        [6, 28, 50, 72, 94],
        [6, 26, 50, 74, 98],
        [6, 30, 54, 78, 102],
        [6, 28, 54, 80, 106],
        [6, 32, 58, 84, 110],
        [6, 30, 58, 86, 114],
        [6, 34, 62, 90, 118],
        [6, 26, 50, 74, 98, 122],
        [6, 30, 54, 78, 102, 126],
        [6, 26, 52, 78, 104, 130],
        [6, 30, 56, 82, 108, 134],
        [6, 34, 60, 86, 112, 138],
        [6, 30, 58, 86, 114, 142],
        [6, 34, 62, 90, 118, 146],
        [6, 30, 54, 78, 102, 126, 150],
        [6, 24, 50, 76, 102, 128, 154],
        [6, 28, 54, 80, 106, 132, 158],
        [6, 32, 58, 84, 110, 136, 162],
        [6, 26, 54, 82, 110, 138, 166],
        [6, 30, 58, 86, 114, 142, 170]
    ]

    # Character capacity MAX_LENGTH[version-1][ecc][mode] for versions
    # 1-10 only; index mapping is done in getMaxLength below.
    MAX_LENGTH = [
        [[41, 25, 17, 10], [34, 20, 14, 8], [
            27, 16, 11, 7], [17, 10, 7, 4]],
        [[77, 47, 32, 20], [63, 38, 26, 16],
         [48, 29, 20, 12], [34, 20, 14, 8]],
        [[127, 77, 53, 32], [101, 61, 42, 26], [
            77, 47, 32, 20], [58, 35, 24, 15]],
        [[187, 114, 78, 48], [149, 90, 62, 38], [
            111, 67, 46, 28], [82, 50, 34, 21]],
        [[255, 154, 106, 65], [202, 122, 84, 52], [
            144, 87, 60, 37], [106, 64, 44, 27]],
        [[322, 195, 134, 82], [255, 154, 106, 65], [
            178, 108, 74, 45], [139, 84, 58, 36]],
        [[370, 224, 154, 95], [293, 178, 122, 75], [
            207, 125, 86, 53], [154, 93, 64, 39]],
        [[461, 279, 192, 118], [365, 221, 152, 93], [
            259, 157, 108, 66], [202, 122, 84, 52]],
        [[552, 335, 230, 141], [432, 262, 180, 111], [
            312, 189, 130, 80], [235, 143, 98, 60]],
        [[652, 395, 271, 167], [513, 311, 213, 131],
         [364, 221, 151, 93], [288, 174, 119, 74]]
    ]

    @staticmethod
    def getMaxLength(typeNumber, mode, errorCorrectLevel):
        """Maximum character count for the given version/mode/ECC level."""
        t = typeNumber - 1
        e = {
            ErrorCorrectLevel.L: 0,
            ErrorCorrectLevel.M: 1,
            ErrorCorrectLevel.Q: 2,
            ErrorCorrectLevel.H: 3
        }[errorCorrectLevel]
        m = {
            Mode.MODE_NUMBER: 0,
            Mode.MODE_ALPHA_NUM: 1,
            Mode.MODE_8BIT_BYTE: 2,
            Mode.MODE_KANJI: 3
        }[mode]
        return QRUtil.MAX_LENGTH[t][e][m]

    @staticmethod
    def getErrorCorrectPolynomial(errorCorrectLength):
        """Reed-Solomon generator polynomial of the given degree:
        product of (x - a^i) for i in [0, errorCorrectLength)."""
        a = Polynomial([1])
        for i in range(errorCorrectLength):
            a = a.multiply(Polynomial([1, QRMath.gexp(i)]))
        return a

    @staticmethod
    def getMaskFunction(maskPattern):
        """Return the (row, col) predicate deciding which modules to flip."""
        return {
            MaskPattern.PATTERN000:
                lambda i, j: (i + j) % 2 == 0,
            MaskPattern.PATTERN001:
                lambda i, j: i % 2 == 0,
            MaskPattern.PATTERN010:
                lambda i, j: j % 3 == 0,
            MaskPattern.PATTERN011:
                lambda i, j: (i + j) % 3 == 0,
            MaskPattern.PATTERN100:
                lambda i, j: (i // 2 + j // 3) % 2 == 0,
            MaskPattern.PATTERN101:
                lambda i, j: (i * j) % 2 + (i * j) % 3 == 0,
            MaskPattern.PATTERN110:
                lambda i, j: ((i * j) % 2 + (i * j) % 3) % 2 == 0,
            MaskPattern.PATTERN111:
                lambda i, j: ((i * j) % 3 + (i + j) % 2) % 2 == 0
        }[maskPattern]

    @staticmethod
    def getLostPoint(qrcode):
        """Penalty score of a masked symbol; lower is better."""
        moduleCount = qrcode.getModuleCount()
        lostPoint = 0
        # LEVEL1: adjacent modules of the same color
        for row in range(moduleCount):
            for col in range(moduleCount):
                sameCount = 0
                dark = qrcode.isDark(row, col)
                for r in range(-1, 2):
                    if row + r < 0 or moduleCount <= row + r:
                        continue
                    for c in range(-1, 2):
                        if col + c < 0 or moduleCount <= col + c:
                            continue
                        if r == 0 and c == 0:
                            continue
                        if dark == qrcode.isDark(row + r, col + c):
                            sameCount += 1
                if sameCount > 5:
                    lostPoint += (3 + sameCount - 5)
        # LEVEL2: 2x2 blocks of a single color
        for row in range(moduleCount - 1):
            for col in range(moduleCount - 1):
                count = 0
                if qrcode.isDark(row, col):
                    count += 1
                if qrcode.isDark(row + 1, col):
                    count += 1
                if qrcode.isDark(row, col + 1):
                    count += 1
                if qrcode.isDark(row + 1, col + 1):
                    count += 1
                if count == 0 or count == 4:
                    lostPoint += 3
        # LEVEL3: 1:1:3:1:1 finder-like runs in rows and columns
        for row in range(moduleCount):
            for col in range(moduleCount - 6):
                if (qrcode.isDark(row, col)
                        and not qrcode.isDark(row, col + 1)
                        and qrcode.isDark(row, col + 2)
                        and qrcode.isDark(row, col + 3)
                        and qrcode.isDark(row, col + 4)
                        and not qrcode.isDark(row, col + 5)
                        and qrcode.isDark(row, col + 6)):
                    lostPoint += 40
        for col in range(moduleCount):
            for row in range(moduleCount - 6):
                if (qrcode.isDark(row, col)
                        and not qrcode.isDark(row + 1, col)
                        and qrcode.isDark(row + 2, col)
                        and qrcode.isDark(row + 3, col)
                        and qrcode.isDark(row + 4, col)
                        and not qrcode.isDark(row + 5, col)
                        and qrcode.isDark(row + 6, col)):
                    lostPoint += 40
        # LEVEL4: deviation of the dark-module ratio from 50%
        darkCount = 0
        for col in range(moduleCount):
            for row in range(moduleCount):
                if qrcode.isDark(row, col):
                    darkCount += 1
        ratio = abs(100 * darkCount // moduleCount // moduleCount - 50) // 5
        lostPoint += ratio * 10
        return lostPoint

    # BCH generator polynomials (format / version info) and format mask.
    G15 = ((1 << 10) | (1 << 8) | (1 << 5) | (1 << 4) |
           (1 << 2) | (1 << 1) | (1 << 0))
    G18 = ((1 << 12) | (1 << 11) | (1 << 10) | (1 << 9) |
           (1 << 8) | (1 << 5) | (1 << 2) | (1 << 0))
    G15_MASK = (1 << 14) | (1 << 12) | (1 << 10) | (1 << 4) | (1 << 1)

    @staticmethod
    def getBCHTypeInfo(data):
        """15-bit format info: 5 data bits + 10 BCH remainder bits, masked."""
        d = data << 10
        while QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G15) >= 0:
            d ^= (QRUtil.G15 << (QRUtil.getBCHDigit(d) -
                                 QRUtil.getBCHDigit(QRUtil.G15)))
        return ((data << 10) | d) ^ QRUtil.G15_MASK

    @staticmethod
    def getBCHTypeNumber(data):
        """18-bit version info: 6 data bits + 12 BCH remainder bits."""
        d = data << 12
        while QRUtil.getBCHDigit(d) - QRUtil.getBCHDigit(QRUtil.G18) >= 0:
            d ^= (QRUtil.G18 << (QRUtil.getBCHDigit(d) -
                                 QRUtil.getBCHDigit(QRUtil.G18)))
        return (data << 12) | d

    @staticmethod
    def getBCHDigit(data):
        """Bit length of *data* (position of the highest set bit + 1)."""
        digit = 0
        while data != 0:
            digit += 1
            data >>= 1
        return digit

    @staticmethod
    def stringToBytes(s):
        """Encode *s* as a list of byte values (low 8 bits of each char)."""
        return [ord(c) & 0xff for c in s]
class QR8BitByte:
    """A QR data segment carrying raw 8-bit byte data."""

    def __init__(self, data):
        self.mode = Mode.MODE_8BIT_BYTE
        self.data = data

    def getMode(self):
        """Return the segment's mode constant."""
        return self.mode

    def getData(self):
        """Return the raw payload string."""
        return self.data

    def write(self, buffer):
        """Append the payload to *buffer*, eight bits per character."""
        for value in QRUtil.stringToBytes(self.getData()):
            buffer.put(value, 8)

    def getLength(self):
        """Return the payload length in bytes."""
        return len(QRUtil.stringToBytes(self.getData()))

    def getLengthInBits(self, type):
        """Bit width of the character-count field for QR version *type*."""
        if 1 <= type < 10:      # versions 1 - 9
            widths = {
                Mode.MODE_NUMBER: 10,
                Mode.MODE_ALPHA_NUM: 9,
                Mode.MODE_8BIT_BYTE: 8,
                Mode.MODE_KANJI: 8
            }
        elif type < 27:         # versions 10 - 26
            widths = {
                Mode.MODE_NUMBER: 12,
                Mode.MODE_ALPHA_NUM: 11,
                Mode.MODE_8BIT_BYTE: 16,
                Mode.MODE_KANJI: 10
            }
        elif type < 41:         # versions 27 - 40
            widths = {
                Mode.MODE_NUMBER: 14,
                Mode.MODE_ALPHA_NUM: 13,
                Mode.MODE_8BIT_BYTE: 16,
                Mode.MODE_KANJI: 12
            }
        else:
            raise Exception('type:%s' % type)
        return widths[self.mode]
class QRMath:
    """Exponent/log lookup tables for GF(256) arithmetic (generator 2,
    field polynomial x^8 + x^4 + x^3 + x^2 + 1) used by Reed-Solomon."""
    EXP_TABLE = None
    LOG_TABLE = None
    @staticmethod
    def _init():
        """Build the exponent and logarithm tables once."""
        exp_table = [0] * 256
        # The first eight powers of alpha=2 are plain powers of two; the
        # rest follow the recurrence induced by the field polynomial.
        for i in range(8):
            exp_table[i] = 1 << i
        for i in range(8, 256):
            exp_table[i] = (exp_table[i - 4] ^ exp_table[i - 5]
                            ^ exp_table[i - 6] ^ exp_table[i - 8])
        log_table = [0] * 256
        for i in range(255):
            log_table[exp_table[i]] = i
        QRMath.EXP_TABLE = exp_table
        QRMath.LOG_TABLE = log_table
    @staticmethod
    def glog(n):
        """Return log_alpha(n); raise for n < 1 (log of zero undefined)."""
        if n < 1:
            raise Exception('log(%s)' % n)
        return QRMath.LOG_TABLE[n]
    @staticmethod
    def gexp(n):
        """Return alpha**n, normalising the exponent into table range."""
        while n < 0:
            n += 255
        while n >= 256:
            n -= 255
        return QRMath.EXP_TABLE[n]
# build the lookup tables once at import time
QRMath._init()
class Polynomial:
    """Polynomial with GF(256) coefficients, highest-degree term first.
    Arithmetic is performed through QRMath's log/antilog tables; used by
    the Reed-Solomon encoder.
    """
    def __init__(self, num, shift=0):
        # Strip leading zero coefficients, then append `shift` zeros on the
        # right (i.e. multiply the polynomial by x**shift).
        offset = 0
        length = len(num)
        while offset < length and num[offset] == 0:
            offset += 1
        self.num = num[offset:] + [0] * shift
    def get(self, index):
        # Coefficient at position `index` (0 = highest-degree term).
        return self.num[index]
    def getLength(self):
        # Number of stored coefficients (degree + 1).
        return len(self.num)
    def __repr__(self):
        return ','.join([str(self.get(i))
                         for i in range(self.getLength())])
    def toLogString(self):
        # Like __repr__, but shows each coefficient as its discrete log.
        # NOTE(review): QRMath.glog raises on zero coefficients.
        return ','.join([str(QRMath.glog(self.get(i)))
                         for i in range(self.getLength())])
    def multiply(self, e):
        # Schoolbook multiplication: each product is gexp(glog(a)+glog(b)),
        # accumulated with XOR (addition in GF(256)).
        num = [0] * (self.getLength() + e.getLength() - 1)
        for i in range(self.getLength()):
            for j in range(e.getLength()):
                num[i + j] ^= QRMath.gexp(QRMath.glog(self.get(i)) +
                                          QRMath.glog(e.get(j)))
        return Polynomial(num)
    def mod(self, e):
        # Recursive polynomial remainder: XOR a scaled copy of `e`, aligned
        # to the leading term, until deg(self) < deg(e).
        if self.getLength() - e.getLength() < 0:
            return self
        # Scale factor (as a log) that cancels the leading coefficient.
        ratio = QRMath.glog(self.get(0)) - QRMath.glog(e.get(0))
        num = self.num[:]
        for i in range(e.getLength()):
            num[i] ^= QRMath.gexp(QRMath.glog(e.get(i)) + ratio)
        return Polynomial(num).mod(e)
class RSBlock:
    """One Reed-Solomon block: total codeword count and data codeword count.
    `RS_BLOCK_TABLE` stores, for each (version, error-correction level)
    pair, flat triplets of (block count, total codewords, data codewords).
    """
    RS_BLOCK_TABLE = [
        # L
        # M
        # Q
        # H
        # 1
        [1, 26, 19],
        [1, 26, 16],
        [1, 26, 13],
        [1, 26, 9],
        # 2
        [1, 44, 34],
        [1, 44, 28],
        [1, 44, 22],
        [1, 44, 16],
        # 3
        [1, 70, 55],
        [1, 70, 44],
        [2, 35, 17],
        [2, 35, 13],
        # 4
        [1, 100, 80],
        [2, 50, 32],
        [2, 50, 24],
        [4, 25, 9],
        # 5
        [1, 134, 108],
        [2, 67, 43],
        [2, 33, 15, 2, 34, 16],
        [2, 33, 11, 2, 34, 12],
        # 6
        [2, 86, 68],
        [4, 43, 27],
        [4, 43, 19],
        [4, 43, 15],
        # 7
        [2, 98, 78],
        [4, 49, 31],
        [2, 32, 14, 4, 33, 15],
        [4, 39, 13, 1, 40, 14],
        # 8
        [2, 121, 97],
        [2, 60, 38, 2, 61, 39],
        [4, 40, 18, 2, 41, 19],
        [4, 40, 14, 2, 41, 15],
        # 9
        [2, 146, 116],
        [3, 58, 36, 2, 59, 37],
        [4, 36, 16, 4, 37, 17],
        [4, 36, 12, 4, 37, 13],
        # 10
        [2, 86, 68, 2, 87, 69],
        [4, 69, 43, 1, 70, 44],
        [6, 43, 19, 2, 44, 20],
        [6, 43, 15, 2, 44, 16]
    ]
    def __init__(self, totalCount, dataCount):
        self.totalCount = totalCount
        self.dataCount = dataCount
    def getDataCount(self):
        """Number of data codewords in this block."""
        return self.dataCount
    def getTotalCount(self):
        """Total number of codewords (data + error correction)."""
        return self.totalCount
    def __repr__(self):
        return ('(total=%s,data=%s)' % (self.totalCount, self.dataCount))
    @staticmethod
    def getRSBlocks(typeNumber, errorCorrectLevel):
        """Expand the table entry for (typeNumber, errorCorrectLevel) into
        one RSBlock instance per physical block."""
        rsBlock = RSBlock.getRsBlockTable(typeNumber, errorCorrectLevel)
        length = len(rsBlock) // 3
        blocks = []  # renamed from `list`, which shadowed the builtin
        for i in range(length):
            count = rsBlock[i * 3 + 0]
            totalCount = rsBlock[i * 3 + 1]
            dataCount = rsBlock[i * 3 + 2]
            blocks += [RSBlock(totalCount, dataCount)] * count
        return blocks
    @staticmethod
    def getRsBlockTable(typeNumber, errorCorrectLevel):
        """Look up the flat (count, total, data) triplets for a version and
        error-correction level; raises KeyError for unknown levels."""
        return {
            ErrorCorrectLevel.L:
                RSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 0],
            ErrorCorrectLevel.M:
                RSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 1],
            ErrorCorrectLevel.Q:
                RSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 2],
            ErrorCorrectLevel.H:
                RSBlock.RS_BLOCK_TABLE[(typeNumber - 1) * 4 + 3]
        }[errorCorrectLevel]
class BitBuffer:
    """Growable MSB-first bit buffer backed by a list of byte values."""
    def __init__(self, inclements=32):
        # `inclements` (sic — kept for API compatibility) is the growth
        # step, in bytes, used when the buffer fills up.
        self.inclements = inclements
        self.buffer = [0] * self.inclements
        self.length = 0
    def getBuffer(self):
        """Return the underlying list of byte values."""
        return self.buffer
    def getLengthInBits(self):
        """Return how many bits have been written so far."""
        return self.length
    def get(self, index):
        """Return True if the bit at position *index* is set."""
        byte_index, bit_index = divmod(index, 8)
        return ((self.buffer[byte_index] >> (7 - bit_index)) & 1) == 1
    def putBit(self, bit):
        """Append a single bit, growing the byte list when full."""
        if self.length == len(self.buffer) * 8:
            self.buffer.extend([0] * self.inclements)
        if bit:
            byte_index, bit_index = divmod(self.length, 8)
            self.buffer[byte_index] |= 0x80 >> bit_index
        self.length += 1
    def put(self, num, length):
        """Append the lowest *length* bits of *num*, most significant first."""
        for offset in range(length - 1, -1, -1):
            self.putBit(((num >> offset) & 1) == 1)
    def __repr__(self):
        return ''.join('1' if self.get(i) else '0'
                       for i in range(self.length))
import mimetypes
import os
import pyperclip
import re
import socket
import tempfile
from time import strftime
from zipfile import ZipFile
from zeroconf import IPVersion, ServiceInfo, Zeroconf
from .qrcode import ErrorCorrectLevel, QRCode
__all__ = ["get_local_ip_address", "qr_code", "get_service_info",
"register_service", "get_zip_file", "unzip_file",
"get_clipboard_paths", "is_file_copyable"]
# Local IP Address
def get_local_ip_address():
    r"""Obtains the device's local network IP address.
    Returns
    -------
    ip : bytes
        Packed 32-bit representation of the device's local IP Address.
    """
    # Connecting a UDP socket sends no packets; it only makes the OS pick
    # the outbound interface, whose address we then read back.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("10.255.255.255", 1))
        ip = s.getsockname()[0]
    finally:
        # Close even if connect/getsockname raises (original leaked the
        # socket on error).
        s.close()
    return socket.inet_aton(ip)
# QR Code Utility
def qr_code(url):
    r"""Generate QR Code from URL and print it.
    Parameters
    ----------
    url : str
        URL to create the QR Code for.
    """
    # Build the smallest QR code (version) that fits the URL at level M.
    qr = QRCode.getMinimumQRCode(url, ErrorCorrectLevel.M)
    # NOTE(review): getMinimumQRCode already receives the level above, so
    # this extra call looks redundant — confirm before removing.
    qr.setErrorCorrectLevel(ErrorCorrectLevel.M)
    qr.make()
    qr.printQr()
# Zeroconf Utilities
def get_service_info(code):
    r"""Get service information for an Airshare service.
    Parameters
    ----------
    code : str
        Identifying code for the Airshare service.
    Returns
    -------
    info : zeroconf.ServiceInfo
        Details of the Airshare service, or None if not found.
    """
    zeroconf = Zeroconf(ip_version=IPVersion.V4Only)
    try:
        service = "_airshare._http._tcp.local."
        info = zeroconf.get_service_info(service, code + service)
    finally:
        # Release the listener threads and sockets; the original left the
        # Zeroconf instance open, leaking them on every lookup.
        zeroconf.close()
    return info
def register_service(code, addresses, port):
    r"""Registers an Airshare Multicast-DNS service based in the local network.
    Parameters
    ----------
    code : str
        Identifying code for the Airshare service.
    addresses : list
        List of local network IP Addresses for the service.
    port : int
        Port number for the Airshare service's server.
    Returns
    -------
    info : zeroconf.ServiceInfo
        Details of the Airshare service.
    """
    zeroconf = Zeroconf(ip_version=IPVersion.V4Only)
    # The service is advertised as `<code>._airshare._http._tcp.local.` and
    # resolves to host name `<code>.local.`.
    service = "_airshare._http._tcp.local."
    info = ServiceInfo(
        service,
        code + service,
        addresses=addresses,
        port=port,
        server=code + ".local."
    )
    zeroconf.register_service(info)
    # NOTE(review): the Zeroconf instance is deliberately not closed here —
    # closing it would presumably drop the registration; confirm that
    # unregistration at shutdown is handled elsewhere.
    return info
# Zip and Unzip
def get_zip_file(files):
    r"""Creates a temporary Zip Archive of files and directories.
    Parameters
    ----------
    files : list
        List of paths of files and directories to compress.
    Returns
    -------
    zip_file_path : str
        Canonical file path of the temporary Zip Archive file.
    zip_file_name : str
        File name to be assigned to the Zip Archive (during sending).
    """
    files = [os.path.realpath(x) for x in files]
    # mkstemp returns an OPEN file descriptor; close it immediately so it
    # is not leaked (ZipFile reopens the path itself).
    fd, zip_file_path = tempfile.mkstemp(prefix="airshare", suffix=".zip")
    os.close(fd)
    num_files = len(files)
    # `index` is the number of leading path components stripped from each
    # archive name: everything above a single item, or only the components
    # above each item's own name when several items are given.
    index = -1
    if num_files == 1:
        index = 0
    with ZipFile(zip_file_path, "w") as zip_archive:
        for item in files:
            index += len(item.split(os.path.sep))
            if os.path.isdir(item):
                for root, _, file_list in os.walk(item):
                    for file in file_list:
                        file_path = os.path.realpath(os.path.join(root, file))
                        zip_archive.write(file_path, os.path.join(
                            *tuple(root.split(os.path.sep)[index:] + [file])))
            else:
                file_path = os.path.realpath(item)
                parts = file_path.split(os.path.sep)[index:]
                # For a single plain file the slice above is empty; fall
                # back to the base name (the original crashed here with
                # "os.path.join() missing argument").
                if not parts:
                    parts = [os.path.basename(file_path)]
                zip_archive.write(file_path, os.path.join(*tuple(parts)))
            index = -1
    zip_file_path = os.path.abspath(zip_file_path)
    zip_file_name = "airshare.zip"
    if num_files == 1:
        zip_file_name = os.path.splitext(
            os.path.realpath(files[0]).split(os.path.sep)[-1])[0] + ".zip"
    return zip_file_path, zip_file_name
def unzip_file(zip_file_path):
    r"""Unzips a Zip Archive file into a new directory.
    Parameters
    ----------
    zip_file_path : str
        Path of the Zip Archive file to unzip.
    Returns
    -------
    zip_dir : str
        Canonical path of the unzipped directory.
    """
    # Target directory is the archive path minus its ".zip" suffix; add a
    # timestamp suffix when that directory already exists.
    target = zip_file_path[:-4]
    if os.path.isdir(target):
        target = target + "-" + strftime("%Y%m%d%H%M%S")
    os.mkdir(target)
    with ZipFile(zip_file_path, "r") as archive:
        archive.extractall(target)
    return os.path.realpath(target)
# Clipboard Utilities
def get_clipboard_paths():
    r"""Extract file paths from the clipboard.
    Returns
    -------
    file_paths : list
        List of canonical paths extracted from the clipboard.
    """
    clipboard = pyperclip.paste()
    # Strip clipboard wrappers: the GNOME/Nautilus header, the file://
    # scheme, carriage returns and surrounding quotes.
    noise = ["x-special/nautilus-clipboard\ncopy\n", "file://", "\r", "'", '"']
    cleaned = re.sub("|".join(noise), "", clipboard.strip())
    return [os.path.realpath(str(line).strip())
            for line in cleaned.split("\n")]
def is_file_copyable(file_path):
    r"""Check if a file can be copied to the clipboard or not.
    Parameters
    ----------
    file_path : str
        Path of the file to check.
    Returns
    -------
    copyable : boolean
        True if the file can be copied to the clipboard, False otherwise.
    """
    # Only text-like content (text/* or JSON) makes sense on a clipboard.
    kind = mimetypes.guess_type(file_path)[0]
    if kind is None:
        return False
    return bool(re.search("text|json", kind, re.IGNORECASE))
from aiohttp import web
import asyncio
import humanize
from multiprocessing import Process
import os
import pkgutil
import platform
import requests
from requests_toolbelt import MultipartEncoder
import socket
import sys
from .exception import CodeExistsError, CodeNotFoundError, IsNotReceiverError
from .utils import get_local_ip_address, get_service_info, get_zip_file, \
qr_code, register_service
__all__ = ["send", "send_server", "send_server_proc"]
# Request handlers
async def _text_page(request):
    """Renders a text viewing page, GET handler for route '/'."""
    # pkgutil.get_data loads the bundled template relative to this package,
    # so it also works when installed from a wheel/zip.
    text = pkgutil.get_data(__name__, "static/text.html").decode()
    return web.Response(text=text, content_type="text/html")
async def _text_sender(request):
    """Returns the text being shared, GET handler for route '/text'."""
    # Log who viewed the content when the peer address is available.
    peer = request.transport.get_extra_info("peername")
    if peer is None:
        address = ""
    else:
        address = " (by " + str(peer[0]) + ")"
    print("Content viewed" + address + "!")
    return web.Response(text=request.app["text"])
async def _download_page(request):
    """Renders a download page, GET handler for route '/'."""
    # Template is bundled with the package under static/.
    download = pkgutil.get_data(__name__, "static/download.html").decode()
    return web.Response(text=download, content_type="text/html")
async def _file_stream_sender(request):
    """Streams a file from the server, GET handler for route '/download'.
    Sends the shared file as a chunked ``application/octet-stream``
    response; HEAD requests receive the headers only.
    """
    address = ""
    peername = request.transport.get_extra_info("peername")
    if peername is not None:
        host, _ = peername
        address = " (by " + str(host) + ")"
    if request.method == "GET":
        print("Content requested" + address + ", transferring!")
    elif request.method == "HEAD":
        print("Content examined" + address + "!")
    response = web.StreamResponse()
    file_path = request.app["file_path"]
    file_name = request.app["file_name"]
    file_size = str(request.app["file_size"])
    header = "attachment; filename=\"{}\"; size={}" \
        .format(file_name, file_size)
    response.headers["content-type"] = "application/octet-stream"
    response.headers["content-length"] = str(request.app["file_size"])
    response.headers["content-disposition"] = header
    # Custom header telling the receiver whether to unzip after download.
    response.headers["airshare-compress"] = request.app["compress"]
    await response.prepare(request)
    # Stream in 8 KiB chunks so large files are never fully in memory.
    with open(file_path, "rb") as f:
        chunk = f.read(8192)
        while chunk:
            await response.write(chunk)
            chunk = f.read(8192)
    return response
async def _is_airshare_text_sender(request):
    """Returns 'Text Sender', GET handler for route '/airshare'.
    Probe endpoint: peers GET '/airshare' to learn what kind of Airshare
    server is running (see `send`, which checks this value).
    """
    return web.Response(text="Text Sender")
async def _is_airshare_file_sender(request):
    """Returns 'File Sender', GET handler for route '/airshare'.
    Probe endpoint: peers GET '/airshare' to learn what kind of Airshare
    server is running.
    """
    return web.Response(text="File Sender")
# Sender functions
def send(*, code, file, compress=False):
    r"""Send file(s) or directories to a receiving server.
    Parameters
    ----------
    code : str
        Identifying code for the Airshare receiving server.
    file : str or list or None
        Relative path or list of paths of the files or directories to serve.
        For multiple files or directories, contents are automatically zipped.
    compress : boolean, default=False
        Flag to enable or disable compression (Zip).
        Effective when only one file is given.
    Returns
    -------
    status_code : int
        Status code of upload POST request.
    """
    info = get_service_info(code)
    if info is None:
        raise CodeNotFoundError(code)
    # Normalise `file` to a non-empty list (or None, which is an error).
    if type(file) is str:
        if file == "":
            file = None
        else:
            file = [file]
    elif len(file) == 0:
        file = None
    if file is None:
        raise ValueError("The parameter `file` must be non-empty!")
    # Zip when asked to, or when sharing a directory / multiple items.
    if compress or len(file) > 1 or os.path.isdir(file[0]):
        compress = "true"
        print("Compressing...")
        file, name = get_zip_file(file)
        print("Compressed to `" + name + "`!")
    else:
        compress = "false"
        file, name = file[0], file[0].split(os.path.sep)[-1]
    ip = socket.inet_ntoa(info.addresses[0])
    url = "http://" + ip + ":" + str(info.port)
    # Make sure the target really is an upload-receiving Airshare server.
    airshare_type = requests.get(url + "/airshare")
    if airshare_type.text != "Upload Receiver":
        raise IsNotReceiverError(code)
    # Keep the handle open for the duration of the streamed upload, but
    # close it afterwards (the original leaked the open file object).
    with open(file, "rb") as fh:
        m = MultipartEncoder(fields={"field0": (name, fh)})
        headers = {"content-type": m.content_type,
                   "airshare-compress": compress}
        r = requests.post(url + "/upload", data=m, headers=headers)
    print("Uploaded `" + name + "` to Airshare `" + code + "`!")
    return r.status_code
def send_server(*, code, text=None, file=None, compress=False, port=8000):
    r"""Serves a file or text and registers it as a Multicast-DNS service.
    Parameters
    ----------
    code : str
        Identifying code for the Airshare service and server.
    text : str or None
        String value to be shared.
        If both `text` and `files` are given, `text` will be shared.
        Must be given if `files` is not given.
    file : str or list or None
        Relative path or list of paths of the files or directories to serve. If
        multiple files or directories are given, the contents are automatically
        zipped. If not given or both `files` and `text` are given, `text` will
        be shared. Must be given if `text` is not given.
    compress : boolean, default=False
        Flag to enable or disable compression (Zip).
        Effective when only one file is given.
    port : int, default=8000
        Port number at which the server is hosted on the device.
    """
    # Refuse to start if the code is already taken on the network.
    info = get_service_info(code)
    if info is not None:
        raise CodeExistsError(code)
    # Normalise `file` to a non-empty list or None.
    if file is not None:
        if type(file) is str:
            if file == "":
                file = None
            else:
                file = [file]
        elif len(file) == 0:
            file = None
    content = text or file
    name = None
    if content is None:
        raise ValueError("Either `file` or `text` (keyword arguments) must be"
                         + " given and non-empty!")
    elif text is None and file is not None:
        # Zip when asked to, or when sharing a directory / multiple items.
        if compress or len(file) > 1 or os.path.isdir(file[0]):
            compress = "true"
            print("Compressing...")
            content, name = get_zip_file(file)
            print("Compressed to `" + name + "`!")
        else:
            compress = "false"
            content = file[0]
    # Advertise the service over mDNS before starting the HTTP server.
    addresses = [get_local_ip_address()]
    register_service(code, addresses, port)
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    app = web.Application()
    file_size = ""
    if text is not None:
        # Text mode: '/' renders a viewer page, '/text' returns raw text.
        app["text"] = content
        app.router.add_get(path="/", handler=_text_page)
        app.router.add_get(path="/text", handler=_text_sender)
        app.router.add_get(path="/airshare", handler=_is_airshare_text_sender)
    elif file:
        # File mode: '/' renders a download page, '/download' streams bytes.
        app["file_path"] = os.path.realpath(content)
        app["file_name"] = name or app["file_path"].split(os.path.sep)[-1]
        app["file_size"] = os.stat(app["file_path"]).st_size
        app["compress"] = compress
        file_size = " (" + humanize.naturalsize(app["file_size"]) + ")"
        content = app["file_name"]
        app.router.add_get(path="/", handler=_download_page)
        app.router.add_get(path="/airshare", handler=_is_airshare_file_sender)
        app.router.add_get(path="/download", handler=_file_stream_sender)
    runner = web.AppRunner(app)
    loop.run_until_complete(runner.setup())
    site = web.TCPSite(runner, "0.0.0.0", str(port))
    loop.run_until_complete(site.start())
    url_port = ""
    if port != 80:
        url_port = ":" + str(port)
    ip = socket.inet_ntoa(addresses[0]) + url_port
    quit_msg = "`, press Ctrl+C to stop sharing..."
    if platform.system() == "Windows" and sys.version_info < (3, 8):
        quit_msg = "`, press Ctrl+Break to stop sharing..."
    print("`" + content + "`" + file_size + " available at " + ip
          + " and `http://" + code + ".local" + url_port + quit_msg)
    # Show a scannable QR code pointing at the server's URL.
    qr_code("http://" + ip)
    # Serve until interrupted (KeyboardInterrupt propagates to the caller).
    loop.run_forever()
def send_server_proc(*, code, text=None, file=None, compress=False, port=8000):
    r"""Creates a process with 'send_server' as the target.
    All keyword arguments are forwarded to `send_server` unchanged; the
    returned process is not started.
    Parameters
    ----------
    code : str
        Identifying code for the Airshare service and server.
    text : str or None
        String value to be shared (takes precedence over `file`).
        Must be given if `file` is not given.
    file : str or list or None
        Relative path or list of paths of the files or directories to serve.
        Multiple files or directories are automatically zipped.
        Must be given if `text` is not given.
    compress : boolean, default=False
        Flag to enable or disable compression (Zip).
        Effective when only one file is given.
    port : int, default=8000
        Port number at which the server is hosted on the device.
    Returns
    -------
    process: multiprocessing.Process
        A multiprocessing.Process object with 'send_server' as target.
    """
    return Process(
        target=send_server,
        kwargs={"code": code, "file": file, "text": text,
                "compress": compress, "port": port},
    )
import click
import os
import pyperclip
from .utils import is_file_copyable, get_clipboard_paths
from .sender import send, send_server
from .receiver import receive, receive_server
@click.command(name="airshare")
@click.argument("code", nargs=1)
@click.option("-p", "--port", type=int, default=8000, help="""
Specify the port number to host a sending or receiving server (default 8000).
""")
@click.option("-t", "--text", type=str, help="""
Send (serve) text content. For multiple words, enclose within quotes.
""")
@click.option("-u", "--upload", is_flag=True, help="""
Host a receiving server or upload file(s) to one.
""")
@click.option("-cs", "--clip-send", is_flag=True, help="""
Send (serve) clipboard contents as text.
""")
@click.option("-cr", "--clip-receive", is_flag=True, help="""
Receive served content and also copy into clipboard (if possible).
""")
@click.option("-fp", "--file-path", is_flag=True, help="""
Send files whose paths have been copied to the clipboard.
""")
@click.argument("files", nargs=-1)
@click.help_option()
@click.version_option(version=None, prog_name="Airshare")
def main(code, port, text, upload, clip_send, clip_receive, file_path, files):
    r"""Airshare - an easy way to share content in a local network.
    CODE - An identifying code for Airshare.
    FILES - File(s) or directories to send.
    """
    # With -fp, the file list comes from paths copied to the clipboard.
    files = get_clipboard_paths() if file_path else files
    if text:
        # Serving blocks until the user interrupts with Ctrl+C.
        try:
            send_server(code=code, text=text, port=port)
        except KeyboardInterrupt:
            exit(0)
    if clip_send:
        # Serve the current clipboard contents as text.
        try:
            send_server(code=code, text=pyperclip.paste(), port=port)
        except KeyboardInterrupt:
            exit(0)
    if clip_receive:
        # `receive` returns either a downloaded file path or raw text.
        content = receive(code=code)
        if os.path.exists(content):
            if is_file_copyable(content):
                with open(content, "r") as f:
                    pyperclip.copy(f.read())
                print("File copied to clipboard!")
            else:
                print("This file cannot be copied to the clipboard!")
        else:
            pyperclip.copy(content)
        return
    if len(files):
        if upload:
            # Upload the given files to an existing receiving server.
            send(code=code, file=list(files))
            return
        else:
            try:
                send_server(code=code, file=files, port=port)
            except KeyboardInterrupt:
                exit(0)
    else:
        if upload:
            # Host a receiving (upload) server.
            try:
                receive_server(code=code, port=port)
            except KeyboardInterrupt:
                exit(0)
        else:
            receive(code=code)
# Allow running the CLI directly (e.g. `python -m airshare.cli`).
if __name__ == "__main__":
    main(prog_name="Airshare")
# AirtablePy
[![build status][buildstatus-image]][buildstatus-url]
[buildstatus-image]: https://github.com/Spill-Tea/AirtablePy/actions/workflows/python-package.yml/badge.svg?branch=main
[buildstatus-url]: https://github.com/Spill-Tea/AirtablePy/actions?query=branch%3Amain
Python API to interact with Airtable
### Table of Contents
1. [Installation](#installation)
2. [API Token](#api-token)
3. [Simple Interface](#simple-interface)
4. [License](#license)
### Installation
AirtablePy is available on [pypi](https://pypi.org/project/AirtablePy/). Install using pip.
```bash
pip install AirtablePy
```
### API Token
To use the Airtable API, you need a valid [token](https://support.airtable.com/hc/en-us/articles/219046777-How-do-I-get-my-API-key-).
You may set up an environment variable called `AIRTABLE_API_KEY`, which this interface will use.
### Simple Interface
```python
from AirtablePy import AirtableAPI
from AirtablePy.utils import retrieve_keys
from AirtablePy.query import date_query
# Instantiate interface with valid token.
# If token is not specified, it will search for environment variable AIRTABLE_API_KEY
api = AirtableAPI(token="keyXXXXXXXXXXXXXX")
# Construct a valid url
base_id = "appXXXXXXXXXXXXXX"
table_name = "Example Table"
url = api.construct_url(base_id, table_name)
# Retrieve records from a table, with or without a query filter
records = api.get(url, query=date_query(column_name="date", start="20220401", end="20220415"))
# Upload new data entries
data = {"Column 1": [1, 2, 3], "Column 2": [4, 5, 6]}
response_upload = api.push(url=url, data=data)
# Collect a list of record id's from upload
record_ids = retrieve_keys(response_upload, "id")
# Update records with additional (or modified) data
data_update = {"Column 3": [7, 8, 9]} # data will be present in all three columns
response_update = api.update(url=url, data=data_update, record_id=record_ids)
# Replace existing records with different data
data_replace = {"Column 2": [10, 11, 12]} # only column 2 will have data
response_replace = api.replace(url=url, data=data_replace, record_id=record_ids)
# Delete existing Records
response_delete = api.delete(url=url, record_id=record_ids)
```
### License
[MIT](./LICENSE)
| AirtablePy | /AirtablePy-0.1.3.tar.gz/AirtablePy-0.1.3/README.md | README.md |
import logging
import os
import sys
import urllib.request
import datetime
import inspect
import warnings
import feedparser
from bs4 import BeautifulSoup as Soup
from dotenv import load_dotenv
try:
import newspaper # Optional - required by GNews.get_full_article()
except ImportError:
pass
from gnews.utils.constants import AVAILABLE_COUNTRIES, AVAILABLE_LANGUAGES, TOPICS, BASE_URL, USER_AGENT
from gnews.utils.utils import connect_database, post_database, process_url
# Module-wide logging: timestamped INFO-level messages for crawl feedback.
# NOTE(review): calling basicConfig at import time configures the root
# logger of any host application — consider moving this to the CLI layer.
logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO,
                    datefmt='%m/%d/%Y %I:%M:%S %p')
logger = logging.getLogger(__name__)
class GNews:
    """Client for the Google News RSS feeds: keyword search, top stories,
    topic and location headlines, with optional period/date-range filters."""
    def __init__(self, language="en", country="US", max_results=100, period=None, start_date=None, end_date=None,
                 exclude_websites=None, proxy=None):
        """
        (optional parameters)
        :param language: The language in which to return results, defaults to en (optional)
        :param country: The country code of the country you want to get headlines for, defaults to US
        :param max_results: The maximum number of results to return. The default is 100, defaults to 100
        :param period: The period of time from which you want the news
        :param start_date: Date after which results must have been published
        :param end_date: Date before which results must have been published
        :param exclude_websites: A list of strings that indicate websites to exclude from results
        :param proxy: The proxy parameter is a dictionary with a single key-value pair. The key is the
        protocol name and the value is the proxy address
        """
        # BUG FIX: the original had trailing commas here, wrapping each
        # tuple inside another one-element tuple.
        self.countries = tuple(AVAILABLE_COUNTRIES)
        self.languages = tuple(AVAILABLE_LANGUAGES)
        self._max_results = max_results
        self._language = language
        self._country = country
        self._period = period
        self._end_date = None
        self._start_date = None
        # Assign through the properties so tuple dates are converted to
        # datetimes and the range sanity checks run (end first, matching
        # the original order; the original's duplicated chained assignment
        # was redundant).
        self.end_date = end_date
        self.start_date = start_date
        self._exclude_websites = exclude_websites if exclude_websites and isinstance(exclude_websites, list) else []
        self._proxy = {'http': proxy, 'https': proxy} if proxy else None
    def _ceid(self):
        """Build the query-string tail: date/period filter plus the
        hl/gl/ceid locale parameters."""
        time_query = ''
        if self._start_date or self._end_date:
            if inspect.stack()[2][3] != 'get_news':
                warnings.warn(message=("Only searches using the function get_news support date ranges. Review the "
                                       f"documentation for {inspect.stack()[2][3]} for a partial workaround. \nStart "
                                       "date and end date will be ignored"), category=UserWarning, stacklevel=4)
            if self._period:
                # BUG FIX: the original also appended a bare 'when%3A'
                # token here (a .format() call with no placeholder),
                # corrupting the URL even though the period is declared
                # ignored whenever a date range is set.
                warnings.warn(message=f'\nPeriod ({self.period}) will be ignored in favour of the start and end dates',
                              category=UserWarning, stacklevel=4)
            if self.end_date is not None:
                time_query += '%20before%3A{}'.format(self.end_date)
            if self.start_date is not None:
                time_query += '%20after%3A{}'.format(self.start_date)
        elif self._period:
            time_query += '%20when%3A{}'.format(self._period)
        return time_query + '&hl={}&gl={}&ceid={}:{}'.format(self._language,
                                                             self._country,
                                                             self._country,
                                                             self._language,)
    @property
    def language(self):
        return self._language
    @language.setter
    def language(self, language):
        """
        :param language: The language code for the language you want to use
        """
        self._language = AVAILABLE_LANGUAGES.get(language, language)
    @property
    def exclude_websites(self):
        return self._exclude_websites
    @exclude_websites.setter
    def exclude_websites(self, exclude_websites):
        """
        The function takes in a list of websites that you want to exclude
        :param exclude_websites: A list of strings that will be used to filter out websites
        """
        self._exclude_websites = exclude_websites
    @property
    def max_results(self):
        return self._max_results
    @max_results.setter
    def max_results(self, size):
        self._max_results = size
    @property
    def period(self):
        return self._period
    @period.setter
    def period(self, period):
        self._period = period
    @property
    def start_date(self):
        """
        :return: string of start_date in form YYYY-MM-DD, or None if start_date is not set
        …NOTE this will reset period to None if start_date is not none
        """
        if self._start_date is None:
            return None
        # NOTE(review): reading this property resets `period` — a side
        # effect kept for compatibility with existing callers.
        self.period = None
        return self._start_date.strftime("%Y-%m-%d")
    @start_date.setter
    def start_date(self, start_date):
        """
        The function sets the start of the date range you want to search
        :param start_date: either a tuple in the form (YYYY, MM, DD) or a datetime
        """
        if type(start_date) is tuple:
            start_date = datetime.datetime(start_date[0], start_date[1], start_date[2])
        if self._end_date:
            if start_date - self._end_date == datetime.timedelta(days=0):
                warnings.warn("The start and end dates should be at least 1 day apart, or GNews will return no results")
            elif self._end_date < start_date:
                warnings.warn("End date should be after start date, or GNews will return no results")
        self._start_date = start_date
    @property
    def end_date(self):
        """
        :return: string of end_date in form YYYY-MM-DD, or None if end_date is not set
        …NOTE this will reset period to None if end date is not None
        """
        if self._end_date is None:
            return None
        # NOTE(review): reading this property resets `period` — a side
        # effect kept for compatibility with existing callers.
        self.period = None
        return self._end_date.strftime("%Y-%m-%d")
    @end_date.setter
    def end_date(self, end_date):
        """
        The function sets the end of the date range you want to search
        :param end_date: either a tuple in the form (YYYY, MM, DD) or a datetime
        …NOTE this will reset period to None
        """
        if type(end_date) is tuple:
            end_date = datetime.datetime(end_date[0], end_date[1], end_date[2])
        if self._start_date:
            if end_date - self._start_date == datetime.timedelta(days=0):
                warnings.warn("The start and end dates should be at least 1 day apart, or GNews will return no results")
            elif end_date < self._start_date:
                warnings.warn("End date should be after start date, or GNews will return no results")
        self._end_date = end_date
    @property
    def country(self):
        return self._country
    @country.setter
    def country(self, country):
        self._country = AVAILABLE_COUNTRIES.get(country, country)
    def get_full_article(self, url):
        """
        Download an article from the specified URL, parse it, and return an article object.
        :param url: The URL of the article you wish to summarize.
        :return: An `Article` object returned by the `newspaper` library.
        """
        # Check that the optional top-of-file `import newspaper` succeeded
        # (the name must be both an imported module and a module global).
        if 'newspaper' not in (sys.modules.keys() & globals()):
            print("\nget_full_article() requires the `newspaper` library.")
            print("You can install it by running `python3 -m pip install newspaper3k` in your shell.\n")
            return None
        try:
            article = newspaper.Article(url="%s" % url, language=self._language)
            article.download()
            article.parse()
        except Exception as error:
            logger.error(error.args[0])
            return None
        return article
    @staticmethod
    def _clean(html):
        """Strip HTML tags and non-breaking spaces from a feed snippet."""
        soup = Soup(html, features="html.parser")
        text = soup.get_text()
        text = text.replace('\xa0', ' ')
        return text
    def _process(self, item):
        """Map one raw feed entry to the public result dict, or None when
        the entry's URL is filtered out."""
        url = process_url(item, self._exclude_websites)
        if url:
            title = item.get("title", "")
            item = {
                'title': title,
                'description': self._clean(item.get("description", "")),
                'published date': item.get("published", ""),
                'url': url,
                'publisher': item.get("source", " ")
            }
            return item
    def docstring_parameter(*sub):
        # Decorator factory that substitutes *sub* into a method's
        # docstring via str.format (runs at class-definition time).
        def dec(obj):
            obj.__doc__ = obj.__doc__.format(*sub)
            return obj
        return dec
    indent = '\n\t\t\t'
    indent2 = indent + '\t'
    standard_output = (indent + "{'title': Article Title," + indent + "'description': Google News summary of the "
                       "article," + indent + "'url': link to the news article," + indent + "'publisher':" + indent2 +
                       "{'href': link to publisher's website," + indent2 + "'title': name of the publisher}}")
    @docstring_parameter(standard_output)
    def get_news(self, key):
        """
        The function takes in a key and returns a list of news articles
        :param key: The query you want to search for. For example, if you want to search for news about
        the "Yahoo", you would get results from Google News according to your key i.e "yahoo"
        :return: A list of dictionaries with structure: {0}.
        """
        if key:
            key = "%20".join(key.split(" "))
            query = '/search?q={}'.format(key)
            return self._get_news(query)
    @docstring_parameter(standard_output)
    def get_top_news(self):
        """
        This function returns top news stories for the current time
        :return: A list of dictionaries with structure: {0}.
        ..To implement date range try get_news('?')
        """
        query = "?"
        return self._get_news(query)
    @docstring_parameter(standard_output, ', '.join(TOPICS))
    def get_news_by_topic(self, topic: str):
        """
        Function to get news from one of Google's key topics
        :param topic: TOPIC names i.e {1}
        :return: A list of dictionaries with structure: {0}.
        ..To implement date range try get_news('topic')
        """
        topic = topic.upper()
        if topic in TOPICS:
            query = '/headlines/section/topic/' + topic + '?'
            return self._get_news(query)
        logger.info(f"Invalid topic. \nAvailable topics are: {', '.join(TOPICS)}.")
        return []
    @docstring_parameter(standard_output)
    def get_news_by_location(self, location: str):
        """
        This function is used to get news from a specific location (city, state, and country)
        :param location: (type: str) The location for which you want to get headlines
        :return: A list of dictionaries with structure: {0}.
        ..To implement date range try get_news('location')
        """
        if location:
            query = '/headlines/section/geo/' + location + '?'
            return self._get_news(query)
        logger.warning("Enter a valid location.")
        return []
    def _get_news(self, query):
        """Fetch and parse the RSS feed for *query*, returning at most
        `max_results` processed entries (empty list on error)."""
        url = BASE_URL + query + self._ceid()
        try:
            if self._proxy:
                proxy_handler = urllib.request.ProxyHandler(self._proxy)
                feed_data = feedparser.parse(url, agent=USER_AGENT, handlers=[proxy_handler])
            else:
                feed_data = feedparser.parse(url, agent=USER_AGENT)
            return [item for item in
                    map(self._process, feed_data.entries[:self._max_results]) if item]
        except Exception as err:
            logger.error(err.args[0])
            return []
    def store_in_mongodb(self, news):
        """
        - MongoDB cluster needs to be created first - https://www.mongodb.com/cloud/atlas/register
        - Connect to the MongoDB cluster
        - Create a new collection
        - Insert the news into the collection
        :param news: the news object that we created in the previous function
        """
        # Connection settings come from .env via python-dotenv.
        load_dotenv()
        db_user = os.getenv("DB_USER")
        db_pw = os.getenv("DB_PW")
        db_name = os.getenv("DB_NAME")
        collection_name = os.getenv("COLLECTION_NAME")
        collection = connect_database(db_user, db_pw, db_name, collection_name)
        post_database(collection, news)
import random
USER_AGENTS = '''Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36
Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36
Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36
Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36
Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36
Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.22 (KHTML, like Gecko) Chrome/19.0.1047.0 Safari/535.22
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1042.0 Safari/535.21
Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1041.0 Safari/535.21
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/18.6.872.0 Safari/535.2 UNTRUSTED/1.0 3gpp-gba UNTRUSTED/1.0
Mozilla/5.0 (Macintosh; AMD Mac OS X 10_8_2) AppleWebKit/535.22 (KHTML, like Gecko) Chrome/18.6.872
Mozilla/5.0 (X11; CrOS i686 1660.57.0) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.46 Safari/535.19
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.166 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.151 Safari/535.19
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.19 (KHTML, like Gecko) Ubuntu/11.10 Chromium/18.0.1025.142 Chrome/18.0.1025.142 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.11 Safari/535.19
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.2) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.04 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/10.10 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.700.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.699.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.699.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.698.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.697.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.71 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.68 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_7) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.68 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.68 Safari/534.24
Mozilla/5.0 Slackware/13.37 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/11.0.696.50
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.43 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.34 Safari/534.24
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.34 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.0) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.14 Safari/534.24
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.12 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.12 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.04 Chromium/11.0.696.0 Chrome/11.0.696.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.694.0 Safari/534.24
Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.23 (KHTML, like Gecko) Chrome/11.0.686.3 Safari/534.23
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.682.0 Safari/534.21
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7_0; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Windows NT) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.669.0 Safari/534.20
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.19 (KHTML, like Gecko) Chrome/11.0.661.0 Safari/534.19
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.18 (KHTML, like Gecko) Chrome/11.0.661.0 Safari/534.18
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.18 (KHTML, like Gecko) Chrome/11.0.660.0 Safari/534.18
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.655.0 Safari/534.17
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.655.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.654.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.652.0 Safari/534.17
Mozilla/4.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/11.0.1245.0 Safari/537.36
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/10.0.649.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/10.0.649.0 Safari/534.17
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.82 Safari/534.16
Mozilla/5.0 (X11; U; Linux armv7l; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (X11; U; FreeBSD x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (X11; U; FreeBSD i386; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.133 Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.133 Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_2; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.127 Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru-RU; AppleWebKit/534.16; KHTML; like Gecko; Chrome/10.0.648.11;Safari/534.16)
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru-RU) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.0 Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.0 Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.642.0 Chrome/10.0.642.0 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.639.0 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.638.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.634.0 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.634.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 SUSE/10.0.626.0 (KHTML, like Gecko) Chrome/10.0.626.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.613.0 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.613.0 Chrome/10.0.613.0 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.04 Chromium/10.0.612.3 Chrome/10.0.612.3 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.612.1 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.611.0 Chrome/10.0.611.0 Safari/534.15
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.602.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.601.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.601.0 Safari/534.14
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML,like Gecko) Chrome/9.1.0.0 Safari/540.0
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML, like Gecko) Ubuntu/10.10 Chrome/9.1.0.0 Safari/540.0
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/9.0.601.0 Safari/534.14
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Ubuntu/10.10 Chromium/9.0.600.0 Chrome/9.0.600.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/9.0.600.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.599.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-CA) AppleWebKit/534.13 (KHTML like Gecko) Chrome/9.0.597.98 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.84 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.44 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.19 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416758524.9051
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416748405.3871
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416670950.695
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416664997.4379
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1333515017.9196
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.596.0 Safari/534.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Ubuntu/10.04 Chromium/9.0.595.0 Chrome/9.0.595.0 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Ubuntu/9.10 Chromium/9.0.592.0 Chrome/9.0.592.0 Safari/534.13
Mozilla/5.0 (X11; U; Windows NT 6; en-US) AppleWebKit/534.12 (KHTML, like Gecko) Chrome/9.0.587.0 Safari/534.12
Mozilla/5.0 (Windows U Windows NT 5.1 en-US) AppleWebKit/534.12 (KHTML, like Gecko) Chrome/9.0.583.0 Safari/534.12'''.split('\n')
# One user agent is picked at random at import time; every request made by
# this process reuses the same identity for the interpreter's lifetime.
USER_AGENT = random.choice(USER_AGENTS)
# Human-readable language name -> Google News language code.
# Keys are lowercase English names; used by utils.lang_mapping().
AVAILABLE_LANGUAGES = {
    "english": "en",
    "indonesian": "id",
    "czech": "cs",
    "german": "de",
    "spanish": "es-419",
    "french": "fr",
    "italian": "it",
    "latvian": "lv",
    "lithuanian": "lt",
    "hungarian": "hu",
    "dutch": "nl",
    "norwegian": "no",
    "polish": "pl",
    "portuguese brasil": "pt-419",
    "portuguese portugal": "pt-150",
    "romanian": "ro",
    "slovak": "sk",
    "slovenian": "sl",
    "swedish": "sv",
    "vietnamese": "vi",
    "turkish": "tr",
    "greek": "el",
    "bulgarian": "bg",
    "russian": "ru",
    "serbian": "sr",
    "ukrainian": "uk",
    "hebrew": "he",
    "arabic": "ar",
    "marathi": "mr",
    "hindi": "hi",
    "bengali": "bn",
    "tamil": "ta",
    "telugu": "te",
    # Misspelled key kept for backward compatibility; correct spelling added.
    "malyalam": "ml",
    "malayalam": "ml",
    "thai": "th",
    "chinese simplified": "zh-Hans",
    "chinese traditional": "zh-Hant",
    "japanese": "ja",
    "korean": "ko"
}
# Country name -> ISO 3166-1 alpha-2 code accepted by Google News.
# Keys are trimmed: several entries previously had trailing spaces
# ("Canada ", "India ", "Israel ", "Belgium ", "Ukraine "), which made
# utils.country_mapping("Canada") etc. silently return None.
AVAILABLE_COUNTRIES = {
    "Australia": "AU",
    "Botswana": "BW",
    "Canada": "CA",
    "Ethiopia": "ET",
    "Ghana": "GH",
    "India": "IN",
    "Indonesia": "ID",
    "Ireland": "IE",
    "Israel": "IL",
    "Kenya": "KE",
    "Latvia": "LV",
    "Malaysia": "MY",
    "Namibia": "NA",
    "New Zealand": "NZ",
    "Nigeria": "NG",
    "Pakistan": "PK",
    "Philippines": "PH",
    "Singapore": "SG",
    "South Africa": "ZA",
    "Tanzania": "TZ",
    "Uganda": "UG",
    "United Kingdom": "GB",
    "United States": "US",
    "Zimbabwe": "ZW",
    "Czech Republic": "CZ",
    "Germany": "DE",
    "Austria": "AT",
    "Switzerland": "CH",
    "Argentina": "AR",
    "Chile": "CL",
    "Colombia": "CO",
    "Cuba": "CU",
    "Mexico": "MX",
    "Peru": "PE",
    "Venezuela": "VE",
    "Belgium": "BE",
    "France": "FR",
    "Morocco": "MA",
    "Senegal": "SN",
    "Italy": "IT",
    "Lithuania": "LT",
    "Hungary": "HU",
    "Netherlands": "NL",
    "Norway": "NO",
    "Poland": "PL",
    "Brazil": "BR",
    "Portugal": "PT",
    "Romania": "RO",
    "Slovakia": "SK",
    "Slovenia": "SI",
    "Sweden": "SE",
    "Vietnam": "VN",
    "Turkey": "TR",
    "Greece": "GR",
    "Bulgaria": "BG",
    "Russia": "RU",
    "Ukraine": "UA",
    "Serbia": "RS",
    "United Arab Emirates": "AE",
    "Saudi Arabia": "SA",
    "Lebanon": "LB",
    "Egypt": "EG",
    "Bangladesh": "BD",
    "Thailand": "TH",
    "China": "CN",
    "Taiwan": "TW",
    "Hong Kong": "HK",
    "Japan": "JP",
    "Republic of Korea": "KR"
}
# Root of the Google News site and its RSS endpoint.
GOOGLE_NEWS_URL = 'https://news.google.com'
BASE_URL = f"{GOOGLE_NEWS_URL}/rss"
# Matches Google News article links (redirects that must be resolved).
# Raw string with escaped dots: the previous pattern was an f-string with no
# placeholders, left '.' as a wildcard, and ended in 'com*' (i.e. "co" plus
# zero or more "m"), none of which was intended.
GOOGLE_NEWS_REGEX = r'^http(s)?://(www\.)?news\.google\.com'
# Curated Google News topic sections accepted by get_news_by_topic().
TOPICS = ["WORLD", "NATION", "BUSINESS", "TECHNOLOGY", "ENTERTAINMENT", "SPORTS", "SCIENCE", "HEALTH"]
import hashlib
import json
import logging
import re
import pymongo
import requests
from gnews.utils.constants import AVAILABLE_COUNTRIES, AVAILABLE_LANGUAGES, GOOGLE_NEWS_REGEX
from pymongo import MongoClient
def lang_mapping(lang):
    """Return the Google News language code for *lang*, or None if unsupported."""
    return AVAILABLE_LANGUAGES.get(lang, None)
def country_mapping(country):
    """Return the ISO country code for *country*, or None if unsupported."""
    return AVAILABLE_COUNTRIES.get(country, None)
def connect_database(db_user, db_pw, db_name, collection_name):
    """
    Establish a connection to the "gnews" MongoDB Atlas cluster.

    .env file structure:
        DB_USER="..."
        DB_PW="..."
        DB_NAME="..."
        COLLECTION_NAME="..."
    The MongoDB cluster as well as the database should be named "gnews".

    :param db_user: Atlas database user name
    :param db_pw: password for *db_user*
    :param db_name: database to open on the cluster
    :param collection_name: name of the collection to return
    :return: the pymongo collection handle, or None on connection failure
    """
    from urllib.parse import quote_plus  # stdlib; escape URI credentials

    try:
        # Percent-encode user/password so reserved characters (':', '@', '/',
        # '%', ...) in credentials cannot corrupt the connection string.
        uri = (
            f"mongodb+srv://{quote_plus(db_user)}:{quote_plus(db_pw)}"
            f"@gnews.stjap.mongodb.net/{db_name}?retryWrites=true&w=majority"
        )
        cluster = MongoClient(uri)
        db = cluster[db_name]
        collection = db[collection_name]
        return collection
    except Exception as e:
        # Log instead of print, consistent with post_database().
        logging.error("Connection Error. %s", e)
def post_database(collection, news):
    """
    Upsert one news article into MongoDB, keyed by a content hash.

    The _id is the SHA-256 of the JSON-serialised news dict, so identical
    articles map to the same document and are only stored once.

    :param collection: pymongo collection returned by connect_database()
    :param news: news dict (title/description/published date/url/publisher)
    """
    content_hash = hashlib.sha256(str(json.dumps(news)).encode('utf-8')).hexdigest()
    doc = {
        "_id": content_hash,
        "title": news['title'],
        "description": news['description'],
        "published_date": news['published date'],
        "url": news['url'],
        "publisher": news['publisher'],
    }
    try:
        collection.update_one(doc, {'$set': doc}, upsert=True)
    except pymongo.errors.DuplicateKeyError:
        logging.error("Posting to database failed.")
def process_url(item, exclude_websites):
    """
    Resolve the final article URL for a feed entry.

    Returns None when the entry's source matches one of *exclude_websites*;
    otherwise follows Google News redirect links to their real destination.
    """
    source = item.get('source').get('href')
    excluded_patterns = (
        f'^http(s)?://(www.)?{website.lower()}.*' for website in exclude_websites
    )
    # Drop the entry if any exclusion pattern matches its source URL.
    if any(re.match(pattern, source) for pattern in excluded_patterns):
        return
    url = item.get('link')
    if re.match(GOOGLE_NEWS_REGEX, url):
        # Google News links redirect; a HEAD request reveals the target URL.
        url = requests.head(url).headers.get('location', url)
    return url
# Airzone Cloud
- [Airzone Cloud](#airzone-cloud)
- [Presentation](#presentation)
- [Abstract](#abstract)
- [Module classes](#module-classes)
- [Usage](#usage)
- [Install](#install)
- [Start API](#start-api)
- [Get installations](#get-installations)
- [Get installations](#get-installations-1)
- [Get groups for each installation](#get-groups-for-each-installation)
- [Get devices for each group of each installation](#get-devices-for-each-group-of-each-installation)
- [Get all devices from all installations shortcut](#get-all-devices-from-all-installations-shortcut)
- [Control a device](#control-a-device)
- [HVAC mode](#hvac-mode)
- [Available modes](#available-modes)
- [List supported modes for each devices](#list-supported-modes-for-each-devices)
- [Set HVAC mode on a master thermostat device (and all linked thermostats)](#set-hvac-mode-on-a-master-thermostat-device-and-all-linked-thermostats)
- [API documentation](#api-documentation)
- [Tests](#tests)
- [Update configuration in config_test.json](#update-configuration-in-config_testjson)
- [Run test script](#run-test-script)
## Presentation
### Abstract
Allows you to communicate easily with Airzone Cloud to retrieve information or to send commands (on/off, temperature, HVAC mode, ...)
This library manage the main Airzone Cloud API (try to connect to [www.airzonecloud.com](https://www.airzonecloud.com) to be sure).
Official API documentation is available here : https://developers.airzonecloud.com/docs/web-api/
### Module classes
- **AirzoneCloud** : represent your AirzoneCloud account. Contains a list of your **installations** :
- **Installation**: represent one of your installation (like your home, an office, ...). Contains a list of its **groups** :
- **Group** : represent a group of **devices** in the installation
- **Device** : represent your thermostat to control

## Usage
### Install
```bash
pip3 install AirzoneCloud
```
### Start API
```python
from AirzoneCloud import AirzoneCloud
api = AirzoneCloud("[email protected]", "password")
```
### Get installations
```python
for installation in api.installations:
print(
"Installation(name={}, access_type={}, ws_ids=[{}], id={})".format(
installation.name, installation.access_type, ", ".join(installation.ws_ids), installation.id
)
)
```
Output :
<pre>
Installation(name=Home, access_type=admin, ws_ids=[AA:BB:CC:DD:EE:FF], id=60f5cb990123456789abdcef)
</pre>
### Get installations
```python
for installation in api.installations:
print(
"Installation(name={}, access_type={}, ws_ids=[{}], id={})".format(
installation.name, installation.access_type, ", ".join(installation.ws_ids), installation.id
)
)
```
Output :
<pre>
Installation(name=Home, access_type=admin, ws_ids=[AA:BB:CC:DD:EE:FF], id=60f5cb990123456789abdcef)
</pre>
### Get groups for each installation
```python
for installation in api.installations:
print(installation)
for group in installation.groups:
print(
" Group(name={}, installation={}, id={})".format(
group.name, group.installation.name, group.id
)
)
```
Output :
<pre>
Installation(name=Home)
Group(name=System 1, installation=Home, id=60f5cb990123456789abdce0)
</pre>
### Get devices for each group of each installation
```python
for installation in api.installations:
print(installation)
for group in installation.groups:
print(" " + str(group))
for device in group.devices:
print(
" Device(name={}, is_connected={}, is_on={}, mode={}, current_temp={}, target_temp={}, id={}, ws_id={})".format(
device.name,
device.is_connected,
device.is_on,
device.mode,
device.current_temperature,
device.target_temperature,
device.id,
device.ws_id,
)
)
```
Output :
<pre>
Installation(name=Home)
Group(name=System 1, installation=Home)
Device(name=Salon, is_connected=True, is_on=True, mode=heating, current_temp=20.9, target_temp=20.0, id=60f5cb990123456789abdce1, ws_id=AA:BB:CC:DD:EE:FF)
Device(name=Ch parents, is_connected=True, is_on=False, mode=heating, current_temp=17.2, target_temp=18.0, id=60f5cb990123456789abdce2, ws_id=AA:BB:CC:DD:EE:FF)
Device(name=Ch bebe, is_connected=True, is_on=False, mode=heating, current_temp=18.6, target_temp=19.5, id=60f5cb990123456789abdce3, ws_id=AA:BB:CC:DD:EE:FF)
</pre>
### Get all devices from all installations shortcut
```python
for device in api.all_devices:
print(
"Device(name={}, is_on={}, mode={}, current_temp={}, target_temp={}, id={})".format(
device.name,
device.is_on,
device.mode,
device.current_temperature,
device.target_temperature,
device.id,
)
)
```
Output :
<pre>
Device(name=Salon, is_connected=True, is_on=True, mode=heating, current_temp=20.9, target_temp=20.0, id=60f5cb990123456789abdce1, ws_id=AA:BB:CC:DD:EE:FF)
Device(name=Ch parents, is_connected=True, is_on=False, mode=heating, current_temp=17.2, target_temp=18.0, id=60f5cb990123456789abdce2, ws_id=AA:BB:CC:DD:EE:FF)
Device(name=Ch bebe, is_connected=True, is_on=False, mode=heating, current_temp=18.6, target_temp=19.5, id=60f5cb990123456789abdce3, ws_id=AA:BB:CC:DD:EE:FF)
</pre>
### Control a device
All actions by default are waiting 1 second then refresh the device.
You can disable this behavior by adding auto_refresh=False.
```python
# get first device
device = api.all_devices[0]
print(device)
# start device & set temperature
device.turn_on(auto_refresh=False).set_temperature(22)
print(device)
# stopping device
device.turn_off()
print(device)
```
Output :
<pre>
Device(name=Salon, is_connected=True, is_on=False, mode=heating, current_temp=20.8, target_temp=20.0)
Device(name=Salon, is_connected=True, is_on=True, mode=heating, current_temp=20.8, target_temp=22.0)
Device(name=Salon, is_connected=True, is_on=False, mode=heating, current_temp=20.8, target_temp=22.0)
</pre>
### HVAC mode
#### Available modes
- **stop** : Stop mode
- **auto** : Automatic mode
- **cooling** : Cooling mode
- **heating** : Heating mode
- **ventilation** : Ventilation mode
- **dehumidify** : Dehumidifier / Dry mode
- **emergency-heating** : Emergency heat mode
- **air-heating** : Heat air mode (only compatible systems)
- **radiant-heating** : Heat radiant mode (only compatible systems)
- **combined-heating** : Heat combined mode (only compatible systems)
- **air-cooling** : Cooling air mode (only compatible systems)
- **radiant-cooling** : Cooling radiant mode (only compatible systems)
- **combined-cooling** : Cooling combined mode (only compatible systems)
Only master thermostat device can update the mode.
#### List supported modes for each devices
```python
for device in api.all_devices:
print(
"Device(name={}, mode={}, modes_availables={})".format(
device.name,
device.mode,
device.modes_availables,
)
)
```
Output :
<pre>
Device(name=Salon, mode=heating, modes_availables=['cooling', 'heating', 'ventilation', 'dehumidify', 'stop'])
Device(name=Ch parents, mode=heating, modes_availables=[])
Device(name=Ch bebe, mode=heating, modes_availables=[])
</pre>
If modes_availables is an empty list, your device is not the master thermostat.
#### Set HVAC mode on a master thermostat device (and all linked thermostats)
```python
device = api.all_devices[0]
print(device)
# set mode to cooling
device.set_mode("cooling")
print(device)
```
Output :
<pre>
Device(name=Salon, is_connected=True, is_on=True, mode=heating, current_temp=20.8, target_temp=20.0)
Device(name=Salon, is_connected=True, is_on=True, mode=cooling, current_temp=20.8, target_temp=20.0)
</pre>
## API documentation
[API full doc](API.md)
## Tests
### Update configuration in config_test.json
- **email** : used to log in to your AirzoneCloud account (default to *[email protected]*)
- **password** : used to log in to your AirzoneCloud account (default to *changeme*)
- **log_level** : minimum level of log to display : DEBUG | INFO | WARNING | ERROR | CRITICAL (default to *INFO*)
- **display_group_properties** : display all properties for each group (default to *true*).
- **display_device_properties** : display all properties for each device (default to *true*).
- **display_api_token** : ask to display token used to connect to the AirzoneCloud API (default to *false*). Useful for https://developers.airzonecloud.com/docs/web-api/
- **refresh_before_display** : ask to call all refresh functions before displaying for test purpose (default to *false*).
### Run test script
```bash
./test.py
``` | AirzoneCloud | /AirzoneCloud-1.2.2.tar.gz/AirzoneCloud-1.2.2/README.md | README.md |
# Airzone Cloud Daikin
- [Airzone Cloud Daikin](#airzone-cloud-daikin)
- [Presentation](#presentation)
- [Abstract](#abstract)
- [Module classes](#module-classes)
- [Usage](#usage)
- [Install](#install)
- [Start API](#start-api)
- [Get installations](#get-installations)
- [Get devices from installations](#get-devices-from-installations)
- [Get all devices shortcut](#get-all-devices-shortcut)
- [Control a device](#control-a-device)
- [HVAC mode](#hvac-mode)
- [Available modes](#available-modes)
- [Set HVAC mode on a system (and its sub-zones)](#set-hvac-mode-on-a-system-and-its-sub-zones)
- [API doc](#api-doc)
- [Constructor](#constructor)
## Presentation
### Abstract
Allows you to communicate easily with Daikin Airzone Cloud to retrieve information or to send commands (on/off, temperature, HVAC mode, ...)
This API is specific to Daikin implementation (try to connect to [dkn.airzonecloud.com](https://dkn.airzonecloud.com) to be sure).
If you are looking for the main Airzone Cloud API (try to connect to [www.airzonecloud.com](https://www.airzonecloud.com)), you should use this package : [AirzoneCloud](https://github.com/max13fr/AirzoneCloud)
### Module classes
- **AirzoneCloudDaikin** : represent your Daikin AirzoneCloud account. Contains a list of your **installations** :
- **Installation**: represent one of your installation (like your home, an office, ...). Contains a list of its **devices** :
- **Device** : represents your climate equipment to control
## Usage
### Install
```bash
pip3 install AirzoneCloudDaikin
```
### Start API
```python
from AirzoneCloudDaikin import AirzoneCloudDaikin
api = AirzoneCloudDaikin("[email protected]", "password")
```
### Get installations
```python
for installation in api.installations:
print(
"Installation(name={}, type={}, scenary={}, id={})".format(
installation.name, installation.type, installation.scenary, installation.id
)
)
```
Output :
<pre>
Installation(name=My home, type=home, scenary=occupied, id=5d592c14646b6d798ccc2aaa)
</pre>
### Get devices from installations
```python
for installation in api.installations:
for device in installation.devices:
print(
"Device(name={}, is_on={}, mode={}, current_temp={}, target_temp={}, id={}, mac={})".format(
device.name,
device.is_on,
device.mode,
device.current_temperature,
device.target_temperature,
device.id,
device.mac,
)
)
```
Output :
<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0, id=5ab1875a651241708814575681, mac=AA:BB:CC:DD:EE:FF)
</pre>
### Get all devices shortcut
```python
for device in api.all_devices:
print(
"Device(name={}, is_on={}, mode={}, current_temp={}, target_temp={}, id={}, mac={})".format(
device.name,
device.is_on,
device.mode,
device.current_temperature,
device.target_temperature,
device.id,
device.mac,
)
)
```
Output :
<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0, id=5ab1875a651241708814575681, mac=AA:BB:CC:DD:EE:FF)
</pre>
### Control a device
```python
device = api.all_devices[0]
print(device)
# start device
device.turn_on()
# set temperature
device.set_temperature(26)
print(device)
# stopping device
device.turn_off()
print(device)
```
Output :
<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=30.0)
Device(name=Dknwserver, is_on=True, mode=cool, current_temp=25.0, target_temp=26.0)
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0)
</pre>
### HVAC mode
#### Available modes
- **cool** : Cooling mode
- **heat** : Heating mode
- **ventilate** : Ventilation
- **dehumidify** : Dry
- **heat-cold-auto** : Auto heat/cold mode
#### Set HVAC mode on a system (and its sub-zones)
```python
device = api.all_devices[0]
print(device)
# set mode to heat
device.set_mode("heat")
print(device)
```
Output :
<pre>
Device(name=Dknwserver, is_on=False, mode=cool, current_temp=25.0, target_temp=26.0)
Device(name=Dknwserver, is_on=False, mode=heat, current_temp=25.0, target_temp=23.0)
</pre>
> :warning: Daikin climate equipment has 2 consigns : one for heat & one of cold.
> Its visible in the previous example, the target temperature has change from 26 to 23 just by changing the mode from cool to heat.
> So don't forget to do your set_temperature() AFTER the set_mode() and not before
## API doc
[API full doc](API.md)
### Constructor
```python
AirzoneCloudDaikin(username, password, user_agent=None, base_url=None)
```
- **username** : you're username used to connect on Daikin Airzone Cloud website or app
- **password** : you're password used to connect on Daikin Airzone Cloud website or app
- **user_agent** : allow to change default user agent if set
- **base_url** : allow to change base url of the Daikin Airzone Cloud API if set
- default value : _https://dkn.airzonecloud.com_
| AirzoneCloudDaikin | /AirzoneCloudDaikin-0.4.0.tar.gz/AirzoneCloudDaikin-0.4.0/README.md | README.md |
# Airzone Cloud
- [Airzone Cloud](#airzone-cloud)
- [Presentation](#presentation)
- [Abstract](#abstract)
- [Module classes](#module-classes)
- [Usage](#usage)
- [Install](#install)
- [Start API](#start-api)
- [Get device status](#get-device-status)
- [Get system status](#get-system-status)
- [Get all zones status (on all devices / systems)](#get-all-zones-status-on-all-devices--systems)
- [Control a specific zone](#control-a-specific-zone)
- [HVAC mode](#hvac-mode)
- [Available modes](#available-modes)
- [Set HVAC mode on a system (and its sub-zones)](#set-hvac-mode-on-a-system-and-its-sub-zones)
- [API doc](#api-doc)
- [Constructor](#constructor)
## Presentation
### Abstract
Allow to communicate easily with Airzone Cloud to retrieve information or to send commands (on/off, temperature, HVAC mode, ...)
This library manage the main Airzone Cloud API (try to connect to [www.airzonecloud.com](https://www.airzonecloud.com) to be sure).
If you are looking for the specific Airzone Cloud API for Daikin (try to connect to [dkn.airzonecloud.com](https://dkn.airzonecloud.com)), you should use this package : [AirzoneCloudDaikin](https://github.com/max13fr/AirzoneCloudDaikin)
### Module classes
* **AirzoneCloud** : represent your AirzoneCloud account. Contains a list of your **devices** :
* **Device** : represent one of your Airzone webserver registered. Contains a list of its **systems** :
* **System** : represent your climate equipment (Mitsubishi, Daikin, ...). Contains a list of its **zones** :
* **Zone** : represent a zone to control
## Usage
### Install
```bash
pip3 install AirzoneCloud
```
### Start API
```python
from AirzoneCloud import AirzoneCloud
api = AirzoneCloud("[email protected]", "password")
```
### Get device status
```python
for device in api.devices:
print(
"Device name={}, status={}, id={}, mac={}, pin={}".format(
device.name, device.status, device.id, device.mac, device.pin
)
)
```
Output :
<pre>
Device name=Home, status=activated, id=5bc8ae0c4149526af90c0000, mac=AA:BB:CC:DD:EE:FF, pin=1234
</pre>
### Get system status
```python
for system in api.devices[0].systems:
print(
"System name={}, mode={}, eco={}, velocity={}, airflow={}".format(
system.name,
system.mode,
system.eco,
system.velocity,
system.airflow,
)
)
```
Output :
<pre>
System name=Home, mode=heat-both, eco=eco-a, velocity=None, airflow=None
</pre>
### Get all zones status (on all devices / systems)
```python
for zone in api.all_zones:
print(
"Zone name={}, is_on={}, mode={}, current_temperature={}, target_temperature={}".format(
zone.name,
zone.is_on,
zone.mode,
zone.current_temperature,
zone.target_temperature,
)
)
```
Output :
<pre>
Zone name=Baby bedroom, is_on=False, mode=heat-both, current_temperature=20.4, target_temperature=19.5
Zone name=Parents bedroom, is_on=False, mode=heat-both, current_temperature=21.1, target_temperature=17.0
Zone name=Living room, is_on=True, mode=heat-both, current_temperature=21.4, target_temperature=21.5
Zone name=Kitchen, is_on=False, mode=heat-both, current_temperature=21.2, target_temperature=19.0
</pre>
### Control a specific zone
```python
zone = api.all_zones[2]
print(zone)
# start zone
zone.turn_on()
# set temperature
zone.set_temperature(18.5)
print(zone)
```
Output :
<pre>
Zone(name=Living room, is_on=False, mode=heat-both, current_temp=21.6, target_temp=21.0)
Zone(name=Living room, is_on=True, mode=heat-both, current_temp=21.6, target_temp=18.5)
</pre>
### HVAC mode
#### Available modes
* **stop** : Stop
* **ventilate** : Ventilate
* **dehumidify** : Dry
* **heat-air** : Air heating
* **heat-radiant** : Radiant heating
* **heat-both** : Combined heating
* **cool-air** : Air cooling
* **cool-radiant** : Radiant cooling
* **cool-both** : Combined cooling
#### Set HVAC mode on a system (and its sub-zones)
```python
system = api.devices[0].systems[0]
print(system)
# set mode to heat-both
system.set_mode("heat-both")
print(system)
```
Output :
<pre>
System(name=Home, mode=stop, eco=eco-a, velocity=None, airflow=None)
System(name=Home, mode=heat-both, eco=eco-a, velocity=None, airflow=None)
</pre>
## API doc
[API full doc](API.md)
### Constructor
```python
AirzoneCloud(username, password, user_agent=None, base_url=None)
```
* **username** : you're username used to connect on Airzone Cloud website or app
* **password** : you're password used to connect on Airzone Cloud website or app
* **user_agent** : allow to change default user agent if set
* **base_url** : allow to change base url of the Airzone Cloud API if set
* default value : _https://www.airzonecloud.com_
| AirzoneCloudfix | /AirzoneCloudfix-0.5.0.1.tar.gz/AirzoneCloudfix-0.5.0.1/README.md | README.md |
API = "/api/v1"
API_LOGIN = "{}/auth/login".format(API)
API_SITES = "{}/installations".format(API)
API_SYSTEMS = "/systems"
API_ZONES = "{}/devices".format(API)
API_ZONE = "{}/devices".format(API)
API_EVENTS = "/events"
# 2020-04-18: extracted from https://airzonecloud.com/assets/application-506494af86e686bf472b872d02048b42.js
MODES_CONVERTER = {
"0": {"name": "stop", "description": "Stop"},
"1": {"name": "cool-air", "description": "Air cooling"},
"2": {"name": "heat-radiant", "description": "Radiant heating"},
"3": {"name": "ventilate", "description": "Ventilate"},
"4": {"name": "heat-air", "description": "Air heating"},
"5": {"name": "heat-both", "description": "Combined heating"},
"6": {"name": "dehumidify", "description": "Dry"},
"7": {"name": "not_exit", "description": ""},
"8": {"name": "cool-radiant", "description": "Radiant cooling"},
"9": {"name": "cool-both", "description": "Combined cooling"},
}
SCHEDULE_MODES_CONVERTER = {
"0": {"name": "", "description": ""},
"1": {"name": "stop", "description": "Stop"},
"2": {"name": "ventilate", "description": "Ventilate"},
"3": {"name": "cool-air", "description": "Air cooling"},
"4": {"name": "heat-air", "description": "Air heating"},
"5": {"name": "heat-radiant", "description": "Radiant heating"},
"6": {"name": "heat-both", "description": "Combined heating"},
"7": {"name": "dehumidify", "description": "Dry"},
"8": {"name": "cool-radiant", "description": "Radiant cooling"},
"9": {"name": "cool-both", "description": "Combined cooling"},
}
VELOCITIES_CONVERTER = {
"0": {"name": "auto", "description": "Auto"},
"1": {"name": "velocity-1", "description": "Low speed"},
"2": {"name": "velocity-2", "description": "Medium speed"},
"3": {"name": "velocity-3", "description": "High speed"},
}
AIRFLOW_CONVERTER = {
"0": {"name": "airflow-0", "description": "Silence"},
"1": {"name": "airflow-1", "description": "Standard"},
"2": {"name": "airflow-2", "description": "Power"},
}
ECO_CONVERTER = {
"0": {"name": "eco-off", "description": "Eco off"},
"1": {"name": "eco-m", "description": "Eco manual"},
"2": {"name": "eco-a", "description": "Eco A"},
"3": {"name": "eco-aa", "description": "Eco A+"},
"4": {"name": "eco-aaa", "description": "Eco A++"},
}
SCENES_CONVERTER = {
"0": {
"name": "stop",
"description": "The air-conditioning system will remain switched off regardless of the demand status of any zone, all the motorized dampers will remain opened",
},
"1": {
"name": "confort",
"description": "Default and standard user mode. The desired set point temperature can be selected using the predefined temperature ranges",
},
"2": {
"name": "unocupied",
"description": "To be used when there is no presence detected for short periods of time. A more efficient set point temperature will be set. If the thermostat is activated, the zone will start running in comfort mode",
},
"3": {
"name": "night",
"description": "The system automatically changes the set point temperature 0.5\xba C/1\xba F every 30 minutes in up to 4 increments of 2\xba C/4\xba F in 2 hours. When cooling, the system increases the set point temperature; when heating, the system decreases the set point temperature",
},
"4": {
"name": "eco",
"description": "The range of available set point temperatures change for more efficient operation",
},
"5": {
"name": "vacation",
"description": "This mode feature saves energy while the user is away for extended periods of time",
},
} | AirzoneCloudfix | /AirzoneCloudfix-0.5.0.1.tar.gz/AirzoneCloudfix-0.5.0.1/AirzoneCloud/contants.py | contants.py |
import logging
import pprint
from .contants import (
MODES_CONVERTER,
ECO_CONVERTER,
VELOCITIES_CONVERTER,
AIRFLOW_CONVERTER,
)
from .Zone import Zone
_LOGGER = logging.getLogger(__name__)
class System:
"""Manage a AirzoneCloud system"""
def __init__(self, api, site, data):
self._api = api
self._site = site
self._zones = {}
self._set_data_refreshed(data)
# load zones
self._load_zones()
# log
_LOGGER.info("Init {}".format(self.str_complete))
_LOGGER.debug(data)
def __str__(self):
return "System(name={}, mode={}, eco={}, velocity={}, airflow={})".format(
self.name, self.mode, self.eco, self.velocity, self.airflow,
)
@property
def str_complete(self):
return "System(name={}, mode={}, eco={}, velocity={}, airflow={}, id={}, system_number={}, site_id={})".format(
self.name,
self.mode,
self.eco,
self.velocity,
self.airflow,
self.id,
self.system_number,
self.site_id,
)
#
# getters
#
@property
def name(self):
return self._data.get("name")
@property
def mode(self):
if self.mode_raw is None:
return None
return MODES_CONVERTER[self.mode_raw]["name"]
@property
def mode_description(self):
if self.mode_raw is None:
return None
return MODES_CONVERTER[self.mode_raw]["description"]
@property
def mode_raw(self):
return self._data.get("mode")
@property
def eco(self):
if self.eco_raw is None:
return None
return ECO_CONVERTER[self.eco_raw]["name"]
@property
def eco_description(self):
if self.eco_raw is None:
return None
return ECO_CONVERTER[self.eco_raw]["description"]
@property
def eco_raw(self):
return self._data.get("eco")
@property
def has_velocity(self):
return self._data.get("has_velocity")
@property
def velocity(self):
if self.velocity_raw is None:
return None
return VELOCITIES_CONVERTER[self.velocity_raw]["name"]
@property
def velocity_description(self):
if self.velocity_raw is None:
return None
return VELOCITIES_CONVERTER[self.velocity_raw]["description"]
@property
def velocity_raw(self):
return self._data.get("velocity")
@property
def has_airflow(self):
return self._data.get("has_air_flow")
@property
def airflow(self):
if self.airflow_raw is None:
return None
return AIRFLOW_CONVERTER[self.airflow_raw]["name"]
@property
def airflow_description(self):
if self.airflow_raw is None:
return None
return AIRFLOW_CONVERTER[self.airflow_raw]["description"]
@property
def airflow_raw(self):
return self._data.get("air_flow")
@property
def max_temp(self):
if self._data.get("max_limit") is not None:
return float(self._data.get("max_limit"))
return None
@property
def min_temp(self):
if self._data.get("min_limit") is not None:
return float(self._data.get("min_limit"))
return None
@property
def id(self):
return self._data.get("id")
@property
def site_id(self):
return self._data.get("site_id")
@property
def system_number(self):
return self._data.get("system_number")
@property
def firmware_ws(self):
return self._data.get("firm_ws")
@property
def firmware_system(self):
return self._data.get("system_fw")
#
# setters
#
def set_mode(self, mode_name):
""" Set mode of the system """
_LOGGER.info("call set_mode({}) on {}".format(mode_name, self))
mode_id_found = None
for mode_id, mode in MODES_CONVERTER.items():
if mode["name"] == mode_name:
mode_id_found = mode_id
break
if mode_id_found is None:
raise ValueError('mode name "{}" not found'.format(mode_name))
# send event
self._send_event("mode", int(mode_id_found))
# update mode
self._data["mode"] = mode_id_found
# refresh modes on sub-zones (don't refresh because API so slow to update sub-zones, about 5sec...)
for zone in self.zones:
zone._data["mode"] = mode_id_found
return True
#
# children
#
@property
def zones(self):
""" Get all zones in this system """
return self._zones
#
# parent site
#
@property
def site(self):
""" Get parent site """
return self._site
#
# Refresh
#
def ask_airzone_update(self):
"""
Ask an update to the airzone hardware (airzonecloud don't autopull data like current temperature)
The update should be available in airzonecloud after 3 to 5 secs in average
"""
self._ask_airzone_update()
def refresh(self, refresh_zones=True):
""" Refresh current system data (call refresh_systems on parent site) """
# ask airzone to update its data in airzonecloud (there is some delay so current update will be available on next refresh)
self.ask_airzone_update()
# refresh systems (including current) from parent site
self.site.refresh_systems()
# refresh subzones in needed
if refresh_zones:
self._load_zones()
#
# private
#
def _load_zones(self):
"""Load all zones for this system"""
#pprint.pprint(self._data)
current_zones = self._zones
self._zones = {}
try:
for zone_data in self._data.get("devices"):
zone_id = zone_data["device_id"]
zone = current_zones.get(zone_id)
# zone not found => instance new zone
if zone is None:
zone = Zone(self._api, self, zone_id)
else:
pass
self._zones[zone.id] = zone
except RuntimeError:
raise Exception(
"Unable to load zones of system {} ({}) from AirzoneCloud".format(
self.name, self.id
)
)
return self._zones
def _send_event(self, option, value):
""" Send an event for current system """
payload = {
"event": {
"cgi": "modsistema",
"site_id": self.site_id,
"system_number": self.system_number,
"option": option,
"value": value,
}
}
return self._api._send_event(payload)
def _ask_airzone_update(self):
"""Ask an update to the airzone hardware (airzonecloud don't autopull data)"""
payload = {
"event": {
"cgi": "infosistema2",
"site_id": self.site_id,
"system_number": self.system_number,
"option": None,
"value": None,
}
}
return self._api._send_event(payload)
def _set_data_refreshed(self, data):
""" Set data refreshed (call by parent site on refresh_systems()) """
self._data = data
_LOGGER.info("Data refreshed for {}".format(self.str_complete))
#
# System raw data example
#
# {
# "id": "...",
# "site_id": "...",
# "name": "Home",
# "eco": "2",
# "eco_color": "5",
# "velocity": null,
# "air_flow": null,
# "connMC": null,
# "VMC_mode": "0",
# "VMC_state": "0",
# "has_velocity": false,
# "has_air_flow": false,
# "mode": "5",
# "modes": "1111111011",
# "master_setup": false,
# "setup_type": "0",
# "max_limit": "30.0",
# "min_limit": "18.0",
# "zones_ids": [
# "id1...",
# "id2...",
# "id3...",
# "id4...",
# ],
# "class": "System",
# "updated_at": 1587195368,
# "system_number": "1",
# "last_update": 1587195368,
# "firm_ws": "3.173",
# "scene": null,
# "auto": null,
# "temperature_unit": null,
# "autochange_differential": null,
# "config_ZBS_visible_environment": null,
# "system_fw": 3.09,
# "heat_stages": "1",
# "cold_stages": null,
# "auto_index_prog": true,
# "system_errors": "00000001",
# "auto_mode_battery_temperature": false,
# "machine_error_code": "ÿÿÿÿ",
# "setpoint": null,
# "tank_temp": null,
# "powerful": null,
# "power_acs": null,
# "acs_min": null,
# "acs_max": null,
# } | AirzoneCloudfix | /AirzoneCloudfix-0.5.0.1.tar.gz/AirzoneCloudfix-0.5.0.1/AirzoneCloud/System.py | System.py |
import logging
import pprint
from .contants import MODES_CONVERTER
_LOGGER = logging.getLogger(__name__)
class Zone:
"""Manage a Airzonecloud zone"""
def __init__(self, api, system, zone_id):
self._api = api
self._zone_id = zone_id
self._system = system
self._data = {}
self.refresh()
# log
_LOGGER.info("Init {}".format(self.str_complete))
_LOGGER.debug(zone_id)
def __str__(self):
return "Zone(name={}, is_on={}, mode={}, current_temp={}, target_temp={})".format(
self.name,
self.is_on,
self.mode,
self.current_temperature,
self.target_temperature,
)
@property
def str_complete(self):
return "Zone"
return "Zone(name={}, is_on={}, mode={}, current_temperature={} target_temperature={}, id={}, system_number={}, zone_number={})".format(
self.name,
self.is_on,
self.mode,
self.current_temperature,
self.target_temperature,
self.id,
self.system_number,
self.zone_number,
)
#
# getters
#
@property
def name(self):
return self._data.get("name")
@property
def current_temperature(self):
if self._data.get("temp") is not None:
return float(self._data.get("temp"))
return None
@property
def current_humidity(self):
if self._data.get("humidity") is not None:
return float(self._data.get("humidity"))
return None
@property
def target_temperature(self):
if self._data.get("consign") is not None:
return float(self._data.get("consign"))
return None
@property
def max_temp(self):
if self._data.get("upper_conf_limit") is not None:
return float(self._data.get("upper_conf_limit"))
return None
@property
def min_temp(self):
if self._data.get("lower_conf_limit") is not None:
return float(self._data.get("lower_conf_limit"))
return None
@property
def is_on(self):
return bool(int(self._data.get("state", 0)))
@property
def mode(self):
return MODES_CONVERTER[self.mode_raw]["name"]
@property
def mode_description(self):
return MODES_CONVERTER[self.mode_raw]["description"]
@property
def mode_raw(self):
return str(self._data.get("mode"))
@property
def id(self):
return self._data.get("id")
@property
def device_id(self):
return self._data.get("device_id")
@property
def system_number(self):
return self._data.get("system_number")
@property
def zone_number(self):
return self._data.get("zone_number")
#
# setters
#
def turn_on(self):
""" Turn zone on """
_LOGGER.info("call turn_on() on {}".format(self.str_complete))
self._send_event("state", 1)
self._data["state"] = "1"
return True
def turn_off(self):
""" Turn zone off """
_LOGGER.info("call turn_off() on {}".format(self.str_complete))
self._send_event("state", 0)
self._data["state"] = "0"
return True
def set_temperature(self, temperature):
""" Set target_temperature for this zone """
_LOGGER.info(
"call set_temperature({}) on {}".format(temperature, self.str_complete)
)
temperature = float(temperature)
if self.min_temp is not None and temperature < self.min_temp:
temperature = self.min_temp
if self.max_temp is not None and temperature > self.max_temp:
temperature = self.max_temp
self._send_event("consign", temperature)
self._data["consign"] = str(temperature)
return True
#
# parent system
#
@property
def system(self):
""" Get parent system """
return self._system
#
# Refresh zone data
#
def refresh(self):
""" Refresh current zone data (call refresh on parent system) """
self._config_data = self._api._get_zone_config(self.system.site.id, self._zone_id)
#
# private
#
def _send_event(self, option, value):
""" Send an event for current zone """
payload = {
"event": {
"cgi": "modzona",
"device_id": self.device_id,
"system_number": self.system_number,
"zone_number": self.zone_number,
"option": option,
"value": value,
}
}
return self._api._send_event(payload)
#
# Zone raw data example
#
# {
# "id": "...",
# "system_id": "...",
# "device_id": "...",
# "modes": "1111111011",
# "warning": "0",
# "name": "Living room",
# "system_number": "1",
# "zone_number": "6",
# "state": "1",
# "consign": "21.5",
# "temp": "21.4",
# "mode": "5",
# "velocity": None,
# "show_velocity": None,
# "sleep": "0",
# "lower_conf_limit": "18.0",
# "upper_conf_limit": "30.0",
# "master": "1",
# "velMax": None,
# "eco": "2",
# "prog_enabled": "1",
# "speed_prog_mode": "0",
# "show_ventilation": "1",
# "updated_at": 1587190474,
# "setup_type": "0",
# "class": "Zone",
# "last_update": 1587190474,
# "next_schedule_number": 4,
# "led": None,
# "offset": None,
# "cold_offset": None,
# "heat_offset": None,
# "scene": None,
# "air_flow": None,
# "humidity": "42",
# "coldConsign": "",
# "heatConsign": "",
# "auto": None,
# "temperature_unit": None,
# "vla": None,
# "config": {
# "id": "...",
# "cold_values": "1",
# "heat_values": "1",
# "cold_angle": None,
# "heat_angle": None,
# "swing_horizontal": None,
# "swing_vertical": None,
# "antifreeze": "0",
# "vla": None,
# "zone_number": "6",
# "slave": None,
# "master": None,
# "basic_mode": "0",
# "ambient_temp": "24.6",
# "heat_type": None,
# "cold_type": None,
# "heat_type_config": "1",
# "cold_type_config": "1",
# "ventilation": None,
# "q_weight": None,
# "window": None,
# "presence": None,
# "spray_dew": None,
# "local_vent": None,
# "tact_fw": "3. 7",
# "firm_lm": None,
# "manufacturer": None,
# "led": None,
# "velMax": None,
# "confort_cold_consign": None,
# "confort_heat_consign": None,
# "eco_cold_consign": None,
# "eco_heat_consign": None,
# "unocupied_cold_consign": None,
# "unocupied_heat_consign": None,
# "vacation_cold_consign": None,
# "vacation_heat_consign": None,
# "firm_ws": "3.173",
# "offset": None,
# "errors": "0",
# "zone_id": "...",
# "automatic_weight": None,
# "autochange_differential": None,
# "offset_environment_cold": None,
# "offset_environment_heat": None,
# "eco_function": None,
# "heat_constant_ventilation": None,
# "cold_constant_ventilation": None,
# "v_min_module_010": None,
# "v_max_module_010": None,
# "cold_battery_temperature": None,
# "heat_battery_temperature": None,
# "VAF_coldstage": None,
# "VAF_heatstage": None,
# "VAF_radiantstage": None,
# },
# } | AirzoneCloudfix | /AirzoneCloudfix-0.5.0.1.tar.gz/AirzoneCloudfix-0.5.0.1/AirzoneCloud/Zone.py | Zone.py |
import json
import logging
import pprint
import requests
import urllib
import urllib.parse
from .contants import (
API_LOGIN,
API_SITES,
API_ZONES,
API_ZONE,
API_EVENTS,
)
from .Site import Site
_LOGGER = logging.getLogger(__name__)
class AirzoneCloud:
"""Allow to connect to AirzoneCloud API"""
_session = None
_username = None
_password = None
_base_url = "https://m.airzonecloud.com"
_user_agent = "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 7 Build/MOB30X; wv) AppleWebKit/537.26 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.110 Safari/537.36"
_token = None
_sites = {}
def __init__(
self, username, password, user_agent=None, base_url=None,
):
"""Initialize API connection"""
self._session = requests.Session()
self._username = username
self._password = password
if user_agent is not None and isinstance(user_agent, str):
self._user_agent = user_agent
if base_url is not None and isinstance(base_url, str):
self._base_url = base_url
# login
self._login()
# load sites
self._load_sites()
#
# getters
#
@property
def sites(self):
"""Get sites list (same order as in app)"""
return list(self._sites.values())
@property
def all_systems(self):
"""Get all systems from all sites (same order as in app)"""
result = []
for site in self.sites:
for system in site.systems:
result.append(system)
return result
@property
def all_zones(self):
"""Get all zones from all sites (same order as in app)"""
result = []
for site in self.sites:
for system in site.systems:
for zone in system.zones:
result.append(zone)
return result
#
# Refresh
#
def refresh_sites(self):
"""Refresh sites"""
self._load_sites()
#
# private
#
def _login(self):
"""Login to AirzoneCloud and return token"""
try:
url = "{}{}".format(self._base_url, API_LOGIN)
login_payload = {"email": self._username, "password": self._password}
headers = {"User-Agent": self._user_agent}
response = self._session.post(
url, headers=headers, json=login_payload
).json()
self._token = response.get("token")
except (RuntimeError, AttributeError):
raise Exception("Unable to login to AirzoneCloud") from None
_LOGGER.info("Login success as {}".format(self._username))
return self._token
def _load_sites(self):
"""Load all sites for this account"""
current_sites = self._sites
self._sites = {}
try:
for site_data in self._get_sites():
#pprint.pprint(site_data)
site_id = site_data.get("installation_id")
site = current_sites.get(site_id)
# site not found => instance new site
if site is None:
site = Site(self, site_id)
else:
site.refersh();
self._sites[site.id] = site
except RuntimeError:
raise Exception("Unable to load sites from AirzoneCloud")
return self._sites
def _get_sites(self):
"""Http GET to load sites"""
_LOGGER.debug("get_sites()")
return self._get(API_SITES).get("installations")
def _get_site(self, site_id):
"""Http GET to load site"""
_LOGGER.debug("get_site({})".format(site_id))
return self._get("{}/{}".format(API_SITES, site_id))
def _get_zone(self, zone_id):
"""Http GET to load Zone"""
_LOGGER.debug("get_zone({})".format(zone_id))
return self._get("{}/{}".format(API_ZONES, "60f817cc7b7b998ed14b58f9"))
def _get_zone_config(self, site_id, zone_id):
"""Http GET to load Zone"""
_LOGGER.debug("get_zone_config({}, {})".format(site_id, zone_id))
return self._get("{}/{}/config".format(API_ZONE, zone_id), params = { "installation_id": site_id, "type": "user"})
def _send_event(self, payload):
"""Http POST to send an event"""
_LOGGER.debug("Send event with payload: {}".format(json.dumps(payload)))
try:
result = self._post(API_EVENTS, payload)
_LOGGER.debug("Result event: {}".format(json.dumps(result)))
return result
except RuntimeError:
_LOGGER.error("Unable to send event to AirzoneCloud")
return None
def _get(self, api_endpoint, params={}):
"""Do a http GET request on an api endpoint"""
params["format"] = "json"
return self._request(method="GET", api_endpoint=api_endpoint, params=params)
def _post(self, api_endpoint, payload={}):
"""Do a http POST request on an api endpoint"""
headers = {
"X-Requested-With": "XMLHttpRequest",
"Content-Type": "application/json;charset=UTF-8",
"Accept": "application/json, text/plain, */*",
}
return self._request(
method="POST", api_endpoint=api_endpoint, headers=headers, json=payload
)
def _request(
self, method, api_endpoint, params={}, headers={}, json=None, autoreconnect=True
):
# generate url with auth
headers["authorization"] = "Bearer {}".format(self._token)
url = "{}{}/?{}".format(
self._base_url, api_endpoint, urllib.parse.urlencode(params)
)
#pprint.pprint(url)
# set user agent
headers["User-Agent"] = self._user_agent
# make call
call = self._session.request(method=method, url=url, headers=headers)
if call.status_code == 401 and autoreconnect: # unauthorized error
# log
_LOGGER.info(
"Get unauthorized error (token expired ?), trying to reconnect..."
)
# try to reconnect
self._login()
# retry get without autoreconnect (to avoid infinite loop)
return self._request(
method=method,
api_endpoint=api_endpoint,
params=params,
headers=headers,
json=json,
autoreconnect=False,
)
# raise other error if needed
call.raise_for_status()
#pprint.pprint(call.json())
return call.json() | AirzoneCloudfix | /AirzoneCloudfix-0.5.0.1.tar.gz/AirzoneCloudfix-0.5.0.1/AirzoneCloud/AirzoneCloud.py | AirzoneCloud.py |
import logging
import pprint
from .System import System
_LOGGER = logging.getLogger(__name__)
class Site:
"""Manage a AirzoneCloud site"""
def __init__(self, api, site_id):
self._api = api
self._site_id = site_id
self._systems = {}
# load all systems
self.refresh()
# log
_LOGGER.info("Init {}".format(self.str_complete))
_LOGGER.debug(self._data)
def __str__(self):
return "Site(name={}, status={})".format(self.name, self.status)
@property
def str_complete(self):
return "Site(name={}, status={}, id={}, mac={})".format(
self.name, self.status, self.id, self.mac,
)
#
# getters
#
@property
def id(self):
""" Return site id """
return self._data.get("installation_id")
@property
def name(self):
""" Return site name """
return self._data.get("name")
@property
def status(self):
""" Return site status """
return self._data.get("status")
@property
def location(self):
""" Return site location """
return self._data.get("complete_name")
@property
def mac(self):
""" Return site mac """
return self._data.get("mac")
@property
def pin(self):
""" Return site pin code """
return self._data.get("pin")
@property
def target_temperature(self):
""" Return site target temperature """
return self._data.get("consign")
@property
def firmware_ws(self):
""" Return webserver site """
return self._data.get("firm_ws")
@property
def has_eco(self):
return self._data.get("has_eco")
@property
def has_velocity(self):
return self._data.get("has_velocity")
@property
def has_airflow(self):
return self._data.get("has_air_flow")
@property
def has_farenheit(self):
return self._data.get("has_harenheit")
@property
def sync_datetime(self):
""" Return True if site datetime is sync with AirzoneCloud """
return self._data.get("sync_datetime")
#
# children
#
@property
def systems(self):
return list(self._systems.values())
#
# Refresh
#
def refresh(self, refresh_systems=True):
""" Refresh current site data (call refresh_sites on parent AirzoneCloud) """
self._data = self._api._get_site(self._site_id)
#pprint.pprint(self._data)
if refresh_systems:
self.refresh_systems()
def refresh_systems(self):
""" Refresh all systems of this site """
self._load_systems()
#
# private
#
def _load_systems(self):
    """Load (or reload) all systems for this site.

    Existing System instances are kept and refreshed in place, so
    references held by callers stay valid across refreshes.
    """
    current_systems = self._systems
    self._systems = {}
    try:
        for system_data in self._data["groups"]:
            # Bug fix: look the previous instance up in the saved
            # *current_systems* dict, not in the freshly-emptied
            # self._systems -- otherwise every refresh recreated all
            # System objects and the _set_data_refreshed branch below
            # was dead code.
            system = current_systems.get(system_data.get("group_id"))
            # system not found => instance new system
            if system is None:
                system = System(self._api, self, system_data)
            else:
                system._set_data_refreshed(system_data)
            self._systems[system.id] = system
    except RuntimeError as err:
        # chain the original error so the root cause stays visible
        raise Exception(
            "Unable to load systems of site {} ({}) from AirzoneCloud".format(
                self.name, self.id
            )
        ) from err
    return self._systems
def _set_data_refreshed(self, data):
    """ Set data refreshed (call by parent AirzoneCloud on refresh_sites()).

    :param data: fresh raw site data dict from the API
    """
    self._data = data
    _LOGGER.info("Data refreshed for {}".format(self.str_complete))
#
# site raw data example
#
# {
# "id": "...",
# "mac": "AA:BB:CC:DD:EE:FF",
# "pin": "1234",
# "name": "Home",
# "icon": 5,
# "consign": "19.0",
# "sync_datetime": True,
# "remote_control": False,
# "firm_ws": "3.173",
# "status": "activated",
# "connection_date": "2020-04-18T08:58:15.000+00:00",
# "has_eco": True,
# "has_velocity": False,
# "spot_name": "Marseille",
# "complete_name": "Marseille,Bouches-du-Rhône,Provence-Alpes-Côte d'Azur,France",
# "country_code": "FR",
# "electricity_prices": {},
# "location": {"latitude": 43.00000000000000, "longitude": 5.00000000000000},
# "data": {
# "data": {
# "time_zone": [
# {
# "localtime": "2020-04-18 05:34",
# "utcOffset": "2.0",
# "zone": "Europe/Paris",
# }
# ]
# }
# },
# "modes": "00001111111011",
# "has_air_flow": False,
# "has_scene": False,
# "has_farenheit": False,
# } | AirzoneCloudfix | /AirzoneCloudfix-0.5.0.1.tar.gz/AirzoneCloudfix-0.5.0.1/AirzoneCloud/Site.py | Site.py |
# Python imports
import re, requests
# Local imports
import settings
import printers
class TestSet:
    """
    Base class all TestSets must inherit from.

    Subclasses set ``_base_url`` and define ``test_*`` methods; the HTTP
    helpers below prefix ``_base_url`` to the given end url and delegate
    to the ``requests`` library.
    """
    _base_url = ""

    def setUp(self):
        """
        Called before the beginning of the test set
        """
        return

    def setDown(self):
        """
        Called after the end of the test set
        """
        return

    def get(self, end_url, **kwargs):
        """Send a GET request to _base_url + end_url."""
        url = self._base_url + end_url
        return requests.get(url, **kwargs)

    def post(self, end_url, data=None, json=None, **kwargs):
        """Send a POST request (form data and/or json body) to _base_url + end_url."""
        url = self._base_url + end_url
        return requests.post(url, data, json, **kwargs)

    def put(self, end_url, data=None, **kwargs):
        """Send a PUT request to _base_url + end_url."""
        url = self._base_url + end_url
        return requests.put(url, data, **kwargs)

    def patch(self, end_url, data=None, **kwargs):
        """Send a PATCH request to _base_url + end_url."""
        url = self._base_url + end_url
        return requests.patch(url, data, **kwargs)

    def delete(self, end_url, **kwargs):
        """Send a DELETE request to _base_url + end_url."""
        url = self._base_url + end_url
        return requests.delete(url, **kwargs)

    def expect(self, response, code=None, body=None):
        """
        Return whether the response corresponds to what is expected or not.

        :param response: a requests.Response(-like) object
        :param code: expected HTTP status code (ignored when None)
        :param body: expected response body text (ignored when None)
        :return: dict with 'success', 'code' and 'elapsed' keys
        """
        success = True
        if code is not None and code != response.status_code:
            success = False
        # Bug fix: requests.Response has no ``body`` attribute -- the old
        # ``response.body`` raised AttributeError whenever a body was given;
        # the textual payload is ``response.text``.
        if body is not None and body != response.text:
            success = False
        return {'success': success, 'code': response.status_code, 'elapsed': response.elapsed}
class App:
"""
Main entry
"""
def __init__(self, printer):
self.printer = printer
def process(self, modules):
"""
Process modules
"""
data_total = {}
data_total['index'] = 0
data_total['nb_ok'] = 0
if len(modules) == 0:
self.printer.printErrorNoSetFound()
return
self.printer.printIntro()
for test_set in modules:
self.printer.printSetIntro(test_set)
data_test_set = {}
data_test_set['index'] = 0
data_test_set['nb_ok'] = 0
test_set.setUp()
for f in dir(test_set):
if re.match('test_*', f):
data_test_set['index'] += 1
func = getattr(test_set, f)
func_doc = func.__doc__.strip('\n')
try:
data_test = func()
if data_test['success']:
data_test_set['nb_ok'] += 1
self.printer.printTestOutput(data_test, func_doc)
except Exception as e:
self.printer.printTestDirtyFailure({'success': False, 'exception': e})
test_set.setDown()
data_total['index'] += data_test_set['index']
data_total['nb_ok'] += data_test_set['nb_ok']
self.printer.printSetResult(test_set, data_test_set['index'], data_test_set['nb_ok'], 0)
self.printer.printTotalResult(data_total['index'], data_total['nb_ok'], 0)
return 0 if data_total['index'] == data_total['nb_ok'] else 1 | Aito | /Aito-0.5.6.tar.gz/Aito-0.5.6/libaito/core.py | core.py |
# Python import
import sys
# Local import
import settings
# ANSI foreground color indices: the escape code used below is 30 + value
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
#following from Python cookbook, #475186
def has_colors(stream):
    """Return True when *stream* is a tty whose terminal supports colors.

    Based on Python cookbook recipe #475186.
    """
    if not hasattr(stream, "isatty") or not stream.isatty():
        return False
    try:
        import curses
        curses.setupterm()
        return curses.tigetnum("colors") > 2
    except Exception:
        # Bug fix: was a bare ``except:``, which would also swallow
        # SystemExit/KeyboardInterrupt; any curses failure means "no colors".
        return False
has_colors = has_colors(sys.stdout)
def printout(text, color=WHITE):
    """Return *text* wrapped in an ANSI color escape sequence when the
    terminal supports colors, otherwise return it unchanged."""
    if not has_colors:
        return text
    return "\x1b[1;%dm" % (30 + color) + text + "\x1b[0m"
class LocalPrinter:
    """
    Print all outputs on standard output, with all the colors and stuff.

    NOTE(review): uses Python 2 ``print`` statements, consistent with the
    rest of this module; all strings and colors come from ``settings``.
    """
    def __init__(self, verbosity):
        # verbosity: 0 = totals only, 1 = + per-set lines, 2 = + per-test lines
        self.verbosity = verbosity
    def printErrorNoSetFound(self):
        """
        Print 'ErrorNoSetFound' error message
        """
        print printout(settings.strings['errorNoSetFound'], settings.colors['errors'])
    def printIntro(self):
        """
        Print the intro sentence, before testing starts
        """
        print printout(settings.strings['intro'], settings.colors['intro'])
    def printSetIntro(self, u):
        """
        Print the set intro sentence, before the beginning of each test set

        :param u: the test-set instance (its class name and docstring are shown)
        """
        if self.verbosity > 0:
            print printout(u.__class__.__name__ + ': ' + u.__doc__, settings.colors['setIntro'])
    def printTestOutput(self, data, doc):
        """
        Print the output of a test

        :param data: result dict with 'success', 'code' and 'elapsed' keys
        :param doc: docstring of the test method
        """
        if data['success']:
            success = printout(settings.strings['testSuccess'], settings.colors['testSuccess'])
        else:
            success = printout(settings.strings['testFailure'], settings.colors['testFailure'])
        output = settings.strings['testOutputFormat'].format(success=success, return_code=data['code'], elapsed=data['elapsed'], doc=doc)
        if self.verbosity > 1:
            print output
    def printTestDirtyFailure(self, data):
        """
        Print the output of a dirty failed test (aka Exception was thrown during test execution)
        """
        output = printout(settings.strings['testDirtyFailure'], settings.colors['testDirtyFailure']) + str(data['exception'])
        if self.verbosity > 1:
            print output
    def printSetResult(self, test_set, nb_tests, nb_ok, total_response_time):
        """
        Print set results, after the end of each test set

        NOTE(review): raises ZeroDivisionError when nb_tests == 0 -- confirm
        callers never pass an empty set.
        """
        if self.verbosity > 0:
            percent = int(100 * (float(nb_ok) / float(nb_tests)))
            print printout(
                settings.strings['setResult'].format(nb_tests_passed=nb_ok,
                                                     nb_tests_total=nb_tests,
                                                     percent=percent,
                                                     className=test_set.__class__.__name__),
                settings.colors['setResult'])
    def printTotalResult(self, nb_tests, nb_ok, total_response_time):
        """
        Print total results, after the end of all test sets
        """
        percent = int(100 * (float(nb_ok) / float(nb_tests)))
        print printout(
            settings.strings['totalResult'].format(nb_tests_passed=nb_ok,
                                                   nb_tests_total=nb_tests,
                                                   percent=percent),
            settings.colors['totalResult'])
        if percent == 100:
            print printout(settings.strings['buildOk'], settings.colors['buildOk'])
        else:
            print printout(settings.strings['buildKo'], settings.colors['buildKo'])
from dataclasses import dataclass
from functools import cached_property, lru_cache
import os
from typing import Optional
from dotenv.main import load_dotenv
from pandas import DataFrame
from aito.util.data_proc import ParquetDataset
# Load environment variables from the local .env file; existing process
# env vars win because override=False.
load_dotenv(dotenv_path='.env',
            stream=None,
            verbose=True,
            override=False,
            interpolate=True,
            encoding='utf-8')

# AWS + data-location configuration, all optional and read from the env
AWS_REGION: Optional[str] = os.environ.get('AITO_PMFP_AWS_REGION')
AWS_ACCESS_KEY: Optional[str] = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_KEY: Optional[str] = os.environ.get('AWS_SECRET_ACCESS_KEY')

EQUIPMENT_DATA_PARENT_DIR_PATH: Optional[str] = \
    os.environ.get('AITO_PMFP_EQUIPMENT_DATA_PARENT_DIR_PATH')
EQUIPMENT_DATA_TIMEZONE: Optional[str] = \
    os.environ.get('AITO_PMFP_EQUIPMENT_DATA_TIMEZONE')

# Canonical column names shared across the equipment data sets
EQUIPMENT_INSTANCE_ID_COL: str = 'equipment_instance_id'
DATE_COL: str = 'date'
DATE_TIME_COL: str = 'date_time'
@dataclass(init=True,
           repr=True,
           eq=True,
           order=True,
           unsafe_hash=False,
           frozen=True)  # frozen=True needed for __hash__()
class EquipmentParquetDataSet:
    """Equipment Unique Type Group Parquet Data Set."""

    # e.g. 'refrig'; upper-cased to build the data-set name
    general_type: str
    # e.g. 'co2_mid_1_compressor'; used verbatim in the data-set name
    unique_type_group: str

    @cached_property
    def name(self) -> str:
        """Name data set (e.g. "REFRIG---co2_mid_1_compressor")."""
        return f'{self.general_type.upper()}---{self.unique_type_group}'

    @cached_property
    def url(self) -> str:
        """Get URL of data set (<parent dir>/<name>.parquet)."""
        assert EQUIPMENT_DATA_PARENT_DIR_PATH, \
            EnvironmentError(
                '*** AITO_PMFP_EQUIPMENT_DATA_PARENT_DIR_PATH env var not set ***')  # noqa: E501
        return f'{EQUIPMENT_DATA_PARENT_DIR_PATH}/{self.name}.parquet'

    def __repr__(self) -> str:
        """Return string representation."""
        return f'{self.unique_type_group.upper()} data @ {self.url}'

    # NOTE(review): lru_cache on an instance method keys on ``self`` (valid
    # here because the dataclass is frozen/hashable) and keeps the instance
    # alive for the cache's lifetime -- confirm that is acceptable.
    @lru_cache(maxsize=None, typed=False)
    def load(self) -> ParquetDataset:
        """Load as a Parquet Data Feeder."""
        if EQUIPMENT_DATA_PARENT_DIR_PATH.startswith('s3://'):
            # AWS credentials are only required for S3-hosted data
            assert AWS_REGION, \
                EnvironmentError('*** AITO_PMFP_AWS_REGION envvar not set ***')
        return ParquetDataset(
            path=self.url,
            awsRegion=AWS_REGION,  # default is location-dependent
            accessKey=AWS_ACCESS_KEY, secretKey=AWS_SECRET_KEY,
            iCol=EQUIPMENT_INSTANCE_ID_COL, tCol=DATE_TIME_COL
        ).castType(**{EQUIPMENT_INSTANCE_ID_COL: str})

    @lru_cache(maxsize=None, typed=False)
    def get_equipment_instance_ids_by_date(
            self,
            date: Optional[str] = None, to_date: Optional[str] = None) \
            -> list[str]:
        """Get equipment instance IDs by date(s).

        Returns a sorted, de-duplicated list of instance-id strings;
        returns [] when the date filter fails (e.g. no such partition).
        """
        parquet_ds: ParquetDataset = self.load()

        if date:
            try:
                # (DATE_COL, date, to_date) filters an inclusive range;
                # (DATE_COL, date) filters a single date partition
                parquet_ds: ParquetDataset = \
                    parquet_ds.filterByPartitionKeys((DATE_COL, date, to_date)
                                                     if to_date
                                                     else (DATE_COL, date))
            except Exception as err:  # pylint: disable=broad-except
                print(f'*** {err} ***')
                return []

        return [str(i) for i in
                sorted(parquet_ds.collect(EQUIPMENT_INSTANCE_ID_COL)
                       [EQUIPMENT_INSTANCE_ID_COL].unique())]

    def load_by_date(self,
                     date: str, to_date: Optional[str] = None,
                     equipment_instance_id: Optional[str] = None) \
            -> ParquetDataset:
        """Load equipment data by date(s), optionally for one instance only."""
        parquet_ds: ParquetDataset = self.load()

        try:
            parquet_ds: ParquetDataset = \
                parquet_ds.filterByPartitionKeys((DATE_COL, date, to_date)
                                                 if to_date
                                                 else (DATE_COL, date))
        except Exception as err:  # pylint: disable=broad-except
            # log and fall through with the unfiltered data set
            ParquetDataset.classStdOutLogger().error(msg=str(err))

        if equipment_instance_id:
            parquet_ds: ParquetDataset = \
                parquet_ds.filter(f'{EQUIPMENT_INSTANCE_ID_COL} == '
                                  f'"{equipment_instance_id}"')

        return parquet_ds

    def load_by_equipment_instance_id_by_date(
            self,
            equipment_instance_id: str,
            date: str, to_date: Optional[str] = None) -> DataFrame:
        """Load equipment data by equipment instance ID and date(s).

        Returns a Pandas DataFrame indexed by naive local date-times
        (converted from UTC to EQUIPMENT_DATA_TIMEZONE).
        """
        assert EQUIPMENT_DATA_TIMEZONE, \
            EnvironmentError(
                '*** AITO_PMFP_EQUIPMENT_DATA_TIMEZONE env var not set ***')

        parquet_ds: ParquetDataset = \
            self.load().filter(f'{EQUIPMENT_INSTANCE_ID_COL} == '
                               f'"{equipment_instance_id}"')

        if date:
            parquet_ds: ParquetDataset = \
                parquet_ds.filterByPartitionKeys((DATE_COL, date, to_date)
                                                 if to_date
                                                 else (DATE_COL, date))

        return (parquet_ds.collect()
                # instance id and date are constant after the filters above
                .drop(columns=[EQUIPMENT_INSTANCE_ID_COL, DATE_COL],
                      inplace=False,
                      errors='raise')
                .sort_values(by=DATE_TIME_COL,
                             axis='index',
                             ascending=True,
                             inplace=False,
                             kind='quicksort',
                             na_position='last')
                .set_index(keys=DATE_TIME_COL,
                           drop=True,
                           append=False,
                           inplace=False,
                           verify_integrity=True)
                # stored timestamps are UTC; present them in local time,
                # then drop the tz info for downstream consumers
                .tz_localize('UTC')
                .tz_convert(EQUIPMENT_DATA_TIMEZONE)
                .tz_localize(None))
from __future__ import annotations
from functools import cached_property
from logging import getLogger, Logger, DEBUG
import os
from pathlib import Path
from random import random
import time
from typing import Literal, Optional, Union
from typing import Dict, List # Py3.9+: use built-ins
from uuid import uuid4
from dotenv.main import load_dotenv
from pandas import DataFrame, Series
from aito.util.data_proc import ParquetDataset
from aito.util.log import STDOUT_HANDLER
from aito.util import s3
from aito.pmfp.data_mgmt import (EquipmentParquetDataSet,
EQUIPMENT_INSTANCE_ID_COL, DATE_COL)
__all__ = (
'H1ST_MODELS_DIR_PATH', 'H1ST_BATCH_OUTPUT_DIR_PATH',
'BaseFaultPredictor',
)
# Load environment variables from the local .env file; existing process
# env vars win because override=False.
load_dotenv(dotenv_path='.env',
            stream=None,
            verbose=True,
            override=False,
            interpolate=True,
            encoding='utf-8')

S3_BUCKET: Optional[str] = os.environ.get('AITO_PMFP_S3_BUCKET')

LOCAL_HOME_DIR_PATH = Path.home()

# Models are persisted under s3://<bucket>/.aito/models when an S3 bucket
# is configured, otherwise under ~/.aito/models.
H1ST_MODEL_DIR_NAME: str = '.aito/models'
H1ST_MODELS_DIR_PATH: Union[str, Path] = (
    f's3://{S3_BUCKET}/{H1ST_MODEL_DIR_NAME}'
    if S3_BUCKET
    else (LOCAL_HOME_DIR_PATH / H1ST_MODEL_DIR_NAME)
)

# Same bucket-or-home layout for batch-processing outputs.
BATCH_OUTPUT_DIR_NAME: str = '.aito/batch-output'
H1ST_BATCH_OUTPUT_DIR_PATH: Union[str, Path] = (
    f's3://{S3_BUCKET}/{BATCH_OUTPUT_DIR_NAME}'
    if S3_BUCKET
    else (LOCAL_HOME_DIR_PATH / BATCH_OUTPUT_DIR_NAME)
)
class BaseFaultPredictor:
    # pylint: disable=too-many-ancestors
    """Base Fault Prediction model class.

    Subclasses override ``predict`` (per equipment unit per day) and
    ``save``/``load``; ``batch_predict``/``batch_process`` drive it over a
    whole Parquet data set.
    """

    def __init__(self,
                 general_type: Literal['refrig', 'disp_case'],
                 unique_type_group: str,
                 version: Optional[str] = None):
        # pylint: disable=super-init-not-called
        """Init Fault Prediction model.

        :param version: model version; a random UUID when not given
        """
        self.general_type: str = general_type
        self.unique_type_group: str = unique_type_group
        self.version: str = version if version else str(uuid4())

    def __repr__(self) -> str:
        """Return string repr."""
        return f'{self.unique_type_group} {type(self).__name__} "{self.version}"'  # noqa: E501

    @cached_property
    def name(self) -> str:
        """Return string name ("<ClassName>--<version>")."""
        return f'{type(self).__name__}--{self.version}'

    @property
    def logger(self) -> Logger:
        """Logger.

        NOTE: Logger.addHandler is a no-op when the handler is already
        attached, so repeated accesses do not duplicate output.
        """
        logger: Logger = getLogger(name=str(self))
        logger.setLevel(level=DEBUG)
        logger.addHandler(hdlr=STDOUT_HANDLER)
        return logger

    def save(self):
        """Persist model instance (must be implemented by subclasses)."""
        raise NotImplementedError

    @classmethod
    def load(cls, version: str) -> BaseFaultPredictor:
        # pylint: disable=unused-argument
        """Load model instance by version."""
        if cls is BaseFaultPredictor:
            # return an arbitrary model for testing
            return cls(general_type='refrig',
                       unique_type_group='co2_mid_1_compressor')

        raise NotImplementedError

    @classmethod
    def list_versions(cls) -> List[str]:
        """List model versions (S3 sub-prefixes or local sub-dirs)."""
        if S3_BUCKET:
            prefix_len: int = len(prefix := f'{H1ST_MODEL_DIR_NAME}/{cls.__name__}/')  # noqa: E501

            results: dict = s3.client().list_objects_v2(Bucket=S3_BUCKET,
                                                        Delimiter='/',
                                                        EncodingType='url',
                                                        MaxKeys=10 ** 3,
                                                        Prefix=prefix)

            # strip the prefix and the trailing '/' from each common prefix
            return [i['Prefix'][prefix_len:-1]
                    for i in results.get('CommonPrefixes', [])]

        return [str(i) for i in H1ST_MODELS_DIR_PATH.iterdir()]

    def predict(self, df_for_1_equipment_unit_for_1_day: DataFrame, /) \
            -> Union[bool, float]:
        # pylint: disable=unused-argument
        """Fault Prediction logic.

        User shall override this method and return a boolean or float value for
        whether the equipment unit has the concerned fault on the date.
        (This default placeholder returns a random score.)
        """
        return random()

    def batch_predict(self,
                      parquet_ds: ParquetDataset, /,
                      **predict_kwargs) -> Series:
        """Batch predict: apply ``predict`` per (instance, date) group."""
        return parquet_ds.map(
            lambda df: (df.groupby(by=[EQUIPMENT_INSTANCE_ID_COL, DATE_COL],
                                   axis='index',
                                   level=None,
                                   as_index=True,   # group labels as index
                                   sort=False,   # better performance
                                   # when `apply`ing: add group keys to index?
                                   group_keys=False,
                                   # squeeze=False,   # deprecated
                                   observed=False,
                                   dropna=True)
                        .apply(func=self.predict, **predict_kwargs))).collect()

    def batch_process(self,
                      date: str, to_date: Optional[str] = None,
                      *, equipment_instance_id: Optional[str] = None,
                      return_json: bool = False, **predict_kwargs) \
            -> Union[Series,
                     Dict[str, Dict[str, Union[bool, float]]]]:
        # pylint: disable=too-many-locals
        """(Bulk-)Process data to predict fault per equipment unit per date.

        :param return_json: when True, return a nested dict
            {instance_id: {date: prediction}}; otherwise a Pandas Series
            indexed by (instance_id, date)
        """
        try:
            parquet_ds: ParquetDataset = (
                EquipmentParquetDataSet(general_type=self.general_type,
                                        unique_type_group=self.unique_type_group)  # noqa: E501
                .load_by_date(date=date, to_date=to_date,
                              equipment_instance_id=equipment_instance_id))

        except Exception as err:   # pylint: disable=broad-except
            # no data for the requested filters: return an empty result
            print(f'*** {err} ***')

            return ({}
                    if return_json
                    else (DataFrame(columns=[EQUIPMENT_INSTANCE_ID_COL, DATE_COL,  # noqa: E501
                                             'FAULT'])
                          .set_index(keys=[EQUIPMENT_INSTANCE_ID_COL, DATE_COL],  # noqa: E501
                                     drop=True,
                                     append=False,
                                     verify_integrity=True,
                                     inplace=False)))

        parquet_ds.cacheLocally()

        self.logger.info(
            msg=(msg := f'Batch-Processing {parquet_ds.__shortRepr__}...'))
        tic: float = time.time()

        fault_preds: Series = (self.batch_predict(parquet_ds, **predict_kwargs)  # noqa: E501
                               # sort index to make output order consistent
                               .sort_index(axis='index',
                                           level=None,
                                           ascending=True,
                                           inplace=False,
                                           kind='quicksort',
                                           na_position='last',
                                           sort_remaining=True,
                                           ignore_index=False,
                                           key=None))

        toc: float = time.time()
        self.logger.info(msg=f'{msg} done!   <{toc-tic:.1f}s>')

        if return_json:
            d: Dict[str, Dict[str, Union[bool, float]]] = {}

            for (_equipment_instance_id, _date), pred in fault_preds.items():
                # tuple predictions (e.g. from an oracle) are JSON-ified
                # as a list of plain bools
                if isinstance(pred, tuple):
                    assert len(pred) == 3
                    pred: List[Union[bool, float]] = [bool(i) for i in pred]

                if _equipment_instance_id in d:
                    d[_equipment_instance_id][str(_date)] = pred
                else:
                    d[_equipment_instance_id] = {str(_date): pred}

            return d

        return fault_preds
from __future__ import annotations
from typing import List, Sequence, Tuple # Py3.9+: use built-ins
from pandas import DataFrame, Series
from aito.util.data_proc import ParquetDataset
from aito.pmfp.models.base import BaseFaultPredictor
from .teacher.base import BaseFaultPredTeacher
from .student.timeseries_dl import (TimeSeriesDLFaultPredStudentModeler,
TimeSeriesDLFaultPredStudent)
from .ensemble.basic import EitherFaultPredEnsemble
class FaultPredOracleModeler:
    """Fault Prediction Oracle Modeler.

    Trains a Knowledge Generalizer ("Student") model from a Knowledge
    ("Teacher") model, tunes the student's decision threshold, and wraps
    both into a FaultPredOracle.
    """

    def __init__(self, teacher: BaseFaultPredTeacher,
                 student_input_cat_cols: Sequence[str],
                 student_input_num_cols: Sequence[str],
                 student_input_subsampling_factor: int,
                 student_input_n_rows_per_day: int,
                 student_train_date_range: Tuple[str, str],
                 student_tuning_date_range: Tuple[str, str]):
        # pylint: disable=super-init-not-called
        """Init Fault Prediction Oracle Modeler."""
        self.teacher: BaseFaultPredTeacher = teacher

        self.student_input_cat_cols: Sequence[str] = student_input_cat_cols
        self.student_input_num_cols: Sequence[str] = student_input_num_cols
        self.student_input_subsampling_factor: int = student_input_subsampling_factor  # noqa: E501
        self.student_input_n_rows_per_day: int = student_input_n_rows_per_day

        self.student_train_date_range: Tuple[str, str] = student_train_date_range  # noqa: E501
        self.student_tuning_date_range: Tuple[str, str] = student_tuning_date_range  # noqa: E501

    def build_model(self) -> FaultPredOracle:
        """Construct an Oracle from a Knowledge ("Teacher") Model."""
        # train Knowledge Generalizer ("Student") model
        student: TimeSeriesDLFaultPredStudent = \
            TimeSeriesDLFaultPredStudentModeler(
                teacher=self.teacher,
                input_cat_cols=self.student_input_cat_cols,
                input_num_cols=self.student_input_num_cols,
                input_subsampling_factor=self.student_input_subsampling_factor,
                input_n_rows_per_day=self.student_input_n_rows_per_day,
                date_range=self.student_train_date_range).build_model()

        # tune Knowledge Generalizer ("Student") model's decision threshold
        student.tune_decision_threshold(
            tuning_date_range=self.student_tuning_date_range)

        # Bug fix: this method is annotated to return a FaultPredOracle and
        # documented as constructing an Oracle, but previously returned
        # None -- assemble and return the oracle here.
        return FaultPredOracle(teacher=self.teacher, student=student)
class FaultPredOracle(BaseFaultPredictor):
    # pylint: disable=abstract-method,too-many-ancestors
    """Fault Prediction Oracle: Teacher + Student combined by an Ensemble."""

    def __init__(self,
                 teacher: BaseFaultPredTeacher,
                 student: TimeSeriesDLFaultPredStudent,
                 ensemble: EitherFaultPredEnsemble | None = None):
        """Init Fault Prediction Oracle.

        :param ensemble: ensembling logic; a fresh EitherFaultPredEnsemble
            is created when not given
        """
        super().__init__(general_type=teacher.general_type,
                         unique_type_group=teacher.unique_type_group,
                         version=student.version)

        self.teacher: BaseFaultPredTeacher = teacher
        self.student: TimeSeriesDLFaultPredStudent = student
        # Bug fix: the default used to be a single EitherFaultPredEnsemble()
        # instantiated once at class-definition time and shared by every
        # oracle; create one per instance instead (behavior-compatible).
        self.ensemble: EitherFaultPredEnsemble = (
            ensemble if ensemble is not None else EitherFaultPredEnsemble())

    @classmethod
    def load(cls, version: str) -> FaultPredOracle:
        """Load oracle by version.

        The version string encodes both components:
        "<TeacherClass>--<teacher version>---<student name>".
        """
        # pylint: disable=import-error,import-outside-toplevel
        import ai.models

        teacher_name, _student_name = version.split('---')
        teacher_class_name, teacher_version = teacher_name.split('--')
        teacher_class = getattr(ai.models, teacher_class_name)
        teacher: BaseFaultPredTeacher = teacher_class.load(version=teacher_version)  # noqa: E501

        student: TimeSeriesDLFaultPredStudent = \
            TimeSeriesDLFaultPredStudent.load(version=version)

        return cls(teacher=teacher, student=student)

    @classmethod
    def list_versions(cls) -> List[str]:
        """List model versions (same registry as the student models)."""
        return TimeSeriesDLFaultPredStudent.list_versions()

    def predict(self,
                df_for_1_equipment_unit_for_1_day: DataFrame, /) \
            -> Tuple[bool, bool, bool]:
        """Make oracle prediction: (teacher, student, ensemble) booleans."""
        return (
            teacher_pred := self.teacher.predict(df_for_1_equipment_unit_for_1_day),  # noqa: E501
            student_pred := self.student.predict(df_for_1_equipment_unit_for_1_day,  # noqa: E501
                                                 return_binary=True),
            self.ensemble.predict(teacher_pred=teacher_pred,
                                  student_pred=student_pred))

    def batch_predict(self, parquet_ds: ParquetDataset) -> Series:
        """Batch-Predict faults; each value is a (teacher, student, ensemble)
        tuple, indexed like the ensemble predictions."""
        return Series(
            data=zip(
                teacher_preds := self.teacher.batch_predict(parquet_ds),
                student_preds := self.student.batch_predict(parquet_ds,
                                                            return_binary=True),  # noqa: E501
                ensemble_preds := self.ensemble.batch_predict(
                    teacher_preds=teacher_preds, student_preds=student_preds)),
            index=ensemble_preds.index,
            dtype=None, name='FAULT', copy=False, fastpath=False)
from __future__ import annotations
from functools import cached_property
import math
import pickle
from tempfile import NamedTemporaryFile
from typing import Optional, Union
from typing import Dict, List, Set, Tuple # Py3.9+: use built-ins
from imblearn.over_sampling import RandomOverSampler
import joblib
from numpy import expand_dims
from pandas import DataFrame, Series
from scipy.stats.stats import hmean
from sklearn.metrics import precision_recall_curve
from sklearn.neural_network import MLPClassifier
from ruamel import yaml
from aito.util.data_proc import (PandasFlatteningSubsampler,
PandasMLPreprocessor,
ParquetDataset)
from aito.util.data_proc._abstract import ColsType
from aito.util.iter import to_iterable
from aito.util.path import add_cwd_to_py_path
from aito.util import fs, s3
from aito.pmfp.data_mgmt import (EquipmentParquetDataSet,
EQUIPMENT_INSTANCE_ID_COL, DATE_COL)
from aito.pmfp.models.base import BaseFaultPredictor, H1ST_MODELS_DIR_PATH
from aito.pmfp.models.oracle.teacher.base import BaseFaultPredTeacher
# Number of per-minute rows a full day of data would contain
N_MINUTES_PER_DAY: int = 24 * 60

# True when models are persisted on S3 (H1ST_MODELS_DIR_PATH is an s3:// URL)
_ON_S3: bool = (isinstance(H1ST_MODELS_DIR_PATH, str) and
                H1ST_MODELS_DIR_PATH.startswith('s3://'))
class TimeSeriesDLFaultPredStudentModeler:
    """Time-Series-DL-based Fault Prediction k-gen ("student") modeler.

    Distills a Knowledge ("Teacher") model into an MLP classifier trained
    on flattened, subsampled daily sensor time series, with the teacher's
    batch predictions used as training labels.
    """

    def __init__(self, teacher: BaseFaultPredTeacher,
                 input_cat_cols: Optional[ColsType],
                 input_num_cols: Optional[ColsType],
                 input_subsampling_factor: int,
                 input_n_rows_per_day: int,
                 date_range: Tuple[str, str]):
        # pylint: disable=super-init-not-called,too-many-arguments
        """Init Time-Series-DL-based student modeler.

        :param input_subsampling_factor: keep every Nth row of each day
        :param input_n_rows_per_day: fixed per-day row count after
            subsampling (shorter days are padded with their last row)
        :param date_range: (from_date, to_date) of the training data
        """
        self.teacher: BaseFaultPredTeacher = teacher
        self.general_type: str = teacher.general_type
        self.unique_type_group: str = teacher.unique_type_group

        self.input_cat_cols: Set[str] = (to_iterable(input_cat_cols,
                                                     iterable_type=set)
                                         if input_cat_cols
                                         else set())
        self.input_num_cols: Set[str] = (to_iterable(input_num_cols,
                                                     iterable_type=set)
                                         if input_num_cols
                                         else set())
        self.input_subsampling_factor: int = input_subsampling_factor
        self.input_n_rows_per_day: int = input_n_rows_per_day

        self.date_range: Tuple[str, str] = date_range

    def build_model(self, *,
                    hidden_layer_compress_factor: int = 10,
                    l2_regularization_factor: float = 3e0,
                    # *** HAND-TUNED TO COMBAT OVER-FITTING ***
                    # (target Prec & Recall of 70-80% against Teacher labels)
                    random_seed: Optional[int] = None) \
            -> TimeSeriesDLFaultPredStudent:
        # pylint: disable=arguments-differ,too-many-locals
        """Fit Knowledge Generalizer ("Student") model.

        Pipeline: load & filter parquet data -> ML-preprocess ->
        flatten/subsample per (instance, date) -> label with teacher
        predictions -> class-balance by oversampling -> fit MLP ->
        wrap & save a TimeSeriesDLFaultPredStudent.
        """
        # 1) load raw data for the training date range & selected columns
        parquet_ds: ParquetDataset = (
            EquipmentParquetDataSet(general_type=self.general_type,
                                    unique_type_group=self.unique_type_group)
            .load()
            .filterByPartitionKeys(
                (DATE_COL, *self.date_range)
            )[(EQUIPMENT_INSTANCE_ID_COL, DATE_COL) +
              tuple(self.input_cat_cols) + tuple(self.input_num_cols)])

        parquet_ds.cacheLocally()

        # 2) fit & apply the ML preprocessor (kept for inference time)
        parquet_ds, preprocessor = parquet_ds.preprocForML(
            *self.input_cat_cols, *self.input_num_cols,
            forceCat=self.input_cat_cols, forceNum=self.input_num_cols,
            returnPreproc=True)

        # 3) flatten each (instance, date) group into one fixed-width row
        flattening_subsampler: PandasFlatteningSubsampler = \
            PandasFlatteningSubsampler(columns=tuple(preprocessor.sortedPreprocCols),  # noqa: E501
                                       everyNRows=self.input_subsampling_factor,  # noqa: E501
                                       totalNRows=self.input_n_rows_per_day)

        parquet_ds: ParquetDataset = parquet_ds.map(
            lambda df: (df.groupby(by=[EQUIPMENT_INSTANCE_ID_COL, DATE_COL],
                                   axis='index',
                                   level=None,
                                   as_index=True,   # group labels as index
                                   sort=False,   # better performance
                                   # when `apply`ing: add group keys to index?
                                   group_keys=False,
                                   # squeeze=False,   # deprecated
                                   observed=False,
                                   dropna=True)
                        .apply(func=flattening_subsampler, padWithLastRow=True)))  # noqa: E501

        parquet_ds.stdOutLogger.info(msg='Featurizing into Pandas DF...')
        df: DataFrame = parquet_ds.collect()
        parquet_ds.stdOutLogger.info(
            msg='Featurized into Pandas DF with:'
                f'\n{len(df.columns):,} Columns:\n{df.columns}'
                f'\nand Index:\n{df.index}')

        # 4) label the flattened rows with the teacher's daily predictions
        print('Getting Teacher Labels...')
        from_date, to_date = self.date_range
        teacher_predicted_faults_series: Series = \
            self.teacher.batch_process(date=from_date, to_date=to_date)
        # treat missing teacher predictions as "no fault"
        teacher_predicted_faults_series.mask(
            cond=teacher_predicted_faults_series.isnull(),
            other=False,
            inplace=True,
            axis='index',
            level=None)
        teacher_predicted_faults_series.name = 'FAULT'
        print(teacher_predicted_faults_series)

        print('Joining Teacher Labels to Unlabeled Features...')
        df: DataFrame = df.join(other=teacher_predicted_faults_series, on=None,
                                how='left', lsuffix='', rsuffix='', sort=False)
        print(f'TRAINING ON {(n_rows := len(df)):,} ROWS w/ FAULT INCIDENCE = '
              f'{100 * teacher_predicted_faults_series.sum() / n_rows:,.1f}%...')  # noqa: E501

        # 5) derive the MLP architecture from the flattened feature count
        transformed_cols: List[str] = flattening_subsampler.transformedCols
        print(f'{(n_cols := len(transformed_cols)):,} Columns')
        n_fwd_transforms: int = round(number=math.log(n_cols,
                                                      hidden_layer_compress_factor),  # noqa: E501
                                      ndigits=None)
        hidden_layer_sizes: Tuple[int] = \
            tuple(hidden_layer_compress_factor ** i
                  for i in reversed(range(1, n_fwd_transforms)))
        print(f'Hidden Layer Sizes: {hidden_layer_sizes}')

        print(f'L2 Weight Regularization Factor: {l2_regularization_factor}')

        native_skl_mlp_classifier: MLPClassifier = MLPClassifier(
            hidden_layer_sizes=hidden_layer_sizes,
            # tuple, length = n_layers - 2, default=(100,)
            # The ith element represents the number of neurons
            # in the ith hidden layer.

            activation='tanh',
            # {‘identity’, ‘logistic’, ‘tanh’, ‘relu’}, default=’relu’
            # Activation function for the hidden layer.
            # - ‘identity’, no-op activation,
            # useful to implement linear bottleneck, returns f(x) = x
            # - ‘logistic’, the logistic sigmoid function,
            # returns f(x) = 1 / (1 + exp(-x)).
            # - ‘tanh’, the hyperbolic tan function, returns f(x) = tanh(x).
            # - ‘relu’, the rectified linear unit function,
            # returns f(x) = max(0, x)

            solver='adam',
            # {‘lbfgs’, ‘sgd’, ‘adam’}, default=’adam’
            # The solver for weight optimization.
            # - ‘lbfgs’ is an optimizer in the family of quasi-Newton methods.
            # - ‘sgd’ refers to stochastic gradient descent.
            # - ‘adam’ refers to a stochastic gradient-based optimizer
            # proposed by Kingma, Diederik, and Jimmy Ba
            # Note: The default solver ‘adam’ works pretty well on relatively
            # large datasets (with thousands of training samples or more)
            # in terms of both training time and validation score.
            # For small datasets, however, ‘lbfgs’ can converge faster
            # and perform better.

            alpha=l2_regularization_factor,
            # float, default=0.0001
            # L2 penalty (regularization term) parameter.

            batch_size='auto',
            # int, default=’auto’
            # Size of minibatches for stochastic optimizers.
            # If the solver is ‘lbfgs’, the classifier will not use minibatch.
            # When set to “auto”, batch_size=min(200, n_samples).

            # learning_rate='constant',
            # {‘constant’, ‘invscaling’, ‘adaptive’}, default=’constant’
            # Learning rate schedule for weight updates.
            # - ‘constant’ is a constant learning rate
            # given by ‘learning_rate_init’.
            # - ‘invscaling’ gradually decreases the learning rat at each
            # time step ‘t’ using an inverse scaling exponent of ‘power_t’.
            # effective_learning_rate = learning_rate_init / pow(t, power_t)
            # - ‘adaptive’ keeps the learning rate constant to
            # ‘learning_rate_init’ as long as training loss keeps decreasing.
            # Each time two consecutive epochs fail to decrease training loss
            # by at least tol, or fail to increase validation score by at least
            # tol if ‘early_stopping’ is on, the current learning rate
            # is divided by 5.
            # Only used when solver='sgd'.

            learning_rate_init=1e-3,
            # float, default=0.001
            # The initial learning rate used.
            # It controls the step-size in updating the weights.
            # Only used when solver=’sgd’ or ‘adam’.

            # power_t=0.5,
            # float, default=0.5
            # The exponent for inverse scaling learning rate.
            # It is used in updating effective learning rate
            # when the learning_rate is set to ‘invscaling’.
            # Only used when solver=’sgd’.

            max_iter=10 ** 3,
            # int, default=200
            # Maximum number of iterations.
            # The solver iterates until convergence (determined by ‘tol’)
            # or this number of iterations.
            # For stochastic solvers (‘sgd’, ‘adam’) note that this determines
            # the number of epochs (how many times each data point
            # will be used), not the number of gradient steps.

            shuffle=True,
            # bool, default=True
            # Whether to shuffle samples in each iteration.
            # Only used when solver=’sgd’ or ‘adam’.

            random_state=random_seed,
            # int, RandomState instance, default=None
            # Determines random number generation for weights and bias
            # initialization, train-test split if early stopping is used, and
            # batch sampling when solver=’sgd’ or ‘adam’. Pass an int for
            # reproducible results across multiple function calls.

            tol=1e-4,
            # float, default=1e-4
            # Tolerance for the optimization.
            # When the loss or score is not improving by at least tol
            # for n_iter_no_change consecutive iterations,
            # unless learning_rate is set to ‘adaptive’,
            # convergence is considered to be reached and training stops.

            verbose=True,
            # bool, default=False
            # Whether to print progress messages to stdout.

            warm_start=False,
            # bool, default=False
            # When set to True, reuse the solution of the previous call to fit
            # as initialization, otherwise, just erase the previous solution.

            # momentum=0.9,
            # float, default=0.9
            # Momentum for gradient descent update. Should be between 0 and 1.
            # Only used when solver=’sgd’.

            # nesterovs_momentum=True,
            # bool, default=True
            # Whether to use Nesterov’s momentum.
            # Only used when solver=’sgd’ and momentum > 0.

            early_stopping=True,
            # bool, default=False
            # Whether to use early stopping to terminate training
            # when validation score is not improving.
            # If set to true, it will automatically set aside 10% of training
            # data as validation and terminate training when validation score
            # is not improving by at least tol for n_iter_no_change consecutive
            # epochs. The split is stratified, except in a multilabel setting.
            # If early stopping is False, then the training stops when the
            # training loss does not improve by more than tol for
            # n_iter_no_change consecutive passes over the training set.
            # Only effective when solver=’sgd’ or ‘adam’.

            validation_fraction=0.32,
            # float, default=0.1
            # The proportion of training data to set aside as validation set
            # for early stopping. Must be between 0 and 1.
            # Only used if early_stopping is True.

            beta_1=0.9,
            # float, default=0.9
            # Exponential decay rate for estimates of first moment vector
            # in adam, should be in [0, 1).
            # Only used when solver=’adam’.

            beta_2=0.999,
            # float, default=0.999
            # Exponential decay rate for estimates of second moment vector
            # in adam, should be in [0, 1).
            # Only used when solver=’adam’.

            epsilon=1e-08,
            # float, default=1e-8
            # Value for numerical stability in adam.
            # Only used when solver=’adam’.

            n_iter_no_change=10 ** 2,
            # int, default=10
            # Maximum number of epochs to not meet tol improvement.
            # Only effective when solver=’sgd’ or ‘adam’.

            # max_fun=15000,
            # Only used when solver=’lbfgs’.
            # Maximum number of loss function calls.
            # The solver iterates until convergence (determined by ‘tol’),
            # number of iterations reaches max_iter, or this number of loss
            # function calls. Note that number of loss function calls will be
            # greater than or equal to the number of iterations.
        )

        # 6) rebalance the (typically fault-scarce) classes by oversampling
        x_resampled, y_resampled = (RandomOverSampler(sampling_strategy='minority',  # noqa: E501
                                                      random_state=random_seed,
                                                      shrinkage=None)
                                    .fit_resample(X=df[transformed_cols].values,  # noqa: E501
                                                  y=df.FAULT.astype(dtype=int,
                                                                    copy=True,
                                                                    errors='raise')  # noqa: E501
                                                  ))
        print(f'Class-Balanced Training Data Set with {len(y_resampled):,} Samples')  # noqa: E501

        native_skl_mlp_classifier.fit(X=x_resampled, y=y_resampled)

        # 7) wrap & persist; threshold of .5 is tuned later by the caller
        student_model: TimeSeriesDLFaultPredStudent = \
            TimeSeriesDLFaultPredStudent(
                teacher=self.teacher,
                input_cat_cols=self.input_cat_cols,
                input_num_cols=self.input_num_cols,
                input_subsampling_factor=self.input_subsampling_factor,
                input_n_rows_per_day=self.input_n_rows_per_day,
                preprocessor=preprocessor,
                native_obj=native_skl_mlp_classifier,
                decision_threshold=.5)   # tune later

        student_model.save()

        return student_model
class TimeSeriesDLFaultPredStudent(BaseFaultPredictor):
    # pylint: disable=too-many-ancestors
    """Time-Series-DL-based knowledge generalizer ("student") model class.

    Wraps a scikit-learn MLPClassifier trained to mimic a fault-prediction
    "teacher" model.  Persists/loads four artifacts per instance (input
    params YAML, preprocessor YAML, joblib-pickled native model,
    postprocessing YAML) under `instance_url`, either on S3 or local FS.
    """
    def __init__(self, teacher: BaseFaultPredTeacher,
                 input_cat_cols: Optional[ColsType],
                 input_num_cols: Optional[ColsType],
                 input_subsampling_factor: int, input_n_rows_per_day: int,
                 preprocessor: PandasMLPreprocessor,
                 native_obj: MLPClassifier,
                 decision_threshold: float,
                 _version: Optional[str] = None):
        # pylint: disable=too-many-arguments
        """Init Time-Series-DL-based k-gen ("student") model.

        `_version` is reserved for `load()`; user code normally leaves it
        None so the base class assigns a fresh version.
        """
        super().__init__(general_type=teacher.general_type,
                         unique_type_group=teacher.unique_type_group,
                         version=_version)
        # composite version string: '<teacher.name>---<StudentClass>--<ver>';
        # load() parses it back by splitting on '---' then '--', so
        # teacher.name is assumed to be '<TeacherClass>--<teacher_version>'
        self.version: str = f'{teacher.name}---{type(self).__name__}--{self.version}'  # noqa: E501
        self.teacher: BaseFaultPredTeacher = teacher
        # input params
        # cat/num cols are normalized to (possibly empty) sets
        self.input_cat_cols: Set[str] = (to_iterable(input_cat_cols,
                                                     iterable_type=set)
                                         if input_cat_cols
                                         else set())
        self.input_num_cols: Set[str] = (to_iterable(input_num_cols,
                                                     iterable_type=set)
                                         if input_num_cols
                                         else set())
        self.input_subsampling_factor: int = input_subsampling_factor
        self.input_n_rows_per_day: int = input_n_rows_per_day
        # preprocessing params
        self.preprocessor: PandasMLPreprocessor = preprocessor
        # native model
        self.native_obj: MLPClassifier = native_obj
        # postprocessing params
        self.decision_threshold: float = decision_threshold
    @cached_property
    def class_url(self) -> str:
        """Return model class's global dir URL."""
        return f'{H1ST_MODELS_DIR_PATH}/{type(self).__name__}'
    @cached_property
    def instance_url(self) -> str:
        """Return model instance's global dir URL."""
        return f'{self.class_url}/{self.version}'
    def __repr__(self) -> str:
        """Return string repr."""
        return (f'{self.unique_type_group} '
                'Time-Series-DL-based Knowledge Generalizer ("Student") Model '
                f'w/ Decision Threshold {self.decision_threshold:.3f} '
                f'@ {self.instance_url}')
    @cached_property
    def input_params_url(self) -> str:
        """Return model's input parameters URL."""
        return f'{self.instance_url}/input-params.yaml'
    @cached_property
    def preproc_params_url(self) -> str:
        """Return model's preprocessing parameters URL."""
        return f'{self.instance_url}/preproc-params.yaml'
    @cached_property
    def native_obj_url(self) -> str:
        """Return model's native object URL."""
        return f'{self.instance_url}/native-obj.pkl'
    @cached_property
    def postproc_params_url(self) -> str:
        """Return model's output parameters URL."""
        return f'{self.instance_url}/postproc-params.yaml'
    def save(self):
        """Save model params & native object.

        Pattern for each artifact: write to a NamedTemporaryFile with
        delete=False, then (after the `with` closes it) move it to its
        final URL via `s3.mv` or `fs.mv`.  NOTE(review): relies on the
        temp file surviving close (delete=False) and on the mv helpers
        accepting a local source path — POSIX-style semantics assumed.
        """
        # save input params
        with NamedTemporaryFile(mode='wt',
                                buffering=-1,
                                encoding='utf-8',
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=False,
                                errors=None) as input_params_tmp_file:
            yaml.safe_dump(data={'cat-cols': self.input_cat_cols,
                                 'num-cols': self.input_num_cols,
                                 'subsampling-factor': self.input_subsampling_factor,  # noqa: E501
                                 'n-rows-per-day': self.input_n_rows_per_day},
                           stream=input_params_tmp_file,
                           default_style=None,
                           default_flow_style=False,
                           encoding='utf-8',
                           explicit_start=None,
                           explicit_end=None,
                           version=None,
                           tags=None,
                           canonical=False,
                           indent=2,
                           width=100,
                           allow_unicode=True,
                           line_break=None)
        if _ON_S3:
            s3.mv(from_path=input_params_tmp_file.name,
                  to_path=self.input_params_url,
                  is_dir=False, quiet=False)
        else:
            fs.mv(from_path=input_params_tmp_file.name,
                  to_path=self.input_params_url,
                  hdfs=False, is_dir=False)
        # save preprocessing params
        with NamedTemporaryFile(mode='wb',
                                buffering=-1,
                                encoding=None,
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=False,
                                errors=None) as preproc_params_tmp_file:
            # preprocessor serializes itself; we only supply the temp path
            self.preprocessor.to_yaml(path=preproc_params_tmp_file.name)
        if _ON_S3:
            s3.mv(from_path=preproc_params_tmp_file.name,
                  to_path=self.preproc_params_url,
                  is_dir=False, quiet=False)
        else:
            fs.mv(from_path=preproc_params_tmp_file.name,
                  to_path=self.preproc_params_url,
                  hdfs=False, is_dir=False)
        # save native object
        with NamedTemporaryFile(mode='wb',
                                buffering=-1,
                                encoding=None,
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=False,
                                errors=None) as native_obj_tmp_file:
            # max compression; load() must use joblib.load to match
            joblib.dump(value=self.native_obj,
                        filename=native_obj_tmp_file.name,
                        compress=9,
                        protocol=pickle.HIGHEST_PROTOCOL,
                        cache_size=None)
        if _ON_S3:
            s3.mv(from_path=native_obj_tmp_file.name,
                  to_path=self.native_obj_url,
                  is_dir=False, quiet=False)
        else:
            fs.mv(from_path=native_obj_tmp_file.name,
                  to_path=self.native_obj_url,
                  hdfs=False, is_dir=False)
        # save postprocessing params
        with NamedTemporaryFile(mode='wt',
                                buffering=-1,
                                encoding='utf-8',
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=False,
                                errors=None) as postproc_params_tmp_file:
            yaml.safe_dump(data={'decision-threshold': self.decision_threshold},  # noqa: E501
                           stream=postproc_params_tmp_file,
                           default_style=None,
                           default_flow_style=False,
                           encoding='utf-8',
                           explicit_start=None,
                           explicit_end=None,
                           version=None,
                           tags=None,
                           canonical=False,
                           indent=2,
                           width=100,
                           allow_unicode=True,
                           line_break=None)
        if _ON_S3:
            s3.mv(from_path=postproc_params_tmp_file.name,
                  to_path=self.postproc_params_url,
                  is_dir=False, quiet=False)
        else:
            fs.mv(from_path=postproc_params_tmp_file.name,
                  to_path=self.postproc_params_url,
                  hdfs=False, is_dir=False)
        print(f'SAVED: {self}')
    @classmethod
    def load(cls, version: str) -> TimeSeriesDLFaultPredStudent:
        # pylint: disable=too-many-locals
        """Load Time-Series-DL-based k-gen ("student") model.

        `version` must be the composite string written by __init__:
        '<TeacherClass>--<teacher_version>---<StudentClass>--<student_version>'.
        The teacher is re-loaded first; the student is then constructed with
        placeholder params and populated from the four saved artifacts
        (mirror of save()).
        """
        add_cwd_to_py_path()
        # pylint: disable=import-error,import-outside-toplevel
        import ai.models
        # parse composite version back into teacher & student components
        teacher_name, student_str = version.split('---')
        teacher_class_name, teacher_version = teacher_name.split('--')
        teacher_class = getattr(ai.models, teacher_class_name)
        teacher: BaseFaultPredTeacher = teacher_class.load(version=teacher_version)  # noqa: E501
        _student_class_name, _student_version = student_str.split('--')
        student: TimeSeriesDLFaultPredStudent = cls(
            teacher=teacher,
            # params to load in subsequent steps below
            input_cat_cols=None, input_num_cols=None,
            input_subsampling_factor=None,
            input_n_rows_per_day=None,
            preprocessor=None,
            native_obj=None,
            decision_threshold=None,
            _version=_student_version)
        # load input params
        with NamedTemporaryFile(mode='rt',
                                buffering=-1,
                                encoding='utf-8',
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=True,
                                errors=None) as input_params_tmp_file:
            # copy remote artifact onto the (still-open) temp file's path
            if _ON_S3:
                s3.cp(from_path=student.input_params_url,
                      to_path=input_params_tmp_file.name,
                      is_dir=False, quiet=False)
            else:
                fs.cp(from_path=student.input_params_url,
                      to_path=input_params_tmp_file.name,
                      hdfs=False, is_dir=False)
            # pylint: disable=consider-using-with
            d: Dict[str, Union[List[str], int]] = \
                yaml.safe_load(stream=open(file=input_params_tmp_file.name,
                                           mode='rt', encoding='utf-8'),
                               version=None)
        student.input_cat_cols = d['cat-cols']
        student.input_num_cols = d['num-cols']
        # defaults cover artifacts saved before these params existed
        student.input_subsampling_factor = d.get('subsampling-factor', 1)
        student.input_n_rows_per_day = d.get('n-rows-per-day', N_MINUTES_PER_DAY)  # noqa: E501
        # load preprocessing params
        with NamedTemporaryFile(mode='rt',
                                buffering=-1,
                                encoding='utf-8',
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=True,
                                errors=None) as preproc_params_tmp_file:
            if _ON_S3:
                s3.cp(from_path=student.preproc_params_url,
                      to_path=preproc_params_tmp_file.name,
                      is_dir=False, quiet=False)
            else:
                fs.cp(from_path=student.preproc_params_url,
                      to_path=preproc_params_tmp_file.name,
                      hdfs=False, is_dir=False)
            student.preprocessor = \
                PandasMLPreprocessor.from_yaml(path=preproc_params_tmp_file.name)  # noqa: E501
        # load native object
        with NamedTemporaryFile(mode='rb',
                                buffering=-1,
                                encoding=None,
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=True,
                                errors=None) as native_obj_tmp_file:
            if _ON_S3:
                s3.cp(from_path=student.native_obj_url,
                      to_path=native_obj_tmp_file.name,
                      is_dir=False, quiet=False)
            else:
                fs.cp(from_path=student.native_obj_url,
                      to_path=native_obj_tmp_file.name,
                      hdfs=False, is_dir=False)
            student.native_obj = joblib.load(filename=native_obj_tmp_file.name)
        # load postprocessing params
        with NamedTemporaryFile(mode='rt',
                                buffering=-1,
                                encoding='utf-8',
                                newline=None,
                                suffix=None,
                                prefix=None,
                                dir=None,
                                delete=True,
                                errors=None) as postproc_params_tmp_file:
            if _ON_S3:
                s3.cp(from_path=student.postproc_params_url,
                      to_path=postproc_params_tmp_file.name,
                      is_dir=False, quiet=False)
            else:
                fs.cp(from_path=student.postproc_params_url,
                      to_path=postproc_params_tmp_file.name,
                      hdfs=False, is_dir=False)
            # pylint: disable=consider-using-with
            d: Dict[str, float] = yaml.safe_load(
                stream=open(file=postproc_params_tmp_file.name,
                            mode='rt', encoding='utf-8'),
                version=None)
        student.decision_threshold = d['decision-threshold']
        return student
    @property
    def flattening_subsampler(self) -> PandasFlatteningSubsampler:
        """Get instance's Pandas flattening subsampler.

        Built fresh on each access from the preprocessor's sorted column
        list and the instance's subsampling params.
        """
        return PandasFlatteningSubsampler(
            columns=tuple(self.preprocessor.sortedPreprocCols),
            everyNRows=self.input_subsampling_factor,
            totalNRows=self.input_n_rows_per_day)
    def predict(self,
                df_for_1_equipment_unit_for_1_day: DataFrame, /,
                return_binary: bool = True) -> Union[bool, float]:
        # pylint: disable=arguments-differ
        """Predict fault.

        Preprocesses & flattens one equipment-unit-day of data into a
        single feature row (expand_dims builds a 1-sample batch), then
        returns either the positive-class probability or its strict
        comparison against the decision threshold.
        """
        prob: float = self.native_obj.predict_proba(
            X=expand_dims(
                self.flattening_subsampler(
                    self.preprocessor(df_for_1_equipment_unit_for_1_day)).values,  # noqa: E501
                axis=0))[0, 1]
        return (prob > self.decision_threshold) if return_binary else prob
    def batch_predict(self,
                      parquet_ds: ParquetDataset, /,
                      return_binary: bool = True) -> Series:
        # pylint: disable=arguments-differ
        """Batch-Predict faults.

        Maps preprocessing + per-(equipment-instance, date) flattening
        over the dataset, then scores all rows in one predict_proba call.
        """
        df: DataFrame = parquet_ds.map(
            self.preprocessor,
            lambda df: (df.groupby(by=[EQUIPMENT_INSTANCE_ID_COL, DATE_COL],
                                   axis='index',
                                   level=None,
                                   as_index=True,  # group labels as index
                                   sort=False,  # better performance
                                   # when `apply`ing: add group keys to index?
                                   group_keys=False,
                                   # squeeze=False,  # deprecated
                                   observed=False,
                                   dropna=True)
                        .apply(func=self.flattening_subsampler,
                               padWithLastRow=True))).collect()
        df.loc[:, 'FAULT'] = self.native_obj.predict_proba(X=df.values)[:, 1]
        return (df.FAULT > self.decision_threshold) if return_binary else df.FAULT  # noqa: E501
    def tune_decision_threshold(self, tuning_date_range: Tuple[str, str]):
        """Tune Model's decision threshold to maximize P-R harmonic mean.

        Uses the teacher's batch output over the tuning range as ground
        truth (nulls treated as False), sweeps the precision-recall curve
        of this model's probabilistic output, picks the threshold with
        the best precision-recall harmonic mean, and re-saves the model.
        """
        tune_from_date, tune_to_date = tuning_date_range
        precision, recall, thresholds = \
            precision_recall_curve(
                y_true=((_y_true := self.teacher.batch_process(date=tune_from_date,  # noqa: E501
                                                               to_date=tune_to_date))  # noqa: E501
                        .mask(cond=_y_true.isnull(),
                              other=False,
                              inplace=False,
                              axis='index',
                              level=None)
                        .astype(dtype=bool, copy=True, errors='raise')),
                probas_pred=self.batch_process(date=tune_from_date,
                                               to_date=tune_to_date,
                                               return_binary=False))
        # precision_recall_curve returns 1 fewer threshold than P/R points;
        # append sentinel threshold 1 to align column lengths
        df: DataFrame = DataFrame(data=dict(threshold=list(thresholds) + [1],
                                            precision=precision, recall=recall))  # noqa: E501
        df.loc[:, 'pr_hmean'] = hmean(df[['precision', 'recall']], axis=1)
        best_pr_tradeoff_idx: int = df.pr_hmean.argmax(skipna=True)
        print('BEST PRECISION-RECALL TRADE-OFF:')
        self.decision_threshold: float = \
            float(df.threshold.iloc[best_pr_tradeoff_idx])
        print(f'- Decision Threshold: {self.decision_threshold:.3f}')
        print(f'- Precision: {df.precision.iloc[best_pr_tradeoff_idx]:.3f}')
        print(f'- Recall: {df.recall.iloc[best_pr_tradeoff_idx]:.3f}')
        print(f'- PR Harmonic Mean: {df.pr_hmean.iloc[best_pr_tradeoff_idx]:.3f}')  # noqa: E501
        self.save()
from typing import Tuple
import click
from aito.pmfp.models import TimeSeriesDLFaultPredStudent
import aito.util.debug
@click.command(name='tune-fault-pred-student-decision-threshold',
               help="Tune a Student model's decision threshold >>>",
               epilog="^^^ Tune a Student model's decision threshold",
               short_help="Tune a Student model's decision threshold",
               no_args_is_help=True)
@click.argument('student_version', type=str, required=True,
                metavar='STUDENT_VERSION')
@click.argument('date_range', type=str, required=True, nargs=2,
                metavar='FROM_DATE TO_DATE')
@click.option('--debug',
              is_flag=True, default=False, show_default=True,
              metavar='DEBUG',
              help='Run in DEBUG mode')
def tune_fault_pred_student_decision_threshold(student_version: str,
                                               date_range: Tuple[str, str],
                                               debug: bool = False):
    """Tune a Knowledge Generalizer ("Student") model's decision threshold."""
    if debug:
        aito.util.debug.ON = True

    # load the Student model by its version string, then tune & re-save
    # its decision threshold over the given (from, to) date range
    student_model: TimeSeriesDLFaultPredStudent = \
        TimeSeriesDLFaultPredStudent.load(version=student_version)

    student_model.tune_decision_threshold(tuning_date_range=date_range)
from pathlib import Path
from pprint import pprint
from typing import Optional
import click
from pandas import Series
from aito.pmfp.models import BaseFaultPredictor, H1ST_BATCH_OUTPUT_DIR_PATH
import aito.util.debug
from aito.util.path import add_cwd_to_py_path
@click.command(name='predict-faults',
               help='Batch-predict equipment faults >>>',
               epilog='^^^ Batch-predict equipment faults',
               short_help='Batch-predict equipment faults',
               no_args_is_help=True)
@click.argument('model_class_name', type=str, required=True,
                metavar='MODEL_CLASS_NAME')
@click.argument('model_version', type=str, required=True,
                metavar='MODEL_VERSION')
@click.argument('date', type=str, required=True,
                metavar='DATE')
@click.option('--to-date',
              type=str, default=None, show_default=True,
              metavar='TO_DATE',
              help='To Date (YYYY-MM-DD)')
@click.option('--debug',
              is_flag=True, default=False, show_default=True,
              metavar='DEBUG',
              help='Run in DEBUG mode')
def predict_faults(
        model_class_name: str, model_version: str,
        date: str, to_date: Optional[str] = None,
        debug: bool = False):
    """Batch-predict equipment faults."""
    if debug:
        aito.util.debug.ON = True

    # dynamically resolve & load the requested model class/version
    add_cwd_to_py_path()
    import ai.models  # pylint: disable=import-error,import-outside-toplevel
    model_class = getattr(ai.models, model_class_name)
    model: BaseFaultPredictor = model_class.load(version=model_version)

    # batch-predict over the requested date range
    results: Series = model.batch_process(date=date, to_date=to_date,
                                          return_json=False)

    # keep only positive predictions:
    # tuple-valued results are positive when any element is truthy;
    # boolean results filter on themselves
    if isinstance(results.iloc[0], tuple):
        fault_preds: Series = results.loc[results.map(sum) > 0]
    else:
        fault_preds: Series = results.loc[results]

    # print
    pprint(fault_preds.to_dict())

    # save predictions to CSV (creating parent dirs as needed)
    output_path: str = (f'{H1ST_BATCH_OUTPUT_DIR_PATH}/'
                        f'{model_class_name}/{model_version}/'
                        f'{date}-to-{to_date}.csv')
    Path(output_path).parent.mkdir(parents=True, exist_ok=True)
    fault_preds.to_csv(output_path, header=True, index=True)
    print(f'\n@ {output_path}')

    # summarize positive-prediction rate
    n_faults: int = len(fault_preds)
    n: int = len(results)
    print(f'\n{n_faults:,} Predicted Daily Faults '
          f'({100 * n_faults / n:.3f}% of {n:,})')
from typing import List, Tuple # Py3.9+: use built-ins/collections.abc
import click
from aito.pmfp.models import BaseFaultPredTeacher, FaultPredOracleModeler
from aito.pmfp.models.oracle.student.timeseries_dl import N_MINUTES_PER_DAY
import aito.util.debug
from aito.util.path import add_cwd_to_py_path
@click.command(name='oraclize-fault-pred-teacher',
               help='Oraclize a Fault-Prediction Knowledge ("Teacher") model >>>',  # noqa: E501
               epilog='^^^ Oraclize a Fault-Prediction Knowledge ("Teacher") model',  # noqa: E501
               short_help='Oraclize a Fault-Prediction Knowledge ("Teacher") model',  # noqa: E501
               no_args_is_help=True)
@click.argument('teacher_class_name', type=str, required=True,
                metavar='TEACHER_CLASS_NAME')
@click.argument('teacher_version', type=str, required=True,
                metavar='TEACHER_VERSION')
@click.option('--input-cat-cols',
              type=str, default='', show_default=True,
              metavar='INPUT_CAT_COL,INPUT_CAT_COL,...',
              help='Comma-separated Input Categorical Columns')
@click.option('--input-num-cols',
              type=str, default='', show_default=True,
              metavar='INPUT_NUM_COL,INPUT_NUM_COL,...',
              help='Comma-separated Input Numerical Columns')
@click.option('--input-subsampling-factor',
              type=int, default=1, show_default=True,
              metavar='INPUT_SUBSAMPLING_FACTOR',
              help='Input Sub-Sampling Factor (positive int)')
@click.option('--input-n-rows-per-day',
              type=int, default=N_MINUTES_PER_DAY, show_default=True,
              metavar='INPUT_N_ROWS_PER_DAY',
              help='Input No. of Rows per Day (positive int)')
@click.option('--train-date-range',
              type=str, required=True, nargs=2,
              metavar='TRAIN_FROM_DATE TRAIN_TO_DATE',
              help='Training Data Date Range')
@click.option('--tune-date-range',
              type=str, required=True, nargs=2,
              metavar='TUNE_FROM_DATE TUNE_TO_DATE',
              help='Decision-Threshold-Tuning Data Date Range')
@click.option('--debug',
              is_flag=True, default=False, show_default=True,
              metavar='DEBUG',
              help='Run in DEBUG mode')
def oraclize_fault_pred_teacher(teacher_class_name: str, teacher_version: str,
                                input_cat_cols: str, input_num_cols: str,
                                input_subsampling_factor: int,
                                input_n_rows_per_day: int,
                                train_date_range: Tuple[str, str],
                                tune_date_range: Tuple[str, str],
                                debug: bool = False):
    """Oraclize a Fault-Prediction Knowledge ("Teacher") model."""
    # at least one kind of input column must be specified
    assert input_cat_cols or input_num_cols, \
        'at least one of --input-cat-cols / --input-num-cols is required'

    # BUGFIX: ''.split(',') returns [''], which previously injected a
    # spurious empty-string column name whenever only one of the two
    # column options was given; an empty option now yields an empty list
    input_cat_cols: List[str] = (input_cat_cols.split(',')
                                 if input_cat_cols
                                 else [])
    input_num_cols: List[str] = (input_num_cols.split(',')
                                 if input_num_cols
                                 else [])

    if debug:
        aito.util.debug.ON = True

    # load Teacher model (class resolved dynamically from ai.models)
    add_cwd_to_py_path()
    import ai.models  # pylint: disable=import-error,import-outside-toplevel
    teacher: BaseFaultPredTeacher = (getattr(ai.models, teacher_class_name)
                                     .load(version=teacher_version))

    # oraclize Teacher model: train & tune a Student to generalize it
    FaultPredOracleModeler(
        teacher=teacher,
        student_input_cat_cols=input_cat_cols,
        student_input_num_cols=input_num_cols,
        student_input_subsampling_factor=input_subsampling_factor,
        student_input_n_rows_per_day=input_n_rows_per_day,
        student_train_date_range=train_date_range,
        student_tuning_date_range=tune_date_range,
    ).build_model()
from rest_framework_filters import FilterSet, RelatedFilter
from aito.iot_mgmt.data.models import (EquipmentUniqueTypeGroup,
EquipmentInstance)
from aito.iot_mgmt.data.filters import (EquipmentUniqueTypeGroupFilter,
EquipmentInstanceFilter)
from aito.iot_mgmt.maint_ops.models import (
EquipmentInstanceDailyRiskScore,
EquipmentProblemType,
EquipmentInstanceAlarmPeriod,
EquipmentInstanceProblemDiagnosis,
AlertDiagnosisStatus,
EquipmentInstanceAlertPeriod,
)
class EquipmentInstanceDailyRiskScoreFilter(FilterSet):
    """Filter set for Equipment Instances' daily Risk Scores."""

    equipment_unique_type_group = RelatedFilter(
        queryset=EquipmentUniqueTypeGroup.objects.all(),
        filterset=EquipmentUniqueTypeGroupFilter)

    equipment_instance = RelatedFilter(
        queryset=EquipmentInstance.objects.all(),
        filterset=EquipmentInstanceFilter)

    class Meta:
        """Filter metadata: target model & per-field lookup expressions."""

        model = EquipmentInstanceDailyRiskScore

        fields = {
            'risk_score_name': ['exact', 'iexact',
                                'in',
                                'contains', 'icontains',
                                'startswith', 'istartswith',
                                'endswith', 'iendswith'],
            'date': ['exact',
                     'gt', 'gte', 'lt', 'lte',
                     'in',
                     'contains',
                     'startswith',
                     'endswith',
                     'range',
                     'isnull',
                     'year',
                     'year__gt', 'year__gte', 'year__lt', 'year__lte',
                     'year__in',
                     'year__range',
                     'month',
                     'month__gt', 'month__gte', 'month__lt', 'month__lte',
                     'month__in',
                     'month__range'],
            'risk_score_value': ['gt', 'gte', 'lt', 'lte',
                                 'startswith', 'istartswith',
                                 'range'],
        }
class EquipmentProblemTypeFilter(FilterSet):
    """Filter set for Equipment Problem Types."""

    class Meta:
        """Filter metadata: target model & per-field lookup expressions."""

        model = EquipmentProblemType

        fields = {
            'name': ['exact', 'iexact',
                     'in',
                     'contains', 'icontains',
                     'startswith', 'istartswith',
                     'endswith', 'iendswith'],
        }
class EquipmentInstanceAlarmPeriodFilter(FilterSet):
    """Filter set for Equipment Instance Alarm Periods."""

    equipment_instance = RelatedFilter(
        queryset=EquipmentInstance.objects.all(),
        filterset=EquipmentInstanceFilter)

    alarm_type = RelatedFilter(
        queryset=EquipmentProblemType.objects.all(),
        filterset=EquipmentProblemTypeFilter)

    class Meta:
        """Filter metadata: target model & per-field lookup expressions."""

        model = EquipmentInstanceAlarmPeriod

        # shared lookup lists (extra Meta attrs are ignored by django-filter)
        _DATE_TIME_LOOKUPS = ['exact',
                              'gt', 'gte', 'lt', 'lte',
                              'in',
                              'contains',
                              'startswith',
                              'endswith',
                              'range',
                              'isnull',
                              'year',
                              'year__gt', 'year__gte',
                              'year__lt', 'year__lte',
                              'year__in',
                              'year__range',
                              'month',
                              'month__gt', 'month__gte',
                              'month__lt', 'month__lte',
                              'month__in',
                              'month__range']

        fields = {
            'from_utc_date_time': _DATE_TIME_LOOKUPS,
            'to_utc_date_time': _DATE_TIME_LOOKUPS,
            'duration_in_days': ['exact',
                                 'gt', 'gte', 'lt', 'lte',
                                 'in',
                                 'contains',
                                 'startswith',
                                 'endswith',
                                 'range'],
            'has_associated_equipment_instance_alert_periods': ['exact'],
            'has_associated_equipment_instance_problem_diagnoses': ['exact'],
        }
class EquipmentInstanceProblemDiagnosisFilter(FilterSet):
    """Filter set for Equipment Instance Problem Diagnoses."""

    equipment_instance = RelatedFilter(
        queryset=EquipmentInstance.objects.all(),
        filterset=EquipmentInstanceFilter)

    equipment_problem_types = RelatedFilter(
        queryset=EquipmentProblemType.objects.all(),
        filterset=EquipmentProblemTypeFilter)

    class Meta:
        """Filter metadata: target model & per-field lookup expressions."""

        model = EquipmentInstanceProblemDiagnosis

        # shared lookup lists (extra Meta attrs are ignored by django-filter)
        _DATE_LOOKUPS = ['exact',
                         'gt', 'gte', 'lt', 'lte',
                         'in',
                         'contains',
                         'startswith',
                         'endswith',
                         'range',
                         'isnull',
                         'year',
                         'year__gt', 'year__gte', 'year__lt', 'year__lte',
                         'year__in',
                         'year__range',
                         'month',
                         'month__gt', 'month__gte', 'month__lt', 'month__lte',
                         'month__in',
                         'month__range']

        fields = {
            'from_date': _DATE_LOOKUPS,
            'to_date': _DATE_LOOKUPS,
            'duration': ['exact',
                         'gt', 'gte', 'lt', 'lte',
                         'in',
                         'contains',
                         'startswith',
                         'endswith',
                         'range'],
            'has_equipment_problems': ['exact'],
            'dismissed': ['exact'],
            'has_associated_equipment_instance_alarm_periods': ['exact'],
            'has_associated_equipment_instance_alert_periods': ['exact'],
        }
class AlertDiagnosisStatusFilter(FilterSet):
    """Filter set for Alert Diagnosis Statuses."""

    class Meta:
        """Filter metadata: target model & per-field lookup expressions."""

        model = AlertDiagnosisStatus

        fields = {
            'name': ['exact', 'iexact',
                     'in',
                     'contains', 'icontains',
                     'startswith', 'istartswith',
                     'endswith', 'iendswith'],
        }
class EquipmentInstanceAlertPeriodFilter(FilterSet):
    """Filter set for Equipment Instance Alert Periods."""

    equipment_unique_type_group = RelatedFilter(
        queryset=EquipmentUniqueTypeGroup.objects.all(),
        filterset=EquipmentUniqueTypeGroupFilter)

    equipment_instance = RelatedFilter(
        queryset=EquipmentInstance.objects.all(),
        filterset=EquipmentInstanceFilter)

    diagnosis_status = RelatedFilter(
        queryset=AlertDiagnosisStatus.objects.all(),
        filterset=AlertDiagnosisStatusFilter)

    class Meta:
        """Filter metadata: target model & per-field lookup expressions."""

        model = EquipmentInstanceAlertPeriod

        # shared lookup lists (extra Meta attrs are ignored by django-filter)
        _NUM_LOOKUPS = ['exact',
                        'gt', 'gte', 'lt', 'lte',
                        'in',
                        'contains',
                        'startswith',
                        'endswith',
                        'range']

        _DATE_LOOKUPS = ['exact',
                         'gt', 'gte', 'lt', 'lte',
                         'in',
                         'contains',
                         'startswith',
                         'endswith',
                         'range',
                         'isnull',
                         'year',
                         'year__gt', 'year__gte', 'year__lt', 'year__lte',
                         'year__in',
                         'year__range',
                         'month',
                         'month__gt', 'month__gte', 'month__lt', 'month__lte',
                         'month__in',
                         'month__range']

        fields = {
            'risk_score_name': ['exact', 'iexact',
                                'in',
                                'contains', 'icontains',
                                'startswith', 'istartswith',
                                'endswith', 'iendswith'],
            'threshold': _NUM_LOOKUPS,
            'from_date': _DATE_LOOKUPS,
            'to_date': _DATE_LOOKUPS,
            'duration': _NUM_LOOKUPS,
            'cumulative_excess_risk_score': _NUM_LOOKUPS,
            'approx_average_risk_score': _NUM_LOOKUPS,
            'last_risk_score': _NUM_LOOKUPS,
            'ongoing': ['exact'],
            'has_associated_equipment_instance_alarm_periods': ['exact'],
            'has_associated_equipment_instance_problem_diagnoses': ['exact'],
        }
from django.contrib.admin.decorators import register
from django.contrib.admin.options import ModelAdmin
from django.db.models import Prefetch
from silk.profiling.profiler import silk_profile
from aito.iot_mgmt.maint_ops.models import (
EquipmentProblemType,
EquipmentInstanceDailyPredictedFault,
EquipmentInstanceAlarmPeriod,
EquipmentInstanceProblemDiagnosis,
EquipmentInstanceAlertPeriod,
AlertDiagnosisStatus,
)
from aito.iot_mgmt.maint_ops.querysets import (
EQUIPMENT_INSTANCE_ALARM_PERIOD_STR_QUERYSET,
EQUIPMENT_INSTANCE_ALERT_PERIOD_STR_QUERYSET,
EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_ID_ONLY_UNORDERED_QUERYSET,
EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_STR_QUERYSET,
)
# pylint: disable=invalid-name,line-too-long
@register(EquipmentProblemType)
class EquipmentProblemTypeAdmin(ModelAdmin):
    """Django admin for Equipment Problem Types."""

    list_display = 'name',
    search_fields = 'name',
    # skip the expensive full COUNT(*) on filtered change lists
    show_full_result_count = False

    @silk_profile(name='Admin: Equipment Problem Types')
    def changelist_view(self, *args, **kwargs):
        """Change-list view (wrapped for silk profiling)."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Problem Type')
    def changeform_view(self, *args, **kwargs):
        """Change-form view (wrapped for silk profiling)."""
        return super().changeform_view(*args, **kwargs)
@register(EquipmentInstanceDailyPredictedFault)
class EquipmentInstanceDailyPredictedFaultAdmin(ModelAdmin):
    """Django admin for Equipment Instance Daily Predicted Faults."""

    # same column set is displayed, filterable & searchable; all read-only
    _COLS = (
        'equipment_unique_type_group',
        'equipment_instance',
        'date',
        'fault_type',
        'fault_predictor_name',
        'predicted_fault_probability',
    )

    list_display = _COLS

    list_filter = (
        'equipment_unique_type_group__equipment_general_type__name',
        'equipment_unique_type_group__name',
        'date',
        'fault_type__name',
        'fault_predictor_name',
    )

    search_fields = (
        'equipment_unique_type_group__equipment_general_type__name',
        'equipment_unique_type_group__name',
        'date',
        'fault_type__name',
        'fault_predictor_name',
    )

    # skip the expensive full COUNT(*) on filtered change lists
    show_full_result_count = False

    readonly_fields = _COLS

    def get_queryset(self, request):
        """Return queryset with related models eagerly joined."""
        qs = super().get_queryset(request)
        return (qs
                .select_related(
                    'equipment_unique_type_group',
                    'equipment_unique_type_group__equipment_general_type',
                    'equipment_instance',
                    'equipment_instance__equipment_general_type',
                    'equipment_instance__equipment_unique_type',
                    'fault_type')
                .defer(
                    'equipment_instance__equipment_facility',
                    'equipment_instance__info'))

    @silk_profile(name='Admin: Equipment Instance Daily Predicted Faults')
    def changelist_view(self, *args, **kwargs):
        """Change-list view (wrapped for silk profiling)."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Instance Daily Predicted Fault')
    def changeform_view(self, *args, **kwargs):
        """Change-form view (wrapped for silk profiling)."""
        return super().changeform_view(*args, **kwargs)
@register(EquipmentInstanceAlarmPeriod)
class EquipmentInstanceAlarmPeriodAdmin(ModelAdmin):
    """Read-only admin for Equipment Instance Alarm Periods."""

    list_display = (
        'equipment_instance',
        'alarm_type',
        'from_utc_date_time',
        'to_utc_date_time',
        'duration_in_days',
        'has_associated_equipment_instance_alert_periods',
        'has_associated_equipment_instance_problem_diagnoses',
    )

    list_filter = (
        'equipment_instance__equipment_general_type__name',
        'alarm_type__name',
        'from_utc_date_time',
        'to_utc_date_time',
        'has_associated_equipment_instance_alert_periods',
        'has_associated_equipment_instance_problem_diagnoses',
    )

    search_fields = (
        'equipment_instance__equipment_general_type__name',
        'equipment_instance__equipment_unique_type__name',
        'equipment_instance__name',
    )

    # skip the extra exact COUNT(*) query on large change lists
    show_full_result_count = False

    readonly_fields = (
        'equipment_instance',
        'alarm_type',
        'from_utc_date_time',
        'to_utc_date_time',
        'duration_in_days',
        'date_range',
        'has_associated_equipment_instance_alert_periods',
        'equipment_instance_alert_periods',
        'has_associated_equipment_instance_problem_diagnoses',
        'equipment_instance_problem_diagnoses',
    )

    def get_queryset(self, request):
        """Get queryset.

        Detail (change-form) views prefetch the related alert periods and
        problem diagnoses; list views instead defer the heavy `date_range`
        column, which the change list never displays.
        """
        qs = super().get_queryset(request=request) \
            .select_related(
                'equipment_instance',
                'equipment_instance__equipment_general_type',
                'equipment_instance__equipment_unique_type',
                'alarm_type') \
            .defer(
                'equipment_instance__equipment_facility',
                'equipment_instance__info')
        return qs.prefetch_related(
            Prefetch(
                lookup='equipment_instance_alert_periods',
                queryset=EQUIPMENT_INSTANCE_ALERT_PERIOD_STR_QUERYSET),
            Prefetch(
                lookup='equipment_instance_problem_diagnoses',
                queryset=EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_STR_QUERYSET)) \
            if request.resolver_match.url_name.endswith('_change') \
            else qs.defer('date_range')

    @silk_profile(name='Admin: Equipment Instance Alarm Periods')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Instance Alarm Period')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)
@register(EquipmentInstanceProblemDiagnosis)
class EquipmentInstanceProblemDiagnosisAdmin(ModelAdmin):
    """Admin for Equipment Instance Problem Diagnoses."""

    list_display = (
        'equipment_instance',
        'from_date',
        'to_date',
        'duration',
        'equipment_problem_type_names',
        'dismissed',
        'comments',
        'has_associated_equipment_instance_alarm_periods',
        'has_associated_equipment_instance_alert_periods',
    )

    list_filter = (
        'equipment_instance__equipment_general_type__name',
        'from_date',
        'to_date',
        'dismissed',
    )

    readonly_fields = (
        'date_range',
        'duration',
        'has_equipment_problems',
        'has_associated_equipment_instance_alarm_periods',
        'equipment_instance_alarm_periods',
        'has_associated_equipment_instance_alert_periods',
        'equipment_instance_alert_periods',
    )

    # skip the extra exact COUNT(*) query on large change lists
    show_full_result_count = False

    search_fields = (
        'equipment_instance__equipment_general_type__name',
        'equipment_instance__equipment_unique_type__name',
        'equipment_instance__name',
    )

    def equipment_problem_type_names(self, obj):
        # pylint: disable=no-self-use
        """Extra displayed field: comma-joined problem-type names.

        Relies on `equipment_problem_types` being prefetched by
        `get_queryset` to avoid a per-row query.
        """
        return ', '.join(equipment_problem_type.name
                         for equipment_problem_type in
                         obj.equipment_problem_types.all())

    def get_queryset(self, request):
        """Get queryset.

        Detail views prefetch all related collections; list views defer
        the heavy `date_range` column and prefetch only the problem types
        needed by `equipment_problem_type_names`.
        """
        qs = super().get_queryset(request) \
            .select_related(
                'equipment_instance',
                'equipment_instance__equipment_general_type',
                'equipment_instance__equipment_unique_type') \
            .defer(
                'equipment_instance__equipment_facility',
                'equipment_instance__info')
        return qs.prefetch_related(
            'equipment_problem_types',
            Prefetch(
                lookup='equipment_instance_alarm_periods',
                queryset=EQUIPMENT_INSTANCE_ALARM_PERIOD_STR_QUERYSET),
            Prefetch(
                lookup='equipment_instance_alert_periods',
                queryset=EQUIPMENT_INSTANCE_ALERT_PERIOD_STR_QUERYSET)) \
            if request.resolver_match.url_name.endswith('_change') \
            else qs.defer('date_range') \
                   .prefetch_related('equipment_problem_types')

    @silk_profile(name='Admin: Equipment Problem Diagnoses')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Problem Diagnosis')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)
@register(AlertDiagnosisStatus)
class AlertDiagnosisStatusAdmin(ModelAdmin):
    """Admin interface for Alert Diagnosis Statuses."""

    list_display = ('index', 'name')

    # skip the extra exact COUNT(*) query on large change lists
    show_full_result_count = False

    @silk_profile(name='Admin: Alert Diagnosis Statuses')
    def changelist_view(self, *args, **kwargs):
        """Profiled change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Alert Diagnosis Status')
    def changeform_view(self, *args, **kwargs):
        """Profiled change-form view."""
        return super().changeform_view(*args, **kwargs)
@register(EquipmentInstanceAlertPeriod)
class EquipmentInstanceAlertPeriodAdmin(ModelAdmin):
    """Admin interface for Equipment Instance Alert Periods."""

    list_display = (
        'equipment_unique_type_group',
        'equipment_instance',
        'risk_score_name',
        'threshold',
        'from_date',
        'to_date',
        'duration',
        'approx_average_risk_score',
        'last_risk_score',
        'cumulative_excess_risk_score',
        'ongoing',
        'diagnosis_status',
        'has_associated_equipment_instance_alarm_periods',
        'has_associated_equipment_instance_problem_diagnoses',
    )

    list_filter = (
        'equipment_unique_type_group__equipment_general_type__name',
        'equipment_unique_type_group__name',
        'risk_score_name',
        'threshold',
        'from_date',
        'to_date',
        'ongoing',
        'diagnosis_status',
        'has_associated_equipment_instance_alarm_periods',
        'has_associated_equipment_instance_problem_diagnoses',
    )

    search_fields = (
        'equipment_unique_type_group__equipment_general_type__name',
        'equipment_unique_type_group__name',
        'equipment_instance__name',
        'risk_score_name',
    )

    # skip the extra exact COUNT(*) query on large change lists
    show_full_result_count = False

    readonly_fields = (
        'equipment_unique_type_group',
        'equipment_instance',
        'risk_score_name',
        'threshold',
        'from_date',
        'to_date',
        'date_range',
        'duration',
        'approx_average_risk_score',
        'last_risk_score',
        'cumulative_excess_risk_score',
        'ongoing',
        'info',
        'has_associated_equipment_instance_alarm_periods',
        'equipment_instance_alarm_periods',
        'has_associated_equipment_instance_problem_diagnoses',
    )

    def get_queryset(self, request):
        """Return an optimized queryset for the current admin view."""
        base_qs = super().get_queryset(request) \
            .select_related(
                'equipment_unique_type_group',
                'equipment_unique_type_group__equipment_general_type',
                'equipment_instance',
                'equipment_instance__equipment_general_type',
                'equipment_instance__equipment_unique_type',
                'diagnosis_status') \
            .defer(
                'equipment_instance__equipment_facility',
                'equipment_instance__info')

        if request.resolver_match.url_name.endswith('_change'):
            # detail view: pull in the related alarm periods and (id-only)
            # problem diagnoses in bulk
            return base_qs.prefetch_related(
                Prefetch(
                    lookup='equipment_instance_alarm_periods',
                    queryset=EQUIPMENT_INSTANCE_ALARM_PERIOD_STR_QUERYSET),
                Prefetch(
                    lookup='equipment_instance_problem_diagnoses',
                    queryset=EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_ID_ONLY_UNORDERED_QUERYSET))  # noqa: E501

        # list view: skip the heavy columns the change list never shows
        return base_qs.defer('date_range', 'info')

    @silk_profile(name='Admin: Equipment Instance Alert Periods')
    def changelist_view(self, *args, **kwargs):
        """Profiled change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Instance Alert Period')
    def changeform_view(self, *args, **kwargs):
        """Profiled change-form view."""
        return super().changeform_view(*args, **kwargs)
from rest_framework.authentication import (BasicAuthentication,
RemoteUserAuthentication,
SessionAuthentication,
TokenAuthentication)
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.permissions import (IsAuthenticated,
IsAuthenticatedOrReadOnly)
from rest_framework.renderers import CoreJSONRenderer, JSONRenderer
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
from silk.profiling.profiler import silk_profile
from aito.iot_mgmt.maint_ops.filters import (
EquipmentInstanceDailyRiskScoreFilter,
EquipmentProblemTypeFilter,
EquipmentInstanceAlarmPeriodFilter,
EquipmentInstanceProblemDiagnosisFilter,
AlertDiagnosisStatusFilter,
EquipmentInstanceAlertPeriodFilter,
)
from aito.iot_mgmt.maint_ops.querysets import (
EQUIPMENT_INSTANCE_DAILY_RISK_SCORE,
EQUIPMENT_PROBLEM_TYPE_QUERYSET,
EQUIPMENT_INSTANCE_ALARM_PERIOD_REST_API_QUERYSET,
ALERT_DIAGNOSIS_STATUS_REST_API_QUERYSET,
EQUIPMENT_INSTANCE_ALERT_PERIOD_REST_API_QUERYSET,
EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_REST_API_QUERYSET,
)
from aito.iot_mgmt.maint_ops.serializers import (
EquipmentInstanceDailyRiskScoreSerializer,
EquipmentProblemTypeSerializer,
EquipmentInstanceAlarmPeriodSerializer,
EquipmentInstanceProblemDiagnosisSerializer,
AlertDiagnosisStatusSerializer,
EquipmentInstanceAlertPeriodSerializer,
)
class EquipmentInstanceDailyRiskScoreViewSet(ReadOnlyModelViewSet):
    """EquipmentInstanceDailyRiskScoreViewSet.
    list:
    `GET` a filterable, paginated list of Equipment Instance Daily Risk Scores
    retrieve:
    `GET` the Equipment Instance Daily Risk Score specified by `id`
    """

    queryset = EQUIPMENT_INSTANCE_DAILY_RISK_SCORE
    serializer_class = EquipmentInstanceDailyRiskScoreSerializer

    # accept every standard DRF authentication scheme
    authentication_classes = (BasicAuthentication,
                              RemoteUserAuthentication,
                              SessionAuthentication,
                              TokenAuthentication)
    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentInstanceDailyRiskScoreFilter
    ordering_fields = ('equipment_unique_type_group',
                       'equipment_instance',
                       'risk_score_name',
                       'date')
    pagination_class = LimitOffsetPagination
    renderer_classes = (CoreJSONRenderer, JSONRenderer)

    @silk_profile(name='API: Equipment Instance Daily Risk Scores')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Instance Daily Risk Score')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve item."""
        return super().retrieve(request, *args, **kwargs)
class EquipmentProblemTypeViewSet(ModelViewSet):
    """EquipmentProblemTypeViewSet.
    list:
    `GET` a filterable, unpaginated list of Equipment Problem Types
    retrieve:
    `GET` the Equipment Problem Type specified by `name`
    create:
    `POST` a new Equipment Problem Type by `name`
    update:
    `PUT` updated data for the Equipment Problem Type specified by `name`
    partial_update:
    `PATCH` the Equipment Problem Type specified by `name`
    destroy:
    `DELETE` the Equipment Problem Type specified by `name`
    """

    queryset = EQUIPMENT_PROBLEM_TYPE_QUERYSET
    serializer_class = EquipmentProblemTypeSerializer

    # accept every standard DRF authentication scheme
    authentication_classes = (BasicAuthentication,
                              RemoteUserAuthentication,
                              SessionAuthentication,
                              TokenAuthentication)
    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentProblemTypeFilter
    ordering_fields = ('name',)
    ordering = ('name',)

    # small lookup table: return everything, no pagination
    pagination_class = None

    # addressed by name in URLs rather than by numeric pk
    lookup_field = 'name'
    lookup_url_kwarg = 'equipment_problem_type_name'

    renderer_classes = (CoreJSONRenderer, JSONRenderer)

    @silk_profile(name='API: Equipment Problem Types')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Problem Type')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve item."""
        return super().retrieve(request, *args, **kwargs)
class EquipmentInstanceAlarmPeriodViewSet(ModelViewSet):
    """EquipmentInstanceAlarmPeriodViewSet."""

    queryset = EQUIPMENT_INSTANCE_ALARM_PERIOD_REST_API_QUERYSET
    serializer_class = EquipmentInstanceAlarmPeriodSerializer

    # accept every standard DRF authentication scheme
    authentication_classes = (BasicAuthentication,
                              RemoteUserAuthentication,
                              SessionAuthentication,
                              TokenAuthentication)
    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentInstanceAlarmPeriodFilter
    ordering_fields = ('equipment_instance', 'from_utc_date_time')

    # most recent alarm periods first by default
    ordering = ('-from_utc_date_time',)

    pagination_class = LimitOffsetPagination
    renderer_classes = (CoreJSONRenderer, JSONRenderer)

    @silk_profile(name='API: Equipment Instance Alarm Periods')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Instance Alarm Period')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve item."""
        return super().retrieve(request, *args, **kwargs)
class EquipmentInstanceProblemDiagnosisViewSet(ModelViewSet):
    """EquipmentInstanceProblemDiagnosisViewSet."""

    queryset = EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_REST_API_QUERYSET
    serializer_class = EquipmentInstanceProblemDiagnosisSerializer

    # accept every standard DRF authentication scheme
    authentication_classes = (BasicAuthentication,
                              RemoteUserAuthentication,
                              SessionAuthentication,
                              TokenAuthentication)
    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentInstanceProblemDiagnosisFilter
    ordering_fields = ('ongoing',
                       'from_date',
                       'to_date',
                       'equipment_instance',
                       'dismissed')

    # ongoing & most recent diagnoses first; dismissed ones last
    ordering = ('-ongoing',
                '-from_date',
                '-to_date',
                'dismissed')

    pagination_class = LimitOffsetPagination
    renderer_classes = (CoreJSONRenderer, JSONRenderer)

    @silk_profile(name='API: Equipment Instance Problem Diagnoses')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Instance Problem Diagnosis')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve item."""
        return super().retrieve(request, *args, **kwargs)
class AlertDiagnosisStatusViewSet(ReadOnlyModelViewSet):
    """AlertDiagnosisStatusViewSet.
    list:
    `GET` a filterable, unpaginated list of Alert Diagnosis Statuses
    retrieve:
    `GET` the Alert Diagnosis Status specified by `name`
    """

    queryset = ALERT_DIAGNOSIS_STATUS_REST_API_QUERYSET
    serializer_class = AlertDiagnosisStatusSerializer

    # accept every standard DRF authentication scheme
    authentication_classes = (BasicAuthentication,
                              RemoteUserAuthentication,
                              SessionAuthentication,
                              TokenAuthentication)
    permission_classes = (IsAuthenticatedOrReadOnly,)

    filter_class = AlertDiagnosisStatusFilter
    ordering_fields = ('index',)
    ordering = ('index',)

    # small lookup table: return everything, no pagination
    pagination_class = None

    # addressed by name in URLs rather than by numeric pk
    lookup_field = 'name'
    lookup_url_kwarg = 'alert_diagnosis_status_name'

    renderer_classes = (CoreJSONRenderer, JSONRenderer)

    @silk_profile(name='API: Alert Diagnosis Statuses')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Alert Diagnosis Status')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve item."""
        return super().retrieve(request, *args, **kwargs)
class EquipmentInstanceAlertPeriodViewSet(ModelViewSet):
    """EquipmentInstanceAlertPeriodViewSet.
    list:
    `GET` a filterable, paginated list of Alerts
    retrieve:
    `GET` the Alert specified by `id`
    partial_update:
    `PATCH` the `diagnosis_status` of the Alert specified by `id`
    """

    queryset = EQUIPMENT_INSTANCE_ALERT_PERIOD_REST_API_QUERYSET
    serializer_class = EquipmentInstanceAlertPeriodSerializer

    # accept every standard DRF authentication scheme
    authentication_classes = (BasicAuthentication,
                              RemoteUserAuthentication,
                              SessionAuthentication,
                              TokenAuthentication)
    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentInstanceAlertPeriodFilter
    ordering_fields = ('diagnosis_status',
                       'ongoing',
                       'risk_score_name',
                       'threshold',
                       'cumulative_excess_risk_score')

    # undiagnosed/ongoing, highest-risk alerts first by default
    ordering = ('diagnosis_status',
                '-ongoing',
                'risk_score_name',
                '-threshold',
                '-cumulative_excess_risk_score')

    pagination_class = LimitOffsetPagination
    renderer_classes = (CoreJSONRenderer, JSONRenderer)

    @silk_profile(name='API: Equipment Instance Alert Periods')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Instance Alert Period')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve item."""
        return super().retrieve(request, *args, **kwargs)
from django.db.models import Prefetch
from aito.iot_mgmt.maint_ops.models import (
EquipmentInstanceDailyRiskScore,
EquipmentProblemType,
EquipmentInstanceAlarmPeriod,
AlertDiagnosisStatus,
EquipmentInstanceAlertPeriod,
EquipmentInstanceProblemDiagnosis,
)
# Daily risk scores with type-group & instance joined; heavy/unused
# related columns deferred.
EQUIPMENT_INSTANCE_DAILY_RISK_SCORE = \
    EquipmentInstanceDailyRiskScore.objects \
    .select_related(
        'equipment_unique_type_group',
        'equipment_instance') \
    .defer(
        'equipment_unique_type_group__equipment_general_type',
        'equipment_instance__equipment_general_type',
        'equipment_instance__equipment_unique_type',
        'equipment_instance__equipment_facility',
        'equipment_instance__info')

# All equipment problem types (also used as alarm/fault types).
EQUIPMENT_PROBLEM_TYPE_QUERYSET = \
    EquipmentProblemType.objects.all()

# "STR" querysets: lean variants for building string representations
# (e.g. in admin detail prefetches) — flag/range columns deferred.
EQUIPMENT_INSTANCE_ALARM_PERIOD_STR_QUERYSET = \
    EquipmentInstanceAlarmPeriod.objects \
    .defer(
        'date_range',
        'has_associated_equipment_instance_alert_periods',
        'has_associated_equipment_instance_problem_diagnoses') \
    .select_related(
        'equipment_instance',
        'equipment_instance__equipment_general_type',
        'equipment_instance__equipment_unique_type',
        'alarm_type') \
    .defer(
        'equipment_instance__equipment_facility',
        'equipment_instance__info') \
    .order_by(
        'from_utc_date_time')

# "FULL" querysets: richer variants used to compose the REST-API
# querysets further below.
EQUIPMENT_INSTANCE_ALARM_PERIOD_FULL_QUERYSET = \
    EquipmentInstanceAlarmPeriod.objects \
    .defer(
        'date_range') \
    .select_related(
        'equipment_instance',
        'alarm_type') \
    .defer(
        'equipment_instance__equipment_general_type',
        'equipment_instance__equipment_unique_type',
        'equipment_instance__equipment_facility',
        'equipment_instance__info') \
    .order_by(
        'from_utc_date_time')

# All alert diagnosis statuses (small lookup table).
ALERT_DIAGNOSIS_STATUS_REST_API_QUERYSET = \
    AlertDiagnosisStatus.objects.all()

EQUIPMENT_INSTANCE_ALERT_PERIOD_STR_QUERYSET = \
    EquipmentInstanceAlertPeriod.objects \
    .defer(
        'date_range',
        'info',
        'has_associated_equipment_instance_alarm_periods',
        'has_associated_equipment_instance_problem_diagnoses') \
    .select_related(
        'equipment_unique_type_group',
        'equipment_unique_type_group__equipment_general_type',
        'equipment_instance',
        'diagnosis_status') \
    .defer(
        'equipment_instance__equipment_general_type',
        'equipment_instance__equipment_unique_type',
        'equipment_instance__equipment_facility',
        'equipment_instance__info',
        'diagnosis_status__index')
EQUIPMENT_INSTANCE_ALERT_PERIOD_FULL_QUERYSET = \
    EquipmentInstanceAlertPeriod.objects \
    .defer(
        'date_range') \
    .select_related(
        'equipment_unique_type_group',
        'equipment_instance',
        'diagnosis_status') \
    .defer(
        'equipment_unique_type_group__equipment_general_type',
        'equipment_instance__equipment_general_type',
        'equipment_instance__equipment_unique_type',
        'equipment_instance__equipment_facility',
        'equipment_instance__info',
        'diagnosis_status__index')

# Bare id-only, unordered diagnoses: cheapest possible prefetch when
# only existence/ids of related diagnoses are needed.
EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_ID_ONLY_UNORDERED_QUERYSET = \
    EquipmentInstanceProblemDiagnosis.objects \
    .only('id') \
    .order_by()

EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_STR_QUERYSET = \
    EquipmentInstanceProblemDiagnosis.objects \
    .defer(
        'date_range',
        'duration',
        'has_equipment_problems',
        'comments',
        'has_associated_equipment_instance_alarm_periods',
        'has_associated_equipment_instance_alert_periods') \
    .select_related(
        'equipment_instance',
        'equipment_instance__equipment_general_type',
        'equipment_instance__equipment_unique_type') \
    .defer(
        'equipment_instance__equipment_facility',
        'equipment_instance__info') \
    .prefetch_related(
        'equipment_problem_types')

EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_FULL_QUERYSET = \
    EquipmentInstanceProblemDiagnosis.objects \
    .defer(
        'date_range') \
    .select_related(
        'equipment_instance') \
    .defer(
        'equipment_instance__equipment_general_type',
        'equipment_instance__equipment_unique_type',
        'equipment_instance__equipment_facility',
        'equipment_instance__info') \
    .prefetch_related(
        'equipment_problem_types')
# REST-API querysets: the "FULL" querysets above, plus prefetches of the
# related period/diagnosis collections that the serializers render.

EQUIPMENT_INSTANCE_ALARM_PERIOD_REST_API_QUERYSET = (
    EQUIPMENT_INSTANCE_ALARM_PERIOD_FULL_QUERYSET
    .prefetch_related(
        Prefetch(lookup='equipment_instance_alert_periods',
                 queryset=EQUIPMENT_INSTANCE_ALERT_PERIOD_FULL_QUERYSET),
        Prefetch(lookup='equipment_instance_problem_diagnoses',
                 queryset=EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_FULL_QUERYSET)))

EQUIPMENT_INSTANCE_ALERT_PERIOD_REST_API_QUERYSET = (
    EQUIPMENT_INSTANCE_ALERT_PERIOD_FULL_QUERYSET
    .prefetch_related(
        Prefetch(lookup='equipment_instance_alarm_periods',
                 queryset=EQUIPMENT_INSTANCE_ALARM_PERIOD_FULL_QUERYSET),
        Prefetch(lookup='equipment_instance_problem_diagnoses',
                 queryset=EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_FULL_QUERYSET)))

EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_REST_API_QUERYSET = (
    EQUIPMENT_INSTANCE_PROBLEM_DIAGNOSIS_FULL_QUERYSET
    .prefetch_related(
        Prefetch(lookup='equipment_instance_alarm_periods',
                 queryset=EQUIPMENT_INSTANCE_ALARM_PERIOD_FULL_QUERYSET),
        Prefetch(lookup='equipment_instance_alert_periods',
                 queryset=EQUIPMENT_INSTANCE_ALERT_PERIOD_FULL_QUERYSET)))
from rest_framework.serializers import (ModelSerializer,
RelatedField,
SlugRelatedField)
from drf_writable_nested.serializers import WritableNestedModelSerializer
from aito.iot_mgmt.data.models import EquipmentInstance
from aito.iot_mgmt.maint_ops.models import (
EquipmentInstanceDailyRiskScore,
EquipmentProblemType,
EquipmentInstanceAlarmPeriod,
EquipmentInstanceProblemDiagnosis,
AlertDiagnosisStatus,
EquipmentInstanceAlertPeriod,
)
class EquipmentInstanceDailyRiskScoreSerializer(ModelSerializer):
    """Read-only serializer for Equipment Instance Daily Risk Scores."""

    # related objects rendered by their `name` slugs
    equipment_unique_type_group = SlugRelatedField(read_only=True,
                                                   slug_field='name',
                                                   many=False)

    equipment_instance = SlugRelatedField(read_only=True,
                                          slug_field='name',
                                          many=False)

    class Meta:
        """Metadata."""

        model = EquipmentInstanceDailyRiskScore

        fields = ('id',
                  'equipment_unique_type_group',
                  'equipment_instance',
                  'risk_score_name',
                  'date',
                  'risk_score_value')
class EquipmentProblemTypeSerializer(ModelSerializer):
    """Serializer exposing an Equipment Problem Type by its name only."""
    class Meta:
        """Metadata."""
        model = EquipmentProblemType
        fields = ('name',)
class EquipmentInstanceAlarmPeriodRelatedField(RelatedField):
    # pylint: disable=abstract-method
    """Read-only nested representation of an EquipmentInstanceAlarmPeriod."""

    def to_representation(self, value):
        """Serialize the alarm period to a plain dict of primitives."""
        return {
            'equipment_instance': value.equipment_instance.name,
            'alarm_type': value.alarm_type.name,
            'from_utc_date_time': str(value.from_utc_date_time),
            'to_utc_date_time': str(value.to_utc_date_time),
            'duration_in_days': value.duration_in_days,
            'has_associated_equipment_instance_alert_periods':
                value.has_associated_equipment_instance_alert_periods,
            'has_associated_equipment_instance_problem_diagnoses':
                value.has_associated_equipment_instance_problem_diagnoses,
        }
class EquipmentInstanceProblemDiagnosisRelatedField(RelatedField):
    # pylint: disable=abstract-method
    """Read-only nested representation of an EquipmentInstanceProblemDiagnosis."""

    def to_representation(self, value):
        """Serialize the problem diagnosis to a plain dict of primitives."""
        problem_type_names = [problem_type.name
                              for problem_type in
                              value.equipment_problem_types.all()]
        return {
            'equipment_instance': value.equipment_instance.name,
            'from_date': str(value.from_date),
            'to_date': str(value.to_date),
            'duration': value.duration,
            'has_equipment_problems': value.has_equipment_problems,
            'equipment_problem_types': problem_type_names,
            'dismissed': value.dismissed,
            'comments': value.comments,
            'has_associated_equipment_instance_alarm_periods':
                value.has_associated_equipment_instance_alarm_periods,
            'has_associated_equipment_instance_alert_periods':
                value.has_associated_equipment_instance_alert_periods,
        }
class EquipmentInstanceAlertPeriodRelatedField(RelatedField):
    # pylint: disable=abstract-method
    """Read-only nested representation of an EquipmentInstanceAlertPeriod."""

    def to_representation(self, value):
        """Serialize the alert period to a plain dict of primitives."""
        return {
            'equipment_unique_type_group':
                value.equipment_unique_type_group.name,
            'equipment_instance': value.equipment_instance.name,
            'risk_score_name': value.risk_score_name,
            'threshold': value.threshold,
            'from_date': str(value.from_date),
            'to_date': str(value.to_date),
            'duration': value.duration,
            'cumulative_excess_risk_score':
                value.cumulative_excess_risk_score,
            'approx_average_risk_score': value.approx_average_risk_score,
            'last_risk_score': value.last_risk_score,
            'ongoing': value.ongoing,
            'info': value.info,
            'diagnosis_status': value.diagnosis_status.name,
            'has_associated_equipment_instance_alarm_periods':
                value.has_associated_equipment_instance_alarm_periods,
            'has_associated_equipment_instance_problem_diagnoses':
                value.has_associated_equipment_instance_problem_diagnoses,
        }
class EquipmentInstanceAlarmPeriodSerializer(WritableNestedModelSerializer):
    """Writable serializer for Equipment Instance Alarm Periods.

    Writable relations are addressed by `name` slugs; associated alert
    periods and problem diagnoses are rendered read-only & nested.
    """
    equipment_instance = \
        SlugRelatedField(
            queryset=EquipmentInstance.objects.all(), read_only=False,
            slug_field='name',
            many=False,
            required=True)
    alarm_type = \
        SlugRelatedField(
            queryset=EquipmentProblemType.objects.all(), read_only=False,
            slug_field='name',
            many=False,
            required=True)
    equipment_instance_alert_periods = \
        EquipmentInstanceAlertPeriodRelatedField(
            read_only=True,
            many=True)
    equipment_instance_problem_diagnoses = \
        EquipmentInstanceProblemDiagnosisRelatedField(
            read_only=True,
            many=True)
    class Meta:
        """Metadata."""
        model = EquipmentInstanceAlarmPeriod
        fields = \
            'id', \
            'equipment_instance', \
            'alarm_type', \
            'from_utc_date_time', \
            'to_utc_date_time', \
            'duration_in_days', \
            'has_associated_equipment_instance_alert_periods', \
            'equipment_instance_alert_periods', \
            'has_associated_equipment_instance_problem_diagnoses', \
            'equipment_instance_problem_diagnoses'
class EquipmentInstanceProblemDiagnosisSerializer(
        WritableNestedModelSerializer):
    """Writable serializer for Equipment Instance Problem Diagnoses.

    Writable relations are addressed by `name` slugs; associated alarm
    and alert periods are rendered read-only & nested.
    """
    equipment_instance = \
        SlugRelatedField(
            queryset=EquipmentInstance.objects.all(), read_only=False,
            slug_field='name',
            many=False,
            required=True)
    equipment_problem_types = \
        SlugRelatedField(
            queryset=EquipmentProblemType.objects.all(), read_only=False,
            slug_field='name',
            many=True,
            required=True)
    equipment_instance_alarm_periods = \
        EquipmentInstanceAlarmPeriodRelatedField(
            read_only=True,
            many=True)
    equipment_instance_alert_periods = \
        EquipmentInstanceAlertPeriodRelatedField(
            read_only=True,
            many=True)
    class Meta:
        """Metadata."""
        model = EquipmentInstanceProblemDiagnosis
        fields = \
            'id', \
            'equipment_instance', \
            'from_date', \
            'to_date', \
            'duration', \
            'ongoing', \
            'equipment_problem_types', \
            'has_equipment_problems', \
            'dismissed', \
            'comments', \
            'has_associated_equipment_instance_alarm_periods', \
            'equipment_instance_alarm_periods', \
            'has_associated_equipment_instance_alert_periods', \
            'equipment_instance_alert_periods'
class AlertDiagnosisStatusSerializer(ModelSerializer):
    """Serializer exposing an Alert Diagnosis Status by its name only."""
    class Meta:
        """Metadata."""
        model = AlertDiagnosisStatus
        fields = ('name',)
class EquipmentInstanceAlertPeriodSerializer(ModelSerializer):
    """Serializer for Equipment Instance Alert Periods."""

    # related objects rendered by their `name` slugs
    equipment_unique_type_group = SlugRelatedField(read_only=True,
                                                   slug_field='name',
                                                   many=False)

    equipment_instance = SlugRelatedField(read_only=True,
                                          slug_field='name',
                                          many=False)

    # writable by slug, so PATCH can set the diagnosis status by name
    diagnosis_status = SlugRelatedField(
        queryset=AlertDiagnosisStatus.objects.all(), read_only=False,
        slug_field='name',
        many=False,
        required=False)

    equipment_instance_alarm_periods = \
        EquipmentInstanceAlarmPeriodRelatedField(read_only=True, many=True)

    equipment_instance_problem_diagnoses = \
        EquipmentInstanceProblemDiagnosisRelatedField(read_only=True,
                                                      many=True)

    class Meta:
        """Metadata."""

        model = EquipmentInstanceAlertPeriod

        fields = ('id',
                  'equipment_unique_type_group',
                  'equipment_instance',
                  'risk_score_name',
                  'threshold',
                  'from_date',
                  'to_date',
                  'duration',
                  'cumulative_excess_risk_score',
                  'approx_average_risk_score',
                  'last_risk_score',
                  'ongoing',
                  'info',
                  'diagnosis_status',
                  'has_associated_equipment_instance_alarm_periods',
                  'equipment_instance_alarm_periods',
                  'has_associated_equipment_instance_problem_diagnoses',
                  'equipment_instance_problem_diagnoses')
from datetime import timedelta
from django.db.models import (
Model,
BigAutoField, BooleanField, CharField, DateField, DateTimeField,
FloatField, PositiveSmallIntegerField, IntegerField, TextField,
JSONField,
ForeignKey, ManyToManyField,
PROTECT)
from django.db.models.constraints import UniqueConstraint
from django.db.models.signals import post_save
from django.contrib.postgres.fields import DateRangeField
from psycopg2.extras import DateRange
from aito.iot_mgmt.data.models import (EquipmentUniqueTypeGroup,
EquipmentInstance)
from aito.iot_mgmt.utils import MAX_CHAR_LEN, clean_lower_str
# One calendar day, used when converting period durations to days.
_ONE_DAY_TIME_DELTA = timedelta(days=1)
# Seconds in one day (86400.0).
_ONE_DAY_TIME_DELTA_TOTAL_SECONDS = _ONE_DAY_TIME_DELTA.total_seconds()
# pylint: disable=line-too-long
class EquipmentInstanceDailyRiskScore(Model):
    """One named risk-score value per equipment instance per day."""
    RELATED_NAME = 'equipment_instance_daily_risk_scores'
    RELATED_QUERY_NAME = 'equipment_instance_daily_risk_score'
    # BigAutoField: this table can grow very large (instances x days x scores)
    id = BigAutoField(
        primary_key=True)
    equipment_unique_type_group = \
        ForeignKey(
            to=EquipmentUniqueTypeGroup,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)
    equipment_instance = \
        ForeignKey(
            to=EquipmentInstance,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)
    risk_score_name = \
        CharField(
            blank=False,
            null=False,
            db_index=True,
            max_length=MAX_CHAR_LEN)
    date = \
        DateField(
            blank=False,
            null=False,
            db_index=True)
    risk_score_value = \
        FloatField(
            blank=False,
            null=False)
    class Meta:
        """Metadata."""
        verbose_name = 'Equipment Instance Daily Risk Score'
        verbose_name_plural = 'Equipment Instance Daily Risk Scores'
        # at most one value per (group, instance, score name, day)
        unique_together = \
            'equipment_unique_type_group', \
            'equipment_instance', \
            'risk_score_name', \
            'date'
    def __str__(self):
        """Return string repr."""
        return (f'{self.equipment_unique_type_group.equipment_general_type.name} '  # noqa: E501
                f'{self.equipment_unique_type_group.name} '
                f'#{self.equipment_instance.name} on {self.date}: '
                f'{self.risk_score_name} = {self.risk_score_value:.3g}')
class EquipmentProblemType(Model):
    """Equipment Problem Type (also referenced as alarm & fault type)."""

    name = \
        CharField(
            verbose_name='Equipment Problem Type',
            max_length=MAX_CHAR_LEN,
            blank=False,
            null=False,
            unique=True,
            db_index=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Problem Type'
        verbose_name_plural = 'Equipment Problem Types'
        ordering = ('name',)

    def __str__(self):
        """Return string repr."""
        # fix: original ended with an unbalanced extra quote (...""')
        return f'EqProbTp "{self.name}"'

    def save(self, *args, **kwargs):
        """Strip surrounding whitespace from the name before saving."""
        self.name = self.name.strip()
        super().save(*args, **kwargs)
class EquipmentInstanceDailyPredictedFault(Model):
    """One predicted fault probability per instance/day/fault/predictor."""
    RELATED_NAME = 'equipment_instance_daily_predicted_faults'
    RELATED_QUERY_NAME = 'equipment_instance_daily_predicted_fault'
    # BigAutoField: table can grow very large
    id = BigAutoField(
        primary_key=True)
    equipment_unique_type_group = \
        ForeignKey(
            verbose_name='Equipment Unique Type Group',
            help_text='Equipment Unique Type Group',
            to=EquipmentUniqueTypeGroup,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)
    equipment_instance = \
        ForeignKey(
            verbose_name='Equipment Instance',
            help_text='Equipment Instance',
            to=EquipmentInstance,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)
    date = \
        DateField(
            verbose_name='Date',
            help_text='Date',
            blank=False,
            null=False,
            db_index=True)
    # fault types are modeled as EquipmentProblemType rows
    fault_type = \
        ForeignKey(
            verbose_name='Fault Type',
            help_text='Fault Type',
            to=EquipmentProblemType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)
    fault_predictor_name = \
        CharField(
            verbose_name='Fault Predictor Name',
            help_text='Fault Predictor Name',
            max_length=MAX_CHAR_LEN,
            blank=False,
            null=False,
            db_index=True)
    # probability in [0, 1]; rendered as a percentage in __str__
    predicted_fault_probability = \
        FloatField(
            verbose_name='Predicted Fault Probability',
            help_text='Predicted Fault Probability',
            blank=False,
            null=False)
    class Meta:
        """Metadata."""
        verbose_name = 'Equipment Instance Daily Predicted Fault'
        verbose_name_plural = 'Equipment Instance Daily Predicted Faults'
        # one prediction per (group, instance, day, fault type, predictor)
        constraints = (
            UniqueConstraint(
                fields=('equipment_unique_type_group',
                        'equipment_instance',
                        'date',
                        'fault_type',
                        'fault_predictor_name'),
                name='EquipmentInstanceDailyPredictedFault_unique_together'),
        )
    def __str__(self):
        """Return string repr."""
        return (f'{self.equipment_unique_type_group.equipment_general_type.name} '  # noqa: E501
                f'{self.equipment_unique_type_group.name} '
                f'#{self.equipment_instance.name} on {self.date}: '
                f'{self.fault_type.name.upper()} predicted '
                f'w/ prob {100 * self.predicted_fault_probability:.1f}% '
                f'by {self.fault_predictor_name}')
class EquipmentInstanceAlarmPeriod(Model):
    """Period during which an alarm of a given type is active on an instance.

    ``duration_in_days`` and ``date_range`` are derived in ``save()``;
    ``to_utc_date_time`` is null while the alarm is still ongoing.
    """

    RELATED_NAME = 'equipment_instance_alarm_periods'
    RELATED_QUERY_NAME = 'equipment_instance_alarm_period'

    equipment_instance = ForeignKey(
        to=EquipmentInstance,
        related_name=RELATED_NAME,
        related_query_name=RELATED_QUERY_NAME,
        blank=False,
        null=False,
        on_delete=PROTECT)

    alarm_type = ForeignKey(
        to=EquipmentProblemType,
        related_name=RELATED_NAME,
        related_query_name=RELATED_QUERY_NAME,
        blank=False,
        null=False,
        on_delete=PROTECT)

    # UTC start of the alarm
    from_utc_date_time = DateTimeField(
        blank=False,
        null=False,
        db_index=True)

    # UTC end of the alarm; null while ongoing
    to_utc_date_time = DateTimeField(
        blank=True,
        null=True,
        db_index=True)

    # derived in save(): fractional-day length of the alarm
    duration_in_days = FloatField(
        blank=True,
        null=True,
        db_index=True)

    # derived in save(): date range padded by one day on each side
    date_range = DateRangeField(
        blank=True,
        null=True)

    equipment_instance_alert_periods = ManyToManyField(
        to='EquipmentInstanceAlertPeriod',
        related_name=RELATED_NAME + '_reverse',
        related_query_name=RELATED_QUERY_NAME,
        blank=True)

    has_associated_equipment_instance_alert_periods = BooleanField(
        blank=False,
        null=False,
        default=False,
        db_index=True)

    equipment_instance_problem_diagnoses = ManyToManyField(
        to='EquipmentInstanceProblemDiagnosis',
        related_name=RELATED_NAME + '_reverse',
        related_query_name=RELATED_QUERY_NAME,
        blank=True)

    has_associated_equipment_instance_problem_diagnoses = BooleanField(
        blank=False,
        null=False,
        default=False,
        db_index=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Instance Alarm Period'
        verbose_name_plural = 'Equipment Instance Alarm Periods'

        unique_together = ('equipment_instance',
                           'alarm_type',
                           'from_utc_date_time')

        ordering = ('equipment_instance', '-from_utc_date_time')

    def __str__(self):
        """Return string repr."""
        head = (f'{self.equipment_instance}: {self.alarm_type.name.upper()} '
                f'from {self.from_utc_date_time}')

        if self.to_utc_date_time:
            tail = (f' to {self.to_utc_date_time} '
                    f'({self.duration_in_days:.3f} Days)')
        else:
            tail = ' (ONGOING)'

        return head + tail

    def save(self, *args, **kwargs):
        """Derive duration & padded date range, then save."""
        if self.to_utc_date_time:
            elapsed_seconds = (self.to_utc_date_time -
                               self.from_utc_date_time).total_seconds()
            self.duration_in_days = (elapsed_seconds /
                                     _ONE_DAY_TIME_DELTA_TOTAL_SECONDS)
            upper_date = (self.to_utc_date_time + _ONE_DAY_TIME_DELTA).date()
        else:
            self.duration_in_days = upper_date = None

        # inclusive range, padded one day before the start (and one day after
        # the end, when an end exists)
        self.date_range = DateRange(
            lower=(self.from_utc_date_time - _ONE_DAY_TIME_DELTA).date(),
            upper=upper_date,
            bounds='[]',
            empty=False)

        super().save(*args, **kwargs)
class EquipmentInstanceProblemDiagnosis(Model):
    """Recorded diagnosis of problem(s) on an equipment instance.

    ``date_range`` and ``duration`` are derived in ``save()``;
    ``to_date`` is null while the diagnosis period is still open.
    """

    RELATED_NAME = 'equipment_instance_problem_diagnoses'
    RELATED_QUERY_NAME = 'equipment_instance_problem_diagnosis'

    equipment_instance = ForeignKey(
        to=EquipmentInstance,
        related_name=RELATED_NAME,
        related_query_name=RELATED_QUERY_NAME,
        blank=False,
        null=False,
        on_delete=PROTECT)

    from_date = DateField(
        blank=False,
        null=False,
        db_index=True)

    # null while the diagnosis period is still open
    to_date = DateField(
        blank=True,
        null=True,
        db_index=True)

    # derived in save(): inclusive [from_date, to_date] range
    date_range = DateRangeField(
        blank=True,
        null=True)

    # derived in save(): inclusive day count, or null when open-ended
    duration = IntegerField(
        blank=True,
        null=True)

    equipment_problem_types = ManyToManyField(
        to=EquipmentProblemType,
        related_name=RELATED_NAME,
        related_query_name=RELATED_QUERY_NAME,
        blank=True)

    has_equipment_problems = BooleanField(
        blank=False,
        null=False,
        default=False,
        db_index=True)

    dismissed = BooleanField(
        blank=False,
        null=False,
        default=False,
        db_index=True)

    comments = TextField(
        blank=True,
        null=True)

    equipment_instance_alarm_periods = ManyToManyField(
        to=EquipmentInstanceAlarmPeriod,
        through=(EquipmentInstanceAlarmPeriod
                 .equipment_instance_problem_diagnoses.through),
        related_name=RELATED_NAME + '_reverse',
        related_query_name=RELATED_QUERY_NAME,
        blank=True)

    has_associated_equipment_instance_alarm_periods = BooleanField(
        blank=False,
        null=False,
        default=False,
        db_index=True)

    equipment_instance_alert_periods = ManyToManyField(
        to='EquipmentInstanceAlertPeriod',
        related_name=RELATED_NAME + '_reverse',
        related_query_name=RELATED_QUERY_NAME,
        blank=True)

    has_associated_equipment_instance_alert_periods = BooleanField(
        blank=False,
        null=False,
        default=False,
        db_index=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Instance Problem Diagnosis'
        verbose_name_plural = 'Equipment Instance Problem Diagnoses'

        unique_together = ('equipment_instance', 'from_date')

        ordering = ('dismissed', '-to_date', 'from_date')

    def __str__(self):
        """Return string repr."""
        pieces = [f'{self.equipment_instance} from {self.from_date} ']

        pieces.append(f'to {self.to_date}'
                      if self.to_date
                      else '(ONGOING)')

        if self.equipment_problem_types.count():
            problem_names = ', '.join(
                problem_type.name.upper()
                for problem_type in self.equipment_problem_types.all())
            pieces.append(f': {problem_names}')

        if self.dismissed:
            pieces.append(' (DISMISSED)')

        return ''.join(pieces)

    def save(self, *args, **kwargs):
        """Derive date range & duration, then save."""
        self.date_range = DateRange(
            lower=self.from_date,
            upper=self.to_date,
            bounds='[]',
            empty=False)

        if self.to_date:
            self.duration = (self.to_date - self.from_date).days + 1
        else:
            self.duration = None

        super().save(*args, **kwargs)
class AlertDiagnosisStatus(Model):
    """Diagnosis status assigned to an alert period.

    Statuses are ordered by ``index``; names are normalized via
    ``clean_lower_str`` on save.
    """

    RELATED_NAME = 'alert_diagnosis_statuses'
    RELATED_QUERY_NAME = 'alert_diagnosis_status'

    # ordering index; 0 corresponds to the default 'to_diagnose' status
    index = PositiveSmallIntegerField(
        blank=False,
        null=False,
        unique=True,
        default=0,
        db_index=True)

    name = CharField(
        verbose_name='Alert Diagnosis Status Name',
        max_length=MAX_CHAR_LEN,
        blank=False,
        null=False,
        unique=True,
        default='to_diagnose')

    class Meta:
        """Metadata."""

        verbose_name = 'Alert Diagnosis Status'
        verbose_name_plural = 'Alert Diagnosis Statuses'

        ordering = ('index',)

    def __str__(self):
        """Return string repr."""
        return f'{self.index}. {self.name}'

    def save(self, *args, **kwargs):
        """Normalize the name, then save."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)
class EquipmentInstanceAlertPeriod(Model):
    """Equipment Instance Alert Period.

    Period over which a named risk score for an equipment instance exceeds a
    threshold. ``date_range``, ``duration``, ``approx_average_risk_score``
    and a default ``diagnosis_status`` are derived in ``save()``.
    """
    RELATED_NAME = 'equipment_instance_alert_periods'
    RELATED_QUERY_NAME = 'equipment_instance_alert_period'
    equipment_unique_type_group = \
        ForeignKey(
            to=EquipmentUniqueTypeGroup,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)
    equipment_instance = \
        ForeignKey(
            to=EquipmentInstance,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)
    # name of the risk score this alert is based on (not unique by itself)
    risk_score_name = \
        CharField(
            max_length=MAX_CHAR_LEN,
            blank=False,
            null=False,
            unique=False,
            db_index=True)
    # threshold the risk score must exceed for the alert to hold
    threshold = \
        FloatField(
            blank=False,
            null=False,
            default=0,
            db_index=True)
    from_date = \
        DateField(
            blank=False,
            null=False,
            auto_now=False,
            auto_created=False,
            default=None,
            db_index=True)
    to_date = \
        DateField(
            blank=False,
            null=False,
            auto_now=False,
            auto_created=False,
            default=None,
            db_index=True)
    # derived in save(): inclusive [from_date, to_date]
    date_range = \
        DateRangeField(
            blank=True,
            null=True)
    # derived in save(): inclusive number of days in the period
    duration = \
        IntegerField(
            blank=False,
            null=False,
            default=0)
    # total excess of the risk score over the threshold across the period
    cumulative_excess_risk_score = \
        FloatField(
            blank=False,
            null=False,
            default=0)
    # derived in save(): threshold + (cumulative excess / duration)
    approx_average_risk_score = \
        FloatField(
            blank=False,
            null=False,
            default=0)
    last_risk_score = \
        FloatField(
            blank=False,
            null=False,
            default=0)
    ongoing = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)
    # free-form JSON payload with extra alert details
    info = \
        JSONField(
            blank=True,
            null=True,
            default=dict)
    # back-filled in save() with the AlertDiagnosisStatus of index 0 when unset
    diagnosis_status = \
        ForeignKey(
            to=AlertDiagnosisStatus,
            blank=True,
            null=True,
            on_delete=PROTECT)
    equipment_instance_alarm_periods = \
        ManyToManyField(
            to=EquipmentInstanceAlarmPeriod,
            through=(EquipmentInstanceAlarmPeriod
                     .equipment_instance_alert_periods.through),
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)
    has_associated_equipment_instance_alarm_periods = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)
    equipment_instance_problem_diagnoses = \
        ManyToManyField(
            to=EquipmentInstanceProblemDiagnosis,
            through=(EquipmentInstanceProblemDiagnosis
                     .equipment_instance_alert_periods.through),
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)
    has_associated_equipment_instance_problem_diagnoses = \
        BooleanField(
            blank=False,
            null=False,
            default=False,
            db_index=True)
    class Meta:
        """Metadata."""
        verbose_name = 'Equipment Instance Alert Period'
        verbose_name_plural = 'Equipment Instance Alert Periods'
        # two uniqueness rules: one keyed on the period start, one on its end
        unique_together = \
            ('equipment_unique_type_group',
             'equipment_instance',
             'risk_score_name',
             'threshold',
             'from_date'), \
            ('equipment_unique_type_group',
             'equipment_instance',
             'risk_score_name',
             'threshold',
             'to_date')
        ordering = \
            'diagnosis_status', \
            '-ongoing', \
            'risk_score_name', \
            '-threshold', \
            '-cumulative_excess_risk_score'
    def __str__(self):
        """Return string repr."""
        # NOTE(review): __str__ calls save() to back-fill a missing
        # diagnosis_status so the f-string below never hits None — confirm
        # this write-on-read side effect is intended
        if self.diagnosis_status is None:
            self.save()
        return (
            f'{self.diagnosis_status.name.upper()}: ' +
            ('ONGOING '
             if self.ongoing
             else '') +
            'Alert on ' +
            (f'{self.equipment_unique_type_group.equipment_general_type.name.upper()} '  # noqa: E501
             f'{self.equipment_unique_type_group.name} '
             f'#{self.equipment_instance.name} '
             f'from {self.from_date} to {self.to_date} '
             f'w Approx Avg Risk Score {self.approx_average_risk_score:,.1f} '
             f'(Last: {self.last_risk_score:,.1f}) '
             f'(based on {self.risk_score_name} > {self.threshold}) '
             f'for {self.duration:,} Day(s)')
        )
    def save(self, *args, **kwargs):
        """Derive date range, duration, average risk score & default
        diagnosis status, then save."""
        self.date_range = \
            DateRange(
                lower=self.from_date,
                upper=self.to_date,
                bounds='[]',
                empty=False)
        # inclusive day count
        self.duration = duration = \
            (self.to_date - self.from_date).days + 1
        # average = threshold + mean daily excess over the period
        self.approx_average_risk_score = \
            self.threshold + \
            (self.cumulative_excess_risk_score / duration)
        if self.diagnosis_status is None:
            self.diagnosis_status = \
                AlertDiagnosisStatus.objects.get_or_create(index=0)[0]
        super().save(*args, **kwargs)
def equipment_instance_alarm_period_post_save(
        sender, instance, *args, **kwargs):
    """Post-Save signal."""
    # pylint: disable=unused-argument
    # link the alarm period to all alert periods overlapping its date range
    alert_periods = \
        EquipmentInstanceAlertPeriod.objects.filter(
            equipment_instance=instance.equipment_instance,
            date_range__overlap=instance.date_range)
    instance.equipment_instance_alert_periods.set(alert_periods, clear=False)
    alert_periods.update(
        has_associated_equipment_instance_alarm_periods=True)

    # link it likewise to overlapping problem diagnoses
    problem_diagnoses = \
        EquipmentInstanceProblemDiagnosis.objects.filter(
            equipment_instance=instance.equipment_instance,
            date_range__overlap=instance.date_range)
    instance.equipment_instance_problem_diagnoses.set(problem_diagnoses,
                                                      clear=False)
    problem_diagnoses.update(
        has_associated_equipment_instance_alarm_periods=True)

    # refresh the saved row's own flags via a queryset update
    # (avoids re-triggering this signal)
    EquipmentInstanceAlarmPeriod.objects.filter(pk=instance.pk).update(
        has_associated_equipment_instance_alert_periods=bool(
            alert_periods.count()),
        has_associated_equipment_instance_problem_diagnoses=bool(
            problem_diagnoses.count()))


post_save.connect(
    receiver=equipment_instance_alarm_period_post_save,
    sender=EquipmentInstanceAlarmPeriod,
    weak=True,
    dispatch_uid=None,
    apps=None)
def equipment_instance_alert_period_post_save(
        sender, instance, *args, **kwargs):
    """Post-Save signal."""
    # pylint: disable=unused-argument
    # link the alert period to all alarm periods overlapping its date range
    alarm_periods = \
        EquipmentInstanceAlarmPeriod.objects.filter(
            equipment_instance=instance.equipment_instance,
            date_range__overlap=instance.date_range)
    instance.equipment_instance_alarm_periods.set(alarm_periods, clear=False)
    alarm_periods.update(
        has_associated_equipment_instance_alert_periods=True)

    # link it likewise to overlapping problem diagnoses
    problem_diagnoses = \
        EquipmentInstanceProblemDiagnosis.objects.filter(
            equipment_instance=instance.equipment_instance,
            date_range__overlap=instance.date_range)
    instance.equipment_instance_problem_diagnoses.set(problem_diagnoses,
                                                      clear=False)
    problem_diagnoses.update(
        has_associated_equipment_instance_alert_periods=True)

    # NOTE(review): unlike the alarm-period signal, this only refreshes the
    # problem-diagnosis flag on the saved row (not the alarm-period flag) —
    # confirm that asymmetry is intended
    EquipmentInstanceAlertPeriod.objects.filter(pk=instance.pk).update(
        has_associated_equipment_instance_problem_diagnoses=bool(
            problem_diagnoses.count()))


post_save.connect(
    receiver=equipment_instance_alert_period_post_save,
    sender=EquipmentInstanceAlertPeriod,
    weak=True,
    dispatch_uid=None,
    apps=None)
def equipment_instance_problem_diagnosis_post_save(
        sender, instance, *args, **kwargs):
    """Post-Save signal."""
    # pylint: disable=unused-argument
    # link the diagnosis to all alarm periods overlapping its date range
    alarm_periods = \
        EquipmentInstanceAlarmPeriod.objects.filter(
            equipment_instance=instance.equipment_instance,
            date_range__overlap=instance.date_range)
    instance.equipment_instance_alarm_periods.set(alarm_periods, clear=False)
    alarm_periods.update(
        has_associated_equipment_instance_problem_diagnoses=True)

    # link it likewise to overlapping alert periods
    alert_periods = \
        EquipmentInstanceAlertPeriod.objects.filter(
            equipment_instance=instance.equipment_instance,
            date_range__overlap=instance.date_range)
    instance.equipment_instance_alert_periods.set(alert_periods, clear=False)
    alert_periods.update(
        has_associated_equipment_instance_problem_diagnoses=True)

    # refresh the saved row's own flags via a queryset update
    # (avoids re-triggering this signal)
    EquipmentInstanceProblemDiagnosis.objects.filter(pk=instance.pk).update(
        has_equipment_problems=bool(instance.equipment_problem_types.count()),
        has_associated_equipment_instance_alarm_periods=bool(
            alarm_periods.count()),
        has_associated_equipment_instance_alert_periods=bool(
            alert_periods.count()))


post_save.connect(
    receiver=equipment_instance_problem_diagnosis_post_save,
    sender=EquipmentInstanceProblemDiagnosis,
    weak=True,
    dispatch_uid=None,
    apps=None)
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: remove ``equipment_general_type`` from
    several models and update their default orderings. Do not edit operations
    by hand."""
    dependencies = [
        ('IoT_MaintOps', '0068_auto_20181023_1636'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='blueprint',
            options={'ordering': ('equipment_unique_type_group', '-trained_to_date', '-timestamp')},
        ),
        migrations.AlterModelOptions(
            name='equipmentinstancedailyriskscore',
            options={'ordering': ('equipment_unique_type_group', 'equipment_instance', 'risk_score_name', '-date')},
        ),
        migrations.AlterModelOptions(
            name='equipmentuniquetypegroupdatafieldblueprintbenchmarkmetricprofile',
            options={'ordering': ('equipment_unique_type_group', 'equipment_data_field', '-trained_to_date')},
        ),
        migrations.AlterModelOptions(
            name='equipmentuniquetypegroupdatafieldprofile',
            options={'ordering': ('equipment_unique_type_group', 'equipment_data_field', '-to_date')},
        ),
        migrations.AlterModelOptions(
            name='equipmentuniquetypegroupserviceconfig',
            options={'ordering': ('-active', 'equipment_unique_type_group')},
        ),
        # drop the equipment_general_type FK from each affected model
        migrations.RemoveField(
            model_name='alert',
            name='equipment_general_type',
        ),
        migrations.RemoveField(
            model_name='blueprint',
            name='equipment_general_type',
        ),
        migrations.RemoveField(
            model_name='equipmentinstancedailyriskscore',
            name='equipment_general_type',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupdatafieldblueprintbenchmarkmetricprofile',
            name='equipment_general_type',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupdatafieldprofile',
            name='equipment_general_type',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupserviceconfig',
            name='equipment_general_type',
        ),
    ]
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: create measurement-data-field profile,
    benchmark-metric-profile and service-config models, and make
    ``alert.diagnosis_status`` nullable. Do not edit operations by hand."""
    dependencies = [
        ('IoT_DataMgmt', '0014_auto_20180803_1031'),
        ('IoT_MaintOps', '0032_auto_20180722_2217'),
    ]
    operations = [
        migrations.CreateModel(
            name='EquipmentUniqueTypeGroupMeasurementDataFieldBlueprintBenchmarkMetricProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('to_date', models.DateField(default=None)),
                ('n', models.IntegerField(default=0)),
                ('mae', models.FloatField(blank=True, null=True)),
                ('medae', models.FloatField(blank=True, null=True)),
                ('r2', models.FloatField(blank=True, null=True)),
                ('last_updated', models.DateTimeField()),
                ('equipment_data_field', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_measurement_data_field_measurement_data_field_benchmark_metric_profiles', related_query_name='equipment_unique_type_group_measurement_data_field_measurement_data_field_benchmark_metric_profile', to='IoT_DataMgmt.EquipmentDataField')),
                ('equipment_general_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_measurement_data_field_measurement_data_field_benchmark_metric_profiles', related_query_name='equipment_unique_type_group_measurement_data_field_measurement_data_field_benchmark_metric_profile', to='IoT_DataMgmt.EquipmentGeneralType')),
                ('equipment_unique_type_group', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_measurement_data_field_measurement_data_field_benchmark_metric_profiles', related_query_name='equipment_unique_type_group_measurement_data_field_measurement_data_field_benchmark_metric_profile', to='IoT_DataMgmt.EquipmentUniqueTypeGroup')),
            ],
        ),
        migrations.CreateModel(
            name='EquipmentUniqueTypeGroupMeasurementDataFieldProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('to_date', models.DateField(default=None)),
                ('valid_proportion', models.FloatField(default=0)),
                ('sample_min', models.FloatField(blank=True, null=True)),
                ('outlier_rst_min', models.FloatField(blank=True, null=True)),
                ('sample_quartile', models.FloatField(blank=True, null=True)),
                ('sample_median', models.FloatField(blank=True, null=True)),
                ('sample_3rd_quartile', models.FloatField(blank=True, null=True)),
                ('outlier_rst_max', models.FloatField(blank=True, null=True)),
                ('sample_max', models.FloatField(blank=True, null=True)),
                ('last_updated', models.DateTimeField()),
                ('equipment_data_field', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_measurement_data_field_profiles', related_query_name='equipment_unique_type_group_measurement_data_field_profile', to='IoT_DataMgmt.EquipmentDataField')),
                ('equipment_general_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_measurement_data_field_profiles', related_query_name='equipment_unique_type_group_measurement_data_field_profile', to='IoT_DataMgmt.EquipmentGeneralType')),
                ('equipment_unique_type_group', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_measurement_data_field_profiles', related_query_name='equipment_unique_type_group_measurement_data_field_profile', to='IoT_DataMgmt.EquipmentUniqueTypeGroup')),
            ],
        ),
        migrations.CreateModel(
            name='EquipmentUniqueTypeGroupServiceConfig',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('active', models.BooleanField(default=True)),
                ('equipment_general_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_service_configs', related_query_name='equipment_unique_type_group_service_config', to='IoT_DataMgmt.EquipmentGeneralType')),
                ('equipment_unique_type_group', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_service_configs', related_query_name='equipment_unique_type_group_service_config', to='IoT_DataMgmt.EquipmentUniqueTypeGroup')),
            ],
        ),
        # allow alerts without a diagnosis status
        migrations.AlterField(
            model_name='alert',
            name='diagnosis_status',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='IoT_MaintOps.AlertDiagnosisStatus'),
        ),
    ]
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: recreate the profile models under shorter
    'DataField' names and delete the old 'MeasurementDataField' ones.
    Do not edit operations by hand."""
    dependencies = [
        ('IoT_DataMgmt', '0014_auto_20180803_1031'),
        ('IoT_MaintOps', '0034_auto_20180831_1311'),
    ]
    operations = [
        migrations.CreateModel(
            name='EquipmentUniqueTypeGroupDataFieldBlueprintBenchmarkMetricProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('to_date', models.DateField(default=None)),
                ('n', models.IntegerField(default=0)),
                ('mae', models.FloatField(blank=True, null=True)),
                ('medae', models.FloatField(blank=True, null=True)),
                ('r2', models.FloatField(blank=True, null=True)),
                ('last_updated', models.DateTimeField()),
                ('equipment_data_field', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_data_field_measurement_data_field_benchmark_metric_profiles', related_query_name='equipment_unique_type_group_data_field_measurement_data_field_benchmark_metric_profile', to='IoT_DataMgmt.EquipmentDataField')),
                ('equipment_general_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_data_field_measurement_data_field_benchmark_metric_profiles', related_query_name='equipment_unique_type_group_data_field_measurement_data_field_benchmark_metric_profile', to='IoT_DataMgmt.EquipmentGeneralType')),
                ('equipment_unique_type_group', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_data_field_measurement_data_field_benchmark_metric_profiles', related_query_name='equipment_unique_type_group_data_field_measurement_data_field_benchmark_metric_profile', to='IoT_DataMgmt.EquipmentUniqueTypeGroup')),
            ],
        ),
        migrations.CreateModel(
            name='EquipmentUniqueTypeGroupDataFieldProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('to_date', models.DateField(default=None)),
                ('valid_proportion', models.FloatField(default=0)),
                ('distinct_values', django.contrib.postgres.fields.jsonb.JSONField(default=list)),
                ('n_distinct_values', models.IntegerField(default=0)),
                ('sample_min', models.FloatField(blank=True, null=True)),
                ('outlier_rst_min', models.FloatField(blank=True, null=True)),
                ('sample_quartile', models.FloatField(blank=True, null=True)),
                ('sample_median', models.FloatField(blank=True, null=True)),
                ('sample_3rd_quartile', models.FloatField(blank=True, null=True)),
                ('outlier_rst_max', models.FloatField(blank=True, null=True)),
                ('sample_max', models.FloatField(blank=True, null=True)),
                ('last_updated', models.DateTimeField()),
                ('equipment_data_field', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_data_field_profiles', related_query_name='equipment_unique_type_group_data_field_profile', to='IoT_DataMgmt.EquipmentDataField')),
                ('equipment_general_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_data_field_profiles', related_query_name='equipment_unique_type_group_data_field_profile', to='IoT_DataMgmt.EquipmentGeneralType')),
                ('equipment_unique_type_group', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_unique_type_group_data_field_profiles', related_query_name='equipment_unique_type_group_data_field_profile', to='IoT_DataMgmt.EquipmentUniqueTypeGroup')),
            ],
        ),
        # detach FKs of the old models before deleting them
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmeasurementdatafieldblueprintbenchmarkmetricprofile',
            name='equipment_data_field',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmeasurementdatafieldblueprintbenchmarkmetricprofile',
            name='equipment_general_type',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmeasurementdatafieldblueprintbenchmarkmetricprofile',
            name='equipment_unique_type_group',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmeasurementdatafieldprofile',
            name='equipment_data_field',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmeasurementdatafieldprofile',
            name='equipment_general_type',
        ),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmeasurementdatafieldprofile',
            name='equipment_unique_type_group',
        ),
        migrations.DeleteModel(
            name='EquipmentUniqueTypeGroupMeasurementDataFieldBlueprintBenchmarkMetricProfile',
        ),
        migrations.DeleteModel(
            name='EquipmentUniqueTypeGroupMeasurementDataFieldProfile',
        ),
    ]
from django.db import migrations
class Migration(migrations.Migration):
    """Remove PPP.

    Auto-generated migration: strips the FKs/M2Ms of the Blueprint,
    benchmark-metric-profile, monitored-data-field-config and service-config
    models, then deletes those models. Do not edit operations by hand.
    """
    dependencies = [
        ('IoT_MaintOps', '0114_remove_last_updated_fields')
    ]
    operations = [
        # detach relations before the DeleteModel operations below
        migrations.AlterUniqueTogether(
            name=('equipmentuniquetypegroupdatafield'
                  'blueprintbenchmarkmetricprofile'),
            unique_together=None),
        migrations.RemoveField(
            model_name=('equipmentuniquetypegroupdatafield'
                        'blueprintbenchmarkmetricprofile'),
            name='equipment_data_field'),
        migrations.RemoveField(
            model_name=('equipmentuniquetypegroupdatafield'
                        'blueprintbenchmarkmetricprofile'),
            name='equipment_unique_type_group'),
        migrations.AlterUniqueTogether(
            name='equipmentuniquetypegroupmonitoreddatafieldconfig',
            unique_together=None),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmonitoreddatafieldconfig',
            name='equipment_unique_type_group_service_config'),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmonitoreddatafieldconfig',
            name='manually_excluded_equipment_data_fields'),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmonitoreddatafieldconfig',
            name='manually_included_equipment_data_fields'),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupmonitoreddatafieldconfig',
            name='monitored_equipment_data_field'),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupserviceconfig',
            name='equipment_unique_type_group'),
        migrations.RemoveField(
            model_name='equipmentuniquetypegroupserviceconfig',
            name='global_excluded_equipment_data_fields'),
        migrations.DeleteModel(
            name='Blueprint'),
        migrations.DeleteModel(
            name=('EquipmentUniqueTypeGroupDataField'
                  'BlueprintBenchmarkMetricProfile')),
        migrations.DeleteModel(
            name='EquipmentUniqueTypeGroupMonitoredDataFieldConfig'),
        migrations.DeleteModel(
            name='EquipmentUniqueTypeGroupServiceConfig')
    ]
import django.contrib.postgres.fields.ranges
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: rename alert/problem-diagnosis relation
    fields, add alarm-period association flags, and create the
    EquipmentInstanceAlarmPeriod model. Do not edit operations by hand."""
    dependencies = [
        ('IoT_DataMgmt', '0063_auto_20190511_0024'),
        ('IoT_MaintOps', '0094_auto_20190513_2248'),
    ]
    operations = [
        migrations.RenameField(
            model_name='alert',
            old_name='equipment_problem_diagnoses',
            new_name='equipment_instance_problem_diagnoses',
        ),
        migrations.RenameField(
            model_name='alert',
            old_name='has_associated_equipment_problem_diagnoses',
            new_name='has_associated_equipment_instance_problem_diagnoses',
        ),
        migrations.RenameField(
            model_name='equipmentinstanceproblemdiagnosis',
            old_name='alerts',
            new_name='alert_periods',
        ),
        migrations.RenameField(
            model_name='equipmentinstanceproblemdiagnosis',
            old_name='has_associated_alerts',
            new_name='has_associated_equipment_instance_alert_periods',
        ),
        migrations.AddField(
            model_name='alert',
            name='has_associated_equipment_instance_alarm_periods',
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AddField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='has_associated_equipment_instance_alarm_periods',
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AlterField(
            model_name='alert',
            name='equipment_instance',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_instance_alert_periods', related_query_name='equipment_instance_alert_period', to='IoT_DataMgmt.EquipmentInstance'),
        ),
        migrations.AlterField(
            model_name='alert',
            name='equipment_unique_type_group',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_instance_alert_periods', related_query_name='equipment_instance_alert_period', to='IoT_DataMgmt.EquipmentUniqueTypeGroup'),
        ),
        migrations.CreateModel(
            name='EquipmentInstanceAlarmPeriod',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('from_utc_date_time', models.DateTimeField(db_index=True)),
                ('to_utc_date_time', models.DateTimeField(blank=True, db_index=True, null=True)),
                ('duration_in_days', models.FloatField(blank=True, db_index=True, null=True)),
                ('date_range', django.contrib.postgres.fields.ranges.DateRangeField(blank=True, null=True)),
                ('has_associated_equipment_instance_alert_periods', models.BooleanField(db_index=True, default=False)),
                ('has_associated_equipment_instance_problem_diagnoses', models.BooleanField(db_index=True, default=False)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('alarm_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_instance_alarm_periods', related_query_name='equipment_instance_alarm_period', to='IoT_MaintOps.EquipmentProblemType')),
                ('equipment_instance', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='equipment_instance_alarm_periods', related_query_name='equipment_instance_alarm_period', to='IoT_DataMgmt.EquipmentInstance')),
                ('equipment_instance_alert_periods', models.ManyToManyField(blank=True, related_name='equipment_instance_alarm_periods', related_query_name='equipment_instance_alarm_period', to='IoT_MaintOps.Alert')),
                ('equipment_instance_problem_diagnoses', models.ManyToManyField(blank=True, related_name='equipment_instance_alarm_periods', related_query_name='equipment_instance_alarm_period', to='IoT_MaintOps.EquipmentInstanceProblemDiagnosis')),
            ],
            options={
                'ordering': ('equipment_instance', 'alarm_type', 'from_utc_date_time'),
                'unique_together': {('equipment_instance', 'alarm_type', 'from_utc_date_time', 'to_utc_date_time'), ('equipment_instance', 'alarm_type', 'from_utc_date_time')},
            },
        ),
        migrations.AddField(
            model_name='alert',
            name='alarm_periods',
            field=models.ManyToManyField(blank=True, to='IoT_MaintOps.EquipmentInstanceAlarmPeriod'),
        ),
        migrations.AddField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='alarm_periods',
            field=models.ManyToManyField(blank=True, to='IoT_MaintOps.EquipmentInstanceAlarmPeriod'),
        ),
    ]
from django.db import migrations
class Migration(migrations.Migration):
    """Verbose names.

    Auto-generated migration: set human-readable verbose names (and
    orderings) on the maintenance-ops models. Do not edit operations by hand.
    """
    dependencies = [
        ('IoT_MaintOps', '0116_rename_alert_equipmentinstancealertperiod')
    ]
    operations = [
        migrations.AlterModelOptions(
            name='alertdiagnosisstatus',
            options={'ordering': ('index',),
                     'verbose_name': 'Alert Diagnosis Status',
                     'verbose_name_plural': 'Alert Diagnosis Statuses'}),
        migrations.AlterModelOptions(
            name='equipmentinstancealarmperiod',
            options={'ordering': ('equipment_instance', '-from_utc_date_time'),
                     'verbose_name': 'Equipment Instance Alarm Period',
                     'verbose_name_plural': 'Equipment Instance Alarm Periods'}
        ),
        migrations.AlterModelOptions(
            name='equipmentinstancealertperiod',
            options={'ordering': ('diagnosis_status',
                                  '-ongoing',
                                  'risk_score_name',
                                  '-threshold',
                                  '-cumulative_excess_risk_score'),
                     'verbose_name': 'Equipment Instance Alert Period',
                     'verbose_name_plural': 'Equipment Instance Alert Periods'}
        ),
        migrations.AlterModelOptions(
            name='equipmentinstancedailyriskscore',
            options={
                'verbose_name': 'Equipment Instance Daily Risk Score',
                'verbose_name_plural': 'Equipment Instance Daily Risk Scores'
            }),
        migrations.AlterModelOptions(
            name='equipmentinstanceproblemdiagnosis',
            options={
                'ordering': ('dismissed', '-to_date', 'from_date'),
                'verbose_name': 'Equipment Instance Problem Diagnosis',
                'verbose_name_plural': 'Equipment Instance Problem Diagnoses'
            }),
        migrations.AlterModelOptions(
            name='equipmentproblemtype',
            options={'ordering': ('name',),
                     'verbose_name': 'Equipment Problem Type',
                     'verbose_name_plural': 'Equipment Problem Types'}),
        migrations.AlterModelOptions(
            name='globalconfig',
            options={'ordering': ('key',),
                     'verbose_name': 'Global Config',
                     'verbose_name_plural': 'Global Configs'})
    ]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.