Dataset schema (Min/Max are string or list lengths for `stringlengths`/`listlengths` columns and value ranges for numeric columns; ⌀ marks columns that contain nulls):

| Column | Dtype | Min | Max | Nullable |
|---|---|---|---|---|
| hexsha | stringlengths | 40 | 40 | |
| size | int64 | 5 | 2.06M | |
| ext | stringclasses | 10 values | | |
| lang | stringclasses | 1 value | | |
| max_stars_repo_path | stringlengths | 3 | 248 | |
| max_stars_repo_name | stringlengths | 5 | 125 | |
| max_stars_repo_head_hexsha | stringlengths | 40 | 78 | |
| max_stars_repo_licenses | listlengths | 1 | 10 | |
| max_stars_count | int64 | 1 | 191k | ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 | 24 | ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 | 24 | ⌀ |
| max_issues_repo_path | stringlengths | 3 | 248 | |
| max_issues_repo_name | stringlengths | 5 | 125 | |
| max_issues_repo_head_hexsha | stringlengths | 40 | 78 | |
| max_issues_repo_licenses | listlengths | 1 | 10 | |
| max_issues_count | int64 | 1 | 67k | ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 | 24 | ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 | 24 | ⌀ |
| max_forks_repo_path | stringlengths | 3 | 248 | |
| max_forks_repo_name | stringlengths | 5 | 125 | |
| max_forks_repo_head_hexsha | stringlengths | 40 | 78 | |
| max_forks_repo_licenses | listlengths | 1 | 10 | |
| max_forks_count | int64 | 1 | 105k | ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 | 24 | ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 | 24 | ⌀ |
| content | stringlengths | 5 | 2.06M | |
| avg_line_length | float64 | 1 | 1.02M | |
| max_line_length | int64 | 3 | 1.03M | |
| alphanum_fraction | float64 | 0 | 1 | |
| count_classes | int64 | 0 | 1.6M | |
| score_classes | float64 | 0 | 1 | |
| count_generators | int64 | 0 | 651k | |
| score_generators | float64 | 0 | 1 | |
| count_decorators | int64 | 0 | 990k | |
| score_decorators | float64 | 0 | 1 | |
| count_async_functions | int64 | 0 | 235k | |
| score_async_functions | float64 | 0 | 1 | |
| count_documentation | int64 | 0 | 1.04M | |
| score_documentation | float64 | 0 | 1 | |
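The columns above are what the Hugging Face `datasets` viewer reports for this split. As a minimal sketch of how to inspect them programmatically, assuming the data is published in the standard `datasets` format (the id `user/python-files-annotated` is a hypothetical placeholder, since the actual dataset path is not named in this excerpt):

```python
# Minimal sketch: inspect the schema above with the Hugging Face `datasets` library.
# NOTE: "user/python-files-annotated" is a hypothetical placeholder id.
from datasets import load_dataset

ds = load_dataset("user/python-files-annotated", split="train")
print(ds.column_names)  # hexsha, size, ext, lang, ..., score_documentation
print(ds[0]["max_stars_repo_name"], ds[0]["max_stars_count"])
```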
Record 1: tests/tests.py from ActivityWatch/activitywatch-old

| Field | Value |
|---|---|
| hexsha | 815eed7c81aa394ca156f60562a609ed561c8f68 |
| size | 4,253 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/tests.py |
| max_stars_repo_name | ActivityWatch/activitywatch-old |
| max_stars_repo_head_hexsha | e69b071ff701368cee7bac5d01e5936c200e58be |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 4 |
| max_stars_repo_stars_event_min_datetime | 2017-01-30T16:27:18.000Z |
| max_stars_repo_stars_event_max_datetime | 2017-09-28T19:14:13.000Z |
| max_issues_repo_path | tests/tests.py |
| max_issues_repo_name | ActivityWatch/activitywatch-old |
| max_issues_repo_head_hexsha | e69b071ff701368cee7bac5d01e5936c200e58be |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/tests.py |
| max_forks_repo_name | ActivityWatch/activitywatch-old |
| max_forks_repo_head_hexsha | e69b071ff701368cee7bac5d01e5936c200e58be |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 2 |
| max_forks_repo_forks_event_min_datetime | 2020-06-22T07:11:51.000Z |
| max_forks_repo_forks_event_max_datetime | 2020-12-11T02:46:22.000Z |

content:

```python
from copy import copy
from itertools import groupby
import unittest
from datetime import datetime, timedelta
from typing import List
from activitywatch.base import Watcher, Activity, Logger
from activitywatch.settings import Settings
from activitywatch.utils import floor_datetime, ceil_datetime
from activitywatch.filters.split import split_by_interval, overlaps
from activitywatch.filters.chunk import chunk_by_tags


class MockWatcher(Watcher):
    def run(self):
        pass

    def wait(self):
        pass

    identifier = "mock"

    def __init__(self):
        settings = Settings()
        settings["watchers"][self.identifier] = {}
        Watcher.__init__(self)


class MockLogger(Logger):
    def log(self, activities: List[Activity]):
        pass

    def wait(self):
        pass

    identifier = "mock"

    def __init__(self):
        settings = Settings()
        settings["loggers"][self.identifier] = {}
        Logger.__init__(self)


class LoggerWatcherTest(unittest.TestCase):
    def test_activity_flow(self):
        watcher = MockWatcher()
        logger = MockLogger()
        logger.add_watcher(watcher)
        watcher.dispatch_activity(Activity("test", datetime.now()-timedelta(days=1), datetime.now()))
        activities = logger.flush_activities()
        self.assertTrue(len(activities) == 1)
        activities = logger.flush_activities()
        self.assertTrue(len(activities) == 0)


class ActivityTest(unittest.TestCase):
    def test_to_zenobase(self):
        TAG = "something"
        activity = Activity(TAG, started_at=datetime.now(), ended_at=datetime.now())
        event = activity.to_zenobase_event()
        self.assertTrue(event["tag"] == TAG)


class SettingsTest(unittest.TestCase):
    def test_instance(self):
        self.assertIs(Settings(), Settings())


HOUR = timedelta(hours=1)


class SplitActivityTest(unittest.TestCase):
    def test_by_hour(self):
        dt = datetime(2015, 1, 1, 8, 30)
        td = timedelta(hours=3, minutes=23)
        activity = Activity([], dt, dt+td)
        split = split_by_interval([copy(activity), copy(activity)], interval=HOUR)
        self.assertEqual(len(split), 8)

        activity.end += -td + timedelta(minutes=2)
        split = split_by_interval([copy(activity)], interval=HOUR)
        self.assertEqual(len(split), 1)

    def test_ceil_hour(self):
        def ceil_hour(td):
            return ceil_datetime(td, td=timedelta(hours=1))
        self.assertEqual(ceil_hour(datetime(2015, 1, 1, 6, 2)), datetime(2015, 1, 1, 7))
        self.assertEqual(ceil_hour(datetime(2015, 1, 1, 6, 2)), ceil_hour(datetime(2015, 1, 1, 6, 58)))
        self.assertNotEqual(ceil_hour(datetime(2015, 1, 1, 5, 2)), ceil_hour(datetime(2015, 1, 1, 6, 4)))

    def test_floor_hour(self):
        def floor_hour(td):
            return floor_datetime(td, td=timedelta(hours=1))
        self.assertEqual(floor_hour(datetime(2015, 1, 1, 6, 2)), datetime(2015, 1, 1, 6))
        self.assertEqual(floor_hour(datetime(2015, 1, 1, 6, 2)), floor_hour(datetime(2015, 1, 1, 6, 5)))

    def test_overlaps_hour(self):
        def overlaps_hours(td):
            return overlaps(td, interval=timedelta(hours=1))
        activity = Activity([], datetime(2015, 1, 1, 5, 23), datetime(2015, 1, 1, 6, 6))
        self.assertTrue(overlaps_hours(activity))
        activity = Activity([], datetime(2015, 1, 1, 5, 23), datetime(2015, 1, 1, 6, 0, 0, 1))
        self.assertTrue(overlaps_hours(activity))
        activity = Activity([], datetime(2015, 1, 1, 6, 30), datetime(2015, 1, 1, 6, 59))
        self.assertFalse(overlaps_hours(activity))


class ChunkTest(unittest.TestCase):
    def test_chunk_by_tags(self):
        interval = timedelta(minutes=5)
        start = floor_datetime(datetime.now(), interval)
        activities = [Activity(["test"], start, start+interval*0.5),
                      Activity(["test2"], start+interval, start+interval*1.5),
                      Activity(["test"], start+interval*2, start+interval*2.5)]
        self.assertEqual(3, len(activities))
        activities.append(Activity(["test"], start+interval, start+interval*1.5))
        self.assertEqual(4, len(activities))
        self.assertEqual(2, len(chunk_by_tags(activities)))
```

Per-file statistics for this record:

| Statistic | Value |
|---|---|
| avg_line_length | 32.715385 |
| max_line_length | 105 |
| alphanum_fraction | 0.652481 |
| count_classes | 3,786 |
| score_classes | 0.890195 |
| count_generators | 0 |
| score_generators | 0 |
| count_decorators | 0 |
| score_decorators | 0 |
| count_async_functions | 0 |
| score_async_functions | 0 |
| count_documentation | 78 |
| score_documentation | 0.01834 |
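The score values in this record are consistent with score_* = count_* / size: 3786 / 4253 ≈ 0.890195 (score_classes) and 78 / 4253 ≈ 0.01834 (score_documentation), which suggests the count_* columns count characters rather than occurrences. As a minimal sketch, the three layout statistics can be recomputed from `content` (the dataset authors' exact definitions are an assumption; this uses the obvious ones):

```python
# Minimal sketch: recompute the layout statistics above from a file's text.
# The exact definitions used by the dataset authors are an assumption here.
def layout_stats(content: str) -> dict:
    lines = content.split("\n")
    return {
        "avg_line_length": sum(len(line) for line in lines) / len(lines),
        "max_line_length": max(len(line) for line in lines),
        "alphanum_fraction": sum(ch.isalnum() for ch in content) / len(content),
    }
```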
Record 2: lang/it/basic_vocabulary_it.py from gtoffoli/commons-cops

| Field | Value |
|---|---|
| hexsha | 815ef73f8d3016d9f5e536e42e63dc78b0cafca1 |
| size | 254,077 |
| ext | py |
| lang | Python |
| max_stars_repo_path | lang/it/basic_vocabulary_it.py |
| max_stars_repo_name | gtoffoli/commons-cops |
| max_stars_repo_head_hexsha | e4b1f556c550e25bb2e6a9eabe8db963877c08d3 |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | 5 |
| max_stars_repo_stars_event_min_datetime | 2016-11-13T02:41:02.000Z |
| max_stars_repo_stars_event_max_datetime | 2020-01-20T10:01:26.000Z |
| max_issues_repo_path | lang/it/basic_vocabulary_it.py |
| max_issues_repo_name | gtoffoli/commons |
| max_issues_repo_head_hexsha | 8b51a08a37c6d0b38fd4ecde82c20036c2dc168f |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | lang/it/basic_vocabulary_it.py |
| max_forks_repo_name | gtoffoli/commons |
| max_forks_repo_head_hexsha | 8b51a08a37c6d0b38fd4ecde82c20036c2dc168f |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content (truncated):

```python
voc_it = [
['a', 'noun', 'c'],
['a', 'preposition', 'a'],
['abbagliante', 'pres_part', 'c'],
['abbagliante', 'adjective', 'c'],
['abbagliante', 'noun', 'c'],
['abbaiare', 'verb', 'c'],
['abbandonare', 'verb', 'a'],
['abbandonato', 'past_part', 'b'],
['abbandonato', 'adjective', 'b'],
['abbandono', 'noun', 'b'],
['abbassare', 'verb', 'a'],
['abbasso', 'adverb', 'c'],
['abbasso', 'exclamation', 'c'],
['abbastanza', 'adverb', 'a'],
['abbattere', 'verb', 'b'],
['abbeverare', 'verb', 'c'],
['abbigliamento', 'noun', 'b'],
['abbinare', 'verb', 'b'],
['abbonamento', 'noun', 'b'],
['abbonare', 'verb', 'c'],
['abbondante', 'pres_part', 'b'],
['abbondante', 'adjective', 'b'],
['abbondare', 'verb', 'c'],
['abbottonare', 'verb', 'c'],
['abbracciare', 'verb', 'a'],
['abbraccio', 'noun', 'b'],
['abbreviare', 'verb', 'c'],
['abbronzare', 'verb', 'c'],
['abete', 'noun', 'c'],
['abile', 'adjective', 'b'],
['abilità', 'noun', 'b'],
['abisso', 'noun', 'b'],
['abitante', 'pres_part', 'b'],
['abitante', 'adjective', 'b'],
['abitante', 'noun', 'b'],
['abitare', 'verb', 'a'],
['abitare', 'noun', 'a'],
['abitazione', 'noun', 'b'],
['abito', 'noun', 'a'],
['abituale', 'adjective', 'b'],
['abituare', 'verb', 'a'],
['abitudine', 'noun', 'a'],
['abolire', 'verb', 'b'],
['abortire', 'verb', 'c'],
['aborto', 'noun', 'c'],
['abruzzese', 'adjective', 'c'],
['abruzzese', 'noun', 'c'],
['abusare', 'verb', 'c'],
['abuso', 'noun', 'b'],
['acca', 'noun', 'c'],
['accademia', 'noun', 'b'],
['accademico', 'adjective', 'b'],
['accademico', 'noun', 'b'],
['accadere', 'verb', 'a'],
['accampamento', 'noun', 'c'],
['accanto', 'adverb', 'a'],
['accappatoio', 'noun', 'c'],
['accarezzare', 'verb', 'b'],
['accattone', 'noun', 'c'],
['accavallare', 'verb', 'c'],
['accecare', 'verb', 'c'],
['accedere', 'verb', 'b'],
['accelerare', 'verb', 'b'],
['acceleratore', 'adjective', 'c'],
['acceleratore', 'noun', 'c'],
['accelerazione', 'noun', 'b'],
['accendere', 'verb', 'a'],
['accendino', 'noun', 'c'],
['accennare', 'verb', 'b'],
['accenno', 'noun', 'c'],
['accentare', 'verb', 'c'],
['accertamento', 'noun', 'b'],
['accertare', 'verb', 'b'],
['acceso', 'past_part', 'b'],
['acceso', 'adjective', 'b'],
['accesso', 'noun', 'a'],
['accessorio', 'adjective', 'b'],
['accessorio', 'noun', 'b'],
['accetta', 'noun', 'c'],
['accettabile', 'adjective', 'b'],
['accettare', 'verb', 'a'],
['acchiappare', 'verb', 'c'],
['acciacco', 'noun', 'c'],
['acciaio', 'noun', 'b'],
['accidente', 'noun', 'b'],
['acciuga', 'noun', 'c'],
['accogliente', 'pres_part', 'c'],
['accogliente', 'adjective', 'c'],
['accoglienza', 'noun', 'b'],
['accogliere', 'verb', 'a'],
['accoltellare', 'verb', 'c'],
['accomodare', 'verb', 'b'],
['accompagnare', 'verb', 'a'],
['acconsentire', 'verb', 'c'],
['accontentare', 'verb', 'b'],
['accorciare', 'verb', 'c'],
['accordare', 'verb', 'b'],
['accordo', 'noun', 'a'],
['accorgersi', 'verb', 'a'],
['accorrere', 'verb', 'c'],
['accostare', 'verb', 'b'],
['accudire', 'verb', 'c'],
['accumulare', 'verb', 'b'],
['accumulatore', 'adjective', 'c'],
['accumulatore', 'noun', 'c'],
['accurato', 'past_part', 'b'],
['accurato', 'adjective', 'b'],
['accusa', 'noun', 'a'],
['accusare', 'verb', 'a'],
['accento', 'noun', 'b'],
['acerbo', 'adjective', 'c'],
['aceto', 'noun', 'c'],
['acido', 'adjective', 'b'],
['acido', 'noun', 'b'],
['acqua', 'noun', 'a'],
['acquarello', 'noun', 'c'],
['acquario', 'noun', 'c'],
['acquasanta', 'noun', 'c'],
['acquisire', 'verb', 'b'],
['acquisizione', 'noun', 'b'],
['acquistare', 'verb', 'a'],
['acquisto', 'noun', 'a'],
['acquolina', 'noun', 'c'],
['acrobata', 'noun', 'c'],
['acuto', 'adjective', 'b'],
['acuto', 'noun', 'b'],
['adattare', 'verb', 'b'],
['adattatore', 'noun', 'c'],
['adatto', 'adjective', 'a'],
['addetto', 'past_part', 'b'],
['addetto', 'adjective', 'b'],
['addetto', 'noun', 'b'],
['addio', 'exclamation', 'b'],
['addio', 'noun', 'b'],
['addirittura', 'adverb', 'a'],
['addizione', 'noun', 'c'],
['addobbare', 'verb', 'c'],
['addolcire', 'verb', 'c'],
['addomesticare', 'verb', 'c'],
['addormentarsi', 'verb', 'b'],
['addormentato', 'past_part', 'c'],
['addormentato', 'adjective', 'c'],
['addossare', 'verb', 'a'],
['addosso', 'adverb', 'c'],
['addosso', 'exclamation', 'c'],
['addrizzare', 'verb', 'c'],
['adeguare', 'verb', 'b'],
['adeguato', 'past_part', 'b'],
['adeguato', 'adjective', 'b'],
['adeguato', 'noun', 'b'],
['aderente', 'pres_part', 'c'],
['aderente', 'adjective', 'c'],
['aderente', 'noun', 'c'],
['aderire', 'verb', 'b'],
['adesione', 'noun', 'b'],
['adesso', 'adverb', 'a'],
['adolescente', 'adjective', 'a'],
['adolescente', 'noun', 'a'],
['adolescenza', 'noun', 'b'],
['adoperare', 'verb', 'b'],
['adorare', 'verb', 'a'],
['adottare', 'verb', 'a'],
['adozione', 'noun', 'b'],
['adriatico', 'adjective', 'c'],
['adulto', 'adjective', 'a'],
['adulto', 'noun', 'a'],
['aereo', 'adjective', 'a'],
['aereo', 'noun', 'a'],
['aereo', 'noun', 'b'],
['aeroplano', 'noun', 'c'],
['aeroporto', 'noun', 'b'],
['afa', 'noun', 'c'],
['affacciare', 'verb', 'b'],
['affamare', 'verb', 'c'],
['affamato', 'past_part', 'c'],
['affamato', 'adjective', 'c'],
['affamato', 'noun', 'c'],
['affannarsi', 'verb', 'c'],
['affannato', 'past_part', 'c'],
['affannato', 'adjective', 'c'],
['affanno', 'noun', 'c'],
['affare', 'noun', 'a'],
['affascinante', 'pres_part', 'b'],
['affascinante', 'adjective', 'b'],
['affascinare', 'verb', 'b'],
['affaticare', 'verb', 'c'],
['affatto', 'adverb', 'a'],
['affermare', 'verb', 'a'],
['affermazione', 'noun', 'b'],
['afferrare', 'verb', 'b'],
['affettare', 'verb', 'c'],
['affettato', 'past_part', 'c'],
['affettato', 'adjective', 'c'],
['affettato', 'noun', 'c'],
['affetto', 'noun', 'b'],
['affetto', 'adjective', 'b'],
['affettuoso', 'adjective', 'b'],
['affezionato', 'past_part', 'c'],
['affezionato', 'adjective', 'c'],
['affiancare', 'verb', 'b'],
['affidamento', 'noun', 'b'],
['affidare', 'verb', 'a'],
['affilato', 'past_part', 'c'],
['affilato', 'adjective', 'c'],
['affinché', 'conjunction', 'b'],
['affittare', 'verb', 'b'],
['affitto', 'noun', 'b'],
['affogare', 'verb', 'c'],
['affollare', 'verb', 'c'],
['affondare', 'verb', 'b'],
['affresco', 'noun', 'b'],
['affrontare', 'verb', 'a'],
['affumicare', 'verb', 'c'],
['africano', 'adjective', 'b'],
['africano', 'noun', 'b'],
['agenda', 'noun', 'b'],
['agente', 'pres_part', 'a'],
['agente', 'adjective', 'a'],
['agente', 'noun', 'a'],
['agenzia', 'noun', 'a'],
['agganciare', 'verb', 'b'],
['aggettivo', 'noun', 'b'],
['aggiornamento', 'noun', 'b'],
['aggiornare', 'verb', 'b'],
['aggirare', 'verb', 'b'],
['aggiungere', 'verb', 'a'],
['aggiustare', 'verb', 'b'],
['aggrapparsi', 'verb', 'b'],
['aggravare', 'verb', 'c'],
['aggredire', 'verb', 'b'],
['aggressione', 'noun', 'b'],
['aggressivo', 'adjective', 'b'],
['agiato', 'past_part', 'c'],
['agiato', 'adjective', 'c'],
['agile', 'adjective', 'c'],
['agio', 'noun', 'b'],
['agire', 'verb', 'a'],
['agitare', 'verb', 'b'],
['agitazione', 'noun', 'b'],
['aglio', 'noun', 'c'],
['agnello', 'noun', 'b'],
['ago', 'noun', 'b'],
['agonia', 'noun', 'c'],
['agosto', 'noun', 'a'],
['agricolo', 'adjective', 'b'],
['agricoltore', 'noun', 'c'],
['agricoltura', 'noun', 'b'],
['agrume', 'noun', 'c'],
['aguzzare', 'verb', 'c'],
['aguzzo', 'adjective', 'c'],
['aiuola', 'noun', 'c'],
['aiutare', 'verb', 'a'],
['aiuto', 'noun', 'a'],
['aiuto', 'exclamation', 'a'],
['ala', 'noun', 'a'],
['alba', 'noun', 'a'],
['albanese', 'adjective', 'b'],
['albanese', 'noun', 'b'],
['albergo', 'noun', 'a'],
['albero', 'noun', 'a'],
['albicocca', 'noun', 'c'],
['albicocca', 'adjective', 'c'],
['album', 'noun', 'a'],
['alcol', 'noun', 'b'],
['alcuno', 'adjective', 'a'],
['alcuno', 'pronoun', 'a'],
['alfabeto', 'noun', 'c'],
['alga', 'noun', 'c'],
['algerino', 'adjective', 'c'],
['algerino', 'noun', 'c'],
['alieno', 'adjective', 'b'],
['alieno', 'noun', 'b'],
['alimentare', 'adjective', 'b'],
['alimentare', 'noun', 'b'],
['alimentare', 'verb', 'b'],
['alimentari', 'noun', 'c'],
['alimentazione', 'noun', 'b'],
['alimento', 'noun', 'b'],
['alito', 'noun', 'c'],
['allacciare', 'verb', 'c'],
['allagare', 'verb', 'c'],
['allargare', 'verb', 'b'],
['allarmare', 'verb', 'c'],
['allarme', 'noun', 'b'],
['allattare', 'verb', 'c'],
['alleanza', 'noun', 'b'],
['allearsi', 'verb', 'c'],
['alleato', 'past_part', 'b'],
['alleato', 'adjective', 'b'],
['alleato', 'noun', 'b'],
['allegato', 'past_part', 'b'],
['allegato', 'adjective', 'b'],
['allegato', 'noun', 'b'],
['alleggerire', 'verb', 'c'],
['allegria', 'noun', 'b'],
['allegro', 'adjective', 'b'],
['allegro', 'adverb', 'b'],
['allegro', 'noun', 'b'],
['allenamento', 'noun', 'b'],
['allenare', 'verb', 'b'],
['allenatore', 'adjective', 'b'],
['allenatore', 'noun', 'b'],
['allentare', 'verb', 'c'],
['allergia', 'noun', 'c'],
['allevare', 'verb', 'b'],
['allievo', 'noun', 'b'],
['allineare', 'verb', 'c'],
['alloggio', 'noun', 'b'],
['allontanare', 'verb', 'a'],
['allora', 'adverb', 'a'],
['allora', 'conjunction', 'a'],
['alluce', 'noun', 'c'],
['alludere', 'verb', 'b'],
['alluminio', 'noun', 'c'],
['allungare', 'verb', 'a'],
['alluvione', 'noun', 'c'],
['almeno', 'adverb', 'a'],
['alquanto', 'adjective', 'b'],
['alquanto', 'pronoun', 'b'],
['alquanto', 'adverb', 'b'],
['altalena', 'noun', 'c'],
['altamente', 'adverb', 'b'],
['altare', 'noun', 'b'],
['alterare', 'verb', 'b'],
['alternare', 'verb', 'b'],
['alternativa', 'noun', 'b'],
['alternativo', 'adjective', 'b'],
['alterno', 'adjective', 'c'],
['altezza', 'noun', 'a'],
['alto', 'adjective', 'a'],
['alto', 'noun', 'a'],
['alto', 'adverb', 'a'],
['altoatesino', 'adjective', 'c'],
['altoatesino', 'noun', 'c'],
['altopiano', 'noun', 'c'],
['altrettanto', 'adjective', 'a'],
['altrettanto', 'pronoun', 'a'],
['altrettanto', 'adverb', 'a'],
['altrimenti', 'adverb', 'a'],
['altro', 'adjective', 'a'],
['altro', 'pronoun', 'a'],
['altro', 'adverb', 'a'],
['altrove', 'adverb', 'b'],
['altrui', 'adjective', 'b'],
['altrui', 'pronoun', 'b'],
['alunno', 'noun', 'b'],
['alveare', 'noun', 'c'],
['alzare', 'verb', 'a'],
['amante', 'pres_part', 'a'],
['amante', 'adjective', 'a'],
['amante', 'noun', 'a'],
['amare', 'verb', 'a'],
['amaro', 'adjective', 'b'],
['amaro', 'noun', 'b'],
['amato', 'past_part', 'b'],
['amato', 'adjective', 'b'],
['amato', 'noun', 'b'],
['ambasciata', 'noun', 'c'],
['ambientale', 'adjective', 'a'],
['ambientare', 'verb', 'b'],
['ambiente', 'noun', 'a'],
['ambiente', 'adjective', 'a'],
['ambito', 'noun', 'a'],
['ambizione', 'noun', 'b'],
['ambulanza', 'noun', 'b'],
['americano', 'adjective', 'a'],
['americano', 'noun', 'a'],
['amicizia', 'noun', 'a'],
['amico', 'adjective', 'a'],
['amico', 'noun', 'a'],
['ammaccare', 'verb', 'c'],
['ammalarsi', 'verb', 'b'],
['ammalato', 'past_part', 'c'],
['ammalato', 'adjective', 'c'],
['ammalato', 'noun', 'c'],
['ammanettare', 'verb', 'c'],
['ammassare', 'verb', 'c'],
['ammasso', 'noun', 'c'],
['ammazzare', 'verb', 'a'],
['ammettere', 'verb', 'a'],
['amministrativo', 'adjective', 'b'],
['amministrativo', 'noun', 'b'],
['amministratore', 'noun', 'b'],
['amministrazione', 'noun', 'a'],
['ammirare', 'verb', 'b'],
['ammissione', 'noun', 'b'],
['ammobiliare', 'verb', 'c'],
['ammoniaca', 'noun', 'c'],
['ammorbidente', 'pres_part', 'c'],
['ammorbidente', 'adjective', 'c'],
['ammorbidente', 'noun', 'c'],
['ammucchiare', 'verb', 'c'],
['ammuffire', 'verb', 'c'],
['amore', 'noun', 'a'],
['amoroso', 'adjective', 'b'],
['amoroso', 'noun', 'b'],
['ampiamente', 'adverb', 'b'],
['ampio', 'adjective', 'a'],
['ampio', 'noun', 'a'],
['amplificatore', 'adjective', 'c'],
['amplificatore', 'noun', 'c'],
['analcolico', 'adjective', 'c'],
['analcolico', 'noun', 'c'],
['analfabeta', 'adjective', 'c'],
['analfabeta', 'noun', 'c'],
['analisi', 'noun', 'a'],
['analitico', 'adjective', 'b'],
['analizzare', 'verb', 'a'],
['analogo', 'adjective', 'b'],
['ananas', 'noun', 'c'],
['anarchico', 'adjective', 'c'],
['anarchico', 'noun', 'c'],
['anatra', 'noun', 'c'],
['anche', 'conjunction', 'a'],
['anche', 'adverb', 'a'],
['anconetano', 'adjective', 'c'],
['anconetano', 'noun', 'c'],
['ancora', 'adverb', 'a'],
['ancora', 'conjunction', 'a'],
['ancorare', 'verb', 'b'],
['andamento', 'noun', 'b'],
['andare', 'verb', 'a'],
['andata', 'noun', 'c'],
['anello', 'noun', 'a'],
['angelo', 'noun', 'a'],
['angolare', 'adjective', 'b'],
['angolare', 'noun', 'b'],
['angolo', 'noun', 'a'],
['angoscia', 'noun', 'b'],
['anima', 'noun', 'a'],
['animale', 'noun', 'a'],
['animale', 'adjective', 'b'],
['animare', 'verb', 'a'],
['animato', 'past_part', 'b'],
['animato', 'adjective', 'b'],
['animato', 'adverb', 'b'],
['animo', 'noun', 'b'],
['animo', 'exclamation', 'b'],
['annacquare', 'verb', 'c'],
['annaffiare', 'verb', 'c'],
['annebbiare', 'verb', 'c'],
['anniversario', 'noun', 'b'],
['anniversario', 'adjective', 'b'],
['anno', 'noun', 'a'],
['annodare', 'verb', 'c'],
['annoiare', 'verb', 'b'],
['annotare', 'verb', 'b'],
['annuale', 'adjective', 'b'],
['annuale', 'noun', 'b'],
['annuire', 'verb', 'b'],
['annullare', 'verb', 'b'],
['annunciare', 'verb', 'a'],
['annuncio', 'noun', 'b'],
['annusare', 'verb', 'c'],
['anonimo', 'adjective', 'b'],
['anonimo', 'noun', 'b'],
['ansia', 'noun', 'a'],
['ansioso', 'adjective', 'b'],
['ansioso', 'noun', 'b'],
['antartico', 'adjective', 'c'],
['antartico', 'noun', 'c'],
['antenna', 'noun', 'b'],
['anteprima', 'noun', 'b'],
['anteriore', 'adjective', 'b'],
['anticalcare', 'adjective', 'c'],
['antichità', 'noun', 'c'],
['anticipare', 'verb', 'b'],
['anticipo', 'noun', 'b'],
['antico', 'adjective', 'a'],
['antico', 'noun', 'a'],
['antipasto', 'noun', 'c'],
['antirughe', 'adjective', 'c'],
['antirughe', 'noun', 'c'],
['antropologia', 'noun', 'b'],
['anulare', 'adjective', 'c'],
['anulare', 'noun', 'c'],
['anzi', 'adverb', 'a'],
['anzi', 'preposition', 'a'],
['anziano', 'adjective', 'a'],
['anziano', 'noun', 'a'],
['anziché', 'conjunction', 'b'],
['aostano', 'adjective', 'c'],
['aostano', 'noun', 'c'],
['ape', 'noun', 'b'],
['aperitivo', 'noun', 'c'],
['aperitivo', 'adjective', 'c'],
['aperto', 'past_part', 'a'],
['aperto', 'adjective', 'a'],
['aperto', 'noun', 'a'],
['aperto', 'adverb', 'a'],
['apertura', 'noun', 'a'],
['aspettativa', 'noun', 'b'],
['apostolo', 'noun', 'c'],
['appalto', 'noun', 'b'],
['appannare', 'verb', 'c'],
['apparato', 'noun', 'b'],
['apparecchiare', 'verb', 'c'],
['apparecchiatura', 'noun', 'c'],
['apparecchio', 'noun', 'b'],
['apparente', 'pres_part', 'b'],
['apparente', 'adjective', 'b'],
['apparentemente', 'adverb', 'b'],
['apparenza', 'noun', 'b'],
['apparire', 'verb', 'a'],
['apparizione', 'noun', 'b'],
['appartamento', 'noun', 'a'],
['appartenenza', 'noun', 'b'],
['appartenere', 'verb', 'a'],
['appassionare', 'verb', 'b'],
['appassionarsi', 'verb', 'c'],
['appassionato', 'past_part', 'b'],
['appassionato', 'adjective', 'b'],
['appassionato', 'noun', 'b'],
['appello', 'noun', 'b'],
['appena', 'adverb', 'a'],
['appena', 'conjunction', 'a'],
['appendere', 'verb', 'b'],
['appendicite', 'noun', 'c'],
['appenninico', 'adjective', 'c'],
['appeso', 'past_part', 'c'],
['appeso', 'adjective', 'c'],
['appeso', 'noun', 'c'],
['appiccicare', 'verb', 'c'],
['appiglio', 'noun', 'c'],
['applauso', 'noun', 'b'],
['applicare', 'verb', 'a'],
['applicazione', 'noun', 'b'],
['appoggiare', 'verb', 'a'],
['appoggio', 'noun', 'b'],
['apposito', 'adjective', 'b'],
['apposta', 'adverb', 'b'],
['apposta', 'adjective', 'b'],
['apprendere', 'verb', 'b'],
['apprendimento', 'noun', 'b'],
['apprendista', 'noun', 'c'],
['apprezzare', 'verb', 'a'],
['approccio', 'noun', 'b'],
['approfittare', 'verb', 'b'],
['approfondimento', 'noun', 'b'],
['approfondire', 'verb', 'b'],
['approvare', 'verb', 'b'],
['approvazione', 'noun', 'b'],
['appuntamento', 'noun', 'a'],
['appuntire', 'verb', 'c'],
['appunto', 'noun', 'b'],
['appunto', 'adverb', 'a'],
['aprile', 'noun', 'a'],
['aprire', 'verb', 'a'],
['apriscatole', 'noun', 'c'],
['aquila', 'noun', 'c'],
['aquilano', 'adjective', 'c'],
['aquilano', 'noun', 'c'],
['aquilone', 'noun', 'c'],
['arabo', 'adjective', 'a'],
['arabo', 'noun', 'a'],
['arachide', 'noun', 'c'],
['aragosta', 'noun', 'c'],
['aranciata', 'noun', 'c'],
['arancio', 'noun', 'c'],
['arare', 'verb', 'c'],
['aratro', 'noun', 'c'],
['arbitro', 'noun', 'b'],
['archeologo', 'noun', 'c'],
['architettare', 'verb', 'b'],
['architetto', 'noun', 'b'],
['architettonico', 'adjective', 'b'],
['architettura', 'noun', 'b'],
['archiviare', 'verb', 'b'],
['archivio', 'noun', 'b'],
['arco', 'noun', 'a'],
['arcobaleno', 'noun', 'c'],
['area', 'noun', 'a'],
['argentino', 'adjective', 'b'],
['argentino', 'noun', 'b'],
['argento', 'noun', 'b'],
['argomentare', 'verb', 'b'],
['argomentazione', 'noun', 'b'],
['argomento', 'noun', 'a'],
['aria', 'noun', 'a'],
['aristocratico', 'adjective', 'c'],
['aristocratico', 'noun', 'c'],
['aritmetica', 'noun', 'c'],
['aritmetico', 'adjective', 'c'],
['aritmetico', 'noun', 'c'],
['arma', 'noun', 'a'],
['armadio', 'noun', 'b'],
['armamento', 'noun', 'c'],
['armare', 'verb', 'b'],
['armato', 'past_part', 'b'],
['armato', 'adjective', 'b'],
['armato', 'noun', 'b'],
['armonia', 'noun', 'b'],
['aroma', 'noun', 'c'],
['arrabbiarsi', 'verb', 'a'],
['arrampicarsi', 'verb', 'b'],
['arredamento', 'noun', 'b'],
['arredare', 'verb', 'c'],
['arrendersi', 'verb', 'b'],
['arrendersi', 'verb', 'c'],
['arrestare', 'verb', 'a'],
['arresto', 'noun', 'b'],
['arricchire', 'verb', 'b'],
['arrivare', 'verb', 'a'],
['arrivederci', 'exclamation', 'b'],
['arrivederci', 'noun', 'b'],
['arrivo', 'noun', 'a'],
['arrosto', 'noun', 'c'],
['arrosto', 'adjective', 'c'],
['arrosto', 'adverb', 'c'],
['arrugginire', 'verb', 'c'],
['arte', 'noun', 'a'],
['arteria', 'noun', 'b'],
['artico', 'adjective', 'c'],
['artico', 'noun', 'c'],
['articolare', 'verb', 'b'],
['articolare', 'noun', 'b'],
['articolazione', 'noun', 'b'],
['articolo', 'noun', 'a'],
['artificiale', 'adjective', 'b'],
['artigianale', 'adjective', 'c'],
['artigiano', 'noun', 'b'],
['artigiano', 'adjective', 'b'],
['artiglieria', 'noun', 'c'],
['artiglio', 'noun', 'c'],
['artista', 'noun', 'a'],
['artistico', 'adjective', 'a'],
['artistico', 'noun', 'a'],
['ascella', 'noun', 'c'],
['ascensore', 'noun', 'b'],
['ascesa', 'noun', 'b'],
['ascesso', 'noun', 'c'],
['ascia', 'noun', 'c'],
['asciugamano', 'noun', 'b'],
['asciugare', 'verb', 'b'],
['asciutto', 'adjective', 'b'],
['asciutto', 'noun', 'b'],
['ascoltare', 'verb', 'a'],
['ascolto', 'noun', 'b'],
['asfaltare', 'verb', 'c'],
['asfalto', 'noun', 'c'],
['asiatico', 'adjective', 'b'],
['asiatico', 'noun', 'b'],
['asilo', 'noun', 'b'],
['asino', 'noun', 'b'],
['asma', 'noun', 'c'],
['asparago', 'noun', 'c'],
['aspettare', 'verb', 'a'],
['aspetto', 'noun', 'a'],
['aspirapolvere', 'noun', 'c'],
['aspirare', 'verb', 'b'],
['aspirazione', 'noun', 'b'],
['aspro', 'adjective', 'b'],
['aspro', 'noun', 'b'],
['assaggiare', 'verb', 'b'],
['assaggio', 'noun', 'c'],
['assai', 'adverb', 'a'],
['assai', 'adjective', 'a'],
['assai', 'noun', 'a'],
['assalire', 'verb', 'c'],
['assaltare', 'verb', 'c'],
['assalto', 'noun', 'b'],
['assaporare', 'verb', 'c'],
['assassinare', 'verb', 'b'],
['assassinio', 'noun', 'c'],
['assassino', 'noun', 'b'],
['assassino', 'adjective', 'b'],
['asse', 'noun', 'b'],
['assediare', 'verb', 'c'],
['assegnare', 'verb', 'b'],
['assegno', 'noun', 'b'],
['assemblea', 'noun', 'b'],
['assente', 'adjective', 'b'],
['assente', 'noun', 'b'],
['assenza', 'noun', 'a'],
['assicurare', 'verb', 'a'],
['assicurazione', 'noun', 'b'],
['assieme', 'adverb', 'a'],
['assieme', 'noun', 'a'],
['assistente', 'pres_part', 'b'],
['assistente', 'adjective', 'b'],
['assistente', 'noun', 'b'],
['assistenza', 'noun', 'b'],
['assistere', 'verb', 'a'],
['associare', 'verb', 'b'],
['associazione', 'noun', 'a'],
['assolutamente', 'adverb', 'a'],
['assoluto', 'adjective', 'a'],
['assoluto', 'noun', 'a'],
['assoluzione', 'noun', 'c'],
['assolvere', 'verb', 'b'],
['assomigliare', 'verb', 'b'],
['assorbente', 'pres_part', 'c'],
['assorbente', 'adjective', 'c'],
['assorbente', 'noun', 'c'],
['assorbire', 'verb', 'b'],
['assordare', 'verb', 'c'],
['assumere', 'verb', 'a'],
['assunzione', 'noun', 'b'],
['assurdo', 'adjective', 'a'],
['assurdo', 'noun', 'a'],
['asta', 'noun', 'b'],
['astemio', 'adjective', 'c'],
['astemio', 'noun', 'c'],
['astratto', 'past_part', 'b'],
['astratto', 'adjective', 'b'],
['astratto', 'noun', 'b'],
['astronave', 'noun', 'c'],
['astuccio', 'noun', 'c'],
['astuto', 'adjective', 'c'],
['astuto', 'noun', 'c'],
['astuzia', 'noun', 'c'],
['ateniese', 'adjective', 'c'],
['ateniese', 'noun', 'c'],
['ateo', 'adjective', 'b'],
['ateo', 'noun', 'b'],
['atlantico', 'adjective', 'c'],
['atleta', 'noun', 'b'],
['atmosfera', 'noun', 'a'],
['atomica', 'noun', 'c'],
['atomico', 'adjective', 'b'],
['atomo', 'noun', 'b'],
['atrio', 'noun', 'c'],
['atroce', 'adjective', 'b'],
['attaccante', 'pres_part', 'c'],
['attaccante', 'adjective', 'c'],
['attaccante', 'noun', 'c'],
['attaccapanni', 'noun', 'c'],
['attaccare', 'verb', 'a'],
['attacco', 'noun', 'a'],
['atteggiamento', 'noun', 'a'],
['atteggiare', 'verb', 'c'],
['attendere', 'verb', 'a'],
['attenere', 'verb', 'b'],
['attentamente', 'adverb', 'b'],
['attentare', 'verb', 'c'],
['attentato', 'noun', 'b'],
['attento', 'adjective', 'a'],
['attenzione', 'noun', 'a'],
['atterraggio', 'noun', 'c'],
['atterrare', 'verb', 'b'],
['attesa', 'noun', 'a'],
['attestare', 'verb', 'b'],
['attimo', 'noun', 'a'],
['attingere', 'verb', 'b'],
['attirare', 'verb', 'b'],
['attivare', 'verb', 'b'],
['attività', 'noun', 'a'],
['attivo', 'adjective', 'a'],
['attivo', 'noun', 'a'],
['atto', 'noun', 'a'],
['attore', 'noun', 'a'],
['attorno', 'adverb', 'a'],
['attrarre', 'verb', 'b'],
['attraversare', 'verb', 'a'],
['attraverso', 'preposition', 'a'],
['attraverso', 'adverb', 'a'],
['attrazione', 'noun', 'b'],
['attrezzare', 'verb', 'b'],
['attrezzatura', 'noun', 'b'],
['attrezzo', 'noun', 'b'],
['attribuire', 'verb', 'b'],
['attrice', 'noun', 'b'],
['attuale', 'adjective', 'a'],
['attualità', 'noun', 'b'],
['attualmente', 'adverb', 'b'],
['attuare', 'verb', 'b'],
['augurare', 'verb', 'b'],
['augurio', 'noun', 'b'],
['aula', 'noun', 'b'],
['aumentare', 'verb', 'a'],
['aumento', 'noun', 'a'],
['australiano', 'adjective', 'c'],
['australiano', 'noun', 'c'],
['austriaco', 'adjective', 'b'],
['austriaco', 'noun', 'b'],
['autentico', 'adjective', 'b'],
['autentico', 'noun', 'b'],
['autista', 'noun', 'b'],
['auto', 'noun', 'a'],
['autoambulanza', 'noun', 'c'],
['autobotte', 'noun', 'c'],
['autobus', 'noun', 'b'],
['autografo', 'adjective', 'c'],
['autografo', 'noun', 'c'],
['automaticamente', 'adverb', 'b'],
['automatico', 'adjective', 'b'],
['automatico', 'noun', 'b'],
['automobile', 'noun', 'b'],
['automobilista', 'noun', 'c'],
['autonomia', 'noun', 'b'],
['autonomo', 'adjective', 'b'],
['autonomo', 'noun', 'b'],
['autore', 'noun', 'a'],
['autorevole', 'adjective', 'c'],
['autorità', 'noun', 'a'],
['autorizzare', 'verb', 'a'],
['autoscontro', 'noun', 'c'],
['autoscuola', 'noun', 'c'],
['autostop', 'noun', 'c'],
['autostrada', 'noun', 'b'],
['autotreno', 'noun', 'c'],
['autunno', 'noun', 'b'],
['avambraccio', 'noun', 'c'],
['avanguardia', 'noun', 'b'],
['avanti', 'adverb', 'a'],
['avanti', 'adjective', 'a'],
['avanti', 'loc-comando', 'a'],
['avanti', 'preposition', 'a'],
['avanti', 'noun', 'a'],
['avanzare', 'verb', 'a'],
['avanzato', 'past_part', 'b'],
['avanzato', 'adjective', 'b'],
['avanzo', 'noun', 'c'],
['avarizia', 'noun', 'c'],
['avaro', 'adjective', 'c'],
['avaro', 'noun', 'c'],
['avena', 'noun', 'c'],
['avere', 'verb', 'a'],
['aviazione', 'noun', 'c'],
['avvantaggiare', 'verb', 'c'],
['avvelenare', 'verb', 'b'],
['avvelenato', 'past_part', 'c'],
['avvelenato', 'adjective', 'c'],
['avvenimento', 'noun', 'b'],
['avvenire', 'adjective', 'a'],
['avvenire', 'noun', 'a'],
['avventura', 'noun', 'a'],
['avverare', 'verb', 'c'],
['avversario', 'noun', 'b'],
['avvertire', 'verb', 'a'],
['avviamento', 'noun', 'c'],
['avviare', 'verb', 'a'],
['avvicinare', 'verb', 'a'],
['avvio', 'noun', 'b'],
['avvisare', 'verb', 'b'],
['avviso', 'noun', 'b'],
['avvitare', 'verb', 'c'],
['avvocato', 'noun', 'a'],
['avvolgere', 'verb', 'b'],
['azienda', 'noun', 'a'],
['aziendale', 'adjective', 'b'],
['azione', 'noun', 'a'],
['azione', 'noun', 'b'],
['azzardare', 'verb', 'b'],
['azzardo', 'noun', 'c'],
['azzurro', 'noun', 'a'],
['azzurro', 'adjective', 'a'],
['babbo', 'noun', 'b'],
['baby', 'noun', 'b'],
['baby', 'adjective', 'b'],
['babydoll', 'noun', 'c'],
['bacca', 'noun', 'c'],
['baccalà', 'noun', 'c'],
['bacheca', 'noun', 'b'],
['baciare', 'verb', 'a'],
['bacinella', 'noun', 'c'],
['bacino', 'noun', 'b'],
['bacio', 'noun', 'a'],
['baco', 'noun', 'c'],
['badare', 'verb', 'b'],
['baffo', 'noun', 'b'],
['bagagliaio', 'noun', 'c'],
['bagaglio', 'noun', 'b'],
['bagnare', 'verb', 'b'],
['bagnato', 'past_part', 'b'],
['bagnato', 'adjective', 'b'],
['bagnato', 'noun', 'b'],
['bagno', 'noun', 'a'],
['bagnoschiuma', 'noun', 'c'],
['balcone', 'noun', 'b'],
['balena', 'noun', 'b'],
['balia', 'noun', 'b'],
['ballare', 'verb', 'a'],
['ballerina', 'noun', 'c'],
['ballerino', 'noun', 'c'],
['ballerino', 'adjective', 'c'],
['balletto', 'noun', 'c'],
['ballo', 'noun', 'b'],
['balsamo', 'noun', 'c'],
['bambina', 'noun', 'a'],
['bambinaia', 'noun', 'c'],
['bambino', 'noun', 'a'],
['bambino', 'adjective', 'a'],
['bambola', 'noun', 'b'],
['banale', 'adjective', 'b'],
['banana', 'noun', 'c'],
['banca', 'noun', 'a'],
['bancarella', 'noun', 'c'],
['bancario', 'adjective', 'b'],
['bancario', 'noun', 'b'],
['banco', 'noun', 'b'],
['bancone', 'noun', 'b'],
['band', 'noun', 'b'],
['banda', 'noun', 'b'],
['bandiera', 'noun', 'b'],
['bando', 'noun', 'b'],
['bar', 'noun', 'a'],
['bara', 'noun', 'b'],
['baracca', 'noun', 'c'],
['barba', 'noun', 'b'],
['barbabietola', 'noun', 'c'],
['barbaro', 'adjective', 'b'],
['barbaro', 'noun', 'b'],
['barca', 'noun', 'a'],
['barella', 'noun', 'c'],
['barese', 'adjective', 'c'],
['barese', 'noun', 'c'],
['barile', 'noun', 'c'],
['barista', 'noun', 'c'],
['barriera', 'noun', 'b'],
['basare', 'verb', 'a'],
['base', 'noun', 'a'],
['basetta', 'noun', 'c'],
['basilica', 'noun', 'b'],
['basilico', 'noun', 'c'],
['basket', 'noun', 'c'],
['basso', 'adjective', 'a'],
['basso', 'noun', 'a'],
['basso', 'adverb', 'a'],
['bastardo', 'adjective', 'b'],
['bastardo', 'noun', 'b'],
['bastare', 'verb', 'a'],
['bastonare', 'verb', 'c'],
['bastone', 'noun', 'b'],
['battaglia', 'noun', 'a'],
['battello', 'noun', 'c'],
['battere', 'verb', 'a'],
['battere', 'noun', 'a'],
['batteria', 'noun', 'b'],
['batterio', 'noun', 'b'],
['batticuore', 'noun', 'c'],
['battipanni', 'noun', 'c'],
['battito', 'noun', 'c'],
['battuta', 'noun', 'a'],
['batuffolo', 'noun', 'c'],
['baule', 'noun', 'c'],
['bava', 'noun', 'c'],
['bavaglio', 'noun', 'c'],
['beato', 'past_part', 'b'],
['beato', 'adjective', 'b'],
['beato', 'noun', 'b'],
['beccare', 'verb', 'b'],
['befana', 'noun', 'c'],
['beffa', 'noun', 'c'],
['beh', 'exclamation', 'a'],
['belare', 'verb', 'c'],
['belga', 'adjective', 'c'],
['belga', 'noun', 'c'],
['bella', 'noun', 'b'],
['bellezza', 'noun', 'a'],
['bello', 'adjective', 'a'],
['bello', 'noun', 'a'],
['benché', 'conjunction', 'b'],
['benda', 'noun', 'c'],
['bene', 'adverb', 'a'],
['bene', 'exclamation', 'a'],
['bene', 'noun', 'a'],
['benedetto', 'past_part', 'b'],
['benedetto', 'adjective', 'b'],
['benedetto', 'noun', 'b'],
['beneficenza', 'noun', 'c'],
['beneficio', 'noun', 'b'],
['benessere', 'noun', 'b'],
['benestante', 'adjective', 'c'],
['benestante', 'noun', 'c'],
['bensì', 'conjunction', 'b'],
['bensì', 'adverb', 'b'],
['benvenuto', 'adjective', 'b'],
['benvenuto', 'noun', 'b'],
['benzina', 'noun', 'b'],
['benzinaio', 'noun', 'c'],
['bere', 'verb', 'a'],
['bere', 'noun', 'a'],
['berlinese', 'adjective', 'c'],
['berlinese', 'noun', 'c'],
['berretto', 'noun', 'c'],
['bersaglio', 'noun', 'b'],
['besciamella', 'noun', 'c'],
['bestemmia', 'noun', 'c'],
['bestia', 'noun', 'b'],
['bestiale', 'adjective', 'c'],
['bevanda', 'noun', 'b'],
['bevitore', 'noun', 'c'],
['bevuta', 'noun', 'c'],
['bi', 'noun', 'c'],
['bianco', 'adjective', 'a'],
['bianco', 'noun', 'a'],
['bibbia', 'noun', 'b'],
['bibita', 'noun', 'c'],
['biblico', 'adjective', 'b'],
['biblico', 'noun', 'b'],
['bibliografia', 'noun', 'b'],
['biblioteca', 'noun', 'b'],
['bicchiere', 'noun', 'a'],
['bici', 'noun', 'b'],
['bicicletta', 'noun', 'b'],
['bidè', 'noun', 'c'],
['bidello', 'noun', 'c'],
['biglia', 'noun', 'c'],
['biglietteria', 'noun', 'c'],
['biglietto', 'noun', 'a'],
['bikini', 'noun', 'c'],
['bilancia', 'noun', 'b'],
['bilancio', 'noun', 'b'],
['biliardo', 'noun', 'c'],
['bimba', 'noun', 'b'],
['bimbo', 'noun', 'b'],
['binario', 'noun', 'c'],
['biografia', 'noun', 'b'],
['biologia', 'noun', 'b'],
['biologico', 'adjective', 'b'],
['biologico', 'noun', 'b'],
['bionda', 'noun', 'b'],
['biondo', 'adjective', 'b'],
['biondo', 'noun', 'b'],
['birichino', 'noun', 'c'],
['birichino', 'adjective', 'c'],
['birillo', 'noun', 'c'],
['birra', 'noun', 'b'],
['bisbigliare', 'verb', 'c'],
['biscia', 'noun', 'c'],
['biscotto', 'adjective', 'b'],
['biscotto', 'noun', 'b'],
['bisnonno', 'noun', 'c'],
['bisognare', 'verb', 'a'],
['bisogno', 'noun', 'a'],
['bistecca', 'noun', 'c'],
['bistecchiera', 'noun', 'c'],
['bisticciare', 'verb', 'c'],
['bit', 'noun', 'b'],
['bizzarro', 'adjective', 'b'],
['bloccare', 'verb', 'a'],
['blocco', 'noun', 'b'],
['blocco', 'noun', 'b'],
['blog', 'noun', 'a'],
['blu', 'adjective', 'a'],
['blu', 'noun', 'a'],
['bocca', 'noun', 'a'],
['bocchino', 'noun', 'c'],
['boccia', 'noun', 'c'],
['bocciare', 'verb', 'b'],
['bocciatura', 'noun', 'c'],
['bocciolo', 'noun', 'c'],
['boccone', 'noun', 'c'],
['boh', 'exclamation', 'b'],
['boia', 'noun', 'c'],
['boia', 'adjective', 'c'],
['bolla', 'noun', 'b'],
['bolletta', 'noun', 'b'],
['bollito', 'past_part', 'c'],
['bollito', 'adjective', 'c'],
['bollito', 'noun', 'c'],
['bollitore', 'noun', 'c'],
['bollo', 'noun', 'c'],
['bolognese', 'adjective', 'c'],
['bolognese', 'noun', 'c'],
['bolzanino', 'adjective', 'c'],
['bolzanino', 'noun', 'c'],
['bomba', 'noun', 'b'],
['bombardare', 'verb', 'b'],
['bombola', 'noun', 'c'],
['bomboniera', 'noun', 'c'],
['bontà', 'noun', 'b'],
['bordo', 'noun', 'a'],
['borgata', 'noun', 'c'],
['borghese', 'adjective', 'b'],
['borghese', 'noun', 'b'],
['borghesia', 'noun', 'c'],
['borgo', 'noun', 'b'],
['borotalco', 'noun', 'c'],
['borsa', 'noun', 'a'],
['borsa', 'noun', 'b'],
['borsetta', 'noun', 'c'],
['bosco', 'noun', 'a'],
['bosniaco', 'adjective', 'c'],
['bosniaco', 'noun', 'c'],
['boss', 'noun', 'b'],
['bossolo', 'noun', 'c'],
['botanica', 'noun', 'c'],
['botta', 'noun', 'b'],
['botte', 'noun', 'c'],
['bottega', 'noun', 'b'],
['bottegaio', 'noun', 'c'],
['bottegaio', 'adjective', 'c'],
['bottiglia', 'noun', 'a'],
['botto', 'noun', 'c'],
['bottone', 'noun', 'b'],
['bovino', 'adjective', 'c'],
['bovino', 'noun', 'c'],
['box', 'noun', 'b'],
['boxer', 'noun', 'c'],
['braccialetto', 'noun', 'c'],
['bracciante', 'noun', 'c'],
['braccio', 'noun', 'a'],
['branco', 'noun', 'b'],
['brand', 'noun', 'b'],
['brandello', 'noun', 'c'],
['brano', 'noun', 'a'],
['brasiliano', 'adjective', 'b'],
['brasiliano', 'noun', 'b'],
['bravo', 'adjective', 'a'],
['bravo', 'noun', 'a'],
['bravo', 'exclamation', 'a'],
['bresaola', 'noun', 'c'],
['bretella', 'noun', 'c'],
['breve', 'adjective', 'a'],
['breve', 'adverb', 'a'],
['breve', 'noun', 'a'],
['briciola', 'noun', 'c'],
['brigantaggio', 'noun', 'c'],
['brigante', 'noun', 'c'],
['brillante', 'pres_part', 'b'],
['brillante', 'adjective', 'b'],
['brillante', 'noun', 'b'],
['brillantina', 'noun', 'c'],
['brillare', 'verb', 'b'],
['brina', 'noun', 'c'],
['brioche', 'noun', 'c'],
['britannico', 'adjective', 'b'],
['britannico', 'noun', 'b'],
['brivido', 'noun', 'b'],
['brocca', 'noun', 'c'],
['brogliaccio', 'noun', 'b'],
['bronchite', 'noun', 'c'],
['brontolare', 'verb', 'c'],
['bronzo', 'noun', 'b'],
['bruciare', 'verb', 'a'],
['bruciato', 'past_part', 'b'],
['bruciato', 'adjective', 'b'],
['bruciato', 'noun', 'b'],
['bruciatura', 'noun', 'c'],
['bruco', 'noun', 'c'],
['bruco', 'adjective', 'c'],
['bruschetta', 'noun', 'c'],
['brutale', 'adjective', 'c'],
['brutto', 'adjective', 'a'],
['brutto', 'noun', 'a'],
['brutto', 'adverb', 'a'],
['buca', 'noun', 'b'],
['bucare', 'verb', 'b'],
['bucato', 'noun', 'c'],
['buccia', 'noun', 'c'],
['buco', 'noun', 'a'],
['budino', 'noun', 'c'],
['bufala', 'noun', 'c'],
['bufalo', 'noun', 'c'],
['bufera', 'noun', 'c'],
['buffet', 'noun', 'c'],
['buffo', 'adjective', 'b'],
['buffo', 'noun', 'b'],
['bugia', 'noun', 'b'],
['bugiardo', 'adjective', 'b'],
['bugiardo', 'noun', 'b'],
['buio', 'adjective', 'a'],
['buio', 'noun', 'a'],
['bulgaro', 'adjective', 'c'],
['bulgaro', 'noun', 'c'],
['buonafede', 'noun', 'c'],
['buonasera', 'exclamation', 'b'],
['buongiorno', 'exclamation', 'a'],
['buongusto', 'noun', 'c'],
['buono', 'adjective', 'a'],
['buono', 'noun', 'a'],
['buono', 'adverb', 'a'],
['buonuomo', 'noun', 'c'],
['burattino', 'noun', 'c'],
['burocrazia', 'noun', 'c'],
['burrasca', 'noun', 'c'],
['burro', 'noun', 'b'],
['burrone', 'noun', 'c'],
['business', 'noun', 'b'],
['business', 'adjective', 'b'],
['bussare', 'verb', 'b'],
['bussola', 'noun', 'c'],
['busta', 'noun', 'b'],
['bustina', 'noun', 'c'],
['busto', 'noun', 'c'],
['buttare', 'verb', 'a'],
['cabina', 'noun', 'b'],
['cacao', 'noun', 'c'],
['cacca', 'noun', 'b'],
['caccia', 'noun', 'a'],
['cacciare', 'verb', 'a'],
['cacciatore', 'noun', 'b'],
['cacciavite', 'noun', 'c'],
['cadavere', 'noun', 'a'],
['cadere', 'verb', 'a'],
['cadere', 'noun', 'a'],
['caduta', 'noun', 'b'],
['caffè', 'noun', 'a'],
['caffè', 'adjective', 'a'],
['caffellatte', 'noun', 'c'],
['caffellatte', 'adjective', 'c'],
['caffettiera', 'noun', 'c'],
['cagare', 'verb', 'b'],
['cagliaritano', 'adjective', 'c'],
['cagliaritano', 'noun', 'c'],
['calabrese', 'adjective', 'c'],
['calabrese', 'noun', 'c'],
['calabrone', 'noun', 'c'],
['calamaro', 'noun', 'c'],
['calamita', 'noun', 'c'],
['calare', 'verb', 'b'],
['calcagno', 'noun', 'c'],
['calciare', 'verb', 'c'],
['calciatore', 'noun', 'b'],
['calcinaccio', 'noun', 'c'],
['calcio', 'noun', 'a'],
['calcolare', 'verb', 'b'],
['calcolatore', 'adjective', 'c'],
['calcolatore', 'noun', 'c'],
['calcolatrice', 'noun', 'c'],
['calcolo', 'noun', 'b'],
['caldo', 'adjective', 'a'],
['caldo', 'noun', 'a'],
['caldo', 'adverb', 'a'],
['calendario', 'noun', 'b'],
['calligrafia', 'noun', 'c'],
['callo', 'noun', 'c'],
['calma', 'noun', 'b'],
['calmare', 'verb', 'b'],
['calmo', 'adjective', 'b'],
['calo', 'noun', 'b'],
['calore', 'noun', 'a'],
['calpestare', 'verb', 'c'],
['calunnia', 'noun', 'c'],
['calvario', 'noun', 'c'],
['calza', 'noun', 'b'],
['calzare', 'verb', 'c'],
['calzatura', 'noun', 'c'],
['calzino', 'noun', 'c'],
['calzolaio', 'noun', 'c'],
['calzoleria', 'noun', 'c'],
['calzone', 'noun', 'c'],
['cambiamento', 'noun', 'a'],
['cambiare', 'verb', 'a'],
['cambio', 'noun', 'a'],
['camera', 'noun', 'a'],
['camerata', 'noun', 'c'],
['cameriere', 'noun', 'b'],
['camicetta', 'noun', 'c'],
['camicia', 'noun', 'b'],
['caminetto', 'noun', 'c'],
['camion', 'noun', 'a'],
['camionista', 'noun', 'c'],
['cammello', 'noun', 'c'],
['cammello', 'adjective', 'c'],
['camminare', 'verb', 'a'],
['camminata', 'noun', 'c'],
['cammino', 'noun', 'b'],
['camomilla', 'noun', 'c'],
['camorra', 'noun', 'b'],
['campagna', 'noun', 'a'],
['campana', 'noun', 'b'],
['campanella', 'noun', 'c'],
['campanello', 'noun', 'b'],
['campanile', 'noun', 'c'],
['campano', 'adjective', 'c'],
['campano', 'noun', 'c'],
['campare', 'verb', 'b'],
['campeggio', 'noun', 'c'],
['campionato', 'noun', 'b'],
['campione', 'noun', 'a'],
['campo', 'noun', 'a'],
['campobassano', 'adjective', 'c'],
['campobassano', 'noun', 'c'],
['camposanto', 'noun', 'c'],
['canadese', 'adjective', 'c'],
['canadese', 'noun', 'c'],
['canaglia', 'noun', 'c'],
['canale', 'noun', 'a'],
['canapa', 'noun', 'c'],
['canarino', 'noun', 'c'],
['canarino', 'adjective', 'c'],
['cancellare', 'verb', 'a'],
['cancellatura', 'noun', 'c'],
['cancello', 'noun', 'b'],
['cancro', 'noun', 'b'],
['candela', 'noun', 'b'],
['candeliere', 'noun', 'c'],
['candidare', 'verb', 'b'],
['candidato', 'past_part', 'a'],
['candidato', 'adjective', 'a'],
['candidato', 'noun', 'a'],
['candido', 'adjective', 'b'],
['cane', 'noun', 'a'],
['canestro', 'noun', 'c'],
['canguro', 'noun', 'c'],
['canna', 'noun', 'b'],
['cannibale', 'adjective', 'c'],
['cannibale', 'noun', 'c'],
['cannuccia', 'noun', 'c'],
['canone', 'noun', 'b'],
['canottiera', 'noun', 'c'],
['canotto', 'noun', 'c'],
['cantante', 'pres_part', 'b'],
['cantante', 'adjective', 'b'],
['cantante', 'noun', 'b'],
['cantare', 'verb', 'a'],
['cantautore', 'noun', 'c'],
['cantiere', 'noun', 'b'],
['cantilena', 'noun', 'c'],
['cantina', 'noun', 'b'],
['canto', 'noun', 'a'],
['canzone', 'noun', 'a'],
['caos', 'noun', 'b'],
['capace', 'adjective', 'a'],
['capacità', 'noun', 'a'],
['capanna', 'noun', 'b'],
['capannone', 'noun', 'b'],
['caparra', 'noun', 'c'],
['capello', 'noun', 'a'],
['capire', 'verb', 'a'],
['capitale', 'adjective', 'a'],
['capitale', 'noun', 'a'],
['capitano', 'noun', 'a'],
['capitare', 'verb', 'a'],
['capitolo', 'noun', 'a'],
['capo', 'noun', 'a'],
['capodanno', 'noun', 'c'],
['capogiro', 'noun', 'c'],
['capolavoro', 'noun', 'b'],
['capoluogo', 'noun', 'c'],
['caporale', 'noun', 'b'],
['caporale', 'adjective', 'b'],
['caposquadra', 'noun', 'c'],
['capotavola', 'noun', 'c'],
['capoufficio', 'noun', 'c'],
['cappa', 'noun', 'c'],
['cappella', 'noun', 'b'],
['cappelliera', 'noun', 'c'],
['cappello', 'noun', 'b'],
['cappero', 'noun', 'c'],
['cappotto', 'noun', 'c'],
['cappuccino', 'adjective', 'c'],
['cappuccino', 'noun', 'c'],
['cappuccino', 'adjective', 'c'],
['cappuccio', 'noun', 'c'],
['capra', 'noun', 'b'],
['capriccio', 'noun', 'b'],
['capriola', 'noun', 'c'],
['carabiniere', 'noun', 'a'],
['caramella', 'noun', 'b'],
['caramella', 'adjective', 'b'],
['carattere', 'noun', 'a'],
['caratteristica', 'noun', 'a'],
['caratteristico', 'adjective', 'b'],
['caratterizzare', 'verb', 'a'],
['carbone', 'noun', 'b'],
['carburante', 'pres_part', 'c'],
['carburante', 'adjective', 'c'],
['carburante', 'noun', 'c'],
['carcassa', 'noun', 'c'],
['carcerato', 'past_part', 'c'],
['carcerato', 'adjective', 'c'],
['carcerato', 'noun', 'c'],
['carcere', 'noun', 'a'],
['carciofino', 'noun', 'c'],
['carciofo', 'noun', 'c'],
['cardellino', 'noun', 'c'],
['cardiaco', 'adjective', 'b'],
['cardiaco', 'noun', 'b'],
['cardigan', 'noun', 'c'],
['cardinale', 'adjective', 'b'],
['cardinale', 'noun', 'b'],
['cardinale', 'adjective', 'b'],
['carenza', 'noun', 'b'],
['carica', 'noun', 'loc-comando'],
['caricare', 'verb', 'a'],
['carico', 'noun', 'a'],
['carico', 'adjective', 'b'],
['carino', 'adjective', 'a'],
['carità', 'noun', 'b'],
['carnagione', 'noun', 'c'],
['carne', 'noun', 'a'],
['carnevale', 'noun', 'c'],
['carnivoro', 'adjective', 'c'],
['carnivoro', 'noun', 'c'],
['carnoso', 'adjective', 'c'],
['carnoso', 'noun', 'c'],
['caro', 'adjective', 'a'],
['caro', 'adverb', 'a'],
['caro', 'noun', 'a'],
['carosello', 'noun', 'c'],
['carovana', 'noun', 'c'],
['carriera', 'noun', 'a'],
['carro', 'noun', 'b'],
['carrozzeria', 'noun', 'c'],
['carta', 'noun', 'a'],
['cartaceo', 'adjective', 'b'],
['cartella', 'noun', 'b'],
['cartello', 'noun', 'b'],
['cartoleria', 'noun', 'c'],
['cartolina', 'noun', 'b'],
['cartone', 'noun', 'b'],
['cartuccia', 'noun', 'c'],
['casa', 'noun', 'a'],
['casalinga', 'noun', 'c'],
['casalingo', 'adjective', 'c'],
['casalingo', 'noun', 'c'],
['cascare', 'verb', 'b'],
['cascata', 'noun', 'c'],
['casco', 'noun', 'c'],
['caserma', 'noun', 'b'],
['casetta', 'noun', 'b'],
['casino', 'noun', 'a'],
['caso', 'noun', 'a'],
['cassa', 'noun', 'a'],
['cassaforte', 'noun', 'c'],
['cassapanca', 'noun', 'c'],
['casseruola', 'noun', 'c'],
['cassetta', 'noun', 'b'],
['cassettiera', 'noun', 'c'],
['cassetto', 'noun', 'b'],
['cassiera', 'noun', 'c'],
['castagna', 'noun', 'c'],
['castagno', 'noun', 'c'],
['castano', 'adjective', 'c'],
['castello', 'noun', 'a'],
['castoro', 'noun', 'c'],
['casuale', 'adjective', 'b'],
['casuale', 'noun', 'b'],
['catalogo', 'noun', 'b'],
['catanzarese', 'adjective', 'c'],
['catanzarese', 'noun', 'c'],
['catarro', 'noun', 'c'],
['catasta', 'noun', 'c'],
['catastrofe', 'noun', 'b'],
['catechismo', 'noun', 'c'],
['categoria', 'noun', 'a'],
['catena', 'noun', 'a'],
['catenaccio', 'noun', 'c'],
['catino', 'noun', 'c'],
['catrame', 'noun', 'c'],
['cattedrale', 'adjective', 'b'],
['cattedrale', 'noun', 'b'],
['cattivo', 'adjective', 'a'],
['cattivo', 'noun', 'a'],
['cattolico', 'adjective', 'a'],
['cattolico', 'noun', 'a'],
['catturare', 'verb', 'b'],
['causa', 'noun', 'a'],
['causare', 'verb', 'a'],
['cavalcare', 'verb', 'b'],
['cavaliere', 'noun', 'a'],
['cavalletta', 'noun', 'c'],
['cavallo', 'noun', 'a'],
['cavare', 'verb', 'b'],
['cavatappi', 'noun', 'c'],
['caverna', 'noun', 'c'],
['caviglia', 'noun', 'b'],
['cavità', 'noun', 'b'],
['cavo', 'adjective', 'b'],
['cavo', 'noun', 'b'],
['cavo', 'noun', 'b'],
['cavolo', 'noun', 'b'],
['cazzata', 'noun', 'b'],
['cazzo', 'noun', 'a'],
['ce', 'pronoun', 'a'],
['ce', 'adverb', 'a'],
['cece', 'noun', 'c'],
['ceco', 'adjective', 'c'],
['ceco', 'noun', 'c'],
['cecoslovacco', 'adjective', 'c'],
['cecoslovacco', 'noun', 'c'],
['cedere', 'verb', 'a'],
['celare', 'verb', 'b'],
['celebrare', 'verb', 'b'],
['celebre', 'adjective', 'b'],
['celeste', 'adjective', 'b'],
['celeste', 'noun', 'b'],
['cella', 'noun', 'b'],
['cellula', 'noun', 'a'],
['cellulare', 'adjective', 'a'],
['cellulare', 'noun', 'a'],
['cemento', 'noun', 'b'],
['cena', 'noun', 'a'],
['cenare', 'verb', 'b'],
['cenere', 'noun', 'b'],
['cenere', 'adjective', 'b'],
['cenno', 'noun', 'b'],
['centesimo', 'adjective', 'b'],
['centesimo', 'noun', 'b'],
['centimetro', 'noun', 'b'],
['centinaio', 'noun', 'a'],
['cento', 'adjective', 'a'],
['cento', 'noun', 'a'],
['centrale', 'adjective', 'a'],
['centrale', 'noun', 'a'],
['centralino', 'noun', 'c'],
['centrare', 'verb', 'b'],
['centro', 'noun', 'a'],
['centroamericano', 'adjective', 'c'],
['centroamericano', 'noun', 'c'],
['ceramica', 'noun', 'b'],
['cercare', 'verb', 'a'],
['cerchio', 'noun', 'b'],
['cereale', 'noun', 'c'],
['cereale', 'adjective', 'c'],
['cerebrale', 'adjective', 'b'],
['cerebrale', 'noun', 'b'],
['cerimonia', 'noun', 'b'],
['cerino', 'noun', 'c'],
['cerniera', 'noun', 'c'],
['cerotto', 'noun', 'c'],
['certamente', 'adverb', 'a'],
['certezza', 'noun', 'a'],
['certificare', 'verb', 'b'],
['certificato', 'past_part', 'b'],
['certificato', 'adjective', 'b'],
['certificato', 'noun', 'b'],
['certo', 'adjective', 'a'],
['certo', 'adjective', 'a'],
['certo', 'pronoun', 'a'],
['certo', 'adverb', 'a'],
['cervello', 'noun', 'a'],
['cervo', 'noun', 'c'],
['cespuglio', 'noun', 'b'],
['cessare', 'verb', 'b'],
['cesso', 'noun', 'b'],
['cestino', 'noun', 'c'],
['cesto', 'noun', 'c'],
['cetriolo', 'noun', 'c'],
['chat', 'noun', 'b'],
['che', 'pronoun', 'a'],
['che', 'adjective', 'a'],
['che', 'noun', 'a'],
['chewingum', 'noun', 'c'],
['chi', 'pronoun', 'a'],
['chiacchiera', 'noun', 'b'],
['chiacchierare', 'verb', 'b'],
['chiamare', 'verb', 'a'],
['chiamata', 'noun', 'b'],
['chiaramente', 'adverb', 'a'],
['chiarezza', 'noun', 'b'],
['chiarire', 'verb', 'a'],
['chiaro', 'adjective', 'a'],
['chiaro', 'noun', 'a'],
['chiaro', 'adverb', 'a'],
['chiasso', 'noun', 'c'],
['chiave', 'noun', 'a'],
['chiazza', 'noun', 'c'],
['chiedere', 'verb', 'a'],
['chiesa', 'noun', 'a'],
['chilo', 'noun', 'b'],
['chilogrammo', 'noun', 'c'],
['chilometro', 'noun', 'a'],
['chimico', 'adjective', 'a'],
['chimico', 'noun', 'a'],
['china', 'noun', 'c'],
['chinare', 'verb', 'b'],
['chinotto', 'noun', 'c'],
['chiodo', 'noun', 'b'],
['chiosco', 'noun', 'b'],
['chirurgia', 'noun', 'b'],
['chirurgico', 'adjective', 'b'],
['chirurgico', 'noun', 'b'],
['chirurgo', 'noun', 'b'],
['chissà', 'adverb', 'a'],
['chitarra', 'noun', 'b'],
['chiudere', 'verb', 'a'],
['chiunque', 'pronoun', 'a'],
['chiuso', 'past_part', 'a'],
['chiuso', 'adjective', 'a'],
['chiuso', 'noun', 'a'],
['chiuso', 'adverb', 'a'],
['chiusura', 'noun', 'b'],
['ci', 'noun', 'c'],
['ci', 'pronoun', 'a'],
['ci', 'adverb', 'a'],
['ciabatta', 'noun', 'c'],
['ciambella', 'noun', 'c'],
['ciao', 'exclamation', 'a'],
['ciascuno', 'adjective', 'a'],
['ciascuno', 'pronoun', 'a'],
['cibare', 'verb', 'c'],
['cibo', 'noun', 'a'],
['cicatrice', 'noun', 'b'],
['ciclismo', 'noun', 'b'],
['ciclista', 'noun', 'c'],
['ciclo', 'noun', 'b'],
['cicogna', 'noun', 'c'],
['cicoria', 'noun', 'c'],
['cieco', 'adjective', 'b'],
['cieco', 'noun', 'b'],
['cielo', 'noun', 'a'],
['cifra', 'noun', 'a'],
['ciglio', 'noun', 'b'],
['cigno', 'noun', 'c'],
['cileno', 'adjective', 'c'],
['cileno', 'noun', 'c'],
['ciliegia', 'noun', 'c'],
['ciliegia', 'adjective', 'c'],
['ciliegio', 'noun', 'c'],
['cilindro', 'noun', 'c'],
['cima', 'noun', 'c'],
['cimice', 'noun', 'c'],
['ciminiera', 'noun', 'c'],
['cimitero', 'noun', 'b'],
['cinema', 'noun', 'a'],
['cinematografico', 'adjective', 'b'],
['cinese', 'adjective', 'a'],
['cinese', 'noun', 'a'],
['cinghia', 'noun', 'c'],
['cinghiale', 'noun', 'c'],
['cinguettare', 'verb', 'c'],
['cinguettio', 'noun', 'c'],
['cinico', 'adjective', 'c'],
['cinico', 'noun', 'c'],
['cinquanta', 'adjective', 'a'],
['cinquanta', 'noun', 'a'],
['cinque', 'adjective', 'a'],
['cinque', 'noun', 'a'],
['cinquecento', 'adjective', 'b'],
['cinquecento', 'noun', 'b'],
['cintura', 'noun', 'b'],
['cinturino', 'noun', 'c'],
['ciò', 'pronoun', 'a'],
['ciocca', 'noun', 'c'],
['cioccolatino', 'noun', 'c'],
['cioccolato', 'noun', 'b'],
['cioccolato', 'adjective', 'b'],
['cioè', 'conjunction', 'a'],
['ciotola', 'noun', 'c'],
['cipolla', 'noun', 'b'],
['cipresso', 'noun', 'c'],
['cipriota', 'adjective', 'c'],
['cipriota', 'noun', 'c'],
['circa', 'preposition', 'a'],
['circa', 'adverb', 'a'],
['circa', 'noun', 'a'],
['circo', 'noun', 'b'],
['circolare', 'adjective', 'b'],
['circolare', 'noun', 'b'],
['circolare', 'verb', 'b'],
['circolazione', 'noun', 'b'],
['circolo', 'noun', 'b'],
['circondare', 'verb', 'a'],
['circostanza', 'noun', 'a'],
['circuito', 'noun', 'b'],
['citare', 'verb', 'a'],
['citato', 'past_part', 'b'],
['citato', 'adjective', 'b'],
['citato', 'noun', 'b'],
['citazione', 'noun', 'b'],
['citofono', 'noun', 'c'],
['città', 'noun', 'a'],
['cittadina', 'noun', 'b'],
['cittadinanza', 'noun', 'b'],
['cittadino', 'adjective', 'a'],
['cittadino', 'noun', 'a'],
['ciuffo', 'noun', 'c'],
['civile', 'adjective', 'a'],
['civile', 'noun', 'a'],
['civiltà', 'noun', 'b'],
['clacson', 'noun', 'c'],
['clan', 'noun', 'b'],
['clandestino', 'adjective', 'b'],
['clandestino', 'noun', 'b'],
['classe', 'noun', 'a'],
['classico', 'adjective', 'a'],
['classico', 'noun', 'a'],
['classifica', 'noun', 'b'],
['classificare', 'verb', 'b'],
['clero', 'noun', 'c'],
['cliccare', 'verb', 'b'],
['cliente', 'noun', 'a'],
['clima', 'noun', 'b'],
['clinica', 'noun', 'b'],
['clinico', 'adjective', 'b'],
['clinico', 'noun', 'b'],
['clistere', 'noun', 'c'],
['cloro', 'noun', 'c'],
['club', 'noun', 'b'],
['cobra', 'noun', 'c'],
['cocaina', 'noun', 'b'],
['coccinella', 'noun', 'c'],
['coccio', 'noun', 'c'],
['cocciuto', 'adjective', 'c'],
['cocciuto', 'noun', 'c'],
['cocco', 'noun', 'c'],
['coccodrillo', 'noun', 'c'],
['coccola', 'noun', 'c'],
['coccolare', 'verb', 'c'],
['cocomero', 'noun', 'c'],
['coda', 'noun', 'a'],
['codice', 'noun', 'a'],
['coerente', 'adjective', 'b'],
['cofano', 'noun', 'c'],
['cogliere', 'verb', 'a'],
['coglione', 'noun', 'a'],
['cognato', 'noun', 'b'],
['cognato', 'adjective', 'b'],
['cognome', 'noun', 'b'],
['coincidenza', 'noun', 'b'],
['coincidere', 'verb', 'b'],
['coinvolgere', 'verb', 'a'],
['coinvolgimento', 'noun', 'b'],
['colare', 'verb', 'b'],
['colata', 'noun', 'c'],
['colazione', 'noun', 'b'],
['colera', 'noun', 'c'],
['colica', 'noun', 'c'],
['colino', 'noun', 'c'],
['colla', 'noun', 'c'],
['collaborare', 'verb', 'b'],
['collaboratore', 'noun', 'b'],
['collaborazione', 'noun', 'b'],
['collana', 'noun', 'b'],
['collant', 'noun', 'c'],
['collant', 'adjective', 'c'],
['collare', 'noun', 'c'],
['collasso', 'noun', 'c'],
['collaterale', 'adjective', 'b'],
['collaterale', 'noun', 'b'],
['colle', 'noun', 'c'],
['collega', 'noun', 'a'],
['collegamento', 'noun', 'b'],
['collegare', 'verb', 'a'],
['collegio', 'noun', 'b'],
['collera', 'noun', 'c'],
['colletta', 'noun', 'c'],
['collettivo', 'adjective', 'b'],
['collettivo', 'noun', 'b'],
['collezione', 'noun', 'b'],
['collina', 'noun', 'b'],
['collo', 'noun', 'a'],
['collocare', 'verb', 'b'],
['colloquio', 'noun', 'b'],
['colluttorio', 'noun', 'c'],
['colmo', 'noun', 'c'],
['colomba', 'noun', 'b'],
['colombo', 'noun', 'c'],
['colonna', 'noun', 'a'],
['colonnello', 'noun', 'b'],
['colorante', 'pres_part', 'c'],
['colorante', 'adjective', 'c'],
['colorante', 'noun', 'c'],
['colorare', 'verb', 'b'],
['colorato', 'past_part', 'b'],
['colorato', 'adjective', 'b'],
['colore', 'noun', 'a'],
['coloro', 'pronoun', 'a'],
['colosso', 'noun', 'c'],
['colpa', 'noun', 'a'],
['colpevole', 'adjective', 'b'],
['colpevole', 'noun', 'b'],
['colpire', 'verb', 'a'],
['colpo', 'noun', 'a'],
['coltellata', 'noun', 'c'],
['coltello', 'noun', 'a'],
['coltivare', 'verb', 'b'],
['coltivazione', 'noun', 'c'],
['colto', 'adjective', 'b'],
['colto', 'noun', 'b'],
['colui', 'pronoun', 'b'],
['coma', 'noun', 'b'],
['comandamento', 'noun', 'b'],
['comandante', 'pres_part', 'b'],
['comandante', 'adjective', 'b'],
['comandante', 'noun', 'b'],
['comandare', 'verb', 'b'],
['comando', 'noun', 'b'],
['combaciare', 'verb', 'c'],
['combattente', 'pres_part', 'c'],
['combattente', 'adjective', 'c'],
['combattente', 'noun', 'c'],
['combattere', 'verb', 'a'],
['combattimento', 'noun', 'b'],
['combinare', 'verb', 'b'],
['combinazione', 'noun', 'b'],
['come', 'adverb', 'a'],
['come', 'conjunction', 'a'],
['cometa', 'noun', 'c'],
['comfort', 'noun', 'c'],
['comico', 'adjective', 'b'],
['comico', 'noun', 'b'],
['cominciare', 'verb', 'a'],
['cominciare', 'noun', 'a'],
['comitato', 'noun', 'b'],
['comma', 'noun', 'b'],
['commedia', 'noun', 'b'],
['commentare', 'verb', 'a'],
['commento', 'noun', 'a'],
['commerciale', 'adjective', 'a'],
['commerciale', 'noun', 'a'],
['commerciante', 'pres_part', 'b'],
['commerciante', 'adjective', 'b'],
['commerciante', 'noun', 'b'],
['commercio', 'noun', 'b'],
['commettere', 'verb', 'a'],
['commissariato', 'noun', 'b'],
['commissario', 'noun', 'a'],
['commissione', 'noun', 'a'],
['community', 'noun', 'b'],
['commuovere', 'verb', 'b'],
['comodino', 'noun', 'c'],
['comodità', 'noun', 'c'],
['comodo', 'adjective', 'a'],
['comodo', 'noun', 'a'],
['compagnia', 'noun', 'a'],
['compagno', 'noun', 'a'],
['compagno', 'adjective', 'a'],
['comparire', 'verb', 'a'],
['comparsa', 'noun', 'b'],
['compassione', 'noun', 'c'],
['compasso', 'noun', 'c'],
['compatibile', 'adjective', 'b'],
['compatriota', 'noun', 'c'],
['compatto', 'adjective', 'b'],
['compatto', 'noun', 'b'],
['compensare', 'verb', 'b'],
['compenso', 'noun', 'b'],
['competente', 'adjective', 'b'],
['competente', 'noun', 'b'],
['competenza', 'noun', 'b'],
['competere', 'verb', 'b'],
['competizione', 'noun', 'b'],
['compiangere', 'verb', 'c'],
['compiere', 'verb', 'a'],
['compilare', 'verb', 'b'],
['compito', 'noun', 'a'],
['compleanno', 'noun', 'b'],
['complessivo', 'adjective', 'b'],
['complesso', 'noun', 'b'],
['complesso', 'adjective', 'a'],
['completamente', 'adverb', 'a'],
['completare', 'verb', 'b'],
['completo', 'adjective', 'a'],
['completo', 'noun', 'a'],
['complicare', 'verb', 'b'],
['complicato', 'past_part', 'b'],
['complicato', 'adjective', 'b'],
['complice', 'noun', 'b'],
['complice', 'adjective', 'b'],
['complimento', 'noun', 'b'],
['complotto', 'noun', 'c'],
['componente', 'pres_part', 'b'],
['componente', 'adjective', 'b'],
['componente', 'noun', 'b'],
['comporre', 'verb', 'a'],
['comportamento', 'noun', 'a'],
['comportare', 'verb', 'a'],
['composizione', 'noun', 'b'],
['composto', 'past_part', 'b'],
['composto', 'adjective', 'b'],
['composto', 'noun', 'b'],
['comprare', 'verb', 'a'],
['comprendere', 'verb', 'a'],
['comprensibile', 'adjective', 'b'],
['comprensione', 'noun', 'b'],
['comprensivo', 'adjective', 'c'],
['compreso', 'past_part', 'a'],
['compreso', 'adjective', 'a'],
['compromesso', 'noun', 'b'],
['compromettere', 'verb', 'b'],
['computer', 'noun', 'a'],
['comunale', 'adjective', 'b'],
['comunale', 'noun', 'b'],
['comune', 'adjective', 'a'],
['comune', 'noun', 'a'],
['comune', 'noun', 'a'],
['comunicare', 'verb', 'a'],
['comunicazione', 'noun', 'a'],
['comunione', 'noun', 'b'],
['comunismo', 'noun', 'b'],
['comunista', 'adjective', 'a'],
['comunista', 'noun', 'a'],
['comunità', 'noun', 'a'],
['comunque', 'adverb', 'a'],
['comunque', 'conjunction', 'a'],
['con', 'preposition', 'a'],
['conca', 'noun', 'c'],
['concedere', 'verb', 'b'],
['concentrare', 'verb', 'a'],
['concentrazione', 'noun', 'b'],
['concepire', 'verb', 'b'],
['concerto', 'noun', 'a'],
['concessione', 'noun', 'b'],
['concesso', 'past_part', 'b'],
['concesso', 'adjective', 'b'],
['concetto', 'past_part', 'a'],
['concetto', 'adjective', 'a'],
['concetto', 'noun', 'a'],
['concezione', 'noun', 'b'],
['conchiglia', 'noun', 'c'],
['concime', 'noun', 'c'],
['concludere', 'verb', 'a'],
['conclusione', 'noun', 'a'],
['concordare', 'verb', 'b'],
['concorrente', 'pres_part', 'b'],
['concorrente', 'adjective', 'b'],
['concorrente', 'noun', 'b'],
['concorrenza', 'noun', 'b'],
['concorrere', 'verb', 'b'],
['concorso', 'noun', 'b'],
['concreto', 'adjective', 'a'],
['concreto', 'noun', 'a'],
['condanna', 'noun', 'b'],
['condannare', 'verb', 'a'],
['condimento', 'noun', 'c'],
['condividere', 'verb', 'a'],
['condizionare', 'verb', 'b'],
['condizione', 'noun', 'a'],
['condoglianza', 'noun', 'c'],
['condominio', 'noun', 'b'],
['condotta', 'noun', 'b'],
['condurre', 'verb', 'a'],
['conduttore', 'adjective', 'b'],
['conduttore', 'noun', 'b'],
['conduttura', 'noun', 'c'],
['conferenza', 'noun', 'b'],
['conferire', 'verb', 'b'],
['conferma', 'noun', 'b'],
['confermare', 'verb', 'a'],
['confessare', 'verb', 'b'],
['confessione', 'noun', 'b'],
['confessore', 'noun', 'c'],
['confetto', 'noun', 'c'],
['confetto', 'adjective', 'c'],
['confettura', 'noun', 'c'],
['confezione', 'noun', 'b'],
['conficcare', 'verb', 'c'],
['confidare', 'verb', 'b'],
['confidenza', 'noun', 'b'],
['confine', 'noun', 'a'],
['conflitto', 'noun', 'b'],
['confondere', 'verb', 'a'],
['confortare', 'verb', 'c'],
['confrontare', 'verb', 'b'],
['confronto', 'noun', 'a'],
['confusione', 'noun', 'b'],
['confuso', 'past_part', 'b'],
['confuso', 'adjective', 'b'],
['congedo', 'noun', 'c'],
['congelare', 'verb', 'b'],
['congelatore', 'noun', 'c'],
['congestione', 'noun', 'c'],
['congiura', 'noun', 'c'],
['congresso', 'noun', 'b'],
['coniglio', 'noun', 'b'],
['coniugato', 'past_part', 'c'],
['coniugato', 'adjective', 'c'],
['coniugato', 'noun', 'c'],
['coniuge', 'noun', 'b'],
['connessione', 'noun', 'b'],
['connettere', 'verb', 'b'],
['cono', 'noun', 'b'],
['conoscenza', 'noun', 'a'],
['conoscere', 'verb', 'a'],
['conosciuto', 'past_part', 'b'],
['conosciuto', 'adjective', 'b'],
['conosciuto', 'noun', 'b'],
['conquista', 'noun', 'b'],
['conquistare', 'verb', 'a'],
['consapevole', 'adjective', 'b'],
['consapevolezza', 'noun', 'b'],
['consegna', 'noun', 'b'],
['consegnare', 'verb', 'a'],
['conseguente', 'pres_part', 'b'],
['conseguente', 'adjective', 'b'],
['conseguente', 'noun', 'b'],
['conseguenza', 'noun', 'a'],
['conseguire', 'verb', 'b'],
['consenso', 'noun', 'b'],
['consentire', 'verb', 'a'],
['conservare', 'verb', 'a'],
['conservazione', 'noun', 'b'],
['considerare', 'verb', 'a'],
['considerazione', 'noun', 'a'],
['consigliare', 'verb', 'a'],
['consigliere', 'noun', 'b'],
['consiglio', 'noun', 'a'],
['consistente', 'pres_part', 'b'],
['consistente', 'adjective', 'b'],
['consistenza', 'noun', 'b'],
['consistere', 'verb', 'b'],
['consolare', 'verb', 'b'],
['consonante', 'noun', 'c'],
['consorzio', 'noun', 'b'],
['constatare', 'verb', 'b'],
['consueto', 'adjective', 'b'],
['consueto', 'noun', 'b'],
['consulente', 'adjective', 'b'],
['consulente', 'noun', 'b'],
['consulenza', 'noun', 'b'],
['consultare', 'verb', 'b'],
['consumare', 'verb', 'a'],
['consumatore', 'noun', 'b'],
['consumatore', 'adjective', 'b'],
['consumazione', 'noun', 'c'],
['consumo', 'noun', 'b'],
['contachilometri', 'noun', 'c'],
['contadino', 'noun', 'b'],
['contadino', 'adjective', 'b'],
['contagiare', 'verb', 'c'],
['contagio', 'noun', 'c'],
['contagioso', 'adjective', 'c'],
['contagocce', 'noun', 'c'],
['contaminare', 'verb', 'b'],
['contante', 'pres_part', 'b'],
['contante', 'adjective', 'b'],
['contante', 'noun', 'b'],
['contare', 'verb', 'a'],
['contatore', 'noun', 'c'],
['contattare', 'verb', 'b'],
['contatto', 'noun', 'a'],
['conte', 'noun', 'b'],
['contemplare', 'verb', 'b'],
['contemporaneamente', 'adverb', 'b'],
['contemporaneo', 'adjective', 'a'],
['contemporaneo', 'noun', 'a'],
['contenere', 'verb', 'a'],
['contenitore', 'adjective', 'b'],
['contenitore', 'noun', 'b'],
['contentare', 'verb', 'b'],
['contentezza', 'noun', 'c'],
['contento', 'adjective', 'a'],
['contenuto', 'past_part', 'a'],
['contenuto', 'adjective', 'a'],
['contenuto', 'noun', 'a'],
['contestare', 'verb', 'b'],
['contestazione', 'noun', 'b'],
['contesto', 'noun', 'a'],
['continente', 'noun', 'b'],
['continuamente', 'adverb', 'b'],
['continuare', 'verb', 'a'],
['continuazione', 'noun', 'b'],
['continuità', 'noun', 'b'],
['continuo', 'adjective', 'a'],
['continuo', 'noun', 'a'],
['continuo', 'adverb', 'a'],
['conto', 'noun', 'a'],
['contorno', 'noun', 'b'],
['contrabbandiere', 'noun', 'c'],
['contrabbando', 'noun', 'c'],
['contraccambiare', 'verb', 'c'],
['contraddizione', 'noun', 'b'],
['contrario', 'adjective', 'a'],
['contrario', 'noun', 'a'],
['contrarre', 'verb', 'b'],
['contrastare', 'verb', 'b'],
['contrasto', 'noun', 'b'],
['contratto', 'noun', 'a'],
['contribuire', 'verb', 'b'],
['contributo', 'noun', 'b'],
['contro', 'preposition', 'a'],
['contro', 'adverb', 'a'],
['contro', 'noun', 'a'],
['controllare', 'verb', 'a'],
['controllo', 'noun', 'a'],
['controllore', 'noun', 'c'],
['convegno', 'noun', 'b'],
['conveniente', 'pres_part', 'b'],
['conveniente', 'adjective', 'b'],
['convenire', 'verb', 'b'],
['convenzione', 'noun', 'b'],
['conversazione', 'noun', 'a'],
['conversione', 'noun', 'b'],
['convertire', 'verb', 'b'],
['convincente', 'pres_part', 'b'],
['convincente', 'adjective', 'b'],
['convincere', 'verb', 'a'],
['convinto', 'past_part', 'b'],
['convinto', 'adjective', 'b'],
['convinzione', 'noun', 'b'],
['convivenza', 'noun', 'b'],
['convivere', 'verb', 'b'],
['convocare', 'verb', 'b'],
['convulsione', 'noun', 'c'],
['coordinamento', 'noun', 'b'],
['coordinare', 'verb', 'b'],
['coperchio', 'noun', 'c'],
['coperta', 'noun', 'b'],
['copertina', 'noun', 'b'],
['coperto', 'past_part', 'b'],
['coperto', 'adjective', 'b'],
['coperto', 'noun', 'b'],
['copertura', 'noun', 'b'],
['copia', 'noun', 'a'],
['copiare', 'verb', 'b'],
['copione', 'noun', 'b'],
['coppa', 'noun', 'b'],
['coppia', 'noun', 'a'],
['copricostume', 'noun', 'c'],
['copriletto', 'noun', 'c'],
['coprire', 'verb', 'a'],
['copyright', 'noun', 'b'],
['coraggio', 'noun', 'a'],
['coraggio', 'exclamation', 'a'],
['coraggioso', 'adjective', 'b'],
['corallo', 'noun', 'c'],
['corallo', 'adjective', 'c'],
['corazza', 'noun', 'c'],
['corazzata', 'noun', 'c'],
['corazziere', 'noun', 'c'],
['corda', 'noun', 'a'],
['coriandolo', 'noun', 'c'],
['coricare', 'verb', 'c'],
['cornacchia', 'noun', 'c'],
['cornetto', 'noun', 'c'],
['cornice', 'noun', 'b'],
['corno', 'noun', 'b'],
['cornuto', 'adjective', 'c'],
['cornuto', 'noun', 'c'],
['coro', 'noun', 'b'],
['corona', 'noun', 'b'],
['corpo', 'noun', 'a'],
['corporatura', 'noun', 'c'],
['correggere', 'verb', 'a'],
['corrente', 'pres_part', 'a'],
['corrente', 'adjective', 'a'],
['corrente', 'noun', 'a'],
['corrente', 'adverb', 'a'],
['correre', 'verb', 'a'],
['correttamente', 'adverb', 'b'],
['corretto', 'past_part', 'b'],
['corretto', 'adjective', 'b'],
['correzione', 'noun', 'c'],
['corridoio', 'noun', 'b'],
['corridore', 'adjective', 'c'],
['corridore', 'noun', 'c'],
['corriera', 'noun', 'c'],
['corriere', 'noun', 'a'],
['corrispondente', 'pres_part', 'b'],
['corrispondente', 'adjective', 'b'],
['corrispondente', 'noun', 'b'],
['corrispondenza', 'noun', 'b'],
['corrispondere', 'verb', 'a'],
['corruzione', 'noun', 'b'],
['corsa', 'noun', 'a'],
['corsia', 'noun', 'c'],
['corso', 'noun', 'a'],
['corte', 'noun', 'a'],
['corteccia', 'noun', 'c'],
['corteggiare', 'verb', 'c'],
['cortesia', 'noun', 'b'],
['cortile', 'noun', 'b'],
['corto', 'adjective', 'a'],
['corvo', 'noun', 'c'],
['cosa', 'noun', 'a'],
['coscia', 'noun', 'b'],
['cosciente', 'adjective', 'c'],
['coscienza', 'noun', 'a'],
['così', 'adverb', 'a'],
['cosiddetto', 'adjective', 'a'],
['costa', 'noun', 'a'],
['costante', 'adjective', 'b'],
['costante', 'noun', 'b'],
['costantemente', 'adverb', 'b'],
['costare', 'verb', 'a'],
['costellazione', 'noun', 'b'],
['costituire', 'verb', 'a'],
['costituzionale', 'adjective', 'b'],
['costituzione', 'noun', 'b'],
['costo', 'noun', 'a'],
['costoso', 'adjective', 'b'],
['costringere', 'verb', 'a'],
['costruire', 'verb', 'a'],
['costruttivo', 'adjective', 'b'],
['costruzione', 'noun', 'a'],
['costume', 'noun', 'a'],
['cotoletta', 'noun', 'c'],
['cotone', 'noun', 'b'],
['cottura', 'noun', 'c'],
['covare', 'verb', 'c'],
['covo', 'noun', 'c'],
['cozza', 'noun', 'c'],
['cracker', 'noun', 'c'],
['cranio', 'noun', 'b'],
['cravatta', 'noun', 'b'],
['creare', 'verb', 'a'],
['creatività', 'noun', 'b'],
['creativo', 'adjective', 'b'],
['creativo', 'noun', 'b'],
['creatura', 'noun', 'b'],
['creazione', 'noun', 'b'],
['credente', 'pres_part', 'b'],
['credente', 'adjective', 'b'],
['credente', 'noun', 'b'],
['credenza', 'noun', 'c'],
['credere', 'verb', 'a'],
['credere', 'noun', 'a'],
['credibile', 'adjective', 'b'],
['credito', 'noun', 'a'],
['creditore', 'noun', 'b'],
['credo', 'noun', 'c'],
['crema', 'noun', 'b'],
['crema', 'adjective', 'b'],
['crepaccio', 'noun', 'c'],
['crêpe', 'noun', 'c'],
['crescente', 'pres_part', 'b'],
['crescente', 'adjective', 'b'],
['crescente', 'noun', 'b'],
['crescere', 'verb', 'a'],
['crescita', 'noun', 'a'],
['cretino', 'adjective', 'b'],
['cretino', 'noun', 'b'],
['criceto', 'noun', 'c'],
['criminale', 'adjective', 'b'],
['criminale', 'noun', 'b'],
['crimine', 'noun', 'b'],
['criniera', 'noun', 'c'],
['crisantemo', 'noun', 'c'],
['crisi', 'noun', 'a'],
['cristallo', 'noun', 'b'],
['cristianesimo', 'noun', 'b'],
['cristiano', 'adjective', 'a'],
['cristiano', 'noun', 'a'],
['criterio', 'noun', 'b'],
['critica', 'noun', 'a'],
['criticare', 'verb', 'b'],
['critico', 'adjective', 'a'],
['critico', 'noun', 'a'],
['croato', 'adjective', 'c'],
['croato', 'noun', 'c'],
['croce', 'noun', 'b'],
['crocifiggere', 'verb', 'c'],
['crocifisso', 'past_part', 'c'],
['crocifisso', 'adjective', 'c'],
['crocifisso', 'noun', 'c'],
['crollare', 'verb', 'b'],
['cronaca', 'noun', 'b'],
['cronico', 'adjective', 'b'],
['cronico', 'noun', 'b'],
['cronista', 'noun', 'c'],
['crostaceo', 'noun', 'c'],
['crostino', 'noun', 'c'],
['crudele', 'adjective', 'b'],
['crudele', 'noun', 'b'],
['crudo', 'adjective', 'b'],
['crudo', 'noun', 'b'],
['cu', 'noun', 'c'],
['cubo', 'noun', 'b'],
['cubo', 'adjective', 'b'],
['cucchiaio', 'noun', 'b'],
['cuccia', 'noun', 'c'],
['cucciolo', 'noun', 'b'],
['cucina', 'noun', 'a'],
['cucinare', 'verb', 'a'],
['cucire', 'verb', 'b'],
['cucito', 'past_part', 'c'],
['cucito', 'adjective', 'c'],
['cucito', 'noun', 'c'],
['cucitura', 'noun', 'c'],
['cuffia', 'noun', 'b'],
['cugino', 'noun', 'b'],
['cui', 'pronoun', 'a'],
['cullare', 'verb', 'c'],
['culo', 'noun', 'a'],
['culto', 'noun', 'b'],
['cultura', 'noun', 'a'],
['culturale', 'adjective', 'a'],
['cumulo', 'noun', 'c'],
['cuocere', 'verb', 'b'],
['cuoco', 'noun', 'b'],
['cuore', 'noun', 'a'],
['cupo', 'adjective', 'b'],
['cupo', 'noun', 'b'],
['cura', 'noun', 'a'],
['curare', 'verb', 'a'],
['curiosare', 'verb', 'b'],
['curiosità', 'noun', 'b'],
['curioso', 'adjective', 'a'],
['curioso', 'noun', 'a'],
['curriculum', 'noun', 'b'],
['curva', 'noun', 'b'],
['curvo', 'adjective', 'b'],
['curvo', 'noun', 'b'],
['cuscino', 'noun', 'b'],
['custode', 'noun', 'b'],
['custode', 'adjective', 'b'],
['custodia', 'noun', 'b'],
['custodire', 'verb', 'b'],
['da', 'preposition', 'a'],
['dado', 'noun', 'c'],
['danese', 'adjective', 'c'],
['danese', 'noun', 'c'],
['dannato', 'past_part', 'b'],
['dannato', 'adjective', 'b'],
['dannato', 'noun', 'b'],
['danneggiare', 'verb', 'b'],
['danno', 'noun', 'a'],
['dannoso', 'adjective', 'c'],
['danza', 'noun', 'b'],
['dappertutto', 'adverb', 'b'],
['dare', 'verb', 'a'],
['dare', 'noun', 'a'],
['data', 'noun', 'a'],
['dato', 'past_part', 'a'],
['dato', 'adjective', 'a'],
['dato', 'noun', 'a'],
['dattero', 'noun', 'c'],
['davanti', 'adverb', 'a'],
['davanti', 'adjective', 'a'],
['davanti', 'noun', 'a'],
['davanzale', 'noun', 'c'],
['davvero', 'adverb', 'a'],
['dea', 'noun', 'b'],
['debito', 'noun', 'a'],
['debole', 'adjective', 'a'],
['debole', 'noun', 'a'],
['debolezza', 'noun', 'b'],
['decennio', 'noun', 'b'],
['decidere', 'verb', 'a'],
['decina', 'noun', 'a'],
['decisamente', 'adverb', 'b'],
['decisione', 'noun', 'a'],
['decisivo', 'adjective', 'b'],
['deciso', 'past_part', 'b'],
['deciso', 'adjective', 'b'],
['decorare', 'verb', 'b'],
['decorato', 'past_part', 'c'],
['decorato', 'adjective', 'c'],
['decorato', 'noun', 'c'],
['decorazione', 'noun', 'b'],
['decoroso', 'adjective', 'c'],
['decreto', 'noun', 'b'],
['dedica', 'noun', 'c'],
['dedicare', 'verb', 'a'],
['dedurre', 'verb', 'b'],
['deficiente', 'adjective', 'b'],
['deficiente', 'noun', 'b'],
['definire', 'verb', 'a'],
['definitivamente', 'adverb', 'b'],
['definitivo', 'adjective', 'a'],
['definitivo', 'noun', 'a'],
['definizione', 'noun', 'a'],
['deformare', 'verb', 'c'],
['deforme', 'adjective', 'c'],
['deforme', 'noun', 'c'],
['defunto', 'past_part', 'b'],
['defunto', 'adjective', 'b'],
['defunto', 'noun', 'b'],
['degno', 'adjective', 'b'],
['degradare', 'verb', 'b'],
['delegare', 'verb', 'b'],
['delegato', 'past_part', 'b'],
['delegato', 'adjective', 'b'],
['delegato', 'noun', 'b'],
['delegazione', 'noun', 'c'],
['delfino', 'noun', 'c'],
['delicatezza', 'noun', 'c'],
['delicato', 'adjective', 'b'],
['delicato', 'noun', 'b'],
['delinquente', 'pres_part', 'c'],
['delinquente', 'adjective', 'c'],
['delinquente', 'noun', 'c'],
['delirare', 'verb', 'c'],
['delirio', 'noun', 'b'],
['delitto', 'noun', 'b'],
['delizia', 'noun', 'c'],
['delizioso', 'adjective', 'b'],
['deludere', 'verb', 'b'],
['delusione', 'noun', 'b'],
['deluso', 'past_part', 'b'],
['deluso', 'adjective', 'b'],
['deluso', 'noun', 'b'],
['democratico', 'adjective', 'b'],
['democratico', 'noun', 'b'],
['democrazia', 'noun', 'a'],
['democristiano', 'adjective', 'c'],
['democristiano', 'noun', 'c'],
['demoralizzare', 'verb', 'c'],
['denaro', 'noun', 'a'],
['denominare', 'verb', 'b'],
['denso', 'adjective', 'b'],
['dente', 'noun', 'a'],
['dentiera', 'noun', 'c'],
['dentifricio', 'noun', 'c'],
['dentista', 'noun', 'b'],
['dentro', 'adverb', 'a'],
['dentro', 'preposition', 'a'],
['dentro', 'noun', 'a'],
['denuncia', 'noun', 'b'],
['denunciare', 'verb', 'a'],
['deodorante', 'pres_part', 'c'],
['deodorante', 'adjective', 'c'],
['deodorante', 'noun', 'c'],
['depilazione', 'noun', 'c'],
['deporre', 'verb', 'b'],
['depositare', 'verb', 'b'],
['deposito', 'noun', 'b'],
['deposizione', 'noun', 'b'],
['depressione', 'noun', 'b'],
['deprimere', 'verb', 'b'],
['depuratore', 'adjective', 'c'],
['depuratore', 'noun', 'c'],
['deputato', 'past_part', 'b'],
['deputato', 'adjective', 'b'],
['deputato', 'noun', 'b'],
['derivare', 'verb', 'a'],
['derubare', 'verb', 'c'],
['descrivere', 'verb', 'a'],
['descrizione', 'noun', 'a'],
['deserto', 'noun', 'b'],
['deserto', 'adjective', 'b'],
['desiderare', 'verb', 'a'],
['desiderio', 'noun', 'a'],
['design', 'noun', 'b'],
['dessert', 'noun', 'c'],
['destinare', 'verb', 'a'],
['destinazione', 'noun', 'b'],
['destino', 'noun', 'a'],
['destra', 'noun', 'a'],
['destro', 'adjective', 'a'],
['destro', 'noun', 'a'],
['detective', 'noun', 'b'],
['detenere', 'verb', 'b'],
['detenuto', 'past_part', 'c'],
['detenuto', 'adjective', 'c'],
['detenuto', 'noun', 'c'],
['determinare', 'verb', 'a'],
['determinato', 'past_part', 'a'],
['determinato', 'adjective', 'a'],
['determinazione', 'noun', 'b'],
['detersivo', 'adjective', 'c'],
['detersivo', 'noun', 'c'],
['dettagliato', 'past_part', 'b'],
['dettagliato', 'adjective', 'b'],
['dettaglio', 'noun', 'a'],
['dettare', 'verb', 'b'],
['dettato', 'past_part', 'c'],
['dettato', 'adjective', 'c'],
['dettato', 'noun', 'c'],
['devastare', 'verb', 'b'],
['deviare', 'verb', 'c'],
['deviazione', 'noun', 'c'],
['di', 'preposition', 'a'],
['di', 'noun', 'c'],
['diagnosi', 'noun', 'b'],
['dialetto', 'noun', 'a'],
['dialogare', 'verb', 'b'],
['dialogo', 'noun', 'a'],
['diamante', 'noun', 'a'],
['diametro', 'noun', 'b'],
['diario', 'noun', 'b'],
['diario', 'adjective', 'b'],
['diavolo', 'noun', 'a'],
['dibattito', 'noun', 'b'],
['dicembre', 'noun', 'a'],
['dichiarare', 'verb', 'a'],
['dichiarazione', 'noun', 'a'],
['diciotto', 'adjective', 'b'],
['diciotto', 'noun', 'b'],
['dieci', 'adjective', 'a'],
['dieci', 'noun', 'a'],
['diecimila', 'adjective', 'b'],
['diecimila', 'noun', 'b'],
['dieta', 'noun', 'b'],
['dietetico', 'adjective', 'c'],
['dietro', 'preposition', 'a'],
['dietro', 'adverb', 'a'],
['dietro', 'adjective', 'a'],
['dietro', 'noun', 'a'],
['difendere', 'verb', 'a'],
['difensore', 'adjective', 'b'],
['difensore', 'noun', 'b'],
['difesa', 'noun', 'a'],
['difetto', 'noun', 'b'],
['differente', 'pres_part', 'a'],
['differente', 'adjective', 'a'],
['differenza', 'noun', 'a'],
['difficile', 'adjective', 'a'],
['difficile', 'noun', 'a'],
['difficilmente', 'adverb', 'b'],
['difficoltà', 'noun', 'a'],
['diffidente', 'adjective', 'c'],
['diffidente', 'noun', 'c'],
['diffidenza', 'noun', 'c'],
['diffondere', 'verb', 'a'],
['diffusione', 'noun', 'b'],
['diffuso', 'past_part', 'b'],
['diffuso', 'adjective', 'b'],
['diga', 'noun', 'c'],
['digestione', 'noun', 'c'],
['digestivo', 'adjective', 'c'],
['digestivo', 'noun', 'c'],
['digitale', 'adjective', 'b'],
['digitale', 'noun', 'b'],
['digiunare', 'verb', 'c'],
['dignità', 'noun', 'b'],
['diluvio', 'noun', 'c'],
['dimagrante', 'pres_part', 'c'],
['dimagrante', 'adjective', 'c'],
['dimensione', 'noun', 'a'],
['dimenticare', 'verb', 'a'],
['dimettere', 'verb', 'b'],
['dimezzare', 'verb', 'c'],
['diminuire', 'verb', 'b'],
['dimostrare', 'verb', 'a'],
['dimostrazione', 'noun', 'b'],
['dinamica', 'noun', 'b'],
['dinamico', 'adjective', 'b'],
['dinosauro', 'noun', 'c'],
['dintorno', 'adverb', 'b'],
['dintorno', 'noun', 'b'],
['dio', 'noun', 'a'],
['dipartimento', 'noun', 'b'],
['dipendente', 'pres_part', 'a'],
['dipendente', 'adjective', 'a'],
['dipendente', 'noun', 'a'],
['dipendenza', 'noun', 'b'],
['dipendere', 'verb', 'a'],
['dipingere', 'verb', 'b'],
['dipinto', 'past_part', 'b'],
['dipinto', 'adjective', 'b'],
['dipinto', 'noun', 'b'],
['diploma', 'noun', 'b'],
['diplomatico', 'adjective', 'b'],
['diplomatico', 'noun', 'b'],
['dire', 'verb', 'a'],
['dire', 'noun', 'a'],
['diretta', 'noun', 'b'],
['direttamente', 'adverb', 'a'],
['diretto', 'past_part', 'a'],
['diretto', 'adjective', 'a'],
['diretto', 'noun', 'a'],
['direttore', 'noun', 'a'],
['direttore', 'adjective', 'a'],
['direttrice', 'noun', 'c'],
['direzione', 'noun', 'a'],
['dirigente', 'adjective', 'b'],
['dirigente', 'noun', 'b'],
['dirigere', 'verb', 'a'],
['diritto', 'noun', 'a'],
['disagio', 'noun', 'b'],
['disastro', 'noun', 'b'],
['disattento', 'adjective', 'c'],
['discarica', 'noun', 'b'],
['discendere', 'verb', 'b'],
['discepolo', 'noun', 'b'],
['discesa', 'noun', 'b'],
['disciplina', 'noun', 'b'],
['disco', 'noun', 'a'],
['discordia', 'noun', 'c'],
['discorso', 'noun', 'a'],
['discoteca', 'noun', 'b'],
['discreto', 'adjective', 'b'],
['discreto', 'noun', 'b'],
['discussione', 'noun', 'a'],
['discusso', 'past_part', 'b'],
['discusso', 'adjective', 'b'],
['discutere', 'verb', 'a'],
['disegnare', 'verb', 'a'],
['disegno', 'noun', 'a'],
['diseredare', 'verb', 'c'],
['disgrazia', 'noun', 'b'],
['disinfettante', 'pres_part', 'c'],
['disinfettante', 'adjective', 'c'],
['disinfettare', 'verb', 'c'],
['disinteresse', 'noun', 'c'],
['disoccupazione', 'noun', 'b'],
['disonesto', 'adjective', 'c'],
['disonesto', 'noun', 'c'],
['disordinato', 'past_part', 'c'],
['disordinato', 'adjective', 'c'],
['disordine', 'noun', 'b'],
['dispari', 'adjective', 'c'],
['dispensa', 'noun', 'c'],
['disperare', 'verb', 'b'],
['disperato', 'past_part', 'b'],
['disperato', 'adjective', 'b'],
['disperazione', 'noun', 'b'],
['disperdere', 'verb', 'b'],
['dispetto', 'noun', 'b'],
['dispettoso', 'adjective', 'c'],
['dispiacere', 'verb', 'a'],
['disponibile', 'adjective', 'a'],
['disponibile', 'noun', 'a'],
['disponibilità', 'noun', 'b'],
['disporre', 'verb', 'a'],
['dispositivo', 'adjective', 'b'],
['dispositivo', 'noun', 'b'],
['disposizione', 'noun', 'a'],
['disprezzo', 'noun', 'b'],
['dissenso', 'noun', 'c'],
['distacco', 'noun', 'b'],
['distante', 'pres_part', 'b'],
['distante', 'adjective', 'b'],
['distante', 'adverb', 'b'],
['distanza', 'noun', 'a'],
['distendere', 'verb', 'b'],
['disteso', 'past_part', 'c'],
['disteso', 'adjective', 'c'],
['disteso', 'noun', 'c'],
['distinguere', 'verb', 'a'],
['distintivo', 'adjective', 'c'],
['distintivo', 'noun', 'c'],
['distinto', 'past_part', 'b'],
['distinto', 'adjective', 'b'],
['distinto', 'noun', 'b'],
['distinzione', 'noun', 'b'],
['distrarre', 'verb', 'b'],
['distratto', 'past_part', 'c'],
['distratto', 'adjective', 'c'],
['distrazione', 'noun', 'c'],
['distretto', 'noun', 'b'],
['distribuire', 'verb', 'a'],
['distributore', 'adjective', 'b'],
['distributore', 'noun', 'b'],
['distribuzione', 'noun', 'b'],
['distruggere', 'verb', 'a'],
['distrutto', 'past_part', 'c'],
['distrutto', 'adjective', 'c'],
['distruzione', 'noun', 'b'],
['disturbare', 'verb', 'b'],
['disturbo', 'noun', 'b'],
['disubbidiente', 'pres_part', 'c'],
['disubbidiente', 'adjective', 'c'],
['disubbidienza', 'noun', 'c'],
['disubbidire', 'verb', 'c'],
['dito', 'noun', 'a'],
['ditta', 'noun', 'b'],
['dittatura', 'noun', 'b'],
['divano', 'noun', 'a'],
['divano-letto', 'noun', 'c'],
['divenire', 'verb', 'a'],
['divenire', 'noun', 'a'],
['diventare', 'verb', 'a'],
['diversamente', 'adverb', 'b'],
['diversità', 'noun', 'b'],
['diverso', 'adjective', 'a'],
['diverso', 'adjective', 'a'],
['diverso', 'pronoun', 'a'],
['divertente', 'pres_part', 'a'],
['divertente', 'adjective', 'a'],
['divertimento', 'noun', 'b'],
['divertire', 'verb', 'a'],
['divertito', 'past_part', 'b'],
['divertito', 'adjective', 'b'],
['dividere', 'verb', 'a'],
['divieto', 'noun', 'b'],
['divinità', 'noun', 'b'],
['divino', 'adjective', 'b'],
['divino', 'noun', 'b'],
['divisa', 'noun', 'b'],
['divisione', 'noun', 'b'],
['divorare', 'verb', 'b'],
['divorziare', 'verb', 'c'],
['divorzio', 'noun', 'b'],
['dizionario', 'noun', 'b'],
['do', 'noun', 'c'],
['doccia', 'noun', 'b'],
['docciaschiuma', 'noun', 'c'],
['docente', 'pres_part', 'b'],
['docente', 'adjective', 'b'],
['docente', 'noun', 'b'],
['docile', 'adjective', 'c'],
['documentare', 'verb', 'b'],
['documentario', 'adjective', 'b'],
['documentario', 'noun', 'b'],
['documentazione', 'noun', 'b'],
['documento', 'noun', 'a'],
['dodici', 'adjective', 'a'],
['dodici', 'noun', 'a'],
['dogana', 'noun', 'c'],
['dolce', 'adjective', 'a'],
['dolce', 'noun', 'a'],
['dolce', 'adverb', 'a'],
['dolcezza', 'noun', 'b'],
['dolcificante', 'pres_part', 'c'],
['dolcificante', 'adjective', 'c'],
['dolcificante', 'noun', 'c'],
['dolciume', 'noun', 'c'],
['dolere', 'verb', 'c'],
['dolersi', 'verb', 'c'],
['dollaro', 'noun', 'a'],
['dolore', 'noun', 'a'],
['doloroso', 'adjective', 'b'],
['domanda', 'noun', 'a'],
['domandare', 'verb', 'a'],
['domani', 'adverb', 'a'],
['domani', 'noun', 'a'],
['domenica', 'noun', 'a'],
['domestica', 'noun', 'c'],
['domestico', 'adjective', 'b'],
['domestico', 'noun', 'b'],
['dominante', 'pres_part', 'b'],
['dominante', 'adjective', 'b'],
['dominante', 'noun', 'b'],
['dominare', 'verb', 'b'],
['dominio', 'noun', 'b'],
['don', 'noun', 'a'],
['donare', 'verb', 'b'],
['dondolare', 'verb', 'c'],
['donna', 'noun', 'a'],
['dono', 'noun', 'b'],
['dopo', 'adverb', 'a'],
['dopo', 'preposition', 'a'],
['dopo', 'conjunction', 'a'],
['dopo', 'adjective', 'a'],
['dopo', 'noun', 'a'],
['dopobarba', 'noun', 'c'],
['doppio', 'adjective', 'a'],
['doppio', 'noun', 'a'],
['doppio', 'adverb', 'a'],
['doppione', 'noun', 'c'],
['dorato', 'past_part', 'b'],
['dorato', 'adjective', 'b'],
['dorato', 'noun', 'b'],
['dormiglione', 'adjective', 'c'],
['dormiglione', 'noun', 'c'],
['dormire', 'verb', 'a'],
['dorso', 'noun', 'b'],
['dose', 'noun', 'b'],
['dotare', 'verb', 'b'],
['dotato', 'past_part', 'b'],
['dotato', 'adjective', 'b'],
['dote', 'noun', 'b'],
['dottore', 'noun', 'a'],
['dottoressa', 'noun', 'b'],
['dottrina', 'noun', 'b'],
['dove', 'adverb', 'a'],
['dove', 'conjunction', 'a'],
['dove', 'noun', 'a'],
['dovere', 'verb', 'a'],
['dovere', 'noun', 'a'],
['dovuto', 'past_part', 'b'],
['dovuto', 'adjective', 'b'],
['dovuto', 'noun', 'b'],
['dozzina', 'noun', 'b'],
['drago', 'noun', 'b'],
['dramma', 'noun', 'b'],
['drammatico', 'adjective', 'b'],
['dritto', 'adjective', 'b'],
['dritto', 'adverb', 'b'],
['dritto', 'noun', 'b'],
['drizzare', 'verb', 'c'],
['droga', 'noun', 'a'],
['drogare', 'verb', 'b'],
['drogato', 'past_part', 'c'],
['drogato', 'adjective', 'c'],
['drogato', 'noun', 'c'],
['dubbio', 'noun', 'a'],
['dubbio', 'adjective', 'b'],
['dubitare', 'verb', 'b'],
['dublinese', 'adjective', 'c'],
['dublinese', 'noun', 'c'],
['due', 'adjective', 'a'],
['due', 'noun', 'a'],
['duecento', 'adjective', 'b'],
['duecento', 'noun', 'b'],
['duello', 'noun', 'b'],
['duemila', 'adjective', 'b'],
['duemila', 'noun', 'b'],
['dunque', 'conjunction', 'a'],
['dunque', 'noun', 'a'],
['duomo', 'noun', 'c'],
['durante', 'pres_part', 'a'],
['durante', 'preposition', 'a'],
['durante', 'noun', 'a'],
['durare', 'verb', 'a'],
['durata', 'noun', 'a'],
['duro', 'adjective', 'a'],
['duro', 'noun', 'a'],
['duro', 'adverb', 'a'],
['e', 'noun', 'c'],
['e', 'conjunction', 'a'],
['ebbene', 'conjunction', 'b'],
['ebraico', 'adjective', 'b'],
['ebraico', 'noun', 'b'],
['ebreo', 'adjective', 'a'],
['ebreo', 'noun', 'a'],
['eccellente', 'pres_part', 'b'],
['eccellente', 'adjective', 'b'],
['eccellenza', 'noun', 'b'],
['eccessivo', 'adjective', 'b'],
['eccesso', 'noun', 'b'],
['eccetera', 'adverb', 'b'],
['eccezionale', 'adjective', 'b'],
['eccezione', 'noun', 'b'],
['eccitare', 'verb', 'b'],
['ecco', 'adverb', 'a'],
['eco', 'noun', 'b'],
['ecologico', 'adjective', 'b'],
['economia', 'noun', 'a'],
['economico', 'adjective', 'a'],
['economico', 'noun', 'a'],
['economista', 'noun', 'b'],
['edicola', 'noun', 'a'],
['edificio', 'noun', 'a'],
['editore', 'noun', 'a'],
['editore', 'adjective', 'a'],
['editoriale', 'adjective', 'b'],
['editoriale', 'noun', 'b'],
['edizione', 'noun', 'a'],
['educare', 'verb', 'b'],
['educativo', 'adjective', 'b'],
['educato', 'past_part', 'c'],
['educato', 'adjective', 'c'],
['educazione', 'noun', 'a'],
['effe', 'noun', 'c'],
['effettivamente', 'adverb', 'a'],
['effettivo', 'adjective', 'b'],
['effettivo', 'noun', 'b'],
['effetto', 'noun', 'a'],
['effettuare', 'verb', 'a'],
['efficace', 'adjective', 'b'],
['efficacia', 'noun', 'b'],
['efficiente', 'adjective', 'b'],
['efficienza', 'noun', 'b'],
['egiziano', 'adjective', 'c'],
['egiziano', 'noun', 'c'],
['egli', 'pronoun', 'a'],
['elaborare', 'verb', 'b'],
['elaborazione', 'noun', 'b'],
['elastico', 'adjective', 'b'],
['elastico', 'noun', 'b'],
['elegante', 'adjective', 'a'],
['eleganza', 'noun', 'b'],
['eleggere', 'verb', 'b'],
['elementare', 'adjective', 'a'],
['elemento', 'noun', 'a'],
['elemosina', 'noun', 'c'],
['elencare', 'verb', 'b'],
['elenco', 'noun', 'a'],
['elettorale', 'adjective', 'b'],
['elettore', 'noun', 'b'],
['elettricista', 'noun', 'c'],
['elettricità', 'noun', 'c'],
['elettrico', 'adjective', 'a'],
['elettrico', 'noun', 'a'],
['elettrodomestico', 'noun', 'c'],
['elettromagnetico', 'adjective', 'b'],
['elettrone', 'noun', 'b'],
['elettronico', 'adjective', 'a'],
['elevare', 'verb', 'b'],
['elevato', 'past_part', 'b'],
['elevato', 'adjective', 'b'],
['elezione', 'noun', 'b'],
['elica', 'noun', 'c'],
['elicottero', 'noun', 'c'],
['eliminare', 'verb', 'a'],
['eliminazione', 'noun', 'b'],
['elle', 'noun', 'c'],
['elmo', 'noun', 'c'],
['e-mail', 'noun', 'a'],
['emanare', 'verb', 'b'],
['emergenza', 'noun', 'b'],
['emergere', 'verb', 'a'],
['emettere', 'verb', 'b'],
['emigrazione', 'noun', 'c'],
['emiliano', 'adjective', 'c'],
['emiliano', 'noun', 'c'],
['emissione', 'noun', 'b'],
['emme', 'noun', 'c'],
['emmenthal', 'noun', 'c'],
['emo', 'noun', 'b'],
['emotivo', 'adjective', 'b'],
['emotivo', 'noun', 'b'],
['emozionante', 'pres_part', 'c'],
['emozionante', 'adjective', 'c'],
['emozionare', 'verb', 'b'],
['emozionato', 'past_part', 'c'],
['emozionato', 'adjective', 'c'],
['emozione', 'noun', 'a'],
['enciclopedia', 'noun', 'c'],
['energetico', 'adjective', 'b'],
['energetico', 'noun', 'b'],
['energia', 'noun', 'a'],
['enne', 'noun', 'c'],
['ennesimo', 'adjective', 'b'],
['enorme', 'adjective', 'a'],
['ente', 'noun', 'a'],
['entità', 'noun', 'b'],
['entrambi', 'pronoun', 'a'],
['entrambi', 'adjective', 'a'],
['entrare', 'verb', 'a'],
['entrare', 'noun', 'a'],
['entrata', 'noun', 'a'],
['entro', 'preposition', 'a'],
['entro', 'adverb', 'a'],
['entusiasmo', 'noun', 'b'],
['entusiasta', 'adjective', 'b'],
['entusiasta', 'noun', 'b'],
['epifania', 'noun', 'c'],
['episodio', 'noun', 'a'],
['epoca', 'noun', 'a'],
['eppure', 'conjunction', 'a'],
['equazione', 'noun', 'b'],
['equilibrio', 'noun', 'a'],
['equino', 'adjective', 'c'],
['equino', 'noun', 'c'],
['equipaggio', 'noun', 'c'],
['equivalere', 'verb', 'b'],
['equivoco', 'adjective', 'b'],
['equivoco', 'noun', 'b'],
['era', 'noun', 'a'],
['erba', 'noun', 'b'],
['erede', 'noun', 'b'],
['eredità', 'noun', 'b'],
['ereditare', 'verb', 'b'],
['ergastolo', 'noun', 'c'],
['ergere', 'verb', 'b'],
['ernia', 'noun', 'c'],
['eroe', 'noun', 'a'],
['eroina', 'noun', 'c'],
['erotico', 'adjective', 'b'],
['erotico', 'noun', 'b'],
['errare', 'verb', 'b'],
['erre', 'noun', 'c'],
['errore', 'noun', 'a'],
['esagerare', 'verb', 'b'],
['esagerato', 'past_part', 'b'],
['esagerato', 'adjective', 'b'],
['esagerato', 'noun', 'b'],
['esagerazione', 'noun', 'c'],
['esagono', 'noun', 'c'],
['esagono', 'adjective', 'c'],
['esaltare', 'verb', 'b'],
['esaltazione', 'noun', 'c'],
['esame', 'noun', 'a'],
['esaminare', 'verb', 'b'],
['esattamente', 'adverb', 'a'],
['esatto', 'adjective', 'a'],
['esatto', 'adverb', 'a'],
['esaurire', 'verb', 'b'],
['esca', 'noun', 'c'],
['eschimese', 'adjective', 'c'],
['eschimese', 'noun', 'c'],
['esclamare', 'verb', 'b'],
['esclamazione', 'noun', 'c'],
['escludere', 'verb', 'a'],
['esclusione', 'noun', 'b'],
['esclusivamente', 'adverb', 'b'],
['esclusivo', 'adjective', 'b'],
['escluso', 'past_part', 'b'],
['escluso', 'adjective', 'b'],
['escluso', 'noun', 'b'],
['esecutivo', 'adjective', 'b'],
['esecutivo', 'noun', 'b'],
['esecuzione', 'noun', 'b'],
['eseguire', 'verb', 'a'],
['esempio', 'noun', 'a'],
['esemplare', 'noun', 'b'],
['esemplare', 'adjective', 'b'],
['esercitare', 'verb', 'b'],
['esercito', 'noun', 'a'],
['esercizio', 'noun', 'a'],
['esibire', 'verb', 'b'],
['esigenza', 'noun', 'a'],
['esigere', 'verb', 'b'],
['esilio', 'noun', 'c'],
['esistente', 'pres_part', 'b'],
['esistente', 'adjective', 'b'],
['esistente', 'noun', 'b'],
['esistenza', 'noun', 'a'],
['esistere', 'verb', 'a'],
['esitare', 'verb', 'b'],
['esito', 'noun', 'b'],
['esordio', 'noun', 'b'],
['espansione', 'noun', 'b'],
['espellere', 'verb', 'b'],
['esperienza', 'noun', 'a'],
['esperimento', 'noun', 'b'],
['esperto', 'past_part', 'a'],
['esperto', 'adjective', 'a'],
['esperto', 'noun', 'a'],
['esplicito', 'adjective', 'b'],
['esplodere', 'verb', 'b'],
['esplorare', 'verb', 'b'],
['esplosione', 'noun', 'b'],
['esplosivo', 'adjective', 'b'],
['esplosivo', 'noun', 'b'],
['esponente', 'pres_part', 'b'],
['esponente', 'noun', 'b'],
['esporre', 'verb', 'a'],
['esposizione', 'noun', 'b'],
['espressione', 'noun', 'a'],
['espresso', 'past_part', 'c'],
['espresso', 'adjective', 'c'],
['espresso', 'noun', 'c'],
['esprimere', 'verb', 'a'],
['essa', 'pronoun', 'a'],
['esse', 'noun', 'c'],
['esse', 'pronoun', 'b'],
['essenza', 'noun', 'b'],
['essenziale', 'adjective', 'b'],
['essenziale', 'noun', 'b'],
['essenzialmente', 'adverb', 'b'],
['essere', 'verb', 'a'],
['essere', 'noun', 'a'],
['essi', 'pronoun', 'a'],
['esso', 'pronoun', 'a'],
['est', 'noun', 'b'],
['est', 'adjective', 'b'],
['estate', 'noun', 'a'],
['estendere', 'verb', 'b'],
['estensione', 'noun', 'b'],
['esterno', 'adjective', 'a'],
['esterno', 'noun', 'a'],
['estero', 'adjective', 'a'],
['estero', 'noun', 'a'],
['estetico', 'adjective', 'b'],
['estivo', 'adjective', 'b'],
['estone', 'adjective', 'c'],
['estone', 'noun', 'c'],
['estraneo', 'adjective', 'b'],
['estraneo', 'noun', 'b'],
['estrarre', 'verb', 'b'],
['estratto', 'past_part', 'b'],
['estratto', 'adjective', 'b'],
['estratto', 'noun', 'b'],
['estrazione', 'noun', 'b'],
['estremamente', 'adverb', 'b'],
['estremità', 'noun', 'b'],
['estremo', 'adjective', 'a'],
['estremo', 'noun', 'a'],
['età', 'noun', 'a'],
['eterno', 'adjective', 'b'],
['eterno', 'noun', 'b'],
['etica', 'noun', 'b'],
['etichetta', 'noun', 'b'],
['etico', 'adjective', 'b'],
['ettaro', 'noun', 'c'],
['etto', 'noun', 'c'],
['euro', 'noun', 'a'],
['europeo', 'adjective', 'a'],
['europeo', 'noun', 'a'],
['evadere', 'verb', 'c'],
['evaporare', 'verb', 'c'],
['evasione', 'noun', 'b'],
['evento', 'noun', 'a'],
['eventuale', 'adjective', 'a'],
['eventualmente', 'adverb', 'b'],
['evidente', 'adjective', 'a'],
['evidentemente', 'adverb', 'a'],
['evidenza', 'noun', 'b'],
['evidenziare', 'verb', 'b'],
['evidenziatore', 'adjective', 'c'],
['evidenziatore', 'noun', 'c'],
['evitare', 'verb', 'a'],
['evocare', 'verb', 'b'],
['evoluzione', 'noun', 'b'],
['ex', 'adjective', 'a'],
['ex', 'noun', 'a'],
['ex', 'preposition', 'a'],
['extra', 'adjective', 'b'],
['extra', 'noun', 'b'],
['fa', 'adverb', 'a'],
['fabbrica', 'noun', 'a'],
['fabbricare', 'verb', 'b'],
['fabbro', 'noun', 'c'],
['faccenda', 'noun', 'b'],
['faccia', 'noun', 'a'],
['facciata', 'noun', 'b'],
['facile', 'adjective', 'a'],
['facile', 'adverb', 'a'],
['facilità', 'noun', 'b'],
['facilitare', 'verb', 'b'],
['facilitazione', 'noun', 'c'],
['facilmente', 'adverb', 'a'],
['facoltà', 'noun', 'b'],
['fagiano', 'noun', 'c'],
['falco', 'noun', 'c'],
['falegname', 'noun', 'c'],
['fallimento', 'noun', 'b'],
['fallire', 'verb', 'b'],
['fallito', 'past_part', 'b'],
['fallito', 'adjective', 'b'],
['fallito', 'noun', 'b'],
['falso', 'adjective', 'a'],
['falso', 'adverb', 'a'],
['falso', 'noun', 'a'],
['fama', 'noun', 'b'],
['fame', 'noun', 'a'],
['famiglia', 'noun', 'a'],
['familiare', 'adjective', 'a'],
['familiare', 'noun', 'a'],
['famoso', 'adjective', 'a'],
['fan', 'noun', 'b'],
['fanale', 'noun', 'c'],
['fanciulla', 'noun', 'b'],
['fanciullo', 'adjective', 'c'],
['fanciullo', 'noun', 'c'],
['fango', 'noun', 'b'],
['fangoso', 'adjective', 'c'],
['fantascienza', 'noun', 'b'],
['fantasia', 'noun', 'a'],
['fantasma', 'noun', 'b'],
['fantastico', 'adjective', 'a'],
['fantastico', 'noun', 'a'],
['fanteria', 'noun', 'c'],
['fantino', 'noun', 'c'],
['fantoccio', 'noun', 'c'],
['fare', 'verb', 'a'],
['fare', 'noun', 'a'],
['farfalla', 'noun', 'b'],
['farina', 'noun', 'b'],
['farmacia', 'noun', 'b'],
['farmaco', 'noun', 'b'],
['faro', 'noun', 'c'],
['fascia', 'noun', 'a'],
['fasciatoio', 'noun', 'c'],
['fascicolo', 'noun', 'b'],
['fascino', 'noun', 'b'],
['fascio', 'noun', 'b'],
['fascismo', 'noun', 'b'],
['fascista', 'adjective', 'b'],
['fascista', 'noun', 'b'],
['fase', 'noun', 'a'],
['fastidio', 'noun', 'a'],
['fastidioso', 'adjective', 'b'],
['fata', 'noun', 'b'],
['fatica', 'noun', 'a'],
['faticare', 'verb', 'b'],
['faticoso', 'adjective', 'b'],
['fatto', 'noun', 'a'],
['fattore', 'noun', 'a'],
['fattoria', 'noun', 'b'],
['fattura', 'noun', 'b'],
['fatturato', 'past_part', 'b'],
['fatturato', 'adjective', 'b'],
['fatturato', 'noun', 'b'],
['fauna', 'noun', 'c'],
['fava', 'noun', 'c'],
['favola', 'noun', 'b'],
['favoloso', 'adjective', 'b'],
['favore', 'noun', 'a'],
['favorevole', 'adjective', 'b'],
['favorire', 'verb', 'b'],
['fax', 'noun', 'b'],
['fazzoletto', 'noun', 'b'],
['febbraio', 'noun', 'a'],
['febbre', 'noun', 'b'],
['fecondare', 'verb', 'c'],
['fede', 'noun', 'a'],
['fedele', 'adjective', 'b'],
['fedele', 'noun', 'b'],
['fedeltà', 'noun', 'b'],
['federa', 'noun', 'c'],
['federale', 'adjective', 'b'],
['federale', 'noun', 'b'],
['fegato', 'noun', 'b'],
['felice', 'adjective', 'a'],
['felicità', 'noun', 'b'],
['felino', 'noun', 'c'],
['felino', 'adjective', 'c'],
['felpa', 'noun', 'c'],
['femmina', 'noun', 'a'],
['femminile', 'adjective', 'a'],
['femminile', 'noun', 'a'],
['fenomeno', 'noun', 'a'],
['feria', 'noun', 'b'],
['feriale', 'adjective', 'c'],
['ferie', 'noun', 'c'],
['ferire', 'verb', 'b'],
['ferita', 'noun', 'a'],
['ferito', 'past_part', 'b'],
['ferito', 'adjective', 'b'],
['ferito', 'noun', 'b'],
['fermaglio', 'noun', 'c'],
['fermare', 'verb', 'a'],
['fermo', 'adjective', 'a'],
['feroce', 'adjective', 'b'],
['ferragosto', 'noun', 'c'],
['ferramenta', 'noun', 'c'],
['ferro', 'noun', 'a'],
['ferrovia', 'noun', 'b'],
['ferroviario', 'adjective', 'b'],
['ferroviere', 'noun', 'c'],
['fertilizzante', 'pres_part', 'c'],
['fertilizzante', 'adjective', 'c'],
['fertilizzante', 'noun', 'c'],
['fessura', 'noun', 'c'],
['festa', 'noun', 'a'],
['festeggiare', 'verb', 'a'],
['festival', 'noun', 'b'],
['festivo', 'adjective', 'c'],
['fetta', 'noun', 'b'],
['fiaba', 'noun', 'b'],
['fiala', 'noun', 'c'],
['fiamma', 'noun', 'b'],
['fiammifero', 'noun', 'c'],
['fiammifero', 'adjective', 'c'],
['fianco', 'noun', 'a'],
['fiatare', 'verb', 'c'],
['fiato', 'noun', 'b'],
['fibbia', 'noun', 'c'],
['fibra', 'noun', 'b'],
['ficcare', 'verb', 'b'],
['fiction', 'noun', 'b'],
['fidanzamento', 'noun', 'c'],
['fidanzarsi', 'verb', 'b'],
['fidanzata', 'noun', 'b'],
['fidanzato', 'past_part', 'b'],
['fidanzato', 'adjective', 'b'],
['fidanzato', 'noun', 'b'],
['fidarsi', 'verb', 'a'],
['fiducia', 'noun', 'a'],
['fiducioso', 'adjective', 'c'],
['fieno', 'noun', 'c'],
['fiera', 'noun', 'b'],
['fiero', 'adjective', 'b'],
['figlia', 'noun', 'a'],
['figliastro', 'noun', 'c'],
['figlio', 'noun', 'a'],
['figura', 'noun', 'a'],
['figurare', 'verb', 'a'],
['figurina', 'noun', 'c'],
['fila', 'noun', 'a'],
['filante', 'pres_part', 'c'],
['filante', 'adjective', 'c'],
['filante', 'noun', 'c'],
['filare', 'verb', 'b'],
['filastrocca', 'noun', 'c'],
['file', 'noun', 'a'],
['filetto', 'noun', 'c'],
['film', 'noun', 'a'],
['filmato', 'past_part', 'b'],
['filmato', 'adjective', 'b'],
['filmato', 'noun', 'b'],
['filo', 'noun', 'a'],
['filosofia', 'noun', 'a'],
['filosofico', 'adjective', 'b'],
['filosofo', 'noun', 'b'],
['filtrare', 'verb', 'b'],
['filtro', 'noun', 'b'],
['finale', 'adjective', 'a'],
['finale', 'noun', 'a'],
['finalità', 'noun', 'b'],
['finalmente', 'adverb', 'a'],
['finanza', 'noun', 'b'],
['finanziamento', 'noun', 'b'],
['finanziare', 'verb', 'b'],
['finanziario', 'adjective', 'a'],
['finanziatore', 'adjective', 'c'],
['finanziatore', 'noun', 'c'],
['finché', 'conjunction', 'a'],
['fine', 'noun', 'a'],
['fine', 'adjective', 'b'],
['finestra', 'noun', 'a'],
['finestrino', 'noun', 'b'],
['fingere', 'verb', 'a'],
['finimondo', 'noun', 'c'],
['finire', 'verb', 'a'],
['finire', 'noun', 'a'],
['finito', 'past_part', 'b'],
['finito', 'adjective', 'b'],
['finlandese', 'adjective', 'c'],
['finlandese', 'noun', 'c'],
['fino', 'preposition', 'a'],
['fino', 'adverb', 'a'],
['finocchio', 'noun', 'c'],
['finora', 'adverb', 'b'],
['finta', 'noun', 'b'],
['finto', 'past_part', 'a'],
['finto', 'adjective', 'a'],
['fiocco', 'noun', 'c'],
['fionda', 'noun', 'c'],
['fioraio', 'noun', 'c'],
['fiore', 'noun', 'a'],
['fiorentino', 'adjective', 'b'],
['fiorentino', 'noun', 'b'],
['fiorito', 'past_part', 'c'],
['fiorito', 'adjective', 'c'],
['firma', 'noun', 'a'],
['firmare', 'verb', 'a'],
['fiscale', 'adjective', 'b'],
['fiscale', 'noun', 'b'],
['fisicamente', 'adverb', 'b'],
['fisico', 'adjective', 'a'],
['fisico', 'noun', 'a'],
['fissare', 'verb', 'a'],
['fisso', 'adjective', 'a'],
['fisso', 'adverb', 'a'],
['fisso', 'noun', 'a'],
['fitto', 'past_part', 'b'],
['fitto', 'adjective', 'b'],
['fitto', 'adverb', 'b'],
['fitto', 'noun', 'b'],
['fiume', 'noun', 'a'],
['fiuto', 'noun', 'c'],
['flash', 'noun', 'b'],
['flauto', 'noun', 'c'],
['flessibile', 'adjective', 'b'],
['flessibile', 'noun', 'b'],
['flora', 'noun', 'c'],
['fluido', 'adjective', 'b'],
['fluido', 'noun', 'b'],
['fluoro', 'noun', 'c'],
['flusso', 'noun', 'b'],
['foca', 'noun', 'c'],
['focaccia', 'noun', 'c'],
['fodera', 'noun', 'c'],
['foderare', 'verb', 'c'],
['foglia', 'noun', 'b'],
['foglio', 'noun', 'a'],
['fogna', 'noun', 'c'],
['folla', 'noun', 'b'],
['folle', 'adjective', 'b'],
['folle', 'noun', 'b'],
['follia', 'noun', 'b'],
['fondamentale', 'adjective', 'a'],
['fondamentale', 'noun', 'a'],
['fondamentalmente', 'adverb', 'b'],
['fondamento', 'noun', 'b'],
['fondare', 'verb', 'a'],
['fondatore', 'noun', 'b'],
['fondazione', 'noun', 'b'],
['fondere', 'verb', 'b'],
['fondo', 'adjective', 'loc-comando'],
['fondo', 'noun', 'loc-comando'],
['fondo', 'adverb', 'loc-comando'],
['fontana', 'noun', 'b'],
['fontanella', 'noun', 'c'],
['fonte', 'noun', 'a'],
['forare', 'verb', 'b'],
['forbice', 'noun', 'c'],
['forchetta', 'noun', 'c'],
['forcina', 'noun', 'c'],
['foresta', 'noun', 'b'],
['forestale', 'adjective', 'c'],
['forestale', 'noun', 'c'],
['forfora', 'noun', 'c'],
['forma', 'noun', 'a'],
['formaggino', 'noun', 'c'],
['formaggio', 'noun', 'b'],
['formale', 'adjective', 'b'],
['formare', 'verb', 'a'],
['formato', 'past_part', 'b'],
['formato', 'adjective', 'b'],
['formato', 'noun', 'b'],
['formazione', 'noun', 'a'],
['formula', 'noun', 'a'],
['formulare', 'verb', 'b'],
['fornace', 'noun', 'c'],
['fornaio', 'noun', 'c'],
['fornello', 'noun', 'b'],
['fornire', 'verb', 'a'],
['fornitore', 'adjective', 'b'],
['fornitore', 'noun', 'b'],
['forno', 'noun', 'b'],
['foro', 'noun', 'b'],
['forse', 'adverb', 'a'],
['forse', 'noun', 'a'],
['forte', 'adjective', 'a'],
['forte', 'adverb', 'a'],
['forte', 'noun', 'a'],
['fortemente', 'adverb', 'b'],
['fortuna', 'noun', 'a'],
['fortunatamente', 'adverb', 'b'],
['fortunato', 'adjective', 'b'],
['forum', 'noun', 'b'],
['forza', 'noun', 'a'],
['forzare', 'verb', 'b'],
['fosforescente', 'adjective', 'c'],
['fossa', 'noun', 'b'],
['fossetta', 'noun', 'c'],
['fosso', 'noun', 'c'],
['foto', 'noun', 'a'],
['fotografare', 'verb', 'b'],
['fotografia', 'noun', 'a'],
['fotografico', 'adjective', 'b'],
['fotografo', 'noun', 'b'],
['fottere', 'verb', 'b'],
['foulard', 'noun', 'c'],
['fra', 'preposition', 'a'],
['fracasso', 'noun', 'c'],
['fragile', 'adjective', 'b'],
['frammento', 'noun', 'b'],
['francamente', 'adverb', 'b'],
['francese', 'adjective', 'a'],
['francese', 'noun', 'a'],
['francobollo', 'noun', 'c'],
['frangia', 'noun', 'c'],
['frase', 'noun', 'a'],
['fratello', 'noun', 'a'],
['frazione', 'noun', 'b'],
['freccia', 'noun', 'b'],
['freddezza', 'noun', 'c'],
['freddo', 'adjective', 'a'],
['freddo', 'noun', 'a'],
['fregare', 'verb', 'a'],
['frenare', 'verb', 'b'],
['frenetico', 'adjective', 'b'],
['freno', 'noun', 'b'],
['frequentare', 'verb', 'a'],
['frequente', 'adjective', 'b'],
['frequenza', 'noun', 'b'],
['fresco', 'adjective', 'a'],
['fresco', 'noun', 'a'],
['fretta', 'noun', 'a'],
['frigo', 'noun', 'b'],
['frigorifero', 'adjective', 'b'],
['frigorifero', 'noun', 'b'],
['fringuello', 'noun', 'c'],
['frittata', 'noun', 'c'],
['fritto', 'past_part', 'c'],
['fritto', 'adjective', 'c'],
['fritto', 'noun', 'c'],
['friulano', 'adjective', 'c'],
['friulano', 'noun', 'c'],
['fronte', 'noun', 'a'],
['frontiera', 'noun', 'b'],
['frugare', 'verb', 'b'],
['frumento', 'noun', 'c'],
['fruscio', 'noun', 'c'],
['frusta', 'noun', 'c'],
['frutta', 'noun', 'b'],
['fruttivendolo', 'noun', 'c'],
['frutto', 'noun', 'a'],
['fucile', 'noun', 'b'],
['fuga', 'noun', 'a'],
['fuggire', 'verb', 'a'],
['fulmine', 'noun', 'b'],
['fumare', 'verb', 'a'],
['fumetto', 'noun', 'b'],
['fumo', 'noun', 'a'],
['fumo', 'adjective', 'a'],
['fune', 'noun', 'c'],
['funerale', 'noun', 'b'],
['funerale', 'adjective', 'b'],
['fungo', 'noun', 'b'],
['funzionale', 'adjective', 'b'],
['funzionale', 'noun', 'b'],
['funzionamento', 'noun', 'b'],
['funzionare', 'verb', 'a'],
['funzionario', 'noun', 'b'],
['funzione', 'noun', 'a'],
['fuoco', 'noun', 'loc-comando'],
['fuori', 'adverb', 'a'],
['fuori', 'preposition', 'a'],
['fuori', 'noun', 'a'],
['fuori', 'adjective', 'a'],
['furbo', 'adjective', 'b'],
['furbo', 'noun', 'b'],
['furfante', 'noun', 'c'],
['furgone', 'noun', 'b'],
['furia', 'noun', 'b'],
['furioso', 'adjective', 'b'],
['furto', 'noun', 'b'],
['fusione', 'noun', 'b'],
['fuso', 'past_part', 'b'],
['fuso', 'adjective', 'b'],
['fuso', 'noun', 'b'],
['futuro', 'adjective', 'a'],
['futuro', 'noun', 'a'],
['gabbia', 'noun', 'b'],
['galassia', 'noun', 'b'],
['galeotto', 'noun', 'c'],
['galera', 'noun', 'b'],
['galleggiare', 'verb', 'c'],
['galleria', 'noun', 'b'],
['gallese', 'adjective', 'c'],
['gallese', 'noun', 'c'],
['galletta', 'noun', 'c'],
['gallina', 'noun', 'b'],
['gallo', 'noun', 'c'],
['gamba', 'noun', 'a'],
['gambero', 'noun', 'c'],
['gambo', 'noun', 'c'],
['ganascia', 'noun', 'c'],
['gancio', 'noun', 'c'],
['gara', 'noun', 'a'],
['garage', 'noun', 'b'],
['garantire', 'verb', 'a'],
['garanzia', 'noun', 'b'],
['garbo', 'noun', 'c'],
['gargarismo', 'noun', 'c'],
['garofano', 'noun', 'c'],
['garza', 'noun', 'c'],
['gas', 'noun', 'a'],
['gasolio', 'noun', 'c'],
['gassosa', 'noun', 'c'],
['gastronomia', 'noun', 'c'],
['gatto', 'noun', 'a'],
['gavetta', 'noun', 'c'],
['gay', 'adjective', 'b'],
['gay', 'noun', 'b'],
['gazza', 'noun', 'c'],
['gelateria', 'noun', 'c'],
['gelatina', 'noun', 'c'],
['gelato', 'past_part', 'b'],
['gelato', 'adjective', 'b'],
['gelato', 'noun', 'b'],
['gelido', 'adjective', 'b'],
['gelo', 'noun', 'c'],
['gelosia', 'noun', 'b'],
['geloso', 'adjective', 'b'],
['gelsomino', 'noun', 'c'],
['gemello', 'adjective', 'b'],
['gemello', 'noun', 'b'],
['gemma', 'noun', 'c'],
['gene', 'noun', 'b'],
['generale', 'adjective', 'a'],
['generale', 'noun', 'a'],
['generalmente', 'adverb', 'b'],
['generare', 'verb', 'a'],
['generazione', 'noun', 'a'],
['genere', 'noun', 'a'],
['generico', 'adjective', 'b'],
['generico', 'noun', 'b'],
['generosità', 'noun', 'c'],
['generoso', 'adjective', 'b'],
['genetico', 'adjective', 'b'],
['gengiva', 'noun', 'c'],
['geniale', 'adjective', 'b'],
['genio', 'noun', 'b'],
['genitore', 'noun', 'a'],
['gennaio', 'noun', 'a'],
['genovese', 'adjective', 'c'],
['genovese', 'noun', 'c'],
['gente', 'noun', 'a'],
['gentile', 'adjective', 'a'],
['gentile', 'noun', 'a'],
['genuino', 'adjective', 'c'],
['geografico', 'adjective', 'b'],
['geografo', 'noun', 'c'],
['geometra', 'noun', 'c'],
['geometria', 'noun', 'c'],
['geometrico', 'adjective', 'c'],
['gesso', 'noun', 'b'],
['gestione', 'noun', 'a'],
['gestire', 'verb', 'a'],
['gesto', 'noun', 'a'],
['gestore', 'noun', 'b'],
['gettare', 'verb', 'a'],
['gettone', 'noun', 'c'],
['ghiaccio', 'noun', 'b'],
['ghiacciolo', 'noun', 'c'],
['ghianda', 'noun', 'c'],
['ghiro', 'noun', 'c'],
['gi', 'noun', 'c'],
['già', 'adverb', 'a'],
['giacca', 'noun', 'a'],
['giacere', 'verb', 'b'],
['giaguaro', 'noun', 'c'],
['giallo', 'adjective', 'a'],
['giallo', 'noun', 'a'],
['giapponese', 'adjective', 'a'],
['giapponese', 'noun', 'a'],
['giardinaggio', 'noun', 'c'],
['giardiniera', 'noun', 'c'],
['giardino', 'noun', 'a'],
['gigante', 'noun', 'b'],
['gigante', 'adjective', 'b'],
['gigantesco', 'adjective', 'b'],
['giglio', 'noun', 'b'],
['ginnastica', 'noun', 'b'],
['ginocchio', 'noun', 'a'],
['giocare', 'verb', 'a'],
['giocatore', 'noun', 'a'],
['giocattolo', 'noun', 'b'],
['gioco', 'noun', 'a'],
['gioia', 'noun', 'a'],
['gioiello', 'noun', 'b'],
['gioioso', 'adjective', 'c'],
['giordano', 'adjective', 'c'],
['giordano', 'noun', 'c'],
['giornale', 'noun', 'a'],
['giornale', 'adjective', 'a'],
['giornalino', 'noun', 'c'],
['giornalista', 'noun', 'a'],
['giornata', 'noun', 'a'],
['giorno', 'noun', 'a'],
['giostra', 'noun', 'c'],
['giovane', 'adjective', 'a'],
['giovane', 'noun', 'a'],
['giovanile', 'adjective', 'b'],
['giovedì', 'noun', 'b'],
['gioventù', 'noun', 'b'],
['giovinezza', 'noun', 'b'],
['giraffa', 'noun', 'c'],
['girare', 'verb', 'a'],
['giravite', 'noun', 'c'],
['giretto', 'noun', 'c'],
['giro', 'noun', 'a'],
['gironzolare', 'verb', 'c'],
['girotondo', 'noun', 'c'],
['gita', 'noun', 'b'],
['giù', 'adverb', 'a'],
['giù', 'adjective', 'a'],
['giubba', 'noun', 'c'],
['giubbotto', 'noun', 'c'],
['giudicare', 'verb', 'a'],
['giudice', 'noun', 'a'],
['giudiziario', 'adjective', 'b'],
['giudizio', 'noun', 'a'],
['giugno', 'noun', 'a'],
['giungere', 'verb', 'a'],
['giungla', 'noun', 'c'],
['giuramento', 'noun', 'b'],
['giurare', 'verb', 'a'],
['giuria', 'noun', 'c'],
['giuridico', 'adjective', 'b'],
['giustamente', 'adverb', 'b'],
['giustificare', 'verb', 'b'],
['giustizia', 'noun', 'a'],
['giusto', 'adjective', 'a'],
['giusto', 'noun', 'a'],
['giusto', 'adverb', 'a'],
['gli', 'pronoun', 'a'],
['glicine', 'noun', 'c'],
['global', 'adjective', 'b'],
['global', 'noun', 'b'],
['globale', 'adjective', 'b'],
['gloria', 'noun', 'b'],
['gnocco', 'noun', 'c'],
['gnomo', 'noun', 'c'],
['goal', 'noun', 'b'],
['gobbo', 'adjective', 'c'],
['gobbo', 'noun', 'c'],
['goccia', 'noun', 'b'],
['godere', 'verb', 'a'],
['gola', 'noun', 'b'],
['goloso', 'adjective', 'c'],
['gomito', 'noun', 'b'],
['gomitolo', 'noun', 'c'],
['gomma', 'noun', 'b'],
['gonfiare', 'verb', 'b'],
['gonfio', 'adjective', 'b'],
['gonfio', 'noun', 'b'],
['gonna', 'noun', 'b'],
['gorgonzola', 'noun', 'c'],
['gorilla', 'noun', 'c'],
['gossip', 'noun', 'b'],
['governare', 'verb', 'b'],
['governatore', 'noun', 'b'],
['governo', 'noun', 'a'],
['gradino', 'noun', 'b'],
['gradire', 'verb', 'b'],
['grado', 'noun', 'a'],
['graffiare', 'verb', 'c'],
['graffio', 'noun', 'c'],
['grafico', 'adjective', 'b'],
['grafico', 'noun', 'b'],
['grammatica', 'noun', 'b'],
['grammo', 'noun', 'b'],
['grana', 'noun', 'c'],
['granaio', 'noun', 'c'],
['granchio', 'noun', 'c'],
['grande', 'adjective', 'a'],
['grande', 'noun', 'a'],
['grandezza', 'noun', 'b'],
['grandine', 'noun', 'c'],
['grandioso', 'adjective', 'b'],
['grano', 'noun', 'b'],
['granturco', 'noun', 'c'],
['grappa', 'noun', 'c'],
['grasso', 'adjective', 'a'],
['grasso', 'noun', 'a'],
['gratis', 'adverb', 'b'],
['gratis', 'adjective', 'b'],
['grattare', 'verb', 'b'],
['grattugiato', 'past_part', 'c'],
['grattugiato', 'adjective', 'c'],
['gratuito', 'adjective', 'b'],
['grave', 'adjective', 'a'],
['grave', 'noun', 'a'],
['grave', 'adverb', 'a'],
['gravidanza', 'noun', 'b'],
['gravità', 'noun', 'b'],
['grazie', 'exclamation', 'a'],
['grazie', 'noun', 'a'],
['grazioso', 'adjective', 'c'],
['greco', 'adjective', 'a'],
['greco', 'noun', 'a'],
['grembiule', 'noun', 'c'],
['gridare', 'verb', 'a'],
['grido', 'noun', 'b'],
['grigio', 'adjective', 'a'],
['grigio', 'noun', 'a'],
['griglia', 'noun', 'c'],
['grinza', 'noun', 'c'],
['grissino', 'noun', 'c'],
['grossista', 'noun', 'c'],
['grosso', 'adjective', 'a'],
['grosso', 'noun', 'a'],
['grotta', 'noun', 'b'],
['gru', 'noun', 'c'],
['gruppo', 'noun', 'a'],
['guadagnare', 'verb', 'a'],
['guadagno', 'noun', 'b'],
['guaio', 'noun', 'b'],
['guaire', 'verb', 'c'],
['guancia', 'noun', 'b'],
['guanciale', 'noun', 'c'],
['guanciale', 'adjective', 'c'],
['guanto', 'noun', 'b'],
['guardare', 'verb', 'a'],
['guardaroba', 'noun', 'c'],
['guardia', 'noun', 'a'],
['guarire', 'verb', 'b'],
['guarnizione', 'noun', 'c'],
['guasto', 'noun', 'c'],
['guerra', 'noun', 'a'],
['guerriero', 'noun', 'b'],
['guerriero', 'adjective', 'b'],
['gufo', 'noun', 'c'],
['guida', 'noun', 'a'],
['guidare', 'verb', 'a'],
['guidatore', 'noun', 'c'],
['guinzaglio', 'noun', 'c'],
['gustare', 'verb', 'b'],
['gusto', 'noun', 'a'],
['gustoso', 'adjective', 'c'],
['hamburger', 'noun', 'c'],
['hobby', 'noun', 'b'],
['home', 'noun', 'b'],
['hotel', 'noun', 'b'],
['hyperlink', 'noun', 'b'],
['i', 'noun', 'c'],
['i', 'determiner', 'b'],
['icona', 'noun', 'b'],
['ics', 'noun', 'c'],
['idea', 'noun', 'a'],
['ideale', 'adjective', 'a'],
['ideale', 'noun', 'a'],
['ideare', 'verb', 'b'],
['identico', 'adjective', 'b'],
['identico', 'noun', 'b'],
['identificare', 'verb', 'a'],
['identificazione', 'noun', 'b'],
['identità', 'noun', 'a'],
['ideologia', 'noun', 'b'],
['ideologico', 'adjective', 'b'],
['idiota', 'adjective', 'a'],
['idiota', 'noun', 'a'],
['idraulico', 'adjective', 'b'],
['idraulico', 'noun', 'b'],
['idrico', 'adjective', 'b'],
['idrogeno', 'noun', 'b'],
['ieri', 'adverb', 'a'],
['ieri', 'noun', 'a'],
['igiene', 'noun', 'c'],
['ignorante', 'pres_part', 'b'],
['ignorante', 'adjective', 'b'],
['ignorante', 'noun', 'b'],
['ignoranza', 'noun', 'b'],
['ignorare', 'verb', 'a'],
['ignoto', 'adjective', 'b'],
['ignoto', 'noun', 'b'],
['il', 'determiner', 'a'],
['il', 'pronoun', 'a'],
['illecito', 'adjective', 'b'],
['illecito', 'noun', 'b'],
['illegale', 'adjective', 'b'],
['illegale', 'noun', 'b'],
['illegittimo', 'adjective', 'c'],
['illegittimo', 'noun', 'c'],
['illudere', 'verb', 'b'],
['illuminare', 'verb', 'b'],
['illuminato', 'past_part', 'b'],
['illuminato', 'adjective', 'b'],
['illuminato', 'noun', 'b'],
['illusione', 'noun', 'b'],
['illustrare', 'verb', 'b'],
['illustre', 'adjective', 'b'],
['imballare', 'verb', 'c'],
['imbarazzante', 'pres_part', 'b'],
['imbarazzante', 'adjective', 'b'],
['imbarazzato', 'past_part', 'b'],
['imbarazzato', 'adjective', 'b'],
['imbarazzo', 'noun', 'b'],
['imbattersi', 'verb', 'b'],
['imbecille', 'adjective', 'b'],
['imbecille', 'noun', 'b'],
['imbiancare', 'verb', 'c'],
['imbianchino', 'noun', 'c'],
['imbottigliare', 'verb', 'c'],
['imbrogliare', 'verb', 'c'],
['imbroglio', 'noun', 'c'],
['imbuto', 'noun', 'c'],
['imitare', 'verb', 'b'],
['immaginare', 'verb', 'a'],
['immaginare', 'noun', 'a'],
['immaginario', 'adjective', 'b'],
['immaginario', 'noun', 'b'],
['immaginazione', 'noun', 'b'],
['immagine', 'noun', 'a'],
['immaturo', 'adjective', 'c'],
['immediatamente', 'adverb', 'a'],
['immediato', 'adjective', 'b'],
['immediato', 'noun', 'b'],
['immenso', 'adjective', 'b'],
['immenso', 'noun', 'b'],
['immergere', 'verb', 'b'],
['immigrato', 'past_part', 'b'],
['immigrato', 'adjective', 'b'],
['immigrato', 'noun', 'b'],
['immobile', 'adjective', 'a'],
['immobile', 'noun', 'a'],
['immobiliare', 'adjective', 'b'],
['immobiliare', 'noun', 'b'],
['immondizia', 'noun', 'c'],
['impallidire', 'verb', 'c'],
['imparare', 'verb', 'a'],
['impastare', 'verb', 'c'],
['impatto', 'noun', 'b'],
['impaziente', 'adjective', 'c'],
['impaziente', 'noun', 'c'],
['impazzire', 'verb', 'b'],
['impedire', 'verb', 'a'],
['impegnare', 'verb', 'a'],
['impegnativo', 'adjective', 'b'],
['impegnato', 'past_part', 'c'],
['impegnato', 'adjective', 'c'],
['impegno', 'noun', 'a'],
['imperare', 'verb', 'b'],
['imperatore', 'noun', 'b'],
['imperiale', 'adjective', 'b'],
['imperiale', 'noun', 'b'],
['impermeabile', 'adjective', 'c'],
['impermeabile', 'noun', 'c'],
['impero', 'noun', 'b'],
['impero', 'adjective', 'b'],
['impianto', 'noun', 'a'],
['impiegare', 'verb', 'a'],
['impiegato', 'past_part', 'b'],
['impiegato', 'adjective', 'b'],
['impiegato', 'noun', 'b'],
['impiego', 'noun', 'b'],
['implicare', 'verb', 'b'],
['imporre', 'verb', 'a'],
['importante', 'pres_part', 'a'],
['importante', 'adjective', 'a'],
['importante', 'noun', 'a'],
['importanza', 'noun', 'a'],
['importare', 'verb', 'a'],
['importo', 'noun', 'b'],
['impossibile', 'adjective', 'a'],
['impossibile', 'noun', 'a'],
['impostare', 'verb', 'b'],
['impostazione', 'noun', 'b'],
['impreciso', 'adjective', 'c'],
['imprenditore', 'noun', 'b'],
['impresa', 'noun', 'a'],
['impressionante', 'pres_part', 'b'],
['impressionante', 'adjective', 'b'],
['impressionare', 'verb', 'b'],
['impressione', 'noun', 'a'],
['imprevisto', 'adjective', 'b'],
['imprevisto', 'noun', 'b'],
['imprigionare', 'verb', 'c'],
['improbabile', 'adjective', 'b'],
['impronta', 'noun', 'b'],
['improvvisamente', 'adverb', 'b'],
['improvvisare', 'verb', 'b'],
['improvviso', 'adjective', 'a'],
['improvviso', 'noun', 'a'],
['imprudente', 'adjective', 'c'],
['imprudente', 'noun', 'c'],
['impulsivo', 'adjective', 'c'],
['impulsivo', 'noun', 'c'],
['impulso', 'noun', 'b'],
['imputata', 'noun', 'b'],
['imputato', 'past_part', 'a'],
['imputato', 'adjective', 'a'],
['imputato', 'noun', 'a'],
['in', 'preposition', 'a'],
['inaspettato', 'adjective', 'b'],
['inaugurare', 'verb', 'b'],
['incamminare', 'verb', 'c'],
['incantare', 'verb', 'c'],
['incapace', 'adjective', 'b'],
['incapace', 'noun', 'b'],
['incapacità', 'noun', 'b'],
['incaricare', 'verb', 'b'],
['incarico', 'noun', 'b'],
['incartare', 'verb', 'c'],
['incassare', 'verb', 'b'],
['incasso', 'noun', 'c'],
['incastrare', 'verb', 'b'],
['incatenare', 'verb', 'c'],
['incazzarsi', 'verb', 'b'],
['incendio', 'noun', 'b'],
['incertezza', 'noun', 'b'],
['incerto', 'adjective', 'b'],
['incerto', 'noun', 'b'],
['inchiesta', 'noun', 'b'],
['inchiodare', 'verb', 'c'],
['incidente', 'noun', 'a'],
['incidere', 'verb', 'b'],
['incinta', 'adjective', 'b'],
['incitare', 'verb', 'c'],
['incivile', 'adjective', 'c'],
['incivile', 'noun', 'c'],
['includere', 'verb', 'b'],
['incluso', 'past_part', 'b'],
['incluso', 'adjective', 'b'],
['incluso', 'noun', 'b'],
['incollare', 'verb', 'b'],
['incominciare', 'verb', 'b'],
['incompleto', 'adjective', 'c'],
['incomprensibile', 'adjective', 'b'],
['inconsolabile', 'adjective', 'c'],
['incontentabile', 'adjective', 'c'],
['incontrare', 'verb', 'a'],
['incontro', 'noun', 'a'],
['incontro', 'adverb', 'b'],
['incoraggiare', 'verb', 'b'],
['incoronare', 'verb', 'c'],
['incorreggibile', 'adjective', 'c'],
['incredibile', 'adjective', 'a'],
['incremento', 'noun', 'b'],
['incrinare', 'verb', 'c'],
['incrociare', 'verb', 'b'],
['incrocio', 'noun', 'c'],
['incubo', 'noun', 'b'],
['incurabile', 'adjective', 'c'],
['incurabile', 'noun', 'c'],
['incuriosire', 'verb', 'b'],
['indagare', 'verb', 'b'],
['indagine', 'noun', 'a'],
['indescrivibile', 'adjective', 'c'],
['indiano', 'adjective', 'b'],
['indiano', 'noun', 'b'],
['indicare', 'verb', 'a'],
['indicazione', 'noun', 'a'],
['indice', 'noun', 'a'],
['indice', 'adjective', 'a'],
['indietreggiare', 'verb', 'c'],
['indietro', 'adverb', 'a'],
['indietro', 'adjective', 'a'],
['indietro', 'loc-comando', 'a'],
['indifeso', 'adjective', 'c'],
['indifferente', 'adjective', 'b'],
['indifferente', 'noun', 'b'],
['indifferenza', 'noun', 'b'],
['indigestione', 'noun', 'c'],
['indimenticabile', 'adjective', 'c'],
['indipendente', 'adjective', 'b'],
['indipendente', 'noun', 'b'],
['indipendentemente', 'adverb', 'b'],
['indipendenza', 'noun', 'b'],
['indiretto', 'adjective', 'b'],
['indirizzare', 'verb', 'b'],
['indirizzo', 'noun', 'a'],
['indisciplinato', 'adjective', 'c'],
['indispensabile', 'adjective', 'b'],
['indispensabile', 'noun', 'b'],
['individuale', 'adjective', 'b'],
['individuare', 'verb', 'a'],
['individuo', 'noun', 'a'],
['individuo', 'adjective', 'a'],
['indizio', 'noun', 'b'],
['indossare', 'verb', 'a'],
['indovinare', 'verb', 'b'],
['indovinello', 'noun', 'c'],
['indubbiamente', 'adverb', 'b'],
['indumento', 'noun', 'c'],
['indurre', 'verb', 'b'],
['industria', 'noun', 'a'],
['industriale', 'adjective', 'a'],
['industriale', 'noun', 'a'],
['inedito', 'adjective', 'b'],
['inefficace', 'adjective', 'c'],
['inerte', 'adjective', 'c'],
['inesistente', 'adjective', 'b'],
['inesperienza', 'noun', 'c'],
['inesperto', 'adjective', 'c'],
['inevitabile', 'adjective', 'b'],
['inevitabile', 'noun', 'b'],
['inevitabilmente', 'adverb', 'b'],
['infame', 'adjective', 'c'],
['infame', 'noun', 'c'],
['infantile', 'adjective', 'b'],
['infanzia', 'noun', 'b'],
['infarto', 'noun', 'b'],
['infatti', 'conjunction', 'a'],
['infatti', 'adverb', 'a'],
['infedele', 'adjective', 'c'],
['infedele', 'noun', 'c'],
['infelice', 'adjective', 'b'],
['infelice', 'noun', 'b'],
['inferiore', 'adjective', 'a'],
['infermiera', 'noun', 'b'],
['infermiere', 'noun', 'c'],
['inferno', 'noun', 'b'],
['inferno', 'adjective', 'b'],
['infezione', 'noun', 'b'],
['infilare', 'verb', 'a'],
['infine', 'adverb', 'a'],
['infinito', 'adjective', 'a'],
['infinito', 'noun', 'a'],
['influenza', 'noun', 'b'],
['influenzare', 'verb', 'b'],
['informare', 'verb', 'a'],
['informatica', 'noun', 'b'],
['informatico', 'adjective', 'b'],
['informatico', 'noun', 'b'],
['informativo', 'adjective', 'b'],
['informazione', 'noun', 'a'],
['infradito', 'adjective', 'c'],
['infradito', 'noun', 'c'],
['infrastruttura', 'noun', 'b'],
['infuriare', 'verb', 'b'],
['infuso', 'past_part', 'c'],
['infuso', 'adjective', 'c'],
['infuso', 'noun', 'c'],
['ingannare', 'verb', 'b'],
['inganno', 'noun', 'b'],
['ingegnere', 'noun', 'b'],
['ingegneria', 'noun', 'b'],
['ingelosire', 'verb', 'c'],
['ingenuo', 'adjective', 'b'],
['ingenuo', 'noun', 'b'],
['ingessare', 'verb', 'c'],
['ingiusto', 'adjective', 'b'],
['ingiusto', 'noun', 'b'],
['inglese', 'adjective', 'a'],
['inglese', 'noun', 'a'],
['ingoiare', 'verb', 'b'],
['ingorgo', 'noun', 'c'],
['ingrandire', 'verb', 'c'],
['ingrassare', 'verb', 'b'],
['ingrediente', 'noun', 'b'],
['ingresso', 'noun', 'a'],
['iniezione', 'noun', 'c'],
['iniziale', 'adjective', 'a'],
['iniziale', 'noun', 'a'],
['inizialmente', 'adverb', 'b'],
['iniziare', 'verb', 'a'],
['iniziativa', 'noun', 'a'],
['inizio', 'noun', 'a'],
['innamorarsi', 'verb', 'a'],
['innamorato', 'past_part', 'b'],
['innamorato', 'adjective', 'b'],
['innamorato', 'noun', 'b'],
['innanzitutto', 'adverb', 'b'],
['innervosire', 'verb', 'c'],
['innocente', 'adjective', 'b'],
['innocente', 'noun', 'b'],
['innocuo', 'adjective', 'b'],
['innovativo', 'adjective', 'b'],
['innovazione', 'noun', 'b'],
['inoltre', 'adverb', 'a'],
['inquadrare', 'verb', 'b'],
['inquietante', 'pres_part', 'b'],
['inquietante', 'adjective', 'b'],
['inquinamento', 'noun', 'b'],
['inquinare', 'verb', 'c'],
['inquinato', 'past_part', 'c'],
['inquinato', 'adjective', 'c'],
['insalata', 'noun', 'b'],
['insegna', 'noun', 'b'],
['insegnamento', 'noun', 'b'],
['insegnante', 'pres_part', 'a'],
['insegnante', 'adjective', 'a'],
['insegnante', 'noun', 'a'],
['insegnare', 'verb', 'a'],
['inseguire', 'verb', 'b'],
['inseparabile', 'adjective', 'c'],
['inseparabile', 'noun', 'c'],
['inserimento', 'noun', 'b'],
['inserire', 'verb', 'a'],
['insetticida', 'adjective', 'c'],
['insetto', 'noun', 'b'],
['insieme', 'adverb', 'a'],
['insieme', 'noun', 'a'],
['insinuare', 'verb', 'b'],
['insistere', 'verb', 'a'],
['insoddisfatto', 'adjective', 'c'],
['insolito', 'adjective', 'b'],
['insolito', 'noun', 'b'],
['insomma', 'adverb', 'a'],
['insopportabile', 'adjective', 'b'],
['insospettire', 'verb', 'c'],
['installare', 'verb', 'b'],
['insuccesso', 'noun', 'c'],
['insultare', 'verb', 'b'],
['insulto', 'noun', 'b'],
['intanto', 'adverb', 'a'],
['intasare', 'verb', 'c'],
['intatto', 'adjective', 'b'],
['integrale', 'adjective', 'b'],
['integrale', 'noun', 'b'],
['integrare', 'verb', 'b'],
['integrazione', 'noun', 'b'],
['intellettuale', 'adjective', 'b'],
['intellettuale', 'noun', 'b'],
['intelligente', 'adjective', 'a'],
['intelligenza', 'noun', 'b'],
['intendere', 'verb', 'a'],
['intensità', 'noun', 'b'],
['intenso', 'adjective', 'a'],
['intento', 'noun', 'b'],
['intenzione', 'noun', 'a'],
['interagire', 'verb', 'b'],
['interamente', 'adverb', 'b'],
['interazione', 'noun', 'b'],
['intercettare', 'verb', 'b'],
['intercettazione', 'noun', 'b'],
['interessante', 'pres_part', 'a'],
['interessante', 'adjective', 'a'],
['interessare', 'verb', 'a'],
['interessato', 'past_part', 'b'],
['interessato', 'adjective', 'b'],
['interessato', 'noun', 'b'],
['interesse', 'noun', 'a'],
['interiore', 'adjective', 'b'],
['interiore', 'noun', 'b'],
['interlocutore', 'noun', 'b'],
['internazionale', 'adjective', 'a'],
['internazionale', 'noun', 'a'],
['internet', 'noun', 'a'],
['interno', 'adjective', 'a'],
['interno', 'noun', 'a'],
['intero', 'adjective', 'a'],
['intero', 'noun', 'a'],
['interpretare', 'verb', 'a'],
['interpretazione', 'noun', 'b'],
['interprete', 'noun', 'b'],
['interrogare', 'verb', 'b'],
['interrogativo', 'adjective', 'b'],
['interrogativo', 'noun', 'b'],
['interrogatorio', 'adjective', 'b'],
['interrogatorio', 'noun', 'b'],
['interrogazione', 'noun', 'c'],
['interrompere', 'verb', 'a'],
['interruttore', 'noun', 'c'],
['interruzione', 'noun', 'b'],
['intervallo', 'noun', 'b'],
['intervenire', 'verb', 'a'],
['intervento', 'noun', 'a'],
['intervista', 'noun', 'a'],
['intesa', 'noun', 'b'],
['intestare', 'verb', 'b'],
['intestino', 'noun', 'c'],
['intimidire', 'verb', 'c'],
['intimità', 'noun', 'b'],
['intimo', 'adjective', 'b'],
['intimo', 'noun', 'b'],
['intitolare', 'verb', 'b'],
['intonaco', 'noun', 'c'],
['intorno', 'adverb', 'a'],
['intorno', 'preposition', 'a'],
['intorno', 'adjective', 'a'],
['intorno', 'noun', 'a'],
['intraprendere', 'verb', 'b'],
['intravedere', 'verb', 'b'],
['intrecciare', 'verb', 'b'],
['introdurre', 'verb', 'a'],
['introduzione', 'noun', 'b'],
['intuire', 'verb', 'b'],
['intuizione', 'noun', 'b'],
['inutile', 'adjective', 'a'],
['invadente', 'pres_part', 'c'],
['invadente', 'adjective', 'c'],
['invadente', 'noun', 'c'],
['invadere', 'verb', 'b'],
['invasione', 'noun', 'b'],
['invecchiare', 'verb', 'b'],
['invece', 'adverb', 'a'],
['inventare', 'verb', 'a'],
['invenzione', 'noun', 'b'],
['invernale', 'adjective', 'b'],
['invernale', 'noun', 'b'],
['inverno', 'noun', 'a'],
['investimento', 'noun', 'b'],
['investire', 'verb', 'a'],
['inviare', 'verb', 'a'],
['inviato', 'past_part', 'b'],
['inviato', 'adjective', 'b'],
['inviato', 'noun', 'b'],
['invidiare', 'verb', 'b'],
['invidioso', 'adjective', 'c'],
['invidioso', 'noun', 'c'],
['invincibile', 'adjective', 'c'],
['invisibile', 'adjective', 'b'],
['invisibile', 'noun', 'b'],
['invitare', 'verb', 'a'],
['invitato', 'past_part', 'b'],
['invitato', 'adjective', 'b'],
['invitato', 'noun', 'b'],
['invito', 'noun', 'b'],
['invocare', 'verb', 'b'],
['inzuppare', 'verb', 'c'],
['io', 'pronoun', 'a'],
['ionico', 'adjective', 'c'],
['ipotesi', 'noun', 'a'],
['ipotizzare', 'verb', 'b'],
['ippopotamo', 'noun', 'c'],
['ipsilon', 'noun', 'c'],
['ira', 'noun', 'b'],
['irlandese', 'adjective', 'b'],
['irlandese', 'noun', 'b'],
['ironia', 'noun', 'b'],
['ironico', 'adjective', 'b'],
['irriconoscibile', 'adjective', 'c'],
['irritare', 'verb', 'b'],
['iscritto', 'past_part', 'b'],
['iscritto', 'adjective', 'b'],
['iscritto', 'noun', 'b'],
['iscrivere', 'verb', 'a'],
['iscrizione', 'noun', 'b'],
['islamico', 'adjective', 'b'],
['islamico', 'noun', 'b'],
['islandese', 'adjective', 'c'],
['islandese', 'noun', 'c'],
['isola', 'noun', 'a'],
['isolare', 'verb', 'b'],
['isolato', 'past_part', 'b'],
['isolato', 'adjective', 'b'],
['isolato', 'noun', 'b'],
['ispettore', 'noun', 'b'],
['ispirare', 'verb', 'a'],
['ispirazione', 'noun', 'b'],
['israeliano', 'adjective', 'c'],
['israeliano', 'noun', 'c'],
['istante', 'noun', 'a'],
['istanza', 'noun', 'b'],
['istintivo', 'adjective', 'c'],
['istinto', 'noun', 'b'],
['istituto', 'noun', 'a'],
['istituzionale', 'adjective', 'b'],
['istituzione', 'noun', 'a'],
['istruttivo', 'adjective', 'c'],
['istruttore', 'noun', 'c'],
['istruzione', 'noun', 'a'],
['italiano', 'adjective', 'a'],
['italiano', 'noun', 'a'],
['iugoslavo', 'adjective', 'c'],
['iugoslavo', 'noun', 'c'],
['jeans', 'noun', 'b'],
['karatè', 'noun', 'c'],
['ketchup', 'noun', 'c'],
['killer', 'noun', 'b'],
['killer', 'adjective', 'b'],
['kit', 'noun', 'c'],
['kiwi', 'noun', 'c'],
['là', 'adverb', 'a'],
['la', 'determiner', 'a'],
['la', 'pronoun', 'a'],
['labbro', 'noun', 'a'],
['labirinto', 'noun', 'c'],
['laboratorio', 'noun', 'a'],
['laborioso', 'adjective', 'c'],
['lacca', 'noun', 'c'],
['lacca', 'adjective', 'c'],
['laccio', 'noun', 'c'],
['lacrima', 'noun', 'a'],
['laddove', 'adverb', 'b'],
['laddove', 'conjunction', 'b'],
['ladro', 'noun', 'b'],
['laggiù', 'adverb', 'b'],
['lago', 'noun', 'a'],
['laico', 'adjective', 'b'],
['laico', 'noun', 'b'],
['lama', 'noun', 'b'],
['lamentare', 'verb', 'a'],
['lamentela', 'noun', 'c'],
['lametta', 'noun', 'c'],
['lamiera', 'noun', 'c'],
['lampada', 'noun', 'b'],
['lampadario', 'noun', 'c'],
['lampo', 'noun', 'b'],
['lampo', 'adjective', 'b'],
['lampo', 'noun', 'b'],
['lana', 'noun', 'b'],
['lancetta', 'noun', 'c'],
['lanciare', 'verb', 'a'],
['lancio', 'noun', 'b'],
['lanterna', 'noun', 'c'],
['lapis', 'noun', 'c'],
['lardo', 'noun', 'c'],
['larghezza', 'noun', 'c'],
['largo', 'adjective', 'a'],
['largo', 'noun', 'a'],
['largo', 'adverb', 'a'],
['lasagna', 'noun', 'c'],
['lasciare', 'verb', 'a'],
['lassù', 'adverb', 'b'],
['lastra', 'noun', 'b'],
['laterale', 'adjective', 'b'],
['laterale', 'noun', 'b'],
['latino', 'adjective', 'b'],
['latino', 'noun', 'b'],
['lato', 'noun', 'a'],
['latta', 'noun', 'c'],
['lattante', 'pres_part', 'c'],
['lattante', 'adjective', 'c'],
['lattante', 'noun', 'c'],
['latte', 'noun', 'a'],
['latte', 'adjective', 'a'],
['latteria', 'noun', 'c'],
['lattina', 'noun', 'c'],
['lattuga', 'noun', 'c'],
['laurea', 'noun', 'b'],
['laureare', 'verb', 'b'],
['laureato', 'past_part', 'b'],
['laureato', 'adjective', 'b'],
['laureato', 'noun', 'b'],
['lava', 'noun', 'c'],
['lavabo', 'noun', 'c'],
['lavagna', 'noun', 'c'],
['lavagna', 'adjective', 'c'],
['lavanda', 'noun', 'c'],
['lavanderia', 'noun', 'c'],
['lavandino', 'noun', 'c'],
['lavapiatti', 'noun', 'c'],
['lavare', 'verb', 'a'],
['lavastoviglie', 'noun', 'c'],
['lavatrice', 'noun', 'b'],
['lavello', 'noun', 'c'],
['lavorare', 'verb', 'a'],
['lavorativo', 'adjective', 'b'],
['lavoratore', 'adjective', 'a'],
['lavoratore', 'noun', 'a'],
['lavorazione', 'noun', 'b'],
['lavoro', 'noun', 'a'],
['laziale', 'adjective', 'c'],
['laziale', 'noun', 'c'],
['le', 'determiner', 'a'],
['le', 'pronoun', 'a'],
['le', 'pronoun', 'a'],
['leader', 'noun', 'b'],
['lealtà', 'noun', 'c'],
['lebbra', 'noun', 'c'],
['leccare', 'verb', 'b'],
['leccio', 'noun', 'c'],
['lecito', 'adjective', 'b'],
['lecito', 'noun', 'b'],
['lega', 'noun', 'b'],
['legale', 'adjective', 'a'],
['legale', 'noun', 'a'],
['legame', 'noun', 'b'],
['legare', 'verb', 'a'],
['legato', 'past_part', 'a'],
['legato', 'adjective', 'a'],
['legato', 'noun', 'a'],
['legge', 'noun', 'a'],
['leggenda', 'noun', 'b'],
['leggere', 'verb', 'a'],
['leggermente', 'adverb', 'b'],
['leggero', 'adjective', 'a'],
['leggero', 'adverb', 'a'],
['leggero', 'noun', 'a'],
['legislativo', 'adjective', 'b'],
['legittimo', 'adjective', 'b'],
['legna', 'noun', 'c'],
['legno', 'noun', 'a'],
['legume', 'noun', 'c'],
['lei', 'pronoun', 'a'],
['lentamente', 'adverb', 'a'],
['lente', 'noun', 'c'],
['lenticchia', 'noun', 'c'],
['lentiggine', 'noun', 'c'],
['lento', 'adjective', 'a'],
['lento', 'noun', 'a'],
['lento', 'adverb', 'a'],
['lenza', 'noun', 'c'],
['lenzuolo', 'noun', 'b'],
['leone', 'noun', 'b'],
['leonessa', 'noun', 'c'],
['leopardo', 'noun', 'c'],
['lepre', 'noun', 'c'],
['lesione', 'noun', 'b'],
['lessare', 'verb', 'c'],
['lessema', 'noun', 'b'],
['lettera', 'noun', 'a'],
['letterale', 'adjective', 'c'],
['letteralmente', 'adverb', 'b'],
['letterario', 'adjective', 'b'],
['letteratura', 'noun', 'a'],
['letto', 'noun', 'a'],
['lettone', 'noun', 'c'],
['lettore', 'noun', 'a'],
['lettura', 'noun', 'a'],
['leva', 'noun', 'b'],
['levare', 'verb', 'a'],
['levare', 'noun', 'a'],
['lezione', 'noun', 'a'],
['lì', 'adverb', 'a'],
['li', 'pronoun', 'a'],
['libanese', 'adjective', 'b'],
['libanese', 'noun', 'b'],
['liberale', 'adjective', 'b'],
['liberale', 'noun', 'b'],
['liberamente', 'adverb', 'b'],
['liberare', 'verb', 'a'],
['liberazione', 'noun', 'b'],
['libero', 'adjective', 'a'],
['libero', 'noun', 'a'],
['libertà', 'noun', 'a'],
['libico', 'adjective', 'c'],
['libico', 'noun', 'c'],
['libraio', 'noun', 'c'],
['libreria', 'noun', 'b'],
['libretto', 'noun', 'b'],
['libro', 'noun', 'a'],
['licenza', 'noun', 'b'],
['licenziamento', 'noun', 'c'],
['licenziare', 'verb', 'b'],
['liceo', 'noun', 'b'],
['lido', 'noun', 'c'],
['lieto', 'adjective', 'b'],
['lieve', 'adjective', 'b'],
['lievito', 'noun', 'c'],
['ligure', 'adjective', 'c'],
['ligure', 'noun', 'c'],
['lima', 'noun', 'c'],
['limare', 'verb', 'c'],
['limitare', 'verb', 'a'],
['limitato', 'past_part', 'b'],
['limitato', 'adjective', 'b'],
['limite', 'noun', 'a'],
['limite', 'adjective', 'a'],
['limonata', 'noun', 'c'],
['limone', 'noun', 'b'],
['limone', 'adjective', 'b'],
['linea', 'noun', 'a'],
['lineare', 'adjective', 'b'],
['lineare', 'noun', 'b'],
['linfa', 'noun', 'b'],
['lingerie', 'noun', 'c'],
['lingua', 'noun', 'a'],
['linguaggio', 'noun', 'a'],
['linguistica', 'noun', 'b'],
['linguistico', 'adjective', 'b'],
['linguistico', 'noun', 'b'],
['link', 'noun', 'b'],
['liquido', 'adjective', 'a'],
['liquido', 'noun', 'a'],
['liquore', 'noun', 'c'],
['lira', 'noun', 'a'],
['lirico', 'adjective', 'b'],
['lisbonese', 'adjective', 'c'],
['lisbonese', 'noun', 'c'],
['liscio', 'adjective', 'b'],
['liscio', 'noun', 'b'],
['lista', 'noun', 'a'],
['lite', 'noun', 'b'],
['litigare', 'verb', 'a'],
['litigio', 'noun', 'b'],
['litro', 'noun', 'b'],
['lituano', 'adjective', 'c'],
['lituano', 'noun', 'c'],
['live', 'adjective', 'b'],
['livello', 'noun', 'a'],
['lo', 'determiner', 'a'],
['lo', 'pronoun', 'a'],
['locale', 'adjective', 'a'],
['locale', 'noun', 'a'],
['locale', 'noun', 'a'],
['località', 'noun', 'b'],
['locanda', 'noun', 'c'],
['locazione', 'noun', 'b'],
['locomotiva', 'noun', 'c'],
['logica', 'noun', 'b'],
['logico', 'adjective', 'b'],
['logico', 'noun', 'b'],
['logoro', 'past_part', 'c'],
['logoro', 'adjective', 'c'],
['lombardo', 'adjective', 'b'],
['lombardo', 'noun', 'b'],
['londinese', 'adjective', 'c'],
['londinese', 'noun', 'c'],
['lontananza', 'noun', 'b'],
['lontano', 'adjective', 'a'],
['lontano', 'adverb', 'a'],
['lontano', 'noun', 'a'],
['lonza', 'noun', 'c'],
['look', 'noun', 'b'],
['loro', 'pronoun', 'a'],
['loro', 'adjective', 'a'],
['lotta', 'noun', 'a'],
['lottare', 'verb', 'b'],
['lozione', 'noun', 'c'],
['lucano', 'adjective', 'c'],
['lucano', 'noun', 'c'],
['luccicare', 'verb', 'c'],
['lucciola', 'noun', 'c'],
['luce', 'noun', 'a'],
['lucente', 'pres_part', 'c'],
['lucente', 'adjective', 'c'],
['lucente', 'noun', 'c'],
['lucertola', 'noun', 'c'],
['lucidare', 'verb', 'c'],
['lucido', 'adjective', 'b'],
['lucido', 'noun', 'b'],
['luglio', 'noun', 'a'],
['lui', 'pronoun', 'a'],
['lumaca', 'noun', 'c'],
['luminoso', 'adjective', 'b'],
['luna', 'noun', 'a'],
['lunedì', 'noun', 'a'],
['lunghezza', 'noun', 'b'],
['lungo', 'adjective', 'a'],
['lungo', 'preposition', 'a'],
['lungo', 'noun', 'a'],
['luogo', 'noun', 'a'],
['lupo', 'noun', 'a'],
['lussemburghese', 'adjective', 'c'],
['lussemburghese', 'noun', 'c'],
['lusso', 'noun', 'b'],
['lutto', 'noun', 'b'],
['ma', 'conjunction', 'a'],
['ma', 'noun', 'a'],
['maccherone', 'noun', 'c'],
['macchia', 'noun', 'a'],
['macchina', 'noun', 'a'],
['macchinista', 'noun', 'c'],
['macedone', 'adjective', 'c'],
['macedone', 'noun', 'c'],
['macedonia', 'noun', 'c'],
['maceria', 'noun', 'b'],
['macinare', 'verb', 'c'],
['madonna', 'noun', 'b'],
['madonna', 'exclamation', 'b'],
['madre', 'noun', 'a'],
['madrileno', 'adjective', 'c'],
['madrileno', 'noun', 'c'],
['madrina', 'noun', 'c'],
['maestra', 'noun', 'b'],
['maestranza', 'noun', 'c'],
['maestro', 'noun', 'a'],
['maestro', 'adjective', 'a'],
['mafia', 'noun', 'b'],
['mafioso', 'adjective', 'b'],
['mafioso', 'noun', 'b'],
['magari', 'exclamation', 'a'],
['magari', 'conjunction', 'a'],
['magari', 'adverb', 'a'],
['magazzino', 'noun', 'b'],
['maggio', 'noun', 'a'],
['maggioranza', 'noun', 'a'],
['maggiorenne', 'adjective', 'c'],
['maggiorenne', 'noun', 'c'],
['maggiormente', 'adverb', 'b'],
['magia', 'noun', 'b'],
['magico', 'adjective', 'a'],
['magistrato', 'noun', 'b'],
['magistratura', 'noun', 'b'],
['maglia', 'noun', 'a'],
['maglietta', 'noun', 'b'],
['magnetico', 'adjective', 'b'],
['magnifico', 'adjective', 'b'],
['mago', 'noun', 'b'],
['mago', 'adjective', 'b'],
['magro', 'adjective', 'b'],
['magro', 'noun', 'b'],
['mah', 'exclamation', 'b'],
['mai', 'adverb', 'a'],
['maiale', 'noun', 'b'],
['maionese', 'noun', 'c'],
['mais', 'noun', 'c'],
['maiuscola', 'noun', 'c'],
['malato', 'adjective', 'a'],
['malato', 'noun', 'a'],
['malattia', 'noun', 'a'],
['malaugurio', 'noun', 'c'],
['malavita', 'noun', 'c'],
['male', 'adverb', 'a'],
['male', 'exclamation', 'a'],
['male', 'noun', 'a'],
['maledetto', 'past_part', 'b'],
['maledetto', 'adjective', 'b'],
['maledetto', 'noun', 'b'],
['maledizione', 'noun', 'b'],
['maledizione', 'exclamation', 'b'],
['maleducato', 'adjective', 'c'],
['maleducato', 'noun', 'c'],
['maleducazione', 'noun', 'c'],
['malgrado', 'noun', 'b'],
['malgrado', 'adverb', 'b'],
['malgrado', 'conjunction', 'b'],
['malgrado', 'preposition', 'b'],
['malinconia', 'noun', 'b'],
['malinteso', 'adjective', 'c'],
['malinteso', 'noun', 'c'],
['malizia', 'noun', 'c'],
['maltempo', 'noun', 'c'],
['maltese', 'adjective', 'c'],
['maltese', 'noun', 'c'],
['maltrattamento', 'noun', 'c'],
['maltrattare', 'verb', 'c'],
['malva', 'noun', 'c'],
['malvagio', 'adjective', 'b'],
['malvagio', 'noun', 'b'],
['mamma', 'noun', 'a'],
['mammella', 'noun', 'c'],
['mammifero', 'noun', 'c'],
['manager', 'noun', 'b'],
['mancanza', 'noun', 'a'],
['mancare', 'verb', 'a'],
['mancato', 'past_part', 'b'],
['mancato', 'adjective', 'b'],
['mancino', 'adjective', 'c'],
['mancino', 'noun', 'c'],
['manco', 'adjective', 'b'],
['manco', 'adverb', 'b'],
['mandare', 'verb', 'a'],
['mandarino', 'noun', 'c'],
['mandarino', 'adjective', 'c'],
['mandato', 'past_part', 'b'],
['mandato', 'adjective', 'b'],
['mandato', 'noun', 'b'],
['mandorla', 'noun', 'c'],
['mandorlo', 'noun', 'c'],
['manganello', 'noun', 'c'],
['mangiare', 'verb', 'a'],
['mangime', 'noun', 'c'],
['mania', 'noun', 'b'],
['maniaco', 'adjective', 'c'],
['maniaco', 'noun', 'c'],
['manica', 'noun', 'b'],
['manico', 'noun', 'b'],
['maniera', 'noun', 'a'],
['manifestare', 'verb', 'a'],
['manifestazione', 'noun', 'a'],
['manifesto', 'noun', 'b'],
['mano', 'noun', 'a'],
['manodopera', 'noun', 'c'],
['manoscritto', 'adjective', 'b'],
['manoscritto', 'noun', 'b'],
['manovale', 'noun', 'c'],
['manovra', 'noun', 'b'],
['mantello', 'noun', 'b'],
['mantenere', 'verb', 'a'],
['manuale', 'adjective', 'b'],
['manuale', 'noun', 'b'],
['manuale', 'noun', 'b'],
['manutenzione', 'noun', 'b'],
['manzo', 'noun', 'c'],
['mappa', 'noun', 'b'],
['marca', 'noun', 'b'],
['marcare', 'verb', 'b'],
['marchigiano', 'adjective', 'c'],
['marchigiano', 'noun', 'c'],
['marchio', 'noun', 'b'],
['marcia', 'noun', 'b'],
['marciapiede', 'noun', 'b'],
['marcio', 'adjective', 'b'],
['marcio', 'noun', 'b'],
['marcire', 'verb', 'c'],
['marco', 'noun', 'a'],
['mare', 'noun', 'a'],
['marea', 'noun', 'b'],
['maresciallo', 'noun', 'b'],
['margherita', 'noun', 'c'],
['marginale', 'adjective', 'b'],
['marginale', 'noun', 'b'],
['margine', 'noun', 'b'],
['marinaio', 'noun', 'b'],
['marino', 'adjective', 'b'],
['marino', 'noun', 'b'],
['marionetta', 'noun', 'c'],
['marito', 'noun', 'a'],
['marketing', 'noun', 'b'],
['marmellata', 'noun', 'c'],
['marmo', 'noun', 'b'],
['marocchino', 'adjective', 'c'],
['marocchino', 'noun', 'c'],
['marrone', 'noun', 'b'],
['marrone', 'adjective', 'b'],
['martedì', 'noun', 'b'],
['marzo', 'noun', 'a'],
['mascarpone', 'noun', 'c'],
['maschera', 'noun', 'b'],
['mascherare', 'verb', 'b'],
['mascherato', 'past_part', 'c'],
['mascherato', 'adjective', 'c'],
['maschile', 'adjective', 'a'],
['maschile', 'noun', 'a'],
['maschio', 'noun', 'a'],
['maschio', 'adjective', 'a'],
['massa', 'noun', 'a'],
['massa', 'adverb', 'a'],
['massacrare', 'verb', 'b'],
['massacro', 'noun', 'c'],
['massaggio', 'noun', 'c'],
['massaia', 'noun', 'c'],
['massiccio', 'adjective', 'b'],
['massiccio', 'noun', 'b'],
['massimo', 'adjective', 'a'],
['massimo', 'noun', 'a'],
['massimo', 'adverb', 'a'],
['master', 'noun', 'b'],
['masticare', 'verb', 'b'],
['masturbare', 'verb', 'b'],
['matematica', 'noun', 'b'],
['matematico', 'adjective', 'b'],
['matematico', 'noun', 'b'],
['materasso', 'noun', 'b'],
['materia', 'noun', 'a'],
['materiale', 'adjective', 'a'],
['materiale', 'noun', 'a'],
['maternità', 'noun', 'b'],
['materno', 'adjective', 'b'],
['matita', 'noun', 'b'],
['matricola', 'noun', 'b'],
['matrimoniale', 'adjective', 'b'],
['matrimoniale', 'noun', 'b'],
['matrimonio', 'noun', 'a'],
['mattina', 'noun', 'a'],
['mattinata', 'noun', 'b'],
['mattino', 'noun', 'a'],
['matto', 'adjective', 'a'],
['matto', 'noun', 'a'],
['mattone', 'noun', 'b'],
['mattone', 'adjective', 'b'],
['mattone', 'noun', 'b'],
['maturare', 'verb', 'b'],
['maturità', 'noun', 'b'],
['maturo', 'adjective', 'b'],
['mazzo', 'noun', 'b'],
['me', 'pronoun', 'a'],
['meccanico', 'adjective', 'a'],
['meccanico', 'noun', 'a'],
['meccanismo', 'noun', 'a'],
['medaglia', 'noun', 'b'],
['medesimo', 'adjective', 'b'],
['medesimo', 'pronoun', 'b'],
['media', 'noun', 'a'],
['media', 'noun', 'b'],
['mediante', 'preposition', 'b'],
['medicare', 'verb', 'c'],
['medicina', 'noun', 'a'],
['medico', 'noun', 'a'],
['medico', 'adjective', 'b'],
['medievale', 'adjective', 'b'],
['medio', 'adjective', 'a'],
['medio', 'noun', 'a'],
['medioevo', 'noun', 'b'],
['meditare', 'verb', 'b'],
['mediterraneo', 'adjective', 'b'],
['mediterraneo', 'noun', 'b'],
['meglio', 'adverb', 'a'],
['meglio', 'adjective', 'a'],
['meglio', 'noun', 'a'],
['mela', 'noun', 'b'],
['melagrana', 'noun', 'c'],
['melanzana', 'noun', 'c'],
['melo', 'noun', 'c'],
['melograno', 'noun', 'c'],
['melone', 'noun', 'c'],
['membrana', 'noun', 'b'],
['membro', 'noun', 'a'],
['memoria', 'noun', 'a'],
['menare', 'verb', 'b'],
['mendicante', 'pres_part', 'c'],
['mendicante', 'adjective', 'c'],
['mendicante', 'noun', 'c'],
['meno', 'adverb', 'a'],
['meno', 'adjective', 'a'],
['meno', 'preposition', 'a'],
['meno', 'noun', 'a'],
['mensa', 'noun', 'b'],
['mensile', 'adjective', 'b'],
['mensile', 'noun', 'b'],
['mensola', 'noun', 'c'],
['menta', 'noun', 'c'],
['mentale', 'adjective', 'a'],
['mentalità', 'noun', 'b'],
['mente', 'noun', 'a'],
['mentire', 'verb', 'a'],
['mento', 'noun', 'b'],
['mentre', 'conjunction', 'a'],
['menu', 'noun', 'b'],
['menzogna', 'noun', 'b'],
['meraviglia', 'noun', 'b'],
['meravigliare', 'verb', 'b'],
['meraviglioso', 'adjective', 'a'],
['meraviglioso', 'noun', 'a'],
['mercante', 'noun', 'b'],
['mercato', 'noun', 'a'],
['merce', 'noun', 'b'],
['merceria', 'noun', 'c'],
['mercoledì', 'noun', 'b'],
['merda', 'noun', 'a'],
['merenda', 'noun', 'c'],
['merendina', 'noun', 'c'],
['meridiano', 'adjective', 'c'],
['meridiano', 'noun', 'c'],
['meridionale', 'adjective', 'a'],
['meridionale', 'noun', 'a'],
['meridione', 'noun', 'c'],
['meritare', 'verb', 'a'],
['merito', 'noun', 'a'],
['merlo', 'noun', 'c'],
['merluzzo', 'noun', 'c'],
['mero', 'adjective', 'b'],
['mescolare', 'verb', 'b'],
['mese', 'noun', 'a'],
['messa', 'noun', 'b'],
['messa', 'noun', 'b'],
['messaggio', 'noun', 'a'],
['messe', 'noun', 'c'],
['messicano', 'adjective', 'c'],
['messicano', 'noun', 'c'],
['mestiere', 'noun', 'a'],
['mestolo', 'noun', 'c'],
['mestruazione', 'noun', 'c'],
['metà', 'noun', 'a'],
['meta', 'noun', 'b'],
['metafora', 'noun', 'b'],
['metallico', 'adjective', 'b'],
['metallo', 'noun', 'b'],
['metalmeccanico', 'adjective', 'c'],
['metalmeccanico', 'noun', 'c'],
['meteo', 'adjective', 'b'],
['meteo', 'noun', 'b'],
['metodo', 'noun', 'a'],
['metro', 'noun', 'a'],
['metropolitano', 'adjective', 'b'],
['metropolitano', 'noun', 'b'],
['mettere', 'verb', 'a'],
['mezzanotte', 'noun', 'b'],
['mezzo', 'adjective', 'a'],
['mezzo', 'noun', 'a'],
['mezzo', 'adverb', 'a'],
['mezzogiorno', 'noun', 'b'],
['mi', 'pronoun', 'a'],
['miagolare', 'verb', 'c'],
['mica', 'noun', 'a'],
['mica', 'adverb', 'a'],
['micio', 'noun', 'c'],
['microfono', 'noun', 'b'],
['miele', 'noun', 'b'],
['miele', 'adjective', 'b'],
['mietere', 'verb', 'c'],
['migliaio', 'noun', 'c'],
['migliaio', 'noun', 'a'],
['miglioramento', 'noun', 'b'],
['migliorare', 'verb', 'a'],
['migliore', 'adjective', 'a'],
['migliore', 'noun', 'a'],
['migliore', 'adverb', 'a'],
['mignolo', 'noun', 'c'],
['mila', 'adjective', 'a'],
['milanese', 'adjective', 'b'],
['milanese', 'noun', 'b'],
['miliardo', 'noun', 'a'],
['milione', 'noun', 'a'],
['militare', 'adjective', 'a'],
['militare', 'noun', 'a'],
['mille', 'adjective', 'a'],
['mille', 'noun', 'a'],
['millennio', 'noun', 'b'],
['millimetro', 'noun', 'b'],
['mimosa', 'noun', 'c'],
['minaccia', 'noun', 'b'],
['minacciare', 'verb', 'a'],
['minchia', 'noun', 'b'],
['minestra', 'noun', 'c'],
['minestrone', 'noun', 'c'],
['mini', 'adjective', 'c'],
['miniera', 'noun', 'b'],
['minigonna', 'noun', 'c'],
['minimo', 'adjective', 'a'],
['minimo', 'noun', 'a'],
['ministero', 'noun', 'a'],
['ministro', 'noun', 'a'],
['minoranza', 'noun', 'b'],
['minore', 'adjective', 'a'],
['minore', 'noun', 'a'],
['minuscolo', 'adjective', 'b'],
['minuto', 'noun', 'a'],
['mio', 'adjective', 'a'],
['mio', 'pronoun', 'a'],
['miracolo', 'noun', 'a'],
['mirare', 'verb', 'b'],
['mischiare', 'verb', 'b'],
['miscuglio', 'noun', 'c'],
['miseria', 'noun', 'b'],
['misero', 'adjective', 'b'],
['missile', 'adjective', 'c'],
['missile', 'noun', 'c'],
['missione', 'noun', 'a'],
['mister', 'noun', 'c'],
['misterioso', 'adjective', 'b'],
['mistero', 'noun', 'a'],
['misto', 'adjective', 'b'],
['misto', 'noun', 'b'],
['misura', 'noun', 'a'],
['misurare', 'verb', 'b'],
['misurazione', 'noun', 'c'],
['mitico', 'adjective', 'b'],
['mito', 'noun', 'b'],
['mitragliatrice', 'noun', 'c'],
['mobile', 'adjective', 'a'],
['mobile', 'noun', 'a'],
['mobilio', 'noun', 'c'],
['mocassino', 'noun', 'c'],
['moda', 'noun', 'a'],
['modalità', 'noun', 'b'],
['modella', 'noun', 'b'],
['modellare', 'verb', 'c'],
['modello', 'noun', 'a'],
['moderato', 'past_part', 'b'],
['moderato', 'adjective', 'b'],
['moderato', 'adverb', 'b'],
['moderato', 'noun', 'b'],
['moderatore', 'adjective', 'b'],
['moderatore', 'noun', 'b'],
['modernità', 'noun', 'b'],
['moderno', 'adjective', 'a'],
['moderno', 'noun', 'a'],
['modestia', 'noun', 'c'],
['modesto', 'adjective', 'b'],
['modifica', 'noun', 'b'],
['modificare', 'verb', 'a'],
['modificazione', 'noun', 'b'],
['modo', 'noun', 'a'],
['modulo', 'noun', 'b'],
['moglie', 'noun', 'a'],
['molecola', 'noun', 'b'],
['molisano', 'adjective', 'c'],
['molisano', 'noun', 'c'],
['molla', 'noun', 'c'],
['mollare', 'verb', 'b'],
['mollusco', 'noun', 'c'],
['molo', 'noun', 'c'],
['moltiplicare', 'verb', 'b'],
['molto', 'adjective', 'a'],
['molto', 'pronoun', 'a'],
['molto', 'adverb', 'a'],
['molto', 'noun', 'a'],
['momento', 'noun', 'a'],
['monaca', 'noun', 'c'],
['monaco', 'noun', 'c'],
['monarchica', 'noun', 'c'],
['mondiale', 'adjective', 'a'],
['mondiale', 'noun', 'a'],
['mondo', 'noun', 'a'],
['monello', 'noun', 'c'],
['moneta', 'noun', 'a'],
['monetario', 'adjective', 'b'],
['monitor', 'noun', 'b'],
['monologo', 'noun', 'b'],
['montaggio', 'noun', 'b'],
['montagna', 'noun', 'a'],
['montare', 'verb', 'b'],
['monte', 'noun', 'a'],
['montenegrino', 'adjective', 'c'],
['montenegrino', 'noun', 'c'],
['monumento', 'noun', 'b'],
['mora', 'noun', 'b'],
['morale', 'adjective', 'a'],
['morale', 'noun', 'a'],
['morbido', 'adjective', 'b'],
['morbido', 'noun', 'b'],
['mordere', 'verb', 'b'],
['morire', 'verb', 'a'],
['moro', 'adjective', 'b'],
['moro', 'noun', 'b'],
['morsicare', 'verb', 'c'],
['morso', 'noun', 'c'],
['mortadella', 'noun', 'c'],
['mortale', 'adjective', 'b'],
['mortale', 'noun', 'b'],
['morte', 'noun', 'a'],
['morto', 'past_part', 'a'],
['morto', 'adjective', 'a'],
['morto', 'noun', 'a'],
['mosca', 'noun', 'b'],
['moscovita', 'adjective', 'c'],
['moscovita', 'noun', 'c'],
['mossa', 'noun', 'b'],
['mostarda', 'noun', 'c'],
['mostra', 'noun', 'a'],
['mostrare', 'verb', 'a'],
['mostro', 'noun', 'b'],
['motel', 'noun', 'c'],
['motivare', 'verb', 'b'],
['motivazione', 'noun', 'b'],
['motivo', 'noun', 'a'],
['moto', 'noun', 'a'],
['moto', 'noun', 'b'],
['motociclismo', 'noun', 'c'],
['motociclista', 'adjective', 'c'],
['motociclista', 'noun', 'c'],
['motore', 'adjective', 'a'],
['motore', 'noun', 'a'],
['motorino', 'noun', 'b'],
['motoscafo', 'noun', 'c'],
['mousse', 'noun', 'c'],
['movimento', 'noun', 'a'],
['mozzarella', 'noun', 'c'],
['mucca', 'noun', 'b'],
['mucchio', 'noun', 'b'],
['muggire', 'verb', 'c'],
['muggito', 'past_part', 'c'],
['muggito', 'noun', 'c'],
['mugnaio', 'noun', 'c'],
['mugolare', 'verb', 'c'],
['mulino', 'noun', 'c'],
['multa', 'noun', 'b'],
['multare', 'verb', 'c'],
['multinazionale', 'adjective', 'b'],
['multinazionale', 'noun', 'b'],
['multiplo', 'adjective', 'b'],
['multiplo', 'noun', 'b'],
['multipresa', 'noun', 'c'],
['mummia', 'noun', 'c'],
['mungere', 'verb', 'c'],
['municipio', 'noun', 'c'],
['muovere', 'verb', 'a'],
['murare', 'verb', 'c'],
['muratore', 'noun', 'c'],
['muro', 'noun', 'a'],
['muschio', 'noun', 'c'],
['muschio', 'adjective', 'c'],
['muscolare', 'adjective', 'b'],
['muscolare', 'noun', 'b'],
['muscolo', 'noun', 'a'],
['museo', 'noun', 'a'],
['musica', 'noun', 'a'],
['musicale', 'adjective', 'a'],
['musicista', 'noun', 'b'],
['muso', 'noun', 'b'],
['musulmano', 'adjective', 'b'],
['musulmano', 'noun', 'b'],
['muta', 'noun', 'c'],
['mutamento', 'noun', 'b'],
['mutanda', 'noun', 'b'],
['mutandina', 'noun', 'c'],
['mutare', 'verb', 'b'],
['mutazione', 'noun', 'b'],
['mutilato', 'past_part', 'c'],
['mutilato', 'adjective', 'c'],
['mutilato', 'noun', 'c'],
['muto', 'adjective', 'b'],
['muto', 'noun', 'b'],
['mutuo', 'noun', 'b'],
['nanna', 'noun', 'c'],
['nano', 'adjective', 'b'],
['nano', 'noun', 'b'],
['napoletano', 'adjective', 'b'],
['napoletano', 'noun', 'b'],
['narrare', 'verb', 'b'],
['narrativo', 'adjective', 'b'],
['narratore', 'noun', 'b'],
['narrazione', 'noun', 'b'],
['nasale', 'adjective', 'b'],
['nasale', 'noun', 'b'],
['nascere', 'verb', 'a'],
['nascere', 'noun', 'a'],
['nascita', 'noun', 'a'],
['nascondere', 'verb', 'a'],
['nascondiglio', 'noun', 'c'],
['nascondino', 'noun', 'c'],
['nascosto', 'past_part', 'a'],
['nascosto', 'adjective', 'a'],
['nascosto', 'noun', 'a'],
['naso', 'noun', 'a'],
['nastro', 'noun', 'a'],
['natale', 'adjective', 'a'],
['natale', 'noun', 'a'],
['natalizio', 'adjective', 'b'],
['natalizio', 'noun', 'b'],
['nato', 'past_part', 'b'],
['nato', 'adjective', 'b'],
['nato', 'noun', 'b'],
['natura', 'noun', 'a'],
['naturale', 'adjective', 'a'],
['naturale', 'noun', 'a'],
['naturalmente', 'adverb', 'a'],
['naufragio', 'noun', 'c'],
['navale', 'adjective', 'c'],
['nave', 'noun', 'a'],
['navicella', 'noun', 'c'],
['navigare', 'verb', 'b'],
['navigazione', 'noun', 'b'],
['nazionale', 'adjective', 'a'],
['nazionale', 'noun', 'a'],
['nazionalità', 'noun', 'c'],
['nazione', 'noun', 'a'],
['nazista', 'adjective', 'b'],
['nazista', 'noun', 'b'],
['ndrangheta', 'noun', 'c'],
['né', 'conjunction', 'a'],
['ne', 'pronoun', 'a'],
['ne', 'adverb', 'a'],
['neanche', 'adverb', 'a'],
['nebbia', 'noun', 'b'],
['necessariamente', 'adverb', 'b'],
['necessario', 'adjective', 'a'],
['necessario', 'noun', 'a'],
['necessità', 'noun', 'a'],
['necessitare', 'verb', 'b'],
['negare', 'verb', 'a'],
['negativo', 'adjective', 'a'],
['negativo', 'noun', 'a'],
['negativo', 'adverb', 'a'],
['negazione', 'noun', 'c'],
['negoziante', 'pres_part', 'c'],
['negoziante', 'noun', 'c'],
['negozio', 'noun', 'a'],
['negro', 'adjective', 'b'],
['negro', 'noun', 'b'],
['nemico', 'adjective', 'a'],
['nemico', 'noun', 'a'],
['nemmeno', 'adverb', 'a'],
['neo', 'noun', 'c'],
['neonato', 'noun', 'b'],
['neonato', 'adjective', 'b'],
['neppure', 'adverb', 'a'],
['nero', 'adjective', 'a'],
['nero', 'noun', 'a'],
['nervo', 'noun', 'b'],
['nervosismo', 'noun', 'c'],
['nervoso', 'adjective', 'a'],
['nervoso', 'noun', 'a'],
['nessuno', 'adjective', 'a'],
['nessuno', 'pronoun', 'a'],
['nettare', 'noun', 'c'],
['netto', 'adjective', 'b'],
['netto', 'noun', 'b'],
['netto', 'adverb', 'b'],
['network', 'noun', 'b'],
['neutro', 'adjective', 'b'],
['neutro', 'noun', 'b'],
['neve', 'noun', 'a'],
['nevicare', 'verb', 'c'],
['news', 'noun', 'b'],
['newyorkese', 'adjective', 'c'],
['newyorkese', 'noun', 'c'],
['nido', 'noun', 'b'],
['niente', 'pronoun', 'a'],
['niente', 'adjective', 'a'],
['niente', 'adverb', 'a'],
['nipote', 'noun', 'a'],
['no', 'adverb', 'a'],
['no', 'noun', 'a'],
['no', 'adjective', 'a'],
['nobile', 'adjective', 'b'],
['nobile', 'noun', 'b'],
['nocciola', 'noun', 'c'],
['nocciola', 'adjective', 'c'],
['nocciolina', 'noun', 'c'],
['nocivo', 'adjective', 'c'],
['nodo', 'noun', 'b'],
['noi', 'pronoun', 'a'],
['noia', 'noun', 'b'],
['noioso', 'adjective', 'b'],
['noleggiare', 'verb', 'c'],
['nome', 'noun', 'a'],
['nomina', 'noun', 'b'],
['nominare', 'verb', 'a'],
['non', 'adverb', 'a'],
['nonché', 'conjunction', 'b'],
['nonna', 'noun', 'a'],
['nonno', 'noun', 'a'],
['nono', 'adjective', 'b'],
['nono', 'noun', 'b'],
['nonostante', 'preposition', 'a'],
['nonostante', 'conjunction', 'a'],
['nord', 'noun', 'a'],
['nord', 'adjective', 'a'],
['nordamericano', 'adjective', 'c'],
['nordamericano', 'noun', 'c'],
['norma', 'noun', 'a'],
['normale', 'adjective', 'a'],
['normale', 'noun', 'a'],
['normalità', 'noun', 'b'],
['normalmente', 'adverb', 'b'],
['normativa', 'noun', 'b'],
['norvegese', 'adjective', 'c'],
['norvegese', 'noun', 'c'],
['nostalgia', 'noun', 'b'],
['nostro', 'adjective', 'a'],
['nostro', 'pronoun', 'a'],
['nota', 'noun', 'a'],
['notaio', 'noun', 'b'],
['notare', 'verb', 'a'],
['notevole', 'adjective', 'b'],
['notizia', 'noun', 'a'],
['noto', 'adjective', 'a'],
['noto', 'noun', 'a'],
['notte', 'noun', 'a'],
['notturno', 'adjective', 'b'],
['notturno', 'noun', 'b'],
['novanta', 'adjective', 'b'],
['novanta', 'noun', 'b'],
['nove', 'adjective', 'a'],
['nove', 'noun', 'a'],
['novella', 'noun', 'c'],
['novembre', 'noun', 'a'],
['novità', 'noun', 'a'],
['nozione', 'noun', 'b'],
['nozze', 'noun', 'b'],
['nube', 'noun', 'b'],
['nucleare', 'adjective', 'a'],
['nucleare', 'noun', 'a'],
['nucleo', 'noun', 'b'],
['nudo', 'adjective', 'a'],
['nudo', 'noun', 'a'],
['nulla', 'pronoun', 'a'],
['nulla', 'adverb', 'a'],
['numerare', 'verb', 'b'],
['numerazione', 'noun', 'c'],
['numero', 'noun', 'a'],
['numeroso', 'adjective', 'a'],
['nuora', 'noun', 'c'],
['nuotare', 'verb', 'b'],
['nuoto', 'noun', 'b'],
['nuovamente', 'adverb', 'b'],
['nuovo', 'adjective', 'a'],
['nuovo', 'noun', 'a'],
['nutrire', 'verb', 'b'],
['nuvola', 'noun', 'b'],
['nuvoloso', 'adjective', 'c'],
['nylon', 'noun', 'c'],
['o', 'noun', 'c'],
['o', 'conjunction', 'a'],
['obbedire', 'verb', 'b'],
['obbiettivo', 'adjective', 'c'],
['obbiettivo', 'noun', 'c'],
['obbligare', 'verb', 'a'],
['obbligatorio', 'adjective', 'b'],
['obbligazione', 'noun', 'b'],
['obbligo', 'noun', 'b'],
['obiettivo', 'adjective', 'a'],
['obiettivo', 'noun', 'a'],
['obiezione', 'noun', 'b'],
['oblò', 'noun', 'c'],
['occasione', 'noun', 'a'],
['occhiaia', 'noun', 'c'],
['occhiale', 'noun', 'a'],
['occhiale', 'adjective', 'a'],
['occhiata', 'noun', 'b'],
['occhiello', 'noun', 'c'],
['occhio', 'noun', 'a'],
['occidentale', 'adjective', 'a'],
['occidentale', 'noun', 'a'],
['occidente', 'noun', 'b'],
['occidente', 'adjective', 'b'],
['occorrere', 'verb', 'a'],
['occupare', 'verb', 'a'],
['occupato', 'past_part', 'c'],
['occupato', 'adjective', 'c'],
['occupato', 'noun', 'c'],
['occupazione', 'noun', 'b'],
['oceano', 'noun', 'b'],
['oculista', 'noun', 'c'],
['oddio', 'exclamation', 'b'],
['odiare', 'verb', 'a'],
['odio', 'noun', 'b'],
['odorare', 'verb', 'c'],
['odore', 'noun', 'a'],
['offendere', 'verb', 'b'],
['offerta', 'noun', 'a'],
['offesa', 'noun', 'b'],
['offeso', 'past_part', 'c'],
['offeso', 'adjective', 'c'],
['offeso', 'noun', 'c'],
['officina', 'noun', 'b'],
['offline', 'adjective', 'b'],
['offline', 'noun', 'b'],
['offrire', 'verb', 'a'],
['oggettivo', 'adjective', 'b'],
['oggetto', 'noun', 'a'],
['oggi', 'adverb', 'a'],
['oggi', 'noun', 'a'],
['ogni', 'adjective', 'a'],
['ognuno', 'pronoun', 'a'],
['ognuno', 'adjective', 'a'],
['ok', 'adverb', 'a'],
['ok', 'noun', 'a'],
['ok', 'adjective', 'a'],
['okay', 'adverb', 'a'],
['okay', 'noun', 'a'],
['okay', 'adjective', 'a'],
['olandese', 'adjective', 'b'],
['olandese', 'noun', 'b'],
['oliare', 'verb', 'c'],
['oliera', 'noun', 'c'],
['olimpico', 'adjective', 'b'],
['olio', 'noun', 'a'],
['oliva', 'noun', 'b'],
['oliva', 'adjective', 'b'],
['oltre', 'adverb', 'a'],
['oltre', 'preposition', 'a'],
['oltrepassare', 'verb', 'c'],
['oltretutto', 'adverb', 'b'],
['omaggio', 'noun', 'b'],
['ombelico', 'noun', 'c'],
['ombra', 'noun', 'a'],
['ombrellone', 'noun', 'c'],
['omicidio', 'noun', 'a'],
['omogeneizzato', 'past_part', 'c'],
['omogeneizzato', 'adjective', 'c'],
['omogeneizzato', 'noun', 'c'],
['omonimo', 'adjective', 'b'],
['omonimo', 'noun', 'b'],
['onda', 'noun', 'a'],
['ondata', 'noun', 'b'],
['ondeggiare', 'verb', 'c'],
['onere', 'noun', 'b'],
['onestamente', 'adverb', 'b'],
['onesto', 'adjective', 'b'],
['onesto', 'noun', 'b'],
['onesto', 'adverb', 'b'],
['online', 'adjective', 'b'],
['online', 'noun', 'b'],
['onorare', 'verb', 'b'],
['onore', 'noun', 'a'],
['opera', 'noun', 'a'],
['operaio', 'noun', 'a'],
['operaio', 'adjective', 'a'],
['operare', 'verb', 'a'],
['operativo', 'adjective', 'b'],
['operativo', 'noun', 'b'],
['operatore', 'adjective', 'b'],
['operatore', 'noun', 'b'],
['operazione', 'noun', 'a'],
['opinione', 'noun', 'a'],
['opporre', 'verb', 'a'],
['opportunità', 'noun', 'b'],
['opportuno', 'adjective', 'b'],
['opposizione', 'noun', 'b'],
['opposto', 'past_part', 'a'],
['opposto', 'adjective', 'a'],
['opposto', 'noun', 'a'],
['oppressivo', 'adjective', 'c'],
['oppresso', 'past_part', 'c'],
['oppresso', 'adjective', 'c'],
['oppresso', 'noun', 'c'],
['oppressore', 'adjective', 'c'],
['oppressore', 'noun', 'c'],
['oppure', 'conjunction', 'a'],
['opzione', 'noun', 'b'],
['ora', 'noun', 'a'],
['ora', 'adverb', 'a'],
['orale', 'adjective', 'b'],
['oramai', 'adverb', 'b'],
['orario', 'adjective', 'a'],
['orario', 'noun', 'a'],
['orbita', 'noun', 'b'],
['orchestra', 'noun', 'b'],
['orco', 'noun', 'b'],
['ordinamento', 'noun', 'b'],
['ordinanza', 'noun', 'b'],
['ordinare', 'verb', 'a'],
['ordinario', 'adjective', 'b'],
['ordinario', 'noun', 'b'],
['ordine', 'noun', 'a'],
['orecchino', 'noun', 'c'],
['orecchio', 'noun', 'a'],
['orefice', 'noun', 'c'],
['organico', 'adjective', 'b'],
['organico', 'noun', 'b'],
['organismo', 'noun', 'a'],
['organizzare', 'verb', 'a'],
['organizzato', 'past_part', 'b'],
['organizzato', 'adjective', 'b'],
['organizzato', 'noun', 'b'],
['organizzazione', 'noun', 'a'],
['organo', 'noun', 'a'],
['orgasmo', 'noun', 'b'],
['orgoglio', 'noun', 'b'],
['orgoglioso', 'adjective', 'b'],
['orientale', 'adjective', 'b'],
['orientale', 'noun', 'b'],
['orientamento', 'noun', 'b'],
['orientare', 'verb', 'b'],
['oriente', 'adjective', 'b'],
['oriente', 'noun', 'b'],
['origano', 'noun', 'c'],
['originale', 'adjective', 'a'],
['originale', 'noun', 'a'],
['originario', 'adjective', 'b'],
['origine', 'noun', 'a'],
['orizzontale', 'adjective', 'b'],
['orizzontale', 'noun', 'b'],
['orizzonte', 'noun', 'b'],
['orlo', 'noun', 'b'],
['orma', 'noun', 'c'],
['ormai', 'adverb', 'a'],
['ormone', 'noun', 'b'],
['oro', 'noun', 'a'],
['orologiaio', 'noun', 'c'],
['orologio', 'noun', 'a'],
['oroscopo', 'noun', 'b'],
['orribile', 'adjective', 'b'],
['orrore', 'noun', 'b'],
['orso', 'noun', 'b'],
['ortaggio', 'noun', 'c'],
['ortensia', 'noun', 'c'],
['ortica', 'noun', 'c'],
['orto', 'noun', 'b'],
['ortolano', 'noun', 'c'],
['ortolano', 'adjective', 'c'],
['orzo', 'noun', 'c'],
['osare', 'verb', 'b'],
['osceno', 'adjective', 'c'],
['oscillare', 'verb', 'b'],
['oscurare', 'verb', 'b'],
['oscuro', 'adjective', 'b'],
['oscuro', 'noun', 'b'],
['oscuro', 'adverb', 'b'],
['ospedale', 'noun', 'a'],
['ospitalità', 'noun', 'c'],
['ospitare', 'verb', 'a'],
['ospite', 'adjective', 'a'],
['ospite', 'noun', 'a'],
['ospizio', 'noun', 'c'],
['osservare', 'verb', 'a'],
['osservazione', 'noun', 'b'],
['ossessione', 'noun', 'b'],
['ossia', 'conjunction', 'b'],
['ossigeno', 'noun', 'b'],
['osso', 'noun', 'a'],
['ostacolare', 'verb', 'b'],
['ostacolo', 'noun', 'b'],
['ostaggio', 'noun', 'c'],
['oste', 'noun', 'c'],
['ostile', 'adjective', 'b'],
['ostinato', 'past_part', 'c'],
['ostinato', 'adjective', 'c'],
['ostrica', 'noun', 'c'],
['ottanta', 'adjective', 'b'],
['ottanta', 'noun', 'b'],
['ottavo', 'adjective', 'b'],
['ottavo', 'noun', 'b'],
['ottenere', 'verb', 'a'],
['ottica', 'noun', 'b'],
['ottimo', 'adjective', 'a'],
['ottimo', 'noun', 'a'],
['otto', 'adjective', 'a'],
['otto', 'noun', 'a'],
['ottobre', 'noun', 'a'],
['ottone', 'noun', 'c'],
['ovale', 'adjective', 'c'],
['ovale', 'noun', 'c'],
['ovatta', 'noun', 'c'],
['ove', 'adverb', 'b'],
['ove', 'conjunction', 'b'],
['ovest', 'noun', 'b'],
['ovest', 'adjective', 'b'],
['ovile', 'noun', 'c'],
['ovino', 'adjective', 'c'],
['ovino', 'noun', 'c'],
['ovunque', 'adverb', 'a'],
['ovunque', 'conjunction', 'a'],
['ovvero', 'conjunction', 'a'],
['ovviamente', 'adverb', 'a'],
['ovviare', 'verb', 'b'],
['ovvio', 'adjective', 'b'],
['ozono', 'noun', 'c'],
['pacchetto', 'noun', 'b'],
['pacco', 'noun', 'b'],
['pace', 'noun', 'a'],
['padella', 'noun', 'c'],
['padre', 'noun', 'a'],
['padrona', 'noun', 'b'],
['padronato', 'noun', 'c'],
['padrone', 'noun', 'a'],
['padroneggiare', 'verb', 'c'],
['paesaggio', 'noun', 'b'],
['paese', 'noun', 'a'],
['paga', 'noun', 'b'],
['pagamento', 'noun', 'a'],
['pagare', 'verb', 'a'],
['pagella', 'noun', 'c'],
['pagina', 'noun', 'a'],
['paglia', 'noun', 'b'],
['paglia', 'adjective', 'b'],
['pagliaio', 'noun', 'c'],
['pago', 'past_part', 'b'],
['pago', 'adjective', 'b'],
['paio', 'noun', 'a'],
['pala', 'noun', 'b'],
['palato', 'noun', 'c'],
['palazzina', 'noun', 'c'],
['palazzo', 'noun', 'a'],
['palco', 'noun', 'b'],
['palcoscenico', 'noun', 'b'],
['palermitano', 'adjective', 'c'],
['palermitano', 'noun', 'c'],
['palestinese', 'adjective', 'c'],
['palestinese', 'noun', 'c'],
['palestra', 'noun', 'b'],
['paletta', 'noun', 'c'],
['palla', 'noun', 'a'],
['pallacanestro', 'noun', 'c'],
['pallanuoto', 'noun', 'c'],
['pallavolo', 'noun', 'c'],
['pallido', 'adjective', 'b'],
['pallina', 'noun', 'b'],
['pallino', 'noun', 'c'],
['palloncino', 'noun', 'c'],
['pallone', 'noun', 'b'],
['pallottola', 'noun', 'c'],
['pallottoliere', 'noun', 'c'],
['palma', 'noun', 'c'],
['palo', 'noun', 'b'],
['palombaro', 'noun', 'c'],
['palpebra', 'noun', 'c'],
['palude', 'noun', 'c'],
['panca', 'noun', 'c'],
['pancarrè', 'noun', 'c'],
['pancetta', 'noun', 'c'],
['panchina', 'noun', 'b'],
['pancia', 'noun', 'b'],
['panciotto', 'noun', 'c'],
['panda', 'noun', 'c'],
['pandoro', 'noun', 'c'],
['pane', 'noun', 'a'],
['panetteria', 'noun', 'c'],
['panettiere', 'noun', 'c'],
['panettone', 'noun', 'c'],
['panico', 'adjective', 'b'],
['panico', 'noun', 'b'],
['paniere', 'noun', 'c'],
['panino', 'noun', 'b'],
['panna', 'noun', 'b'],
['pannello', 'noun', 'b'],
['panno', 'noun', 'b'],
['pannocchia', 'noun', 'c'],
['pannolino', 'noun', 'c'],
['pannolone', 'noun', 'c'],
['panorama', 'noun', 'b'],
['pantalone', 'noun', 'a'],
['pantera', 'noun', 'c'],
['pantofola', 'noun', 'c'],
['panzerotto', 'noun', 'c'],
['papa', 'noun', 'a'],
['papà', 'noun', 'a'],
['papavero', 'noun', 'c'],
['papera', 'noun', 'c'],
['papero', 'noun', 'c'],
['pappa', 'noun', 'c'],
['pappagallo', 'noun', 'c'],
['parabola', 'noun', 'c'],
['parabrezza', 'noun', 'c'],
['paracadute', 'noun', 'c'],
['paracadutista', 'noun', 'c'],
['paradiso', 'noun', 'b'],
['paradosso', 'noun', 'b'],
['paradosso', 'adjective', 'b'],
['parafulmine', 'noun', 'c'],
['paragonare', 'verb', 'b'],
['paragone', 'noun', 'b'],
['paralisi', 'noun', 'c'],
['paralizzato', 'past_part', 'c'],
['paralizzato', 'adjective', 'c'],
['parallelepipedo', 'noun', 'c'],
['parallelo', 'adjective', 'b'],
['parallelo', 'noun', 'b'],
['paralume', 'noun', 'c'],
['parametro', 'noun', 'b'],
['paraocchi', 'noun', 'c'],
['parare', 'verb', 'b'],
['paraurti', 'noun', 'c'],
['paravento', 'noun', 'c'],
['parcheggiare', 'verb', 'b'],
['parcheggio', 'noun', 'b'],
['parco', 'noun', 'a'],
['parecchio', 'adjective', 'a'],
['parecchio', 'pronoun', 'a'],
['parecchio', 'adverb', 'a'],
['parecchio', 'adjective', 'a'],
['pareggiare', 'verb', 'c'],
['pareggio', 'noun', 'c'],
['parente', 'noun', 'a'],
['parentesi', 'noun', 'b'],
['parere', 'verb', 'a'],
['parere', 'noun', 'a'],
['parete', 'noun', 'a'],
['pari', 'adjective', 'a'],
['pari', 'adverb', 'a'],
['pari', 'noun', 'a'],
['parigino', 'adjective', 'c'],
['parigino', 'noun', 'c'],
['parità', 'noun', 'c'],
['parlamentare', 'adjective', 'b'],
['parlamentare', 'noun', 'b'],
['parlamento', 'noun', 'b'],
['parlare', 'verb', 'a'],
['parmigiano', 'adjective', 'c'],
['parmigiano', 'noun', 'c'],
['parola', 'noun', 'a'],
['parquet', 'noun', 'c'],
['parroco', 'noun', 'c'],
['parrucca', 'noun', 'c'],
['parrucchiere', 'noun', 'c'],
['parte', 'noun', 'a'],
['parte', 'adverb', 'a'],
['partecipante', 'pres_part', 'b'],
['partecipante', 'adjective', 'b'],
['partecipante', 'noun', 'b'],
['partecipare', 'verb', 'a'],
['partecipazione', 'noun', 'b'],
['parteggiare', 'verb', 'c'],
['partenza', 'noun', 'a'],
['particella', 'noun', 'b'],
['particolare', 'adjective', 'a'],
['particolare', 'noun', 'a'],
['particolarmente', 'adverb', 'a'],
['partigiano', 'noun', 'b'],
['partigiano', 'adjective', 'b'],
['partire', 'verb', 'a'],
['partita', 'noun', 'a'],
['partito', 'noun', 'a'],
['partner', 'noun', 'b'],
['parto', 'noun', 'b'],
['partorire', 'verb', 'b'],
['party', 'noun', 'b'],
['parziale', 'adjective', 'b'],
['parziale', 'noun', 'b'],
['parzialmente', 'adverb', 'b'],
['pascolare', 'verb', 'c'],
['pasqua', 'noun', 'c'],
['pasquale', 'adjective', 'b'],
['passaggio', 'noun', 'a'],
['passare', 'verb', 'a'],
['passata', 'noun', 'c'],
['passatempo', 'noun', 'c'],
['passato', 'past_part', 'a'],
['passato', 'adjective', 'a'],
['passato', 'noun', 'a'],
['passeggero', 'adjective', 'b'],
['passeggero', 'noun', 'b'],
['passeggiare', 'verb', 'b'],
['passeggiata', 'noun', 'b'],
['passeggio', 'noun', 'c'],
['passero', 'noun', 'c'],
['passione', 'noun', 'a'],
['passivo', 'adjective', 'b'],
['passivo', 'noun', 'b'],
['passo', 'noun', 'a'],
['pasta', 'noun', 'a'],
['pasticca', 'noun', 'c'],
['pasticcere', 'noun', 'c'],
['pasticceria', 'noun', 'c'],
['pasticcino', 'noun', 'c'],
['pasticcio', 'noun', 'c'],
['pastiglia', 'noun', 'c'],
['pastina', 'noun', 'c'],
['pasto', 'noun', 'b'],
['pastore', 'noun', 'b'],
['patata', 'noun', 'b'],
['patatina', 'noun', 'c'],
['patè', 'noun', 'c'],
['patente', 'noun', 'b'],
['patetico', 'adjective', 'b'],
['patetico', 'noun', 'b'],
['patologia', 'noun', 'b'],
['patria', 'noun', 'b'],
['patrimonio', 'noun', 'b'],
['pattinaggio', 'noun', 'c'],
['pattinare', 'verb', 'c'],
['pattino', 'noun', 'c'],
['patto', 'noun', 'b'],
['pattumiera', 'noun', 'c'],
['paura', 'noun', 'a'],
['pauroso', 'adjective', 'c'],
['pausa', 'noun', 'a'],
['pavimento', 'noun', 'b'],
['pavone', 'noun', 'c'],
['pavone', 'adjective', 'c'],
['paziente', 'adjective', 'a'],
['paziente', 'noun', 'a'],
['pazienza', 'noun', 'a'],
['pazza', 'noun', 'c'],
['pazzesco', 'adjective', 'b'],
['pazzo', 'adjective', 'a'],
['pazzo', 'noun', 'a'],
['peccato', 'noun', 'b'],
['peccato', 'exclamation', 'b'],
['peccatore', 'noun', 'c'],
['peccatore', 'adjective', 'c'],
['pechinese', 'adjective', 'c'],
['pechinese', 'noun', 'c'],
['pecora', 'noun', 'b'],
['pecorino', 'adjective', 'c'],
['pecorino', 'noun', 'c'],
['pedalare', 'verb', 'c'],
['pedale', 'noun', 'c'],
['pedale', 'adjective', 'c'],
['pedone', 'noun', 'c'],
['pedone', 'adjective', 'c'],
['peggio', 'adverb', 'a'],
['peggio', 'adjective', 'a'],
['peggio', 'noun', 'a'],
['peggioramento', 'noun', 'c'],
['peggiorare', 'verb', 'b'],
['peggiore', 'adjective', 'b'],
['peggiore', 'noun', 'b'],
['peggiore', 'adverb', 'b'],
['pelato', 'past_part', 'c'],
['pelato', 'adjective', 'c'],
['pelato', 'noun', 'c'],
['pelle', 'noun', 'a'],
['pellegrino', 'noun', 'c'],
['pellegrino', 'adjective', 'c'],
['pellerossa', 'adjective', 'c'],
['pellerossa', 'noun', 'c'],
['pelletteria', 'noun', 'c'],
['pellicola', 'noun', 'b'],
['pelo', 'noun', 'b'],
['peloso', 'adjective', 'c'],
['peloso', 'noun', 'c'],
['peluche', 'noun', 'c'],
['pena', 'noun', 'a'],
['penale', 'adjective', 'b'],
['penale', 'noun', 'b'],
['pendere', 'verb', 'b'],
['pendolo', 'noun', 'c'],
['pene', 'noun', 'b'],
['penetrare', 'verb', 'b'],
['penisola', 'noun', 'c'],
['penna', 'noun', 'b'],
['pennarello', 'noun', 'c'],
['pensare', 'verb', 'a'],
['pensiero', 'noun', 'a'],
['pensionato', 'past_part', 'c'],
['pensionato', 'adjective', 'c'],
['pensionato', 'noun', 'c'],
['pensione', 'noun', 'a'],
['pentagono', 'noun', 'c'],
['pentirsi', 'verb', 'b'],
['pentola', 'noun', 'b'],
['penultimo', 'adjective', 'c'],
['pepe', 'noun', 'c'],
['peperoncino', 'noun', 'c'],
['peperone', 'noun', 'c'],
['per', 'preposition', 'a'],
['pera', 'noun', 'c'],
['peraltro', 'adverb', 'b'],
['percentuale', 'adjective', 'b'],
['percentuale', 'noun', 'b'],
['percepire', 'verb', 'a'],
['percezione', 'noun', 'b'],
['perché', 'adverb', 'a'],
['perché', 'conjunction', 'a'],
['perché', 'noun', 'a'],
['perciò', 'conjunction', 'a'],
['percorrere', 'verb', 'b'],
['percorso', 'past_part', 'a'],
['percorso', 'adjective', 'a'],
['percorso', 'noun', 'a'],
['perdere', 'verb', 'a'],
['perdita', 'noun', 'a'],
['perdonare', 'verb', 'a'],
['perdono', 'noun', 'b'],
['perduto', 'past_part', 'b'],
['perduto', 'adjective', 'b'],
['perfettamente', 'adverb', 'a'],
['perfetto', 'past_part', 'a'],
['perfetto', 'adjective', 'a'],
['perfetto', 'noun', 'a'],
['perfezione', 'noun', 'b'],
['perfino', 'adverb', 'a'],
['perfino', 'preposition', 'a'],
['pergola', 'noun', 'c'],
['pergolato', 'noun', 'c'],
['pergolato', 'adjective', 'c'],
['pericolo', 'noun', 'a'],
['pericoloso', 'adjective', 'a'],
['periferia', 'noun', 'b'],
['periodico', 'adjective', 'b'],
['periodico', 'noun', 'b'],
['periodo', 'noun', 'a'],
['perito', 'noun', 'b'],
['perito', 'adjective', 'b'],
['perla', 'noun', 'b'],
['perla', 'adjective', 'b'],
['permaloso', 'adjective', 'c'],
['permaloso', 'noun', 'c'],
['permanente', 'pres_part', 'b'],
['permanente', 'adjective', 'b'],
['permanente', 'noun', 'b'],
['permesso', 'past_part', 'b'],
['permesso', 'adjective', 'b'],
['permesso', 'noun', 'b'],
['permettere', 'verb', 'a'],
['pero', 'noun', 'c'],
['però', 'conjunction', 'a'],
['perpendicolare', 'adjective', 'c'],
['perpendicolare', 'noun', 'c'],
['perplesso', 'adjective', 'b'],
['perquisizione', 'noun', 'b'],
['perseguire', 'verb', 'b'],
['persiana', 'noun', 'c'],
['persiano', 'adjective', 'b'],
['persiano', 'noun', 'b'],
['persino', 'adverb', 'a'],
['perso', 'past_part', 'b'],
['perso', 'adjective', 'b'],
['persona', 'noun', 'a'],
['personaggio', 'noun', 'a'],
['personale', 'adjective', 'a'],
['personale', 'noun', 'a'],
['personale', 'noun', 'a'],
['personalità', 'noun', 'b'],
['personalmente', 'adverb', 'a'],
['pertanto', 'conjunction', 'b'],
['perugino', 'adjective', 'c'],
['perugino', 'noun', 'c'],
['peruviano', 'adjective', 'c'],
['peruviano', 'noun', 'c'],
['pervenire', 'verb', 'b'],
['pesante', 'pres_part', 'a'],
['pesante', 'adjective', 'a'],
['pesante', 'adverb', 'a'],
['pesare', 'verb', 'b'],
['pesca', 'noun', 'c'],
['pesca', 'adjective', 'c'],
['pesca', 'noun', 'b'],
['pescare', 'verb', 'b'],
['pescatore', 'noun', 'b'],
['pescatore', 'adjective', 'b'],
['pesce', 'noun', 'a'],
['peschereccio', 'noun', 'c'],
['peschereccio', 'adjective', 'c'],
['pescheria', 'noun', 'c'],
['pesco', 'noun', 'c'],
['peso', 'noun', 'a'],
['pessimo', 'adjective', 'b'],
['pestare', 'verb', 'c'],
['peste', 'noun', 'c'],
['pesto', 'past_part', 'c'],
['pesto', 'adjective', 'c'],
['pesto', 'noun', 'c'],
['petalo', 'noun', 'c'],
['petardo', 'noun', 'c'],
['petroliera', 'noun', 'c'],
['petrolio', 'noun', 'b'],
['pettegolezzo', 'noun', 'c'],
['pettegolo', 'adjective', 'c'],
['pettegolo', 'noun', 'c'],
['pettinare', 'verb', 'c'],
['pettinatura', 'noun', 'c'],
['pettine', 'noun', 'c'],
['pettirosso', 'noun', 'c'],
['petto', 'noun', 'a'],
['pezza', 'noun', 'c'],
['pezzetto', 'noun', 'b'],
['pezzo', 'noun', 'a'],
['pezzuola', 'noun', 'c'],
['pi', 'noun', 'c'],
['piacere', 'verb', 'a'],
['piacere', 'noun', 'a'],
['piacevole', 'adjective', 'b'],
['piadina', 'noun', 'c'],
['piaga', 'noun', 'c'],
['pialla', 'noun', 'c'],
['piallare', 'verb', 'c'],
['pianeggiante', 'pres_part', 'c'],
['pianeggiante', 'adjective', 'c'],
['pianerottolo', 'noun', 'b'],
['pianeta', 'noun', 'a'],
['piangere', 'verb', 'a'],
['piangere', 'noun', 'a'],
['piano', 'noun', 'a'],
['piano', 'noun', 'a'],
['piano', 'adjective', 'a'],
['piano', 'adverb', 'a'],
['pianoforte', 'noun', 'b'],
['pianoterra', 'noun', 'c'],
['pianta', 'noun', 'a'],
['piantare', 'verb', 'b'],
['pianto', 'noun', 'b'],
['pianura', 'noun', 'b'],
['piastra', 'noun', 'c'],
['piattaforma', 'noun', 'b'],
['piatto', 'adjective', 'a'],
['piatto', 'noun', 'a'],
['piazza', 'noun', 'a'],
['piazzale', 'noun', 'b'],
['piazzare', 'verb', 'b'],
['piccante', 'adjective', 'c'],
['picchiare', 'verb', 'b'],
['piccino', 'adjective', 'c'],
['piccino', 'noun', 'c'],
['piccione', 'noun', 'c'],
['picco', 'noun', 'b'],
['piccolo', 'adjective', 'a'],
['piccolo', 'noun', 'a'],
['piccone', 'noun', 'c'],
['picnic', 'noun', 'c'],
['pidocchio', 'noun', 'c'],
['piede', 'noun', 'a'],
['piega', 'noun', 'b'],
['piegare', 'verb', 'b'],
['pieghevole', 'adjective', 'c'],
['pieghevole', 'noun', 'c'],
['piemontese', 'adjective', 'b'],
['piemontese', 'noun', 'b'],
['piena', 'noun', 'c'],
['pienamente', 'adverb', 'b'],
['pieno', 'adjective', 'a'],
['pieno', 'noun', 'a'],
['pietà', 'noun', 'b'],
['pietra', 'noun', 'a'],
['pigiama', 'noun', 'c'],
['pigione', 'noun', 'c'],
['pigliare', 'verb', 'b'],
['pigna', 'noun', 'c'],
['pigrizia', 'noun', 'c'],
['pigro', 'adjective', 'c'],
['pigro', 'noun', 'c'],
['pila', 'noun', 'b'],
['pillola', 'noun', 'b'],
['pilota', 'noun', 'b'],
['pineta', 'noun', 'c'],
['ping-pong', 'noun', 'c'],
['pinguino', 'noun', 'c'],
['pinna', 'noun', 'c'],
['pinolo', 'noun', 'c'],
['pinza', 'noun', 'c'],
['pinzetta', 'noun', 'c'],
['pioggia', 'noun', 'a'],
['piombo', 'noun', 'b'],
['piombo', 'adjective', 'b'],
['piombo', 'noun', 'b'],
['pioppo', 'noun', 'c'],
['piovere', 'verb', 'b'],
['piovoso', 'adjective', 'c'],
['piovoso', 'noun', 'c'],
['pipì', 'noun', 'c'],
['pipistrello', 'noun', 'c'],
['pirata', 'noun', 'b'],
['piscina', 'noun', 'b'],
['pisello', 'noun', 'c'],
['pisello', 'adjective', 'c'],
['pisolino', 'noun', 'c'],
['pista', 'noun', 'b'],
['pistacchio', 'noun', 'c'],
['pistacchio', 'adjective', 'c'],
['pistola', 'noun', 'a'],
['pittare', 'verb', 'c'],
['pittore', 'noun', 'b'],
['pittore', 'adjective', 'b'],
['pittura', 'noun', 'b'],
['pitturare', 'verb', 'c'],
['più', 'adverb', 'a'],
['più', 'adjective', 'a'],
['più', 'preposition', 'a'],
['più', 'noun', 'a'],
['piuma', 'noun', 'c'],
['piumino', 'noun', 'c'],
['piuttosto', 'adverb', 'a'],
['pizza', 'noun', 'b'],
['pizzeria', 'noun', 'c'],
['pizzetta', 'noun', 'c'],
['pizzicare', 'verb', 'c'],
['pizzo', 'noun', 'c'],
['plaid', 'noun', 'c'],
['plastica', 'noun', 'b'],
['plastico', 'adjective', 'b'],
['plastico', 'noun', 'b'],
['platano', 'noun', 'c'],
['platino', 'noun', 'c'],
['platino', 'adjective', 'c'],
['plurale', 'noun', 'c'],
['plurale', 'adjective', 'c'],
['pneumatico', 'noun', 'c'],
['pochino', 'noun', 'b'],
['poco', 'adjective', 'a'],
['poco', 'pronoun', 'a'],
['poco', 'adverb', 'a'],
['podere', 'noun', 'c'],
['poema', 'noun', 'b'],
['poesia', 'noun', 'a'],
['poeta', 'noun', 'a'],
['poetico', 'adjective', 'b'],
['poetico', 'noun', 'b'],
['poggiapiedi', 'noun', 'c'],
['poggiare', 'verb', 'c'],
['poi', 'adverb', 'a'],
['poiché', 'conjunction', 'a'],
['poker', 'noun', 'b'],
['polacco', 'adjective', 'b'],
['polacco', 'noun', 'b'],
['polemica', 'noun', 'b'],
['polenta', 'noun', 'c'],
['polipo', 'noun', 'c'],
['politica', 'noun', 'a'],
['politico', 'adjective', 'a'],
['politico', 'noun', 'a'],
['polizia', 'noun', 'a'],
['poliziotto', 'noun', 'a'],
['pollaio', 'noun', 'c'],
['pollame', 'noun', 'c'],
['pollice', 'noun', 'b'],
['pollo', 'noun', 'c'],
['polmone', 'noun', 'b'],
['polo', 'noun', 'b'],
['polpa', 'noun', 'c'],
['polpastrello', 'noun', 'c'],
['polpetta', 'noun', 'c'],
['polpo', 'noun', 'c'],
['polsino', 'noun', 'c'],
['polso', 'noun', 'b'],
['poltrona', 'noun', 'b'],
['polvere', 'noun', 'a'],
['polverina', 'noun', 'c'],
['polveroso', 'adjective', 'c'],
['pomata', 'noun', 'c'],
['pomello', 'noun', 'c'],
['pomeriggio', 'noun', 'a'],
['pomodoro', 'noun', 'b'],
['pompa', 'noun', 'b'],
['pompelmo', 'noun', 'c'],
['pompiere', 'noun', 'c'],
['ponte', 'noun', 'a'],
['pony', 'noun', 'c'],
['pop', 'adjective', 'b'],
['pop', 'noun', 'b'],
['popolare', 'adjective', 'a'],
['popolare', 'noun', 'a'],
['popolare', 'verb', 'b'],
['popolarità', 'noun', 'c'],
['popolazione', 'noun', 'a'],
['popolo', 'noun', 'a'],
['porcellana', 'noun', 'c'],
['porcheria', 'noun', 'c'],
['porco', 'noun', 'b'],
['porco', 'adjective', 'b'],
['porgere', 'verb', 'b'],
['porno', 'adjective', 'b'],
['porno', 'noun', 'b'],
['porre', 'verb', 'a'],
['porta', 'noun', 'a'],
['portabagagli', 'noun', 'c'],
['portabagagli', 'adjective', 'c'],
['portacenere', 'noun', 'c'],
['portachiavi', 'noun', 'c'],
['portacipria', 'noun', 'c'],
['portaerei', 'noun', 'c'],
['portafinestra', 'noun', 'c'],
['portafoglio', 'noun', 'b'],
['portafortuna', 'noun', 'c'],
['portale', 'noun', 'b'],
['portamonete', 'noun', 'c'],
['portaombrelli', 'noun', 'c'],
['portare', 'verb', 'a'],
['portata', 'noun', 'b'],
['portatore', 'adjective', 'b'],
['portatore', 'noun', 'b'],
['portiere', 'noun', 'b'],
['portineria', 'noun', 'c'],
['porto', 'noun', 'a'],
['portoghese', 'adjective', 'b'],
['portoghese', 'noun', 'b'],
['portone', 'noun', 'b'],
['porzione', 'noun', 'b'],
['posa', 'noun', 'b'],
['posacenere', 'noun', 'c'],
['posare', 'verb', 'b'],
['posata', 'noun', 'c'],
['positivo', 'adjective', 'a'],
['positivo', 'noun', 'a'],
['positivo', 'adverb', 'a'],
['posizionare', 'verb', 'b'],
['posizione', 'noun', 'a'],
['possedere', 'verb', 'a'],
['possesso', 'noun', 'b'],
['possibile', 'adjective', 'a'],
['possibile', 'noun', 'a'],
['possibilità', 'noun', 'a'],
['post', 'noun', 'b'],
['posta', 'noun', 'a'],
['postale', 'adjective', 'b'],
['postare', 'verb', 'b'],
['posteggiatore', 'noun', 'c'],
['posteriore', 'adjective', 'b'],
['posteriore', 'noun', 'b'],
['postino', 'noun', 'c'],
['postino', 'adjective', 'c'],
['posto', 'noun', 'a'],
['potare', 'verb', 'c'],
['potente', 'pres_part', 'a'],
['potente', 'adjective', 'a'],
['potente', 'noun', 'a'],
['potentino', 'adjective', 'c'],
['potentino', 'noun', 'c'],
['potenza', 'noun', 'b'],
['potenziale', 'adjective', 'b'],
['potenziale', 'noun', 'b'],
['potere', 'verb', 'a'],
['potere', 'noun', 'a'],
['povero', 'adjective', 'a'],
['povertà', 'noun', 'b'],
['pozzanghera', 'noun', 'c'],
['pozzo', 'noun', 'b'],
['praghese', 'adjective', 'c'],
['praghese', 'noun', 'c'],
['pranzo', 'noun', 'a'],
['prassi', 'noun', 'b'],
['pratica', 'noun', 'a'],
['praticamente', 'adverb', 'a'],
['praticare', 'verb', 'b'],
['pratico', 'adjective', 'a'],
['prato', 'noun', 'b'],
['precario', 'adjective', 'b'],
['precedente', 'pres_part', 'a'],
['precedente', 'adjective', 'a'],
['precedente', 'noun', 'a'],
['precedentemente', 'adverb', 'b'],
['precedenza', 'noun', 'b'],
['precedere', 'verb', 'b'],
['precipitare', 'verb', 'b'],
['precisamente', 'adverb', 'b'],
['precisare', 'verb', 'a'],
['precisione', 'noun', 'b'],
['preciso', 'adjective', 'a'],
['preciso', 'adverb', 'a'],
['preda', 'noun', 'b'],
['predisporre', 'verb', 'b'],
['preferenza', 'noun', 'b'],
['preferire', 'verb', 'a'],
['preferito', 'past_part', 'b'],
['preferito', 'adjective', 'b'],
['preferito', 'noun', 'b'],
['pregare', 'verb', 'a'],
['preghiera', 'noun', 'b'],
['pregiato', 'past_part', 'c'],
['pregiato', 'adjective', 'c'],
['pregio', 'noun', 'b'],
['pregiudizio', 'noun', 'b'],
['prego', 'exclamation', 'a'],
['prelevare', 'verb', 'b'],
['preliminare', 'adjective', 'b'],
['preliminare', 'noun', 'b'],
['prémaman', 'adjective', 'c'],
['premere', 'verb', 'b'],
['premessa', 'noun', 'b'],
['premiare', 'verb', 'b'],
['premier', 'noun', 'b'],
['premio', 'noun', 'a'],
['premio', 'adjective', 'a'],
['prendere', 'verb', 'a'],
['prenotare', 'verb', 'b'],
['prenotazione', 'noun', 'c'],
['preoccupare', 'verb', 'a'],
['preoccupato', 'past_part', 'b'],
['preoccupato', 'adjective', 'b'],
['preoccupazione', 'noun', 'b'],
['preparare', 'verb', 'a'],
['preparazione', 'noun', 'b'],
['prepotente', 'adjective', 'c'],
['prepotente', 'noun', 'c'],
['presa', 'noun', 'a'],
['prescindere', 'verb', 'b'],
['prescrivere', 'verb', 'b'],
['prescrizione', 'noun', 'b'],
['presentare', 'verb', 'a'],
['presentazione', 'noun', 'b'],
['presente', 'adjective', 'a'],
['presente', 'noun', 'a'],
['presente', 'adverb', 'a'],
['presenza', 'noun', 'a'],
['presepe', 'noun', 'b'],
['preside', 'noun', 'c'],
['presidente', 'noun', 'a'],
['presidente', 'adjective', 'a'],
['presidenza', 'noun', 'b'],
['pressione', 'noun', 'a'],
['presso', 'adverb', 'a'],
['presso', 'preposition', 'a'],
['presso', 'noun', 'a'],
['presso', 'adjective', 'a'],
['prestare', 'verb', 'a'],
['prestazione', 'noun', 'b'],
['prestigio', 'noun', 'b'],
['prestigioso', 'adjective', 'b'],
['prestito', 'noun', 'b'],
['presto', 'adverb', 'a'],
['presto', 'exclamation', 'a'],
['presto', 'adjective', 'a'],
['presumere', 'verb', 'b'],
['presunto', 'past_part', 'b'],
['presunto', 'adjective', 'b'],
['presupposto', 'past_part', 'b'],
['presupposto', 'adjective', 'b'],
['presupposto', 'noun', 'b'],
['prete', 'noun', 'a'],
['pretendere', 'verb', 'a'],
['pretesa', 'noun', 'b'],
['pretesto', 'noun', 'b'],
['prevalentemente', 'adverb', 'b'],
['prevalere', 'verb', 'b'],
['prevedere', 'verb', 'a'],
['prevedibile', 'adjective', 'b'],
['prevenire', 'verb', 'b'],
['preventivo', 'adjective', 'b'],
['preventivo', 'noun', 'b'],
['prevenzione', 'noun', 'b'],
['previdenza', 'noun', 'c'],
['previsione', 'noun', 'b'],
['previsto', 'past_part', 'a'],
['previsto', 'adjective', 'a'],
['previsto', 'noun', 'a'],
['prezioso', 'adjective', 'a'],
['prezioso', 'noun', 'a'],
['prezzemolo', 'noun', 'c'],
['prezzo', 'noun', 'a'],
['prigione', 'noun', 'b'],
['prigioniero', 'adjective', 'b'],
['prigioniero', 'noun', 'b'],
['prima', 'adverb', 'a'],
['prima', 'adjective', 'a'],
['prima', 'noun', 'a'],
['prima', 'noun', 'a'],
['primario', 'adjective', 'b'],
['primario', 'noun', 'b'],
['primavera', 'noun', 'a'],
['primizia', 'noun', 'c'],
['primo', 'adjective', 'a'],
['primo', 'noun', 'a'],
['primo', 'adverb', 'a'],
['primula', 'noun', 'c'],
['principale', 'adjective', 'a'],
['principale', 'noun', 'a'],
['principalmente', 'adverb', 'b'],
['principe', 'noun', 'a'],
['principe', 'adjective', 'a'],
['principessa', 'noun', 'b'],
['principio', 'noun', 'a'],
['priorità', 'noun', 'b'],
['privacy', 'noun', 'b'],
['privare', 'verb', 'b'],
['privato', 'adjective', 'a'],
['privato', 'noun', 'a'],
['privilegio', 'noun', 'b'],
['privo', 'adjective', 'b'],
['privo', 'preposition', 'b'],
['privo', 'noun', 'b'],
['probabile', 'adjective', 'b'],
['probabilità', 'noun', 'b'],
['probabilmente', 'adverb', 'a'],
['problema', 'noun', 'a'],
['problematico', 'adjective', 'b'],
['procedere', 'verb', 'a'],
['procedimento', 'noun', 'b'],
['procedura', 'noun', 'a'],
['processo', 'noun', 'a'],
['proclamare', 'verb', 'b'],
['procura', 'noun', 'b'],
['procurare', 'verb', 'b'],
['procuratore', 'noun', 'b'],
['prodotto', 'past_part', 'a'],
['prodotto', 'adjective', 'a'],
['prodotto', 'noun', 'a'],
['produrre', 'verb', 'a'],
['produttivo', 'adjective', 'b'],
['produttore', 'adjective', 'b'],
['produttore', 'noun', 'b'],
['produzione', 'noun', 'a'],
['prof', 'noun', 'b'],
['professionale', 'adjective', 'a'],
['professione', 'noun', 'b'],
['professionista', 'noun', 'b'],
['professore', 'noun', 'a'],
['professoressa', 'noun', 'b'],
['profeta', 'noun', 'b'],
['profilattico', 'adjective', 'c'],
['profilattico', 'noun', 'c'],
['profilo', 'noun', 'a'],
['profitto', 'noun', 'b'],
['profondamente', 'adverb', 'b'],
['profondità', 'noun', 'b'],
['profondo', 'adjective', 'a'],
['profondo', 'noun', 'a'],
['profondo', 'adverb', 'a'],
['profumare', 'verb', 'b'],
['profumato', 'past_part', 'c'],
['profumato', 'adjective', 'c'],
['profumo', 'noun', 'b'],
['progettare', 'verb', 'b'],
['progettazione', 'noun', 'b'],
['progetto', 'noun', 'a'],
['programma', 'noun', 'a'],
['programmare', 'verb', 'b'],
['programmazione', 'noun', 'b'],
['progressista', 'adjective', 'c'],
['progressista', 'noun', 'c'],
['progressivo', 'adjective', 'b'],
['progresso', 'noun', 'b'],
['proibire', 'verb', 'b'],
['proiettare', 'verb', 'b'],
['proiettile', 'noun', 'b'],
['proiezione', 'noun', 'b'],
['prolunga', 'noun', 'c'],
['promessa', 'noun', 'b'],
['promettere', 'verb', 'a'],
['promozione', 'noun', 'b'],
['promuovere', 'verb', 'b'],
['pronto', 'adjective', 'a'],
['pronuncia', 'noun', 'c'],
['pronunciare', 'verb', 'a'],
['propaganda', 'noun', 'b'],
['propagandare', 'verb', 'c'],
['proporre', 'verb', 'a'],
['proporzione', 'noun', 'b'],
['proposito', 'noun', 'a'],
['proposizione', 'noun', 'c'],
['proposta', 'noun', 'a'],
['proprietà', 'noun', 'a'],
['proprietario', 'adjective', 'a'],
['proprietario', 'noun', 'a'],
['proprio', 'adjective', 'a'],
['proprio', 'adverb', 'a'],
['proprio', 'noun', 'a'],
['prosa', 'noun', 'b'],
['prosciugare', 'verb', 'c'],
['prosciutto', 'noun', 'b'],
['prosecco', 'noun', 'c'],
['proseguire', 'verb', 'a'],
['prospettiva', 'noun', 'b'],
['prossimo', 'adjective', 'a'],
['prossimo', 'noun', 'a'],
['prostituta', 'noun', 'b'],
['protagonista', 'adjective', 'a'],
['protagonista', 'noun', 'a'],
['proteggere', 'verb', 'a'],
['proteina', 'noun', 'b'],
['protesta', 'noun', 'b'],
['protestare', 'verb', 'b'],
['protetto', 'past_part', 'b'],
['protetto', 'adjective', 'b'],
['protetto', 'noun', 'b'],
['protezione', 'noun', 'b'],
['protocollo', 'noun', 'b'],
['prova', 'noun', 'a'],
['provare', 'verb', 'a'],
['provenienza', 'noun', 'b'],
['provenire', 'verb', 'a'],
['provincia', 'noun', 'a'],
['provinciale', 'adjective', 'b'],
['provinciale', 'noun', 'b'],
['provocare', 'verb', 'a'],
['provola', 'noun', 'c'],
['provolone', 'noun', 'c'],
['provvedere', 'verb', 'b'],
['provvedimento', 'noun', 'b'],
['provvisorio', 'adjective', 'b'],
['prudere', 'verb', 'c'],
['prugna', 'noun', 'c'],
['prugna', 'adjective', 'c'],
['prurito', 'noun', 'c'],
['pseudonimo', 'noun', 'b'],
['pseudonimo', 'adjective', 'b'],
['psichiatra', 'noun', 'b'],
['psichiatria', 'noun', 'c'],
['psichico', 'adjective', 'b'],
['psicologia', 'noun', 'b'],
['psicologico', 'adjective', 'b'],
['psicologo', 'noun', 'b'],
['pub', 'noun', 'b'],
['pubblicare', 'verb', 'a'],
['pubblicazione', 'noun', 'b'],
['pubblicità', 'noun', 'a'],
['pubblicitario', 'adjective', 'b'],
['pubblicitario', 'noun', 'b'],
['pubblico', 'adjective', 'a'],
['pubblico', 'noun', 'a'],
['pugilato', 'noun', 'c'],
['pugliese', 'adjective', 'c'],
['pugliese', 'noun', 'c'],
['pugno', 'noun', 'a'],
['pulce', 'noun', 'c'],
['pulce', 'adjective', 'c'],
['pulcino', 'noun', 'c'],
['puledro', 'noun', 'c'],
['pulire', 'verb', 'a'],
['pulito', 'past_part', 'b'],
['pulito', 'adjective', 'b'],
['pulito', 'noun', 'b'],
['pulizia', 'noun', 'b'],
['pullman', 'noun', 'b'],
['pullover', 'noun', 'c'],
['pulmino', 'noun', 'c'],
['pulsante', 'pres_part', 'b'],
['pulsante', 'adjective', 'b'],
['pulsante', 'noun', 'b'],
['puma', 'noun', 'c'],
['pungere', 'verb', 'c'],
['punire', 'verb', 'b'],
['punizione', 'noun', 'b'],
['punk', 'adjective', 'c'],
['punk', 'noun', 'c'],
['punta', 'noun', 'a'],
['puntare', 'verb', 'a'],
['puntata', 'noun', 'b'],
['puntato', 'past_part', 'b'],
['puntato', 'adjective', 'b'],
['punteggio', 'noun', 'c'],
['puntiglio', 'noun', 'c'],
['puntino', 'noun', 'b'],
['punto', 'noun', 'a'],
['puntuale', 'adjective', 'b'],
['puntura', 'noun', 'c'],
['pupa', 'noun', 'b'],
['pupazzo', 'noun', 'c'],
['pupo', 'noun', 'c'],
['purché', 'conjunction', 'b'],
['pure', 'adverb', 'a'],
['pure', 'conjunction', 'a'],
['purè', 'noun', 'c'],
['purga', 'noun', 'c'],
['puro', 'adjective', 'a'],
['puro', 'noun', 'a'],
['purtroppo', 'adverb', 'a'],
['puttana', 'noun', 'b'],
['puzza', 'noun', 'b'],
['puzzare', 'verb', 'b'],
['puzzle', 'noun', 'c'],
['qua', 'adverb', 'a'],
['quaderno', 'noun', 'b'],
['quadrato', 'past_part', 'b'],
['quadrato', 'adjective', 'b'],
['quadrato', 'noun', 'b'],
['quadrifoglio', 'noun', 'c'],
['quadro', 'adjective', 'a'],
['quadro', 'noun', 'a'],
['quaglia', 'noun', 'c'],
['qualche', 'adjective', 'a'],
['qualche', 'adverb', 'a'],
['qualcosa', 'pronoun', 'a'],
['qualcuno', 'pronoun', 'a'],
['qualcuno', 'adjective', 'a'],
['qualcuno', 'noun', 'a'],
['quale', 'adjective', 'a'],
['quale', 'pronoun', 'a'],
['quale', 'adverb', 'a'],
['quale', 'noun', 'a'],
['qualificare', 'verb', 'b'],
['qualità', 'noun', 'a'],
['qualora', 'conjunction', 'b'],
['qualsiasi', 'adjective', 'a'],
['qualunque', 'adjective', 'a'],
['qualunque', 'pronoun', 'a'],
['quando', 'conjunction', 'a'],
['quando', 'adverb', 'a'],
['quando', 'noun', 'a'],
['quantità', 'noun', 'a'],
['quantitativo', 'adjective', 'b'],
['quantitativo', 'noun', 'b'],
['quanto', 'adjective', 'a'],
['quanto', 'pronoun', 'a'],
['quanto', 'adverb', 'a'],
['quanto', 'noun', 'a'],
['quaranta', 'adjective', 'a'],
['quaranta', 'noun', 'a'],
['quarta', 'noun', 'b'],
['quartiere', 'noun', 'a'],
['quarto', 'adjective', 'a'],
['quarto', 'noun', 'a'],
['quasi', 'adverb', 'a'],
['quasi', 'conjunction', 'a'],
['quattordici', 'adjective', 'b'],
['quattordici', 'noun', 'b'],
['quattro', 'adjective', 'a'],
['quattro', 'noun', 'a'],
['quello', 'adjective', 'a'],
['quello', 'pronoun', 'a'],
['quercia', 'noun', 'c'],
['questione', 'noun', 'a'],
['questo', 'adjective', 'a'],
['questo', 'pronoun', 'a'],
['questura', 'noun', 'b'],
['qui', 'adverb', 'a'],
['quindi', 'adverb', 'a'],
['quindi', 'conjunction', 'a'],
['quindici', 'adjective', 'a'],
['quindici', 'noun', 'a'],
['quinta', 'noun', 'b'],
['quinto', 'adjective', 'b'],
['quinto', 'noun', 'b'],
['quiz', 'noun', 'a'],
['quota', 'noun', 'a'],
['quotidiano', 'adjective', 'a'],
['quotidiano', 'noun', 'a'],
['rabbia', 'noun', 'a'],
['racchetta', 'noun', 'c'],
['racchiudere', 'verb', 'b'],
['raccogliere', 'verb', 'a'],
['raccolta', 'noun', 'a'],
['raccomandare', 'verb', 'b'],
['raccomandazione', 'noun', 'c'],
['raccontare', 'verb', 'a'],
['racconto', 'noun', 'a'],
['raddoppiare', 'verb', 'b'],
['raddrizzare', 'verb', 'c'],
['radere', 'verb', 'c'],
['radiazione', 'noun', 'b'],
['radicale', 'adjective', 'b'],
['radicale', 'noun', 'b'],
['radicchio', 'noun', 'c'],
['radice', 'noun', 'a'],
['radio', 'noun', 'a'],
['radio', 'adjective', 'a'],
['rado', 'adjective', 'b'],
['rado', 'adverb', 'b'],
['raffigurare', 'verb', 'b'],
['raffinato', 'past_part', 'b'],
['raffinato', 'adjective', 'b'],
['raffinato', 'noun', 'b'],
['rafforzamento', 'noun', 'c'],
['rafforzare', 'verb', 'b'],
['raffreddore', 'noun', 'c'],
['ragazza', 'noun', 'a'],
['ragazzino', 'noun', 'a'],
['ragazzo', 'noun', 'a'],
['raggio', 'noun', 'a'],
['raggiungere', 'verb', 'a'],
['ragionamento', 'noun', 'b'],
['ragionare', 'verb', 'b'],
['ragione', 'noun', 'a'],
['ragionevole', 'adjective', 'b'],
['ragioniere', 'noun', 'b'],
['ragnatela', 'noun', 'c'],
['ragno', 'noun', 'c'],
['ragù', 'noun', 'c'],
['rallegrare', 'verb', 'c'],
['rallentare', 'verb', 'b'],
['rame', 'noun', 'b'],
['rammendo', 'noun', 'c'],
['ramo', 'noun', 'b'],
['rampicante', 'pres_part', 'c'],
['rampicante', 'adjective', 'c'],
['rampicante', 'noun', 'c'],
['rana', 'noun', 'c'],
['rancio', 'noun', 'c'],
['rapa', 'noun', 'c'],
['rapidamente', 'adverb', 'b'],
['rapido', 'adjective', 'a'],
['rapido', 'noun', 'a'],
['rapimento', 'noun', 'c'],
['rapina', 'noun', 'b'],
['rapinatore', 'adjective', 'c'],
['rapinatore', 'noun', 'c'],
['rapire', 'verb', 'b'],
['rapporto', 'noun', 'a'],
['rappresentante', 'pres_part', 'b'],
['rappresentante', 'adjective', 'b'],
['rappresentante', 'noun', 'b'],
['rappresentanza', 'noun', 'b'],
['rappresentare', 'verb', 'a'],
['rappresentazione', 'noun', 'b'],
['raramente', 'adverb', 'b'],
['raro', 'adjective', 'a'],
['raro', 'noun', 'a'],
['raro', 'adverb', 'a'],
['rasare', 'verb', 'c'],
['rasoio', 'noun', 'c'],
['rassegna', 'noun', 'b'],
['rassegnare', 'verb', 'b'],
['rassegnazione', 'noun', 'c'],
['rasserenare', 'verb', 'c'],
['rassicurare', 'verb', 'b'],
['rastrello', 'noun', 'c'],
['rata', 'noun', 'c'],
['rateale', 'adjective', 'c'],
['rattristare', 'verb', 'c'],
['rauco', 'adjective', 'c'],
['ravanello', 'noun', 'c'],
['razionale', 'adjective', 'b'],
['razionale', 'noun', 'b'],
['razza', 'noun', 'b'],
['razzo', 'noun', 'c'],
['re', 'noun', 'a'],
['reagire', 'verb', 'a'],
['reale', 'adjective', 'a'],
['reale', 'noun', 'a'],
['realistico', 'adjective', 'b'],
['realizzare', 'verb', 'a'],
['realizzazione', 'noun', 'b'],
['realmente', 'adverb', 'b'],
['realtà', 'noun', 'a'],
['reato', 'noun', 'a'],
['reazione', 'noun', 'a'],
['recare', 'verb', 'a'],
['recensione', 'noun', 'b'],
['recente', 'adjective', 'a'],
['recentemente', 'adverb', 'b'],
['recintare', 'verb', 'c'],
['recinto', 'past_part', 'c'],
['recinto', 'adjective', 'c'],
['recinto', 'noun', 'c'],
['recipiente', 'adjective', 'c'],
['recipiente', 'noun', 'c'],
['reciproco', 'adjective', 'b'],
['reciproco', 'noun', 'b'],
['recita', 'noun', 'c'],
['recitare', 'verb', 'a'],
['reclame', 'noun', 'c'],
['reclame', 'adjective', 'c'],
['reclamo', 'noun', 'c'],
['recluta', 'noun', 'c'],
['record', 'noun', 'b'],
['recuperare', 'verb', 'a'],
['recupero', 'noun', 'b'],
['redazione', 'noun', 'b'],
['reddito', 'noun', 'b'],
['redigere', 'verb', 'b'],
['referendum', 'noun', 'b'],
['regalare', 'verb', 'a'],
['regale', 'adjective', 'b'],
['regalo', 'noun', 'a'],
['reggere', 'verb', 'a'],
['reggimento', 'noun', 'c'],
['reggiseno', 'noun', 'b'],
['regia', 'noun', 'b'],
['regime', 'noun', 'a'],
['regina', 'noun', 'a'],
['regionale', 'adjective', 'b'],
['regionale', 'noun', 'b'],
['regione', 'noun', 'a'],
['regista', 'noun', 'a'],
['registrare', 'verb', 'a'],
['registratore', 'adjective', 'c'],
['registratore', 'noun', 'c'],
['registrazione', 'noun', 'a'],
['registro', 'noun', 'b'],
['regnare', 'verb', 'b'],
['regno', 'noun', 'a'],
['regola', 'noun', 'a'],
['regolamento', 'noun', 'b'],
['regolare', 'adjective', 'b'],
['regolare', 'noun', 'b'],
['regolare', 'verb', 'b'],
['regolarmente', 'adverb', 'b'],
['relativamente', 'adverb', 'b'],
['relativo', 'adjective', 'a'],
['relazione', 'noun', 'a'],
['religione', 'noun', 'a'],
['religioso', 'adjective', 'a'],
['religioso', 'noun', 'a'],
['remare', 'verb', 'c'],
['remo', 'noun', 'c'],
['remoto', 'adjective', 'b'],
['rendere', 'verb', 'a'],
['rene', 'noun', 'b'],
['reparto', 'noun', 'b'],
['repertorio', 'noun', 'b'],
['replica', 'noun', 'b'],
['replicare', 'verb', 'b'],
['repressione', 'noun', 'c'],
['reprimere', 'verb', 'c'],
['repubblica', 'noun', 'a'],
['repubblicano', 'adjective', 'b'],
['repubblicano', 'noun', 'b'],
['requisito', 'noun', 'b'],
['resa', 'noun', 'b'],
['residente', 'adjective', 'b'],
['residente', 'noun', 'b'],
['residenza', 'noun', 'b'],
['residuo', 'adjective', 'b'],
['residuo', 'noun', 'b'],
['resistente', 'pres_part', 'b'],
['resistente', 'adjective', 'b'],
['resistente', 'noun', 'b'],
['resistenza', 'noun', 'b'],
['resistere', 'verb', 'a'],
['resoconto', 'noun', 'c'],
['respingere', 'verb', 'b'],
['respirare', 'verb', 'a'],
['respirazione', 'noun', 'c'],
['respiro', 'noun', 'b'],
['responsabile', 'adjective', 'a'],
['responsabile', 'noun', 'a'],
['responsabilità', 'noun', 'a'],
['restare', 'verb', 'a'],
['restituire', 'verb', 'b'],
['resto', 'noun', 'a'],
['restringere', 'verb', 'b'],
['rete', 'noun', 'a'],
['retorica', 'noun', 'b'],
['retro', 'adverb', 'b'],
['retro', 'noun', 'b'],
['retta', 'noun', 'b'],
['rettangolare', 'adjective', 'c'],
['rettile', 'noun', 'c'],
['rettile', 'adjective', 'c'],
['retto', 'adjective', 'b'],
['retto', 'noun', 'b'],
['revisione', 'noun', 'b'],
['rialzare', 'verb', 'b'],
['riaprire', 'verb', 'b'],
['riassumere', 'verb', 'b'],
['ribadire', 'verb', 'b'],
['ribattere', 'verb', 'b'],
['ribellare', 'verb', 'b'],
['ribelle', 'adjective', 'b'],
['ribelle', 'noun', 'b'],
['ricadere', 'verb', 'b'],
['ricaduta', 'noun', 'c'],
['ricalcare', 'verb', 'c'],
['ricamare', 'verb', 'c'],
['ricambiare', 'verb', 'b'],
['ricambio', 'noun', 'c'],
['ricamo', 'noun', 'c'],
['ricarica', 'noun', 'c'],
['ricavare', 'verb', 'b'],
['ricchezza', 'noun', 'b'],
['riccio', 'adjective', 'c'],
['riccio', 'noun', 'c'],
['ricciolo', 'adjective', 'c'],
['ricciolo', 'noun', 'c'],
['ricco', 'adjective', 'a'],
['ricerca', 'noun', 'a'],
['ricercare', 'verb', 'b'],
['ricercatore', 'adjective', 'b'],
['ricercatore', 'noun', 'b'],
['ricetta', 'noun', 'a'],
['ricevere', 'verb', 'a'],
['ricevimento', 'noun', 'c'],
['ricevuta', 'noun', 'b'],
['richiamare', 'verb', 'a'],
['richiamo', 'noun', 'b'],
['richiedere', 'verb', 'a'],
['richiesta', 'noun', 'a'],
['richiudere', 'verb', 'b'],
['ricominciare', 'verb', 'a'],
['ricompensa', 'noun', 'c'],
['ricompensare', 'verb', 'c'],
['riconciliarsi', 'verb', 'c'],
['riconoscere', 'verb', 'a'],
['riconoscimento', 'noun', 'b'],
['ricopiare', 'verb', 'c'],
['ricoprire', 'verb', 'b'],
['ricordare', 'verb', 'a'],
['ricordo', 'noun', 'a'],
['ricorrere', 'verb', 'b'],
['ricorso', 'noun', 'b'],
['ricostruire', 'verb', 'b'],
['ricostruzione', 'noun', 'b'],
['ricotta', 'noun', 'c'],
['ricoverare', 'verb', 'b'],
['ricovero', 'noun', 'c'],
['ricreazione', 'noun', 'c'],
['ridare', 'verb', 'b'],
['ridere', 'verb', 'a'],
['ridere', 'noun', 'a'],
['ridicolo', 'adjective', 'b'],
['ridicolo', 'noun', 'b'],
['ridotto', 'past_part', 'b'],
['ridotto', 'adjective', 'b'],
['ridotto', 'noun', 'b'],
['ridurre', 'verb', 'a'],
['riduzione', 'noun', 'b'],
['riempire', 'verb', 'a'],
['rientrare', 'verb', 'a'],
['rientro', 'noun', 'b'],
['rifare', 'verb', 'a'],
['riferimento', 'noun', 'a'],
['riferire', 'verb', 'a'],
['rifinire', 'verb', 'c'],
['rifiutare', 'verb', 'a'],
['rifiuto', 'noun', 'a'],
['riflessione', 'noun', 'a'],
['riflesso', 'noun', 'b'],
['riflettere', 'verb', 'a'],
['riflettore', 'noun', 'c'],
['riflettore', 'adjective', 'c'],
['riforma', 'noun', 'b'],
['rifornimento', 'noun', 'c'],
['rifugiare', 'verb', 'b'],
['rifugio', 'noun', 'b'],
['riga', 'noun', 'a'],
['rigattiere', 'noun', 'c'],
['rigido', 'adjective', 'b'],
['rigore', 'noun', 'b'],
['rigoroso', 'adjective', 'b'],
['rigovernare', 'verb', 'c'],
['riguardare', 'verb', 'a'],
['riguardo', 'noun', 'a'],
['rilasciare', 'verb', 'b'],
['rilassare', 'verb', 'a'],
['rilegare', 'verb', 'c'],
['rileggere', 'verb', 'b'],
['rilevante', 'pres_part', 'b'],
['rilevante', 'adjective', 'b'],
['rilevare', 'verb', 'b'],
['rilievo', 'noun', 'b'],
['rima', 'noun', 'b'],
['rimandare', 'verb', 'b'],
['rimanenza', 'noun', 'c'],
['rimanere', 'verb', 'a'],
['rimbombare', 'verb', 'c'],
['rimborsare', 'verb', 'c'],
['rimediare', 'verb', 'b'],
['rimedio', 'noun', 'b'],
['rimettere', 'verb', 'a'],
['rimodernare', 'verb', 'c'],
['rimorchio', 'noun', 'c'],
['rimpiangere', 'verb', 'b'],
['rimproverare', 'verb', 'b'],
['rimprovero', 'noun', 'c'],
['rimuovere', 'verb', 'b'],
['rinascere', 'verb', 'b'],
['rinascimento', 'noun', 'b'],
['rinascimento', 'adjective', 'b'],
['rincarare', 'verb', 'c'],
['rinchiudere', 'verb', 'b'],
['rincorsa', 'noun', 'c'],
['rinforzo', 'noun', 'c'],
['rinfresco', 'noun', 'c'],
['ringhiare', 'verb', 'c'],
['ringhiera', 'noun', 'c'],
['ringhio', 'noun', 'c'],
['ringiovanire', 'verb', 'c'],
['ringraziare', 'verb', 'a'],
['rinnegare', 'verb', 'c'],
['rinnovare', 'verb', 'b'],
['rinoceronte', 'noun', 'c'],
['rintracciare', 'verb', 'b'],
['rinuncia', 'noun', 'c'],
['rinunciare', 'verb', 'a'],
['rinvenire', 'verb', 'b'],
['rinviare', 'verb', 'b'],
['rinvio', 'noun', 'c'],
['rione', 'noun', 'c'],
['riordinare', 'verb', 'c'],
['riparare', 'verb', 'b'],
['riparo', 'noun', 'b'],
['ripartire', 'verb', 'b'],
['ripartire', 'verb', 'b'],
['ripensamento', 'noun', 'c'],
['ripensare', 'verb', 'b'],
['ripetente', 'pres_part', 'c'],
['ripetente', 'adjective', 'c'],
['ripetente', 'noun', 'c'],
['ripetere', 'verb', 'a'],
['ripetizione', 'noun', 'b'],
['ripido', 'adjective', 'c'],
['ripiego', 'noun', 'c'],
['ripieno', 'adjective', 'c'],
['ripieno', 'noun', 'c'],
['riportare', 'verb', 'a'],
['riposare', 'verb', 'b'],
['riposo', 'noun', 'b'],
['riposo', 'loc-comando', 'b'],
['riposo', 'noun', 'b'],
['riprendere', 'verb', 'a'],
['ripresa', 'noun', 'b'],
['riprodurre', 'verb', 'b'],
['riproduzione', 'noun', 'a'],
['riproporre', 'verb', 'b'],
['riprovare', 'verb', 'b'],
['ripulire', 'verb', 'b'],
['risaia', 'noun', 'c'],
['risalire', 'verb', 'a'],
['risarcimento', 'noun', 'b'],
['risata', 'noun', 'b'],
['riscaldamento', 'noun', 'b'],
['riscaldare', 'verb', 'b'],
['riscattare', 'verb', 'c'],
['riscatto', 'noun', 'c'],
['rischiare', 'verb', 'a'],
['rischio', 'noun', 'a'],
['rischioso', 'adjective', 'b'],
['risciacquare', 'verb', 'c'],
['riscontrare', 'verb', 'b'],
['riscontro', 'noun', 'b'],
['riscuotere', 'verb', 'b'],
['risentimento', 'noun', 'c'],
['risentire', 'verb', 'b'],
['riserva', 'noun', 'b'],
['riservare', 'verb', 'a'],
['riservato', 'past_part', 'a'],
['riservato', 'adjective', 'a'],
['risiedere', 'verb', 'b'],
['riso', 'noun', 'b'],
['risoluzione', 'noun', 'b'],
['risolvere', 'verb', 'a'],
['risonanza', 'noun', 'b'],
['risorsa', 'noun', 'a'],
['risparmiare', 'verb', 'b'],
['risparmio', 'noun', 'b'],
['rispettare', 'verb', 'a'],
['rispettivamente', 'adverb', 'b'],
['rispettivo', 'adjective', 'b'],
['rispetto', 'noun', 'a'],
['risplendere', 'verb', 'c'],
['rispondere', 'verb', 'a'],
['risposta', 'noun', 'a'],
['rissa', 'noun', 'b'],
['ristampare', 'verb', 'c'],
['ristorante', 'noun', 'a'],
['ristretto', 'past_part', 'b'],
['ristretto', 'adjective', 'b'],
['ristretto', 'noun', 'b'],
['risultare', 'verb', 'a'],
['risultato', 'past_part', 'a'],
['risultato', 'adjective', 'a'],
['risultato', 'noun', 'a'],
['risvegliare', 'verb', 'b'],
['risveglio', 'noun', 'b'],
['ritagliare', 'verb', 'b'],
['ritardare', 'verb', 'b'],
['ritardo', 'noun', 'a'],
['ritenere', 'verb', 'a'],
['ritirare', 'verb', 'a'],
['ritirata', 'noun', 'c'],
['ritiro', 'noun', 'b'],
['ritmo', 'noun', 'a'],
['rito', 'noun', 'b'],
['ritoccare', 'verb', 'c'],
['ritornare', 'verb', 'a'],
['ritornello', 'noun', 'c'],
['ritorno', 'noun', 'a'],
['ritrarre', 'verb', 'b'],
['ritratto', 'past_part', 'b'],
['ritratto', 'adjective', 'b'],
['ritratto', 'noun', 'b'],
['ritrovare', 'verb', 'a'],
['ritrovo', 'noun', 'c'],
['ritto', 'adjective', 'c'],
['ritto', 'noun', 'c'],
['ritto', 'adverb', 'c'],
['ritto', 'preposition', 'c'],
['rituale', 'adjective', 'b'],
['rituale', 'noun', 'b'],
['riunione', 'noun', 'a'],
['riunire', 'verb', 'a'],
['riunito', 'past_part', 'c'],
['riunito', 'adjective', 'c'],
['riunito', 'noun', 'c'],
['riuscire', 'verb', 'a'],
['riuscita', 'noun', 'c'],
['riva', 'noun', 'b'],
['rivale', 'adjective', 'b'],
['rivale', 'noun', 'b'],
['rivedere', 'verb', 'a'],
['rivelare', 'verb', 'a'],
['rivelazione', 'noun', 'b'],
['rivendicare', 'verb', 'b'],
['rivendita', 'noun', 'c'],
['rivestimento', 'noun', 'c'],
['rivestire', 'verb', 'b'],
['rivincita', 'noun', 'c'],
['rivista', 'noun', 'a'],
['rivisto', 'past_part', 'b'],
['rivisto', 'adjective', 'b'],
['rivolgere', 'verb', 'a'],
['rivolta', 'noun', 'b'],
['rivoltare', 'verb', 'c'],
['rivoluzionario', 'adjective', 'b'],
['rivoluzionario', 'noun', 'b'],
['rivoluzione', 'noun', 'a'],
['roba', 'noun', 'a'],
['robot', 'noun', 'b'],
['robusto', 'adjective', 'b'],
['rocca', 'noun', 'c'],
['rocchetto', 'noun', 'c'],
['roccia', 'noun', 'b'],
['roccioso', 'adjective', 'c'],
['rock', 'noun', 'b'],
['rock', 'adjective', 'b'],
['rodaggio', 'noun', 'c'],
['rodere', 'verb', 'c'],
['romagnolo', 'adjective', 'c'],
['romagnolo', 'noun', 'c'],
['romano', 'adjective', 'a'],
['romano', 'noun', 'a'],
['romantico', 'adjective', 'b'],
['romantico', 'noun', 'b'],
['romanzo', 'noun', 'a'],
['rombo', 'noun', 'c'],
['romeno', 'adjective', 'c'],
['romeno', 'noun', 'c'],
['rompere', 'verb', 'a'],
['rondine', 'noun', 'c'],
['ronzare', 'verb', 'c'],
['ronzio', 'noun', 'c'],
['rosa', 'noun', 'a'],
['rosa', 'adjective', 'a'],
['rosario', 'noun', 'c'],
['rosato', 'adjective', 'c'],
['rosato', 'noun', 'c'],
['roseo', 'adjective', 'c'],
['roseo', 'noun', 'c'],
['rosetta', 'noun', 'c'],
['rosmarino', 'noun', 'c'],
['rosolia', 'noun', 'c'],
['rosso', 'adjective', 'a'],
['rosso', 'noun', 'a'],
['rossore', 'noun', 'c'],
['rosticceria', 'noun', 'c'],
['rotaia', 'noun', 'c'],
['rotella', 'noun', 'c'],
['rotolare', 'verb', 'c'],
['rotondo', 'adjective', 'b'],
['rotondo', 'noun', 'b'],
['rotta', 'noun', 'b'],
['rotto', 'past_part', 'b'],
['rotto', 'adjective', 'b'],
['rotto', 'noun', 'b'],
['rottura', 'noun', 'b'],
['roulotte', 'noun', 'c'],
['rovesciare', 'verb', 'b'],
['rovescio', 'adjective', 'b'],
['rovescio', 'noun', 'b'],
['rovina', 'noun', 'b'],
['rovinare', 'verb', 'a'],
['rovo', 'noun', 'c'],
['rozzo', 'adjective', 'c'],
['rubare', 'verb', 'a'],
['rubinetto', 'noun', 'c'],
['rubrica', 'noun', 'b'],
['rude', 'adjective', 'c'],
['ruga', 'noun', 'c'],
['ruggine', 'noun', 'c'],
['ruggine', 'adjective', 'c'],
['ruggire', 'verb', 'c'],
['ruggito', 'past_part', 'c'],
['ruggito', 'noun', 'c'],
['rullo', 'noun', 'c'],
['rumeno', 'adjective', 'c'],
['rumeno', 'noun', 'c'],
['ruminante', 'pres_part', 'c'],
['ruminante', 'adjective', 'c'],
['ruminante', 'noun', 'c'],
['rumore', 'noun', 'a'],
['ruolo', 'noun', 'a'],
['ruota', 'noun', 'b'],
['ruotare', 'verb', 'b'],
['ruscello', 'noun', 'c'],
['ruspa', 'noun', 'c'],
['russare', 'verb', 'c'],
['russo', 'adjective', 'a'],
['russo', 'noun', 'a'],
['rustico', 'adjective', 'c'],
['rustico', 'noun', 'c'],
['ruttare', 'verb', 'c'],
['rutto', 'noun', 'c'],
['sabato', 'noun', 'a'],
['sabbia', 'noun', 'b'],
['sabbia', 'adjective', 'b'],
['sabotare', 'verb', 'c'],
['saccheggiare', 'verb', 'c'],
['sacchetto', 'noun', 'b'],
['sacco', 'noun', 'a'],
['sacerdote', 'noun', 'b'],
['sacrificare', 'verb', 'b'],
['sacrificio', 'noun', 'b'],
['sacro', 'adjective', 'b'],
['sacro', 'noun', 'b'],
['safari', 'noun', 'c'],
['saga', 'noun', 'b'],
['saggezza', 'noun', 'b'],
['saggio', 'adjective', 'b'],
['saggio', 'noun', 'b'],
['saggio', 'noun', 'b'],
['sagra', 'noun', 'c'],
['sagrestano', 'noun', 'c'],
['sagrestano', 'adjective', 'c'],
['sala', 'noun', 'a'],
['salame', 'noun', 'c'],
['salare', 'verb', 'c'],
['salario', 'adjective', 'b'],
['salario', 'noun', 'b'],
['salatino', 'noun', 'c'],
['salato', 'past_part', 'b'],
['salato', 'adjective', 'b'],
['salato', 'noun', 'b'],
['saldatura', 'noun', 'c'],
['sale', 'noun', 'b'],
['salice', 'noun', 'c'],
['saliera', 'noun', 'c'],
['salire', 'verb', 'a'],
['salita', 'noun', 'b'],
['saliva', 'noun', 'c'],
['salmone', 'noun', 'c'],
['salmone', 'adjective', 'c'],
['salone', 'noun', 'b'],
['salotto', 'noun', 'b'],
['salsa', 'noun', 'b'],
['salsiccia', 'noun', 'c'],
['saltare', 'verb', 'a'],
['saltellare', 'verb', 'c'],
['salto', 'noun', 'b'],
['salume', 'noun', 'c'],
['salutare', 'verb', 'a'],
['salutare', 'noun', 'a'],
['salute', 'noun', 'a'],
['salute', 'exclamation', 'a'],
['saluto', 'noun', 'a'],
['salvadanaio', 'noun', 'c'],
['salvagente', 'noun', 'c'],
['salvare', 'verb', 'a'],
['salvaslip', 'noun', 'c'],
['salvatore', 'adjective', 'b'],
['salvatore', 'noun', 'b'],
['salve', 'exclamation', 'b'],
['salvezza', 'noun', 'b'],
['salvia', 'noun', 'c'],
['salvietta', 'noun', 'c'],
['salvo', 'adjective', 'a'],
['salvo', 'preposition', 'a'],
['sandalo', 'noun', 'c'],
['sangue', 'noun', 'a'],
['sangue', 'adjective', 'a'],
['sanguinare', 'verb', 'c'],
['sanguisuga', 'noun', 'c'],
['sanità', 'noun', 'b'],
['sanitaria', 'noun', 'c'],
['sanitario', 'adjective', 'b'],
['sanitario', 'noun', 'b'],
['sano', 'adjective', 'a'],
['santo', 'adjective', 'a'],
['santo', 'noun', 'a'],
['sanzione', 'noun', 'b'],
['sapere', 'verb', 'a'],
['sapere', 'noun', 'b'],
['sapiente', 'adjective', 'c'],
['sapiente', 'noun', 'c'],
['sapone', 'noun', 'b'],
['saponetta', 'noun', 'c'],
['sapore', 'noun', 'b'],
['saporito', 'past_part', 'c'],
['saporito', 'adjective', 'c'],
['sardina', 'noun', 'c'],
['sardo', 'adjective', 'b'],
['sardo', 'noun', 'b'],
['sarto', 'noun', 'c'],
['sasso', 'noun', 'b'],
['satellite', 'noun', 'b'],
['sazio', 'past_part', 'c'],
['sazio', 'adjective', 'c'],
['sbadato', 'adjective', 'c'],
['sbadato', 'noun', 'c'],
['sbadigliare', 'verb', 'c'],
['sbadiglio', 'noun', 'c'],
['sbagliare', 'verb', 'a'],
['sbagliato', 'past_part', 'a'],
['sbagliato', 'adjective', 'a'],
['sbaglio', 'noun', 'b'],
['sbarbare', 'verb', 'c'],
['sbarcare', 'verb', 'b'],
['sbarra', 'noun', 'c'],
['sbarramento', 'noun', 'c'],
['sbattere', 'verb', 'a'],
['sberla', 'noun', 'c'],
['sbiadire', 'verb', 'c'],
['sbiancare', 'verb', 'c'],
['sbigottire', 'verb', 'c'],
['sbloccare', 'verb', 'c'],
['sboccare', 'verb', 'c'],
['sbocciare', 'verb', 'c'],
['sbocco', 'noun', 'c'],
['sbornia', 'noun', 'c'],
['sbottonare', 'verb', 'c'],
['sbriciolare', 'verb', 'c'],
['sbrigare', 'verb', 'b'],
['sbronza', 'noun', 'c'],
['sbronzo', 'adjective', 'c'],
['sbucciare', 'verb', 'c'],
['sbuffare', 'verb', 'c'],
['scacchiera', 'noun', 'c'],
['scadenza', 'noun', 'b'],
['scadere', 'verb', 'b'],
['scaffale', 'noun', 'b'],
['scafo', 'noun', 'c'],
['scala', 'noun', 'a'],
['scalare', 'verb', 'b'],
['scalata', 'noun', 'c'],
['scaldabagno', 'noun', 'c'],
['scaldare', 'verb', 'b'],
['scalinata', 'noun', 'c'],
['scalino', 'noun', 'c'],
['scalpello', 'noun', 'c'],
['scalzo', 'adjective', 'c'],
['scambiare', 'verb', 'a'],
['scambio', 'noun', 'a'],
['scamorza', 'noun', 'c'],
['scampagnata', 'noun', 'c'],
['scampo', 'noun', 'c'],
['scandalizzare', 'verb', 'c'],
['scandalo', 'noun', 'b'],
['scandire', 'verb', 'b'],
['scansare', 'verb', 'c'],
['scapito', 'noun', 'c'],
['scappamento', 'noun', 'c'],
['scappare', 'verb', 'a'],
['scappatoia', 'noun', 'c'],
['scarabocchiare', 'verb', 'c'],
['scarabocchio', 'noun', 'c'],
['scarafaggio', 'noun', 'c'],
['scarcerare', 'verb', 'c'],
['scaricare', 'verb', 'a'],
['scaricatore', 'noun', 'c'],
['scarico', 'noun', 'b'],
['scarlattina', 'noun', 'c'],
['scarpa', 'noun', 'a'],
['scarpiera', 'noun', 'c'],
['scarpone', 'noun', 'c'],
['scarso', 'adjective', 'b'],
['scartare', 'verb', 'b'],
['scatenare', 'verb', 'b'],
['scatola', 'noun', 'a'],
['scattare', 'verb', 'a'],
['scatto', 'noun', 'b'],
['scavalcare', 'verb', 'c'],
['scavare', 'verb', 'b'],
['scavo', 'noun', 'c'],
['scegliere', 'verb', 'a'],
['scelta', 'noun', 'a'],
['scemo', 'past_part', 'b'],
['scemo', 'adjective', 'b'],
['scemo', 'noun', 'b'],
['scena', 'noun', 'a'],
['scenario', 'noun', 'b'],
['scendere', 'verb', 'a'],
['sceneggiatura', 'noun', 'b'],
['sceriffo', 'noun', 'c'],
['scheda', 'noun', 'b'],
['schedario', 'noun', 'c'],
['scheggia', 'noun', 'c'],
['scheletro', 'noun', 'c'],
['schema', 'noun', 'b'],
['schermo', 'noun', 'a'],
['scherzare', 'verb', 'a'],
['scherzo', 'noun', 'b'],
['scherzoso', 'adjective', 'c'],
['schiacciare', 'verb', 'b'],
['schiacciato', 'past_part', 'c'],
['schiacciato', 'adjective', 'c'],
['schiaffo', 'noun', 'b'],
['schiavo', 'adjective', 'b'],
['schiavo', 'noun', 'b'],
['schiena', 'noun', 'a'],
['schierare', 'verb', 'b'],
['schietto', 'adjective', 'c'],
['schifo', 'noun', 'a'],
['schifo', 'adjective', 'a'],
['schiuma', 'noun', 'c'],
['schizzare', 'verb', 'b'],
['schizzo', 'noun', 'b'],
['sci', 'noun', 'b'],
['scia', 'noun', 'b'],
['sciacquare', 'verb', 'c'],
['scialle', 'noun', 'c'],
['sciame', 'noun', 'c'],
['sciare', 'verb', 'c'],
['sciarpa', 'noun', 'c'],
['sciatore', 'noun', 'c'],
['scientifico', 'adjective', 'a'],
['scientifico', 'noun', 'a'],
['scienza', 'noun', 'a'],
['scienziato', 'noun', 'b'],
['scienziato', 'adjective', 'b'],
['scimmia', 'noun', 'b'],
['scintilla', 'noun', 'b'],
['sciocchezza', 'noun', 'b'],
['sciocco', 'adjective', 'b'],
['sciocco', 'noun', 'b'],
['sciogliere', 'verb', 'b'],
['scioperare', 'verb', 'c'],
['sciopero', 'noun', 'b'],
['scirocco', 'noun', 'c'],
['sciroppo', 'noun', 'c'],
['scivolare', 'verb', 'b'],
['scivolata', 'noun', 'c'],
['scivolo', 'noun', 'c'],
['scocciare', 'verb', 'c'],
['scodella', 'noun', 'c'],
['scodinzolare', 'verb', 'c'],
['scoglio', 'noun', 'c'],
['scoiattolo', 'noun', 'c'],
['scolapiatti', 'noun', 'c'],
['scolaro', 'noun', 'c'],
['scolastico', 'adjective', 'b'],
['scolastico', 'noun', 'b'],
['scolpire', 'verb', 'c'],
['scommessa', 'noun', 'b'],
['scommettere', 'verb', 'b'],
['scomodo', 'adjective', 'c'],
['scomparire', 'verb', 'a'],
['scomparsa', 'noun', 'b'],
['scompartimento', 'noun', 'c'],
['sconfiggere', 'verb', 'b'],
['sconfitta', 'noun', 'b'],
['scongelare', 'verb', 'c'],
['sconosciuto', 'past_part', 'a'],
['sconosciuto', 'adjective', 'a'],
['sconsigliare', 'verb', 'c'],
['scontato', 'past_part', 'b'],
['scontato', 'adjective', 'b'],
['scontento', 'adjective', 'c'],
['sconto', 'noun', 'b'],
['scontrare', 'verb', 'b'],
['scontro', 'noun', 'b'],
['sconvolgere', 'verb', 'b'],
['scopa', 'noun', 'c'],
['scopare', 'verb', 'b'],
['scoperta', 'noun', 'a'],
['scopo', 'noun', 'a'],
['scoppiare', 'verb', 'a'],
['scoprire', 'verb', 'a'],
['scordare', 'verb', 'b'],
['scorgere', 'verb', 'b'],
['scorpione', 'noun', 'c'],
['scorrere', 'verb', 'a'],
['scorretto', 'adjective', 'c'],
['scorso', 'past_part', 'a'],
['scorso', 'adjective', 'a'],
['scorso', 'noun', 'a'],
['scorta', 'noun', 'b'],
['scortese', 'adjective', 'c'],
['scossa', 'noun', 'c'],
['scout', 'noun', 'c'],
['scout', 'adjective', 'c'],
['scozzese', 'adjective', 'c'],
['scozzese', 'noun', 'c'],
['screpolare', 'verb', 'c'],
['scricchiolare', 'verb', 'c'],
['scritta', 'noun', 'b'],
['scritto', 'past_part', 'b'],
['scritto', 'adjective', 'b'],
['scritto', 'noun', 'b'],
['scrittore', 'noun', 'a'],
['scrittura', 'noun', 'a'],
['scrivania', 'noun', 'b'],
['scrivere', 'verb', 'a'],
['scrofa', 'noun', 'c'],
['scrupolo', 'noun', 'c'],
['scudetto', 'noun', 'c'],
['scudo', 'noun', 'b'],
['scultore', 'noun', 'c'],
['scultura', 'noun', 'b'],
['scuola', 'noun', 'a'],
['scuotere', 'verb', 'b'],
['scure', 'noun', 'c'],
['scurire', 'verb', 'c'],
['scuro', 'adjective', 'b'],
['scuro', 'noun', 'b'],
['scuro', 'adverb', 'b'],
['scusa', 'noun', 'a'],
['scusare', 'verb', 'a'],
['sdebitarsi', 'verb', 'c'],
['sdegnare', 'verb', 'c'],
['sdraiare', 'verb', 'b'],
['sdraiato', 'past_part', 'c'],
['sdraiato', 'adjective', 'c'],
['se', 'pronoun', 'a'],
['se', 'conjunction', 'a'],
['se', 'noun', 'a'],
['sebbene', 'conjunction', 'b'],
['seccare', 'verb', 'b'],
['seccatura', 'noun', 'c'],
['secchio', 'noun', 'b'],
['secchione', 'noun', 'b'],
['secco', 'adjective', 'a'],
['secco', 'noun', 'a'],
['secolo', 'noun', 'a'],
['seconda', 'noun', 'b'],
['secondario', 'adjective', 'b'],
['secondario', 'noun', 'b'],
['secondo', 'adjective', 'a'],
['secondo', 'noun', 'a'],
['secondo', 'adverb', 'a'],
['secondo', 'preposition', 'a'],
['secondo', 'conjunction', 'a'],
['sedano', 'noun', 'c'],
['sede', 'noun', 'a'],
['sedere', 'verb', 'a'],
['sedia', 'noun', 'a'],
['sedici', 'adjective', 'b'],
['sedici', 'noun', 'b'],
['sedile', 'noun', 'b'],
['sedurre', 'verb', 'b'],
['seduta', 'noun', 'b'],
['seduttore', 'adjective', 'c'],
['seduttore', 'noun', 'c'],
['seggiolino', 'noun', 'c'],
['seggiovia', 'noun', 'c'],
['segheria', 'noun', 'c'],
['segmento', 'noun', 'b'],
['segnalare', 'verb', 'a'],
['segnalazione', 'noun', 'b'],
['segnale', 'noun', 'a'],
['segnare', 'verb', 'a'],
['segno', 'noun', 'a'],
['segretaria', 'noun', 'b'],
['segretario', 'noun', 'b'],
['segreteria', 'noun', 'b'],
['segreto', 'noun', 'a'],
['segreto', 'adjective', 'a'],
['segreto', 'noun', 'a'],
['segreto', 'adverb', 'a'],
['seguente', 'pres_part', 'a'],
['seguente', 'adjective', 'a'],
['seguente', 'noun', 'a'],
['seguire', 'verb', 'a'],
['seguito', 'noun', 'a'],
['sei', 'adjective', 'a'],
['sei', 'noun', 'a'],
['selezionare', 'verb', 'b'],
['selezione', 'noun', 'b'],
['selva', 'noun', 'c'],
['selvaggina', 'noun', 'c'],
['selvaggio', 'adjective', 'b'],
['selvaggio', 'noun', 'b'],
['semaforo', 'noun', 'c'],
['semantico', 'adjective', 'b'],
['sembrare', 'verb', 'a'],
['seme', 'noun', 'b'],
['semestre', 'noun', 'c'],
['semifreddo', 'adjective', 'c'],
['semifreddo', 'noun', 'c'],
['seminare', 'verb', 'b'],
['semmai', 'conjunction', 'b'],
['semmai', 'adverb', 'b'],
['semolino', 'noun', 'c'],
['semplice', 'adjective', 'a'],
['semplice', 'noun', 'a'],
['semplicemente', 'adverb', 'a'],
['semplicità', 'noun', 'b'],
['semplificare', 'verb', 'b'],
['sempre', 'adverb', 'a'],
['senape', 'noun', 'c'],
['senape', 'adjective', 'c'],
['senato', 'noun', 'b'],
['senatore', 'noun', 'b'],
['sennò', 'adverb', 'b'],
['seno', 'noun', 'a'],
['sensazione', 'noun', 'a'],
['sensibile', 'adjective', 'b'],
['sensibile', 'noun', 'b'],
['sensibilità', 'noun', 'b'],
['senso', 'noun', 'a'],
['sensuale', 'adjective', 'b'],
['sentenza', 'noun', 'a'],
['sentiero', 'noun', 'b'],
['sentimentale', 'adjective', 'b'],
['sentimentale', 'noun', 'b'],
['sentimento', 'noun', 'a'],
['sentire', 'verb', 'a'],
['sentito', 'past_part', 'b'],
['sentito', 'adjective', 'b'],
['senza', 'preposition', 'a'],
['senza', 'conjunction', 'a'],
['separare', 'verb', 'a'],
['separato', 'past_part', 'b'],
['separato', 'adjective', 'b'],
['separato', 'noun', 'b'],
['separazione', 'noun', 'b'],
['sepolto', 'past_part', 'b'],
['sepolto', 'adjective', 'b'],
['sepolto', 'noun', 'b'],
['seppellire', 'verb', 'b'],
['seppia', 'noun', 'c'],
['seppia', 'adjective', 'c'],
['seppia', 'noun', 'c'],
['sequenza', 'noun', 'b'],
['sequestrare', 'verb', 'b'],
['sequestro', 'noun', 'b'],
['sera', 'noun', 'a'],
['serata', 'noun', 'a'],
['serbo', 'adjective', 'c'],
['serbo', 'noun', 'c'],
['serenata', 'noun', 'c'],
['serenità', 'noun', 'b'],
['sereno', 'adjective', 'a'],
['sereno', 'noun', 'a'],
['sergente', 'noun', 'b'],
['seriamente', 'adverb', 'b'],
['serie', 'noun', 'a'],
['serietà', 'noun', 'c'],
['serio', 'adjective', 'a'],
['serio', 'noun', 'a'],
['serpente', 'noun', 'b'],
['serra', 'noun', 'b'],
['servire', 'verb', 'a'],
['servizio', 'noun', 'a'],
['servo', 'noun', 'b'],
['servo', 'adjective', 'b'],
['sessanta', 'adjective', 'b'],
['sessanta', 'noun', 'b'],
['sesso', 'noun', 'a'],
['sessuale', 'adjective', 'a'],
['sesto', 'adjective', 'b'],
['sesto', 'noun', 'b'],
['set', 'noun', 'b'],
['seta', 'noun', 'b'],
['sete', 'noun', 'b'],
['setta', 'noun', 'b'],
['settanta', 'adjective', 'b'],
['settanta', 'noun', 'b'],
['sette', 'adjective', 'a'],
['sette', 'noun', 'a'],
['settembre', 'noun', 'a'],
['settentrione', 'noun', 'c'],
['settimana', 'noun', 'a'],
['settimanale', 'adjective', 'b'],
['settimanale', 'noun', 'b'],
['settimo', 'adjective', 'b'],
['settimo', 'noun', 'b'],
['settore', 'noun', 'a'],
['severo', 'adjective', 'b'],
['sexy', 'adjective', 'b'],
['sezione', 'noun', 'a'],
['sfera', 'noun', 'b'],
['sfida', 'noun', 'a'],
['sfidare', 'verb', 'b'],
['sfiducia', 'noun', 'c'],
['sfigato', 'adjective', 'b'],
['sfigato', 'noun', 'b'],
['sfilare', 'verb', 'b'],
['sfilata', 'noun', 'b'],
['sfinire', 'verb', 'c'],
['sfiorare', 'verb', 'b'],
['sfociare', 'verb', 'c'],
['sfogare', 'verb', 'b'],
['sfoglia', 'noun', 'c'],
['sfogliare', 'verb', 'b'],
['sfogo', 'noun', 'b'],
['sfollamento', 'noun', 'c'],
['sfollare', 'verb', 'c'],
['sfondare', 'verb', 'b'],
['sfondo', 'noun', 'b'],
['sfortunato', 'adjective', 'c'],
['sforzare', 'verb', 'b'],
['sforzo', 'noun', 'a'],
['sfrenato', 'past_part', 'c'],
['sfrenato', 'adjective', 'c'],
['sfruttare', 'verb', 'a'],
['sfuggire', 'verb', 'a'],
['sgabello', 'noun', 'c'],
['sganciare', 'verb', 'c'],
['sgarbato', 'adjective', 'c'],
['sgarbato', 'noun', 'c'],
['sgarbo', 'noun', 'c'],
['sgombro', 'noun', 'c'],
['sgomento', 'noun', 'c'],
['sgonfiare', 'verb', 'c'],
['sgozzare', 'verb', 'c'],
['sgrassare', 'verb', 'c'],
['sgrassatore', 'noun', 'c'],
['sgridare', 'verb', 'c'],
['sguardo', 'noun', 'a'],
['shampoo', 'noun', 'c'],
['share', 'noun', 'b'],
['shopping', 'noun', 'b'],
['shorts', 'noun', 'c'],
['show', 'noun', 'b'],
['sì', 'adverb', 'a'],
['sì', 'noun', 'a'],
['sì', 'adjective', 'a'],
['si', 'pronoun', 'a'],
['sia', 'conjunction', 'a'],
['siamese', 'adjective', 'c'],
['siamese', 'noun', 'c'],
['sicché', 'conjunction', 'b'],
['siccità', 'noun', 'c'],
['siccome', 'conjunction', 'a'],
['siccome', 'adverb', 'a'],
['siciliano', 'adjective', 'b'],
['siciliano', 'noun', 'b'],
['sicuramente', 'adverb', 'a'],
['sicurezza', 'noun', 'a'],
['sicuro', 'adjective', 'a'],
['sicuro', 'noun', 'a'],
['sicuro', 'adverb', 'a'],
['siepe', 'noun', 'c'],
['sigaretta', 'noun', 'a'],
['sigaro', 'noun', 'c'],
['sigla', 'noun', 'b'],
['significare', 'verb', 'a'],
['significativo', 'adjective', 'b'],
['significato', 'past_part', 'a'],
['significato', 'noun', 'a'],
['signora', 'noun', 'a'],
['signore', 'noun', 'a'],
['signorina', 'noun', 'a'],
['silenzio', 'noun', 'a'],
['silenzioso', 'adjective', 'b'],
['sillaba', 'noun', 'c'],
['simbolico', 'adjective', 'b'],
['simbolo', 'noun', 'a'],
['simile', 'adjective', 'a'],
['simile', 'adjective', 'a'],
['simile', 'noun', 'a'],
['simile', 'adverb', 'a'],
['simpatia', 'noun', 'b'],
['simpatico', 'adjective', 'a'],
['simulare', 'verb', 'b'],
['sinceramente', 'adverb', 'b'],
['sincero', 'adjective', 'b'],
['sindacale', 'adjective', 'b'],
['sindacato', 'noun', 'b'],
['sindaco', 'noun', 'b'],
['sindrome', 'noun', 'b'],
['single', 'noun', 'b'],
['singolare', 'adjective', 'b'],
['singolare', 'noun', 'b'],
['singolo', 'adjective', 'a'],
['singolo', 'noun', 'a'],
['sinistra', 'noun', 'a'],
['sinistro', 'adjective', 'a'],
['sinistro', 'noun', 'a'],
['sino', 'preposition', 'a'],
['sino', 'adverb', 'a'],
['sinonimo', 'noun', 'b'],
['sintesi', 'noun', 'b'],
['sintetico', 'adjective', 'b'],
['sintetizzare', 'verb', 'b'],
['sintomo', 'noun', 'b'],
['sir', 'noun', 'b'],
['siriano', 'adjective', 'c'],
['siriano', 'noun', 'c'],
['siringa', 'noun', 'c'],
['sistema', 'noun', 'a'],
['sistemare', 'verb', 'a'],
['sito', 'noun', 'a'],
['sito', 'adjective', 'a'],
['situare', 'verb', 'b'],
['situazione', 'noun', 'a'],
['slacciare', 'verb', 'c'],
['slanciato', 'past_part', 'c'],
['slanciato', 'adjective', 'c'],
['slavo', 'adjective', 'c'],
['slavo', 'noun', 'c'],
['slegare', 'verb', 'c'],
['slip', 'noun', 'c'],
['slitta', 'noun', 'c'],
['slogan', 'noun', 'b'],
['slogare', 'verb', 'c'],
['slogatura', 'noun', 'c'],
['slovacco', 'adjective', 'c'],
['slovacco', 'noun', 'c'],
['sloveno', 'adjective', 'c'],
['sloveno', 'noun', 'c'],
['smacchiare', 'verb', 'c'],
['smacchiatore', 'adjective', 'c'],
['smacchiatore', 'noun', 'c'],
['smaltimento', 'noun', 'b'],
['smalto', 'noun', 'c'],
['smascherare', 'verb', 'c'],
['smentire', 'verb', 'b'],
['smettere', 'verb', 'a'],
['smisurato', 'past_part', 'c'],
['smisurato', 'adjective', 'c'],
['smog', 'noun', 'c'],
['smontare', 'verb', 'b'],
['smorfia', 'noun', 'c'],
['smuovere', 'verb', 'c'],
['snack', 'noun', 'c'],
['sneaker', 'noun', 'c'],
['snello', 'adjective', 'c'],
['soccorrere', 'verb', 'c'],
['soccorso', 'noun', 'b'],
['socialdemocratico', 'adjective', 'c'],
['socialdemocratico', 'noun', 'c'],
['sociale', 'adjective', 'a'],
['sociale', 'noun', 'a'],
['socialista', 'adjective', 'b'],
['socialista', 'noun', 'b'],
['società', 'noun', 'a'],
['socievole', 'adjective', 'c'],
['socio', 'noun', 'b'],
['soddisfare', 'verb', 'a'],
['soddisfatto', 'past_part', 'b'],
['soddisfatto', 'adjective', 'b'],
['soddisfazione', 'noun', 'a'],
['sodo', 'adjective', 'b'],
['sodo', 'noun', 'b'],
['sodo', 'adverb', 'b'],
['sofà', 'noun', 'c'],
['sofferenza', 'noun', 'a'],
['soffermare', 'verb', 'b'],
['soffiare', 'verb', 'b'],
['soffice', 'adjective', 'c'],
['soffitta', 'noun', 'c'],
['soffitto', 'noun', 'b'],
['soffocare', 'verb', 'b'],
['soffriggere', 'verb', 'c'],
['soffrire', 'verb', 'a'],
['sofisticato', 'past_part', 'b'],
['sofisticato', 'adjective', 'b'],
['software', 'noun', 'b'],
['soggettivo', 'adjective', 'b'],
['soggetto', 'noun', 'a'],
['soggetto', 'adjective', 'b'],
['soggezione', 'noun', 'c'],
['soggiorno', 'noun', 'a'],
['soglia', 'noun', 'b'],
['sogliola', 'noun', 'c'],
['sognare', 'verb', 'a'],
['sogno', 'noun', 'a'],
['sol', 'noun', 'c'],
['solaio', 'noun', 'c'],
['solamente', 'adverb', 'a'],
['solamente', 'conjunction', 'a'],
['solare', 'adjective', 'b'],
['solare', 'noun', 'b'],
['solco', 'noun', 'b'],
['soldato', 'noun', 'a'],
['soldo', 'noun', 'a'],
['sole', 'noun', 'a'],
['solenne', 'adjective', 'b'],
['solidarietà', 'noun', 'b'],
['solido', 'adjective', 'b'],
['solido', 'noun', 'b'],
['solitamente', 'adverb', 'b'],
['solitario', 'adjective', 'b'],
['solitario', 'noun', 'b'],
['solito', 'adjective', 'a'],
['solito', 'noun', 'a'],
['solitudine', 'noun', 'b'],
['solletico', 'noun', 'c'],
['sollevare', 'verb', 'a'],
['sollievo', 'noun', 'b'],
['solo', 'adjective', 'a'],
['solo', 'noun', 'a'],
['solo', 'adverb', 'a'],
['solo', 'conjunction', 'a'],
['soltanto', 'adverb', 'a'],
['soltanto', 'conjunction', 'a'],
['soluzione', 'noun', 'a'],
['somigliare', 'verb', 'b'],
['somma', 'noun', 'a'],
['sommare', 'verb', 'b'],
['sondaggio', 'noun', 'a'],
['sonno', 'noun', 'a'],
['sonoro', 'adjective', 'b'],
['sonoro', 'noun', 'b'],
['soppalco', 'noun', 'c'],
['sopportare', 'verb', 'a'],
['sopra', 'preposition', 'a'],
['sopra', 'adverb', 'a'],
['sopra', 'adjective', 'a'],
['sopra', 'noun', 'a'],
['soprabito', 'noun', 'c'],
['sopracciglio', 'noun', 'c'],
['soprammobile', 'noun', 'c'],
['soprannome', 'noun', 'c'],
['soprattutto', 'adverb', 'a'],
['sopravvalutare', 'verb', 'c'],
['sopravvivenza', 'noun', 'b'],
['sopravvivere', 'verb', 'a'],
['sorcio', 'noun', 'c'],
['sordo', 'adjective', 'b'],
['sordo', 'noun', 'b'],
['sorella', 'noun', 'a'],
['sorgente', 'pres_part', 'b'],
['sorgente', 'adjective', 'b'],
['sorgente', 'noun', 'b'],
['sorgere', 'verb', 'b'],
['sorpassare', 'verb', 'c'],
['sorpasso', 'noun', 'c'],
['sorprendente', 'pres_part', 'b'],
['sorprendente', 'adjective', 'b'],
['sorprendere', 'verb', 'b'],
['sorpresa', 'noun', 'a'],
['sorridente', 'pres_part', 'c'],
['sorridente', 'adjective', 'c'],
['sorridere', 'verb', 'a'],
['sorriso', 'noun', 'a'],
['sorso', 'noun', 'c'],
['sorta', 'noun', 'a'],
['sorte', 'noun', 'b'],
['sorteggiare', 'verb', 'c'],
['sorteggio', 'noun', 'c'],
['sorvegliare', 'verb', 'b'],
['sospendere', 'verb', 'b'],
['sospensione', 'noun', 'b'],
['sospeso', 'past_part', 'b'],
['sospeso', 'adjective', 'b'],
['sospeso', 'noun', 'b'],
['sospettare', 'verb', 'b'],
['sospetto', 'noun', 'a'],
['sospetto', 'adjective', 'a'],
['sospetto', 'noun', 'a'],
['sospirare', 'verb', 'b'],
['sospiro', 'noun', 'b'],
['sosta', 'noun', 'b'],
['sostanza', 'noun', 'a'],
['sostanzialmente', 'adverb', 'b'],
['sostare', 'verb', 'c'],
['sostegno', 'noun', 'b'],
['sostenere', 'verb', 'a'],
['sostenitore', 'adjective', 'b'],
['sostenitore', 'noun', 'b'],
['sostituire', 'verb', 'a'],
['sostituzione', 'noun', 'b'],
['sottaceto', 'adjective', 'c'],
['sottaceto', 'adverb', 'c'],
['sottaceto', 'noun', 'c'],
['sotterraneo', 'adjective', 'b'],
['sotterraneo', 'noun', 'b'],
['sottile', 'adjective', 'a'],
['sottile', 'noun', 'a'],
['sottile', 'adverb', 'a'],
['sottinteso', 'past_part', 'c'],
['sottinteso', 'adjective', 'c'],
['sottinteso', 'noun', 'c'],
['sotto', 'preposition', 'a'],
['sotto', 'adverb', 'a'],
['sotto', 'adjective', 'a'],
['sotto', 'noun', 'a'],
['sottofondo', 'noun', 'b'],
['sottolineare', 'verb', 'a'],
['sottolio', 'adverb', 'c'],
['sottolio', 'adjective', 'c'],
['sottomarino', 'adjective', 'c'],
['sottomarino', 'noun', 'c'],
['sottopassaggio', 'noun', 'c'],
['sottoporre', 'verb', 'a'],
['sottoscrivere', 'verb', 'b'],
['sottovalutare', 'verb', 'b'],
['sottrarre', 'verb', 'b'],
['sovietico', 'adjective', 'b'],
['sovietico', 'noun', 'b'],
['sovrano', 'adjective', 'b'],
['sovrano', 'noun', 'b'],
['sovrapporre', 'verb', 'b'],
['spaccare', 'verb', 'b'],
['spaccatura', 'noun', 'c'],
['spacciare', 'verb', 'b'],
['spacciatore', 'noun', 'c'],
['spaccio', 'noun', 'c'],
['spada', 'noun', 'b'],
['spaghetto', 'noun', 'b'],
['spagnolo', 'adjective', 'a'],
['spagnolo', 'noun', 'a'],
['spago', 'noun', 'c'],
['spalancare', 'verb', 'b'],
['spalla', 'noun', 'a'],
['spalmabile', 'adjective', 'c'],
['spalmare', 'verb', 'c'],
['spam', 'noun', 'b'],
['sparare', 'verb', 'a'],
['sparecchiare', 'verb', 'c'],
['spargere', 'verb', 'b'],
['sparire', 'verb', 'a'],
['sparo', 'noun', 'b'],
['sparso', 'past_part', 'b'],
['sparso', 'adjective', 'b'],
['spassare', 'verb', 'b'],
['spasso', 'noun', 'c'],
['spavaldo', 'adjective', 'c'],
['spaventare', 'verb', 'a'],
['spaventato', 'past_part', 'b'],
['spaventato', 'adjective', 'b'],
['spaventoso', 'adjective', 'b'],
['spaziale', 'adjective', 'b'],
['spazio', 'noun', 'a'],
['spazioso', 'adjective', 'c'],
['spazzare', 'verb', 'b'],
['spazzatura', 'noun', 'b'],
['spazzino', 'noun', 'c'],
['spazzola', 'noun', 'c'],
['spazzolare', 'verb', 'c'],
['spazzolino', 'noun', 'c'],
['spazzolone', 'noun', 'c'],
['specchiarsi', 'verb', 'c'],
['specchio', 'noun', 'a'],
['speciale', 'adjective', 'a'],
['speciale', 'noun', 'a'],
['specialista', 'noun', 'b'],
['specializzato', 'past_part', 'b'],
['specializzato', 'adjective', 'b'],
['specializzato', 'noun', 'b'],
['specialmente', 'adverb', 'b'],
['specie', 'noun', 'a'],
['specie', 'adverb', 'a'],
['specificare', 'verb', 'b'],
['specifico', 'adjective', 'a'],
['specifico', 'noun', 'a'],
['speck', 'noun', 'c'],
['spedire', 'verb', 'b'],
['spedizione', 'noun', 'b'],
['spegnere', 'verb', 'a'],
['spellare', 'verb', 'c'],
['spendere', 'verb', 'a'],
['spennare', 'verb', 'c'],
['spensierato', 'adjective', 'c'],
['spento', 'past_part', 'b'],
['spento', 'adjective', 'b'],
['speranza', 'noun', 'a'],
['sperare', 'verb', 'a'],
['sperimentale', 'adjective', 'b'],
['sperimentare', 'verb', 'b'],
['sperimentazione', 'noun', 'b'],
['sperone', 'noun', 'c'],
['spesa', 'noun', 'a'],
['spesso', 'adjective', 'b'],
['spesso', 'adverb', 'a'],
['spessore', 'noun', 'b'],
['spettacolare', 'adjective', 'b'],
['spettacolo', 'noun', 'a'],
['spettare', 'verb', 'b'],
['spettatore', 'noun', 'b'],
['spettinare', 'verb', 'c'],
['spettro', 'noun', 'b'],
['spezia', 'noun', 'c'],
['spezzare', 'verb', 'b'],
['spia', 'noun', 'b'],
['spiacere', 'verb', 'b'],
['spiaggia', 'noun', 'a'],
['spianare', 'verb', 'c'],
['spiare', 'verb', 'b'],
['spiazzo', 'noun', 'c'],
['spiccare', 'verb', 'b'],
['spicciolo', 'adjective', 'c'],
['spicciolo', 'noun', 'c'],
['spiedino', 'noun', 'c'],
['spiedo', 'noun', 'c'],
['spiegare', 'verb', 'a'],
['spiegazione', 'noun', 'a'],
['spietato', 'adjective', 'b'],
['spiga', 'noun', 'c'],
['spigolo', 'noun', 'c'],
['spillo', 'noun', 'c'],
['spina', 'noun', 'b'],
['spinacio', 'noun', 'c'],
['spingere', 'verb', 'a'],
['spinta', 'noun', 'b'],
['spionaggio', 'noun', 'c'],
['spirito', 'noun', 'a'],
['spiritoso', 'adjective', 'c'],
['spirituale', 'adjective', 'b'],
['spirituale', 'noun', 'b'],
['splendente', 'pres_part', 'c'],
['splendente', 'adjective', 'c'],
['splendere', 'verb', 'b'],
['splendido', 'adjective', 'b'],
['splendore', 'noun', 'b'],
['spogliare', 'verb', 'b'],
['spogliatoio', 'noun', 'c'],
['spoglio', 'noun', 'c'],
['spolverare', 'verb', 'c'],
['sponda', 'noun', 'b'],
['spontaneo', 'adjective', 'b'],
['sporcare', 'verb', 'b'],
['sporcizia', 'noun', 'c'],
['sporco', 'adjective', 'a'],
['sporco', 'noun', 'a'],
['sporgente', 'pres_part', 'c'],
['sporgente', 'adjective', 'c'],
['sporgente', 'noun', 'c'],
['sporgere', 'verb', 'b'],
['sport', 'noun', 'a'],
['sport', 'adjective', 'a'],
['sportello', 'noun', 'b'],
['sportivo', 'adjective', 'a'],
['sportivo', 'noun', 'a'],
['sposare', 'verb', 'a'],
['sposato', 'past_part', 'b'],
['sposato', 'adjective', 'b'],
['sposato', 'noun', 'b'],
['sposo', 'noun', 'b'],
['spostamento', 'noun', 'b'],
['spostare', 'verb', 'a'],
['spot', 'noun', 'b'],
['spranga', 'noun', 'c'],
['spray', 'adjective', 'c'],
['spray', 'noun', 'c'],
['sprecare', 'verb', 'b'],
['spreco', 'noun', 'c'],
['spremere', 'verb', 'c'],
['spremuta', 'noun', 'c'],
['sprofondare', 'verb', 'b'],
['sproposito', 'noun', 'c'],
['spruzzare', 'verb', 'c'],
['spuma', 'noun', 'c'],
['spumante', 'pres_part', 'c'],
['spumante', 'adjective', 'c'],
['spumante', 'noun', 'c'],
['spuntare', 'verb', 'b'],
['spuntino', 'noun', 'c'],
['spunto', 'noun', 'b'],
['sputare', 'verb', 'b'],
['sputo', 'noun', 'c'],
['squadra', 'noun', 'a'],
['squallido', 'adjective', 'c'],
['squalo', 'noun', 'c'],
['squarcio', 'noun', 'c'],
['squillare', 'verb', 'b'],
['squisito', 'adjective', 'c'],
['stabile', 'adjective', 'b'],
['stabile', 'noun', 'b'],
['stabilire', 'verb', 'a'],
['stabilità', 'noun', 'b'],
['staccare', 'verb', 'a'],
['stacco', 'noun', 'c'],
['stadio', 'noun', 'b'],
['staffa', 'noun', 'c'],
['stagione', 'noun', 'a'],
['stagno', 'noun', 'c'],
['stalla', 'noun', 'b'],
['stallone', 'noun', 'c'],
['stamattina', 'adverb', 'b'],
['stampa', 'noun', 'a'],
['stampare', 'verb', 'b'],
['stampatello', 'noun', 'c'],
['stampato', 'past_part', 'b'],
['stampato', 'adjective', 'b'],
['stampato', 'noun', 'b'],
['stampella', 'noun', 'c'],
['stampo', 'noun', 'c'],
['stancare', 'verb', 'b'],
['stanchezza', 'noun', 'b'],
['stanco', 'adjective', 'a'],
['standard', 'noun', 'b'],
['standard', 'adjective', 'b'],
['stanga', 'noun', 'c'],
['stanotte', 'adverb', 'b'],
['stanza', 'noun', 'a'],
['star', 'noun', 'b'],
['stare', 'verb', 'a'],
['stasera', 'adverb', 'a'],
['statale', 'adjective', 'b'],
['statale', 'noun', 'b'],
['statistica', 'noun', 'b'],
['statistico', 'adjective', 'b'],
['statistico', 'noun', 'b'],
['stato', 'noun', 'a'],
['stato', 'noun', 'a'],
['statua', 'noun', 'b'],
['statunitense', 'adjective', 'b'],
['statunitense', 'noun', 'b'],
['status', 'noun', 'b'],
['stavolta', 'adverb', 'b'],
['stazione', 'noun', 'a'],
['stella', 'noun', 'a'],
['stellare', 'adjective', 'b'],
['stendere', 'verb', 'b'],
['stendibiancheria', 'noun', 'c'],
['stereo', 'adjective', 'c'],
['stereo', 'noun', 'c'],
['sterlina', 'noun', 'b'],
['sterzare', 'verb', 'c'],
['sterzo', 'noun', 'c'],
['stesso', 'adjective', 'a'],
['stesso', 'pronoun', 'a'],
['stile', 'noun', 'a'],
['stima', 'noun', 'b'],
['stimare', 'verb', 'b'],
['stimolare', 'verb', 'b'],
['stimolo', 'noun', 'b'],
['stinco', 'noun', 'c'],
['stipendiare', 'verb', 'c'],
['stipendio', 'noun', 'a'],
['stirare', 'verb', 'b'],
['stivaletto', 'noun', 'c'],
['stoffa', 'noun', 'b'],
['stomaco', 'noun', 'b'],
['stonare', 'verb', 'c'],
['stop', 'loc-comando', 'c'],
['stop', 'noun', 'c'],
['stoppa', 'noun', 'c'],
['storcere', 'verb', 'c'],
['storia', 'noun', 'a'],
['storico', 'adjective', 'a'],
['storico', 'noun', 'a'],
['stornello', 'noun', 'c'],
['storta', 'noun', 'c'],
['storto', 'past_part', 'b'],
['storto', 'adjective', 'b'],
['storto', 'adverb', 'b'],
['storto', 'noun', 'b'],
['stoviglia', 'noun', 'c'],
['stracchino', 'noun', 'c'],
['straccio', 'noun', 'b'],
['strada', 'noun', 'a'],
['stradale', 'adjective', 'b'],
['stradale', 'noun', 'b'],
['strage', 'noun', 'b'],
['strangolare', 'verb', 'c'],
['straniero', 'adjective', 'a'],
['straniero', 'noun', 'a'],
['strano', 'adjective', 'a'],
['straordinario', 'adjective', 'a'],
['straordinario', 'noun', 'a'],
['strappare', 'verb', 'b'],
['strategia', 'noun', 'a'],
['strategico', 'adjective', 'b'],
['strato', 'noun', 'b'],
['strega', 'noun', 'a'],
['stregare', 'verb', 'b'],
['stregone', 'noun', 'c'],
['stress', 'noun', 'b'],
['stretta', 'noun', 'b'],
['strettamente', 'adverb', 'b'],
['stretto', 'past_part', 'a'],
['stretto', 'adjective', 'a'],
['stretto', 'noun', 'a'],
['strillare', 'verb', 'b'],
['strillo', 'noun', 'c'],
['stringa', 'noun', 'c'],
['stringere', 'verb', 'a'],
['striscia', 'noun', 'b'],
['strisciare', 'verb', 'b'],
['strofinaccio', 'noun', 'c'],
['stronzata', 'noun', 'b'],
['stronzo', 'noun', 'a'],
['stronzo', 'adjective', 'a'],
['strumento', 'noun', 'a'],
['strutto', 'past_part', 'c'],
['strutto', 'adjective', 'c'],
['strutto', 'noun', 'c'],
['struttura', 'noun', 'a'],
['strutturale', 'adjective', 'b'],
['struzzo', 'noun', 'c'],
['studente', 'noun', 'a'],
['studiare', 'verb', 'a'],
['studio', 'noun', 'a'],
['studioso', 'adjective', 'b'],
['studioso', 'noun', 'b'],
['stufa', 'noun', 'c'],
['stuoia', 'noun', 'c'],
['stupefacente', 'pres_part', 'b'],
['stupefacente', 'adjective', 'b'],
['stupefacente', 'noun', 'b'],
['stupendo', 'adjective', 'b'],
['stupido', 'adjective', 'a'],
['stupido', 'noun', 'a'],
['stupire', 'verb', 'b'],
['stupito', 'past_part', 'b'],
['stupito', 'adjective', 'b'],
['stupore', 'noun', 'b'],
['stuzzicadenti', 'noun', 'c'],
['stuzzicare', 'verb', 'c'],
['style', 'noun', 'b'],
['su', 'preposition', 'a'],
['su', 'adverb', 'a'],
['su', 'exclamation', 'a'],
['su', 'noun', 'a'],
['subire', 'verb', 'a'],
['subito', 'adverb', 'a'],
['succedere', 'verb', 'a'],
['successione', 'noun', 'b'],
['successivamente', 'adverb', 'b'],
['successivo', 'adjective', 'a'],
['successo', 'noun', 'a'],
['succhiare', 'verb', 'b'],
['succo', 'noun', 'b'],
['sud', 'noun', 'a'],
['sud', 'adjective', 'a'],
['sudamericano', 'adjective', 'c'],
['sudamericano', 'noun', 'c'],
['sudare', 'verb', 'b'],
['sudato', 'past_part', 'c'],
['sudato', 'adjective', 'c'],
['suddito', 'noun', 'b'],
['suddito', 'adjective', 'b'],
['suddividere', 'verb', 'b'],
['sudicio', 'adjective', 'c'],
['sudicio', 'noun', 'c'],
['sudore', 'noun', 'b'],
['sudtirolese', 'adjective', 'c'],
['sudtirolese', 'noun', 'c'],
['sufficiente', 'adjective', 'a'],
['suggerimento', 'noun', 'b'],
['suggerire', 'verb', 'a'],
['suggestivo', 'adjective', 'b'],
['sughero', 'noun', 'c'],
['sugo', 'noun', 'b'],
['suicidio', 'noun', 'b'],
['suino', 'noun', 'c'],
['suino', 'adjective', 'c'],
['suo', 'adjective', 'a'],
['suo', 'pronoun', 'a'],
['suocera', 'noun', 'c'],
['suocero', 'noun', 'c'],
['suola', 'noun', 'c'],
['suolo', 'noun', 'b'],
['suonare', 'verb', 'a'],
['suono', 'noun', 'a'],
['suora', 'noun', 'a'],
['super', 'adjective', 'b'],
['super', 'noun', 'b'],
['superare', 'verb', 'a'],
['superbia', 'noun', 'c'],
['superficiale', 'adjective', 'b'],
['superficie', 'noun', 'a'],
['superiore', 'adjective', 'a'],
['superiore', 'noun', 'a'],
['supermercato', 'noun', 'b'],
['supporre', 'verb', 'b'],
['supportare', 'verb', 'b'],
['supporto', 'noun', 'a'],
['supremo', 'adjective', 'b'],
['surgelato', 'past_part', 'c'],
['surgelato', 'adjective', 'c'],
['surgelato', 'noun', 'c'],
['suscitare', 'verb', 'b'],
['susina', 'noun', 'c'],
['susino', 'noun', 'c'],
['susseguirsi', 'verb', 'c'],
['sussurrare', 'verb', 'b'],
['svanire', 'verb', 'b'],
['svedese', 'adjective', 'c'],
['svedese', 'noun', 'c'],
['sveglia', 'noun', 'c'],
['svegliare', 'verb', 'a'],
['svegliarsi', 'verb', 'c'],
['sveglio', 'past_part', 'b'],
['sveglio', 'adjective', 'b'],
['svelare', 'verb', 'b'],
['svelto', 'adjective', 'c'],
['svenire', 'verb', 'b'],
['sventola', 'noun', 'c'],
['sviluppare', 'verb', 'a'],
['sviluppato', 'past_part', 'b'],
['sviluppato', 'adjective', 'b'],
['sviluppo', 'noun', 'a'],
['svizzero', 'adjective', 'b'],
['svizzero', 'noun', 'b'],
['svolazzare', 'verb', 'c'],
['svolgere', 'verb', 'a'],
['svolgimento', 'noun', 'c'],
['svolta', 'noun', 'b'],
['svuotare', 'verb', 'b'],
['tabaccaio', 'noun', 'c'],
['tabella', 'noun', 'b'],
['tacca', 'noun', 'c'],
['tacchino', 'noun', 'c'],
['tacco', 'noun', 'b'],
['tacere', 'verb', 'a'],
['tacere', 'noun', 'a'],
['tag', 'noun', 'b'],
['taglia', 'noun', 'b'],
['tagliare', 'verb', 'a'],
['tagliatella', 'noun', 'c'],
['tagliato', 'past_part', 'b'],
['tagliato', 'adjective', 'b'],
['tagliere', 'noun', 'c'],
['taglio', 'noun', 'a'],
['tagliola', 'noun', 'c'],
['talco', 'noun', 'c'],
['tale', 'adjective', 'a'],
['tale', 'pronoun', 'a'],
['tale', 'adverb', 'a'],
['taleggio', 'noun', 'c'],
['talento', 'noun', 'b'],
['talmente', 'adverb', 'a'],
['talpa', 'noun', 'c'],
['talpa', 'adjective', 'c'],
['talpa', 'noun', 'c'],
['talvolta', 'adverb', 'b'],
['tamburo', 'noun', 'c'],
['tamponare', 'verb', 'c'],
['tangente', 'pres_part', 'b'],
['tangente', 'adjective', 'b'],
['tangente', 'noun', 'b'],
['tanto', 'adjective', 'a'],
['tanto', 'pronoun', 'a'],
['tanto', 'noun', 'a'],
['tanto', 'adverb', 'a'],
['tanto', 'conjunction', 'a'],
['tappa', 'noun', 'b'],
['tappare', 'verb', 'b'],
['tappetino', 'noun', 'c'],
['tappeto', 'noun', 'b'],
['tappezzare', 'verb', 'c'],
['tappo', 'noun', 'c'],
['tarallo', 'noun', 'c'],
['tarantella', 'noun', 'c'],
['tardi', 'adverb', 'a'],
['tardo', 'adjective', 'a'],
['tardo', 'adverb', 'a'],
['targa', 'noun', 'b'],
['tariffa', 'noun', 'b'],
['tarlo', 'noun', 'c'],
['tartaruga', 'noun', 'c'],
['tartufo', 'noun', 'c'],
['tasca', 'noun', 'a'],
['tassa', 'noun', 'a'],
['tassare', 'verb', 'c'],
['tassello', 'noun', 'c'],
['tasso', 'noun', 'b'],
['tastiera', 'noun', 'b'],
['tasto', 'noun', 'b'],
['tatto', 'noun', 'c'],
['tatuaggio', 'noun', 'b'],
['taverna', 'noun', 'c'],
['tavola', 'noun', 'a'],
['tavoletta', 'noun', 'c'],
['tavolino', 'noun', 'b'],
['tavolo', 'noun', 'a'],
['taxi', 'noun', 'b'],
['tazza', 'noun', 'b'],
['tè', 'noun', 'b'],
['te', 'pronoun', 'a'],
['team', 'noun', 'b'],
['teatrale', 'adjective', 'b'],
['teatro', 'noun', 'a'],
['tecnica', 'noun', 'a'],
['tecnicamente', 'adverb', 'b'],
['tecnico', 'adjective', 'a'],
['tecnico', 'noun', 'a'],
['tecnologia', 'noun', 'a'],
['tecnologico', 'adjective', 'b'],
['tedesco', 'adjective', 'a'],
['tedesco', 'noun', 'a'],
['tegame', 'noun', 'c'],
['teglia', 'noun', 'c'],
['tegola', 'noun', 'c'],
['tela', 'noun', 'b'],
['telaio', 'noun', 'c'],
['telecamera', 'noun', 'b'],
['telecomandato', 'past_part', 'c'],
['telecomandato', 'adjective', 'c'],
['telecronaca', 'noun', 'c'],
['telecronista', 'noun', 'c'],
['telefilm', 'noun', 'b'],
['telefonare', 'verb', 'a'],
['telefonata', 'noun', 'a'],
['telefonico', 'adjective', 'a'],
['telefonino', 'noun', 'b'],
['telefono', 'noun', 'a'],
['telegiornale', 'noun', 'b'],
['telegrafico', 'adjective', 'c'],
['telegrafo', 'noun', 'c'],
['telegramma', 'noun', 'c'],
['telescopio', 'noun', 'b'],
['televisione', 'noun', 'a'],
['televisivo', 'adjective', 'a'],
['televisore', 'noun', 'b'],
['tema', 'noun', 'a'],
['temere', 'verb', 'a'],
['temperatura', 'noun', 'a'],
['tempesta', 'noun', 'b'],
['tempio', 'noun', 'b'],
['tempo', 'noun', 'a'],
['temporale', 'noun', 'b'],
['temporaneo', 'adjective', 'b'],
['tenaglia', 'noun', 'c'],
['tenda', 'noun', 'a'],
['tendenza', 'noun', 'a'],
['tendere', 'verb', 'a'],
['tenebra', 'noun', 'c'],
['tenente', 'noun', 'b'],
['tenere', 'verb', 'a'],
['tenerezza', 'noun', 'b'],
['tenero', 'adjective', 'b'],
['tenero', 'noun', 'b'],
['tennis', 'noun', 'b'],
['tensione', 'noun', 'a'],
['tentare', 'verb', 'a'],
['tentativo', 'noun', 'a'],
['tentazione', 'noun', 'b'],
['tenuta', 'noun', 'b'],
['teologia', 'noun', 'b'],
['teologo', 'noun', 'b'],
['teoria', 'noun', 'a'],
['teorico', 'adjective', 'b'],
['teorico', 'noun', 'b'],
['terapia', 'noun', 'a'],
['tergicristallo', 'noun', 'c'],
['terminale', 'adjective', 'b'],
['terminale', 'noun', 'b'],
['terminare', 'verb', 'a'],
['termine', 'noun', 'a'],
['termosifone', 'noun', 'c'],
['terra', 'noun', 'a'],
['terrazzo', 'noun', 'b'],
['terremoto', 'noun', 'b'],
['terreno', 'noun', 'a'],
['terrestre', 'adjective', 'b'],
['terrestre', 'noun', 'b'],
['terribile', 'adjective', 'a'],
['terriccio', 'noun', 'c'],
['territoriale', 'adjective', 'b'],
['territoriale', 'noun', 'b'],
['territorio', 'noun', 'a'],
['terrore', 'noun', 'b'],
['terrorismo', 'noun', 'b'],
['terrorista', 'adjective', 'b'],
['terrorista', 'noun', 'b'],
['terrorizzare', 'verb', 'b'],
['terzo', 'adjective', 'a'],
['terzo', 'noun', 'a'],
['teschio', 'noun', 'b'],
['tesi', 'noun', 'a'],
['teso', 'past_part', 'b'],
['teso', 'adjective', 'b'],
['tesoro', 'noun', 'a'],
['tessera', 'noun', 'b'],
['tessile', 'adjective', 'c'],
['tessile', 'noun', 'c'],
['tessuto', 'past_part', 'b'],
['tessuto', 'adjective', 'b'],
['tessuto', 'noun', 'b'],
['test', 'noun', 'a'],
['testa', 'noun', 'a'],
['testamento', 'noun', 'b'],
['testare', 'verb', 'b'],
['testimone', 'noun', 'a'],
['testimonianza', 'noun', 'b'],
['testimoniare', 'verb', 'b'],
['testo', 'noun', 'a'],
['tetta', 'noun', 'b'],
['tetto', 'noun', 'a'],
['tettoia', 'noun', 'c'],
['tg', 'sigla', 'b'],
['thermos', 'noun', 'c'],
['ti', 'noun', 'c'],
['ti', 'pronoun', 'a'],
['tic', 'noun', 'c'],
['ticchettio', 'noun', 'c'],
['tifare', 'verb', 'b'],
['tifo', 'noun', 'c'],
['tifoso', 'adjective', 'b'],
['tifoso', 'noun', 'b'],
['tigre', 'noun', 'b'],
['timbro', 'noun', 'c'],
['timidezza', 'noun', 'c'],
['timido', 'adjective', 'b'],
['timido', 'noun', 'b'],
['timone', 'noun', 'c'],
['timoniere', 'noun', 'c'],
['timore', 'noun', 'b'],
['tinello', 'noun', 'c'],
['tino', 'noun', 'c'],
['tipico', 'adjective', 'a'],
['tipo', 'noun', 'a'],
['tipologia', 'noun', 'b'],
['tiramisù', 'noun', 'c'],
['tiranno', 'noun', 'c'],
['tiranno', 'adjective', 'c'],
['tirare', 'verb', 'a'],
['tiro', 'noun', 'b'],
['tirocinio', 'noun', 'b'],
['tirrenico', 'adjective', 'c'],
['tisana', 'noun', 'c'],
['titolare', 'adjective', 'b'],
['titolare', 'noun', 'b'],
['titolo', 'noun', 'a'],
['tivù', 'noun', 'a'],
['tizio', 'noun', 'b'],
['toast', 'noun', 'c'],
['toccare', 'verb', 'a'],
['tocco', 'noun', 'b'],
['togliere', 'verb', 'a'],
['toilette', 'noun', 'c'],
['toletta', 'noun', 'c'],
['tolleranza', 'noun', 'b'],
['tollerare', 'verb', 'b'],
['tomba', 'noun', 'b'],
['tombola', 'noun', 'c'],
['tonaca', 'noun', 'c'],
['tondo', 'adjective', 'b'],
['tondo', 'noun', 'b'],
['tonnellata', 'noun', 'b'],
['tonno', 'noun', 'c'],
['tono', 'noun', 'a'],
['tonsilla', 'noun', 'c'],
['top', 'noun', 'b'],
['topo', 'noun', 'b'],
['topo', 'adjective', 'b'],
['toppa', 'noun', 'c'],
['torbido', 'adjective', 'c'],
['torbido', 'noun', 'c'],
['torcere', 'verb', 'b'],
['torcia', 'noun', 'c'],
['torcicollo', 'noun', 'c'],
['tordo', 'noun', 'c'],
['torero', 'noun', 'c'],
['torinese', 'adjective', 'c'],
['torinese', 'noun', 'c'],
['tormentare', 'verb', 'b'],
['tornaconto', 'noun', 'c'],
['tornare', 'verb', 'a'],
['torneo', 'noun', 'b'],
['tornio', 'noun', 'c'],
['toro', 'noun', 'b'],
['torre', 'noun', 'b'],
['torrone', 'noun', 'c'],
['torta', 'noun', 'b'],
['tortellino', 'noun', 'c'],
['torto', 'noun', 'b'],
['tortora', 'noun', 'c'],
['tortora', 'adjective', 'c'],
['tortora', 'noun', 'c'],
['tosare', 'verb', 'c'],
['toscano', 'adjective', 'b'],
['toscano', 'noun', 'b'],
['tosse', 'noun', 'b'],
['tossico', 'adjective', 'b'],
['tossico', 'noun', 'b'],
['tossire', 'verb', 'c'],
['tostapane', 'noun', 'c'],
['totale', 'adjective', 'a'],
['totale', 'noun', 'a'],
['totalmente', 'adverb', 'b'],
['tour', 'noun', 'b'],
['tovaglia', 'noun', 'b'],
['tovaglietta', 'noun', 'c'],
['tovagliolo', 'noun', 'c'],
['tra', 'preposition', 'a'],
['traballare', 'verb', 'c'],
['traboccare', 'verb', 'c'],
['trabocchetto', 'noun', 'c'],
['traccia', 'noun', 'a'],
['tracciare', 'verb', 'b'],
['tradimento', 'noun', 'b'],
['tradire', 'verb', 'b'],
['tradizionale', 'adjective', 'a'],
['tradizione', 'noun', 'a'],
['tradurre', 'verb', 'a'],
['traduzione', 'noun', 'a'],
['traffico', 'noun', 'a'],
['trafila', 'noun', 'c'],
['traforo', 'noun', 'c'],
['tragedia', 'noun', 'b'],
['traghetto', 'noun', 'c'],
['tragico', 'adjective', 'b'],
['tragico', 'noun', 'b'],
['trainare', 'verb', 'c'],
['trama', 'noun', 'b'],
['tramezzino', 'noun', 'c'],
['tramite', 'noun', 'b'],
['tramite', 'preposition', 'b'],
['tramontare', 'verb', 'c'],
['tramonto', 'noun', 'b'],
['trampolino', 'noun', 'c'],
['trancio', 'noun', 'c'],
['tranne', 'preposition', 'a'],
['tranquillamente', 'adverb', 'b'],
['tranquillità', 'noun', 'b'],
['tranquillizzare', 'verb', 'c'],
['tranquillo', 'adjective', 'a'],
['tranquillo', 'adverb', 'a'],
['tranquillo', 'noun', 'a'],
['transito', 'noun', 'c'],
['trapano', 'noun', 'c'],
['trapezio', 'noun', 'c'],
['trapezio', 'adjective', 'c'],
['trapianto', 'noun', 'c'],
['trappola', 'noun', 'b'],
['trapunta', 'noun', 'c'],
['trarre', 'verb', 'a'],
['trascinare', 'verb', 'a'],
['trascorrere', 'verb', 'a'],
['trascrizione', 'noun', 'b'],
['trascurare', 'verb', 'b'],
['trasferimento', 'noun', 'b'],
['trasferire', 'verb', 'a'],
['trasformare', 'verb', 'a'],
['trasformazione', 'noun', 'b'],
['trasfusione', 'noun', 'c'],
['traslocare', 'verb', 'c'],
['trasloco', 'noun', 'c'],
['trasmettere', 'verb', 'a'],
['trasmissione', 'noun', 'a'],
['trasparente', 'adjective', 'b'],
['trasparente', 'noun', 'b'],
['trasparenza', 'noun', 'b'],
['trasportare', 'verb', 'b'],
['trasporto', 'noun', 'a'],
['trattamento', 'noun', 'a'],
['trattare', 'verb', 'a'],
['trattativa', 'noun', 'b'],
['trattato', 'noun', 'b'],
['trattenere', 'verb', 'a'],
['trattenuta', 'noun', 'c'],
['tratto', 'noun', 'a'],
['trattore', 'noun', 'c'],
['trauma', 'noun', 'b'],
['travasare', 'verb', 'c'],
['travestire', 'verb', 'c'],
['travolgere', 'verb', 'b'],
['tre', 'adjective', 'a'],
['tre', 'noun', 'a'],
['trebbiare', 'verb', 'c'],
['trecento', 'adjective', 'b'],
['trecento', 'noun', 'b'],
['tredici', 'adjective', 'b'],
['tredici', 'noun', 'b'],
['tremare', 'verb', 'b'],
['tremendo', 'adjective', 'b'],
['trend', 'noun', 'b'],
['treno', 'noun', 'a'],
['trenta', 'adjective', 'a'],
['trenta', 'noun', 'a'],
['trentino', 'adjective', 'c'],
['trentino', 'noun', 'c'],
['triangolo', 'noun', 'b'],
['tribù', 'noun', 'c'],
['tribunale', 'noun', 'a'],
['triestino', 'adjective', 'c'],
['triestino', 'noun', 'c'],
['trifoglio', 'noun', 'c'],
['trina', 'noun', 'c'],
['trincea', 'noun', 'c'],
['trionfo', 'noun', 'b'],
['triste', 'adjective', 'a'],
['tristezza', 'noun', 'b'],
['tritare', 'verb', 'c'],
['trofeo', 'noun', 'c'],
['tronco', 'noun', 'b'],
['trono', 'noun', 'b'],
['troppo', 'adjective', 'a'],
['troppo', 'pronoun', 'a'],
['troppo', 'adverb', 'a'],
['troppo', 'noun', 'a'],
['trota', 'noun', 'c'],
['trottare', 'verb', 'c'],
['trottola', 'noun', 'c'],
['trovare', 'verb', 'a'],
['truccare', 'verb', 'c'],
['trucco', 'noun', 'b'],
['trucco', 'noun', 'b'],
['truffa', 'noun', 'b'],
['truffare', 'verb', 'c'],
['truppa', 'noun', 'b'],
['t-shirt', 'noun', 'c'],
['tu', 'pronoun', 'a'],
['tubo', 'noun', 'b'],
['tuffare', 'verb', 'b'],
['tuffo', 'noun', 'c'],
['tulipano', 'noun', 'c'],
['tumore', 'noun', 'b'],
['tunica', 'noun', 'c'],
['tunisino', 'adjective', 'c'],
['tunisino', 'noun', 'c'],
['tunnel', 'noun', 'c'],
['tuo', 'adjective', 'a'],
['tuo', 'pronoun', 'a'],
['tuono', 'noun', 'c'],
['turbare', 'verb', 'b'],
['turco', 'adjective', 'b'],
['turco', 'noun', 'b'],
['turismo', 'noun', 'b'],
['turista', 'noun', 'b'],
['turistico', 'adjective', 'b'],
['turno', 'noun', 'a'],
['tuta', 'noun', 'b'],
['tutela', 'noun', 'b'],
['tutelare', 'verb', 'b'],
['tutore', 'noun', 'c'],
['tuttavia', 'conjunction', 'a'],
['tuttavia', 'adverb', 'a'],
['tutto', 'adjective', 'a'],
['tutto', 'pronoun', 'a'],
['tuttora', 'adverb', 'b'],
['u', 'noun', 'c'],
['ubriaco', 'adjective', 'b'],
['ubriaco', 'noun', 'b'],
['uccello', 'noun', 'a'],
['uccidere', 'verb', 'a'],
['ucraino', 'adjective', 'c'],
['ucraino', 'noun', 'c'],
['udienza', 'noun', 'b'],
['udinese', 'adjective', 'c'],
['udinese', 'noun', 'c'],
['udire', 'verb', 'b'],
['udire', 'noun', 'b'],
['ufficiale', 'noun', 'b'],
['ufficiale', 'adjective', 'a'],
['ufficialmente', 'adverb', 'b'],
['ufficio', 'noun', 'a'],
['uguale', 'adjective', 'a'],
['uguale', 'adverb', 'a'],
['uguale', 'noun', 'a'],
['ugualmente', 'adverb', 'b'],
['ulcera', 'noun', 'c'],
['ulteriore', 'adjective', 'a'],
['ulteriormente', 'adverb', 'b'],
['ultimamente', 'adverb', 'b'],
['ultimo', 'adjective', 'a'],
['ultimo', 'noun', 'a'],
['ultravioletto', 'noun', 'c'],
['ultravioletto', 'adjective', 'c'],
['umanità', 'noun', 'a'],
['umano', 'adjective', 'a'],
['umano', 'noun', 'a'],
['umbro', 'adjective', 'c'],
['umbro', 'noun', 'c'],
['umido', 'adjective', 'b'],
['umido', 'noun', 'b'],
['umile', 'adjective', 'b'],
['umile', 'noun', 'b'],
['umiliare', 'verb', 'b'],
['umore', 'noun', 'b'],
['umorismo', 'noun', 'c'],
['una', 'determiner', 'a'],
['una', 'pronoun', 'a'],
['undici', 'adjective', 'b'],
['undici', 'noun', 'b'],
['ungherese', 'adjective', 'c'],
['ungherese', 'noun', 'c'],
['unghia', 'noun', 'b'],
['unguento', 'noun', 'c'],
['unico', 'adjective', 'a'],
['unico', 'noun', 'a'],
['uniforme', 'adjective', 'b'],
['unione', 'noun', 'b'],
['unire', 'verb', 'a'],
['unità', 'noun', 'a'],
['unito', 'past_part', 'a'],
['unito', 'adjective', 'a'],
['unito', 'noun', 'a'],
['universale', 'adjective', 'b'],
['universale', 'noun', 'b'],
['università', 'noun', 'a'],
['universitario', 'adjective', 'b'],
['universitario', 'noun', 'b'],
['universo', 'noun', 'a'],
['uno', 'adjective', 'a'],
['uno', 'noun', 'a'],
['uno', 'determiner', 'a'],
['uno', 'pronoun', 'a'],
['uomo', 'noun', 'a'],
['uovo', 'noun', 'a'],
['uragano', 'noun', 'c'],
['urbanistico', 'adjective', 'b'],
['urbano', 'adjective', 'b'],
['urgente', 'adjective', 'b'],
['urgenza', 'noun', 'b'],
['urlare', 'verb', 'a'],
['urlo', 'noun', 'b'],
['urna', 'noun', 'c'],
['urtare', 'verb', 'b'],
['usare', 'verb', 'a'],
['usato', 'past_part', 'b'],
['usato', 'adjective', 'b'],
['usato', 'noun', 'b'],
['uscire', 'verb', 'a'],
['uscita', 'noun', 'a'],
['usignolo', 'noun', 'c'],
['uso', 'noun', 'a'],
['utensile', 'noun', 'c'],
['utente', 'noun', 'a'],
['utenza', 'noun', 'b'],
['utile', 'adjective', 'a'],
['utile', 'noun', 'a'],
['utilità', 'noun', 'b'],
['utilizzare', 'verb', 'a'],
['utilizzo', 'noun', 'b'],
['vabbè', 'exclamation', 'b'],
['vacanza', 'noun', 'a'],
['vacca', 'noun', 'b'],
['vaccino', 'noun', 'c'],
['vaffanculo', 'exclamation', 'b'],
['vagare', 'verb', 'b'],
['vagire', 'verb', 'c'],
['vago', 'adjective', 'b'],
['vago', 'noun', 'b'],
['valanga', 'noun', 'c'],
['valdostano', 'adjective', 'c'],
['valdostano', 'noun', 'c'],
['valere', 'verb', 'a'],
['valido', 'adjective', 'b'],
['valigia', 'noun', 'b'],
['valle', 'noun', 'b'],
['valore', 'noun', 'a'],
['valorizzare', 'verb', 'b'],
['valoroso', 'adjective', 'c'],
['valoroso', 'noun', 'c'],
['valutare', 'verb', 'a'],
['valutazione', 'noun', 'b'],
['valvola', 'noun', 'c'],
['vampata', 'noun', 'c'],
['vampiro', 'noun', 'b'],
['vandalo', 'adjective', 'c'],
['vandalo', 'noun', 'c'],
['vanga', 'noun', 'c'],
['vangelo', 'noun', 'b'],
['vanitoso', 'adjective', 'c'],
['vanitoso', 'noun', 'c'],
['vano', 'adjective', 'b'],
['vano', 'noun', 'b'],
['vantaggio', 'noun', 'a'],
['vantaggioso', 'adjective', 'c'],
['vantare', 'verb', 'b'],
['vanto', 'noun', 'c'],
['vapore', 'noun', 'b'],
['variabile', 'adjective', 'b'],
['variabile', 'noun', 'b'],
['variante', 'pres_part', 'b'],
['variante', 'adjective', 'b'],
['variante', 'noun', 'b'],
['variare', 'verb', 'b'],
['variazione', 'noun', 'b'],
['varietà', 'noun', 'b'],
['vario', 'adjective', 'a'],
['vario', 'adjective', 'a'],
['vario', 'pronoun', 'a'],
['variopinto', 'adjective', 'c'],
['vasca', 'noun', 'b'],
['vaso', 'noun', 'b'],
['vasto', 'adjective', 'b'],
['vasto', 'noun', 'b'],
['ve', 'pronoun', 'a'],
['ve', 'adverb', 'a'],
['vecchio', 'adjective', 'a'],
['vecchio', 'noun', 'a'],
['vedere', 'verb', 'a'],
['vedere', 'noun', 'a'],
['vedova', 'noun', 'b'],
['vegetale', 'adjective', 'b'],
['vegetale', 'noun', 'b'],
['veglia', 'noun', 'c'],
['veglione', 'noun', 'c'],
['veicolo', 'noun', 'b'],
['vela', 'noun', 'b'],
['veleno', 'noun', 'b'],
['velenoso', 'adjective', 'c'],
['vellutato', 'past_part', 'c'],
['vellutato', 'adjective', 'c'],
['velluto', 'noun', 'c'],
['velo', 'noun', 'b'],
['veloce', 'adjective', 'a'],
['veloce', 'adverb', 'a'],
['veloce', 'noun', 'a'],
['velocemente', 'adverb', 'b'],
['velocità', 'noun', 'a'],
['vena', 'noun', 'b'],
['vendemmiare', 'verb', 'c'],
['vendere', 'verb', 'a'],
['vendetta', 'noun', 'b'],
['vendicare', 'verb', 'b'],
['vendita', 'noun', 'a'],
['venditore', 'adjective', 'b'],
['venditore', 'noun', 'b'],
['venerdì', 'noun', 'a'],
['veneto', 'adjective', 'b'],
['veneto', 'noun', 'b'],
['veneziano', 'adjective', 'c'],
['veneziano', 'noun', 'c'],
['venire', 'verb', 'a'],
['ventaglio', 'noun', 'c'],
['ventata', 'noun', 'c'],
['venti', 'adjective', 'a'],
['venti', 'noun', 'a'],
['venticinque', 'adjective', 'b'],
['venticinque', 'noun', 'b'],
['ventilatore', 'adjective', 'c'],
['ventilatore', 'noun', 'c'],
['ventina', 'noun', 'b'],
['ventiquattro', 'adjective', 'b'],
['ventiquattro', 'noun', 'b'],
['vento', 'noun', 'a'],
['ventre', 'noun', 'b'],
['venuta', 'noun', 'c'],
['veramente', 'adverb', 'a'],
['verbale', 'adjective', 'a'],
['verbale', 'noun', 'a'],
['verbo', 'noun', 'b'],
['verde', 'adjective', 'a'],
['verde', 'noun', 'a'],
['verdura', 'noun', 'b'],
['vergine', 'adjective', 'b'],
['vergine', 'noun', 'b'],
['vergogna', 'noun', 'b'],
['vergognarsi', 'verb', 'b'],
['verifica', 'noun', 'b'],
['verificare', 'verb', 'a'],
['verità', 'noun', 'a'],
['verme', 'noun', 'b'],
['vernice', 'noun', 'b'],
['vero', 'adjective', 'a'],
['vero', 'noun', 'a'],
['versare', 'verb', 'a'],
['versione', 'noun', 'a'],
['verso', 'noun', 'a'],
['verso', 'preposition', 'a'],
['vertebra', 'noun', 'c'],
['verticale', 'adjective', 'b'],
['verticale', 'noun', 'b'],
['vertice', 'noun', 'b'],
['vertigine', 'noun', 'c'],
['vescovo', 'noun', 'b'],
['vescovo', 'adjective', 'b'],
['vespa', 'noun', 'c'],
['veste', 'noun', 'b'],
['vestire', 'verb', 'a'],
['vestito', 'noun', 'a'],
['vestito', 'past_part', 'b'],
['vestito', 'adjective', 'b'],
['veterinario', 'adjective', 'c'],
['veterinario', 'noun', 'c'],
['vetrina', 'noun', 'b'],
['vetro', 'noun', 'a'],
['vettura', 'noun', 'b'],
['vi', 'pronoun', 'a'],
['vi', 'adverb', 'a'],
['via', 'noun', 'a'],
['via', 'adverb', 'a'],
['via', 'exclamation', 'a'],
['via', 'noun', 'a'],
['viaggiare', 'verb', 'a'],
['viaggiatore', 'noun', 'b'],
['viaggiatrice', 'noun', 'c'],
['viaggio', 'noun', 'a'],
['viale', 'noun', 'b'],
['vibrare', 'verb', 'b'],
['vice', 'noun', 'b'],
['vicenda', 'noun', 'a'],
['viceversa', 'adverb', 'b'],
['vicinanza', 'noun', 'b'],
['vicino', 'adjective', 'a'],
['vicino', 'noun', 'a'],
['vicino', 'adverb', 'a'],
['vicolo', 'noun', 'b'],
['video', 'adjective', 'a'],
['video', 'noun', 'a'],
['videogioco', 'noun', 'b'],
['viennese', 'adjective', 'c'],
['viennese', 'noun', 'c'],
['vietare', 'verb', 'b'],
['vigile', 'adjective', 'b'],
['vigile', 'noun', 'b'],
['vigilia', 'noun', 'b'],
['vigna', 'noun', 'c'],
['vigore', 'noun', 'b'],
['villa', 'noun', 'a'],
['villaggio', 'noun', 'a'],
['vincente', 'pres_part', 'b'],
['vincente', 'adjective', 'b'],
['vincente', 'noun', 'b'],
['vincere', 'verb', 'a'],
['vincitore', 'adjective', 'b'],
['vincitore', 'noun', 'b'],
['vincolo', 'noun', 'b'],
['vino', 'noun', 'a'],
['vino', 'adjective', 'a'],
['viola', 'noun', 'b'],
['viola', 'adjective', 'b'],
['violare', 'verb', 'b'],
['violazione', 'noun', 'b'],
['violentare', 'verb', 'c'],
['violento', 'adjective', 'a'],
['violento', 'noun', 'a'],
['violenza', 'noun', 'a'],
['violetta', 'noun', 'c'],
['violetto', 'adjective', 'c'],
['violetto', 'noun', 'c'],
['violino', 'noun', 'b'],
['vipera', 'noun', 'c'],
['virgola', 'noun', 'b'],
['virtù', 'noun', 'b'],
['virtuale', 'adjective', 'b'],
['virus', 'noun', 'b'],
['visibile', 'adjective', 'b'],
['visibile', 'noun', 'b'],
['visione', 'noun', 'a'],
['visita', 'noun', 'a'],
['visitare', 'verb', 'a'],
['visitatore', 'noun', 'b'],
['visivo', 'adjective', 'b'],
['viso', 'noun', 'a'],
['vissuto', 'past_part', 'b'],
['vissuto', 'adjective', 'b'],
['vissuto', 'noun', 'b'],
['vista', 'noun', 'a'],
['vita', 'noun', 'a'],
['vitale', 'adjective', 'b'],
['vitale', 'noun', 'b'],
['vitamina', 'noun', 'c'],
['vite', 'noun', 'c'],
['vitello', 'noun', 'c'],
['vittima', 'noun', 'a'],
['vittoria', 'noun', 'a'],
['vivace', 'adjective', 'b'],
['vivace', 'adverb', 'b'],
['vivace', 'noun', 'b'],
['vivente', 'pres_part', 'b'],
['vivente', 'adjective', 'b'],
['vivente', 'noun', 'b'],
['vivere', 'verb', 'a'],
['vivere', 'noun', 'a'],
['vivo', 'adjective', 'a'],
['vivo', 'noun', 'a'],
['viziare', 'verb', 'c'],
['viziato', 'past_part', 'c'],
['viziato', 'adjective', 'c'],
['vizio', 'noun', 'b'],
['vocabolario', 'noun', 'b'],
['vocale', 'noun', 'b'],
['vocale', 'adjective', 'b'],
['vocazione', 'noun', 'b'],
['voce', 'noun', 'a'],
['vodka', 'noun', 'c'],
['voglia', 'noun', 'a'],
['voi', 'pronoun', 'a'],
['volantino', 'noun', 'c'],
['volare', 'verb', 'a'],
['volata', 'noun', 'c'],
['volenteroso', 'adjective', 'c'],
['volentieri', 'adverb', 'b'],
['volere', 'verb', 'a'],
['volgare', 'adjective', 'b'],
['volgare', 'noun', 'b'],
['volgere', 'verb', 'b'],
['volo', 'noun', 'a'],
['volontà', 'noun', 'a'],
['volontariato', 'noun', 'b'],
['volontario', 'adjective', 'b'],
['volontario', 'noun', 'b'],
['volta', 'noun', 'a'],
['voltare', 'verb', 'a'],
['volto', 'noun', 'a'],
['volume', 'noun', 'a'],
['vomitare', 'verb', 'b'],
['vomito', 'noun', 'c'],
['vongola', 'noun', 'c'],
['vostro', 'adjective', 'a'],
['vostro', 'pronoun', 'a'],
['votare', 'verb', 'a'],
['votazione', 'noun', 'c'],
['voto', 'noun', 'a'],
['vu', 'noun', 'c'],
['vuotare', 'verb', 'c'],
['vuoto', 'adjective', 'a'],
['vuoto', 'noun', 'a'],
['wafer', 'noun', 'c'],
['web', 'noun', 'a'],
['weekend', 'noun', 'b'],
['whisky', 'noun', 'c'],
['wurstel', 'noun', 'c'],
['yogurt', 'noun', 'c'],
['zaino', 'noun', 'b'],
['zampa', 'noun', 'b'],
['zampogna', 'noun', 'c'],
['zanna', 'noun', 'c'],
['zanzara', 'noun', 'c'],
['zattera', 'noun', 'c'],
['zebra', 'noun', 'c'],
['zero', 'adjective', 'a'],
['zero', 'noun', 'a'],
['zero', 'symbol', 'a'],
['zeta', 'noun', 'c'],
['zia', 'noun', 'a'],
['zingaro', 'adjective', 'c'],
['zingaro', 'noun', 'c'],
['zio', 'noun', 'a'],
['zitella', 'noun', 'c'],
['zitto', 'adjective', 'a'],
['zitto', 'noun', 'a'],
['zoccolo', 'noun', 'c'],
['zolla', 'noun', 'c'],
['zona', 'noun', 'a'],
['zoo', 'noun', 'c'],
['zoppicare', 'verb', 'c'],
['zoppo', 'adjective', 'c'],
['zoppo', 'noun', 'c'],
['zucca', 'noun', 'b'],
['zucchero', 'noun', 'b'],
['zucchina', 'noun', 'c'],
['zuffa', 'noun', 'c'],
['zuppa', 'noun', 'c'],
] | 28.130757 | 40 | 0.50236 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 181,972 | 0.715794 |
81603469dc6c0438b2388abedcbbaed330402503 | 940 | py | Python | pytorch/torch/_utils_internal.py | raghavnauhria/whatmt | c20483a437c82936cb0fb8080925e37b9c4bba87 | ["MIT"] | 15 | 2019-08-10T02:36:38.000Z | 2021-07-14T13:45:32.000Z | torch/_utils_internal.py | wxwoods/mctorch | 7cd6eb51fdd01fa75ed9245039a4f145ba342de2 | ["BSD-3-Clause"] | 7 | 2019-10-21T03:08:51.000Z | 2022-03-11T23:54:28.000Z | pytorch/torch/_utils_internal.py | raghavnauhria/whatmt | c20483a437c82936cb0fb8080925e37b9c4bba87 | ["MIT"] | 5 | 2019-09-27T02:41:40.000Z | 2021-11-05T20:40:49.000Z
| 5 | 2019-09-27T02:41:40.000Z | 2021-11-05T20:40:49.000Z | from __future__ import absolute_import, division, print_function, unicode_literals
import os
# this arbitrary-looking assortment of functionality is provided here
# to have a central place for overrideable behavior. The motivating
# use is the FB build environment, where this source file is replaced
# by an equivalent.
if os.path.basename(os.path.dirname(__file__)) == 'shared':
    torch_parent = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
else:
    torch_parent = os.path.dirname(os.path.dirname(__file__))


def get_file_path(*path_components):
    return os.path.join(torch_parent, *path_components)


def get_file_path_2(*path_components):
    return os.path.join(*path_components)


def get_writable_path(path):
    return path


def prepare_multiprocessing_environment(path):
    pass


def resolve_library_path(path):
    return os.path.realpath(path)
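# (Illustrative note, not part of the original file: with this module located
# at <torch_parent>/torch/_utils_internal.py, get_file_path('torch', 'lib')
# resolves to '<torch_parent>/torch/lib', while get_file_path_2 simply joins
# its arguments without the torch_parent prefix.)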
TEST_MASTER_ADDR = '127.0.0.1'
TEST_MASTER_PORT = 29500
| 24.736842 | 82 | 0.774468 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 243 | 0.258511 |
816071ad63a52503bfce3572b1ef2ec295dea013 | 9,099 | py | Python | libs/models.py | aquastripe/DenseCLIP | 9481bc4aac39265eb313041ae53b8f33d755508b | ["Apache-2.0"] | 7 | 2022-02-02T14:34:38.000Z | 2022-03-21T09:54:14.000Z | libs/models.py | aquastripe/DenseCLIP | 9481bc4aac39265eb313041ae53b8f33d755508b | ["Apache-2.0"] | null | null | null | libs/models.py | aquastripe/DenseCLIP | 9481bc4aac39265eb313041ae53b8f33d755508b | ["Apache-2.0"] | null | null | null
| null | null | null | import json
from collections import OrderedDict
from typing import Union, List
import clip
import torch
import torch.nn as nn
import torch.nn.functional as F
from libs.definitions import ROOT
label_file = ROOT / 'imagenet_class_index.json'
with open(label_file, 'r') as f:
    labels = json.load(f)
_DEFAULT_CLASSNAMES = [value[1] for value in labels.values()]
# templates are copied from https://github.com/openai/CLIP/blob/main/notebooks/Prompt_Engineering_for_ImageNet.ipynb
_DEFAULT_TEMPLATES = [
'a bad photo of a {}.',
'a photo of many {}.',
'a sculpture of a {}.',
'a photo of the hard to see {}.',
'a low resolution photo of the {}.',
'a rendering of a {}.',
'graffiti of a {}.',
'a bad photo of the {}.',
'a cropped photo of the {}.',
'a tattoo of a {}.',
'the embroidered {}.',
'a photo of a hard to see {}.',
'a bright photo of a {}.',
'a photo of a clean {}.',
'a photo of a dirty {}.',
'a dark photo of the {}.',
'a drawing of a {}.',
'a photo of my {}.',
'the plastic {}.',
'a photo of the cool {}.',
'a close-up photo of a {}.',
'a black and white photo of the {}.',
'a painting of the {}.',
'a painting of a {}.',
'a pixelated photo of the {}.',
'a sculpture of the {}.',
'a bright photo of the {}.',
'a cropped photo of a {}.',
'a plastic {}.',
'a photo of the dirty {}.',
'a jpeg corrupted photo of a {}.',
'a blurry photo of the {}.',
'a photo of the {}.',
'a good photo of the {}.',
'a rendering of the {}.',
'a {} in a video game.',
'a photo of one {}.',
'a doodle of a {}.',
'a close-up photo of the {}.',
'a photo of a {}.',
'the origami {}.',
'the {} in a video game.',
'a sketch of a {}.',
'a doodle of the {}.',
'a origami {}.',
'a low resolution photo of a {}.',
'the toy {}.',
'a rendition of the {}.',
'a photo of the clean {}.',
'a photo of a large {}.',
'a rendition of a {}.',
'a photo of a nice {}.',
'a photo of a weird {}.',
'a blurry photo of a {}.',
'a cartoon {}.',
'art of a {}.',
'a sketch of the {}.',
'a embroidered {}.',
'a pixelated photo of a {}.',
'itap of the {}.',
'a jpeg corrupted photo of the {}.',
'a good photo of a {}.',
'a plushie {}.',
'a photo of the nice {}.',
'a photo of the small {}.',
'a photo of the weird {}.',
'the cartoon {}.',
'art of the {}.',
'a drawing of the {}.',
'a photo of the large {}.',
'a black and white photo of a {}.',
'the plushie {}.',
'a dark photo of a {}.',
'itap of a {}.',
'graffiti of the {}.',
'a toy {}.',
'itap of my {}.',
'a photo of a cool {}.',
'a photo of a small {}.',
'a tattoo of the {}.',
]
class DenseClip(nn.Module):
    _AVAILABLE_MODELS = ['RN50', 'RN50x16']  # refer to Table 3. in the paper

    def __init__(self,
                 name: str,
                 classnames: List[str] = None,
                 templates: List[str] = None,
                 device: Union[str, torch.device] = 'cuda' if torch.cuda.is_available() else 'cpu',
                 jit: bool = False, download_root: str = None):
        super(DenseClip, self).__init__()
        self.clip_model, self.preprocess = clip.load(name, device, jit, download_root)
        if classnames is None:
            classnames = _DEFAULT_CLASSNAMES
        if templates is None:
            templates = _DEFAULT_TEMPLATES
        self._init_visual(device)
        self._init_zeroshot_classifier(classnames, templates, device)

    def _init_visual(self, device):
        self.visual = self.clip_model.visual
        self.conv1 = nn.Conv2d(self.visual.attnpool.v_proj.in_features,
                               self.visual.attnpool.v_proj.out_features,
                               kernel_size=(1, 1)).to(device).to(self.dtype)
        self.conv2 = nn.Conv2d(self.visual.attnpool.c_proj.in_features,
                               self.visual.attnpool.c_proj.out_features,
                               kernel_size=(1, 1)).to(device).to(self.dtype)
        conv1_weight_shape = (*self.visual.attnpool.v_proj.weight.shape, 1, 1)
        conv2_weight_shape = (*self.visual.attnpool.c_proj.weight.shape, 1, 1)
        self.conv1.load_state_dict(
            OrderedDict(weight=self.visual.attnpool.v_proj.weight.reshape(conv1_weight_shape),
                        bias=self.visual.attnpool.v_proj.bias))
        self.conv2.load_state_dict(
            OrderedDict(weight=self.visual.attnpool.c_proj.weight.reshape(conv2_weight_shape),
                        bias=self.visual.attnpool.c_proj.bias))

    @torch.no_grad()
    def _init_zeroshot_classifier(self, classnames, templates, device):
        # refer to: https://github.com/openai/CLIP/blob/main/notebooks/Prompt_Engineering_for_ImageNet.ipynb
        zeroshot_weights = []
        for classname in classnames:
            texts = [template.format(classname) for template in templates]  # format with class
            texts = clip.tokenize(texts).to(device)  # tokenize
            class_embeddings = self.clip_model.encode_text(texts)  # embed with text encoder
            class_embeddings /= class_embeddings.norm(dim=-1, keepdim=True)
            class_embedding = class_embeddings.mean(dim=0)
            class_embedding /= class_embedding.norm()
            zeroshot_weights.append(class_embedding)

        # shape: [E, C]
        # where E is the dimension of an embedding and C is the number of classes.
        self.zeroshot_weights = torch.stack(zeroshot_weights, dim=1).to(device)

    @property
    def dtype(self):
        return self.visual.conv1.weight.dtype

    def _stem(self, x):
        for conv, bn in [(self.visual.conv1, self.visual.bn1),
                         (self.visual.conv2, self.visual.bn2),
                         (self.visual.conv3, self.visual.bn3)]:
            x = self.visual.relu(bn(conv(x)))
        x = self.visual.avgpool(x)
        return x

    def encode_image(self, image):
        image = image.type(self.dtype)
        feature = self._stem(image)
        feature = self.visual.layer1(feature)
        feature = self.visual.layer2(feature)
        feature = self.visual.layer3(feature)
        feature = self.visual.layer4(feature)
        # removed attnpool
        feature = self.conv1(feature)
        feature = self.conv2(feature)
        return feature

    def forward(self, images):
        # [B, E, h, w]
        features = self.encode_image(images)
        # [B, w, h, E]
        features_t = features.transpose(1, 3)
        # [B, w, h, C]
        output_t = features_t @ self.zeroshot_weights
        # [B, C, h, w]
        output = output_t.transpose(1, 3)
        output = F.interpolate(output, size=images.shape[-2:], mode='bilinear')
        return output

    @staticmethod
    def available_models():
        return DenseClip._AVAILABLE_MODELS


class Clip(nn.Module):
    _AVAILABLE_MODELS = ['RN50', 'RN50x16']  # refer to Table 3. in the paper

    def __init__(self,
                 name: str,
                 classnames: List[str] = None,
                 templates: List[str] = None,
                 device: Union[str, torch.device] = 'cuda' if torch.cuda.is_available() else 'cpu',
                 jit: bool = False, download_root: str = None):
        super(Clip, self).__init__()
        self.clip_model, self.preprocess = clip.load(name, device, jit, download_root)
        if classnames is None:
            classnames = _DEFAULT_CLASSNAMES
        if templates is None:
            templates = _DEFAULT_TEMPLATES
        self._init_zeroshot_classifier(classnames, templates, device)

    @torch.no_grad()
    def _init_zeroshot_classifier(self, classnames, templates, device):
        # refer to: https://github.com/openai/CLIP/blob/main/notebooks/Prompt_Engineering_for_ImageNet.ipynb
        zeroshot_weights = []
        for classname in classnames:
            texts = [template.format(classname) for template in templates]  # format with class
            texts = clip.tokenize(texts).to(device)  # tokenize
            class_embeddings = self.clip_model.encode_text(texts)  # embed with text encoder
            class_embeddings /= class_embeddings.norm(dim=-1, keepdim=True)
            class_embedding = class_embeddings.mean(dim=0)
            class_embedding /= class_embedding.norm()
            zeroshot_weights.append(class_embedding)

        # shape: [E, C]
        # where E is the dimension of an embedding and C is the number of classes.
        self.zeroshot_weights = torch.stack(zeroshot_weights, dim=1).to(device)

    def encode_image(self, image):
        feature = self.clip_model.encode_image(image)
        feature /= feature.norm(dim=-1, keepdim=True)
        return feature

    def forward(self, images):
        features = self.encode_image(images)
        output = features @ self.zeroshot_weights
        return F.softmax(output, dim=-1)

    @staticmethod
    def available_models():
        return Clip._AVAILABLE_MODELS
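# Minimal usage sketch (illustrative, not part of the original file; assumes
# the CLIP weights are downloadable and `images` is a [B, 3, H, W] tensor
# preprocessed like CLIP inputs):
#
#     model = DenseClip('RN50')
#     logits = model(images)         # [B, num_classes, H, W] per-pixel scores
#     pred = logits.argmax(dim=1)    # [B, H, W] class-index map
#
# DenseClip keeps CLIP's ResNet trunk but swaps the attention pooling for two
# 1x1 convolutions initialised from attnpool's v_proj/c_proj weights, so the
# text-derived classifier in `zeroshot_weights` applies at every spatial
# position instead of once per image.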
| 35.964427 | 116 | 0.593032 | 6,278 | 0.689966 | 0 | 0 | 2,135 | 0.234641 | 0 | 0 | 2,660 | 0.29234 |
8160fc2ecf8175573434885167d35e68b574a5af | 11,463 | py | Python | src/basset_sick_loss.py | shtoneyan/Basset | b6c7f8995bb4f8fc37eccf3ee0f78478beef51d7 | ["MIT"] | 248 | 2015-10-06T12:30:53.000Z | 2022-02-02T20:30:34.000Z | src/basset_sick_loss.py | Deepstatsanalysis/Basset | 18753ad9ff5a46291021a0fa1abaad037b6f64f0 | ["MIT"] | 51 | 2015-10-08T04:57:41.000Z | 2021-08-12T19:53:04.000Z | src/basset_sick_loss.py | Deepstatsanalysis/Basset | 18753ad9ff5a46291021a0fa1abaad037b6f64f0 | ["MIT"] | 120 | 2015-10-15T00:49:44.000Z | 2022-02-16T21:17:17.000Z
| 120 | 2015-10-15T00:49:44.000Z | 2022-02-16T21:17:17.000Z | #!/usr/bin/env python
from __future__ import print_function
from optparse import OptionParser
import os
import random
import subprocess
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import pysam
from scipy.stats import binom
from scipy.stats.mstats import mquantiles
import seaborn as sns
import stats
################################################################################
# basset_sick_loss.py
#
# Shuffle SNPs that overlap DNase sites within their sites and compare the SAD
# distributions.
#
# Todo:
# -Control for GC% changes introduced by mutation shuffles.
# -Control for positional changes within the DHS regions.
# -Properly handle indels.
################################################################################
################################################################################
# main
################################################################################
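# Example invocation (illustrative; the file names below are placeholders):
#   basset_sick_loss.py -o sad_shuffle -s 3 snps.vcf sample_beds.txt model.th
# where each line of sample_beds.txt pairs a sample label with its DHS BED file.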
def main():
    usage = 'usage: %prog [options] <vcf_file> <sample_beds_file> <model_file>'
    parser = OptionParser(usage)
    parser.add_option('-f', dest='genome_fasta', default='%s/assembly/hg19.fa'%os.environ['HG19'], help='Genome FASTA [Default: %default]')
    parser.add_option('-g', dest='gpu', default=False, action='store_true', help='Run on GPU [Default: %default]')
    parser.add_option('-l', dest='seq_len', type='int', default=600, help='Sequence length provided to the model [Default: %default]')
    parser.add_option('-o', dest='out_dir', default='sad_shuffle', help='Output directory')
    parser.add_option('-r', dest='replot', default=False, action='store_true', help='Re-plot only, without re-computing [Default: %default]')
    parser.add_option('-s', dest='num_shuffles', default=1, type='int', help='Number of SNP shuffles [Default: %default]')
    parser.add_option('-t', dest='sad_table_file', help='Pre-computed SAD scores for the SNPs')
    (options,args) = parser.parse_args()

    if len(args) != 3:
        parser.error('Must provide VCF file, sample BEDs file, and model file')
    else:
        vcf_file = args[0]
        sample_beds_file = args[1]
        model_file = args[2]

    if not os.path.isdir(options.out_dir):
        os.mkdir(options.out_dir)

    # open reference genome
    genome = pysam.Fastafile(options.genome_fasta)

    # open binomial stats file
    binom_out = open('%s/binom.txt' % options.out_dir, 'w')

    # open mann-whitney stats file
    mw_out = open('%s/mannwhitney.txt' % options.out_dir, 'w')

    # plot defaults
    sns.set(font_scale=1.5, style='ticks')

    si = 0
    for line in open(sample_beds_file):
        sample, bed_file = line.split()
        print(sample)

        #########################################
        # compute SAD
        #########################################
        # filter VCF to overlapping SNPs
        print(" intersecting SNPs")
        sample_vcf_file = '%s/%s.vcf' % (options.out_dir,sample)
        if not options.replot:
            filter_vcf(vcf_file, bed_file, sample_vcf_file)

        # compute SAD scores for this sample's SNPs
        print(" computing SAD")
        if options.sad_table_file:
            true_sad = retrieve_sad(sample_vcf_file, options.sad_table_file, si)
        else:
            true_sad = compute_sad(sample_vcf_file, model_file, si, '%s/%s_sad'%(options.out_dir,sample), options.seq_len, options.gpu, options.replot)

        #########################################
        # compute shuffled SAD
        #########################################
        shuffle_sad = np.zeros((true_sad.shape[0],options.num_shuffles))
        for ni in range(options.num_shuffles):
            # shuffle the SNPs within their overlapping DHS
            print(" shuffle %d" % ni)
            sample_vcf_shuf_file = '%s/%s_shuf%d.vcf' % (options.out_dir, sample, ni)
            shuffle_snps(sample_vcf_file, sample_vcf_shuf_file, genome)

            # compute SAD scores for shuffled SNPs
            print(" computing shuffle SAD")
            shuffle_sad[:,ni] = compute_sad(sample_vcf_shuf_file, model_file, si, '%s/%s_shuf%d_sad'%(options.out_dir,sample,ni), options.seq_len, options.gpu, options.replot)

        #########################################
        # simple stats
        #########################################
        # compute shuffle means
        shuffle_sad_mean = shuffle_sad.mean(axis=1)

        # print sample table
        sample_sad_out = open('%s/%s_table.txt' % (options.out_dir,sample), 'w')
        for vi in range(len(true_sad)):
            print('%f\t%f' % (true_sad[vi], shuffle_sad_mean[vi]), file=sample_sad_out)
        sample_sad_out.close()

        # scatter plot
        # plt.figure()
        # plt.scatter(true_sad, shuffle_sad_mean, color='black', alpha=0.7)
        # plt.gca().grid(True, linestyle=':')
        # plt.savefig('%s/%s_scatter.pdf' % (options.out_dir,sample))
        # plt.close()

        # plot CDFs
        sns_colors = sns.color_palette('deep')
        plt.figure()
        plt.hist(true_sad, 1000, normed=1, histtype='step', cumulative=True, color=sns_colors[0], linewidth=1, label='SNPs')
        plt.hist(shuffle_sad.flatten(), 1000, normed=1, histtype='step', cumulative=True, color=sns_colors[2], linewidth=1, label='Shuffle')
        ax = plt.gca()
        ax.grid(True, linestyle=':')
        ax.set_xlim(-.2, .2)
        plt.legend()
        plt.savefig('%s/%s_cdf.pdf' % (options.out_dir,sample))
        plt.close()

        # plot Q-Q
        true_q = mquantiles(true_sad, np.linspace(0,1,min(10000,true_sad.shape[0])))
        shuf_q = mquantiles(shuffle_sad_mean, np.linspace(0,1,min(10000,true_sad.shape[0])))
        plt.figure()
        plt.scatter(true_q, shuf_q, color=sns_colors[0])
        pmin = 1.05*min(true_q[0], shuf_q[0])
        pmax = 1.05*max(true_q[-1], shuf_q[-1])
        plt.plot([pmin,pmax], [pmin,pmax], color='black', linewidth=1)
        ax = plt.gca()
        ax.set_xlim(pmin,pmax)
        ax.set_ylim(pmin,pmax)
        ax.set_xlabel('True SAD')
        ax.set_ylabel('Shuffled SAD')
        ax.grid(True, linestyle=':')
        plt.savefig('%s/%s_qq.pdf' % (options.out_dir,sample))
        plt.close()

        #########################################
        # statistical tests
        #########################################
        # compute matched binomial test
        true_great = sum((true_sad-shuffle_sad_mean) > 0)
        true_lo = np.log2(true_great) - np.log2(len(true_sad)-true_great)
        if true_lo > 0:
            binom_p = 1.0 - binom.cdf(true_great-1, n=len(true_sad), p=0.5)
        else:
            binom_p = binom.cdf(true_great, n=len(true_sad), p=0.5)

        # print significance stats
        cols = (sample, len(true_sad), true_great, true_lo, binom_p)
        print('%-20s %5d %5d %6.2f %6.1e' % cols, file=binom_out)

        # compute Mann-Whitney
        mw_z, mw_p = stats.mannwhitneyu(true_sad, shuffle_sad.flatten())
        cols = (sample, len(true_sad), true_sad.mean(), shuffle_sad.mean(), mw_z, mw_p)
        print('%-20s %5d %6.3f %6.3f %6.2f %6.1e' % cols, file=mw_out)

        # update sample index
        si += 1

    binom_out.close()
    mw_out.close()
    genome.close()
def compute_sad(sample_vcf_file, model_file, si, out_dir, seq_len, gpu, replot):
    ''' Run basset_sad.py to compute scores. '''
    cuda_str = ''
    if gpu:
        cuda_str = '--cudnn'

    cmd = 'basset_sad.py %s -l %d -o %s %s %s' % (cuda_str, seq_len, out_dir, model_file, sample_vcf_file)
    if not replot:
        subprocess.call(cmd, shell=True)

    sad = []
    for line in open('%s/sad_table.txt' % out_dir):
        a = line.split()
        if a[3] == 't%d'%si:
            sad.append(float(a[-1]))

    return np.array(sad)
def filter_vcf(vcf_file, bed_file, sample_vcf_file):
    ''' Filter the VCF file for SNPs that overlap
        the BED file, removing indels. '''
    # open filtered file
    sample_vcf_out = open(sample_vcf_file, 'w')

    # intersect
    p = subprocess.Popen('bedtools intersect -wo -a %s -b %s' % (vcf_file, bed_file), stdout=subprocess.PIPE, shell=True)
    for line in p.stdout:
        a = line.split()
        if len(a[3]) == len(a[4]) == 1:
            print(line, file=sample_vcf_out, end='')

    sample_vcf_out.close()
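# (Note: `bedtools intersect -wo` prints each overlapping VCF record followed
# by the BED feature's columns and the overlap size, which is why this script
# scans fields 5 and beyond for the DHS coordinates in shuffle_snps below.)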
def retrieve_sad(sample_vcf_file, sad_table_file, si):
    ''' Retrieve SAD scores from a pre-computed table.

    Note that I'm assuming here the table has all
    SAD scores in one row for each SNP so I can
    pull out the score I want as column si+1.
    '''
    snp_indexes = {}
    vi = 0
    for line in open(sample_vcf_file):
        a = line.split()
        snp_indexes[a[2]] = vi
        vi += 1

    sad = np.zeros(len(snp_indexes))
    for line in open(sad_table_file):
        a = line.split()
        print(a)
        if a[0] in snp_indexes:
            sad[snp_indexes[a[0]]] = float(a[si+1])

    return sad
def shuffle_snps(in_vcf_file, out_vcf_file, genome):
    ''' Shuffle the SNPs within their overlapping DHS. '''
    out_vcf_open = open(out_vcf_file, 'w')

    for line in open(in_vcf_file):
        a = line.split()

        # read SNP info
        snp_chrom = a[0]
        snp_pos = int(a[1])
        snp_nt = a[3]

        # determine BED start
        bi = 5
        while a[bi] != snp_chrom:
            bi += 1

        # read BED info
        bed_chrom = a[bi]
        bed_start = int(a[bi+1])
        bed_end = int(a[bi+2])

        # get sequence
        bed_seq = genome.fetch(bed_chrom, bed_start, bed_end)

        # determine matching positions
        bed_nt_matches = [i for i in range(len(bed_seq)) if bed_seq[i] == snp_nt]
        while len(bed_nt_matches) == 0:
            # expand segment by 10 nt
            bed_start = max(0, bed_start-10)
            bed_end += 10
            bed_seq = genome.fetch(bed_chrom, bed_start, bed_end)
            # re-scan the widened segment (without this, an empty match list
            # would loop forever)
            bed_nt_matches = [i for i in range(len(bed_seq)) if bed_seq[i] == snp_nt]

        # sample new SNP position
        shuf_pos = bed_start + 1 + random.choice(bed_nt_matches)

        # write into columns
        a[1] = str(shuf_pos)
        print('\t'.join(a), file=out_vcf_open)

    out_vcf_open.close()
def shuffle_snps_old(in_vcf_file, out_vcf_file, genome):
    ''' Shuffle the SNPs within their overlapping DHS. '''
    out_vcf_open = open(out_vcf_file, 'w')

    for line in open(in_vcf_file):
        a = line.split()

        # read SNP info
        snp_chrom = a[0]
        snp_pos = int(a[1])

        # determine BED start
        bi = 5
        while a[bi] != snp_chrom:
            bi += 1

        # read BED info
        bed_chrom = a[bi]
        bed_start = int(a[bi+1])
        bed_end = int(a[bi+2])

        # sample new SNP position
        shuf_pos = random.randint(bed_start, bed_end-1)
        while shuf_pos == snp_pos:
            shuf_pos = random.randint(bed_start, bed_end-1)

        # set reference allele
        ref_nt = genome.fetch(snp_chrom, shuf_pos-1, shuf_pos)

        # sample alternate allele
        alt_nt = random.choice('ACGT')
        while alt_nt == ref_nt:
            alt_nt = random.choice('ACGT')

        # write into columns
        a[1] = str(shuf_pos)
        a[3] = ref_nt
        a[4] = alt_nt
        print('\t'.join(a), file=out_vcf_open)
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
    main()
| 34.527108 | 175 | 0.566693 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,715 | 0.324086 |
816115c45af8c3075e38530126280e891f465fb7 | 1,657 | py | Python | level2/huge/split_huge_from_tar_strace.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | ["Beerware"] | null | null | null | level2/huge/split_huge_from_tar_strace.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | ["Beerware"] | null | null | null | level2/huge/split_huge_from_tar_strace.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | ["Beerware"] | 1 | 2020-04-03T06:19:11.000Z | 2020-04-03T06:19:11.000Z
| 1 | 2020-04-03T06:19:11.000Z | 2020-04-03T06:19:11.000Z | #!/usr/bin/env python3
import codecs
import re
def trans_addr(addr):
    """Translate a file position into a program address"""
    if addr < 0x1000:
        return 0
    if 0x0000000000001000 <= addr < 0x0000000000001000 + 0x00001ef000000000:
        return 0x00002b0000000000 + addr - 0x0000000000001000
    if 0x00002afffffe1000 <= addr < 0x00002afffffe1000 + 0x0000161000000000:
        return 0x000049f000000000 + addr - 0x00002afffffe1000
    if 0x000049effffe1000 <= addr < 0x000049effffe1000 + 0x00002afffffe0000:
        return 0x0000000000020000 + addr - 0x000049effffe1000
    raise Exception("Invalid addr {:#x}".format(addr))
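# Worked example (illustrative): 0x2000 falls in the first mapped range, so
# trans_addr(0x2000) == 0x00002b0000000000 + 0x2000 - 0x1000 == 0x2b0000001000.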
blobs = {}
with open('strace_tar_output.log', 'r') as f:
    curseek = 0
    for line in f:
        m = re.match(r'lseek\(4, ([^,]*), SEEK_SET\)', line)
        if m is not None:
            curseek = int(m.group(1))
            continue
        if line.startswith('write(4, "'):
            m = re.match(r'write\(4, "(.*)", ([0-9]*)\) = ([0-9]*)', line)
            assert m is not None
            rawdata, count1, count2 = m.groups()
            assert count1 == count2
            addr = curseek
            curseek += int(count1)
            data = codecs.escape_decode(rawdata.encode('ascii'))[0]
            # Find the first non-zero byte in the data block
            i = 0
            while i < len(data) and not data[i]:
                i += 1
            if i >= len(data):
                continue
            addr = trans_addr(addr + i)
            data = data[i:].rstrip(b'\0')
            with open('out/blob-{:016x}.bin'.format(addr), 'wb') as f:
                f.write(data)
| 36.822222 | 76 | 0.569101 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 315 | 0.189988 |
81617162fc6604315b045e09dee8878d1bf83430 | 4,826 | py | Python | UI/test/ui_test.py | tunapro1234/ai.destroy | 0c0e1be378e40ef069fcafaa91eb9c98de31520f | ["MIT"] | null | null | null | UI/test/ui_test.py | tunapro1234/ai.destroy | 0c0e1be378e40ef069fcafaa91eb9c98de31520f | ["MIT"] | null | null | null | UI/test/ui_test.py | tunapro1234/ai.destroy | 0c0e1be378e40ef069fcafaa91eb9c98de31520f | ["MIT"] | 1 | 2020-04-03T06:19:11.000Z | 2020-04-03T06:19:11.000Z
| null | null | null | import pygame_gui
import pygame
class colors:
    black = "#000000"
    dimgrey = "#696969"  # lmao
    darkslategray = "#2F4F4F"
TITLE = "TUNAPRO1234"
BACKGROUND = colors.darkslategray
WIDTH, HEIGHT = 1920, 1080
"""
Hızlıca bir plan yapacağım
Neural ağları kontrol edebileceğimiz küçük bir framework
Ağların gelişimini görebileceğiz değiştirebileceğiz ve kaydedebileceğiz
Bunun için
-select box yapısı
-başlatmak için buton
-kaydetme olayları için üstteki şeyden
Pencereler
-tıkladığımız nöronun bilgilerini gösteren ve değiştirebilen bir pencere
-tıkladığımız weightin değişmesini sağlayan bir pencere
Norön, katman ve ağ için pygame wrapperları yazacağım
Weigth için de bir class olur
Kaydetme olayına daha var
"""
# elements: dict: {"buttons": buttons, "entries": entries}
class Window:
    def __init__(self, screen, buttons={}, entries={}):
        self.buttons = buttons
        self.entries = entries
        self.screen = screen
def main():
    pygame.init()
    pygame.display.set_caption(TITLE)
    window_surface = pygame.display.set_mode((WIDTH, HEIGHT),
                                             pygame.FULLSCREEN)

    background = pygame.Surface((WIDTH, HEIGHT))
    background.fill(pygame.Color(BACKGROUND))

    manager = pygame_gui.UIManager((WIDTH, HEIGHT))

    buttons = {}
    entries = {}
    selects = {}
    sliders = {}
    windows = {}
    # labels = {}
    dropdowns = {}

    # yapf: disable
    entries["Hello"] = pygame_gui.elements.UITextEntryLine(relative_rect=pygame.Rect((400, 500), (200, 50)), manager=manager)
    buttons["Hello"] = pygame_gui.elements.UIButton(relative_rect=pygame.Rect((605, 500), (95, 29)), text='ok', manager=manager)
    sliders["Hello"] = pygame_gui.elements.UIHorizontalSlider(relative_rect=pygame.Rect((400, 534), (300, 20)), start_value=0, value_range=(-20.0, 20.0), manager=manager)
    dropdowns["Hello"] = pygame_gui.elements.UIDropDownMenu(relative_rect=pygame.Rect((500, 100), (100, 20)), options_list=["1", "2", "3", "4"], starting_option="select", manager=manager)
    selects["Hello"] = pygame_gui.elements.UISelectionList(relative_rect=pygame.Rect((100, 500), (100, 100)), item_list=["1", "2", "3", "4"], manager=manager)
    # links["Hello"] = pygame_gui.elements.UITextBox(relative_rect=pygame.Rect((100, 500), (100, 50)), text="LABEL TUNAPRO", manager=manager)
    windows["Hello"] = pygame_gui.elements.UIWindow(rect=pygame.Rect((100, 100), (200, 200)), manager=manager, window_display_title="test", resizable=True)

    buttonRect = pygame.Rect(0, 0, 100, 20)
    buttonRect.bottomright = (-30, -20)
    anchors = {
        'left': 'right',
        'right': 'right',
        'top': 'bottom',
        'bottom': 'bottom'
    }
    pygame_gui.elements.UIButton(relative_rect=buttonRect, text='Hello', manager=manager, container=windows["Hello"], anchors=anchors)
    # yapf: enable

    # activate: text_box.set_active_effect(pygame_gui.TEXT_EFFECT_TYPING_APPEAR)
    # activate: text_box.set_active_effect(pygame_gui.TEXT_EFFECT_FADE_OUT)
    # activate: text_box.set_active_effect(pygame_gui.TEXT_EFFECT_FADE_IN)
    # deactivate: text_box.set_active_effect(None)

    clock = pygame.time.Clock()
    isRunning = True

    while isRunning:
        time_delta = clock.tick(60) / 1000.0
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                isRunning = False

            if event.type == pygame.USEREVENT:
                if event.ui_element == buttons["Hello"]:
                    if event.user_type == pygame_gui.UI_BUTTON_PRESSED:
                        print('Hello World!')

                if event.ui_element == dropdowns["Hello"]:
                    if event.user_type == pygame_gui.UI_DROP_DOWN_MENU_CHANGED:
                        print("Selected option:", event.text)

                if event.ui_element == entries["Hello"]:
                    if event.user_type == pygame_gui.UI_TEXT_ENTRY_FINISHED:
                        print("Entered text:", event.text)
                    # if event.user_type == pygame_gui.UI_TEXT_ENTRY_CHANGED:
                    #     print("Changed text:", event.text)

                if event.ui_element == sliders["Hello"]:
                    if event.user_type == pygame_gui.UI_HORIZONTAL_SLIDER_MOVED:
                        print('current slider value:', event.value)

                if event.ui_element == selects["Hello"]:
                    if event.user_type == pygame_gui.UI_SELECTION_LIST_NEW_SELECTION:
                        print("Selected item:", event.text)

            manager.process_events(event)

        manager.update(time_delta)

        window_surface.blit(background, (0, 0))
        manager.draw_ui(window_surface)

        pygame.display.update()


if __name__ == "__main__":
    main()
| 33.282759 | 187 | 0.643804 | 257 | 0.052675 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,499 | 0.307235 |
8163184bea4450d8faedd6f3d068c99c6560b188 | 2,814 | py | Python | tests/test_temperature_system.py | SmartSleepIoT/SmartSleepCoding | 21c19489f0c477cbfbabd3a1d232f526f84a9e49 | [
"BSD-3-Clause"
]
| null | null | null | tests/test_temperature_system.py | SmartSleepIoT/SmartSleepCoding | 21c19489f0c477cbfbabd3a1d232f526f84a9e49 | [
"BSD-3-Clause"
]
| 41 | 2021-10-20T17:54:59.000Z | 2022-02-02T20:43:53.000Z | tests/test_temperature_system.py | SmartSleepIoT/SmartSleepCoding | 21c19489f0c477cbfbabd3a1d232f526f84a9e49 | [
"BSD-3-Clause"
]
| null | null | null | import time
import paho.mqtt.client as paho
from flask import json
msg_nr = 0
messages = [""]
broker = 'broker.emqx.io'
port = 1883
def update_contor():
global msg_nr
msg_nr += 1
def on_message(client, userdata, message):
received = json.loads(message.payload)
if "status" in received:
assert received['status'] == messages[msg_nr]
update_contor()
elif "db" in received:
assert received["db"] == messages[msg_nr]
update_contor()
def test_cooling_system(client, auth):
global msg_nr
msg_nr = 0
global messages
messages = ['16',
"Setting the temperature system level to 1.0", "New temperature system level set to 1.0",
'16',
"Setting the temperature system level to 2.0", "New temperature system level set to 2.0",
'16',
"Setting the temperature system level to 3.0", "New temperature system level set to 3.0",
'16',
"Setting the temperature system level to 4.0", "New temperature system level set to 4.0",
'19',
"Setting the temperature system level to 3.0", "New temperature system level set to 3.0",
'16',
"Setting the temperature system level to 4.0", "New temperature system level set to 4.0",
"18"
]
time.sleep(2)
client_mqtt = paho.Client("client-test-snoring")
client_mqtt.on_message = on_message
client_mqtt.connect(broker)
client_mqtt.loop_start()
client_mqtt.subscribe("SmartSleep/SoundSensor")
auth.login()
response = client.post(f"/config/start_to_sleep?sleep_now={True}")
assert response.status_code == 200
response = client.post("/config/temp?temperature=18")
assert response.status_code == 200
time.sleep(1.5)
response = client.post("/config/current_temp?sensor=16")
assert response.status_code == 200
time.sleep(1.5)
response = client.post("/config/current_temp?sensor=16")
assert response.status_code == 200
time.sleep(1.5)
response = client.post("/config/current_temp?sensor=16")
assert response.status_code == 200
time.sleep(1.5)
response = client.post("/config/current_temp?sensor=16")
assert response.status_code == 200
time.sleep(1.5)
response = client.post("/config/current_temp?sensor=19")
assert response.status_code == 200
time.sleep(1.5)
response = client.post("/config/current_temp?sensor=16")
assert response.status_code == 200
time.sleep(1.5)
response = client.post("/config/current_temp?sensor=18")
assert response.status_code == 200
time.sleep(1.5)
| 29.621053 | 105 | 0.646411 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 926 | 0.329069 |
81642e5d95ded6a23159027c35921f4b03706531 | 136 | py | Python | 3_gabor/model/gabor_rf/maprf/invlink.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
]
| 3 | 2020-10-23T02:53:11.000Z | 2021-03-12T11:04:37.000Z | 3_gabor/model/gabor_rf/maprf/invlink.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
]
| null | null | null | 3_gabor/model/gabor_rf/maprf/invlink.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
]
| 1 | 2021-07-28T08:38:05.000Z | 2021-07-28T08:38:05.000Z | import theano.tensor as tt
def explin(x):
return tt.where(x >= 0, 1 + x, tt.exp(x))
def log_exp1p(x):
return tt.log1p(tt.exp(x))
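# Notes (added for clarity): explin is continuous at 0 (1 + 0 == exp(0)) and
# strictly positive, a common inverse link for rate parameters; log_exp1p is
# the softplus function log(1 + exp(x)).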
| 12.363636 | 42 | 0.639706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
81643c548f00589437ba73490093135a47e9e5d0 | 470 | py | Python | readtest.py | pyEtherCAT/Test-Source | 32e7f36873cf311580acc25ab76db589e209e479 | [
"MIT"
]
| null | null | null | readtest.py | pyEtherCAT/Test-Source | 32e7f36873cf311580acc25ab76db589e209e479 | [
"MIT"
]
| null | null | null | readtest.py | pyEtherCAT/Test-Source | 32e7f36873cf311580acc25ab76db589e209e479 | [
"MIT"
]
 | null | null | null | from pyEtherCAT import MasterEtherCAT  # load the library
nic = "eth0"    # network interface to use
cat = MasterEtherCAT.MasterEtherCAT(nic)
ADP = 0x0000  # first slave on the bus
ADDR = 0x0E00 # core register address
cat.APRD(IDX=0x00, ADP=ADP, ADO=ADDR, DATA=[0,0,0,0,0,0,0,0]) # DATA supplies eight zero bytes (a 64-bit frame)
(DATA, WKC) = cat.socket_read() # read back the result
print("[0x{:04X}]= 0x{:02x}{:02x},0x{:02x}{:02x},0x{:02x}{:02x},0x{:02x}{:02x}".format(ADDR, DATA[7],DATA[6],DATA[5],DATA[4],DATA[3],DATA[2],DATA[1],DATA[0]))
# print the data that was read | 47 | 158 | 0.691489 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 306 | 0.503289
8167b0d56a737d008f46fbbcfb74a28e00ab2a2b | 1,353 | py | Python | python/dataingest/grammar/dmo/python_loc_parser.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
]
| null | null | null | python/dataingest/grammar/dmo/python_loc_parser.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
]
| null | null | null | python/dataingest/grammar/dmo/python_loc_parser.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import pprint
from base import BaseObject
from base import FileIO
class PythonLOCParser(BaseObject):
""" Parse T/LOC from a Python File
"""
def __init__(self,
file_path: str,
is_debug: bool = False):
"""
Created:
24-Dec-2019
[email protected]
* https://github.ibm.com/GTS-CDO/unstructured-analytics/issues/1637#issuecomment-16802191
:param file_path:
link to a python file
:param is_debug:
"""
BaseObject.__init__(self, __name__)
self._is_debug = is_debug
self._file_path = file_path
def _lines(self) -> list:
lines = FileIO.file_to_lines(self._file_path, use_sort=False)
return lines
def process(self) -> dict:
lines = self._lines()
loc = len(lines)
tloc = len([line for line in lines if line and len(line.strip())])
d_result = {
"Provenance": str(self.__class__.__name__),
"FilePath": self._file_path,
"LOC": str(loc),
"TLOC": str(tloc)}
if self._is_debug:
self.logger.debug('\n'.join([
"LOC Parsing Complete",
pprint.pformat(d_result, indent=4)]))
return d_result
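# Illustrative usage (the path is hypothetical):
#   parser = PythonLOCParser('/path/to/module.py', is_debug=True)
#   parser.process()  # -> {'Provenance': 'PythonLOCParser', 'FilePath': '...', 'LOC': '...', 'TLOC': '...'}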
| 24.160714 | 103 | 0.554324 | 1,235 | 0.912786 | 0 | 0 | 0 | 0 | 0 | 0 | 422 | 0.311899 |
8168288e7e0624056cb2c2cd06a6e71eb7b00f91 | 255 | py | Python | time_series_data_generator/csv_to_df_generator.py | ArtHackDay-Plus1/ParameterServer | 5e6e57154d2d29e311b9a725fd3b9ac8c5b99a74 | [
"MIT"
]
| null | null | null | time_series_data_generator/csv_to_df_generator.py | ArtHackDay-Plus1/ParameterServer | 5e6e57154d2d29e311b9a725fd3b9ac8c5b99a74 | [
"MIT"
]
| 7 | 2018-03-10T09:44:34.000Z | 2018-06-17T09:44:59.000Z | time_series_data_generator/csv_to_df_generator.py | ArtHackDay-Plus1/ParameterServer | 5e6e57154d2d29e311b9a725fd3b9ac8c5b99a74 | [
"MIT"
]
| 1 | 2018-03-10T04:56:01.000Z | 2018-03-10T04:56:01.000Z | import pandas as pd
import time
df = pd.read_csv("data/sample.csv")
for num in range(1000):
argx = str(df["x"][num:num+1].get_values())
argy = str(df["y"][num:num+1].get_values())
print("x:{0} / y:{1}".format(argx,argy))
time.sleep(0.1)
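# Note: Series.get_values() was deprecated in pandas 0.25 and removed in 1.0;
# on newer pandas, .to_numpy() is the equivalent.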
| 23.181818 | 47 | 0.611765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 38 | 0.14902 |
816842032e46719c27ed0ea91d613473a3f094ca | 601 | py | Python | architecture_tool_django/graphdefs/urls.py | goldginkgo/architecture_tool_django | e4229c5938a4dd01d0877afa7b93daf68e09283b | [
"MIT"
]
| 1 | 2021-08-13T01:37:29.000Z | 2021-08-13T01:37:29.000Z | architecture_tool_django/graphdefs/urls.py | goldginkgo/architecture_tool_django | e4229c5938a4dd01d0877afa7b93daf68e09283b | [
"MIT"
]
| null | null | null | architecture_tool_django/graphdefs/urls.py | goldginkgo/architecture_tool_django | e4229c5938a4dd01d0877afa7b93daf68e09283b | [
"MIT"
]
| 1 | 2021-07-19T07:57:54.000Z | 2021-07-19T07:57:54.000Z | from django.urls import path
from . import views
app_name = "graphs"
urlpatterns = [
path("graphs/", views.GraphListView.as_view(), name="graph.list"),
path("graphs/create/", views.GraphCreateView.as_view(), name="graph.create"),
path(
"graphs/<str:pk>/",
views.GraphDetailView.as_view(),
name="graph.detail",
),
path(
"graphs/<str:pk>/update/",
views.GraphUpdateView.as_view(),
name="graph.update",
),
path(
"graphs/<str:pk>/delete/",
views.GraphDeleteView.as_view(),
name="graph.delete",
),
]
| 24.04 | 81 | 0.587354 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 169 | 0.281198 |
81690ba836e0e2d1c0fdfb89754bbbb996e53c02 | 2,823 | py | Python | lib/utils/blob.py | TheRevanchist/DeepWatershedDetection | 6d8f3b3ca6db67bcebef8e18fb11248e15bd9dc4 | [
"MIT"
]
| null | null | null | lib/utils/blob.py | TheRevanchist/DeepWatershedDetection | 6d8f3b3ca6db67bcebef8e18fb11248e15bd9dc4 | [
"MIT"
]
| null | null | null | lib/utils/blob.py | TheRevanchist/DeepWatershedDetection | 6d8f3b3ca6db67bcebef8e18fb11248e15bd9dc4 | [
"MIT"
]
| null | null | null | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick - extended by Lukas Tuggener
# --------------------------------------------------------
"""Blob helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import cv2
import random
def im_list_to_blob(ims):
"""Convert a list of images into a network input.
Assumes images are already prepared (means subtracted, BGR order, ...).
"""
max_shape = np.array([im.shape for im in ims]).max(axis=0)
num_images = len(ims)
blob = np.zeros((num_images, max_shape[0], max_shape[1], 3),
dtype=np.float32)
for i in range(num_images):
im = ims[i]
blob[i, 0:im.shape[0], 0:im.shape[1], :] = im
return blob
def prep_im_for_blob(im, pixel_means, global_scale, args):
"""Mean subtract and scale an image for use in a blob."""
im = im.astype(np.float32, copy=False)
    # subtract the mean
if args.substract_mean == "True":
im -= pixel_means
# do global scaling
im = cv2.resize(im, None, None, fx=global_scale, fy=global_scale,
interpolation=cv2.INTER_LINEAR)
im_size_max = np.max(im.shape[0:2])
# Prevent the biggest axis from being more than MAX_SIZE
if im_size_max > args.max_edge:
if not args.crop == "True":
# scale down if bigger than max size
re_scale = (float(args.max_edge) / float(im_size_max))
im = cv2.resize(im, None, None, fx=re_scale, fy=re_scale,
interpolation=cv2.INTER_LINEAR)
global_scale = global_scale*re_scale
crop_box = [0,0,im.shape[0],im.shape[1]]
else:
# Crop image
topleft = random.uniform(0,1)<args.crop_top_left_bias
# crop to max size if necessary
if im.shape[0] <= args.max_edge or topleft:
crop_0 = 0
else:
crop_0 = random.randint(0,im.shape[0]-args.max_edge)
if im.shape[1] <= args.max_edge or topleft:
crop_1 = 0
else:
crop_1 = random.randint(0,im.shape[1]-args.max_edge)
crop_box = [crop_0, crop_1, min(crop_0+args.max_edge,im.shape[0]), min(crop_1+args.max_edge,im.shape[1])]
im = im[crop_box[0]:crop_box[2],crop_box[1]:crop_box[3]]
else:
crop_box = [0, 0, im.shape[0], im.shape[1]]
if not args.pad_to == 0:
# pad to fit RefineNet #TODO fix refinenet padding problem
y_mulity = int(np.ceil(im.shape[0] / float(args.pad_to)))
x_mulity = int(np.ceil(im.shape[1] / float(args.pad_to)))
canv = np.ones([y_mulity * args.pad_to, x_mulity * args.pad_to,3], dtype=np.uint8) * 255
canv[0:im.shape[0], 0:im.shape[1]] = im
im = canv
return im, global_scale, crop_box
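# Illustrative call (the pixel means shown are the BGR means commonly used by
# Fast R-CNN; `args` is any namespace providing the fields referenced above):
#   im, scale, crop_box = prep_im_for_blob(im, np.array([[[102.9801, 115.9465, 122.7717]]]), 1.0, args)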
| 32.825581 | 111 | 0.631598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 726 | 0.257173 |
81691bebff51090814a13a3ea3f9262d90d38a7b | 1,022 | py | Python | edlm/convert/_get_media_folders.py | etcher-be/EDLM | 7b25c85252fd15c2c222b00271f7a32e335db704 | [
"MIT"
]
| null | null | null | edlm/convert/_get_media_folders.py | etcher-be/EDLM | 7b25c85252fd15c2c222b00271f7a32e335db704 | [
"MIT"
]
| 4 | 2020-03-24T16:53:26.000Z | 2020-06-26T08:31:13.000Z | edlm/convert/_get_media_folders.py | etcher-be/EDLM | 7b25c85252fd15c2c222b00271f7a32e335db704 | [
"MIT"
]
| null | null | null | # coding=utf-8
"""
Gathers the media folders
"""
import elib
from ._context import Context
def get_media_folders(ctx: Context):
"""
Gathers the media folders
"""
ctx.info('gathering media folders')
media_folders = []
this_folder = ctx.source_folder
while True:
ctx.debug(f'traversing: "{this_folder}"')
media_folder_candidate = elib.path.ensure_path(this_folder, 'media', must_exist=False).absolute()
if media_folder_candidate.exists() and media_folder_candidate.is_dir():
ctx.debug(f'media folder found: "{media_folder_candidate}"')
media_folders.append(media_folder_candidate)
        if len(this_folder.parents) == 1:  # compare ints by value, not identity
            ctx.debug(f'reached mount point at: "{this_folder}"')
break
this_folder = this_folder.parent
# if not media_folders:
# raise ConvertError('no media folder found', ctx)
ctx.info(f'media folders:\n{elib.pretty_format(media_folders)}')
ctx.media_folders = media_folders
| 28.388889 | 105 | 0.672211 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 370 | 0.362035 |
8169c20b93a9491060201043f0c1a523fd5bc8ec | 1,924 | py | Python | twister2/python-support/src/main/python/twister2/tset/TLink.py | pulasthi/twister2 | 40b73bdf7dcb778d957c3f146baf825a97dceae5 | [
"Apache-2.0"
]
| 63 | 2017-08-31T10:02:52.000Z | 2021-12-18T21:42:14.000Z | twister2/python-support/src/main/python/twister2/tset/TLink.py | pulasthi/twister2 | 40b73bdf7dcb778d957c3f146baf825a97dceae5 | [
"Apache-2.0"
]
| 284 | 2017-10-20T15:35:26.000Z | 2020-09-11T15:25:40.000Z | twister2/python-support/src/main/python/twister2/tset/TLink.py | pulasthi/twister2 | 40b73bdf7dcb778d957c3f146baf825a97dceae5 | [
"Apache-2.0"
]
| 44 | 2017-08-21T19:47:57.000Z | 2020-11-22T22:45:50.000Z | from inspect import signature
import twister2.tset.TSet as ts
from twister2.utils import function_wrapper
class TLink:
def __init__(self, java_ref, env):
self.__java_ref = java_ref
self.__env = env
def map(self, lam):
map_wrapper = function_wrapper(lam)
map_func_java_ref = self.__env.functions.map.build(map_wrapper)
map_t_set_java_ref = self.__java_ref.map(map_func_java_ref)
return ts.TSet(map_t_set_java_ref, self.__env)
def flat_map(self, lam):
flat_map_wrapper = function_wrapper(lam)
flat_map_func_java_ref = self.__env.functions.flat_map.build(flat_map_wrapper)
flat_map_t_set_java_ref = self.__java_ref.flatmap(flat_map_func_java_ref)
return ts.TSet(flat_map_t_set_java_ref, self.__env)
def sink(self, sink_func):
sink_wrapper = function_wrapper(sink_func)
sink_func_java_ref = self.__env.functions.sink.build(sink_wrapper)
self.__java_ref.sink(sink_func_java_ref)
def compute(self, compute_func):
compute_wrapper = function_wrapper(compute_func)
        # if the function takes three parameters, the user expects the collector version of compute
        if len(signature(compute_func).parameters) == 3:
compute_collector_func_java_ref = self.__env.functions \
.compute_with_collector.build(compute_wrapper)
return ts.TSet(self.__java_ref.compute(compute_collector_func_java_ref), self.__env)
else:
compute_func_java_ref = self.__env.functions.compute.build(compute_wrapper)
return ts.TSet(self.__java_ref.compute(compute_func_java_ref), self.__env)
def for_each(self, foreach_func):
foreach_wrapper = function_wrapper(foreach_func)
foreach_func_java_ref = self.__env.functions.apply.build(foreach_wrapper)
return ts.TSet(self.__java_ref.forEach(foreach_func_java_ref), self.__env)
| 42.755556 | 96 | 0.72817 | 1,814 | 0.942827 | 0 | 0 | 0 | 0 | 0 | 0 | 83 | 0.043139 |
816e5b3d645c0e4cb41592db326d16685c653103 | 9,373 | py | Python | e2cnn/nn/modules/nonlinearities/concatenated.py | ziatdinovmax/e2cnn | e486a0d2cec71f2bde2d61f2f1315922f2883cee | [
"BSD-3-Clause"
]
| null | null | null | e2cnn/nn/modules/nonlinearities/concatenated.py | ziatdinovmax/e2cnn | e486a0d2cec71f2bde2d61f2f1315922f2883cee | [
"BSD-3-Clause"
]
| null | null | null | e2cnn/nn/modules/nonlinearities/concatenated.py | ziatdinovmax/e2cnn | e486a0d2cec71f2bde2d61f2f1315922f2883cee | [
"BSD-3-Clause"
]
| null | null | null | from e2cnn.gspaces import *
from e2cnn.nn import FieldType
from e2cnn.nn import GeometricTensor
from e2cnn.group import Representation
from e2cnn.group.representation import build_from_discrete_group_representation
from ..equivariant_module import EquivariantModule
import torch
from typing import List, Tuple, Any
import numpy as np
import math
__all__ = ["ConcatenatedNonLinearity"]
class ConcatenatedNonLinearity(EquivariantModule):
def __init__(self, in_type, function = "c_relu"):
r"""
Concatenated non-linearities.
For each input channel, the module applies the specified activation function both to its value and its opposite
(the value multiplied by -1).
The number of channels is, therefore, doubled.
Notice that not all the representations support this kind of non-linearity. Indeed, only representations
with the same pattern of permutation matrices and containing only values in :math:`\{0, 1, -1\}` support it.
Args:
in_type (FieldType): the input field type
function (str): the identifier of the non-linearity. It is used to specify which function to apply.
By default (``'c_relu'``), ReLU is used.
"""
assert isinstance(in_type.gspace, GeneralOnR2)
for r in in_type.representations:
assert "concatenated" in r.supported_nonlinearities, (
'Error! Representation "{}" does not support "concatenated"'
" non-linearity".format(r.name)
)
super(ConcatenatedNonLinearity, self).__init__()
self.space = in_type.gspace
self.in_type = in_type
# compute the output representation given the input one
self.out_type = ConcatenatedNonLinearity._transform_fiber_representation(
in_type
)
# retrieve the activation function to apply
if function == "c_relu":
self._function = torch.relu
elif function == "c_sigmoid":
self._function = torch.sigmoid
elif function == "c_tanh":
self._function = torch.tanh
else:
raise ValueError('Function "{}" not recognized!'.format(function))
def forward(self, input):
assert input.type == self.in_type
b, c, w, h = input.tensor.shape
# build the output tensor
output = torch.empty(
b, 2 * c, w, h, dtype=torch.float, device=input.tensor.device
)
# each channels is transformed to 2 channels:
# first, apply the non-linearity to its value
output[:, ::2, ...] = self._function(input.tensor)
# then, apply the non-linearity to its values with the sign inverted
output[:, 1::2, ...] = self._function(-1 * input.tensor)
# wrap the result in a GeometricTensor
return GeometricTensor(output, self.out_type)
def evaluate_output_shape(self, input_shape):
assert len(input_shape) == 4
assert input_shape[1] == self.in_type.size
b, c, hi, wi = input_shape
return b, self.out_type.size, hi, wi
def check_equivariance(self, atol = 1e-6, rtol = 1e-5):
c = self.in_type.size
x = torch.randn(3, c, 10, 10)
x = GeometricTensor(x, self.in_type)
errors = []
for el in self.space.testing_elements:
out1 = self(x).transform_fibers(el)
out2 = self(x.transform_fibers(el))
errs = (out1.tensor - out2.tensor).detach().numpy()
errs = np.abs(errs).reshape(-1)
print(el, errs.max(), errs.mean(), errs.var())
assert torch.allclose(out1.tensor, out2.tensor, atol=atol, rtol=rtol), (
'The error found during equivariance check with element "{}" is too'
" high: max = {}, mean = {} var ={}".format(
el, errs.max(), errs.mean(), errs.var()
)
)
errors.append((el, errs.mean()))
return errors
@staticmethod
def _transform_fiber_representation(in_type):
r"""
Compute the output representation from the input one after applying the concatenated non-linearity.
Args:
in_type (FieldType): the input field type
Returns:
(FieldType): the new output field type
"""
transformed = {}
# transform each different input Representation
for repr in in_type._unique_representations:
transformed[repr] = ConcatenatedNonLinearity._transform_representation(repr)
new_representations = []
# concatenate the new representations
for repr in in_type.representations:
new_representations.append(transformed[repr])
return FieldType(in_type.gspace, new_representations)
@staticmethod
def _transform_representation(representation):
r"""
Transform an input :class:`~e2cnn.group.Representation` according to the concatenated non-linearity.
The input representation needs to have the pattern of a permutation matrix, with values -1 or 1.
The output representation has double the size of the input one and is built by substituting the ``1`` s with 2x2
identity matrices and the ``-1`` s with 2x2 antidiagonal matrix containing ``1`` s.
Args:
representation (Representation): the input representation
Returns:
(Representation): the new output representation
"""
group = representation.group
assert not group.continuous
# the name of the new representation
name = "concatenated_{}".format(representation.name)
if name in group.representations:
# if the representation has already been built, return it
r = group.representations[name]
else:
# otherwise, build the new representation
s = representation.size
rep = {}
# build the representation for each element
for element in group.elements:
# retrieve the input representation of the current element
r = representation(element)
# build the matrix for the output representation of the current element
rep[element] = np.zeros((2 * s, 2 * s))
# check if the input matrix has the pattern of a permutation matrix
e = [-1] * s
for i in range(s):
for j in range(s):
if not math.isclose(r[i, j], 0, abs_tol=1e-9):
if e[i] < 0:
e[i] = j
else:
raise ValueError(
'''Error! the representation should have the pattern of a permutation matrix
but 2 values have been found in a row for element "{}"'''.format(
element
)
)
if len(set(e)) != len(e):
raise ValueError(
'''Error! the representation should have the pattern of a permutation matrix
but 2 values have been found in a column for element "{}"'''.format(
element
)
)
# parse the input representation matrix and fill the output representation accordingly
for i in range(s):
for j in range(s):
if math.isclose(r[i, j], 1, abs_tol=1e-9):
# if the current cell contains 1, fill the output submatrix with the 2x2 identity
rep[element][2 * i : 2 * i + 2, 2 * j : 2 * j + 2] = np.eye(
2
)
elif math.isclose(r[i, j], -1, abs_tol=1e-9):
                        # if the current cell contains -1, fill the output submatrix with the 2x2 antidiagonal matrix
rep[element][
2 * i : 2 * i + 2, 2 * j : 2 * j + 2
] = np.flipud(np.eye(2))
elif not math.isclose(r[i, j], 0, abs_tol=1e-9):
# otherwise the cell has to contain a 0
raise ValueError(
'''Error! The representation should be a signed permutation matrix and, therefore,
contain only -1, 1 or 0 values but {} found in position({}, {}) for element "{}"'''.format(
r[i, j], i, j, element
)
)
            # the resulting representation is a quotient representation and, therefore,
# it also supports pointwise non-linearities
nonlinearities = representation.supported_nonlinearities.union(
["pointwise"]
)
# build the output representation
r = build_from_discrete_group_representation(
rep, name, group, supported_nonlinearities=nonlinearities
)
return r
| 37.047431 | 124 | 0.559159 | 8,980 | 0.958071 | 0 | 0 | 5,327 | 0.568335 | 0 | 0 | 3,686 | 0.393257 |
81710a8f5de8e268c2f9c31947a6e69ae41e9b04 | 2,551 | py | Python | modules/gathering/host_gathering.py | anouarbensaad/VulnX | 2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb | [
"MIT"
]
| 10 | 2019-05-10T04:43:54.000Z | 2019-05-16T00:45:46.000Z | modules/gathering/host_gathering.py | anouarbensaad/VulnX | 2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb | [
"MIT"
]
| null | null | null | modules/gathering/host_gathering.py | anouarbensaad/VulnX | 2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb | [
"MIT"
]
| 8 | 2019-05-13T04:25:11.000Z | 2019-05-15T02:47:49.000Z | import requests
import re
import socket
from common.colors import bad,que, info, good,run,W,end
from common.uriParser import parsing_url as hostd
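# Illustrative usage (the URL is hypothetical):
#   gatherer = GatherHost('http://example.com')
#   gatherer.os_server()
#   gatherer.web_host()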
class GatherHost():
def __init__(self,url,headers=None):
self.url = url
self.headers = headers
def match_info(self,regex,data):
match = re.search(regex, data)
if match:
return dict(
data=match.group(1)
)
    def match_printer(self,to_match,match):
        # match_info returns None when nothing matched, so guard before indexing
        if match and match['data']:
            print(' {0} {1} : {2}'.format(good,to_match,match['data']))
def os_server(self):
response = requests.get(self.url, headers=self.headers).headers
try:
regx = re.compile(r"(.+) \((.+)\)")
data = regx.search(response["server"])
try:
print(' {0} {1}Server :{2} {3}' .format(good, W, end, data.group(1)))
print(' {0} {1}OS :{2} {3}' .format(good, W, end, data.group(2)))
except AttributeError:
print(' {0} Cannot Find OS & HostingServer ' .format(bad))
except KeyError:
print(' {0} Cannot Find the server headers ' .format(bad))
def web_host(self):
urldate = "https://input.payapi.io/v1/api/fraud/domain/age/" + hostd(self.url)
        getinfo = requests.get(urldate, headers=self.headers).text  # pass headers by keyword, not as query params
regex_date = r'Date: (.+?)-(.+?)'
regex_date = re.compile(regex_date)
matches = re.search(regex_date, getinfo)
try:
if matches:
print(' {0} Domain Created on : {1}'.format(good, matches.group(1)))
ip = socket.gethostbyname(hostd(self.url))
print(' {0} CloudFlare IP : {1}'.format(good, ip))
ipinfo = "http://ipinfo.io/" + ip + "/json"
            gather = requests.get(ipinfo, headers=self.headers).text
self.match_printer('Country',self.match_info(r'country\": \"(.+?)\"',gather))
self.match_printer('Region',self.match_info(r'region\": \"(.+?)\"',gather))
self.match_printer('Timezone',self.match_info(r'timezone\": \"(.+?)\"',gather))
self.match_printer('Postal',self.match_info(r'postal\": \"(.+?)\"',gather))
self.match_printer('Org',self.match_info(r'org\": \"(.+?)\"',gather))
self.match_printer('Location',self.match_info(r'loc\": \"(.+?)\"',gather))
except Exception as err:
print(' {0} Parse Error : {1}' .format(bad,err)) | 42.516667 | 95 | 0.53822 | 2,404 | 0.942376 | 0 | 0 | 0 | 0 | 0 | 0 | 529 | 0.20737 |
8171ba68e87f53d5c2ecb6dd90deb2acd88e328d | 34,379 | py | Python | datastore/core/basic.py | datastore/datastore | 7ccf0cd4748001d3dbf5e6dda369b0f63e0269d3 | [
"MIT"
]
| 65 | 2015-03-22T23:43:48.000Z | 2022-03-25T16:10:33.000Z | datastore/core/basic.py | datastore/datastore | 7ccf0cd4748001d3dbf5e6dda369b0f63e0269d3 | [
"MIT"
]
| 3 | 2015-03-11T21:57:23.000Z | 2019-07-26T16:20:29.000Z | datastore/core/basic.py | datastore/datastore | 7ccf0cd4748001d3dbf5e6dda369b0f63e0269d3 | [
"MIT"
]
| 14 | 2015-01-23T17:03:33.000Z | 2020-02-03T06:35:04.000Z |
from key import Key
from query import Cursor
class Datastore(object):
'''A Datastore represents storage for any key-value pair.
Datastores are general enough to be backed by all kinds of different storage:
in-memory caches, databases, a remote datastore, flat files on disk, etc.
The general idea is to wrap a more complicated storage facility in a simple,
uniform interface, keeping the freedom of using the right tools for the job.
In particular, a Datastore can aggregate other datastores in interesting ways,
like sharded (to distribute load) or tiered access (caches before databases).
While Datastores should be written general enough to accept all sorts of
values, some implementations will undoubtedly have to be specific (e.g. SQL
databases where fields should be decomposed into columns), particularly to
support queries efficiently.
'''
  # Main API. Datastore implementations MUST implement these methods.
def get(self, key):
'''Return the object named by key or None if it does not exist.
None takes the role of default value, so no KeyError exception is raised.
Args:
key: Key naming the object to retrieve
Returns:
object or None
'''
raise NotImplementedError
def put(self, key, value):
'''Stores the object `value` named by `key`.
How to serialize and store objects is up to the underlying datastore.
It is recommended to use simple objects (strings, numbers, lists, dicts).
Args:
key: Key naming `value`
value: the object to store.
'''
raise NotImplementedError
def delete(self, key):
'''Removes the object named by `key`.
Args:
key: Key naming the object to remove.
'''
raise NotImplementedError
def query(self, query):
'''Returns an iterable of objects matching criteria expressed in `query`
Implementations of query will be the largest differentiating factor
amongst datastores. All datastores **must** implement query, even using
query's worst case scenario, see :ref:class:`Query` for details.
Args:
query: Query object describing the objects to return.
    Returns:
iterable cursor with all objects matching criteria
'''
raise NotImplementedError
# Secondary API. Datastores MAY provide optimized implementations.
def contains(self, key):
'''Returns whether the object named by `key` exists.
The default implementation pays the cost of a get. Some datastore
implementations may optimize this.
Args:
key: Key naming the object to check.
Returns:
      boolean whether the object exists
'''
return self.get(key) is not None
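# Minimal illustration, using the DictDatastore implemented below:
#   ds = DictDatastore()
#   ds.put(Key('/a'), 1)
#   ds.get(Key('/a'))       # -> 1
#   ds.contains(Key('/b'))  # -> False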
class NullDatastore(Datastore):
'''Stores nothing, but conforms to the API. Useful to test with.'''
def get(self, key):
'''Return the object named by key or None if it does not exist (None).'''
return None
def put(self, key, value):
'''Store the object `value` named by `key` (does nothing).'''
pass
def delete(self, key):
'''Remove the object named by `key` (does nothing).'''
pass
def query(self, query):
'''Returns an iterable of objects matching criteria in `query` (empty).'''
return query([])
class DictDatastore(Datastore):
'''Simple straw-man in-memory datastore backed by nested dicts.'''
def __init__(self):
self._items = dict()
def _collection(self, key):
'''Returns the namespace collection for `key`.'''
collection = str(key.path)
if not collection in self._items:
self._items[collection] = dict()
return self._items[collection]
def get(self, key):
'''Return the object named by `key` or None.
Retrieves the object from the collection corresponding to ``key.path``.
Args:
key: Key naming the object to retrieve.
Returns:
object or None
'''
try:
return self._collection(key)[key]
except KeyError, e:
return None
def put(self, key, value):
'''Stores the object `value` named by `key`.
Stores the object in the collection corresponding to ``key.path``.
Args:
key: Key naming `value`
value: the object to store.
'''
if value is None:
self.delete(key)
else:
self._collection(key)[key] = value
def delete(self, key):
'''Removes the object named by `key`.
Removes the object from the collection corresponding to ``key.path``.
Args:
key: Key naming the object to remove.
'''
try:
del self._collection(key)[key]
if len(self._collection(key)) == 0:
del self._items[str(key.path)]
except KeyError, e:
pass
def contains(self, key):
'''Returns whether the object named by `key` exists.
Checks for the object in the collection corresponding to ``key.path``.
Args:
key: Key naming the object to check.
Returns:
      boolean whether the object exists
'''
return key in self._collection(key)
def query(self, query):
'''Returns an iterable of objects matching criteria expressed in `query`
Naively applies the query operations on the objects within the namespaced
collection corresponding to ``query.key.path``.
Args:
query: Query object describing the objects to return.
    Returns:
iterable cursor with all objects matching criteria
'''
# entire dataset already in memory, so ok to apply query naively
if str(query.key) in self._items:
return query(self._items[str(query.key)].values())
else:
return query([])
def __len__(self):
return sum(map(len, self._items.values()))
class InterfaceMappingDatastore(Datastore):
'''Represents simple wrapper datastore around an object that, though not a
Datastore, implements data storage through a similar interface. For example,
memcached and redis both implement a `get`, `set`, `delete` interface.
'''
def __init__(self, service, get='get', put='put', delete='delete', key=str):
'''Initialize the datastore with given `service`.
Args:
service: A service that provides data storage through a similar interface
to Datastore. Using the service should only require a simple mapping
of methods, such as {put : set}.
get: The attribute name of the `service` method implementing get
put: The attribute name of the `service` method implementing put
delete: The attribute name of the `service` method implementing delete
key: A function converting a Datastore key (of type Key) into a `service`
key. The conversion will often be as simple as `str`.
'''
self._service = service
self._service_key = key
self._service_ops = {}
self._service_ops['get'] = getattr(service, get)
self._service_ops['put'] = getattr(service, put)
self._service_ops['delete'] = getattr(service, delete)
# AttributeError will be raised if service does not implement the interface
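    # Example wiring (illustrative; assumes a redis-py style client, whose
    # interface is get/set/delete rather than get/put/delete):
    #   import redis
    #   ds = InterfaceMappingDatastore(redis.Redis(), put='set')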
def get(self, key):
'''Return the object in `service` named by `key` or None.
Args:
key: Key naming the object to retrieve.
Returns:
object or None
'''
key = self._service_key(key)
return self._service_ops['get'](key)
def put(self, key, value):
'''Stores the object `value` named by `key` in `service`.
Args:
key: Key naming `value`.
value: the object to store.
'''
key = self._service_key(key)
self._service_ops['put'](key, value)
def delete(self, key):
'''Removes the object named by `key` in `service`.
Args:
key: Key naming the object to remove.
'''
key = self._service_key(key)
self._service_ops['delete'](key)
class ShimDatastore(Datastore):
'''Represents a non-concrete datastore that adds functionality between the
client and a lower level datastore. Shim datastores do not actually store
data themselves; instead, they delegate storage to an underlying child
datastore. The default implementation just passes all calls to the child.
'''
def __init__(self, datastore):
'''Initializes this ShimDatastore with child `datastore`.'''
if not isinstance(datastore, Datastore):
errstr = 'datastore must be of type %s. Got %s.'
raise TypeError(errstr % (Datastore, datastore))
self.child_datastore = datastore
# default implementation just passes all calls to child
def get(self, key):
'''Return the object named by key or None if it does not exist.
Default shim implementation simply returns ``child_datastore.get(key)``
Override to provide different functionality, for example::
def get(self, key):
value = self.child_datastore.get(key)
return json.loads(value)
Args:
key: Key naming the object to retrieve
Returns:
object or None
'''
return self.child_datastore.get(key)
def put(self, key, value):
'''Stores the object `value` named by `key`.
Default shim implementation simply calls ``child_datastore.put(key, value)``
Override to provide different functionality, for example::
def put(self, key, value):
value = json.dumps(value)
self.child_datastore.put(key, value)
Args:
key: Key naming `value`.
value: the object to store.
'''
self.child_datastore.put(key, value)
def delete(self, key):
'''Removes the object named by `key`.
Default shim implementation simply calls ``child_datastore.delete(key)``
Override to provide different functionality.
Args:
key: Key naming the object to remove.
'''
self.child_datastore.delete(key)
def query(self, query):
'''Returns an iterable of objects matching criteria expressed in `query`.
Default shim implementation simply returns ``child_datastore.query(query)``
Override to provide different functionality, for example::
def query(self, query):
cursor = self.child_datastore.query(query)
cursor._iterable = deserialized(cursor._iterable)
return cursor
Args:
query: Query object describing the objects to return.
    Returns:
iterable cursor with all objects matching criteria
'''
return self.child_datastore.query(query)
class CacheShimDatastore(ShimDatastore):
'''Wraps a datastore with a caching shim optimizes some calls.'''
def __init__(self, *args, **kwargs):
self.cache_datastore = kwargs.pop('cache')
if not isinstance(self.cache_datastore, Datastore):
errstr = 'datastore must be of type %s. Got %s.'
raise TypeError(errstr % (Datastore, self.cache_datastore))
super(CacheShimDatastore, self).__init__(*args, **kwargs)
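    # Typical wiring (illustrative): a fast in-memory cache in front of a
    # slower persistent child datastore:
    #   cached = CacheShimDatastore(slow_store, cache=DictDatastore())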
def get(self, key):
'''Return the object named by key or None if it does not exist.
CacheShimDatastore first checks its ``cache_datastore``.
'''
value = self.cache_datastore.get(key)
return value if value is not None else self.child_datastore.get(key)
def put(self, key, value):
    '''Stores the object `value` named by `key`.
Writes to both ``cache_datastore`` and ``child_datastore``.
'''
self.cache_datastore.put(key, value)
self.child_datastore.put(key, value)
def delete(self, key):
'''Removes the object named by `key`.
Writes to both ``cache_datastore`` and ``child_datastore``.
'''
self.cache_datastore.delete(key)
self.child_datastore.delete(key)
def contains(self, key):
'''Returns whether the object named by `key` exists.
First checks ``cache_datastore``.
'''
return self.cache_datastore.contains(key) \
or self.child_datastore.contains(key)
class LoggingDatastore(ShimDatastore):
'''Wraps a datastore with a logging shim.'''
def __init__(self, child_datastore, logger=None):
if not logger:
import logging
logger = logging
self.logger = logger
super(LoggingDatastore, self).__init__(child_datastore)
def get(self, key):
'''Return the object named by key or None if it does not exist.
LoggingDatastore logs the access.
'''
self.logger.info('%s: get %s' % (self, key))
value = super(LoggingDatastore, self).get(key)
self.logger.debug('%s: %s' % (self, value))
return value
def put(self, key, value):
    '''Stores the object `value` named by `key`.
LoggingDatastore logs the access.
'''
self.logger.info('%s: put %s' % (self, key))
self.logger.debug('%s: %s' % (self, value))
super(LoggingDatastore, self).put(key, value)
def delete(self, key):
'''Removes the object named by `key`.
LoggingDatastore logs the access.
'''
self.logger.info('%s: delete %s' % (self, key))
super(LoggingDatastore, self).delete(key)
def contains(self, key):
'''Returns whether the object named by `key` exists.
LoggingDatastore logs the access.
'''
self.logger.info('%s: contains %s' % (self, key))
return super(LoggingDatastore, self).contains(key)
def query(self, query):
'''Returns an iterable of objects matching criteria expressed in `query`.
LoggingDatastore logs the access.
'''
self.logger.info('%s: query %s' % (self, query))
return super(LoggingDatastore, self).query(query)
class KeyTransformDatastore(ShimDatastore):
'''Represents a simple ShimDatastore that applies a transform on all incoming
keys. For example:
>>> import datastore.core
>>> def transform(key):
... return key.reverse
...
>>> ds = datastore.DictDatastore()
>>> kt = datastore.KeyTransformDatastore(ds, keytransform=transform)
>>> ds.put(datastore.Key('/a/b/c'), 'abc')
>>> ds.get(datastore.Key('/a/b/c'))
'abc'
>>> kt.get(datastore.Key('/a/b/c'))
None
>>> kt.get(datastore.Key('/c/b/a'))
'abc'
>>> ds.get(datastore.Key('/c/b/a'))
None
'''
def __init__(self, *args, **kwargs):
'''Initializes KeyTransformDatastore with `keytransform` function.'''
self.keytransform = kwargs.pop('keytransform', None)
super(KeyTransformDatastore, self).__init__(*args, **kwargs)
def get(self, key):
'''Return the object named by keytransform(key).'''
return self.child_datastore.get(self._transform(key))
def put(self, key, value):
'''Stores the object names by keytransform(key).'''
return self.child_datastore.put(self._transform(key), value)
def delete(self, key):
'''Removes the object named by keytransform(key).'''
return self.child_datastore.delete(self._transform(key))
def contains(self, key):
'''Returns whether the object named by key is in this datastore.'''
return self.child_datastore.contains(self._transform(key))
def query(self, query):
'''Returns a sequence of objects matching criteria expressed in `query`'''
query = query.copy()
query.key = self._transform(query.key)
return self.child_datastore.query(query)
def _transform(self, key):
'''Returns a `key` transformed by `self.keytransform`.'''
return self.keytransform(key) if self.keytransform else key
class LowercaseKeyDatastore(KeyTransformDatastore):
'''Represents a simple ShimDatastore that lowercases all incoming keys.
For example:
>>> import datastore.core
>>> ds = datastore.DictDatastore()
>>> ds.put(datastore.Key('hello'), 'world')
>>> ds.put(datastore.Key('HELLO'), 'WORLD')
>>> ds.get(datastore.Key('hello'))
'world'
>>> ds.get(datastore.Key('HELLO'))
'WORLD'
>>> ds.get(datastore.Key('HeLlO'))
None
>>> lds = datastore.LowercaseKeyDatastore(ds)
>>> lds.get(datastore.Key('HeLlO'))
'world'
>>> lds.get(datastore.Key('HeLlO'))
'world'
>>> lds.get(datastore.Key('HeLlO'))
'world'
'''
def __init__(self, *args, **kwargs):
'''Initializes KeyTransformDatastore with keytransform function.'''
super(LowercaseKeyDatastore, self).__init__(*args, **kwargs)
self.keytransform = self.lowercaseKey
@classmethod
def lowercaseKey(cls, key):
'''Returns a lowercased `key`.'''
return Key(str(key).lower())
class NamespaceDatastore(KeyTransformDatastore):
'''Represents a simple ShimDatastore that namespaces all incoming keys.
For example:
>>> import datastore.core
>>>
>>> ds = datastore.DictDatastore()
>>> ds.put(datastore.Key('/a/b'), 'ab')
>>> ds.put(datastore.Key('/c/d'), 'cd')
>>> ds.put(datastore.Key('/a/b/c/d'), 'abcd')
>>>
>>> nd = datastore.NamespaceDatastore('/a/b', ds)
>>> nd.get(datastore.Key('/a/b'))
None
>>> nd.get(datastore.Key('/c/d'))
'abcd'
>>> nd.get(datastore.Key('/a/b/c/d'))
None
>>> nd.put(datastore.Key('/c/d'), 'cd')
>>> ds.get(datastore.Key('/a/b/c/d'))
'cd'
'''
def __init__(self, namespace, *args, **kwargs):
'''Initializes NamespaceDatastore with `key` namespace.'''
super(NamespaceDatastore, self).__init__(*args, **kwargs)
self.keytransform = self.namespaceKey
self.namespace = Key(namespace)
def namespaceKey(self, key):
'''Returns a namespaced `key`: namespace.child(key).'''
return self.namespace.child(key)
class NestedPathDatastore(KeyTransformDatastore):
'''Represents a simple ShimDatastore that shards/namespaces incoming keys.
Incoming keys are sharded into nested namespaces. The idea is to use the key
name to separate into nested namespaces. This is akin to the directory
structure that ``git`` uses for objects. For example:
>>> import datastore.core
>>>
>>> ds = datastore.DictDatastore()
>>> np = datastore.NestedPathDatastore(ds, depth=3, length=2)
>>>
>>> np.put(datastore.Key('/abcdefghijk'), 1)
>>> np.get(datastore.Key('/abcdefghijk'))
1
>>> ds.get(datastore.Key('/abcdefghijk'))
None
>>> ds.get(datastore.Key('/ab/cd/ef/abcdefghijk'))
1
>>> np.put(datastore.Key('abc'), 2)
>>> np.get(datastore.Key('abc'))
2
>>> ds.get(datastore.Key('/ab/ca/bc/abc'))
2
'''
_default_depth = 3
_default_length = 2
_default_keyfn = lambda key: key.name
_default_keyfn = staticmethod(_default_keyfn)
def __init__(self, *args, **kwargs):
'''Initializes KeyTransformDatastore with keytransform function.
kwargs:
depth: the nesting level depth (e.g. 3 => /1/2/3/123) default: 3
length: the nesting level length (e.g. 2 => /12/123456) default: 2
'''
# assign the nesting variables
self.nest_depth = kwargs.pop('depth', self._default_depth)
self.nest_length = kwargs.pop('length', self._default_length)
self.nest_keyfn = kwargs.pop('keyfn', self._default_keyfn)
super(NestedPathDatastore, self).__init__(*args, **kwargs)
self.keytransform = self.nestKey
def query(self, query):
# Requires supporting * operator on queries.
raise NotImplementedError
def nestKey(self, key):
'''Returns a nested `key`.'''
nest = self.nest_keyfn(key)
# if depth * length > len(key.name), we need to pad.
mult = 1 + int(self.nest_depth * self.nest_length / len(nest))
nest = nest * mult
pref = Key(self.nestedPath(nest, self.nest_depth, self.nest_length))
return pref.child(key)
@staticmethod
def nestedPath(path, depth, length):
    '''returns a nested version of `path`, using the starting characters.
      For example:
      >>> NestedPathDatastore.nestedPath('abcdefghijk', 3, 2)
      'ab/cd/ef'
      >>> NestedPathDatastore.nestedPath('abcdefghijk', 4, 2)
      'ab/cd/ef/gh'
      >>> NestedPathDatastore.nestedPath('abcdefghijk', 3, 4)
      'abcd/efgh/ijk'
      >>> NestedPathDatastore.nestedPath('abcdefghijk', 1, 4)
      'abcd'
      >>> NestedPathDatastore.nestedPath('abcdefghijk', 3, 10)
      'abcdefghij/k'
'''
components = [path[n:n+length] for n in xrange(0, len(path), length)]
components = components[:depth]
return '/'.join(components)
class SymlinkDatastore(ShimDatastore):
'''Datastore that creates filesystem-like symbolic link keys.
A symbolic link key is a way of naming the same value with multiple keys.
For example:
>>> import datastore.core
>>>
>>> dds = datastore.DictDatastore()
>>> sds = datastore.SymlinkDatastore(dds)
>>>
>>> a = datastore.Key('/A')
>>> b = datastore.Key('/B')
>>>
>>> sds.put(a, 1)
>>> sds.get(a)
1
>>> sds.link(a, b)
>>> sds.get(b)
1
>>> sds.put(b, 2)
>>> sds.get(b)
2
>>> sds.get(a)
2
>>> sds.delete(a)
>>> sds.get(a)
None
>>> sds.get(b)
None
>>> sds.put(a, 3)
>>> sds.get(a)
3
>>> sds.get(b)
3
>>> sds.delete(b)
>>> sds.get(b)
None
>>> sds.get(a)
3
'''
sentinel = 'datastore_link'
def _link_value_for_key(self, source_key):
'''Returns the link value for given `key`.'''
return str(source_key.child(self.sentinel))
def _link_for_value(self, value):
'''Returns the linked key if `value` is a link, or None.'''
try:
key = Key(value)
if key.name == self.sentinel:
return key.parent
except:
pass
return None
def _follow_link(self, value):
'''Returns given `value` or, if it is a symlink, the `value` it names.'''
seen_keys = set()
while True:
link_key = self._link_for_value(value)
if not link_key:
return value
assert link_key not in seen_keys, 'circular symlink reference'
seen_keys.add(link_key)
value = super(SymlinkDatastore, self).get(link_key)
def _follow_link_gen(self, iterable):
'''A generator that follows links in values encountered.'''
for item in iterable:
yield self._follow_link(item)
def link(self, source_key, target_key):
'''Creates a symbolic link key pointing from `target_key` to `source_key`'''
link_value = self._link_value_for_key(source_key)
# put straight into the child, to avoid following previous links.
self.child_datastore.put(target_key, link_value)
# exercise the link. ensure there are no cycles.
self.get(target_key)
def get(self, key):
'''Return the object named by `key. Follows links.'''
value = super(SymlinkDatastore, self).get(key)
return self._follow_link(value)
def put(self, key, value):
'''Stores the object named by `key`. Follows links.'''
# if value is a link, don't follow links
if self._link_for_value(value):
super(SymlinkDatastore, self).put(key, value)
return
# if `key` points to a symlink, need to follow it.
current_value = super(SymlinkDatastore, self).get(key)
link_key = self._link_for_value(current_value)
if link_key:
self.put(link_key, value) # self.put: could be another link.
else:
super(SymlinkDatastore, self).put(key, value)
def query(self, query):
'''Returns objects matching criteria expressed in `query`. Follows links.'''
results = super(SymlinkDatastore, self).query(query)
return self._follow_link_gen(results)
class DirectoryDatastore(ShimDatastore):
'''Datastore that allows manual tracking of directory entries.
For example:
>>> ds = DirectoryDatastore(ds)
>>>
>>> # initialize directory at /foo
>>> ds.directory(Key('/foo'))
>>>
>>> # adding directory entries
>>> ds.directoryAdd(Key('/foo'), Key('/foo/bar'))
>>> ds.directoryAdd(Key('/foo'), Key('/foo/baz'))
>>>
>>> # value is a generator returning all the keys in this dir
>>> for key in ds.directoryRead(Key('/foo')):
... print key
Key('/foo/bar')
Key('/foo/baz')
>>>
>>> # querying for a collection works
>>> for item in ds.query(Query(Key('/foo'))):
... print item
'bar'
'baz'
'''
def directory(self, dir_key):
'''Initializes directory at dir_key.'''
dir_items = self.get(dir_key)
if not isinstance(dir_items, list):
self.put(dir_key, [])
def directoryRead(self, dir_key):
'''Returns a generator that iterates over all keys in the directory
referenced by `dir_key`
Returns None if the directory `dir_key` does not exist
'''
return self.directory_entries_generator(dir_key)
def directoryAdd(self, dir_key, key):
'''Adds directory entry `key` to directory at `dir_key`.
If the directory `dir_key` does not exist, it is created.
'''
key = str(key)
dir_items = self.get(dir_key) or []
if key not in dir_items:
dir_items.append(key)
self.put(dir_key, dir_items)
def directoryRemove(self, dir_key, key):
'''Removes directory entry `key` from directory at `dir_key`.
If either the directory `dir_key` or the directory entry `key` don't exist,
this method is a no-op.
'''
key = str(key)
dir_items = self.get(dir_key) or []
if key in dir_items:
dir_items = [k for k in dir_items if k != key]
self.put(dir_key, dir_items)
def directory_entries_generator(self, dir_key):
dir_items = self.get(dir_key) or []
for item in dir_items:
yield Key(item)
class DirectoryTreeDatastore(ShimDatastore):
'''Datastore that tracks directory entries, like in a filesystem.
All key changes cause changes in a collection-like directory.
For example:
>>> import datastore.core
>>>
>>> dds = datastore.DictDatastore()
>>> rds = datastore.DirectoryTreeDatastore(dds)
>>>
>>> a = datastore.Key('/A')
>>> b = datastore.Key('/A/B')
>>> c = datastore.Key('/A/C')
>>>
>>> rds.get(a)
[]
>>> rds.put(b, 1)
>>> rds.get(b)
1
>>> rds.get(a)
['/A/B']
>>> rds.put(c, 1)
>>> rds.get(c)
1
>>> rds.get(a)
['/A/B', '/A/C']
>>> rds.delete(b)
>>> rds.get(a)
['/A/C']
>>> rds.delete(c)
>>> rds.get(a)
[]
'''
def put(self, key, value):
    '''Stores the object `value` named by `key`.
DirectoryTreeDatastore stores a directory entry.
'''
super(DirectoryTreeDatastore, self).put(key, value)
str_key = str(key)
# ignore root
if str_key == '/':
return
# retrieve directory, to add entry
dir_key = key.parent.instance('directory')
directory = self.directory(dir_key)
# ensure key is in directory
if str_key not in directory:
directory.append(str_key)
super(DirectoryTreeDatastore, self).put(dir_key, directory)
def delete(self, key):
'''Removes the object named by `key`.
DirectoryTreeDatastore removes the directory entry.
'''
super(DirectoryTreeDatastore, self).delete(key)
str_key = str(key)
# ignore root
if str_key == '/':
return
# retrieve directory, to remove entry
dir_key = key.parent.instance('directory')
directory = self.directory(dir_key)
# ensure key is not in directory
if directory and str_key in directory:
directory.remove(str_key)
if len(directory) > 0:
super(DirectoryTreeDatastore, self).put(dir_key, directory)
else:
super(DirectoryTreeDatastore, self).delete(dir_key)
def query(self, query):
'''Returns objects matching criteria expressed in `query`.
DirectoryTreeDatastore uses directory entries.
'''
return query(self.directory_values_generator(query.key))
def directory(self, key):
'''Retrieves directory entries for given key.'''
if key.name != 'directory':
key = key.instance('directory')
return self.get(key) or []
def directory_values_generator(self, key):
'''Retrieve directory values for given key.'''
directory = self.directory(key)
for key in directory:
yield self.get(Key(key))
class DatastoreCollection(ShimDatastore):
'''Represents a collection of datastores.'''
def __init__(self, stores=[]):
'''Initialize the datastore with any provided datastores.'''
if not isinstance(stores, list):
stores = list(stores)
for store in stores:
if not isinstance(store, Datastore):
raise TypeError("all stores must be of type %s" % Datastore)
self._stores = stores
def datastore(self, index):
'''Returns the datastore at `index`.'''
return self._stores[index]
def appendDatastore(self, store):
'''Appends datastore `store` to this collection.'''
if not isinstance(store, Datastore):
raise TypeError("stores must be of type %s" % Datastore)
self._stores.append(store)
def removeDatastore(self, store):
'''Removes datastore `store` from this collection.'''
self._stores.remove(store)
def insertDatastore(self, index, store):
'''Inserts datastore `store` into this collection at `index`.'''
if not isinstance(store, Datastore):
raise TypeError("stores must be of type %s" % Datastore)
self._stores.insert(index, store)
class TieredDatastore(DatastoreCollection):
'''Represents a hierarchical collection of datastores.
Each datastore is queried in order. This is helpful to organize access
order in terms of speed (i.e. read caches first).
Datastores should be arranged in order of completeness, with the most complete
datastore last, as it will handle query calls.
Semantics:
* get : returns first found value
* put : writes through to all
* delete : deletes through to all
* contains : returns first found value
* query : queries bottom (most complete) datastore
'''
def get(self, key):
'''Return the object named by key. Checks each datastore in order.'''
value = None
for store in self._stores:
value = store.get(key)
if value is not None:
break
# add model to lower stores only
if value is not None:
for store2 in self._stores:
if store == store2:
break
store2.put(key, value)
return value
def put(self, key, value):
'''Stores the object in all underlying datastores.'''
for store in self._stores:
store.put(key, value)
def delete(self, key):
'''Removes the object from all underlying datastores.'''
for store in self._stores:
store.delete(key)
def query(self, query):
'''Returns a sequence of objects matching criteria expressed in `query`.
The last datastore will handle all query calls, as it has a (if not
the only) complete record of all objects.
'''
# queries hit the last (most complete) datastore
return self._stores[-1].query(query)
def contains(self, key):
'''Returns whether the object is in this datastore.'''
for store in self._stores:
if store.contains(key):
return True
return False
class ShardedDatastore(DatastoreCollection):
'''Represents a collection of datastore shards.
A datastore is selected based on a sharding function.
Sharding functions should take a Key and return an integer.
WARNING: adding or removing datastores while mid-use may severely affect
consistency. Also ensure the order is correct upon initialization.
While this is not as important for caches, it is crucial for
persistent datastores.
'''
def __init__(self, stores=[], shardingfn=hash):
'''Initialize the datastore with any provided datastore.'''
if not callable(shardingfn):
raise TypeError('shardingfn (type %s) is not callable' % type(shardingfn))
super(ShardedDatastore, self).__init__(stores)
self._shardingfn = shardingfn
def shard(self, key):
'''Returns the shard index to handle `key`, according to sharding fn.'''
return self._shardingfn(key) % len(self._stores)
def shardDatastore(self, key):
'''Returns the shard to handle `key`.'''
return self.datastore(self.shard(key))
def get(self, key):
'''Return the object named by key from the corresponding datastore.'''
return self.shardDatastore(key).get(key)
def put(self, key, value):
'''Stores the object to the corresponding datastore.'''
self.shardDatastore(key).put(key, value)
def delete(self, key):
'''Removes the object from the corresponding datastore.'''
self.shardDatastore(key).delete(key)
def contains(self, key):
'''Returns whether the object is in this datastore.'''
return self.shardDatastore(key).contains(key)
def query(self, query):
'''Returns a sequence of objects matching criteria expressed in `query`'''
cursor = Cursor(query, self.shard_query_generator(query))
cursor.apply_order() # ordering sharded queries is expensive (no generator)
return cursor
def shard_query_generator(self, query):
'''A generator that queries each shard in sequence.'''
shard_query = query.copy()
for shard in self._stores:
# yield all items matching within this shard
cursor = shard.query(shard_query)
for item in cursor:
yield item
# update query with results of first query
shard_query.offset = max(shard_query.offset - cursor.skipped, 0)
if shard_query.limit:
shard_query.limit = max(shard_query.limit - cursor.returned, 0)
if shard_query.limit <= 0:
break # we're already done!
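    # Illustrative walk-through of the offset/limit bookkeeping above,
    # assuming Cursor exposes `skipped` (items consumed by the offset) and
    # `returned` (items yielded), as the generator uses them:
    #   query(offset=5, limit=10) over shards holding 3, 4 and 8 matches:
    #     shard 1: skips 3, yields 0 -> offset becomes 5-3=2, limit stays 10
    #     shard 2: skips 2, yields 2 -> offset becomes 0,     limit becomes 8
    #     shard 3: yields 8          -> limit reaches 0, generator stops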
'''
Hello Tiered Access
>>> import pymongo
>>> import datastore.core
>>>
>>> from datastore.impl.mongo import MongoDatastore
>>> from datastore.impl.lrucache import LRUCache
>>> from datastore.impl.filesystem import FileSystemDatastore
>>>
>>> conn = pymongo.Connection()
>>> mongo = MongoDatastore(conn.test_db)
>>>
>>> cache = LRUCache(1000)
>>> fs = FileSystemDatastore('/tmp/.test_db')
>>>
>>> ds = datastore.TieredDatastore([cache, mongo, fs])
>>>
>>> hello = datastore.Key('hello')
>>> ds.put(hello, 'world')
>>> ds.contains(hello)
True
>>> ds.get(hello)
'world'
>>> ds.delete(hello)
>>> ds.get(hello)
None
Hello Sharding
>>> import datastore.core
>>>
>>> shards = [datastore.DictDatastore() for i in range(0, 10)]
>>>
>>> ds = datastore.ShardedDatastore(shards)
>>>
>>> hello = datastore.Key('hello')
>>> ds.put(hello, 'world')
>>> ds.contains(hello)
True
>>> ds.get(hello)
'world'
>>> ds.delete(hello)
>>> ds.get(hello)
None
'''
| 28.53029 | 80 | 0.651706 | 33,149 | 0.964222 | 1,078 | 0.031356 | 842 | 0.024492 | 0 | 0 | 20,847 | 0.606388 |
8173b9a0c93895ba388cef8eeda4fb14eb4184e3 | 15,988 | py | Python | website/admin.py | jonfroehlich/makeabilitylabwebsite | 0b322b5a172cf1d4edc63559e3de713ad3e3542c | [
"MIT"
]
| 2 | 2017-06-19T02:24:48.000Z | 2018-10-25T09:14:59.000Z | website/admin.py | jonfroehlich/makeabilitylabwebsite | 0b322b5a172cf1d4edc63559e3de713ad3e3542c | [
"MIT"
]
| 571 | 2017-06-14T13:38:45.000Z | 2020-07-17T18:15:58.000Z | website/admin.py | jonfroehlich/makeabilitylabwebsite | 0b322b5a172cf1d4edc63559e3de713ad3e3542c | [
"MIT"
]
| 7 | 2017-12-06T21:51:29.000Z | 2020-06-18T19:58:36.000Z | from django.contrib import admin
from django.contrib.admin import widgets
from .models import Person, Publication, Position, Talk, Project, Poster, Keyword, News, Banner, Video, Project_header, Photo, Project_umbrella, Project_Role, Sponsor
from website.admin_list_filters import PositionRoleListFilter, PositionTitleListFilter, PubVenueTypeListFilter, PubVenueListFilter
from sortedm2m_filter_horizontal_widget.forms import SortedFilteredSelectMultiple
import django
from django import forms
from django.http import HttpResponse
from datetime import datetime
from django.template import loader
from django.template import RequestContext
from django.shortcuts import redirect
import urllib
import bibtexparser
from image_cropping import ImageCroppingMixin
class BannerAdmin(ImageCroppingMixin, admin.ModelAdmin):
fieldsets = [
(None, {'fields': ["page", "title", "caption", "alt_text", "link", "favorite", "project"]}),
# ('Image', {'fields': ["image", "image_preview"]})
('Image', {'fields': ["image", "cropping"]})
]
# The list display lets us control what is shown in the default persons table at Home > Website > Banners
# info on displaying multiple entries comes from http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
list_display = ('title', 'project', 'page', 'favorite', 'image')
# readonly_fields = ["image_preview"]
class PositionInline(admin.StackedInline):
model = Position
# This specifies that the Inline is linked to the main owner of the position rather than any of the advisor roles.
fk_name = "person"
# This specifies that the field appears only once (by default)
extra = 0
def formfield_for_foreignkey(self, db_field, request, **kwargs):
print("PositionInline.formfield_for_foreignkey: db_field: {} db_field.name {} request: {}".format(db_field, db_field.name, request))
if db_field.name == "advisor" or db_field.name == "co_advisor":
# Filters advisors to professors and sorts by first name
# Based on: http://stackoverflow.com/a/30627555
professor_ids = [person.id for person in Person.objects.all() if person.is_professor()]
filtered_persons = Person.objects.filter(id__in=professor_ids).order_by('first_name')
print(filtered_persons)
kwargs["queryset"] = filtered_persons
elif db_field.name == "grad_mentor":
# Filters grad mentor list to current grad students (either member or collaborator)
grad_ids = [person.id for person in Person.objects.all() if person.is_grad_student() and (person.is_current_member() or person.is_current_collaborator())]
filtered_persons = Person.objects.filter(id__in=grad_ids).order_by('first_name')
print(filtered_persons)
kwargs["queryset"] = filtered_persons
return super(PositionInline, self).formfield_for_foreignkey(db_field, request, **kwargs)
class ProjectRoleInline(admin.StackedInline):
model = Project_Role
extra = 0
class ProjectHeaderInline(ImageCroppingMixin, admin.StackedInline):
model = Project_header
extra = 0
# Uses format as per https://github.com/jonasundderwolf/django-image-cropping to add cropping to the admin page
class NewsAdmin(ImageCroppingMixin, admin.ModelAdmin):
# Filters authors only to current members and sorts by firstname
# Based on: http://stackoverflow.com/a/30627555
def formfield_for_foreignkey(self, db_field, request, **kwargs):
# print("NewsAdmin.formfield_for_foreignkey: db_field: {} db_field.name {} request: {}".format(db_field, db_field.name, request))
if db_field.name == "author":
current_member_ids = [person.id for person in Person.objects.all() if person.is_current_member()]
filtered_persons = Person.objects.filter(id__in=current_member_ids).order_by('first_name')
print(filtered_persons)
kwargs["queryset"] = filtered_persons
return super(NewsAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
if db_field.name == "project":
kwargs["widget"] = widgets.FilteredSelectMultiple("project", is_stacked=False)
return super(NewsAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class PhotoAdmin(ImageCroppingMixin, admin.ModelAdmin):
list_display = ('__str__', 'admin_thumbnail')
class ProjectAdmin(ImageCroppingMixin, admin.ModelAdmin):
inlines = [ProjectHeaderInline]
# The list display lets us control what is shown in the Project table at Home > Website > Project
# info on displaying multiple entries comes from http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
list_display = ('name', 'start_date', 'end_date', 'has_ended', 'get_people_count',
'get_current_member_count', 'get_past_member_count',
'get_most_recent_artifact_date', 'get_most_recent_artifact_type',
'get_publication_count', 'get_video_count', 'get_talk_count')
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
if db_field.name == "sponsors":
kwargs["widget"] = widgets.FilteredSelectMultiple("sponsors", is_stacked=False)
if db_field.name == "keywords":
kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
if db_field.name == "project_umbrellas":
kwargs["widget"] = widgets.FilteredSelectMultiple("project umbrellas", is_stacked=False)
return super(ProjectAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class PersonAdmin(ImageCroppingMixin, admin.ModelAdmin):
# inlines allow us to edit models on the same page as a parent model
# see: https://docs.djangoproject.com/en/1.11/ref/contrib/admin/#inlinemodeladmin-objects
inlines = [PositionInline, ProjectRoleInline]
# The list display lets us control what is shown in the default persons table at Home > Website > People
# info on displaying multiple entries comes from http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
list_display = ('get_full_name', 'get_current_title', 'get_current_role', 'is_active', 'get_start_date', 'get_end_date', 'get_time_in_current_position', 'get_total_time_as_member')
#TODO setup filter here that has diff categories (like active members, past, etc.):
#https://www.elements.nl/2015/03/16/getting-the-most-out-of-django-admin-filters/
#related to: https://github.com/jonfroehlich/makeabilitylabwebsite/issues/238
list_filter = (PositionRoleListFilter, PositionTitleListFilter)
class VideoAdmin(admin.ModelAdmin):
# The list display lets us control what is shown in the default persons table at Home > Website > Videos
# info on displaying multiple entries comes from http://stackoverflow.com/questions/9164610/custom-columns-using-django-admin
list_display = ('title', 'date', 'caption', 'project')
# search_fields are used for auto-complete, see:
# https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
search_fields = ['title', 'get_video_host_str', 'date']
# default the sort order in table to descending order by date
ordering = ('-date',)
class TalkAdmin(admin.ModelAdmin):
# The list display lets us control what is shown in the default talk table at Home > Website > Talk
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#django.contrib.admin.ModelAdmin.list_display
list_display = ('title', 'date', 'get_speakers_as_csv', 'forum_name', 'location', 'talk_type')
# search_fields are used for auto-complete, see:
# https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
# for example, the PublicationAdmin uses auto-complete select2 for talks
search_fields = ['title', 'forum_name']
# Filters speakers only to current members and collaborators and sorts by first name
# Based on: https://stackoverflow.com/a/17457828
# Update: we no longer do this because sometimes we want to add a talk by a former member or collaborator
def formfield_for_manytomany(self, db_field, request, **kwargs):
print("TalkAdmin.formfield_for_manytomany: db_field: {} db_field.name {} request: {}".format(db_field, db_field.name, request))
if db_field.name == "projects":
kwargs["widget"] = widgets.FilteredSelectMultiple("projects", is_stacked=False)
if db_field.name == "project_umbrellas":
kwargs["widget"] = widgets.FilteredSelectMultiple("project umbrellas", is_stacked=False, )
if db_field.name == "speakers":
# Uncomment the following block of code to limit the speakers field in the admin UI only to current lab members
# Note: we don't actually want to do this (see https://github.com/jonfroehlich/makeabilitylabwebsite/issues/534)
# but keeping it here because code may be useful in the future for other areas of admin interface
# current_member_and_collab_ids = [person.id for person in Person.objects.all() if person.is_current_member()]
# filtered_speakers = Person.objects.filter(id__in=current_member_and_collab_ids).order_by('first_name')
# kwargs["queryset"] = filtered_speakers
kwargs["widget"] = widgets.FilteredSelectMultiple("speakers", is_stacked=False)
if db_field.name == "keywords":
kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
return super(TalkAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class PosterAdmin(admin.ModelAdmin):
# search_fields are used for auto-complete, see:
# https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
search_fields = ['title', 'date']
def formfield_for_manytomany(self, db_field, request, **kwargs):
print("PosterAdmin.formfield_for_manytomany: db_field: {} db_field.name {} request: {}".format(db_field, db_field.name, request))
if db_field.name == "projects":
kwargs["widget"] = widgets.FilteredSelectMultiple("projects", is_stacked=False)
if db_field.name == "authors":
kwargs["widget"] = widgets.FilteredSelectMultiple("authors", is_stacked=False)
if db_field.name == "keywords":
kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
return super(PosterAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
class ProjectUmbrellaAdmin(admin.ModelAdmin):
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
if db_field.name == "keywords":
kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
return super(ProjectUmbrellaAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
#from https://stackoverflow.com/questions/9602217/define-an-order-for-manytomanyfield-with-django
#display items inline
class PublicationAuthorInline(admin.TabularInline):
model = Publication.authors.through
verbose_name = "Author"
verbose_name_plural = "Author Order"
class PublicationAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['title', 'authors', 'date']}),
('Files', {'fields': ['pdf_file']}),
('Pub Venue information', {'fields': ['pub_venue_url','pub_venue_type', 'book_title', 'book_title_short', 'geo_location', 'total_papers_submitted', 'total_papers_accepted']}),
('Archival Info', {'fields': ['official_url', 'extended_abstract', 'peer_reviewed', 'award' ]}),
('Page Info', {'fields': ['num_pages', 'page_num_start', 'page_num_end']}),
('Supplementary Artifacts', {'fields': ['poster', 'video', 'talk', 'code_repo_url']}),
('Project Info', {'fields': ['projects', 'project_umbrellas']}),
('Keyword Info', {'fields': ['keywords']}),
]
list_display = ('title', 'book_title_short', 'date')
# default the sort order in table to descending order by date
ordering = ('-date',)
list_filter = (PubVenueTypeListFilter, PubVenueListFilter)
# add in auto-complete fields for talks, see:
# https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.autocomplete_fields
# this addresses: https://github.com/jonfroehlich/makeabilitylabwebsite/issues/553
# You must also update the search_fields in the respective admins like PosterAdmin, VideoAdmin, and TalkAdmin
# these search fields become what the auto-complete function searches for filtering
autocomplete_fields = ['poster', 'video', 'talk']
def get_form(self, request, obj=None, **kwargs):
"""We custom style some of the admin UI, including expanding the width of the talk select interface"""
form = super(PublicationAdmin, self).get_form(request, obj, **kwargs)
# we style the talks select2 widget so that it's wider, see:
# https://docs.djangoproject.com/en/2.2/ref/forms/widgets/#customizing-widget-instances
# see also:
# https://stackoverflow.com/questions/10588275/django-change-field-size-of-modelmultiplechoicefield
# https://stackoverflow.com/questions/110378/change-the-width-of-form-elements-created-with-modelform-in-django
# and finally, this is what worked for me:
# https://stackoverflow.com/q/35211809
# to address: https://github.com/jonfroehlich/makeabilitylabwebsite/issues/851
text_min_width = 750
form.base_fields['title'].widget.attrs['style'] = 'min-width: {}px;'.format(text_min_width)
form.base_fields['book_title'].widget.attrs['style'] = 'min-width: {}px;'.format(text_min_width)
form.base_fields['book_title_short'].widget.attrs['style'] = 'min-width: {}px;'.format(500)
select_min_width = 600
form.base_fields['poster'].widget.attrs['style'] = 'min-width: {}px;'.format(select_min_width)
form.base_fields['video'].widget.attrs['style'] = 'min-width: {}px;'.format(select_min_width)
form.base_fields['talk'].widget.attrs['style'] = 'min-width: {}px;'.format(select_min_width)
return form
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
if db_field.name == "authors":
kwargs['widget'] = SortedFilteredSelectMultiple()
elif db_field.name == "projects":
kwargs["widget"] = widgets.FilteredSelectMultiple("projects", is_stacked=False)
elif db_field.name == "project_umbrellas":
kwargs["widget"] = widgets.FilteredSelectMultiple("project umbrellas", is_stacked=False)
elif db_field.name == "keywords":
kwargs["widget"] = widgets.FilteredSelectMultiple("keywords", is_stacked=False)
return super(PublicationAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
admin.site.register(Person, PersonAdmin)
admin.site.register(Publication, PublicationAdmin)
admin.site.register(Talk, TalkAdmin)
admin.site.register(Project, ProjectAdmin)
admin.site.register(Poster, PosterAdmin)
admin.site.register(Keyword)
admin.site.register(News, NewsAdmin)
admin.site.register(Banner, BannerAdmin)
admin.site.register(Video, VideoAdmin)
admin.site.register(Photo, PhotoAdmin)
admin.site.register(Project_umbrella, ProjectUmbrellaAdmin)
admin.site.register(Sponsor)
# For modifying more on the front admin landing page, see https://medium.com/django-musings/customizing-the-django-admin-site-b82c7d325510
admin.site.index_title = "Makeability Lab Admin. Django version: " + django.get_version() + " ML Version: 0.5.7a"
| 58.99631 | 185 | 0.714411 | 14,197 | 0.887978 | 0 | 0 | 0 | 0 | 0 | 0 | 7,525 | 0.470665 |
8173fb74546ca4fcfb60600f03ab6b477c4abacb | 6,191 | py | Python | python/annotation_orthologs_inference.py | liebermanlab/wide-variant | 2fe4c54d1e4bcf0e5e06313cc9696588ab105653 | [
"Unlicense"
]
| null | null | null | python/annotation_orthologs_inference.py | liebermanlab/wide-variant | 2fe4c54d1e4bcf0e5e06313cc9696588ab105653 | [
"Unlicense"
]
| null | null | null | python/annotation_orthologs_inference.py | liebermanlab/wide-variant | 2fe4c54d1e4bcf0e5e06313cc9696588ab105653 | [
"Unlicense"
]
| null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 26 21:39:04 2019
@author: fmk
"""
import argparse,subprocess,string,random
import pandas as pd
''' positional and optional argument parser'''
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description='''\
Infer orthologs across two or more prokka-based annotations, and returns overview table for all genes.
Homology is inferred using CD-HIT and annotations need to be in fasta format (nucleotide (*.ffn) or amino acid (*.faa))
CD-HIT: %identity optional. Fixed: -s 0.9, ie. shorter sequences need to be at least 90% length of the representative of the cluster.
''',
epilog="Questions or comments? --> [email protected]")
parser.add_argument("-f", dest="file_sample_annotation", help="2-col TSV file with subject-identifier and annotation file path.",type=argparse.FileType('r'),required=True)
parser.add_argument('-p', dest="percentIdentity", action="store", default='0.98', help="Percent identity cd-hit. Default: 0.98")
parser.add_argument('-o', dest="outpath", action="store", help="Output path.",required=True)
parser.add_argument("-c", dest="cdhit", help="Path to CD-HIT executable", action="store",default="cd-hit")
args = parser.parse_args()
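# Example invocation (illustrative file names; flags as defined above):
#   python annotation_orthologs_inference.py -f subjects.tsv -o results/ \
#       -p 0.98 -c /usr/local/bin/cd-hit
# where subjects.tsv has two tab-separated columns: subject ID and the path
# to that subject's prokka annotation file (*.ffn or *.faa).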
''' FUNCTIONS'''
def fix_path(path):
# make sure path has trailing "/"
if path[-1] != "/":
path = path + "/"
return path
else:
return path
def read_merge_sample_annotation_file(file_sample_annotation):
# get list of annotation file paths, tuple/dict of subjectID and prokka-assigned gene-tag (use first line)
subj_tag_dict = {}
subprocess.run(['mkdir','-p',outpath],check=True)
with open(outpath+"merged_annotation.fa", 'w') as outfile:
# with open(file_sample_annotation,'r') as infile:
for line in file_sample_annotation:
line = line.strip().split('\t')
            print("subject entry: %s" % line)
# read annotation file: extract prokka-gene-tag (from 1st line) and merge all annotation files into one
with open(line[1]) as annofile:
# link subjectID with prokka gene tag
first_line = annofile.readline()
if first_line.startswith('>'):
prokka_gene_tag = first_line.strip().split(' ')[0].split('>')[1].split('_')[0]
subj_tag_dict[ line[0] ] = prokka_gene_tag
else:
raise ValueError('Annotation file does not start with ">": '+first_line+' in '+line[1])
# write annotation files into merged outfile
outfile.write(first_line) # necessary bcs already read
outfile.write(annofile.read())
file_sample_annotation.close()
return subj_tag_dict
def read_cdhit_cluster(cdhit_clstr,percentIdentity,prokka_tag_list):
# read cdhit results and build for each cluster entry in dict
ctr = 1
ctr_cluster_within_subject = 0
rdm_tag = ''.join(random.choice(string.ascii_lowercase) for x in range(4)) # added to cluster-tags to avoid confusion w/ different runs
saab_cluster_genes = {}
# value_default = ['NA' for i in prokka_tag_list]
with open(cdhit_clstr) as infile:
for line in infile:
if line.startswith('>'):
cluster_tag = "SAAB_" + "%05d" % ctr + "_pid" + percentIdentity + "_" + rdm_tag
saab_cluster_genes[cluster_tag] = ['NA' for i in prokka_tag_list]
ctr += 1
else:
line = line.strip().split('\t')[1].split(" ")[1].split('.')[0].split('>')[1] # remodel string so it matches prokka-gene_id eg. "0 10155aa, >JKPBNODD_00001... *"
subject_identifier_prokka = line.split('_')[0]
subject_index = prokka_tag_list.index( subject_identifier_prokka )
if saab_cluster_genes[cluster_tag][subject_index] == 'NA':
saab_cluster_genes[cluster_tag][subject_index] = line
else:
saab_cluster_genes[cluster_tag][subject_index] = saab_cluster_genes[cluster_tag][subject_index] + "," + line
ctr_cluster_within_subject += 1
if ctr_cluster_within_subject > 0:
print('NOTE: ' + str(ctr_cluster_within_subject) + " occasions where a gene cluster had >1 gene from the same individual assigned.")
return saab_cluster_genes
''' MAIN '''
# TEST Vars
#file_sample_annotation = "/Users/fmk/Documents/mit/stapAD/tmp/pycode/prokka_ffn/subject_4_9_16.list"
##annopath = "/Users/fmk/Documents/mit/stapAD/mlst"
##filetype = "txt"
#outpath = "/Users/fmk/Documents/mit/stapAD/tmp/pycode"
#percentIdentity=0.95
#cdhit_executable = '/usr/local/bin/cd-hit'
if __name__ == "__main__":
# assign argparse arguments
file_sample_annotation = args.file_sample_annotation
# annopath = fix_path(args.annopath) # fix path to annotation has trailing "/"
outpath = fix_path(args.outpath)
# filetype = args.filetype
cdhit_executable = args.cdhit
percentIdentity = args.percentIdentity
# get concatenated annotation file (output: merged_annotation.fa) and dict[subject]=prokka-tag
subj_tag_dict = read_merge_sample_annotation_file(file_sample_annotation)
subject_list_ord = list(subj_tag_dict.keys())
prokkaTag_list_ord = [ subj_tag_dict[k] for k in subject_list_ord ]
# cd-hit
command_cdhit = cdhit_executable + " -s 0.9 -c " + percentIdentity + " -i " + outpath + "merged_annotation.fa" + " -o " + outpath+"cdhit_results"
subprocess.run(command_cdhit,shell=True)
# read-in cdhit results: dict[SAAB_XXXXX_pidZZZ_YYY]=[geneX,geneY,geneZ]
cdhit_res_dict = read_cdhit_cluster(outpath+"cdhit_results.clstr",percentIdentity,prokkaTag_list_ord)
# build table of gene annotation
cdhit_res_df = pd.DataFrame.from_dict(cdhit_res_dict,orient='index',columns=subject_list_ord)
# write cdhit res
cdhit_res_df.to_csv(outpath+'annotation_orthologs.tsv',sep="\t")
| 45.189781 | 182 | 0.65466 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,597 | 0.41948 |
81740534e476d32c799f65e43f16716d4ed45822 | 708 | py | Python | tests/test_vtable.py | matthewpruett/angr | bfba2af1ea2eb941001339f47a1264a685c60eec | [
"BSD-2-Clause"
]
| 6,132 | 2015-08-06T23:24:47.000Z | 2022-03-31T21:49:34.000Z | tests/test_vtable.py | matthewpruett/angr | bfba2af1ea2eb941001339f47a1264a685c60eec | [
"BSD-2-Clause"
]
| 2,272 | 2015-08-10T08:40:07.000Z | 2022-03-31T23:46:44.000Z | tests/test_vtable.py | matthewpruett/angr | bfba2af1ea2eb941001339f47a1264a685c60eec | [
"BSD-2-Clause"
]
| 1,155 | 2015-08-06T23:37:39.000Z | 2022-03-31T05:54:11.000Z | import os
import angr
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
def test_vtable_extraction_x86_64():
p = angr.Project(os.path.join(test_location, "x86_64", "cpp_classes"), auto_load_libs=False)
vtables_sizes = {0x403cb0: 24, 0x403cd8: 16, 0x403cf8: 16, 0x403d18: 16}
vtable_analysis = p.analyses.VtableFinder()
vtables = vtable_analysis.vtables_list
assert len(vtables) == 4
for vtable in vtables:
assert vtable.vaddr in [0x403cb0, 0x403cd8, 0x403cf8, 0x403d18]
assert vtables_sizes[vtable.vaddr] == vtable.size
if __name__ == "__main__":
test_vtable_extraction_x86_64()
| 32.181818 | 107 | 0.689266 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 56 | 0.079096 |
81741edd16ac8dabfdb435a60121ed91382e9d89 | 465 | py | Python | sendmail.py | loitd/buzreportgenerator | 8d07be40b68859ca15173f3b229dd93f6bb39809 | [
"MIT"
]
| null | null | null | sendmail.py | loitd/buzreportgenerator | 8d07be40b68859ca15173f3b229dd93f6bb39809 | [
"MIT"
]
| null | null | null | sendmail.py | loitd/buzreportgenerator | 8d07be40b68859ca15173f3b229dd93f6bb39809 | [
"MIT"
]
| null | null | null | from zeep import Client
def test():
client = Client('http://www.dneonline.com/calculator.asmx?wsdl')
result = client.service.Add(100,220)
print(result)
def test2():
client = Client('http://172.16.237.11:8080/SendNotify_Interface_Gmail/services/Notify?wsdl')
result = client.service.sendEmail("[email protected]", "deglkxtfyjpnjqtq", "Ahihi", "Xin chao", "[email protected]", 1)
print(result)
if __name__ == "__main__":
test2() | 33.214286 | 131 | 0.696774 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 213 | 0.458065 |
8174be4107d534513138717c81ca4815dbd17aaf | 2,760 | py | Python | pommerman/agents/http_agent.py | KaixiangLin/playground | a0eb299f4772bada1c528a881f3bf26404b131aa | [
"Apache-2.0"
]
| 2 | 2018-11-10T08:31:13.000Z | 2018-11-13T08:16:45.000Z | pommerman/agents/http_agent.py | KaixiangLin/playground | a0eb299f4772bada1c528a881f3bf26404b131aa | [
"Apache-2.0"
]
| null | null | null | pommerman/agents/http_agent.py | KaixiangLin/playground | a0eb299f4772bada1c528a881f3bf26404b131aa | [
"Apache-2.0"
]
| null | null | null | '''The HTTP agent - provides observation using http push to remote
agent and expects action in the reply'''
import json
import time
import os
import threading
import requests
from . import BaseAgent
from .. import utility
from .. import characters
class HttpAgent(BaseAgent):
"""The HTTP Agent that connects to a port with a remote agent where the
character runs. It uses the same interface as the docker agent and
is useful for debugging."""
def __init__(self,
port=8080,
host='localhost',
timeout=120,
character=characters.Bomber):
self._port = port
self._host = host
self._timeout = timeout
super(HttpAgent, self).__init__(character)
self._wait_for_remote()
def _wait_for_remote(self):
"""Wait for network service to appear. A timeout of 0 waits forever."""
timeout = self._timeout
backoff = .25
max_backoff = min(timeout, 16)
if timeout:
# time module is needed to calc timeout shared between two exceptions
end = time.time() + timeout
while True:
try:
now = time.time()
if timeout and end < now:
print("Timed out - %s:%s" % (self._host, self._port))
raise
request_url = 'http://%s:%s/ping' % (self._host, self._port)
req = requests.get(request_url)
self._acknowledged = True
return True
except requests.exceptions.ConnectionError as e:
print("ConnectionError: ", e)
backoff = min(max_backoff, backoff * 2)
time.sleep(backoff)
except requests.exceptions.HTTPError as e:
print("HTTPError: ", e)
backoff = min(max_backoff, backoff * 2)
time.sleep(backoff)
def act(self, obs, action_space):
obs_serialized = json.dumps(obs, cls=utility.PommermanJSONEncoder)
request_url = "http://{}:{}/action".format(self._host, self._port)
try:
req = requests.post(
request_url,
timeout=0.15,
json={
"obs":
obs_serialized,
"action_space":
json.dumps(action_space, cls=utility.PommermanJSONEncoder)
})
action = req.json()['action']
except requests.exceptions.Timeout as e:
print('Timeout!')
# TODO: Fix this. It's ugly.
action = [0] * len(action_space.shape)
if len(action) == 1:
action = action[0]
return action
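# Usage sketch (illustrative; assumes a remote agent is already serving
# /ping and /action on the given host/port):
#   agent = HttpAgent(port=8080, host='localhost')
#   action = agent.act(obs, action_space)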
| 34.074074 | 81 | 0.544565 | 2,505 | 0.907609 | 0 | 0 | 0 | 0 | 0 | 0 | 597 | 0.216304 |
8174d6a81d47ed944222a745013e7d241d84e72a | 737 | py | Python | cacao_app/event/serializers.py | CacaoMovil/guia-de-cacao-django | 14d18edb76502736f6f31955509c3b413f1f91fc | [
"BSD-3-Clause"
]
| 1 | 2016-03-07T17:03:45.000Z | 2016-03-07T17:03:45.000Z | cacao_app/event/serializers.py | CacaoMovil/guia-de-cacao-django | 14d18edb76502736f6f31955509c3b413f1f91fc | [
"BSD-3-Clause"
]
| 4 | 2016-04-29T20:48:31.000Z | 2021-06-10T20:39:26.000Z | cacao_app/event/serializers.py | CacaoMovil/guia-de-cacao-django | 14d18edb76502736f6f31955509c3b413f1f91fc | [
"BSD-3-Clause"
]
| 3 | 2016-03-04T19:46:45.000Z | 2016-05-11T19:46:00.000Z | # -*- coding: utf-8 -*-
from rest_framework import serializers
from django_countries.serializer_fields import CountryField
from .models import Event, CountryEvent
class CountryEventSerializer(serializers.ModelSerializer):
code = serializers.ReadOnlyField(source='country.code')
name = serializers.SerializerMethodField()
class Meta:
model = CountryEvent
fields = ('code', 'name')
def get_name(self, obj):
return obj.country.name
class EventsSerializer(serializers.ModelSerializer):
events_country = CountryEventSerializer(many=True, read_only=True)
class Meta:
model = Event
fields = (
'name', 'description', 'start', 'end', 'events_country'
)
| 25.413793 | 70 | 0.693351 | 566 | 0.767978 | 0 | 0 | 0 | 0 | 0 | 0 | 96 | 0.130258 |
817503f7a5b9852ce0db8730044ea4170fe4eb91 | 13,998 | py | Python | examples/map.py | jlsajfj/NBT | a7aaaadce423cd50f941ccd5cd25fbd071651336 | [
"MIT"
]
| 241 | 2015-01-14T22:49:03.000Z | 2022-03-17T06:46:44.000Z | examples/map.py | jlsajfj/NBT | a7aaaadce423cd50f941ccd5cd25fbd071651336 | [
"MIT"
]
| 78 | 2015-01-02T15:01:06.000Z | 2022-03-12T16:06:23.000Z | examples/map.py | jlsajfj/NBT | a7aaaadce423cd50f941ccd5cd25fbd071651336 | [
"MIT"
]
| 87 | 2015-01-24T20:02:29.000Z | 2022-03-17T21:32:24.000Z | #!/usr/bin/env python
"""
Prints a map of the entire world.
"""
import os, sys
import math
from struct import pack
# local module
try:
import nbt
except ImportError:
# nbt not in search path. Let's see if it can be found in the parent folder
extrasearchpath = os.path.realpath(os.path.join(__file__,os.pardir,os.pardir))
if not os.path.exists(os.path.join(extrasearchpath,'nbt')):
raise
sys.path.append(extrasearchpath)
from nbt.region import RegionFile
from nbt.chunk import Chunk
from nbt.world import WorldFolder,McRegionWorldFolder
# PIL module (not build-in)
try:
from PIL import Image
except ImportError:
# PIL not in search path. Let's see if it can be found in the parent folder
sys.stderr.write("Module PIL/Image not found. Pillow (a PIL fork) can be found at http://python-imaging.github.io/\n")
# Note: it may also be possible that PIL is installed, but JPEG support is disabled or broken
sys.exit(70) # EX_SOFTWARE
def get_heightmap_image(chunk, buffer=False, gmin=False, gmax=False):
points = chunk.blocks.generate_heightmap(buffer, True)
# Normalize the points
hmin = min(points) if (gmin == False) else gmin # Allow setting the min/max explicitly, in case this is part of a bigger map
hmax = max(points) if (gmax == False) else gmax
hdelta = hmax-hmin+0.0
pixels = ""
for y in range(16):
for x in range(16):
# pix X => mc -Z
# pix Y => mc X
offset = (15-x)*16+y
height = int((points[offset]-hmin)/hdelta*255)
if (height < 0): height = 0
if (height > 255): height = 255
pixels += pack(">B", height)
    im = Image.frombytes('L', (16,16), pixels)
return im
# List of blocks to ignore
# Uncomment all the lines to show underground structures
# TODO: move this list into a separate config file
block_ignore = [
'air', # At least this one
# 'cave_air', 'water', 'lava', 'snow', 'ice',
# 'grass', 'tall_grass', 'dead_bush',
# 'seagrass', 'tall_seagrass', 'kelp', 'kelp_plant',
# 'dandelion', 'poppy', 'oxeye_daisy', 'white_tulip',
# 'azure_bluet', 'lilac', 'rose_bush', 'peony', 'blue_orchid',
# 'lily_pad', 'sugar_cane', 'vine', 'pumpkin', 'cactus',
# 'wheat', 'potatoes', 'beetroots', 'carrots',
# 'oak_leaves', 'dark_oak_leaves', 'birch_leaves',
# 'acacia_leaves', 'spruce_leaves',
# 'oak_log', 'dark_oak_log', 'birch_log',
# 'acacia_log', 'spruce_log',
# 'brown_mushroom', 'red_mushroom',
# 'brown_mushroom_block', 'red_mushroom_block', 'mushroom_stem',
# 'grass_block', 'grass_path', 'farmland', 'dirt',
# 'stone', 'sand', 'gravel', 'clay',
# 'sandstone', 'diorite', 'andesite', 'granite', 'obsidian',
# 'coal_ore', 'iron_ore', 'gold_ore', 'diamond_ore',
# 'redstone_ore', 'lapis_ore', 'emerald_ore',
# 'cobweb',
]
# Map of block colors from names
# Legacy block numeric identifiers are now hidden by Block class
# and mapped to alpha identifiers in best effort
# TODO: move this map into a separate config file
block_colors = {
'acacia_leaves': {'h':114, 's':64, 'l':22 },
'acacia_log': {'h':35, 's':93, 'l':30 },
'air': {'h':0, 's':0, 'l':0 },
'andesite': {'h':0, 's':0, 'l':32 },
'azure_bluet': {'h':0, 's':0, 'l':100},
'bedrock': {'h':0, 's':0, 'l':10 },
'birch_leaves': {'h':114, 's':64, 'l':22 },
'birch_log': {'h':35, 's':93, 'l':30 },
'blue_orchid': {'h':0, 's':0, 'l':100},
'bookshelf': {'h':0, 's':0, 'l':100},
'brown_mushroom': {'h':0, 's':0, 'l':100},
'brown_mushroom_block': {'h':0, 's':0, 'l':100},
'cactus': {'h':126, 's':61, 'l':20 },
'cave_air': {'h':0, 's':0, 'l':0 },
'chest': {'h':0, 's':100, 'l':50 },
'clay': {'h':7, 's':62, 'l':23 },
'coal_ore': {'h':0, 's':0, 'l':10 },
'cobblestone': {'h':0, 's':0, 'l':25 },
'cobblestone_stairs': {'h':0, 's':0, 'l':25 },
'crafting_table': {'h':0, 's':0, 'l':100},
'dandelion': {'h':60, 's':100, 'l':60 },
'dark_oak_leaves': {'h':114, 's':64, 'l':22 },
'dark_oak_log': {'h':35, 's':93, 'l':30 },
'dark_oak_planks': {'h':35, 's':93, 'l':30 },
'dead_bush': {'h':0, 's':0, 'l':100},
'diorite': {'h':0, 's':0, 'l':32 },
'dirt': {'h':27, 's':51, 'l':15 },
'end_portal_frame': {'h':0, 's':100, 'l':50 },
'farmland': {'h':35, 's':93, 'l':15 },
'fire': {'h':55, 's':100, 'l':50 },
'flowing_lava': {'h':16, 's':100, 'l':48 },
'flowing_water': {'h':228, 's':50, 'l':23 },
'glass_pane': {'h':0, 's':0, 'l':100},
'granite': {'h':0, 's':0, 'l':32 },
'grass': {'h':94, 's':42, 'l':25 },
'grass_block': {'h':94, 's':42, 'l':32 },
'gravel': {'h':21, 's':18, 'l':20 },
'ice': {'h':240, 's':10, 'l':95 },
'infested_stone': {'h':320, 's':100, 'l':50 },
'iron_ore': {'h':22, 's':65, 'l':61 },
'iron_bars': {'h':22, 's':65, 'l':61 },
'ladder': {'h':35, 's':93, 'l':30 },
'lava': {'h':16, 's':100, 'l':48 },
'lilac': {'h':0, 's':0, 'l':100},
'lily_pad': {'h':114, 's':64, 'l':18 },
'lit_pumpkin': {'h':24, 's':100, 'l':45 },
'mossy_cobblestone': {'h':115, 's':30, 'l':50 },
'mushroom_stem': {'h':0, 's':0, 'l':100},
'oak_door': {'h':35, 's':93, 'l':30 },
'oak_fence': {'h':35, 's':93, 'l':30 },
'oak_fence_gate': {'h':35, 's':93, 'l':30 },
'oak_leaves': {'h':114, 's':64, 'l':22 },
'oak_log': {'h':35, 's':93, 'l':30 },
'oak_planks': {'h':35, 's':93, 'l':30 },
'oak_pressure_plate': {'h':35, 's':93, 'l':30 },
'oak_stairs': {'h':114, 's':64, 'l':22 },
'peony': {'h':0, 's':0, 'l':100},
'pink_tulip': {'h':0, 's':0, 'l':0 },
'poppy': {'h':0, 's':100, 'l':50 },
'pumpkin': {'h':24, 's':100, 'l':45 },
'rail': {'h':33, 's':81, 'l':50 },
'red_mushroom': {'h':0, 's':50, 'l':20 },
'red_mushroom_block': {'h':0, 's':50, 'l':20 },
'rose_bush': {'h':0, 's':0, 'l':100},
'sugar_cane': {'h':123, 's':70, 'l':50 },
'sand': {'h':53, 's':22, 'l':58 },
'sandstone': {'h':48, 's':31, 'l':40 },
'seagrass': {'h':94, 's':42, 'l':25 },
'sign': {'h':114, 's':64, 'l':22 },
'spruce_leaves': {'h':114, 's':64, 'l':22 },
'spruce_log': {'h':35, 's':93, 'l':30 },
'stone': {'h':0, 's':0, 'l':32 },
'stone_slab': {'h':0, 's':0, 'l':32 },
'tall_grass': {'h':94, 's':42, 'l':25 },
'tall_seagrass': {'h':94, 's':42, 'l':25 },
'torch': {'h':60, 's':100, 'l':50 },
'snow': {'h':240, 's':10, 'l':85 },
'spawner': {'h':180, 's':100, 'l':50 },
'vine': {'h':114, 's':64, 'l':18 },
'wall_torch': {'h':60, 's':100, 'l':50 },
'water': {'h':228, 's':50, 'l':23 },
'wheat': {'h':123, 's':60, 'l':50 },
'white_wool': {'h':0, 's':0, 'l':100},
}
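# To support an additional block, add an HSL entry to the map above, e.g.
# (illustrative block name and color choice):
#   'netherrack': {'h':0, 's':60, 'l':25},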
def get_map(chunk):
# Show an image of the chunk from above
pixels = b""
for z in range(16):
for x in range(16):
# Find the highest block in this column
max_height = chunk.get_max_height()
ground_height = max_height
tints = []
for y in range(max_height,-1,-1):
block_id = chunk.get_block(x, y, z)
if block_id != None:
#block_data = 0 # TODO: use block properties
#if (block_id == 'water' or block_id == 'water'):
#tints.append({'h':228, 's':50, 'l':23}) # Water
#elif (block_id == 'leaves'): # TODO: old id - update
#if (block_data == 1):
#tints.append({'h':114, 's':64, 'l':22}) # Redwood Leaves
#elif (block_data == 2):
#tints.append({'h':93, 's':39, 'l':10}) # Birch Leaves
#else:
#tints.append({'h':114, 's':64, 'l':22}) # Normal Leaves
#elif (block_id == 'ice'):
#tints.append({'h':240, 's':5, 'l':95}) # Ice
#elif (block_id == 'fire'):
#tints.append({'h':55, 's':100, 'l':50}) # Fire
#elif (block_id != 'air' or block_id != 'cave_air' or y == 0):
if (block_id not in block_ignore or y == 0):
# Here is ground level
ground_height = y
break
if block_id != None:
if block_id in block_colors:
color = block_colors[block_id]
else:
color = {'h':0, 's':0, 'l':100}
print("warning: unknown color for block id: %s" % block_id)
print("hint: add that block to the 'block_colors' map")
else:
color = {'h':0, 's':0, 'l':0}
height_shift = 0 #(ground_height-64)*0.25
final_color = {'h':color['h'], 's':color['s'], 'l':color['l'] + height_shift}
if final_color['l'] > 100: final_color['l'] = 100
if final_color['l'] < 0: final_color['l'] = 0
# Apply tints from translucent blocks
for tint in reversed(tints):
final_color = hsl_slide(final_color, tint, 0.4)
rgb = hsl2rgb(final_color['h'], final_color['s'], final_color['l'])
pixels += pack("BBB", rgb[0], rgb[1], rgb[2])
im = Image.frombytes('RGB', (16,16), pixels)
return im
## Color functions for map generation ##
# Hue given in degrees,
# saturation and lightness given either in range 0-1 or 0-100 and returned in kind
def hsl_slide(hsl1, hsl2, ratio):
if (abs(hsl2['h'] - hsl1['h']) > 180):
if (hsl1['h'] > hsl2['h']):
hsl1['h'] -= 360
else:
hsl1['h'] += 360
# Find location of two colors on the H/S color circle
p1x = math.cos(math.radians(hsl1['h']))*hsl1['s']
p1y = math.sin(math.radians(hsl1['h']))*hsl1['s']
p2x = math.cos(math.radians(hsl2['h']))*hsl2['s']
p2y = math.sin(math.radians(hsl2['h']))*hsl2['s']
# Slide part of the way from tint to base color
avg_x = p1x + ratio*(p2x-p1x)
avg_y = p1y + ratio*(p2y-p1y)
    # atan2/hypot handle avg_x == 0 and all quadrants correctly
    avg_h = math.atan2(avg_y, avg_x)
    avg_s = math.hypot(avg_x, avg_y)
avg_l = hsl1['l'] + ratio*(hsl2['l']-hsl1['l'])
avg_h = math.degrees(avg_h)
#print('tint: %s base: %s avg: %s %s %s' % (tint,final_color,avg_h,avg_s,avg_l))
return {'h':avg_h, 's':avg_s, 'l':avg_l}
# From http://www.easyrgb.com/index.php?X=MATH&H=19#text19
def hsl2rgb(H,S,L):
H = H/360.0
S = S/100.0 # Turn into a percentage
L = L/100.0
if (S == 0):
return (int(L*255), int(L*255), int(L*255))
var_2 = L * (1+S) if (L < 0.5) else (L+S) - (S*L)
var_1 = 2*L - var_2
def hue2rgb(v1, v2, vH):
if (vH < 0): vH += 1
if (vH > 1): vH -= 1
if ((6*vH)<1): return v1 + (v2-v1)*6*vH
if ((2*vH)<1): return v2
if ((3*vH)<2): return v1 + (v2-v1)*(2/3.0-vH)*6
return v1
R = int(255*hue2rgb(var_1, var_2, H + (1.0/3)))
G = int(255*hue2rgb(var_1, var_2, H))
B = int(255*hue2rgb(var_1, var_2, H - (1.0/3)))
return (R,G,B)
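# Worked example: hsl2rgb(120, 100, 50) -> (0, 255, 0) (pure green):
#   H=1/3, S=1.0, L=0.5 give var_2=1.0 and var_1=0.0, so the green channel
#   saturates while red and blue fall back to var_1.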
def main(world_folder, show=True):
world = WorldFolder(world_folder)
bb = world.get_boundingbox()
world_map = Image.new('RGB', (16*bb.lenx(),16*bb.lenz()))
t = world.chunk_count()
try:
        i = 0
for chunk in world.iter_chunks():
if i % 50 ==0:
sys.stdout.write("Rendering image")
elif i % 2 == 0:
sys.stdout.write(".")
sys.stdout.flush()
elif i % 50 == 49:
sys.stdout.write("%5.1f%%\n" % (100*i/t))
i +=1
chunkmap = get_map(chunk)
x,z = chunk.get_coords()
world_map.paste(chunkmap, (16*(x-bb.minx),16*(z-bb.minz)))
print(" done\n")
filename = os.path.basename(world_folder)+".png"
world_map.save(filename,"PNG")
print("Saved map as %s" % filename)
except KeyboardInterrupt:
print(" aborted\n")
filename = os.path.basename(world_folder)+".partial.png"
world_map.save(filename,"PNG")
print("Saved map as %s" % filename)
return 75 # EX_TEMPFAIL
if show:
world_map.show()
return 0 # NOERR
if __name__ == '__main__':
if (len(sys.argv) == 1):
print("No world folder specified!")
sys.exit(64) # EX_USAGE
if sys.argv[1] == '--noshow' and len(sys.argv) > 2:
show = False
world_folder = sys.argv[2]
else:
show = True
world_folder = sys.argv[1]
# clean path name, eliminate trailing slashes. required for os.path.basename()
world_folder = os.path.normpath(world_folder)
if (not os.path.exists(world_folder)):
print("No such folder as "+world_folder)
sys.exit(72) # EX_IOERR
sys.exit(main(world_folder, show))
| 41.660714 | 128 | 0.473282 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,260 | 0.375768 |
81763b53608a015d4f73d9be3a5324c6bd08db61 | 136 | py | Python | flaskcbv/protos/simple/apps/main/urls.py | procool/flaskcbv | 18c254c10ef03145073e1264a06a0313e811ad29 | [
"BSD-2-Clause"
]
| 1 | 2020-02-24T13:08:16.000Z | 2020-02-24T13:08:16.000Z | flaskcbv/protos/simple/apps/main/urls.py | procool/flaskcbv | 18c254c10ef03145073e1264a06a0313e811ad29 | [
"BSD-2-Clause"
]
| null | null | null | flaskcbv/protos/simple/apps/main/urls.py | procool/flaskcbv | 18c254c10ef03145073e1264a06a0313e811ad29 | [
"BSD-2-Clause"
]
| null | null | null | from flaskcbv.url import Url, make_urls
from .views import mainView
namespases = make_urls(
Url('', mainView(), name="main"),
)
| 13.6 | 39 | 0.691176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0.058824 |
81770013c6cc12c6db69c1cb5d883f8060329eda | 536 | py | Python | main/permissions.py | hellojoshuatonga/notepik | 8f251fe9a689a9be8248d4da6260fe7c8742e3c0 | [
"MIT"
]
| null | null | null | main/permissions.py | hellojoshuatonga/notepik | 8f251fe9a689a9be8248d4da6260fe7c8742e3c0 | [
"MIT"
]
| null | null | null | main/permissions.py | hellojoshuatonga/notepik | 8f251fe9a689a9be8248d4da6260fe7c8742e3c0 | [
"MIT"
]
| null | null | null | # Rest framework
from rest_framework import permissions
class IsAuthorOrReadOnly(permissions.BasePermission):
"""
    Object-level permission: checks whether the requesting user is the author. Authors get read and write access; everyone else gets read-only access.
"""
def has_object_permission(self, request, view, obj):
        # allow safe (read-only) methods such as GET for everyone
if request.method in permissions.SAFE_METHODS:
return True
return obj.author == request.user
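# Usage sketch (standard DRF wiring; the view name is illustrative):
#   class NoteDetail(generics.RetrieveUpdateDestroyAPIView):
#       permission_classes = [IsAuthorOrReadOnly]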
| 35.733333 | 175 | 0.718284 | 477 | 0.889925 | 0 | 0 | 0 | 0 | 0 | 0 | 247 | 0.460821 |
8177b1f754a6ce02d1d064390b7211e8eae1df80 | 8,595 | py | Python | femagtools/vtu.py | dapu/femagtools | 95eaf750adc2013232cdf482e523b3900ac6eb08 | [
"BSD-2-Clause"
]
| null | null | null | femagtools/vtu.py | dapu/femagtools | 95eaf750adc2013232cdf482e523b3900ac6eb08 | [
"BSD-2-Clause"
]
| null | null | null | femagtools/vtu.py | dapu/femagtools | 95eaf750adc2013232cdf482e523b3900ac6eb08 | [
"BSD-2-Clause"
]
| null | null | null | """
femagtools.vtu
~~~~~~~~~~~~~~
Read FEMAG vtu files
"""
import vtk
import pathlib
import numpy as np
class Reader(object):
'''Class to read vtu-files'''
def __init__(self, pathname):
'''Read the vtu-files
Parameters
----------
pathname : str
Directory of result files (vtu-files) or a single vtu file
'''
self.data = {}
self.reader = vtk.vtkXMLUnstructuredGridReader()
self.output = self.reader.GetOutput()
self.field_data_names = []
self.point_data_names = []
self.cell_data_names = []
if pathlib.Path(pathname).suffix == '.vtu':
self.filenames = [pathlib.Path(pathname)]
else:
self.filenames = sorted(pathlib.Path(pathname).glob(
"*.vtu"))
self.reader.SetFileName(str(self.filenames[0]))
self.reader.Update()
self.field_data_names = [
self.output.GetFieldData().GetAbstractArray(i).GetName()
for i in range(self.output.GetFieldData().GetNumberOfArrays())]
self.point_data_names = [
self.output.GetPointData().GetAbstractArray(i).GetName()
for i in range(self.output.GetPointData().GetNumberOfArrays())]
self.cell_data_names = [
self.output.GetCellData().GetAbstractArray(i).GetName()
for i in range(self.output.GetCellData().GetNumberOfArrays())]
self.set_time_window(0.0, 0.0)
def get_data_names(self):
'''Read the list of values stored in the vtu files
Parameters
----------
None
Returns
-------
List of values stored in the vtu files
'''
return (self.field_data_names +
self.point_data_names +
self.cell_data_names)
def read_data(self, data_list):
'''Extracts data from the vtu files
Parameters
----------
data_list : fist of str
List of values to extract from vtu_files
'''
for data_name in data_list:
if data_name in self.field_data_names:
self.data[data_name] = []
elif data_name in self.point_data_names:
self.data[data_name] = []
elif data_name in self.cell_data_names:
self.data[data_name] = []
else:
raise Exception('unknown data name "' + data_name+'"')
for filename in self.filenames:
self.reader.SetFileName(str(filename))
self.reader.Update()
for data_name in data_list:
if data_name in self.field_data_names:
self.data[data_name].append(
self.output.GetFieldData().GetAbstractArray(data_name))
if data_name in self.point_data_names:
self.data[data_name].append(
self.output.GetPointData().GetAbstractArray(data_name))
if data_name in self.cell_data_names:
self.data[data_name].append(
self.output.GetCellData().GetAbstractArray(data_name))
return "done"
def set_time_window(self, start, end):
'''Set time window
Parameters
----------
start: float
Start of the time window
end: float
End of the time window
Only values within the time window are output by the functions
get_field_vector
get_point_vector
get_cell_vector
get_data_vector
At start = 0.0 the values are read out starting from the first value
At end = 0.0 the values are read out up to the last value
'''
try:
if "time [s]" not in self.data:
self.read_data(['time [s]'])
if start == 0 or start <= self.data['time [s]'][0].GetValue(0):
self.istart = 0
else:
self.istart = 0
for i in range(len(self.data['time [s]'])):
if start >= self.data['time [s]'][i].GetValue(0):
self.istart = i+1
if end == 0 or end >= self.data['time [s]'][-1].GetValue(0):
self.iend = len(self.data['time [s]'])
else:
self.iend = 0
for i in range(len(self.data['time [s]'])):
if end <= self.data['time [s]'][i].GetValue(0):
self.iend = i
        except Exception:
            # no time data available: fall back to the full range
            self.istart = None
            self.iend = None
def get_field_vector(self, field_data):
'''Read field data
Parameters
----------
field_data : str
Name of field to read
Returns
-------
field_vec : list of float
List of field values within the time window
'''
if field_data not in self.data:
self.read_data([field_data])
if self.istart:
start = self.istart
else:
start = 0
if self.iend:
end = self.iend
else:
end = len(self.data[field_data])
field_vec = []
# for i in range(self.istart,self.iend):
for i in range(start, end):
field_vec.append(self.data[field_data][i].GetValue(0))
return field_vec
# pnt = node-key, >0
def get_point_vector(self, pnt_data, pnt):
'''Read point data
Parameters
----------
point_data : str
Name of field to read
pnt : int
Key of point
Returns
-------
point_vec : list of float
List of point values within the time window
'''
if pnt_data not in self.data:
self.read_data([pnt_data])
if self.istart:
start = self.istart
else:
start = 0
if self.iend:
end = self.iend
else:
end = len(self.data[pnt_data])
point_vec = []
for i in range(start, end):
point_vec.append(self.data[pnt_data][i].GetValue(pnt-1))
return point_vec
def get_cell_vector(self, cell_data, cell):
'''Read cell data
Parameters
----------
cell_data : str
Name of field to read
cell : int
Key of cell
Returns
-------
cell_vec : list of float
List of cell values within the time window
'''
if cell_data not in self.data:
self.read_data([cell_data])
i = self.cell_data_names.index(cell_data)
noc = self.output.GetCellData().GetAbstractArray(i).GetNumberOfComponents()
if noc == 1:
cell_vec = []
else:
cell_vec_x = []
cell_vec_y = []
cell_vec_z = []
if self.istart:
start = self.istart
else:
start = 0
if self.iend:
end = self.iend
else:
end = int(len(self.data[cell_data]))
for i in range(start, end):
if noc == 1:
cell_vec.append(self.data[cell_data][i].GetValue(cell-1))
else:
cell_vec_x.append(
self.data[cell_data][i].GetValue(noc*(cell-1)))
cell_vec_y.append(
self.data[cell_data][i].GetValue(noc*(cell-1)+1))
cell_vec_z.append(
self.data[cell_data][i].GetValue(noc*(cell-1)+2))
if noc == 1:
return cell_vec
else:
return [cell_vec_x, cell_vec_y, cell_vec_z]
def get_data_vector(self, data_name, key=0):
        '''Read data of field, point or cell
Parameters
----------
data_name : str
Name of data to read
        key : int (optional)
Key of point or cell
Returns
-------
data_vec : list of float
List of values within the time window
'''
if data_name in self.field_data_names:
return self.get_field_vector(data_name)
if data_name in self.point_data_names:
return self.get_point_vector(data_name, key)
if data_name in self.cell_data_names:
return self.get_cell_vector(data_name, key)
return []
def read(filename):
"""
Read vtu file and return Reader object.
Arguments:
filename: name of vtu file to be read
"""
return Reader(filename)
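# Usage sketch (illustrative path; 'time [s]' is a data name the Reader
# itself relies on):
#   r = read('results/')                  # directory of FEMAG vtu files
#   print(r.get_data_names())             # available field/point/cell data
#   vec = r.get_data_vector('time [s]')   # values within the time window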
| 31.254545 | 83 | 0.52135 | 8,303 | 0.966027 | 0 | 0 | 0 | 0 | 0 | 0 | 2,615 | 0.304247 |
81780d1d21d48080345dc52c64611a0acffa03d7 | 232 | py | Python | __init__.py | rolc/python-package | 272fb6da9d7aa3c2a0c53dea6d4e958409ac5d12 | [
"MIT"
]
| null | null | null | __init__.py | rolc/python-package | 272fb6da9d7aa3c2a0c53dea6d4e958409ac5d12 | [
"MIT"
]
| null | null | null | __init__.py | rolc/python-package | 272fb6da9d7aa3c2a0c53dea6d4e958409ac5d12 | [
"MIT"
]
| null | null | null | #!/usr/bin/python
#-------------------------------IMPORT--------------------------------#
from lib import *
#-------------------------------EXPORT--------------------------------#
__all__ = ['<#PREFIX#>_app','<#PREFIX#>_index']
| 23.2 | 71 | 0.284483 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 193 | 0.831897 |
81787499b7aab46ab4834d4e0428415d4b205073 | 916 | py | Python | svm-rank/bag_of_words.py | Anthony-Alridge/individual_project | 9013aba2de92099dea5a703486da3dfab1dfbb61 | [
"MIT"
]
| null | null | null | svm-rank/bag_of_words.py | Anthony-Alridge/individual_project | 9013aba2de92099dea5a703486da3dfab1dfbb61 | [
"MIT"
]
| null | null | null | svm-rank/bag_of_words.py | Anthony-Alridge/individual_project | 9013aba2de92099dea5a703486da3dfab1dfbb61 | [
"MIT"
]
| null | null | null | from collections import Counter
import numpy as np
def keep_word(word):
return word.is_alpha
def unique_words(problems):
return set([word.lemma_ for problem in problems for word in problem.tokens() if keep_word(word)])
def create_word2idx(vocab):
return {word: idx for idx, word in enumerate(vocab)}
class BagOfWordsFeature():
def __init__(self, corpus):
self.vocab = list(unique_words(corpus))
# Mapping from words to their index in the feature vector.
self.word2idx = create_word2idx(self.vocab)
def process(self, problem):
features = np.zeros(len(self.vocab))
words = [word.lemma_ for word in problem.tokens() if keep_word(word)]
freqs = Counter(words)
for word in freqs:
# Skip unknown words.
if word in self.word2idx:
features[self.word2idx[word]] = freqs[word]
return features
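# Usage sketch (illustrative; assumes each problem in `problems` exposes
# spaCy-like tokens with .is_alpha and .lemma_ via problem.tokens()):
#   bow = BagOfWordsFeature(problems)
#   vec = bow.process(problems[0])   # numpy array of lemma frequencies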
| 28.625 | 101 | 0.663755 | 595 | 0.649563 | 0 | 0 | 0 | 0 | 0 | 0 | 79 | 0.086245 |
81788a0ffc02bedb32998891f0a147adfc80c30e | 1,507 | py | Python | pymps/ansatz/tests/test_sweeper.py | GiggleLiu/pymps | c8314581010d68d3fa34af6e87b6af2969fc261d | [
"MIT"
]
| 4 | 2018-02-17T05:35:54.000Z | 2021-09-12T10:14:57.000Z | pymps/ansatz/tests/test_sweeper.py | GiggleLiu/pymps | c8314581010d68d3fa34af6e87b6af2969fc261d | [
"MIT"
]
| null | null | null | pymps/ansatz/tests/test_sweeper.py | GiggleLiu/pymps | c8314581010d68d3fa34af6e87b6af2969fc261d | [
"MIT"
]
| null | null | null | #!/usr/bin/python
'''
Tests for MPS and MPO
'''
from numpy import *
import matplotlib.pyplot as plt
from numpy.testing import dec, assert_, assert_raises, assert_almost_equal, assert_allclose
import pdb
from ..sweep import *
def test_iterator():
start = (1, '->', 2)
stop = (3, '<-', 1)
print('Testing iterator start = %s, stop= %s' % (start, stop))
iterator = get_sweeper(start=start, stop=stop, nsite=4 - 2, iprint=2)
order = [(1, '->', 2), (1, '<-', 1), (1, '<-', 0),
(2, '->', 1), (2, '->', 2), (2, '<-', 1), (2, '<-', 0),
(3, '->', 1), (3, '->', 2), (3, '<-', 1),
]
plt.ion()
visualize_sweeper(iterator, nsite=3)
for od, it in zip(order, iterator):
assert_(od == it)
print('Testing 2-site iterator.')
start = (1, '->', 0)
stop = (3, '->', 0)
order = [(1, '->', 0), (2, '->', 0), (3, '->', 0)]
iterator = get_sweeper(start=start, stop=stop, nsite=2 - 2)
for od, it in zip(order, iterator):
assert_(od == it)
print('Testing periodic case.')
iterator = get_psweeper(start=(1, 2), stop=(3, 1), nsite=4, iprint=2)
order = [(1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3), (3, 0), (3, 1)]
for od, it in zip(order, iterator):
assert_(od == it)
iterator = get_psweeper(start=(1, 0), stop=(3, 0), nsite=2, iprint=2)
order = [(1, 0), (1, 1), (2, 0)]
for od, it in zip(order, iterator):
assert_(od == it)
if __name__ == '__main__':
test_iterator()
| 31.395833 | 91 | 0.512276 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 213 | 0.14134 |
8178e5a5cdd16d8a39e43e9f1e2b33dd9e55953c | 4,471 | py | Python | ui/numbered_menu.py | LouisPi/PiPortableRecorder | 430a4b6e1e869cbd68fd89bbf97261710fd7db6b | [
"Apache-2.0",
"MIT"
]
| 51 | 2017-12-03T21:59:13.000Z | 2021-01-02T17:13:34.000Z | ui/numbered_menu.py | LouisPi/PiPortableRecorder | 430a4b6e1e869cbd68fd89bbf97261710fd7db6b | [
"Apache-2.0",
"MIT"
]
| 153 | 2017-10-27T19:59:46.000Z | 2020-01-14T23:58:57.000Z | ui/numbered_menu.py | LouisPi/PiPortableRecorder | 430a4b6e1e869cbd68fd89bbf97261710fd7db6b | [
"Apache-2.0",
"MIT"
]
| 26 | 2017-11-16T11:10:56.000Z | 2022-03-29T18:44:48.000Z | from threading import Lock
from time import time
from ui import Menu
from ui.utils import clamp, check_value_lock, to_be_foreground
class NumberedMenu(Menu):
"""
This Menu allows the user to jump to entries using the numpad. If the menu is 10 entries or less
the navigation is instant. Otherwise, it lets the user type multiple digits to navigate to entries beyond 10th.
The `input_delay` parameter controls how long, and if, the menu waits before considering an input as definitive.
If `input_delay` is 0, then only the 10 first entries can be navigated to using the keypad.
The `prepend_numbers` parameters controls whether the entries should be prefixed by their number.
(default: `True`)
"""
def __init__(self, *args, **kwargs):
self.prepend_numbers = kwargs.pop('prepend_numbers', True)
self.input_delay = kwargs.pop('input_delay', 1)
Menu.__init__(self, *args, **kwargs)
self.__locked_name__ = None
self.value_lock = Lock()
self.numeric_keymap = {"KEY_{}".format(i): i for i in range(10)}
self.last_input_time = 0
self.current_input = None
@property
def entry_count(self):
return len(self.contents)
def before_activate(self):
Menu.before_activate(self)
self.last_input_time = -self.input_delay
def idle_loop(self):
Menu.idle_loop(self)
self.check_character_state()
def set_keymap(self):
Menu.set_keymap(self)
self.i.set_streaming(self.on_key_pressed)
def deactivate(self):
Menu.deactivate(self)
self.i.remove_streaming()
@to_be_foreground
def on_key_pressed(self, key):
if key == "KEY_RIGHT" and self.is_multi_digit():
self.confirm_current_input()
if key not in self.numeric_keymap:
return
if self.is_multi_digit():
self.process_multi_digit_input(key)
else:
self.process_single_digit_input(key)
self.view.refresh()
def process_single_digit_input(self, key):
self.move_to_entry(self.numeric_keymap[key])
def process_multi_digit_input(self, key):
self.last_input_time = time()
if not self.current_input:
self.current_input = str(self.numeric_keymap[key])
else:
self.current_input += str(self.numeric_keymap[key])
def move_to_entry(self, index):
if self.pointer == index:
# Moving to the same item that's already selected
# let's interpret this as KEY_ENTER
self.current_input = None
self.select_entry()
return
self.pointer = clamp(index, 0, len(self.contents) - 1)
self.current_input = None
self.view.refresh()
def process_contents(self):
Menu.process_contents(self)
if self.prepend_numbers:
self.prepend_entry_text()
def prepend_entry_text(self):
# prepend numbers to each entry name
if self.is_multi_digit():
self.contents = [["{} {}".format(i, entry[0]), entry[1]]
for i, entry in enumerate(self.contents)]
else:
for i, entry in enumerate(self.contents[:10]):
entry[0] = "{} {}".format(i, entry[0])
@check_value_lock
def check_character_state(self):
if self.is_current_input_finished():
self.move_to_entry(int(self.current_input))
def is_multi_digit(self):
return self.input_delay > 0
def is_current_input_finished(self):
# nothing in the buffer
if not self.current_input:
return False
# no need to let the user input '100' if we have 20 entries
if len(str(self.current_input)) == len(str(self.entry_count)):
return True
# user typed 2 and we have 19 entries, going to the most likely option
if int(self.current_input) * 10 > self.entry_count:
return True
# user typed 17 and we have 12 entries
if int(self.current_input) >= self.entry_count:
return True
now = time()
elapsed = now - self.last_input_time
if self.is_multi_digit() and elapsed >= self.input_delay: # delay wait is over
return True
return False
def confirm_current_input(self):
if self.current_input is None:
return
self.move_to_entry(int(self.current_input))
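

# Usage sketch (hypothetical wiring; `i` and `o` stand for the input and
# output device objects the surrounding UI framework normally supplies, and
# are assumptions rather than part of this file):
#
#     def callback():
#         print("Selected!")
#
#     contents = [["Option {}".format(n), callback] for n in range(25)]
#     NumberedMenu(contents, i, o, name="Demo menu", input_delay=1).activate()
#
# With 25 entries and input_delay=1, pressing "2" and waiting one second jumps
# to entry 2, while typing "24" within the delay jumps straight to the last entry.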


# === Math/RussianPeasantMultiplication.py | kopok2/algorithms | MIT ===
# coding=utf-8
"""Russian Peasant Multiplication algorithm Python implementation."""
def russ_peasant(a, b):
res = 0
while b > 0:
if b & 1:
res += a
a <<= 1
b >>= 1
return res
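

# Worked example (illustrative): 13 * 7 by halving b while doubling a.
#   a=13,  b=7 -> b odd, res += 13  (res = 13)
#   a=26,  b=3 -> b odd, res += 26  (res = 39)
#   a=52,  b=1 -> b odd, res += 52  (res = 91)
#   a=104, b=0 -> loop ends; russ_peasant(13, 7) == 91 == 13 * 7
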
if __name__ == '__main__':
for x in range(10):
for y in range(10):
print(x, y, x * y, russ_peasant(x, y))


# === bustime/requestmock.py | RemyPorter/BusTimeClient | MIT ===
from urllib.parse import urlparse, parse_qs
from io import BytesIO
import json
class MockRequest:
def getmethod(self, path):
apiend = "v2/"
start = path.find(apiend) + len(apiend)
return path[start:]
def urlopen(self, url):
output = BytesIO()
parsed = urlparse(url)
params = parse_qs(parsed.query)
method = self.getmethod(parsed.path)
assert("key" in params)
s = getattr(self, method)(**params)
output.write(s.encode("UTF8"))
output.seek(0)
return output
def gettime(self, **kwargs):
return json.dumps(
{"bustime-response": {"tm": "20141012 10:21:04"}}
)
def getdirections(self, **kwargs):
assert("rt" in kwargs.keys())
return json.dumps(
{"bustime-response":
{"directions": [{"dir":"INBOUND"}, {"dir":"OUTBOUND"}]}
}
)
def getstops(self, **kwargs):
assert("rt" in kwargs.keys())
assert("dir" in kwargs.keys())
return json.dumps(
{
"bustime-response":
{"stops": [
{'stpid': '2564', 'stpnm': '5th Ave at Meyran Ave',
'lon': -79.959239533731, 'lat': 40.441172012068}]}
})
def getpredictions(self, **kwargs):
assert("stpid" in kwargs)
assert(type(kwargs.get("rt", [])) == list)
return json.dumps({
"bustime-response":{
"prd": [{'rt': '71C', 'typ': 'A',
'prdctdn': '5', 'prdtm': '20141022 12:37',
'rtdir': 'INBOUND', 'zone': '',
'des': 'Downtown', 'dly': False,
'dstp': 4198, 'stpnm': '5th Ave at Chesterfield Rd',
'stpid': '38', 'tatripid': '159261',
'tmstmp': '20141022 12:31', 'tablockid': '071C-150',
'vid': '5678'}]}
})
def getvehicles(self, **kwargs):
assert(len(kwargs.get("vid", [])) > 0 or len(kwargs.get("rt", [])) > 0)
return json.dumps({
"bustime-response":{
"vehicle":[{'tablockid': '071C-148', 'pid': 2363,
'tatripid': '159264', 'zone': '', 'des': 'Downtown',
'dly': False, 'spd': 0, 'pdist': 17607, 'hdg': '299',
'tmstmp': '20141022 12:52', 'lat': '40.46042251586914',
'rt': '71C', 'vid': '5669', 'lon': '-79.92157814719461'}]
}
})
def getroutes(self, **kwargs):
return json.dumps({
"bustime-response": {
"routes": [{'rt': '12', 'rtnm': 'MCKNIGHT', 'rtclr': '#cc00cc'}]
}
})
def getpatterns(self, **kwargs):
assert(len(kwargs.get("pid", [])) > 0 or len(kwargs.get("rt", [])) > 0)
return json.dumps({
"bustime-response": { #yes, it's a fucking long line. I wanted to have a solid pattern for testing.
"ptr": [{'ln': 48062.0, 'rtdir': 'INBOUND', 'pid': 2363, 'pt': [{'typ': 'S', 'stpid': '19988', 'seq': 1, 'lon': -79.89193733333298, 'stpnm': 'LAYOVER BRUSHTON TERMINAL', 'pdist': 0.0, 'lat': 40.449901185011}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.449815459172, 'lon': -79.89199634193102, 'seq': 2}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.449160346541, 'lon': -79.89106988890501, 'seq': 3}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.44854800997, 'lon': -79.89031887038101, 'seq': 4}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.447739717154, 'lon': -79.88950347883997, 'seq': 5}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.447155944074, 'lon': -79.88896703703699, 'seq': 6}, {'typ': 'S', 'stpid': '16111', 'seq': 7, 'lon': -79.88872563822599, 'stpnm': 'Wilkinsburg Station stop B', 'pdist': 1664.0, 'lat': 40.446845684989}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.445680436902, 'lon': -79.88800934987802, 'seq': 8}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.4445128499, 'lon': -79.88743535714798, 'seq': 9}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.444227073768, 'lon': -79.88744608598398, 'seq': 10}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.44292065312, 'lon': -79.88707057672201, 'seq': 11}, {'typ': 'S', 'stpid': '8153', 'seq': 12, 'lon': -79.88699011045202, 'stpnm': 'Hay St ramp outbound shelter', 'pdist': 3184.0, 'lat': 40.442822670548}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442713549274, 'lon': -79.886595606477, 'seq': 13}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442472674453, 'lon': -79.886718988092, 'seq': 14}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442713549274, 'lon': -79.887507557543, 'seq': 15}, {'typ': 'S', 'stpid': '19871', 'seq': 16, 'lon': -79.887233972223, 'stpnm': 'Pennwood Ave at Franklin', 'pdist': 3809.0, 'lat': 40.441803119568}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.44066404441, 'lon': -79.886885285051, 'seq': 17}, {'typ': 'S', 'stpid': '7084', 'seq': 18, 'lon': -79.887757249342, 'stpnm': 'Rowland Connector opp Highrise', 'pdist': 4362.0, 'lat': 40.440518615096}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.4404410428, 'lon': -79.887928910719, 'seq': 19}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.440228739215, 'lon': -79.888191767202, 'seq': 20}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.440196077066, 'lon': -79.888325877653, 'seq': 21}, {'typ': 'S', 'stpid': '7085', 'seq': 22, 'lon': -79.889442955026, 'stpnm': 'Kelly Ave at Pitt St', 'pdist': 4987.0, 'lat': 40.440550595192}, {'typ': 'S', 'stpid': '7086', 'seq': 23, 'lon': -79.891097240081, 'stpnm': 'Kelly Ave at West St', 'pdist': 5545.0, 'lat': 40.441113108446}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.441213613573, 'lon': -79.891389862428, 'seq': 24}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.44125444064, 'lon': -79.891786829362, 'seq': 25}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.441266688755, 'lon': -79.892339364419, 'seq': 26}, {'typ': 'S', 'stpid': '8224', 'seq': 27, 'lon': -79.892355457673, 'stpnm': 'Trenton Ave opp Rebecca (nearside)', 'pdist': 6113.0, 'lat': 40.441887256997}, {'typ': 'S', 'stpid': '8225', 'seq': 28, 'lon': -79.892379833333, 'stpnm': 'Trenton Ave at Franklin', 'pdist': 6606.0, 'lat': 40.443183767554}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.443363401196, 'lon': -79.892342282407, 'seq': 29}, {'typ': 'S', 'stpid': '8226', 'seq': 30, 'lon': -79.892166166667, 'stpnm': 'Trenton Ave at South', 'pdist': 6878.0, 'lat': 40.443920100848}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.445097455012, 'lon': -79.891708178573, 'seq': 31}, {'typ': 'S', 'stpid': '8227', 'seq': 32, 'lon': -79.891327304892, 'stpnm': 'Trenton Ave at Penn', 'pdist': 7526.0, 'lat': 40.445815966242}, {'typ': 'W', 'pdist': 0.0, 'lat': 
40.445893532335, 'lon': -79.891284389548, 'seq': 33}, {'typ': 'S', 'stpid': '18527', 'seq': 34, 'lon': -79.891631411372, 'stpnm': 'Penn Ave past Trenton', 'pdist': 7803.0, 'lat': 40.44601741031}, {'typ': 'S', 'stpid': '7628', 'seq': 35, 'lon': -79.89356461905, 'stpnm': 'Penn Ave at Brushton', 'pdist': 8206.0, 'lat': 40.446668068321}, {'typ': 'S', 'stpid': '7629', 'seq': 36, 'lon': -79.895090308202, 'stpnm': 'Penn Ave at Braddock', 'pdist': 8685.0, 'lat': 40.44715925159}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.447673625313, 'lon': -79.896608438504, 'seq': 37}, {'typ': 'S', 'stpid': '7630', 'seq': 38, 'lon': -79.898152870373, 'stpnm': 'Penn Ave opp Carnegie', 'pdist': 9608.0, 'lat': 40.44816623597}, {'typ': 'S', 'stpid': '7631', 'seq': 39, 'lon': -79.899677632935, 'stpnm': 'Penn Ave at Lexington', 'pdist': 10093.0, 'lat': 40.448684819403}, {'typ': 'S', 'stpid': '7632', 'seq': 40, 'lon': -79.901225994708, 'stpnm': 'Penn Ave at Homewood', 'pdist': 10522.0, 'lat': 40.449202240648}, {'typ': 'S', 'stpid': '7633', 'seq': 41, 'lon': -79.903692774473, 'stpnm': 'Penn Ave at Lang', 'pdist': 11274.0, 'lat': 40.450018821594}, {'typ': 'S', 'stpid': '18528', 'seq': 42, 'lon': -79.906128449073, 'stpnm': 'Penn Ave past Murtland', 'pdist': 12021.0, 'lat': 40.450823187894}, {'typ': 'S', 'stpid': '20762', 'seq': 43, 'lon': -79.908102313491, 'stpnm': 'Penn Ave at Dallas ', 'pdist': 12609.0, 'lat': 40.451481247155}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.451775156577, 'lon': -79.908847967597, 'seq': 44}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.452232346455, 'lon': -79.909700910064, 'seq': 45}, {'typ': 'S', 'stpid': '8229', 'seq': 46, 'lon': -79.910224384258, 'stpnm': 'Penn Ave at Linden', 'pdist': 13331.0, 'lat': 40.452589523632}, {'typ': 'S', 'stpid': '8230', 'seq': 47, 'lon': -79.912039435186, 'stpnm': 'Penn Ave at 5th Ave', 'pdist': 14004.0, 'lat': 40.453822374}, {'typ': 'S', 'stpid': '18942', 'seq': 48, 'lon': -79.915107988093, 'stpnm': 'Penn Ave at Bakery Square', 'pdist': 15237.0, 'lat': 40.455933324408}, {'typ': 'S', 'stpid': '20011', 'seq': 49, 'lon': -79.917314336642, 'stpnm': 'Penn Ave at East Liberty Blvd', 'pdist': 15990.0, 'lat': 40.457427481869}, {'typ': 'S', 'stpid': '8233', 'seq': 50, 'lon': -79.918370864419, 'stpnm': 'Penn Ave at Village of Eastside Shpg Ctr', 'pdist': 16430.0, 'lat': 40.458190021715}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.459063494845, 'lon': -79.919679782418, 'seq': 51}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.45921568281, 'lon': -79.91981071693, 'seq': 52}, {'typ': 'S', 'stpid': '8234', 'seq': 53, 'lon': -79.920277421298, 'stpnm': 'Penn Ave opp Shady Ave (nearside)', 'pdist': 17088.0, 'lat': 40.459550374354}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.459827922173, 'lon': -79.92062610847, 'seq': 54}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.460195263114, 'lon': -79.921221558871, 'seq': 55}, {'typ': 'S', 'stpid': '19573', 'seq': 56, 'lon': -79.921578570767, 'stpnm': 'Penn Ave opp Eastside III Dr (Target)', 'pdist': 17587.0, 'lat': 40.460350125013}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.460386858912, 'lon': -79.92178778307, 'seq': 57}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.460676647854, 'lon': -79.922694369718, 'seq': 58}, {'typ': 'S', 'stpid': '8881', 'seq': 59, 'lon': -79.923569593255, 'stpnm': 'Penn Ave at Sheridan Ave', 'pdist': 18160.0, 'lat': 40.461008500156}, {'typ': 'S', 'stpid': '8882', 'seq': 60, 'lon': -79.92460004233, 'stpnm': 'Penn Ave at Highland Ave', 'pdist': 18488.0, 'lat': 40.46138158881}, {'typ': 'S', 'stpid': '8883', 'seq': 61, 'lon': -79.925521833333, 'stpnm': 'Penn Ave at 
Whitfield St', 'pdist': 18764.0, 'lat': 40.461677433225}, {'typ': 'S', 'stpid': '9124', 'seq': 62, 'lon': -79.928575802248, 'stpnm': 'Penn Ave at St Clair', 'pdist': 19679.0, 'lat': 40.462834921535}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.463556607817, 'lon': -79.930664010581, 'seq': 63}, {'typ': 'S', 'stpid': '19060', 'seq': 64, 'lon': -79.930969782409, 'stpnm': 'Negley Ave past Penn', 'pdist': 20408.0, 'lat': 40.463050518435}, {'typ': 'S', 'stpid': '8188', 'seq': 65, 'lon': -79.93160924008, 'stpnm': 'Negley Ave at Coral St', 'pdist': 20857.0, 'lat': 40.462015444544}, {'typ': 'S', 'stpid': '8189', 'seq': 66, 'lon': -79.932365500001, 'stpnm': 'Negley Ave at Friendship Ave', 'pdist': 21357.0, 'lat': 40.460807766604}, {'typ': 'S', 'stpid': '8190', 'seq': 67, 'lon': -79.933393646824, 'stpnm': 'Negley Ave at #370', 'pdist': 22054.0, 'lat': 40.459079351742}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.457797710009, 'lon': -79.93416612302, 'seq': 68}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.45757729782, 'lon': -79.9342197672, 'seq': 69}, {'typ': 'S', 'stpid': '8191', 'seq': 70, 'lon': -79.934196499999, 'stpnm': 'Negley Ave at Centre Ave', 'pdist': 22689.0, 'lat': 40.457446933451}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.457279582648, 'lon': -79.934116033729, 'seq': 71}, {'typ': 'S', 'stpid': '8192', 'seq': 72, 'lon': -79.936404395502, 'stpnm': 'Centre Ave at Graham St', 'pdist': 23393.0, 'lat': 40.456588788684}, {'typ': 'S', 'stpid': '8193', 'seq': 73, 'lon': -79.938546226853, 'stpnm': 'Centre Ave at Aiken Ave', 'pdist': 23939.0, 'lat': 40.45594147008}, {'typ': 'S', 'stpid': '8194', 'seq': 74, 'lon': -79.940120982802, 'stpnm': 'Centre Ave opp Shadyside Hospital', 'pdist': 24570.0, 'lat': 40.455471658241}, {'typ': 'S', 'stpid': '8195', 'seq': 75, 'lon': -79.941432683864, 'stpnm': 'Centre Ave at Cypress St', 'pdist': 24887.0, 'lat': 40.455069439451}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.454661251586, 'lon': -79.942848890224, 'seq': 76}, {'typ': 'S', 'stpid': '8196', 'seq': 77, 'lon': -79.94441278836, 'stpnm': 'Centre Ave at Morewood Ave', 'pdist': 25777.0, 'lat': 40.454203439285}, {'typ': 'S', 'stpid': '8197', 'seq': 78, 'lon': -79.947320525796, 'stpnm': 'Centre Ave at Millvale Ave', 'pdist': 26631.0, 'lat': 40.453325441844}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.452851703376, 'lon': -79.94905713492, 'seq': 79}, {'typ': 'S', 'stpid': '2631', 'seq': 80, 'lon': -79.949652585321, 'stpnm': 'Centre Ave opp Neville St', 'pdist': 27299.0, 'lat': 40.452651685031}, {'typ': 'S', 'stpid': '2632', 'seq': 81, 'lon': -79.950813788362, 'stpnm': 'Centre Ave at Melwood Ave', 'pdist': 27681.0, 'lat': 40.452286780054}, {'typ': 'S', 'stpid': '2633', 'seq': 82, 'lon': -79.952102351194, 'stpnm': 'Centre Ave at Craig St (nearside)', 'pdist': 28159.0, 'lat': 40.45194294909}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.451898046457, 'lon': -79.952070164685, 'seq': 83}, {'typ': 'S', 'stpid': '2634', 'seq': 84, 'lon': -79.950836166666, 'stpnm': 'Craig St at Bayard St', 'pdist': 28824.0, 'lat': 40.449890100525}, {'typ': 'S', 'stpid': '2635', 'seq': 85, 'lon': -79.949188166667, 'stpnm': 'Craig St at 5th Ave', 'pdist': 29896.0, 'lat': 40.447257934}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446951757745, 'lon': -79.949064785052, 'seq': 86}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446853781049, 'lon': -79.95005183797, 'seq': 87}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446796627911, 'lon': -79.951156908084, 'seq': 88}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446714980486, 'lon': -79.951516324092, 'seq': 89}, {'typ': 'S', 'stpid': '1171', 'seq': 90, 'lon': 
-79.951805621695, 'stpnm': '5th Ave at Bellefield', 'pdist': 30703.0, 'lat': 40.446598625546}, {'typ': 'S', 'stpid': '33', 'seq': 91, 'lon': -79.953128988756, 'stpnm': '5th Ave at Tennyson Ave', 'pdist': 31230.0, 'lat': 40.445674281789}, {'typ': 'S', 'stpid': '34', 'seq': 92, 'lon': -79.954654511245, 'stpnm': '5th Ave at Bigelow Blvd', 'pdist': 31839.0, 'lat': 40.444563354959}, {'typ': 'S', 'stpid': '35', 'seq': 93, 'lon': -79.956426053571, 'stpnm': '5th Ave at Thackeray Ave', 'pdist': 32525.0, 'lat': 40.443287941739}, {'typ': 'S', 'stpid': '36', 'seq': 94, 'lon': -79.958504737436, 'stpnm': '5th Ave opp Atwood St', 'pdist': 33276.0, 'lat': 40.441797374239}, {'typ': 'S', 'stpid': '38', 'seq': 95, 'lon': -79.961631833334, 'stpnm': '5th Ave at Chesterfield Rd', 'pdist': 34541.0, 'lat': 40.439498767751}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438439620008, 'lon': -79.963037673274, 'seq': 96}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438194646982, 'lon': -79.963327351848, 'seq': 97}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.43796192178, 'lon': -79.9637296832, 'seq': 98}, {'typ': 'S', 'stpid': '2639', 'seq': 99, 'lon': -79.964008632937, 'stpnm': '5th Ave opp Craft Ave', 'pdist': 35395.0, 'lat': 40.43786393198}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437761859121, 'lon': -79.964722100536, 'seq': 100}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437786356621, 'lon': -79.965043965617, 'seq': 101}, {'typ': 'S', 'stpid': '2640', 'seq': 102, 'lon': -79.965337675265, 'stpnm': '5th Ave at Robinson St', 'pdist': 35782.0, 'lat': 40.437783509968}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437501788161, 'lon': -79.965552251986, 'seq': 103}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437269060561, 'lon': -79.965616625003, 'seq': 104}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437056746959, 'lon': -79.965766828707, 'seq': 105}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436913843196, 'lon': -79.965874117068, 'seq': 106}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436762773174, 'lon': -79.966045778445, 'seq': 107}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436615785799, 'lon': -79.966260355166, 'seq': 108}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.43652187703, 'lon': -79.966560762576, 'seq': 109}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436472881098, 'lon': -79.966989916018, 'seq': 110}, {'typ': 'S', 'stpid': '18165', 'seq': 111, 'lon': -79.968430881616, 'stpnm': '5th Ave past Brenham St', 'pdist': 36887.0, 'lat': 40.436465920651}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436457754658, 'lon': -79.968661551591, 'seq': 112}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436484215649, 'lon': -79.968868819433, 'seq': 113}, {'typ': 'S', 'stpid': '2643', 'seq': 114, 'lon': -79.970167008596, 'stpnm': '5th Ave opp #2410', 'pdist': 37351.0, 'lat': 40.436912928767}, {'typ': 'S', 'stpid': '2644', 'seq': 115, 'lon': -79.970976146824, 'stpnm': '5th Ave opp #2358', 'pdist': 37622.0, 'lat': 40.437166181894}, {'typ': 'S', 'stpid': '18164', 'seq': 116, 'lon': -79.973009164022, 'stpnm': '5th Ave past Kirkpatrick St', 'pdist': 38216.0, 'lat': 40.437822924516}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437953577631, 'lon': -79.973400766539, 'seq': 117}, {'typ': 'S', 'stpid': '3147', 'seq': 118, 'lon': -79.974486406746, 'stpnm': '5th Ave past Moultrie', 'pdist': 38618.0, 'lat': 40.438030101164}, {'typ': 'S', 'stpid': '3148', 'seq': 119, 'lon': -79.975773604499, 'stpnm': '5th Ave opp Seneca St', 'pdist': 38981.0, 'lat': 40.438166929226}, {'typ': 'S', 'stpid': '3149', 'seq': 120, 'lon': -79.977394310846, 'stpnm': '5th Ave at Wyandotte St', 'pdist': 39420.0, 'lat': 40.438247594947}, {'typ': 'S', 'stpid': 
'3150', 'seq': 121, 'lon': -79.97944159061, 'stpnm': '5th Ave opp Gist St', 'pdist': 39979.0, 'lat': 40.438339593968}, {'typ': 'S', 'stpid': '3151', 'seq': 122, 'lon': -79.980569064815, 'stpnm': '5th Ave at Dinwiddie St', 'pdist': 40343.0, 'lat': 40.438395013737}, {'typ': 'S', 'stpid': '3152', 'seq': 123, 'lon': -79.982623861111, 'stpnm': '5th Ave opp Van Braam St', 'pdist': 40957.0, 'lat': 40.438484094732}, {'typ': 'S', 'stpid': '3154', 'seq': 124, 'lon': -79.984903019843, 'stpnm': '5th Ave at Pride St', 'pdist': 41518.0, 'lat': 40.438582096493}, {'typ': 'S', 'stpid': '3155', 'seq': 125, 'lon': -79.986305788361, 'stpnm': '5th Ave at Stevenson St', 'pdist': 41893.0, 'lat': 40.438632511731}, {'typ': 'S', 'stpid': '3156', 'seq': 126, 'lon': -79.988097240079, 'stpnm': '5th Ave at Magee St', 'pdist': 42426.0, 'lat': 40.438710016375}, {'typ': 'S', 'stpid': '3157', 'seq': 127, 'lon': -79.990663968915, 'stpnm': '5th Ave at Washington Pl', 'pdist': 43145.0, 'lat': 40.43883051923}, {'typ': 'S', 'stpid': '18161', 'seq': 128, 'lon': -79.992593541, 'stpnm': '5th Ave opp Diamond St', 'pdist': 43844.0, 'lat': 40.43889764138}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.43895888408, 'lon': -79.99469102845, 'seq': 129}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.439742785712, 'lon': -79.994202866409, 'seq': 130}, {'typ': 'S', 'stpid': '20292', 'seq': 131, 'lon': -79.994164892194, 'stpnm': '6th Ave at Center Ave', 'pdist': 44671.0, 'lat': 40.439906541484}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.44002494221, 'lon': -79.994191714284, 'seq': 132}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.440151508273, 'lon': -79.994266816137, 'seq': 133}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.440265825802, 'lon': -79.994379468915, 'seq': 134}, {'typ': 'S', 'stpid': '1509', 'seq': 135, 'lon': -79.994631135581, 'stpnm': '6th Ave at Bigelow Sq', 'pdist': 44962.0, 'lat': 40.440377273463}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.440657024606, 'lon': -79.995568611756, 'seq': 136}, {'typ': 'S', 'stpid': '3158', 'seq': 137, 'lon': -79.997612455025, 'stpnm': '6th Ave at Smithfield St', 'pdist': 45844.0, 'lat': 40.441420492964}, {'typ': 'S', 'stpid': '3159', 'seq': 138, 'lon': -79.999358695108, 'stpnm': '6th Ave at Wood St', 'pdist': 46378.0, 'lat': 40.442103148627}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442299115726, 'lon': -79.999809306222, 'seq': 139}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442462421206, 'lon': -79.999916594583, 'seq': 140}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.44171937807, 'lon': -80.002727549631, 'seq': 141}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.441507078522, 'lon': -80.00256661709, 'seq': 142}, {'typ': 'S', 'stpid': '20690', 'seq': 143, 'lon': -80.00084666204202, 'stpnm': '5th Ave at Wood St', 'pdist': 48062.0, 'lat': 40.44088273536}]}, {'ln': 43730.0, 'rtdir': 'OUTBOUND', 'pid': 2364, 'pt': [{'typ': 'S', 'stpid': '20690', 'seq': 1, 'lon': -80.00084666204202, 'stpnm': '5th Ave at Wood St', 'pdist': 0.0, 'lat': 40.44088273536}, {'typ': 'S', 'stpid': '20691', 'seq': 2, 'lon': -79.99866483333403, 'stpnm': '5th Ave at Smithfield St', 'pdist': 542.0, 'lat': 40.440055767723}, {'typ': 'S', 'stpid': '20293', 'seq': 3, 'lon': -79.995996042329, 'stpnm': '5th Ave at Ross St', 'pdist': 1454.0, 'lat': 40.439031583534}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438937678139, 'lon': -79.995684906083, 'seq': 4}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438896849666, 'lon': -79.994783683854, 'seq': 5}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438758032671, 'lon': -79.994783683854, 'seq': 6}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.43801494859, 'lon': -79.994343801576, 
'seq': 7}, {'typ': 'S', 'stpid': '3241', 'seq': 8, 'lon': -79.993441129629, 'stpnm': 'Forbes Ave at Boyd St', 'pdist': 2309.0, 'lat': 40.437999108078}, {'typ': 'S', 'stpid': '3242', 'seq': 9, 'lon': -79.991346486111, 'stpnm': 'Forbes Ave at McAnulty Dr', 'pdist': 2920.0, 'lat': 40.437917437913}, {'typ': 'S', 'stpid': '3243', 'seq': 10, 'lon': -79.9887468168, 'stpnm': 'Forbes Ave at Magee St', 'pdist': 3680.0, 'lat': 40.437810426188}, {'typ': 'S', 'stpid': '3244', 'seq': 11, 'lon': -79.986794570767, 'stpnm': 'Forbes Ave at Stevenson St', 'pdist': 4191.0, 'lat': 40.437725178342}, {'typ': 'S', 'stpid': '3245', 'seq': 12, 'lon': -79.98534843783, 'stpnm': 'Forbes Ave at Pride St', 'pdist': 4573.0, 'lat': 40.437646432071}, {'typ': 'S', 'stpid': '3246', 'seq': 13, 'lon': -79.982940666667, 'stpnm': 'Forbes Ave at Van Braam St', 'pdist': 5263.0, 'lat': 40.437541934523}, {'typ': 'S', 'stpid': '3247', 'seq': 14, 'lon': -79.981758166667, 'stpnm': 'Forbes Ave at Miltenberger St', 'pdist': 5584.0, 'lat': 40.437488434527}, {'typ': 'S', 'stpid': '3248', 'seq': 15, 'lon': -79.979675333334, 'stpnm': 'Forbes Ave at Gist St', 'pdist': 6165.0, 'lat': 40.43739501512}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437313356274, 'lon': -79.977685134245, 'seq': 16}, {'typ': 'S', 'stpid': '3249', 'seq': 17, 'lon': -79.977664844577, 'stpnm': 'Jumonville St at Watson St', 'pdist': 6823.0, 'lat': 40.437740517434}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438185923097, 'lon': -79.977653302922, 'seq': 18}, {'typ': 'S', 'stpid': '3250', 'seq': 19, 'lon': -79.975984968915, 'stpnm': '5th Ave at Seneca St', 'pdist': 7437.0, 'lat': 40.438112431006}, {'typ': 'S', 'stpid': '3251', 'seq': 20, 'lon': -79.974548949073, 'stpnm': '5th Ave at Moulrie St', 'pdist': 7832.0, 'lat': 40.43797818236}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437889166532, 'lon': -79.97340458863, 'seq': 21}, {'typ': 'S', 'stpid': '3252', 'seq': 22, 'lon': -79.972862782409, 'stpnm': '5th Ave at Birmingham Bridge', 'pdist': 8327.0, 'lat': 40.437725849943}, {'typ': 'S', 'stpid': '2556', 'seq': 23, 'lon': -79.971299708996, 'stpnm': '5th Ave at #2358', 'pdist': 8811.0, 'lat': 40.437214433778}, {'typ': 'S', 'stpid': '2557', 'seq': 24, 'lon': -79.9702225, 'stpnm': '5th Ave at #2410', 'pdist': 9150.0, 'lat': 40.43686293456}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436434221124, 'lon': -79.968918946419, 'seq': 25}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436409723131, 'lon': -79.968758013878, 'seq': 26}, {'typ': 'S', 'stpid': '2558', 'seq': 27, 'lon': -79.968582166668, 'stpnm': '5th Ave opp Brenham St', 'pdist': 9629.0, 'lat': 40.436405101253}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436421433251, 'lon': -79.968040360447, 'seq': 28}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436425516249, 'lon': -79.966962112423, 'seq': 29}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436478595211, 'lon': -79.966511501309, 'seq': 30}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436580670019, 'lon': -79.966243280407, 'seq': 31}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436727657471, 'lon': -79.965996517178, 'seq': 32}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436850146768, 'lon': -79.965846313473, 'seq': 33}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.436988967702, 'lon': -79.965728296276, 'seq': 34}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437119622437, 'lon': -79.965658558842, 'seq': 35}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437262131201, 'lon': -79.965561438496, 'seq': 36}, {'typ': 'S', 'stpid': '19850', 'seq': 37, 'lon': -79.965362955029, 'stpnm': '5th Ave at Maurice St', 'pdist': 10655.0, 'lat': 40.437429531855}, {'typ': 'W', 'pdist': 
0.0, 'lat': 40.437552019874, 'lon': -79.965164471562, 'seq': 38}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437641844279, 'lon': -79.964917708333, 'seq': 39}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437739834402, 'lon': -79.96443491071, 'seq': 40}, {'typ': 'S', 'stpid': '2561', 'seq': 41, 'lon': -79.964154864419, 'stpnm': '5th Ave at Craft Ave', 'pdist': 11043.0, 'lat': 40.437787848263}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437836843236, 'lon': -79.96390810119, 'seq': 42}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.437951164701, 'lon': -79.963613058198, 'seq': 43}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438171641264, 'lon': -79.963285828698, 'seq': 44}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438318625238, 'lon': -79.963103438485, 'seq': 45}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.438592177888, 'lon': -79.962744022477, 'seq': 46}, {'typ': 'S', 'stpid': '2562', 'seq': 47, 'lon': -79.962188666667, 'stpnm': '5th Ave at Halket St', 'pdist': 11768.0, 'lat': 40.439010601112}, {'typ': 'S', 'stpid': '17671', 'seq': 48, 'lon': -79.959905053573, 'stpnm': '5th Ave past McKee Pl', 'pdist': 12632.0, 'lat': 40.440680195665}, {'typ': 'S', 'stpid': '2564', 'seq': 49, 'lon': -79.959239533731, 'stpnm': '5th Ave at Meyran Ave', 'pdist': 12879.0, 'lat': 40.441172012068}, {'typ': 'S', 'stpid': '2565', 'seq': 50, 'lon': -79.957992499998, 'stpnm': '5th Ave at Oakland Ave', 'pdist': 13394.0, 'lat': 40.442085267612}, {'typ': 'S', 'stpid': '2566', 'seq': 51, 'lon': -79.956747457672, 'stpnm': '5th Ave opp Thackeray Ave', 'pdist': 13872.0, 'lat': 40.442990015366}, {'typ': 'S', 'stpid': '2567', 'seq': 52, 'lon': -79.954881824734, 'stpnm': '5th Ave at Bigelow Blvd', 'pdist': 14532.0, 'lat': 40.444335993965}, {'typ': 'S', 'stpid': '3253', 'seq': 53, 'lon': -79.953202708996, 'stpnm': '5th Ave opp Tennyson Ave', 'pdist': 15193.0, 'lat': 40.445546749707}, {'typ': 'S', 'stpid': '3254', 'seq': 54, 'lon': -79.952111093253, 'stpnm': '5th Ave at Bellefield Ave', 'pdist': 15623.0, 'lat': 40.44630592845}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446623373397, 'lon': -79.951609216288, 'seq': 55}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446721350429, 'lon': -79.951314173296, 'seq': 56}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446770338891, 'lon': -79.950579248026, 'seq': 57}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446835656786, 'lon': -79.949640474871, 'seq': 58}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446913221703, 'lon': -79.948980651453, 'seq': 59}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.447374526992, 'lon': -79.94917913492, 'seq': 60}, {'typ': 'S', 'stpid': '2571', 'seq': 61, 'lon': -79.949420533732, 'stpnm': 'Craig St at Park Plaza', 'pdist': 16851.0, 'lat': 40.447762347572}, {'typ': 'S', 'stpid': '2572', 'seq': 62, 'lon': -79.950570087302, 'stpnm': 'Craig St at Bayard St', 'pdist': 17630.0, 'lat': 40.449607534262}, {'typ': 'S', 'stpid': '2573', 'seq': 63, 'lon': -79.951959353173, 'stpnm': 'Craig St at Centre Ave', 'pdist': 18457.0, 'lat': 40.451872385571}, {'typ': 'S', 'stpid': '2574', 'seq': 64, 'lon': -79.951001553571, 'stpnm': 'Centre Ave at Melwood Ave', 'pdist': 18766.0, 'lat': 40.452173174704}, {'typ': 'S', 'stpid': '2575', 'seq': 65, 'lon': -79.949852615741, 'stpnm': 'Centre Ave at Neville St', 'pdist': 19116.0, 'lat': 40.452535756554}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.452783201296, 'lon': -79.94906657475, 'seq': 66}, {'typ': 'S', 'stpid': '8198', 'seq': 67, 'lon': -79.947097833333, 'stpnm': 'Centre Ave opp Millvale Ave', 'pdist': 19930.0, 'lat': 40.453342433673}, {'typ': 'S', 'stpid': '8199', 'seq': 68, 'lon': -79.944541689153, 'stpnm': 'Centre Ave 
at Morewood Ave', 'pdist': 20688.0, 'lat': 40.454097103115}, {'typ': 'S', 'stpid': '8201', 'seq': 69, 'lon': -79.941522513889, 'stpnm': 'Centre Ave opp Cypress St', 'pdist': 21562.0, 'lat': 40.45498568797}, {'typ': 'S', 'stpid': '8202', 'seq': 70, 'lon': -79.939839104496, 'stpnm': 'Centre Ave at Shadyside Hosptital', 'pdist': 22083.0, 'lat': 40.455500688037}, {'typ': 'S', 'stpid': '8203', 'seq': 71, 'lon': -79.938622420634, 'stpnm': 'Centre Ave at Aiken St', 'pdist': 22408.0, 'lat': 40.455849760816}, {'typ': 'S', 'stpid': '8204', 'seq': 72, 'lon': -79.936516138226, 'stpnm': 'Centre Ave at Graham St', 'pdist': 23051.0, 'lat': 40.456481021498}, {'typ': 'S', 'stpid': '8205', 'seq': 73, 'lon': -79.934715333332, 'stpnm': 'Centre Ave at Negley Ave', 'pdist': 23617.0, 'lat': 40.457034933475}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.457255347443, 'lon': -79.934001865734, 'seq': 74}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.457496169285, 'lon': -79.934119882931, 'seq': 75}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.457647192712, 'lon': -79.934130611767, 'seq': 76}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.457851277885, 'lon': -79.934082332004, 'seq': 77}, {'typ': 'S', 'stpid': '8206', 'seq': 78, 'lon': -79.933821595899, 'stpnm': 'Centre Ave at Baum Blvd', 'pdist': 24178.0, 'lat': 40.458263705021}, {'typ': 'S', 'stpid': '8207', 'seq': 79, 'lon': -79.932419593254, 'stpnm': 'Negley Ave at Friendship Ave', 'pdist': 25101.0, 'lat': 40.460559766618}, {'typ': 'S', 'stpid': '8208', 'seq': 80, 'lon': -79.931591155423, 'stpnm': 'Negley Ave opp Coral St', 'pdist': 25663.0, 'lat': 40.461936270313}, {'typ': 'S', 'stpid': '8209', 'seq': 81, 'lon': -79.930680564813, 'stpnm': 'Negley Ave at Penn Ave', 'pdist': 26175.0, 'lat': 40.463411168093}, {'typ': 'S', 'stpid': '9128', 'seq': 82, 'lon': -79.927021771164, 'stpnm': 'Penn Ave at Beatty St', 'pdist': 27341.0, 'lat': 40.462186421807}, {'typ': 'S', 'stpid': '9129', 'seq': 83, 'lon': -79.925920042329, 'stpnm': 'Penn Ave at Whitfield St', 'pdist': 27695.0, 'lat': 40.461761433221}, {'typ': 'S', 'stpid': '3133', 'seq': 84, 'lon': -79.924964833335, 'stpnm': 'Penn Ave at Highland Ave', 'pdist': 27978.0, 'lat': 40.461429599904}, {'typ': 'S', 'stpid': '18975', 'seq': 85, 'lon': -79.923960135581, 'stpnm': 'Penn Ave at Sheridan Sq', 'pdist': 28284.0, 'lat': 40.461080255601}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.460778223843, 'lon': -79.923107193114, 'seq': 86}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.460631288983, 'lon': -79.922769234778, 'seq': 87}, {'typ': 'S', 'stpid': '19574', 'seq': 88, 'lon': -79.921938462961, 'stpnm': 'Penn Ave at Eastside III Dr', 'pdist': 28861.0, 'lat': 40.460351862483}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.460323291659, 'lon': -79.921852632273, 'seq': 89}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.460237579116, 'lon': -79.921788259256, 'seq': 90}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.45995186985, 'lon': -79.921112342585, 'seq': 91}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.4596865673, 'lon': -79.920774384249, 'seq': 92}, {'typ': 'S', 'stpid': '8276', 'seq': 93, 'lon': -79.920097333333, 'stpnm': 'Penn Ave past Shady Ave', 'pdist': 29549.0, 'lat': 40.459199365493}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.459080998544, 'lon': -79.919963222882, 'seq': 94}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.459015692551, 'lon': -79.919754010579, 'seq': 95}, {'typ': 'S', 'stpid': '8277', 'seq': 96, 'lon': -79.918572725527, 'stpnm': 'Penn Ave opp Village of Eastside Shpg Ctr', 'pdist': 30029.0, 'lat': 40.458219989936}, {'typ': 'S', 'stpid': '19381', 'seq': 97, 'lon': -79.9176930959, 'stpnm': 'Penn 
Ave opp East Liberty Blvd (farside)', 'pdist': 30400.0, 'lat': 40.457613502192}, {'typ': 'S', 'stpid': '8279', 'seq': 98, 'lon': -79.915559208994, 'stpnm': 'Penn Ave opp Bakery Square', 'pdist': 30705.0, 'lat': 40.456158097123}, {'typ': 'S', 'stpid': '8280', 'seq': 99, 'lon': -79.912942375661, 'stpnm': 'Penn Ave at 5th Ave', 'pdist': 31694.0, 'lat': 40.454361927924}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.453982309293, 'lon': -79.912395205022, 'seq': 100}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.453194491742, 'lon': -79.911279406072, 'seq': 101}, {'typ': 'S', 'stpid': '8281', 'seq': 102, 'lon': -79.910441722881, 'stpnm': 'Penn Ave at Linden', 'pdist': 32619.0, 'lat': 40.452643023645}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.452187911929, 'lon': -79.909776638236, 'seq': 103}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.451938906948, 'lon': -79.909347484794, 'seq': 104}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.451751132091, 'lon': -79.908955882277, 'seq': 105}, {'typ': 'S', 'stpid': '20763', 'seq': 106, 'lon': -79.908237050262, 'stpnm': 'Penn Ave at Dallas Ave', 'pdist': 33320.0, 'lat': 40.451444976306}, {'typ': 'S', 'stpid': '7692', 'seq': 107, 'lon': -79.906119717594, 'stpnm': 'Penn Ave opp Murtland', 'pdist': 33990.0, 'lat': 40.450752764737}, {'typ': 'S', 'stpid': '7693', 'seq': 108, 'lon': -79.903907833332, 'stpnm': 'Penn Ave at Lang', 'pdist': 34657.0, 'lat': 40.450015933852}, {'typ': 'S', 'stpid': '7694', 'seq': 109, 'lon': -79.901542666665, 'stpnm': 'Penn Ave at Homewood', 'pdist': 35380.0, 'lat': 40.449207267228}, {'typ': 'S', 'stpid': '7695', 'seq': 110, 'lon': -79.899738177912, 'stpnm': 'Penn Ave at Lexington', 'pdist': 35928.0, 'lat': 40.44861834093}, {'typ': 'S', 'stpid': '18542', 'seq': 111, 'lon': -79.898263060188, 'stpnm': 'Penn Ave past Carnegie', 'pdist': 36455.0, 'lat': 40.448122273561}, {'typ': 'S', 'stpid': '7697', 'seq': 112, 'lon': -79.895344437829, 'stpnm': 'Penn Ave at Braddock', 'pdist': 37204.0, 'lat': 40.447155681048}, {'typ': 'S', 'stpid': '7698', 'seq': 113, 'lon': -79.894080240081, 'stpnm': 'Penn Ave at East End', 'pdist': 37577.0, 'lat': 40.44674726544}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.445849137648, 'lon': -79.891376573394, 'seq': 114}, {'typ': 'S', 'stpid': '8283', 'seq': 115, 'lon': -79.891540499999, 'stpnm': 'Trenton Ave past Penn', 'pdist': 38534.0, 'lat': 40.445526100761}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.445060696134, 'lon': -79.891780072747, 'seq': 116}, {'typ': 'S', 'stpid': '8284', 'seq': 117, 'lon': -79.892091208993, 'stpnm': 'Trenton Ave at South', 'pdist': 38980.0, 'lat': 40.44430135164}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.444219701183, 'lon': -79.892101937829, 'seq': 118}, {'typ': 'S', 'stpid': '7329', 'seq': 119, 'lon': -79.890609666666, 'stpnm': 'South Ave at West', 'pdist': 39454.0, 'lat': 40.443714100858}, {'typ': 'S', 'stpid': '7330', 'seq': 120, 'lon': -79.888255084655, 'stpnm': 'South Ave at Pitt', 'pdist': 40151.0, 'lat': 40.442899450303}, {'typ': 'S', 'stpid': '20013', 'seq': 121, 'lon': -79.887481977515, 'stpnm': 'South Ave at Pennwood', 'pdist': 40395.0, 'lat': 40.442649715114}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442433335871, 'lon': -79.886747052245, 'seq': 122}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442743615323, 'lon': -79.886575390868, 'seq': 123}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442772193622, 'lon': -79.886698772483, 'seq': 124}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.442845680619, 'lon': -79.886800696425, 'seq': 125}, {'typ': 'S', 'stpid': '16014', 'seq': 126, 'lon': -79.88690183333199, 'stpnm': 'Hay St Ramp inbound shelter', 'pdist': 
40996.0, 'lat': 40.443279227407}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.443626246017, 'lon': -79.88700912169202, 'seq': 127}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.443961015213, 'lon': -79.887186147487, 'seq': 128}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.444859168235, 'lon': -79.887545563495, 'seq': 129}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.445712402489, 'lon': -79.887985445774, 'seq': 130}, {'typ': 'S', 'stpid': '16118', 'seq': 131, 'lon': -79.88844805952203, 'stpnm': 'Wilkinsburg Station stop D', 'pdist': 42200.0, 'lat': 40.446499209682}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.446980929603, 'lon': -79.88878065344, 'seq': 132}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.447519798645, 'lon': -79.88925272222701, 'seq': 133}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.448026005021, 'lon': -79.88972479101301, 'seq': 134}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.448548536635, 'lon': -79.89027732607002, 'seq': 135}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.448956761632, 'lon': -79.89076548811101, 'seq': 136}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.449258846534, 'lon': -79.89114099737299, 'seq': 137}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.449581341079, 'lon': -79.89159160848703, 'seq': 138}, {'typ': 'W', 'pdist': 0.0, 'lat': 40.449760957623, 'lon': -79.89176326986399, 'seq': 139}, {'typ': 'S', 'stpid': '19988', 'seq': 140, 'lon': -79.89193733333298, 'stpnm': 'LAYOVER BRUSHTON TERMINAL', 'pdist': 43730.0, 'lat': 40.449901185011}]}]
}
})
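
# Minimal round trip (an illustrative sketch; the real tests drive this mock
# through the bustime client code). Any v2 URL carrying a "key" query
# parameter is answered from the canned data above; the URL itself is a
# made-up example.
if __name__ == "__main__":
    mock = MockRequest()
    raw = mock.urlopen("http://example.com/bustime/api/v2/gettime?key=TEST")
    print(json.loads(raw.read().decode("UTF8")))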


# === Assignment 4/src/optim/lr_scheduler.py | vamsi3/CS763-IIT-Bombay | MIT ===
import math
from bisect import bisect_right
class _LRScheduler:
def __init__(self, optimizer, last_epoch=-1):
self.optimizer = optimizer
self.base_lr = optimizer.lr
self.last_epoch = last_epoch
def step(self):
self.last_epoch += 1
self.optimizer.lr = self.get_lr()
class StepLR(_LRScheduler):
def __init__(self, optimizer, step_size, gamma=0.1, last_epoch=-1):
super().__init__(optimizer, last_epoch)
self.step_size = step_size
self.gamma = gamma
def get_lr(self):
return self.base_lr * self.gamma ** (self.last_epoch // self.step_size)
class MultiStepLR(_LRScheduler):
def __init__(self, optimizer, milestones, gamma=0.1, last_epoch=-1):
super().__init__(optimizer, last_epoch)
self.milestones = milestones
self.gamma = gamma
def get_lr(self):
return self.base_lr * self.gamma ** bisect_right(self.milestones, self.last_epoch)
class ExponentialLR(_LRScheduler):
def __init__(self, optimizer, gamma, last_epoch=-1):
super().__init__(optimizer, last_epoch)
self.gamma = gamma
def get_lr(self):
return self.base_lr * self.gamma ** self.last_epoch
class CosineAnnealingLR(_LRScheduler):
def __init__(self, optimizer, T_max, eta_min=0, last_epoch=-1):
super().__init__(optimizer, last_epoch)
self.T_max = T_max
self.eta_min = eta_min
def get_lr(self):
return self.eta_min + (self.base_lr - self.eta_min) * (1 + math.cos(math.pi * self.last_epoch / self.T_max)) / 2
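
# Quick sanity check (illustrative; any object exposing an `lr` attribute
# works, since these schedulers only read and write `optimizer.lr`).
# For reference, CosineAnnealingLR above implements
#     lr_t = eta_min + (base_lr - eta_min) * (1 + cos(pi * t / T_max)) / 2
if __name__ == '__main__':
    class _FakeOptimizer:
        lr = 0.1

    scheduler = StepLR(_FakeOptimizer(), step_size=2, gamma=0.5)
    for epoch in range(6):
        scheduler.step()
        print(epoch, scheduler.optimizer.lr)  # 0.1, 0.1, 0.05, 0.05, 0.025, 0.025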


# === gluon/dal/adapters/teradata.py | lightcoder127/Web2py | BSD-3-Clause ===
# -*- coding: utf-8 -*-
from .._globals import IDENTITY
from ..connection import ConnectionPool
from .base import BaseAdapter
class TeradataAdapter(BaseAdapter):
drivers = ('pyodbc',)
types = {
'boolean': 'CHAR(1)',
'string': 'VARCHAR(%(length)s)',
'text': 'VARCHAR(2000)',
'json': 'VARCHAR(4000)',
'password': 'VARCHAR(%(length)s)',
'blob': 'BLOB',
'upload': 'VARCHAR(%(length)s)',
'integer': 'INT',
'bigint': 'BIGINT',
'float': 'REAL',
'double': 'DOUBLE',
'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
'date': 'DATE',
'time': 'TIME',
'datetime': 'TIMESTAMP',
# Modified Constraint syntax for Teradata.
# Teradata does not support ON DELETE.
'id': 'INT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
'reference': 'INT',
'list:integer': 'VARCHAR(4000)',
'list:string': 'VARCHAR(4000)',
'list:reference': 'VARCHAR(4000)',
'geometry': 'ST_GEOMETRY', # http://www.info.teradata.com/HTMLPubs/DB_TTU_14_00/index.html#page/Database_Management/B035_1094_111A/ch14.055.160.html
'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
'big-reference': 'BIGINT',
'reference FK': ' REFERENCES %(foreign_key)s',
'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
}
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}, do_connect=True, after_connection=None):
self.db = db
self.dbengine = "teradata"
self.uri = uri
if do_connect: self.find_driver(adapter_args,uri)
self.pool_size = pool_size
self.folder = folder
self.db_codec = db_codec
self._after_connection = after_connection
self.find_or_make_work_folder()
ruri = uri.split('://', 1)[1]
def connector(cnxn=ruri,driver_args=driver_args):
return self.driver.connect(cnxn,**driver_args)
self.connector = connector
if do_connect: self.reconnect()
def close(self,action='commit',really=True):
# Teradata does not implicitly close off the cursor
# leading to SQL_ACTIVE_STATEMENTS limit errors
self.cursor.close()
ConnectionPool.close(self, action, really)
def LEFT_JOIN(self):
return 'LEFT OUTER JOIN'
# Similar to MSSQL, Teradata can't specify a range (for Pageby)
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
if limitby:
(lmin, lmax) = limitby
sql_s += ' TOP %i' % lmax
return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
def _truncate(self, table, mode=''):
tablename = table._tablename
return ['DELETE FROM %s ALL;' % (tablename)]
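

# Shape of the SQL emitted by select_limitby above (an illustrative sketch,
# not executed here): Teradata has no OFFSET-style range, so limitby=(0, 10)
# is rendered as a TOP clause:
#
#     SELECT TOP 10 <fields> FROM <tables><where><order>;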


# === conan_tests/external_tools/vswhere_test.py | conan-io/test | MIT ===
import os
import platform
import unittest
import nose
from conans import tools
from conans.errors import ConanException
from conans.model.version import Version
from conans import __version__ as client_version
from conans.model import settings
from conans.test.utils.tools import TestClient
from conans.test.assets.visual_project_files import get_vs_project_files
class vswhereTest(unittest.TestCase):
    # Assumed environment:
# - BuildTools 14 (2015)
# - VS Community 14 (2015)
#
# - BuildTools 15 (2017) OR VS Community 15 (2017)
modern_products = 1 # 2017 or higher versions without BuildTools -> vswhere()
all_modern_products = 2 # 2017 or higher versions with BuildTools -> vswhere(products=["*"])
modern_and_legacy_products = 2 # 2017 and lower versions (without BuildTools) -> vswhere(legacy=True)
only_legacy_products = 1
all_products = 3
def setUp(self):
if platform.system() != "Windows":
raise nose.SkipTest("Only Windows test")
if Version(client_version) < Version("1.1.0-dev"):
raise nose.SkipTest("Only >= 1.1.0-dev version")
def vs_comntools_test(self):
# Fake path
with tools.environment_append({"VS150COMNTOOLS": "fake/path/here"}):
path = tools.vs_comntools("15")
self.assertEqual(path, "fake/path/here")
# VS 14 path
path = tools.vs_comntools("14")
self.assertEqual(path, "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\Common7\\Tools\\")
# VS 15 path (shouldn't be found as VS150COMNTOOLS is not set by default)
path = tools.vs_comntools("15")
self.assertEqual(path, None)
def vswhere_test(self):
# products and legacy not allowed
self.assertRaises(ConanException, tools.vswhere, products=["*"], legacy=True)
# Detect only one product (VS Community 15) as vswhere default detection
nproducts = len(tools.vswhere())
self.assertEqual(nproducts, self.modern_products)
# Detect only modern products (VS Community 15 & BuildTools 15)
products = tools.vswhere(products=["*"])
nproducts = len(products)
self.assertEqual(nproducts, self.all_modern_products)
installation_paths = [product["installationPath"] for product in products]
self.assertTrue(any("Community" in install_path for install_path in installation_paths))
self.assertTrue(any("BuildTools" in install_path for install_path in installation_paths))
# Detect also legacy products but no modern BuildTools
products = tools.vswhere(legacy=True)
nproducts = len(products)
self.assertEqual(nproducts, self.modern_and_legacy_products)
installation_paths = [product["installationPath"] for product in products]
self.assertTrue(any("Community" in install_path for install_path in installation_paths))
self.assertTrue(any("Microsoft Visual Studio 14.0" in install_path for install_path in installation_paths))
# Detect all installed products
products = tools.vswhere(products=["*"])
products += tools.vswhere(legacy=["*"])
seen_products = []
for product in products:
if product not in seen_products:
seen_products.append(product)
products = seen_products
nproducts = len(products)
self.assertEqual(nproducts, self.all_products)
installation_paths = [product["installationPath"] for product in products]
self.assertTrue(any("Community" in install_path for install_path in installation_paths))
self.assertTrue(any("BuildTools" in install_path for install_path in installation_paths))
self.assertTrue(any("Microsoft Visual Studio 14.0" in install_path for install_path in installation_paths))
def vs_installation_path_test(self):
# Default behaviour
install_path = tools.vs_installation_path("15")
self.assertIn("Community", install_path)
install_path = tools.vs_installation_path("14")
self.assertIn("Microsoft Visual Studio 14.0", install_path)
# only BuildTools detection
install_path = tools.vs_installation_path("15", preference=["BuildTools"])
self.assertIn("BuildTools", install_path)
install_path = tools.vs_installation_path("14", preference=["BuildTools"])
self.assertIn("Microsoft Visual Studio 14.0", install_path)
# Ask for not installed versions
install_path = tools.vs_installation_path("15", preference=["Enterprise"])
self.assertIsNone(install_path)
install_path = tools.vs_installation_path("15", preference=["Professional"])
self.assertIsNone(install_path)
# Change preference order
install_path = tools.vs_installation_path("15", preference=["BuildTools", "Community", "Professional", "Enterprise"])
self.assertIn("BuildTools", install_path)
install_path = tools.vs_installation_path("15", preference=["Professional", "Enterprise", "Community"])
self.assertIn("Community", install_path)
# Preference order by env var
with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE":"BuildTools, Community,Professional, Enterprise"})):
install_path = tools.vs_installation_path("15")
self.assertIn("BuildTools", install_path)
with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE":"Professional, Enterprise,Community"})):
install_path = tools.vs_installation_path("15")
self.assertIn("Community", install_path)
    def vcvars_command_test(self):
fake_settings = settings.Settings({"os":"Windows", "arch": "x86_64"})
# preference order with VS 15
with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE":"BuildTools, Community,Professional, Enterprise"})):
command = tools.vcvars_command(settings=fake_settings, compiler_version="15")
self.assertNotIn("Community", command)
self.assertIn("VC/Auxiliary/Build/vcvarsall.bat", command)
self.assertIn("Microsoft Visual Studio\\2017\\BuildTools", command)
self.assertIn("VSCMD_START_DIR", command)
with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE":"Professional, Enterprise,Community"})):
command = tools.vcvars_command(settings=fake_settings, compiler_version="15")
self.assertNotIn("BuildTools", command)
self.assertIn("VC/Auxiliary/Build/vcvarsall.bat", command)
self.assertIn("Microsoft Visual Studio\\2017\\Community", command)
self.assertIn("VSCMD_START_DIR", command)
# With VS 14 order of preference does not apply
command = tools.vcvars_command(settings=fake_settings, compiler_version="14")
self.assertNotIn("VSCMD_START_DIR", command)
self.assertIn("VC/vcvarsall.bat", command)
self.assertIn("Microsoft Visual Studio 14.0\\", command)
def build_test(self):
conan_build_vs = """
from conans import ConanFile, MSBuild, tools
class HelloConan(ConanFile):
name = "Hello"
version = "1.2.1"
settings = "os", "build_type", "arch", "compiler"
export_source = "*"
def build(self):
msbuild = MSBuild(self)
msbuild.build("MyProject.sln", upgrade_project=False)
"""
client = TestClient()
files = get_vs_project_files()
files["conanfile.py"] = conan_build_vs
client.save(files)
with(tools.environment_append({"CONAN_PRINT_RUN_COMMANDS": "1"})):
with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE": "BuildTools"})):
client.run("install .")
client.run("build .")
self.assertIn("BuildTools", client.out)
conan_build_vs = conan_build_vs.replace("upgrade_project=False", "upgrade_project=True")
files["conanfile.py"] = conan_build_vs
client.save(files)
with(tools.environment_append({"CONAN_VS_INSTALLATION_PREFERENCE":"BuildTools",
"CONAN_SKIP_VS_PROJECTS_UPGRADE":"True"})):
client.run("install .")
client.run("build .")
self.assertIn("BuildTools", client.out)
| 45.472826 | 126 | 0.676826 | 7,998 | 0.955898 | 0 | 0 | 0 | 0 | 0 | 0 | 2,802 | 0.334887 |
817dad7fd6fc56d0d2967576e42cee9331599cf9 | 7,797 | py | Python | pinliner/pinliner.py | minazukie/pinliner | 4d1f879a8df2b03e7335536735840274bbb26416 | [
"Apache-2.0"
]
| 53 | 2016-03-29T10:30:41.000Z | 2022-03-23T17:49:38.000Z | pinliner/pinliner.py | minazukie/pinliner | 4d1f879a8df2b03e7335536735840274bbb26416 | [
"Apache-2.0"
]
| 4 | 2017-07-21T15:21:54.000Z | 2022-03-17T19:51:07.000Z | pinliner/pinliner.py | minazukie/pinliner | 4d1f879a8df2b03e7335536735840274bbb26416 | [
"Apache-2.0"
]
| 9 | 2017-07-21T18:05:45.000Z | 2022-01-15T19:57:00.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import argparse
import json
import os
from pinliner import __version__
import sys
TEMPLATE_FILE = 'importer.template'
TEMPLATE_PATTERN = '${CONTENTS}'
def output(cfg, what, newline=True):
# We need indentation for PEP8
cfg.outfile.write(what)
if newline:
cfg.outfile.write(os.linesep)
def process_file(cfg, base_dir, package_path):
if cfg.tagging:
output(cfg, '<tag:' + package_path + '>')
path = os.path.splitext(package_path)[0].replace(os.path.sep, '.')
package_start = cfg.outfile.tell()
full_path = os.path.join(base_dir, package_path)
with open(full_path, 'r') as f:
# Read the whole file
code = f.read()
# Insert escape character before ''' since we'll be using ''' to insert
# the code as a string
output(cfg, code.replace("'''", r"\'''"), newline=cfg.tagging)
package_end = cfg.outfile.tell()
is_package = 1 if path.endswith('__init__') else 0
if is_package:
path = path[:-9]
# Get file timestamp
timestamp = int(os.path.getmtime(full_path))
return path, is_package, package_start, package_end, timestamp
def template(cfg):
template_path = os.path.join(os.path.dirname(__file__), TEMPLATE_FILE)
with open(template_path) as f:
template = f.read()
prefix_end = template.index(TEMPLATE_PATTERN)
prefix_data = template[:prefix_end].replace('%{FORCE_EXC_HOOK}',
str(cfg.set_hook))
prefix_data = prefix_data.replace('%{DEFAULT_PACKAGE}',
cfg.default_package)
cfg.outfile.write(prefix_data)
postfix_begin = prefix_end + len(TEMPLATE_PATTERN)
return template[postfix_begin:]
def process_directory(cfg, base_dir, package_path):
files = []
contents = os.listdir(os.path.join(base_dir, package_path))
for content in contents:
next_path = os.path.join(package_path, content)
path = os.path.join(base_dir, next_path)
if is_module(path):
files.append(process_file(cfg, base_dir, next_path))
elif is_package(path):
files.extend(process_directory(cfg, base_dir, next_path))
return files
def process_files(cfg):
# template would look better as a context manager
postfix = template(cfg)
files = []
output(cfg, "'''")
for package_path in cfg.packages:
base_dir, module_name = os.path.split(package_path)
files.extend(process_directory(cfg, base_dir, module_name))
output(cfg, "'''")
# Transform the list into a dictionary
inliner_packages = {data[0]: data[1:] for data in files}
# Generate the references to the positions of the different packages and
# modules inside the main file.
# We don't use indent to decrease the number of bytes in the file
data = json.dumps(inliner_packages)
output(cfg, 2 * os.linesep + 'inliner_packages = ', newline=False)
data = data.replace('],', '],' + os.linesep + ' ')
data = data.replace('[', '[' + os.linesep + 8 * ' ')
data = '%s%s %s%s%s' % (data[0], os.linesep, data[1:-1], os.linesep,
data[-1])
output(cfg, data)
# No newline on last line, as we want output file to be PEP8 compliant.
output(cfg, postfix, newline=False)
cfg.outfile.close()
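

# For orientation (illustrative numbers, not real output): the generated
# `inliner_packages` dict maps each dotted module path to
# [is_package, start_offset, end_offset, timestamp], where the offsets
# delimit that module's source inside the output file, e.g.:
#
#     inliner_packages = {
#         "my_package": [1, 1034, 1311, 1509687577],
#         "my_package.file_a": [0, 1311, 2914, 1509687577]
#     }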
def parse_args():
class MyParser(argparse.ArgumentParser):
"""Class to print verbose help on error."""
def error(self, message):
self.print_help()
sys.stderr.write('\nERROR: %s\n' % message)
sys.exit(2)
general_description = """Pinliner - Python Inliner (Version %s)
This tool allows you to merge all files that comprise a Python package into
a single file and be able to use this single file as if it were a package.
Imports will work as usual so if you have a package structure like:
.
└── [my_package]
├── file_a.py
├── [sub_package]
│ ├── file_b.py
│ └── __init__.py
├── __init__.py
And you execute:
$ mkdir test
$ pinliner my_package test/my_package.py
$ cd test
$ python
You'll be able to use this file as if it were the real package:
>>> import my_package
>>> from my_package import file_a as a_file
>>> from my_package.sub_package import file_b
And __init__.py contents will be executed as expected when importing
my_package and you'll be able to access its contents like you would with your
normal package. Modules will also behave as usual.
By default there is no visible separation between the different modules'
source code, but one can be enabled for clarity with option --tag, which will
include a newline and a <tag:file_path> tag before each of the source files.
""" % __version__
general_epilog = None
parser = MyParser(description=general_description,
epilog=general_epilog, argument_default='',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('packages', nargs='+', help='Packages to inline.')
parser.add_argument('--version', action='version', version=__version__)
parser.add_argument('-o', '--outfile', nargs='?',
type=argparse.FileType('w'),
default=sys.stdout, help='Output file.')
parser.add_argument('--set-except', default=None, dest='set_hook',
action='store_true',
help='Force setting handler for uncaught exceptions.')
parser.add_argument('--no-except', default=None, dest='set_hook',
action='store_false',
help="Don't set handler for uncaught exceptions.")
parser.add_argument('--tag', default=False, dest='tagging',
action='store_true',
help="Mark with <tag:file_path> each added file.")
parser.add_argument('-d', '--default-pkg', default=None,
dest='default_package',
help='Define the default package when multiple '
'packages are inlined.')
cfg = parser.parse_args()
# If user didn't pass a default package determine one ourselves.
if cfg.default_package is None:
# For single package file default is the package, for multiple packaged
# files default is none (act as a bundle).
def_file = cfg.packages[0] if len(cfg.packages) == 1 else ''
cfg.default_package = def_file
return cfg
def is_module(module):
# This validation is poor, but good enough for now
return os.path.isfile(module) and module.endswith('.py')
def is_package(package):
init_file = os.path.join(package, '__init__.py')
return os.path.isdir(package) and os.path.isfile(init_file)
def validate_args(cfg):
missing = False
# This is weird now, but in the future we'll allow to inline multiple
# packages
for package in cfg.packages:
if not is_package(package):
            sys.stderr.write('ERROR: %s is not a python package\n' % package)
missing = True
if missing:
sys.exit(1)
if cfg.default_package:
        if cfg.default_package not in cfg.packages:
            sys.stderr.write('ERROR: %s is not a valid default package\n' %
                             cfg.default_package)
            sys.exit(2)
# Convert the default package from path to package
cfg.default_package = os.path.split(cfg.default_package)[1]
def main():
cfg = parse_args()
validate_args(cfg)
process_files(cfg)
if __name__ == '__main__':
main()
# =============================================================================
# File: list_s3_buckets.py | repo: MarijaKalebota/aws-playground | license: MIT
# =============================================================================
from dotenv import load_dotenv
load_dotenv()
import os
import boto3
# Credentials are read from the .env file loaded above; the commented-out
# variant would instead use the default AWS credential chain.
# s3 = boto3.resource('s3')
s3 = boto3.resource('s3', aws_access_key_id=os.environ.get("AWS_KEY_ID"),
                    aws_secret_access_key=os.environ.get("AWS_SECRET_KEY"))
for bucket in s3.buckets.all():
print(bucket.name)
# =============================================================================
# File: GAScore/testbench/hold_buffer.py | repo: sharm294/shoal | license: MIT
# =============================================================================
import os
from sonar.testbench import Testbench, Module, TestVector, Thread
from sonar.interfaces import AXIS
from sonar_strToInt import strToInt
hold_buffer = Testbench.default('hold_buffer')
filepath = os.path.join(os.path.dirname(__file__), 'build/hold_buffer/')
dut = Module.default("DUT")
dut.add_clock_port('ap_clk', '20ns')
dut.add_reset_port('ap_rst_n')
dut.add_port('dataRelease_V', 'input', 16)
axis_input = AXIS('axis_input', 'slave', 'ap_clk', c_struct='axis_word', c_stream='uaxis_l')
axis_input.port.init_channels('tkeep', 64, True)
dut.add_interface(axis_input)
axis_output = AXIS('axis_output', 'master', 'ap_clk', c_struct='axis_word', c_stream='uaxis_l')
axis_output.port.init_channels('tkeep', 64, True)
dut.add_interface(axis_output)
hold_buffer.add_module(dut)
################################################################################
# Test Vectors
################################################################################
# Initialization thread (added to each test vector to reset everything)
initT = Thread()
initT.init_signals()
initT.wait_negedge('ap_clk')
initT.add_delay('40ns')
initT.set_signal('ap_rst_n', 1)
initT.set_signal('axis_output_tready', 1)
#-------------------------------------------------------------------------------
# Release A: assert dataRelease_V, then write the input words
#-------------------------------------------------------------------------------
Release_A = TestVector()
Release_A.add_thread(initT)
rA_t1 = Thread()
rA_t1.add_delay('100ns')
rA_t1.init_timer()
rA_t1.set_signal('dataRelease_V', 1)
axis_input.writes(rA_t1, [
{"tdata": 0xDEF, "callTB": 1},
{"tdata": 0xFED, "callTB": 1},
])
Release_A.add_thread(rA_t1)
rA_t2 = Thread()
axis_output.read(rA_t2, 0xDEF)
axis_output.read(rA_t2, 0xFED)
rA_t2.print_elapsed_time("Release_A")
rA_t2.end_vector()
Release_A.add_thread(rA_t2)
#-------------------------------------------------------------------------------
# Medium Message A
#
#
#-------------------------------------------------------------------------------
Release_B = TestVector()
Release_B.add_thread(initT)
rB_t1 = Thread()
rB_t1.add_delay('100ns')
rB_t1.init_timer()
axis_input.writes(rB_t1, [
{"tdata": 0xDEF, "callTB": 1},
{"tdata": 0xFED, "callTB": 1},
])
rB_t1.set_signal('dataRelease_V', 1)
Release_B.add_thread(rB_t1)
rB_t2 = Thread()
axis_output.read(rB_t2, 0xDEF)
axis_output.read(rB_t2, 0xFED)
rB_t2.print_elapsed_time("Release_B")
rB_t2.end_vector()
Release_B.add_thread(rB_t2)
hold_buffer.add_test_vector(Release_A)
hold_buffer.add_test_vector(Release_B)
hold_buffer.generateTB(filepath, 'all')
# =============================================================================
# File: vaxtools/utils/pair.py | repo: menis/vaxtools | license: MIT
# =============================================================================
#!/usr/bin/env python
# filename: pair.py
#
# Copyright (c) 2015 Bryan Briney
# License: The MIT license (http://opensource.org/licenses/MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import copy
import sys
import traceback
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
from abtools import germlines
from abtools.alignment import global_alignment
from abtools.sequence import Sequence
class Pair(object):
'''
Holds a pair of sequences, corresponding to HC and LC of a single mAb.
Input is a list of dicts, with each dict containing sequence information from a single
chain, formatted as would be returned from a query on a MongoDB database containing
AbStar output.
'''
def __init__(self, seqs, name=None, h_selection_func=None, l_selection_func=None):
self._seqs = seqs
self._heavy = None
self._light = None
self._heavies = [s for s in seqs if s['chain'] == 'heavy']
self._lights = [s for s in seqs if s['chain'] in ['kappa', 'lambda']]
self._name = name
self._fasta = None
self._sample = None
self._subject = None
self._group = None
self._experiment = None
self._timepoint = None
self._is_pair = None
self._vrc01_like = None
self._lineage = None
self._select_heavy = h_selection_func
self._select_light = l_selection_func
def __eq__(self, other):
return (self.heavy, self.light) == (other.heavy, other.light)
def __ne__(self, other):
return not self == other
    def __hash__(self):
return hash((self.heavy, self.light))
@property
def heavy(self):
if self._heavy is None:
# self._heavies = [s for s in self._seqs if s['chain'] == 'heavy']
if len(self._heavies) > 0:
if self._select_heavy is not None:
self._heavy = Sequence(self._select_heavy(self._heavies))
else:
self._heavy = Sequence(self._heavies[0])
else:
self._heavy = None
return self._heavy
@heavy.setter
def heavy(self, heavy):
self._heavy = heavy
@property
def light(self):
if self._light is None:
# self._lights = [s for s in self._seqs if s['chain'] in ['kappa', 'lambda']]
if len(self._lights) > 0:
if self._select_light is not None:
self._light = Sequence(self._select_light(self._lights))
else:
self._light = Sequence(self._lights[0])
else:
self._light = None
return self._light
@light.setter
def light(self, light):
self._light = light
@property
def is_pair(self):
if all([self.heavy is not None, self.light is not None]):
return True
return False
@property
def lineage(self):
if self._lineage is None:
self._lineage = self.heavy['clonify']['id']
return self._lineage
@property
def vrc01_like(self):
if self._vrc01_like is None:
if any([self.heavy is None, self.light is None]):
self._vrc01_like = False
else:
self._vrc01_like = all([self.heavy['v_gene']['gene'] == 'IGHV1-2', self.light['cdr3_len'] == 5])
return self._vrc01_like
@property
def name(self):
if self._name is None:
if self.heavy is not None:
self._name = self.heavy['seq_id']
elif self.light is not None:
self._name = self.light['seq_id']
return self._name
@name.setter
def name(self, name):
self._name = name
@property
def sample(self):
if self._sample is None:
slist = []
if self.experiment is not None:
slist.append(str(self.experiment))
if self.group is not None:
slist.append(str(self.group))
if self.subject is not None:
slist.append(str(self.subject))
if self.timepoint is not None:
slist.append(str(self.timepoint))
if slist:
self._sample = '|'.join(slist)
return self._sample
@property
def subject(self):
if self._subject is None:
if self.heavy is not None and 'subject' in list(self.heavy.keys()):
self._subject = self.heavy['subject']
elif self.light is not None and 'subject' in list(self.light.keys()):
self._subject = self.light['subject']
return self._subject
@subject.setter
def subject(self, subject):
self._subject = subject
@property
def group(self):
if self._group is None:
if self.heavy is not None and 'group' in list(self.heavy.keys()):
self._group = self.heavy['group']
elif self.light is not None and 'group' in list(self.light.keys()):
self._group = self.light['group']
return self._group
@group.setter
def group(self, group):
self._group = group
@property
def experiment(self):
if self._experiment is None:
if self.heavy is not None and 'experiment' in list(self.heavy.keys()):
self._experiment = self.heavy['experiment']
elif self.light is not None and 'experiment' in list(self.light.keys()):
self._experiment = self.light['experiment']
return self._experiment
@experiment.setter
def experiment(self, experiment):
self._experiment = experiment
@property
def timepoint(self):
if self._timepoint is None:
if self.heavy is not None and 'timepoint' in list(self.heavy.keys()):
self._timepoint = self.heavy['timepoint']
elif self.light is not None and 'timepoint' in list(self.light.keys()):
self._timepoint = self.light['timepoint']
return self._timepoint
@timepoint.setter
def timepoint(self, timepoint):
self._timepoint = timepoint
def refine(self, heavy=True, light=True, species='human'):
for seq in [s for s in [self.heavy, self.light] if s is not None]:
try:
self.remove_ambigs(seq)
self._refine_v(seq, species)
self._refine_j(seq, species)
self._retranslate(seq)
            except Exception:
                print('REFINEMENT FAILED: {}, {} chain'.format(seq['seq_id'], seq['chain']))
                print(traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1]))
@staticmethod
def remove_ambigs(seq):
# fix Ns in the nucleotide sequence
vdj = ''
for s, g in zip(seq['vdj_nt'], seq['vdj_germ_nt']):
if s.upper() == 'N':
vdj += g
else:
vdj += s
seq['vdj_nt'] = vdj
# fix Xs in the amino acid sequence
vdj = ''
for s, g in zip(seq['vdj_aa'], seq['vdj_germ_aa']):
if s.upper() == 'X':
vdj += g
else:
vdj += s
seq['vdj_aa'] = vdj
@staticmethod
def _refine_v(seq, species):
'''
        Completes the 5' end of a truncated sequence with germline nucleotides.
Input is a MongoDB dict (seq) and the species.
'''
vgerm = germlines.get_germline(seq['v_gene']['full'], species)
aln = global_alignment(seq['vdj_nt'], vgerm)
prepend = ''
for s, g in zip(aln.aligned_query, aln.aligned_target):
if s != '-':
break
else:
prepend += g
seq['vdj_nt'] = prepend + seq['vdj_nt']
@staticmethod
def _refine_j(seq, species):
'''
        Completes the 3' end of a truncated sequence with germline nucleotides.
Input is a MongoDB dict (seq) and the species.
'''
jgerm = germlines.get_germline(seq['j_gene']['full'], species)
aln = global_alignment(seq['vdj_nt'], jgerm)
append = ''
for s, g in zip(aln.aligned_query[::-1], aln.aligned_target[::-1]):
if s != '-':
break
else:
append += g
seq['vdj_nt'] = seq['vdj_nt'] + append[::-1]
@staticmethod
def _retranslate(seq):
'''
Retranslates a nucleotide sequence following refinement.
Input is a Pair sequence (basically a dict of MongoDB output).
'''
if len(seq['vdj_nt']) % 3 != 0:
trunc = len(seq['vdj_nt']) % 3
seq['vdj_nt'] = seq['vdj_nt'][:-trunc]
seq['vdj_aa'] = Seq(seq['vdj_nt'], generic_dna).translate()
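    # Worked example of the truncation above (added note): a 20-nt sequence has
    # 20 % 3 == 2 leftover bases, so the last 2 bases are dropped, leaving
    # 18 nt that translate cleanly into 6 amino acids.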
def fasta(self, key='vdj_nt', append_chain=True):
'''
Returns the sequence pair as a fasta string. If the Pair object contains
both heavy and light chain sequences, both will be returned as a single string.
By default, the fasta string contains the 'vdj_nt' sequence for each chain. To change,
use the <key> option to select an alternate sequence.
By default, the chain (heavy or light) will be appended to the sequence name:
>MySequence_heavy
To just use the pair name (which will result in duplicate sequence names for Pair objects
with both heavy and light chains), set <append_chain> to False.
'''
fastas = []
for s, chain in [(self.heavy, 'heavy'), (self.light, 'light')]:
if s is not None:
c = '_{}'.format(chain) if append_chain else ''
fastas.append('>{}{}\n{}'.format(s['seq_id'], c, s[key]))
return '\n'.join(fastas)
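# Usage sketch (added note, not in the original module): building a Pair from
# hand-made dicts and emitting FASTA. The field values are hypothetical
# placeholders; real records come from a MongoDB collection of AbStar output.
#
#   seqs = [{'chain': 'heavy', 'seq_id': 'mAb1', 'vdj_nt': 'ATGGTGCAA'},
#           {'chain': 'kappa', 'seq_id': 'mAb1', 'vdj_nt': 'GACATCCAG'}]
#   pair = Pair(seqs)
#   print(pair.fasta(key='vdj_nt'))
#   # >mAb1_heavy
#   # ATGGTGCAA
#   # >mAb1_light
#   # GACATCCAG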
def get_pairs(db, collection, experiment=None, subject=None, group=None, name='seq_id',
delim=None, delim_occurance=1, pairs_only=False):
'''
Gets sequences and assigns them to the appropriate mAb pair, based on the sequence name.
Inputs:
::db:: is a pymongo database connection object
::collection:: is the collection name, as a string
If ::subject:: is provided, only sequences with a 'subject' field matching ::subject:: will
be included. ::subject:: can be either a single subject (as a string) or an iterable
(list or tuple) of subject strings.
If ::group:: is provided, only sequences with a 'group' field matching ::group:: will
be included. ::group:: can be either a single group (as a string) or an iterable
(list or tuple) of group strings.
::name:: is the dict key of the field to be used to group the sequences into pairs.
Default is 'seq_id'
::delim:: is an optional delimiter used to truncate the contents of the ::name:: field.
Default is None, which results in no name truncation.
::delim_occurance:: is the occurance of the delimiter at which to trim. Trimming is performed
as delim.join(name.split(delim)[:delim_occurance]), so setting delim_occurance to -1 will
trucate after the last occurance of delim. Default is 1.
::pairs_only:: setting to True results in only truly paired sequences (pair.is_pair == True)
will be returned. Default is False.
Returns a list of Pair objects, one for each mAb pair.
'''
match = {}
if subject is not None:
if type(subject) in (list, tuple):
match['subject'] = {'$in': subject}
elif type(subject) in (str, str):
match['subject'] = subject
if group is not None:
if type(group) in (list, tuple):
match['group'] = {'$in': group}
elif type(group) in (str, str):
match['group'] = group
if experiment is not None:
if type(experiment) in (list, tuple):
match['experiment'] = {'$in': experiment}
elif type(experiment) in (str, str):
match['experiment'] = experiment
seqs = list(db[collection].find(match))
return assign_pairs(seqs, name=name, delim=delim,
delim_occurance=delim_occurance, pairs_only=pairs_only)
def assign_pairs(seqs, name='seq_id', delim=None, delim_occurance=1, pairs_only=False):
'''
Assigns sequences to the appropriate mAb pair, based on the sequence name.
Inputs:
::seqs:: is a list of dicts, of the format returned by querying a MongoDB containing
Abstar output.
::name:: is the dict key of the field to be used to group the sequences into pairs.
Default is 'seq_id'
::delim:: is an optional delimiter used to truncate the contents of the ::name:: field.
Default is None, which results in no name truncation.
    ::delim_occurance:: is the occurrence of the delimiter at which to trim. Trimming is performed
    as delim.join(name.split(delim)[:delim_occurance]), so setting delim_occurance to -1 will
    truncate after the last occurrence of delim. Default is 1.
::pairs_only:: setting to True results in only truly paired sequences (pair.is_pair == True)
will be returned. Default is False.
Returns a list of Pair objects, one for each mAb pair.
'''
pdict = {}
for s in seqs:
if delim is not None:
pname = delim.join(s[name].split(delim)[:delim_occurance])
else:
pname = s[name]
if pname not in pdict:
pdict[pname] = [s, ]
else:
pdict[pname].append(s)
pairs = [Pair(pdict[n], name=n) for n in list(pdict.keys())]
if pairs_only:
pairs = [p for p in pairs if p.is_pair]
return pairs
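# Name-trimming sketch (added note): with delim='_' and delim_occurance=1, a
# hypothetical sequence named 'PlateA_H9_heavy' is grouped under 'PlateA';
# with delim_occurance=2 it is grouped under 'PlateA_H9', because the pair
# name is computed as delim.join(name.split(delim)[:delim_occurance]).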
def deduplicate(pairs, aa=False, ignore_primer_regions=False):
'''
Removes duplicate sequences from a list of Pair objects.
If a Pair has heavy and light chains, both chains must identically match heavy and light chains
from another Pair to be considered a duplicate. If a Pair has only a single chain,
identical matches to that chain will cause the single chain Pair to be considered a duplicate,
even if the comparison Pair has both chains.
Note that identical sequences are identified by simple string comparison, so sequences of
different length that are identical over the entirety of the shorter sequence are not
considered duplicates.
By default, comparison is made on the nucleotide sequence. To use the amino acid sequence instead,
set aa=True.
'''
nr_pairs = []
just_pairs = [p for p in pairs if p.is_pair]
single_chains = [p for p in pairs if not p.is_pair]
_pairs = just_pairs + single_chains
for p in _pairs:
duplicates = []
for nr in nr_pairs:
identical = True
vdj = 'vdj_aa' if aa else 'vdj_nt'
offset = 4 if aa else 12
if p.heavy is not None:
if nr.heavy is None:
identical = False
else:
heavy = p.heavy[vdj][offset:-offset] if ignore_primer_regions else p.heavy[vdj]
nr_heavy = nr.heavy[vdj][offset:-offset] if ignore_primer_regions else nr.heavy[vdj]
if heavy != nr_heavy:
identical = False
if p.light is not None:
if nr.light is None:
identical = False
else:
light = p.light[vdj][offset:-offset] if ignore_primer_regions else p.light[vdj]
nr_light = nr.light[vdj][offset:-offset] if ignore_primer_regions else nr.light[vdj]
if light != nr_light:
identical = False
duplicates.append(identical)
if any(duplicates):
continue
else:
nr_pairs.append(p)
return nr_pairs
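# Note on ignore_primer_regions (added note): with aa=False the comparison
# skips 12 nt on each end of vdj_nt; with aa=True it skips 4 aa on each end of
# vdj_aa, so sequences that differ only in their primer-encoded termini are
# still treated as duplicates.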
def refine(pairs, heavy=True, light=True, species='human'):
refined_pairs = copy.deepcopy(pairs)
for p in refined_pairs:
p.refine(heavy, light, species)
return refined_pairs
# =============================================================================
# File: t3f/riemannian.py | repo: robol/t3f | license: MIT
# =============================================================================
import tensorflow as tf
from t3f.tensor_train import TensorTrain
from t3f.tensor_train_batch import TensorTrainBatch
from t3f import shapes
from t3f import decompositions
def project_sum(what, where, weights=None):
"""Project sum of `what` TTs on the tangent space of `where` TT.
project_sum(what, x) = P_x(what)
project_sum(batch_what, x) = P_x(\sum_i batch_what[i])
project_sum(batch_what, x, weights) = P_x(\sum_j weights[j] * batch_what[j])
This function implements the algorithm from the paper [1], theorem 3.1.
[1] C. Lubich, I. Oseledets and B. Vandereycken, Time integration of
Tensor Trains.
Args:
what: TensorTrain or TensorTrainBatch. In the case of batch returns
projection of the sum of elements in the batch.
where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project
weights: python list or tf.Tensor of numbers or None, weights of the sum
Returns:
a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()
Complexity:
    O(d r_where^3 n) for orthogonalizing the TT-cores of where
+O(batch_size d r_what r_where n (r_what + r_where))
d is the number of TT-cores (what.ndims());
r_what is the largest TT-rank of what max(what.get_tt_rank())
r_where is the largest TT-rank of where
n is the size of the axis dimension of what and where e.g.
for a tensor of size 4 x 4 x 4, n is 4;
for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12
"""
# Always work with batch of TT objects for simplicity.
what = shapes.expand_batch_dim(what)
if weights is not None:
weights = tf.convert_to_tensor(weights, dtype=where.dtype)
if not isinstance(where, TensorTrain):
raise ValueError('The first argument should be a TensorTrain object, got '
'"%s".' % where)
if where.get_raw_shape() != what.get_raw_shape():
raise ValueError('The shapes of the tensor we want to project and of the '
'tensor on which tangent space we want to project should '
'match, got %s and %s.' %
(where.get_raw_shape(),
what.get_raw_shape()))
dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
what.dtype.is_compatible_with(where.dtype))
if not dtypes_compatible:
raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
(where.dtype,
what.dtype))
left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
where)
right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
left_tangent_space_tens, left_to_right=False)
ndims = where.ndims()
dtype = where.dtype
raw_shape = shapes.lazy_raw_shape(where)
batch_size = shapes.lazy_batch_size(what)
right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)
# For einsum notation.
mode_str = 'ij' if where.is_tt_matrix() else 'i'
right_rank_dim = where.right_tt_rank_dim
left_rank_dim = where.left_tt_rank_dim
if weights is not None:
weights_shape = weights.get_shape()
output_is_batch = len(weights_shape) > 1 and weights_shape[1] > 1
else:
output_is_batch = False
output_batch_str = 'o' if output_is_batch else ''
if output_is_batch:
right_rank_dim += 1
left_rank_dim += 1
output_batch_size = weights.get_shape()[1].value
# Prepare rhs vectors.
# rhs[core_idx] is of size
# batch_size x tensor_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
rhs = [None] * (ndims + 1)
rhs[ndims] = tf.ones((batch_size, 1, 1), dtype=dtype)
for core_idx in range(ndims - 1, 0, -1):
tens_core = what.tt_cores[core_idx]
right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
einsum_str = 'sa{0}b,sbd,c{0}d->sac'.format(mode_str)
rhs[core_idx] = tf.einsum(einsum_str, tens_core, rhs[core_idx + 1],
right_tang_core)
# Prepare lhs vectors.
# lhs[core_idx] is of size
# batch_size x tangent_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
lhs = [None] * (ndims + 1)
lhs[0] = tf.ones((batch_size, 1, 1), dtype=dtype)
for core_idx in range(ndims - 1):
tens_core = what.tt_cores[core_idx]
left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
einsum_str = 'sab,a{0}c,sb{0}d->scd'.format(mode_str)
lhs[core_idx + 1] = tf.einsum(einsum_str, lhs[core_idx], left_tang_core,
tens_core)
# Left to right sweep.
res_cores_list = []
for core_idx in range(ndims):
tens_core = what.tt_cores[core_idx]
left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
if core_idx < ndims - 1:
einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
einsum_str = 'a{0}b,sbc->sa{0}c'.format(mode_str)
proj_core -= tf.einsum(einsum_str, left_tang_core, lhs[core_idx + 1])
if weights is None:
einsum_str = 'sa{0}b,sbc->a{0}c'.format(mode_str)
proj_core = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
else:
einsum_str = 'sa{0}b,sbc->sa{0}c'.format(mode_str, output_batch_str)
proj_core_s = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
einsum_str = 's{1},sa{0}c->{1}a{0}c'.format(mode_str, output_batch_str)
proj_core = tf.einsum(einsum_str, weights, proj_core_s)
if core_idx == ndims - 1:
if weights is None:
einsum_str = 'sab,sb{0}c->a{0}c'.format(mode_str)
proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
else:
einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str, output_batch_str)
proj_core_s = tf.einsum(einsum_str, lhs[core_idx], tens_core)
einsum_str = 's{1},sa{0}c->{1}a{0}c'.format(mode_str, output_batch_str)
proj_core = tf.einsum(einsum_str, weights, proj_core_s)
if output_is_batch:
# Add batch dimension of size output_batch_size to left_tang_core and
# right_tang_core
extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
if where.is_tt_matrix():
extended_left_tang_core = tf.tile(extended_left_tang_core,
[output_batch_size, 1, 1, 1, 1])
extended_right_tang_core = tf.tile(extended_right_tang_core,
[output_batch_size, 1, 1, 1, 1])
else:
extended_left_tang_core = tf.tile(extended_left_tang_core,
[output_batch_size, 1, 1, 1])
extended_right_tang_core = tf.tile(extended_right_tang_core,
[output_batch_size, 1, 1, 1])
else:
extended_left_tang_core = left_tang_core
extended_right_tang_core = right_tang_core
if core_idx == 0:
res_core = tf.concat((proj_core, extended_left_tang_core),
axis=right_rank_dim)
elif core_idx == ndims - 1:
res_core = tf.concat((extended_right_tang_core, proj_core), axis=left_rank_dim)
else:
rank_1 = right_tangent_tt_ranks[core_idx]
rank_2 = left_tangent_tt_ranks[core_idx + 1]
if where.is_tt_matrix():
mode_size_n = raw_shape[0][core_idx]
mode_size_m = raw_shape[1][core_idx]
shape = [rank_1, mode_size_n, mode_size_m, rank_2]
else:
mode_size = raw_shape[0][core_idx]
shape = [rank_1, mode_size, rank_2]
if output_is_batch:
shape = [output_batch_size] + shape
zeros = tf.zeros(shape, dtype)
upper = tf.concat((extended_right_tang_core, zeros), axis=right_rank_dim)
lower = tf.concat((proj_core, extended_left_tang_core),
axis=right_rank_dim)
res_core = tf.concat((upper, lower), axis=left_rank_dim)
res_cores_list.append(res_core)
# TODO: TT-ranks.
if output_is_batch:
res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
batch_size=output_batch_size)
else:
res = TensorTrain(res_cores_list, where.get_raw_shape())
res.projection_on = where
return res
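# Usage sketch (added note, assuming the public t3f API is available):
#
#   import t3f
#   where = t3f.random_tensor((4, 4, 4), tt_rank=3)
#   what = t3f.random_tensor_batch((4, 4, 4), tt_rank=2, batch_size=5)
#   p = t3f.project_sum(what, where)                   # P_where(sum_i what[i])
#   pw = t3f.project_sum(what, where, tf.ones((5,)))   # weighted variant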
def project(what, where):
"""Project `what` TTs on the tangent space of `where` TT.
project(what, x) = P_x(what)
project(batch_what, x) = batch(P_x(batch_what[0]), ..., P_x(batch_what[N]))
This function implements the algorithm from the paper [1], theorem 3.1.
[1] C. Lubich, I. Oseledets and B. Vandereycken, Time integration of
Tensor Trains.
Args:
what: TensorTrain or TensorTrainBatch. In the case of batch returns
batch with projection of each individual tensor.
where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project
Returns:
a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()
Complexity:
O(d r_where^3 m) for orthogonalizing the TT-cores of where
+O(batch_size d r_what r_where n (r_what + r_where))
d is the number of TT-cores (what.ndims());
r_what is the largest TT-rank of what max(what.get_tt_rank())
r_where is the largest TT-rank of where
n is the size of the axis dimension of what and where e.g.
for a tensor of size 4 x 4 x 4, n is 4;
for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12
"""
if not isinstance(where, TensorTrain):
raise ValueError('The first argument should be a TensorTrain object, got '
'"%s".' % where)
if where.get_raw_shape() != what.get_raw_shape():
raise ValueError('The shapes of the tensor we want to project and of the '
'tensor on which tangent space we want to project should '
'match, got %s and %s.' %
(where.get_raw_shape(),
what.get_raw_shape()))
dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
what.dtype.is_compatible_with(where.dtype))
if not dtypes_compatible:
raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
(where.dtype,
what.dtype))
left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
where)
right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
left_tangent_space_tens, left_to_right=False)
ndims = where.ndims()
dtype = where.dtype
raw_shape = shapes.lazy_raw_shape(where)
right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)
# For einsum notation.
mode_str = 'ij' if where.is_tt_matrix() else 'i'
right_rank_dim = what.right_tt_rank_dim
left_rank_dim = what.left_tt_rank_dim
output_is_batch = isinstance(what, TensorTrainBatch)
if output_is_batch:
output_batch_size = what.batch_size
# Always work with batch of TT objects for simplicity.
what = shapes.expand_batch_dim(what)
batch_size = shapes.lazy_batch_size(what)
# Prepare rhs vectors.
# rhs[core_idx] is of size
# batch_size x tensor_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
rhs = [None] * (ndims + 1)
rhs[ndims] = tf.ones((batch_size, 1, 1), dtype=dtype)
for core_idx in range(ndims - 1, 0, -1):
tens_core = what.tt_cores[core_idx]
right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
einsum_str = 'sa{0}b,sbd,c{0}d->sac'.format(mode_str)
rhs[core_idx] = tf.einsum(einsum_str, tens_core, rhs[core_idx + 1],
right_tang_core)
# Prepare lhs vectors.
# lhs[core_idx] is of size
# batch_size x tangent_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
lhs = [None] * (ndims + 1)
lhs[0] = tf.ones((batch_size, 1, 1), dtype=dtype)
for core_idx in range(ndims - 1):
tens_core = what.tt_cores[core_idx]
left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
einsum_str = 'sab,a{0}c,sb{0}d->scd'.format(mode_str)
lhs[core_idx + 1] = tf.einsum(einsum_str, lhs[core_idx], left_tang_core,
tens_core)
# Left to right sweep.
res_cores_list = []
for core_idx in range(ndims):
tens_core = what.tt_cores[core_idx]
left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
if core_idx < ndims - 1:
einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
einsum_str = 'a{0}b,sbc->sa{0}c'.format(mode_str)
proj_core -= tf.einsum(einsum_str, left_tang_core, lhs[core_idx + 1])
if output_is_batch:
einsum_str = 'sa{0}b,sbc->sa{0}c'.format(mode_str)
else:
einsum_str = 'sa{0}b,sbc->a{0}c'.format(mode_str)
proj_core = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
if core_idx == ndims - 1:
if output_is_batch:
einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
else:
einsum_str = 'sab,sb{0}c->a{0}c'.format(mode_str)
proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
if output_is_batch:
# Add batch dimension of size output_batch_size to left_tang_core and
# right_tang_core
extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
if where.is_tt_matrix():
extended_left_tang_core = tf.tile(extended_left_tang_core,
[output_batch_size, 1, 1, 1, 1])
extended_right_tang_core = tf.tile(extended_right_tang_core,
[output_batch_size, 1, 1, 1, 1])
else:
extended_left_tang_core = tf.tile(extended_left_tang_core,
[output_batch_size, 1, 1, 1])
extended_right_tang_core = tf.tile(extended_right_tang_core,
[output_batch_size, 1, 1, 1])
else:
extended_left_tang_core = left_tang_core
extended_right_tang_core = right_tang_core
if core_idx == 0:
res_core = tf.concat((proj_core, extended_left_tang_core),
axis=right_rank_dim)
elif core_idx == ndims - 1:
res_core = tf.concat((extended_right_tang_core, proj_core), axis=left_rank_dim)
else:
rank_1 = right_tangent_tt_ranks[core_idx]
rank_2 = left_tangent_tt_ranks[core_idx + 1]
if where.is_tt_matrix():
mode_size_n = raw_shape[0][core_idx]
mode_size_m = raw_shape[1][core_idx]
shape = [rank_1, mode_size_n, mode_size_m, rank_2]
else:
mode_size = raw_shape[0][core_idx]
shape = [rank_1, mode_size, rank_2]
if output_is_batch:
shape = [output_batch_size] + shape
zeros = tf.zeros(shape, dtype)
upper = tf.concat((extended_right_tang_core, zeros), axis=right_rank_dim)
lower = tf.concat((proj_core, extended_left_tang_core),
axis=right_rank_dim)
res_core = tf.concat((upper, lower), axis=left_rank_dim)
res_cores_list.append(res_core)
# TODO: TT-ranks.
if output_is_batch:
res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
batch_size=output_batch_size)
else:
res = TensorTrain(res_cores_list, where.get_raw_shape())
res.projection_on = where
return res
def project_matmul(what, where, matrix):
"""Project `matrix` * `what` TTs on the tangent space of `where` TT.
project(what, x) = P_x(what)
project(batch_what, x) = batch(P_x(batch_what[0]), ..., P_x(batch_what[N]))
This function implements the algorithm from the paper [1], theorem 3.1.
[1] C. Lubich, I. Oseledets and B. Vandereycken, Time integration of
Tensor Trains.
Args:
what: TensorTrain or TensorTrainBatch. In the case of batch returns
batch with projection of each individual tensor.
where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project
matrix: TensorTrain, TT-matrix to multiply by what
Returns:
a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()
Complexity:
O(d r_where^3 m) for orthogonalizing the TT-cores of where
+O(batch_size d R r_what r_where (n r_what + n m R + m r_where))
d is the number of TT-cores (what.ndims());
r_what is the largest TT-rank of what max(what.get_tt_rank())
r_where is the largest TT-rank of where
matrix is of TT-rank R and of raw-shape (m, m, ..., m) x (n, n, ..., n).
"""
if not isinstance(where, TensorTrain):
raise ValueError('The first argument should be a TensorTrain object, got '
'"%s".' % where)
if where.get_raw_shape() != what.get_raw_shape():
raise ValueError('The shapes of the tensor we want to project and of the '
'tensor on which tangent space we want to project should '
'match, got %s and %s.' %
(where.get_raw_shape(),
what.get_raw_shape()))
dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
what.dtype.is_compatible_with(where.dtype))
if not dtypes_compatible:
raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
(where.dtype,
what.dtype))
left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
where)
right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
left_tangent_space_tens, left_to_right=False)
ndims = where.ndims()
dtype = where.dtype
raw_shape = shapes.lazy_raw_shape(where)
batch_size = shapes.lazy_batch_size(what)
right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)
# For einsum notation.
right_rank_dim = what.right_tt_rank_dim
left_rank_dim = what.left_tt_rank_dim
output_is_batch = isinstance(what, TensorTrainBatch)
if output_is_batch:
output_batch_size = what.batch_size
# Always work with batch of TT objects for simplicity.
what = shapes.expand_batch_dim(what)
# Prepare rhs vectors.
# rhs[core_idx] is of size
# batch_size x tensor_tt_ranks[core_idx] x matrix_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
rhs = [None] * (ndims + 1)
rhs[ndims] = tf.ones((batch_size, 1, 1, 1), dtype=dtype)
for core_idx in range(ndims - 1, 0, -1):
tens_core = what.tt_cores[core_idx]
right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
matrix_core = matrix.tt_cores[core_idx]
rhs[core_idx] = tf.einsum('bije,cikf,sdef,sajkd->sabc', matrix_core,
right_tang_core, rhs[core_idx + 1], tens_core)
# Prepare lhs vectors.
# lhs[core_idx] is of size
# batch_size x tangent_tt_ranks[core_idx] x matrix_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
lhs = [None] * (ndims + 1)
lhs[0] = tf.ones((batch_size, 1, 1, 1), dtype=dtype)
for core_idx in range(ndims - 1):
tens_core = what.tt_cores[core_idx]
left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
matrix_core = matrix.tt_cores[core_idx]
    # TODO: brute-force order of indices in lhs??
lhs[core_idx + 1] = tf.einsum('bije,aikd,sabc,scjkf->sdef', matrix_core,
left_tang_core, lhs[core_idx], tens_core)
# Left to right sweep.
res_cores_list = []
for core_idx in range(ndims):
tens_core = what.tt_cores[core_idx]
matrix_core = matrix.tt_cores[core_idx]
left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
if core_idx < ndims - 1:
proj_core = tf.einsum('scjke,sabc,bijd->saikde', tens_core,
lhs[core_idx], matrix_core)
proj_core -= tf.einsum('aikb,sbcd->saikcd', left_tang_core,
lhs[core_idx + 1])
proj_core = tf.einsum('saikcb,sbcd->saikd', proj_core, rhs[core_idx + 1])
if core_idx == ndims - 1:
# d and e dimensions take 1 value, since its the last rank.
# To make the result shape (?, ?, ?, 1), we are summing d and leaving e,
# but we could have done the opposite -- sum e and leave d.
proj_core = tf.einsum('sabc,bijd,scjke->saike', lhs[core_idx], matrix_core,
tens_core)
if output_is_batch:
# Add batch dimension of size output_batch_size to left_tang_core and
# right_tang_core
extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
extended_left_tang_core = tf.tile(extended_left_tang_core,
[output_batch_size, 1, 1, 1, 1])
extended_right_tang_core = tf.tile(extended_right_tang_core,
[output_batch_size, 1, 1, 1, 1])
else:
extended_left_tang_core = left_tang_core
extended_right_tang_core = right_tang_core
if core_idx == 0:
res_core = tf.concat((proj_core, extended_left_tang_core),
axis=right_rank_dim)
elif core_idx == ndims - 1:
res_core = tf.concat((extended_right_tang_core, proj_core),
axis=left_rank_dim)
else:
rank_1 = right_tangent_tt_ranks[core_idx]
rank_2 = left_tangent_tt_ranks[core_idx + 1]
mode_size_n = raw_shape[0][core_idx]
mode_size_m = raw_shape[1][core_idx]
shape = [rank_1, mode_size_n, mode_size_m, rank_2]
if output_is_batch:
shape = [output_batch_size] + shape
zeros = tf.zeros(shape, dtype)
upper = tf.concat((extended_right_tang_core, zeros),
axis=right_rank_dim)
lower = tf.concat((proj_core, extended_left_tang_core),
axis=right_rank_dim)
res_core = tf.concat((upper, lower), axis=left_rank_dim)
res_cores_list.append(res_core)
# TODO: TT-ranks.
if output_is_batch:
res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
batch_size=output_batch_size)
else:
res = TensorTrain(res_cores_list, where.get_raw_shape())
res.projection_on = where
return res
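# Usage sketch (added note): project_matmul(what, where, A) computes the same
# projection as project(t3f.matmul(A, what), where) for a TT-matrix A, but
# avoids materializing the rank-inflated product A * what first.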
def pairwise_flat_inner_projected(projected_tt_vectors_1,
projected_tt_vectors_2):
"""Scalar products between two batches of TTs from the same tangent space.
res[i, j] = t3f.flat_inner(projected_tt_vectors_1[i], projected_tt_vectors_1[j]).
pairwise_flat_inner_projected(projected_tt_vectors_1, projected_tt_vectors_2)
is equivalent to
pairwise_flat_inner(projected_tt_vectors_1, projected_tt_vectors_2)
, but works only on objects from the same tangent space and is much faster
than general pairwise_flat_inner.
Args:
projected_tt_vectors_1: TensorTrainBatch of tensors projected on the same
tangent space as projected_tt_vectors_2.
projected_tt_vectors_2: TensorTrainBatch.
Returns:
tf.tensor with the scalar product matrix.
Complexity:
O(batch_size^2 d r^2 n), where
d is the number of TT-cores (projected_tt_vectors_1.ndims());
r is the largest TT-rank max(projected_tt_vectors_1.get_tt_rank())
(i.e. 2 * {the TT-rank of the object we projected vectors onto}.
and n is the size of the axis dimension, e.g.
for a tensor of size 4 x 4 x 4, n is 4;
for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12.
"""
if not hasattr(projected_tt_vectors_1, 'projection_on') or \
not hasattr(projected_tt_vectors_2, 'projection_on'):
raise ValueError('Both arguments should be projections on the tangent '
'space of some other TT-object. All projection* functions '
'leave .projection_on field in the resulting TT-object '
'which is not present in the arguments you\'ve provided')
if projected_tt_vectors_1.projection_on != projected_tt_vectors_2.projection_on:
raise ValueError('Both arguments should be projections on the tangent '
'space of the same TT-object. The provided arguments are '
'projections on different TT-objects (%s and %s). Or at '
'least the pointers are different.' %
(projected_tt_vectors_1.projection_on,
projected_tt_vectors_2.projection_on))
# Always work with batches of objects for simplicity.
projected_tt_vectors_1 = shapes.expand_batch_dim(projected_tt_vectors_1)
projected_tt_vectors_2 = shapes.expand_batch_dim(projected_tt_vectors_2)
ndims = projected_tt_vectors_1.ndims()
tt_ranks = shapes.lazy_tt_ranks(projected_tt_vectors_1)
if projected_tt_vectors_1.is_tt_matrix():
right_size = tt_ranks[1] // 2
curr_core_1 = projected_tt_vectors_1.tt_cores[0]
curr_core_2 = projected_tt_vectors_2.tt_cores[0]
curr_du_1 = curr_core_1[:, :, :, :, :right_size]
curr_du_2 = curr_core_2[:, :, :, :, :right_size]
res = tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)
for core_idx in range(1, ndims):
left_size = tt_ranks[core_idx] // 2
right_size = tt_ranks[core_idx + 1] // 2
curr_core_1 = projected_tt_vectors_1.tt_cores[core_idx]
curr_core_2 = projected_tt_vectors_2.tt_cores[core_idx]
curr_du_1 = curr_core_1[:, left_size:, :, :, :right_size]
curr_du_2 = curr_core_2[:, left_size:, :, :, :right_size]
res += tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)
left_size = tt_ranks[-2] // 2
curr_core_1 = projected_tt_vectors_1.tt_cores[-1]
curr_core_2 = projected_tt_vectors_2.tt_cores[-1]
curr_du_1 = curr_core_1[:, left_size:, :, :, :]
curr_du_2 = curr_core_2[:, left_size:, :, :, :]
res += tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)
else:
# Working with TT-tensor, not TT-matrix.
right_size = tt_ranks[1] // 2
curr_core_1 = projected_tt_vectors_1.tt_cores[0]
curr_core_2 = projected_tt_vectors_2.tt_cores[0]
curr_du_1 = curr_core_1[:, :, :, :right_size]
curr_du_2 = curr_core_2[:, :, :, :right_size]
res = tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)
for core_idx in range(1, ndims):
left_size = tt_ranks[core_idx] // 2
right_size = tt_ranks[core_idx + 1] // 2
curr_core_1 = projected_tt_vectors_1.tt_cores[core_idx]
curr_core_2 = projected_tt_vectors_2.tt_cores[core_idx]
curr_du_1 = curr_core_1[:, left_size:, :, :right_size]
curr_du_2 = curr_core_2[:, left_size:, :, :right_size]
res += tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)
left_size = tt_ranks[-2] // 2
curr_core_1 = projected_tt_vectors_1.tt_cores[-1]
curr_core_2 = projected_tt_vectors_2.tt_cores[-1]
curr_du_1 = curr_core_1[:, left_size:, :, :]
curr_du_2 = curr_core_2[:, left_size:, :, :]
res += tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)
return res
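# Usage sketch (added note): for v1 = project(a, x) and v2 = project(b, x),
#
#   gram = pairwise_flat_inner_projected(v1, v2)
#
# agrees entry-wise with t3f.pairwise_flat_inner(v1, v2), but exploits the
# shared tangent-space structure for speed.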
def add_n_projected(tt_objects, coef=None):
"""Adds all input TT-objects that are projections on the same tangent space.
add_projected((a, b)) is equivalent add(a, b) for a and b that are from the
same tangent space, but doesn't increase the TT-ranks.
Args:
tt_objects: a list of TT-objects that are projections on the same tangent
space.
coef: a list of numbers or anything else convertable to tf.Tensor.
If provided, computes weighted sum. The size of this array should be
len(tt_objects) x tt_objects[0].batch_size
Returns:
TT-objects representing the sum of the tt_objects (weighted sum if coef is
provided). The TT-rank of the result equals to the TT-ranks of the arguments.
"""
for tt in tt_objects:
if not hasattr(tt, 'projection_on'):
raise ValueError('Both arguments should be projections on the tangent '
'space of some other TT-object. All projection* functions '
'leave .projection_on field in the resulting TT-object '
'which is not present in the argument you\'ve provided.')
projection_on = tt_objects[0].projection_on
for tt in tt_objects[1:]:
if tt.projection_on != projection_on:
raise ValueError('All tt_objects should be projections on the tangent '
'space of the same TT-object. The provided arguments are '
'projections on different TT-objects (%s and %s). Or at '
'least the pointers are different.' % (tt.projection_on,
projection_on))
if coef is not None:
coef = tf.convert_to_tensor(coef, dtype=tt_objects[0].dtype)
if coef.get_shape().ndims > 1:
# In batch case we will need to multiply each core by this coefficients
# along the first axis. To do it need to reshape the coefs to match
# the TT-cores number of dimensions.
some_core = tt_objects[0].tt_cores[0]
dim_array = [1] * (some_core.get_shape().ndims + 1)
dim_array[0] = coef.get_shape()[0].value
dim_array[1] = coef.get_shape()[1].value
coef = tf.reshape(coef, dim_array)
ndims = tt_objects[0].ndims()
tt_ranks = shapes.lazy_tt_ranks(tt_objects[0])
left_rank_dim = tt_objects[0].left_tt_rank_dim
right_rank_dim = tt_objects[0].right_tt_rank_dim
res_cores = []
def slice_tt_core(tt_core, left_idx, right_idx):
num_tt_core_dims = len(tt_core.get_shape())
idx = [slice(None)] * num_tt_core_dims
idx[left_rank_dim] = left_idx
idx[right_rank_dim] = right_idx
return tt_core[idx]
right_half_rank = tt_ranks[1] // 2
left_chunks = []
for obj_idx, tt in enumerate(tt_objects):
curr_core = slice_tt_core(tt.tt_cores[0], slice(None),
slice(0, right_half_rank))
if coef is not None:
curr_core *= coef[obj_idx]
left_chunks.append(curr_core)
left_part = tf.add_n(left_chunks)
first_obj_core = tt_objects[0].tt_cores[0]
right_part = slice_tt_core(first_obj_core, slice(None),
slice(right_half_rank, None))
first_core = tf.concat((left_part, right_part), axis=right_rank_dim)
res_cores.append(first_core)
for core_idx in range(1, ndims - 1):
first_obj_core = tt_objects[0].tt_cores[core_idx]
left_half_rank = tt_ranks[core_idx] // 2
right_half_rank = tt_ranks[core_idx + 1] // 2
upper_part = slice_tt_core(tt.tt_cores[core_idx], slice(0, left_half_rank),
slice(None))
lower_right_part = slice_tt_core(first_obj_core,
slice(left_half_rank, None),
slice(right_half_rank, None))
lower_left_chunks = []
for obj_idx, tt in enumerate(tt_objects):
curr_core = slice_tt_core(tt.tt_cores[core_idx],
slice(left_half_rank, None),
slice(0, right_half_rank))
if coef is not None:
curr_core *= coef[obj_idx]
lower_left_chunks.append(curr_core)
lower_left_part = tf.add_n(lower_left_chunks)
lower_part = tf.concat((lower_left_part, lower_right_part),
axis=right_rank_dim)
curr_core = tf.concat((upper_part, lower_part), axis=left_rank_dim)
res_cores.append(curr_core)
left_half_rank = tt_ranks[ndims - 1] // 2
upper_part = slice_tt_core(tt.tt_cores[-1], slice(0, left_half_rank),
slice(None))
lower_chunks = []
for obj_idx, tt in enumerate(tt_objects):
curr_core = slice_tt_core(tt.tt_cores[-1], slice(left_half_rank, None),
slice(None))
if coef is not None:
curr_core *= coef[obj_idx]
lower_chunks.append(curr_core)
lower_part = tf.add_n(lower_chunks)
last_core = tf.concat((upper_part, lower_part), axis=left_rank_dim)
res_cores.append(last_core)
raw_shape = tt_objects[0].get_raw_shape()
static_tt_ranks = tt_objects[0].get_tt_ranks()
if isinstance(tt_objects[0], TensorTrain):
res = TensorTrain(res_cores, raw_shape, static_tt_ranks)
elif isinstance(tt_objects[0], TensorTrainBatch):
res = TensorTrainBatch(res_cores, raw_shape, static_tt_ranks,
tt_objects[0].batch_size)
# Maintain the projection_on property.
res.projection_on = tt_objects[0].projection_on
return res
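# Usage sketch (added note): for p1 and p2 projected onto the same tangent
# space, add_n_projected([p1, p2]) represents the same tensor as p1 + p2, yet
# keeps the TT-ranks of the arguments instead of doubling them as ordinary
# TT addition would.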
def tangent_space_to_deltas(tt, name='t3f_tangent_space_to_deltas'):
"""Convert an element of the tangent space to deltas representation.
Tangent space elements (outputs of t3f.project) look like:
dP1 V2 ... Vd + U1 dP2 V3 ... Vd + ... + U1 ... Ud-1 dPd.
This function takes as input an element of the tangent space and converts
it to the list of deltas [dP1, ..., dPd].
Args:
tt: `TensorTrain` or `TensorTrainBatch` that is a result of t3f.project,
t3f.project_matmul, or other similar functions.
name: string, name of the Op.
Returns:
A list of delta-cores (tf.Tensors).
"""
if not hasattr(tt, 'projection_on') or tt.projection_on is None:
raise ValueError('tt argument is supposed to be a projection, but it '
'lacks projection_on field')
num_dims = tt.ndims()
left_tt_rank_dim = tt.left_tt_rank_dim
right_tt_rank_dim = tt.right_tt_rank_dim
deltas = [None] * num_dims
tt_ranks = shapes.lazy_tt_ranks(tt)
for i in range(1, num_dims - 1):
if int(tt_ranks[i] / 2) != tt_ranks[i] / 2:
raise ValueError('tt argument is supposed to be a projection, but its '
'ranks are not even.')
with tf.compat.v1.name_scope(name, values=tt.tt_cores):
for i in range(1, num_dims - 1):
r1, r2 = tt_ranks[i], tt_ranks[i + 1]
curr_core = tt.tt_cores[i]
slc = [slice(None)] * len(curr_core.shape)
slc[left_tt_rank_dim] = slice(int(r1 / 2), None)
slc[right_tt_rank_dim] = slice(0, int(r2 / 2))
deltas[i] = curr_core[slc]
slc = [slice(None)] * len(tt.tt_cores[0].shape)
slc[right_tt_rank_dim] = slice(0, int(tt_ranks[1] / 2))
deltas[0] = tt.tt_cores[0][slc]
slc = [slice(None)] * len(tt.tt_cores[0].shape)
slc[left_tt_rank_dim] = slice(int(tt_ranks[-2] / 2), None)
deltas[num_dims - 1] = tt.tt_cores[num_dims - 1][slc]
return deltas
def deltas_to_tangent_space(deltas, tt, left=None, right=None,
name='t3f_deltas_to_tangent_space'):
"""Converts deltas representation of tangent space vector to TT object.
Takes as input a list of [dP1, ..., dPd] and returns
dP1 V2 ... Vd + U1 dP2 V3 ... Vd + ... + U1 ... Ud-1 dPd.
  This function is hard to use correctly because the deltas should obey the
  so-called gauge conditions. If they don't, the function will silently return
  an incorrect result. This is why this function is not imported in __init__.
Args:
deltas: a list of deltas (essentially TT-cores) obeying the gauge
conditions.
tt: `TensorTrain` object on which the tangent space tensor represented by
delta is projected.
    left: t3f.orthogonalize_tt_cores(tt). If you have already computed it, you
      may pass it as an argument to avoid recomputing.
    right: t3f.orthogonalize_tt_cores(left, left_to_right=False). If you have
      already computed it, you may pass it as an argument to avoid recomputing.
name: string, name of the Op.
Returns:
`TensorTrain` object constructed from deltas, that is from the tangent
space at point `tt`.
"""
cores = []
dtype = tt.dtype
num_dims = tt.ndims()
  # TODO: add a cache instead of manually passing precomputed stuff?
input_tensors = list(tt.tt_cores) + list(deltas)
if left is not None:
input_tensors += list(left.tt_cores)
if right is not None:
input_tensors += list(right.tt_cores)
with tf.compat.v1.name_scope(name, values=input_tensors):
if left is None:
left = decompositions.orthogonalize_tt_cores(tt)
if right is None:
right = decompositions.orthogonalize_tt_cores(left, left_to_right=False)
left_tangent_tt_ranks = shapes.lazy_tt_ranks(left)
    right_tangent_tt_ranks = shapes.lazy_tt_ranks(right)
raw_shape = shapes.lazy_raw_shape(left)
right_rank_dim = left.right_tt_rank_dim
left_rank_dim = left.left_tt_rank_dim
is_batch_case = len(deltas[0].shape) > len(tt.tt_cores[0].shape)
if is_batch_case:
right_rank_dim += 1
left_rank_dim += 1
batch_size = deltas[0].shape.as_list()[0]
for i in range(num_dims):
left_tt_core = left.tt_cores[i]
right_tt_core = right.tt_cores[i]
if is_batch_case:
tile = [1] * len(left_tt_core.shape)
tile = [batch_size] + tile
left_tt_core = tf.tile(left_tt_core[None, ...], tile)
right_tt_core = tf.tile(right_tt_core[None, ...], tile)
if i == 0:
tangent_core = tf.concat((deltas[i], left_tt_core),
axis=right_rank_dim)
elif i == num_dims - 1:
tangent_core = tf.concat((right_tt_core, deltas[i]),
axis=left_rank_dim)
else:
rank_1 = right_tangent_tt_ranks[i]
rank_2 = left_tangent_tt_ranks[i + 1]
if tt.is_tt_matrix():
mode_size_n = raw_shape[0][i]
mode_size_m = raw_shape[1][i]
shape = [rank_1, mode_size_n, mode_size_m, rank_2]
else:
mode_size_n = raw_shape[0][i]
shape = [rank_1, mode_size_n, rank_2]
if is_batch_case:
shape = [batch_size] + shape
zeros = tf.zeros(shape, dtype=dtype)
upper = tf.concat((right_tt_core, zeros), axis=right_rank_dim)
lower = tf.concat((deltas[i], left_tt_core), axis=right_rank_dim)
tangent_core = tf.concat((upper, lower), axis=left_rank_dim)
cores.append(tangent_core)
if is_batch_case:
tangent = TensorTrainBatch(cores, batch_size=batch_size)
else:
tangent = TensorTrain(cores)
tangent.projection_on = tt
return tangent
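# Round-trip sketch (added note): for a proper projection the two conversions
# are mutually inverse up to numerics:
#
#   p = project(z, x)
#   deltas = tangent_space_to_deltas(p)
#   p2 = deltas_to_tangent_space(deltas, x)   # p2 represents the same tensor as p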
# =============================================================================
# File: dpauth/admin.py | repo: askmeaboutlo0m/website | license: MIT
# =============================================================================
from django.contrib import admin
from . import models
@admin.register(models.Username)
class UsernameAdmin(admin.ModelAdmin):
list_display = ('user', 'name', 'is_mod')
readonly_fields = ('normalized_name',)
search_fields = ('user__email', 'name')
# =============================================================================
# File: iptv_proxy/providers/beast/json_api.py | repo: sfanous/IPTVProxy | license: MIT
# =============================================================================
import logging
from iptv_proxy.providers.beast.constants import BeastConstants
from iptv_proxy.providers.iptv_provider.json_api import ProviderConfigurationJSONAPI
logger = logging.getLogger(__name__)
class BeastConfigurationJSONAPI(ProviderConfigurationJSONAPI):
__slots__ = []
_provider_name = BeastConstants.PROVIDER_NAME.lower()
| 26.615385 | 84 | 0.84104 | 140 | 0.404624 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
818971d06d80952ffaa6249ad6fd01e66412f03b | 30,912 | py | Python | tests/random/random_testing.py | jkeiren/mCRL2 | 3a60939f4e6dbc131e8ce87b1929bea9dbf90dd2 | [
"BSL-1.0"
]
| 61 | 2018-05-24T13:14:05.000Z | 2022-03-29T11:35:03.000Z | tests/random/random_testing.py | jkeiren/mCRL2 | 3a60939f4e6dbc131e8ce87b1929bea9dbf90dd2 | [
"BSL-1.0"
]
| 229 | 2018-05-28T08:31:09.000Z | 2022-03-21T11:02:41.000Z | tests/random/random_testing.py | jkeiren/mCRL2 | 3a60939f4e6dbc131e8ce87b1929bea9dbf90dd2 | [
"BSL-1.0"
]
| 28 | 2018-04-11T14:09:39.000Z | 2022-02-25T15:57:39.000Z | #!/usr/bin/env python
# Copyright 2015 Wieger Wesselink.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import os
import os.path
import random
import re
import sys
import traceback
sys.path += [os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'python'))]
import random_state_formula_generator
from random_bes_generator import make_bes
from random_pbes_generator import make_pbes
import random_process_expression
from testing import YmlTest
from text_utility import write_text
MCRL2_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
MCRL2_INSTALL_DIR = os.path.join(MCRL2_ROOT, 'install', 'bin')
def ymlfile(file):
return '{}/tests/specifications/{}.yml'.format(MCRL2_ROOT, file)
def mcrl2file(file):
return os.path.join(MCRL2_ROOT, file)
class RandomTest(YmlTest):
def __init__(self, name, ymlfile, settings):
super(RandomTest, self).__init__(name, ymlfile, [], settings)
# create input files for the random test, and add the filenames to self.inputfiles
def create_inputfiles(self, runpath = '.'):
raise NotImplementedError
# removes input files that are in the runpath directory
def remove_inputfiles(self, runpath = '.'):
for filename in self.inputfiles:
if os.path.abspath(runpath) == os.path.abspath(os.path.dirname(filename)):
os.remove(filename)
def execute(self, runpath = '.'):
self.create_inputfiles(runpath)
super(RandomTest, self).execute(runpath)
self.remove_inputfiles(runpath)
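# Subclassing sketch (added note): a concrete random test only has to supply
# create_inputfiles; everything else is inherited. The generator and yml name
# below are hypothetical:
#
#   class MyTest(RandomTest):
#       def __init__(self, name, settings):
#           super(MyTest, self).__init__(name, ymlfile('mytool'), settings)
#
#       def create_inputfiles(self, runpath='.'):
#           filename = '{0}.txt'.format(self.name)
#           write_text(filename, make_random_input())
#           self.inputfiles += [filename]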
class ProcessTest(RandomTest):
def __init__(self, name, ymlfile, settings):
super(ProcessTest, self).__init__(name, ymlfile, settings)
self.actions = ['a', 'b', 'c', 'd']
self.process_identifiers = ['P', 'Q', 'R']
self.process_size = 13
self.parallel_operator_generators = random_process_expression.default_parallel_operator_generators
self.process_expression_generators = random_process_expression.default_process_expression_generators
self.init = None
self.generate_process_parameters = False
def create_inputfiles(self, runpath = '.'):
        filename = '{0}.mcrl2'.format(self.name)
p = random_process_expression.make_process_specification(self.parallel_operator_generators, self.process_expression_generators, self.actions, self.process_identifiers, self.process_size, self.init, self.generate_process_parameters)
write_text(filename, str(p))
self.inputfiles += [filename]
# generates stochastic random processes
class StochasticProcessTest(ProcessTest):
def __init__(self, name, ymlfile, settings):
super(StochasticProcessTest, self).__init__(name, ymlfile, settings)
self.process_expression_generators = {
random_process_expression.make_action : 8,
random_process_expression.make_delta : 1,
random_process_expression.make_tau : 1,
random_process_expression.make_process_instance: 2,
random_process_expression.make_sum : 2,
random_process_expression.make_if_then : 2,
random_process_expression.make_if_then_else : 2,
random_process_expression.make_choice : 5,
random_process_expression.make_seq : 5,
random_process_expression.make_multi_action : 1,
random_process_expression.make_dist : 3,
}
# generates random process with higher probability of tau transitions
class ProcessTauTest(ProcessTest):
def __init__(self, name, testfile, settings):
super(ProcessTauTest, self).__init__(name, testfile, settings)
self.actions = ['a', 'b', 'c']
self.init = 'hide({a}, allow({a, b, c}, P || Q || R))'
self.process_expression_generators = {
random_process_expression.make_action: 8,
random_process_expression.make_delta: 1,
random_process_expression.make_tau: 4,
random_process_expression.make_process_instance: 1,
random_process_expression.make_sum: 0,
random_process_expression.make_if_then: 0,
random_process_expression.make_if_then_else: 0,
random_process_expression.make_choice: 5,
random_process_expression.make_seq: 5,
random_process_expression.make_multi_action: 1,
random_process_expression.make_dist: 0,
}
class AlphabetReduceTest(ProcessTest):
def __init__(self, name, settings):
super(AlphabetReduceTest, self).__init__(name, ymlfile('alphabet-reduce'), settings)
self.actions = ['a', 'b', 'c', 'd', 'e']
class LpsSuminstTest(ProcessTest):
def __init__(self, name, settings):
super(LpsSuminstTest, self).__init__(name, ymlfile('lpssuminst'), settings)
class LpsSumelmTest(ProcessTest):
def __init__(self, name, settings):
super(LpsSumelmTest, self).__init__(name, ymlfile('lpssumelm'), settings)
class LpsParelmTest(ProcessTest):
def __init__(self, name, settings):
super(LpsParelmTest, self).__init__(name, ymlfile('lpsparelm'), settings)
self.generate_process_parameters = True
class LpsOnePointRuleRewriteTest(ProcessTest):
def __init__(self, name, settings):
super(LpsOnePointRuleRewriteTest, self).__init__(name, ymlfile('lpstransform'), settings)
self.add_command_line_options('t2', ['-alps-one-point-rule-rewriter'])
class LpsConfcheckTest(ProcessTauTest):
def __init__(self, name, confluence_type, settings):
self.option_map = { 'commutative' : 'C',
'commutative-disjoint' : 'c',
'disjoint' : 'd',
'triangular' : 'T',
'trivial' : 'Z'
}
assert confluence_type in self.option_map
super(LpsConfcheckTest, self).__init__(name, ymlfile('lpsconfcheck'), settings)
self.add_command_line_options('t2', ['-x' + self.option_map[confluence_type]])
class LtscompareTest(ProcessTauTest):
def __init__(self, name, equivalence_type, settings):
assert equivalence_type in ['bisim', 'bisim-gv', 'bisim-gjkw', 'branching-bisim', 'branching-bisim-gv', 'branching-bisim-gjkw', 'dpbranching-bisim', 'dpbranching-bisim-gv', 'dpbranching-bisim-gjkw', 'weak-bisim', 'dpweak-bisim', 'sim', 'ready-sim' , 'trace', 'weak-trace']
super(LtscompareTest, self).__init__(name, ymlfile('ltscompare'), settings)
self.add_command_line_options('t3', ['-e' + equivalence_type])
self.add_command_line_options('t4', ['-e' + equivalence_type])
class StochasticLtscompareTest(StochasticProcessTest):
def __init__(self, name, settings):
super(StochasticLtscompareTest, self).__init__(name, ymlfile('stochastic-ltscompare'), settings)
class BisimulationTest(ProcessTauTest):
def __init__(self, name, equivalence_type, settings):
assert equivalence_type in ['bisim', 'bisim-gv', 'bisim-gjkw', 'branching-bisim', 'branching-bisim-gv', 'branching-bisim-gjkw', 'weak-bisim']
super(BisimulationTest, self).__init__(name, ymlfile('bisimulation'), settings)
self.add_command_line_options('t3', ['-e' + equivalence_type])
self.add_command_line_options('t4', ['-e' + equivalence_type])
if equivalence_type in ['branching-bisim-gv', 'branching-bisim-gjkw']:
self.add_command_line_options('t7', ['-bbranching-bisim'])
elif equivalence_type in ['bisim', 'bisim-gv', 'bisim-gjkw']:
self.add_command_line_options('t7', ['-bstrong-bisim'])
else:
self.add_command_line_options('t7', ['-b' + equivalence_type])
class Lps2ltsAlgorithmsTest(ProcessTauTest):
def __init__(self, name, settings):
super(Lps2ltsAlgorithmsTest, self).__init__(name, ymlfile('lps2lts-algorithms'), settings)
# randomly choose an algorithm
actions = random.choice(['a', 'a,b', 'a,b,c'])
        # '--divergence' is deliberately excluded from the candidate options
        options = [random.choice(['--deadlock', '--nondeterminism', '--action={}'.format(actions)])]
if 'divergence' in options[0]:
tau_actions = random.choice(['', '', 'b', 'b,c'])
if tau_actions:
options.append('--tau={}'.format(tau_actions))
self.add_command_line_options('t2', options)
self.add_command_line_options('t3', options)
class LpsConstelmTest(ProcessTest):
def __init__(self, name, settings):
super(LpsConstelmTest, self).__init__(name, ymlfile('lpsconstelm'), settings)
self.generate_process_parameters = True
class LpsBinaryTest(ProcessTest):
def __init__(self, name, settings):
super(LpsBinaryTest, self).__init__(name, ymlfile('lpsbinary'), settings)
self.generate_process_parameters = True
class LpsstategraphTest(ProcessTest):
def __init__(self, name, settings):
super(LpsstategraphTest, self).__init__(name, ymlfile('lpsstategraph'), settings)
self.generate_process_parameters = True
class Lps2pbesTest(ProcessTest):
def __init__(self, name, settings):
super(Lps2pbesTest, self).__init__(name, ymlfile('lps2pbes'), settings)
def create_inputfiles(self, runpath = '.'):
super(Lps2pbesTest, self).create_inputfiles(runpath)
self.inputfiles.append(mcrl2file('examples/modal-formulas/nodeadlock.mcf'))
class Lts2pbesTest(ProcessTest):
def __init__(self, name, settings):
super(Lts2pbesTest, self).__init__(name, ymlfile('lts2pbes'), settings)
def create_inputfiles(self, runpath = '.'):
super(Lts2pbesTest, self).create_inputfiles(runpath)
self.inputfiles.append(mcrl2file('examples/modal-formulas/nodeadlock.mcf'))
class PbesTest(RandomTest):
def __init__(self, name, ymlfile, settings):
super(PbesTest, self).__init__(name, ymlfile, settings)
self.equation_count = 4
self.atom_count = 4
self.propvar_count = 3
self.use_quantifiers = True
self.use_integers = True
def create_inputfiles(self, runpath = '.'):
filename = '{0}.txt'.format(self.name)
p = make_pbes(self.equation_count, self.atom_count, self.propvar_count, self.use_quantifiers, use_integers=self.use_integers)
write_text(filename, str(p))
self.inputfiles += [filename]
# N.B. does not work yet due to unusable abstraction map
class PbesabsintheTest(PbesTest):
def __init__(self, name, settings):
super(PbesabsintheTest, self).__init__(name, ymlfile('pbesabsinthe'), settings)
# N.B. This test has been disabled, since the tool has been deprecated.
class PbesabstractTest(PbesTest):
def __init__(self, name, settings):
super(PbesabstractTest, self).__init__(name, ymlfile('pbesabstract'), settings)
class PbesbddsolveTest(PbesTest):
def __init__(self, name, settings):
super(PbesbddsolveTest, self).__init__(name, ymlfile('pbesbddsolve'), settings)
self.use_integers = False
self.use_quantifiers = False
class PbesconstelmTest(PbesTest):
def __init__(self, name, settings):
super(PbesconstelmTest, self).__init__(name, ymlfile('pbesconstelm'), settings)
class PbesparelmTest(PbesTest):
def __init__(self, name, settings):
super(PbesparelmTest, self).__init__(name, ymlfile('pbesparelm'), settings)
class PbespareqelmTest(PbesTest):
def __init__(self, name, settings):
super(PbespareqelmTest, self).__init__(name, ymlfile('pbespareqelm'), settings)
class Pbespor1Test(PbesTest):
def __init__(self, name, settings):
super(Pbespor1Test, self).__init__(name, ymlfile('pbespor1'), settings)
class Pbespor2Test(ProcessTest):
def __init__(self, name, settings):
super(Pbespor2Test, self).__init__(name, ymlfile('pbespor2'), settings)
def create_inputfiles(self, runpath = '.'):
super(Pbespor2Test, self).create_inputfiles(runpath)
        filename = '{0}.mcf'.format(self.name)
formula = random_state_formula_generator.make_modal_formula()
write_text(filename, str(formula))
self.inputfiles += [filename]
class PbesrewrTest(PbesTest):
def __init__(self, name, rewriter, settings):
super(PbesrewrTest, self).__init__(name, ymlfile('pbesrewr'), settings)
self.add_command_line_options('t2', ['-p' + rewriter])
class PbestransformTest(PbesTest):
def __init__(self, name, rewriter, settings):
super(PbestransformTest, self).__init__(name, ymlfile('pbestransform'), settings)
self.add_command_line_options('t2', ['-a' + rewriter])
class PbesinstTest(PbesTest):
def __init__(self, name, options, settings):
super(PbesinstTest, self).__init__(name, ymlfile('pbesinst'), settings)
self.add_command_line_options('t2', options)
class PbespgsolveTest(PbesTest):
def __init__(self, name, settings):
super(PbespgsolveTest, self).__init__(name, ymlfile('pbespgsolve'), settings)
class PbesstategraphTest(PbesTest):
def __init__(self, name, settings):
super(PbesstategraphTest, self).__init__(name, ymlfile('pbesstategraph'), settings)
class PbessymbolicbisimTest(PbesTest):
def __init__(self, name, settings):
super(PbessymbolicbisimTest, self).__init__(name, ymlfile('pbessymbolicbisim'), settings)
class PbessolvesymbolicTest(PbesTest):
def __init__(self, name, settings):
super(PbessolvesymbolicTest, self).__init__(name, ymlfile('pbessolvesymbolic'), settings)
class Pbes2boolTest(PbesTest):
def __init__(self, name, settings):
super(Pbes2boolTest, self).__init__(name, ymlfile('pbessolve'), settings)
class Pbes2boolDepthFirstTest(PbesTest):
def __init__(self, name, settings):
super(Pbes2boolDepthFirstTest, self).__init__(name, ymlfile('pbessolve'), settings)
self.add_command_line_options('t2', ['-zdepth-first'])
self.add_command_line_options('t3', ['-zdepth-first'])
self.add_command_line_options('t4', ['-zdepth-first'])
self.add_command_line_options('t5', ['-zdepth-first'])
self.add_command_line_options('t6', ['-zdepth-first'])
self.add_command_line_options('t7', ['-zdepth-first'])
self.add_command_line_options('t8', ['-zdepth-first'])
class Pbes2bool_counter_exampleTest(ProcessTest):
def __init__(self, name, optimization, settings):
super(Pbes2bool_counter_exampleTest, self).__init__(name, ymlfile('pbessolve-counter-example'), settings)
if optimization in [4, 5]:
self.add_command_line_options('t3', ['-l{}'.format(optimization), '--aggressive', '--prune-todo-list'])
else:
self.add_command_line_options('t3', ['-l{}'.format(optimization), '--prune-todo-list'])
def create_inputfiles(self, runpath = '.'):
super(Pbes2bool_counter_exampleTest, self).create_inputfiles(runpath)
        filename = '{0}.mcf'.format(self.name)
formula = random_state_formula_generator.make_modal_formula()
write_text(filename, str(formula))
self.inputfiles += [filename]
class Pbes_unify_parametersTest(PbesTest):
def __init__(self, name, settings):
super(Pbes_unify_parametersTest, self).__init__(name, ymlfile('pbes-unify-parameters'), settings)
class Pbes_srfTest(PbesTest):
def __init__(self, name, settings):
super(Pbes_srfTest, self).__init__(name, ymlfile('pbes-srf'), settings)
# N.B does not work due to unknown expressions (F_or)
class SymbolicExplorationTest(PbesTest):
def __init__(self, name, settings):
super(SymbolicExplorationTest, self).__init__(name, ymlfile('symbolic_exploration'), settings)
class BesTest(RandomTest):
def __init__(self, name, ymlfile, settings):
super(BesTest, self).__init__(name, ymlfile, settings)
self.equation_count = 4
self.term_size = 3
def create_inputfiles(self, runpath = '.'):
        filename = '{0}.txt'.format(self.name)
p = make_bes(self.equation_count, self.term_size)
write_text(filename, str(p))
self.inputfiles += [filename]
class BessolveTest(BesTest):
def __init__(self, name, settings):
super(BessolveTest, self).__init__(name, ymlfile('bessolve'), settings)
available_tests = {
'alphabet-reduce' : lambda name, settings: AlphabetReduceTest(name, settings) ,
'lpssuminst' : lambda name, settings: LpsSuminstTest(name, settings) ,
'lpssumelm' : lambda name, settings: LpsSumelmTest(name, settings) ,
'lpsparelm' : lambda name, settings: LpsParelmTest(name, settings) ,
'lps-quantifier-one-point' : lambda name, settings: LpsOnePointRuleRewriteTest(name, settings) ,
'lpsconfcheck-commutative' : lambda name, settings: LpsConfcheckTest(name, 'commutative', settings) ,
'lpsconfcheck-commutative-disjoint' : lambda name, settings: LpsConfcheckTest(name, 'commutative-disjoint', settings) ,
'lpsconfcheck-disjoint' : lambda name, settings: LpsConfcheckTest(name, 'disjoint', settings) ,
'lpsconfcheck-triangular' : lambda name, settings: LpsConfcheckTest(name, 'triangular', settings) ,
'lpsconfcheck-trivial' : lambda name, settings: LpsConfcheckTest(name, 'trivial', settings) ,
'lpsconstelm' : lambda name, settings: LpsConstelmTest(name, settings) ,
'lpsbinary' : lambda name, settings: LpsBinaryTest(name, settings) ,
'lps2lts-algorithms' : lambda name, settings: Lps2ltsAlgorithmsTest(name, settings) ,
'lps2pbes' : lambda name, settings: Lps2pbesTest(name, settings) ,
'lpsstategraph' : lambda name, settings: LpsstategraphTest(name, settings) ,
'lts2pbes' : lambda name, settings: Lts2pbesTest(name, settings) ,
'ltscompare-bisim' : lambda name, settings: LtscompareTest(name, 'bisim', settings) ,
'ltscompare-bisim-gv' : lambda name, settings: LtscompareTest(name, 'bisim-gv', settings) ,
'ltscompare-bisim-gjkw' : lambda name, settings: LtscompareTest(name, 'bisim-gjkw', settings) ,
'ltscompare-branching-bisim' : lambda name, settings: LtscompareTest(name, 'branching-bisim', settings) ,
'ltscompare-branching-bisim-gv' : lambda name, settings: LtscompareTest(name, 'branching-bisim-gv', settings) ,
'ltscompare-branching-bisim-gjkw' : lambda name, settings: LtscompareTest(name, 'branching-bisim-gjkw', settings) ,
'ltscompare-dpbranching-bisim' : lambda name, settings: LtscompareTest(name, 'dpbranching-bisim', settings) ,
'ltscompare-dpbranching-bisim-gv' : lambda name, settings: LtscompareTest(name, 'dpbranching-bisim-gv', settings) ,
'ltscompare-dpbranching-bisim-gjkw' : lambda name, settings: LtscompareTest(name, 'dpbranching-bisim-gjkw', settings) ,
'ltscompare-weak-bisim' : lambda name, settings: LtscompareTest(name, 'weak-bisim', settings) ,
'ltscompare-dpweak-bisim' : lambda name, settings: LtscompareTest(name, 'dpweak-bisim', settings) ,
'ltscompare-sim' : lambda name, settings: LtscompareTest(name, 'sim', settings) ,
'ltscompare-ready-sim' : lambda name, settings: LtscompareTest(name, 'ready-sim', settings) ,
'ltscompare-trace' : lambda name, settings: LtscompareTest(name, 'trace', settings) ,
'ltscompare-weak-trace' : lambda name, settings: LtscompareTest(name, 'weak-trace', settings) ,
'bisimulation-bisim' : lambda name, settings: BisimulationTest(name, 'bisim', settings) ,
'bisimulation-bisim-gv' : lambda name, settings: BisimulationTest(name, 'bisim-gv', settings) ,
'bisimulation-bisim-gjkw' : lambda name, settings: BisimulationTest(name, 'bisim-gjkw', settings) ,
'bisimulation-branching-bisim' : lambda name, settings: BisimulationTest(name, 'branching-bisim', settings) ,
'bisimulation-branching-bisim-gv' : lambda name, settings: BisimulationTest(name, 'branching-bisim-gv', settings) ,
'bisimulation-branching-bisim-gjkw' : lambda name, settings: BisimulationTest(name, 'branching-bisim-gjkw', settings) ,
'bisimulation-weak-bisim' : lambda name, settings: BisimulationTest(name, 'weak-bisim', settings) ,
'pbesconstelm' : lambda name, settings: PbesconstelmTest(name, settings) ,
'pbesparelm' : lambda name, settings: PbesparelmTest(name, settings) ,
'pbespareqelm' : lambda name, settings: PbespareqelmTest(name, settings) ,
'pbespor2' : lambda name, settings: Pbespor2Test(name, settings) ,
'pbesrewr-simplify' : lambda name, settings: PbesrewrTest(name, 'simplify', settings) ,
'pbesrewr-pfnf' : lambda name, settings: PbesrewrTest(name, 'pfnf', settings) ,
'pbesrewr-quantifier-all' : lambda name, settings: PbesrewrTest(name, 'quantifier-all', settings) ,
'pbesrewr-quantifier-finite' : lambda name, settings: PbesrewrTest(name, 'quantifier-finite', settings) ,
'pbesrewr-quantifier-inside' : lambda name, settings: PbesrewrTest(name, 'quantifier-inside', settings) ,
'pbesrewr-quantifier-one-point' : lambda name, settings: PbesrewrTest(name, 'quantifier-one-point', settings) ,
'pbesrewr-data-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-data-rewriter', settings) ,
'pbesrewr-simplify-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-rewriter', settings) ,
'pbesrewr-simplify-data-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-data-rewriter', settings) ,
'pbesrewr-simplify-quantifiers-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-quantifiers-rewriter', settings) ,
'pbesrewr-simplify-quantifiers-data-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-quantifiers-data-rewriter', settings),
'pbesinst-lazy' : lambda name, settings: PbesinstTest(name, ['-slazy'], settings) ,
'pbesinst-alternative_lazy' : lambda name, settings: PbesinstTest(name, ['-salternative-lazy'], settings) ,
'pbesinst-finite' : lambda name, settings: PbesinstTest(name, ['-sfinite', '-f*(*:Bool)'], settings) ,
'pbespgsolve' : lambda name, settings: PbespgsolveTest(name, settings) ,
'pbessolve' : lambda name, settings: Pbes2boolTest(name, settings) ,
'pbessolve-depth-first' : lambda name, settings: Pbes2boolDepthFirstTest(name, settings) ,
'pbessolve-counter-example-optimization-0' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 0, settings) ,
'pbessolve-counter-example-optimization-1' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 1, settings) ,
'pbessolve-counter-example-optimization-2' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 2, settings) ,
'pbessolve-counter-example-optimization-3' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 3, settings) ,
'pbessolve-counter-example-optimization-4' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 4, settings) ,
'pbessolve-counter-example-optimization-5' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 5, settings) ,
'pbessolve-counter-example-optimization-6' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 6, settings) ,
'pbessolve-counter-example-optimization-7' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 7, settings) ,
'pbesstategraph' : lambda name, settings: PbesstategraphTest(name, settings) ,
'pbes-unify-parameters' : lambda name, settings: Pbes_unify_parametersTest(name, settings) ,
'pbes-srf' : lambda name, settings: Pbes_srfTest(name, settings) ,
# 'pbessymbolicbisim' : lambda name, settings: PbessymbolicbisimTest(name, settings) , # excluded from the tests because of Z3 dependency
'bessolve' : lambda name, settings: BessolveTest(name, settings) ,
#'stochastic-ltscompare' : lambda name, settings: StochasticLtscompareTest(name, settings) ,
}
# These test do not work on Windows due to dependencies.
if os.name != 'nt':
available_tests.update({'pbessolvesymbolic' : lambda name, settings: PbessolvesymbolicTest(name, settings) })
# available_tests.update({ 'pbesbddsolve' : lambda name, settings: PbesbddsolveTest(name, settings) })
def print_names(tests):
for name in sorted(tests):
print(name)
# Return all tests that match with pattern. In case of an exact match, only this exact match is returned.
def matching_tests(tests, pattern):
matches = [name for name in sorted(tests) if re.search(pattern, name)]
if pattern in matches:
return [pattern]
return matches
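def _matching_tests_sketch():
    # Minimal sketch (illustrative, not called by the suite): an exact match
    # shadows broader regex matches; otherwise every regex match is returned.
    tests = {'lpsparelm': None, 'lpsparelm-extra': None}
    assert matching_tests(tests, 'lpsparelm') == ['lpsparelm']
    assert matching_tests(tests, 'lpsparelm-') == ['lpsparelm-extra']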
def main(tests):
import argparse
cmdline_parser = argparse.ArgumentParser()
cmdline_parser.add_argument('-t', '--toolpath', dest='toolpath', help='The path where the mCRL2 tools are installed')
cmdline_parser.add_argument('-r', '--repetitions', dest='repetitions', metavar='N', default='10', help='Perform N repetitions of each test')
cmdline_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Display additional progress messages.')
cmdline_parser.add_argument('-k', '--keep-files', dest='keep_files', action='store_true', help='Keep the files produced by the test')
cmdline_parser.add_argument('-n', '--names', dest='names', action='store_true', help='Print the names of the available tests')
cmdline_parser.add_argument('-p', '--pattern', dest='pattern', metavar='P', default='.', action='store', help='Run the tests that match with pattern P')
cmdline_parser.add_argument('-o', '--output', dest='output', metavar='o', action='store', help='Run the tests in the given directory')
args = cmdline_parser.parse_args()
if args.names:
print_names(tests)
return
toolpath = args.toolpath
if not toolpath:
toolpath = MCRL2_INSTALL_DIR
settings = {'toolpath': toolpath, 'verbose': args.verbose, 'cleanup_files': not args.keep_files, 'allow-non-zero-return-values': True}
I = range(int(args.repetitions))
if args.output:
if not os.path.exists(args.output):
os.mkdir(args.output)
os.chdir(args.output)
test_failed = False
for name in matching_tests(tests, args.pattern):
try:
for i in I:
test = tests[name]('{}_{}'.format(name, i), settings)
test.execute_in_sandbox()
except Exception as e:
print('An exception occurred:', e.__class__, e)
traceback.print_exc()
test_failed = True
    if test_failed:
sys.exit(-1)
if __name__ == '__main__':
main(available_tests)
| 63.085714 | 280 | 0.606593 | 15,715 | 0.508379 | 0 | 0 | 0 | 0 | 0 | 0 | 6,122 | 0.198046 |
8189efb35e8c25b88203a01795c7461668948d95 | 969 | py | Python | src/download.py | stanislawbartkowski/webhdfsdirectory | 8f7110eb573487c845ab0126eb71f038edb5ed41 | [
"Apache-2.0"
]
| null | null | null | src/download.py | stanislawbartkowski/webhdfsdirectory | 8f7110eb573487c845ab0126eb71f038edb5ed41 | [
"Apache-2.0"
]
| null | null | null | src/download.py | stanislawbartkowski/webhdfsdirectory | 8f7110eb573487c845ab0126eb71f038edb5ed41 | [
"Apache-2.0"
]
| null | null | null | """ Main program to launch proc/hdfs.py
"""
import argparse
import logging
from pars import addargs
import sys
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
from proc.hdfs import DIRHDFS
def gettestargs(parser):
    i = "/home/sbartkowski/work/webhdfsdirectory/testdata/inputhdfs.txt"
    return parser.parse_args([i,"inimical1","14000","sb","/user/sb","dir1","/tmp/download","--dryrun"])
def getargs(parser):
    return parser.parse_args(sys.argv[1:])
def readargs():
parser = argparse.ArgumentParser(
description='Download HDFS using WEB REST/API')
addargs(parser)
# return gettestargs(parser)
return getargs(parser)
def main():
args = readargs()
T = DIRHDFS(args.host[0], args.port[0], args.user[0],args.regexp,args.dryrun)
T.downloadhdfsdir(args.userdir[0], args.usersubdir[0], args.localdir[0])
if __name__ == "__main__":
# execute only if run as a script
main()
| 25.5 | 103 | 0.700722 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 305 | 0.314757 |
818a52c58e57385fa71bcd403825bd6c6ac08eb9 | 15,748 | py | Python | desktop/core/ext-py/josepy-1.1.0/src/josepy/json_util.py | kokosing/hue | 2307f5379a35aae9be871e836432e6f45138b3d9 | [
"Apache-2.0"
]
| 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | desktop/core/ext-py/josepy-1.1.0/src/josepy/json_util.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
]
| 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | desktop/core/ext-py/josepy-1.1.0/src/josepy/json_util.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
]
| 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z | """JSON (de)serialization framework.
The framework presented here is somewhat based on `Go's "json" package`_
(especially the ``omitempty`` functionality).
.. _`Go's "json" package`: http://golang.org/pkg/encoding/json/
"""
import abc
import binascii
import logging
import OpenSSL
import six
from josepy import b64, errors, interfaces, util
logger = logging.getLogger(__name__)
class Field(object):
"""JSON object field.
:class:`Field` is meant to be used together with
:class:`JSONObjectWithFields`.
``encoder`` (``decoder``) is a callable that accepts a single
parameter, i.e. a value to be encoded (decoded), and returns the
serialized (deserialized) value. In case of errors it should raise
:class:`~josepy.errors.SerializationError`
(:class:`~josepy.errors.DeserializationError`).
Note, that ``decoder`` should perform partial serialization only.
:ivar str json_name: Name of the field when encoded to JSON.
:ivar default: Default value (used when not present in JSON object).
:ivar bool omitempty: If ``True`` and the field value is empty, then
it will not be included in the serialized JSON object, and
``default`` will be used for deserialization. Otherwise, if ``False``,
field is considered as required, value will always be included in the
serialized JSON objected, and it must also be present when
deserializing.
"""
__slots__ = ('json_name', 'default', 'omitempty', 'fdec', 'fenc')
def __init__(self, json_name, default=None, omitempty=False,
decoder=None, encoder=None):
# pylint: disable=too-many-arguments
self.json_name = json_name
self.default = default
self.omitempty = omitempty
self.fdec = self.default_decoder if decoder is None else decoder
self.fenc = self.default_encoder if encoder is None else encoder
@classmethod
def _empty(cls, value):
"""Is the provided value considered "empty" for this field?
This is useful for subclasses that might want to override the
definition of being empty, e.g. for some more exotic data types.
"""
return not isinstance(value, bool) and not value
def omit(self, value):
"""Omit the value in output?"""
return self._empty(value) and self.omitempty
def _update_params(self, **kwargs):
current = dict(json_name=self.json_name, default=self.default,
omitempty=self.omitempty,
decoder=self.fdec, encoder=self.fenc)
current.update(kwargs)
return type(self)(**current) # pylint: disable=star-args
def decoder(self, fdec):
"""Descriptor to change the decoder on JSON object field."""
return self._update_params(decoder=fdec)
def encoder(self, fenc):
"""Descriptor to change the encoder on JSON object field."""
return self._update_params(encoder=fenc)
def decode(self, value):
"""Decode a value, optionally with context JSON object."""
return self.fdec(value)
def encode(self, value):
"""Encode a value, optionally with context JSON object."""
return self.fenc(value)
@classmethod
def default_decoder(cls, value):
"""Default decoder.
Recursively deserialize into immutable types (
:class:`josepy.util.frozendict` instead of
:func:`dict`, :func:`tuple` instead of :func:`list`).
"""
# bases cases for different types returned by json.loads
if isinstance(value, list):
return tuple(cls.default_decoder(subvalue) for subvalue in value)
elif isinstance(value, dict):
            return util.frozendict(
                dict((cls.default_decoder(subkey), cls.default_decoder(subvalue))
                     for subkey, subvalue in six.iteritems(value)))
else: # integer or string
return value
@classmethod
def default_encoder(cls, value):
"""Default (passthrough) encoder."""
# field.to_partial_json() is no good as encoder has to do partial
# serialization only
return value
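def _field_sketch():
    # Minimal sketch (illustrative helper, not used elsewhere): a Field with
    # custom encoder/decoder performs partial (de)serialization, and ``omit``
    # honors ``omitempty`` only for falsy, non-boolean values.
    count = Field('count', default=0, omitempty=True, encoder=str, decoder=int)
    assert count.encode(42) == '42'
    assert count.decode('42') == 42
    assert count.omit(0) and not count.omit(False)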
class JSONObjectWithFieldsMeta(abc.ABCMeta):
"""Metaclass for :class:`JSONObjectWithFields` and its subclasses.
It makes sure that, for any class ``cls`` with ``__metaclass__``
set to ``JSONObjectWithFieldsMeta``:
1. All fields (attributes of type :class:`Field`) in the class
definition are moved to the ``cls._fields`` dictionary, where
keys are field attribute names and values are fields themselves.
2. ``cls.__slots__`` is extended by all field attribute names
(i.e. not :attr:`Field.json_name`). Original ``cls.__slots__``
are stored in ``cls._orig_slots``.
In a consequence, for a field attribute name ``some_field``,
``cls.some_field`` will be a slot descriptor and not an instance
of :class:`Field`. For example::
some_field = Field('someField', default=())
class Foo(object):
__metaclass__ = JSONObjectWithFieldsMeta
__slots__ = ('baz',)
some_field = some_field
assert Foo.__slots__ == ('some_field', 'baz')
assert Foo._orig_slots == ()
assert Foo.some_field is not Field
assert Foo._fields.keys() == ['some_field']
assert Foo._fields['some_field'] is some_field
As an implementation note, this metaclass inherits from
:class:`abc.ABCMeta` (and not the usual :class:`type`) to mitigate
the metaclass conflict (:class:`ImmutableMap` and
:class:`JSONDeSerializable`, parents of :class:`JSONObjectWithFields`,
use :class:`abc.ABCMeta` as its metaclass).
"""
def __new__(mcs, name, bases, dikt):
fields = {}
for base in bases:
fields.update(getattr(base, '_fields', {}))
# Do not reorder, this class might override fields from base classes!
for key, value in tuple(six.iteritems(dikt)):
# not six.iterkeys() (in-place edit!)
if isinstance(value, Field):
fields[key] = dikt.pop(key)
dikt['_orig_slots'] = dikt.get('__slots__', ())
dikt['__slots__'] = tuple(
list(dikt['_orig_slots']) + list(six.iterkeys(fields)))
dikt['_fields'] = fields
return abc.ABCMeta.__new__(mcs, name, bases, dikt)
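def _meta_sketch():
    # Minimal sketch (illustrative, mirrors the docstring example above):
    # field attributes migrate into ``_fields`` and their attribute names
    # extend ``__slots__``.
    @six.add_metaclass(JSONObjectWithFieldsMeta)
    class Foo(object):
        __slots__ = ('baz',)
        some = Field('someField', default=())
    assert set(Foo._fields) == {'some'}
    assert 'some' in Foo.__slots__ and 'baz' in Foo.__slots__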
@six.add_metaclass(JSONObjectWithFieldsMeta)
class JSONObjectWithFields(util.ImmutableMap, interfaces.JSONDeSerializable):
# pylint: disable=too-few-public-methods
"""JSON object with fields.
Example::
class Foo(JSONObjectWithFields):
bar = Field('Bar')
empty = Field('Empty', omitempty=True)
@bar.encoder
def bar(value):
return value + 'bar'
@bar.decoder
def bar(value):
if not value.endswith('bar'):
raise errors.DeserializationError('No bar suffix!')
return value[:-3]
assert Foo(bar='baz').to_partial_json() == {'Bar': 'bazbar'}
assert Foo.from_json({'Bar': 'bazbar'}) == Foo(bar='baz')
assert (Foo.from_json({'Bar': 'bazbar', 'Empty': '!'})
== Foo(bar='baz', empty='!'))
assert Foo(bar='baz').bar == 'baz'
"""
@classmethod
def _defaults(cls):
"""Get default fields values."""
return dict([(slot, field.default) for slot, field
in six.iteritems(cls._fields)])
def __init__(self, **kwargs):
# pylint: disable=star-args
super(JSONObjectWithFields, self).__init__(
**(dict(self._defaults(), **kwargs)))
def encode(self, name):
"""Encode a single field.
:param str name: Name of the field to be encoded.
:raises errors.SerializationError: if field cannot be serialized
:raises errors.Error: if field could not be found
"""
try:
field = self._fields[name]
except KeyError:
raise errors.Error("Field not found: {0}".format(name))
return field.encode(getattr(self, name))
def fields_to_partial_json(self):
"""Serialize fields to JSON."""
jobj = {}
omitted = set()
for slot, field in six.iteritems(self._fields):
value = getattr(self, slot)
if field.omit(value):
omitted.add((slot, value))
else:
try:
jobj[field.json_name] = field.encode(value)
except errors.SerializationError as error:
raise errors.SerializationError(
'Could not encode {0} ({1}): {2}'.format(
slot, value, error))
return jobj
def to_partial_json(self):
return self.fields_to_partial_json()
@classmethod
def _check_required(cls, jobj):
missing = set()
for _, field in six.iteritems(cls._fields):
if not field.omitempty and field.json_name not in jobj:
missing.add(field.json_name)
if missing:
raise errors.DeserializationError(
'The following fields are required: {0}'.format(
','.join(missing)))
@classmethod
def fields_from_json(cls, jobj):
"""Deserialize fields from JSON."""
cls._check_required(jobj)
fields = {}
for slot, field in six.iteritems(cls._fields):
if field.json_name not in jobj and field.omitempty:
fields[slot] = field.default
else:
value = jobj[field.json_name]
try:
fields[slot] = field.decode(value)
except errors.DeserializationError as error:
raise errors.DeserializationError(
'Could not decode {0!r} ({1!r}): {2}'.format(
slot, value, error))
return fields
@classmethod
def from_json(cls, jobj):
return cls(**cls.fields_from_json(jobj))
def encode_b64jose(data):
"""Encode JOSE Base-64 field.
:param bytes data:
:rtype: `unicode`
"""
# b64encode produces ASCII characters only
return b64.b64encode(data).decode('ascii')
def decode_b64jose(data, size=None, minimum=False):
"""Decode JOSE Base-64 field.
:param unicode data:
:param int size: Required length (after decoding).
:param bool minimum: If ``True``, then `size` will be treated as
minimum required length, as opposed to exact equality.
:rtype: bytes
"""
error_cls = TypeError if six.PY2 else binascii.Error
try:
decoded = b64.b64decode(data.encode())
except error_cls as error:
raise errors.DeserializationError(error)
if size is not None and ((not minimum and len(decoded) != size) or
(minimum and len(decoded) < size)):
raise errors.DeserializationError(
"Expected at least or exactly {0} bytes".format(size))
return decoded
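def _b64jose_sketch():
    # Minimal sketch (illustrative): JOSE Base-64 round-trips bytes, and the
    # optional ``size`` argument enforces the decoded length in bytes.
    token = encode_b64jose(b'\x00\x01\x02')
    assert decode_b64jose(token, size=3) == b'\x00\x01\x02'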
def encode_hex16(value):
"""Hexlify.
:param bytes value:
:rtype: unicode
"""
return binascii.hexlify(value).decode()
def decode_hex16(value, size=None, minimum=False):
"""Decode hexlified field.
:param unicode value:
:param int size: Required length (after decoding).
:param bool minimum: If ``True``, then `size` will be treated as
minimum required length, as opposed to exact equality.
:rtype: bytes
"""
value = value.encode()
if size is not None and ((not minimum and len(value) != size * 2) or
(minimum and len(value) < size * 2)):
raise errors.DeserializationError()
error_cls = TypeError if six.PY2 else binascii.Error
try:
return binascii.unhexlify(value)
except error_cls as error:
raise errors.DeserializationError(error)
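def _hex16_sketch():
    # Minimal sketch (illustrative): hexlified output has two characters per
    # byte, so ``size`` counts raw bytes rather than hex digits.
    assert encode_hex16(b'\xab\xcd') == u'abcd'
    assert decode_hex16(u'abcd', size=2) == b'\xab\xcd'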
def encode_cert(cert):
"""Encode certificate as JOSE Base-64 DER.
:type cert: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:rtype: unicode
"""
return encode_b64jose(OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_ASN1, cert.wrapped))
def decode_cert(b64der):
"""Decode JOSE Base-64 DER-encoded certificate.
:param unicode b64der:
:rtype: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
"""
try:
return util.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, decode_b64jose(b64der)))
except OpenSSL.crypto.Error as error:
raise errors.DeserializationError(error)
def encode_csr(csr):
"""Encode CSR as JOSE Base-64 DER.
:type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
:rtype: unicode
"""
return encode_b64jose(OpenSSL.crypto.dump_certificate_request(
OpenSSL.crypto.FILETYPE_ASN1, csr.wrapped))
def decode_csr(b64der):
"""Decode JOSE Base-64 DER-encoded CSR.
:param unicode b64der:
:rtype: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
try:
return util.ComparableX509(OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_ASN1, decode_b64jose(b64der)))
except OpenSSL.crypto.Error as error:
raise errors.DeserializationError(error)
class TypedJSONObjectWithFields(JSONObjectWithFields):
"""JSON object with type."""
typ = NotImplemented
"""Type of the object. Subclasses must override."""
type_field_name = "type"
"""Field name used to distinguish different object types.
Subclasses will probably have to override this.
"""
TYPES = NotImplemented
"""Types registered for JSON deserialization"""
@classmethod
def register(cls, type_cls, typ=None):
"""Register class for JSON deserialization."""
typ = type_cls.typ if typ is None else typ
cls.TYPES[typ] = type_cls
return type_cls
@classmethod
def get_type_cls(cls, jobj):
"""Get the registered class for ``jobj``."""
if cls in six.itervalues(cls.TYPES):
if cls.type_field_name not in jobj:
raise errors.DeserializationError(
"Missing type field ({0})".format(cls.type_field_name))
# cls is already registered type_cls, force to use it
# so that, e.g Revocation.from_json(jobj) fails if
# jobj["type"] != "revocation".
return cls
if not isinstance(jobj, dict):
raise errors.DeserializationError(
"{0} is not a dictionary object".format(jobj))
try:
typ = jobj[cls.type_field_name]
except KeyError:
raise errors.DeserializationError("missing type field")
try:
return cls.TYPES[typ]
except KeyError:
raise errors.UnrecognizedTypeError(typ, jobj)
def to_partial_json(self):
"""Get JSON serializable object.
:returns: Serializable JSON object representing ACME typed object.
:meth:`validate` will almost certainly not work, due to reasons
explained in :class:`josepy.interfaces.IJSONSerializable`.
:rtype: dict
"""
jobj = self.fields_to_partial_json()
jobj[self.type_field_name] = self.typ
return jobj
@classmethod
def from_json(cls, jobj):
"""Deserialize ACME object from valid JSON object.
:raises josepy.errors.UnrecognizedTypeError: if type
of the ACME object has not been registered.
"""
# make sure subclasses don't cause infinite recursive from_json calls
type_cls = cls.get_type_cls(jobj)
return type_cls(**type_cls.fields_from_json(jobj))
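def _typed_object_sketch():
    # Minimal sketch (illustrative, hypothetical classes): subclasses register
    # themselves under their ``typ`` and ``from_json`` dispatches on the
    # ``type`` field of the JSON object.
    class Base(TypedJSONObjectWithFields):
        TYPES = {}
    @Base.register
    class Ping(Base):
        typ = 'ping'
    obj = Base.from_json({'type': 'ping'})
    assert isinstance(obj, Ping)
    assert obj.to_partial_json() == {'type': 'ping'}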
| 32.672199 | 78 | 0.625921 | 12,051 | 0.76524 | 0 | 0 | 6,504 | 0.413005 | 0 | 0 | 7,573 | 0.480886 |
818c9c67cc31addec97fe43bfd97c54843bc6cf4 | 6,463 | py | Python | norm/executable/schema/variable.py | reasoned-ai/norm | 5e45d5917ce8745c9a757a0c6b5e689ea0cac19f | [
"Apache-2.0"
]
| 8 | 2019-07-22T08:57:20.000Z | 2021-03-26T13:51:02.000Z | norm/executable/schema/variable.py | xumiao/norm | 5e45d5917ce8745c9a757a0c6b5e689ea0cac19f | [
"Apache-2.0"
]
| null | null | null | norm/executable/schema/variable.py | xumiao/norm | 5e45d5917ce8745c9a757a0c6b5e689ea0cac19f | [
"Apache-2.0"
]
| 1 | 2019-11-16T13:37:35.000Z | 2019-11-16T13:37:35.000Z | from norm.models.norm import Status, Lambda
from norm.executable import NormExecutable
from typing import Union, List
import logging
logger = logging.getLogger(__name__)
class VariableName(NormExecutable):
def __init__(self, scope, name):
"""
The variable and its scope
:param scope: the scope of the variable
:type scope: Union[VariableName, EvaluationExpr]
:param name: the name of the variable
:type name: str
"""
super().__init__()
from norm.executable.expression.evaluation import EvaluationExpr
self.scope: Union[VariableName, EvaluationExpr] = scope
self.name: str = name
self.output_projection: str = None
@property
def eval_lam(self):
return self.lam
def __str__(self):
if self.scope is not None:
return '{}{}{}'.format(self.scope.name, self.VARIABLE_SEPARATOR, self.name)
else:
return self.name
def variable_type(self):
return self.lam
def compile(self, context):
if self.name == context.THAT_VARIABLE_NAME:
self.lam = context.that
return self
session = context.session
if self.scope is None:
name = self.name
scope = context.get_scope(name)
if scope is not None:
return ColumnVariable(scope, name).compile(context)
else:
lam = self.try_retrieve_type(session, context.context_namespace, name)
if lam is None:
lam = self.try_retrieve_type(session, context.search_namespaces, name, status=Status.READY)
self.lam = lam
return self
else:
if isinstance(self.scope, ColumnVariable) and str(self) in self.scope.lam:
# Already joined
self.scope.name = str(self)
return self.scope
lam = self.scope.variable_type()
if self.name in lam:
if isinstance(self.scope, (ColumnVariable, JoinVariable)):
# Join lam to the scope for the column
return JoinVariable(self.scope, self.name, lam).compile(context)
else:
# A column of the scope variable
return ColumnVariable(self.scope, self.name).compile(context)
else:
# An evaluation whose first argument is the scope
lam = self.try_retrieve_type(session, context.context_namespace, self.name)
if lam is None:
lam = self.try_retrieve_type(session, context.search_namespaces, self.name, status=Status.READY)
assert(lam is not None)
self.lam = lam
from norm.executable.expression.argument import ArgumentExpr
arg = ArgumentExpr(expr=self.scope)
self.scope = None
from norm.executable.expression.evaluation import EvaluationExpr
return EvaluationExpr([arg], self)
class UnquoteVariable(VariableName):
def __init__(self, name, unquoted_variables):
"""
The variable and its scope
:param name: the name of the variable
:type name: str
:param unquoted_variables: a list of variables to unquote
:type unquoted_variables: List[VariableName]
"""
super().__init__(None, name)
self.unquoted_variables: List[VariableName] = unquoted_variables
def __str__(self):
return self.name
def variable_type(self):
raise NotImplementedError
def compile(self, context):
assert(len(self.unquoted_variables) > 0)
assert(all([isinstance(v, ColumnVariable) for v in self.unquoted_variables]))
lam = self.unquoted_variables[0].lam
assert(all([v.lam is lam for v in self.unquoted_variables]))
self.lam = lam
return self
def execute(self, context):
# TODO: hacky
assert(len(self.unquoted_variables) == 1)
vname = self.unquoted_variables[0].name
data = self.lam.data
from pandas import DataFrame
def _execute(x):
try:
result = context.execute(x[vname].values[0])
if isinstance(result, DataFrame):
return result
else:
return None
            except Exception:
                return None
results = data.groupby(vname).apply(_execute).reset_index()
if self.output_projection is not None:
cols = dict((col, self.VARIABLE_SEPARATOR.join([self.output_projection, col]))
for col in results.columns if col != vname and col != self.lam.VAR_OID)
if self.lam.VAR_OID in results.columns:
cols[self.lam.VAR_OID] = self.output_projection
results = results.rename(columns=cols)
return data.merge(results, on=vname)
class ColumnVariable(VariableName):
def __init__(self, scope, name):
super().__init__(scope, name)
def __str__(self):
return self.name
def variable_type(self):
return self.lam.get_type(self.name)
def compile(self, context):
from norm.engine import QuantifiedLambda
if self.scope is None:
assert(context.scope is not None)
self.lam = context.scope
elif isinstance(self.scope, Lambda):
self.lam = self.scope
elif isinstance(self.scope, QuantifiedLambda):
self.lam = self.scope
else:
self.lam = self.scope.lam
return self
def execute(self, context):
result = self.lam.data[self.name]
if self.output_projection is not None:
return result.rename(self.output_projection)
else:
return result
class JoinVariable(VariableName):
def __init__(self, scope, name, joiner):
super().__init__(scope, name)
self.lam = joiner
def variable_type(self):
return self.lam.get_type(self.name)
def compile(self, context):
return self
def execute(self, context):
lam = self.scope.lam
joiner = self.lam
if str(self) not in lam.data.columns:
to_join = joiner.data[[self.name]].rename(columns={self.name: str(self)})
lam.data = lam.data.join(to_join, on=str(self.scope))
return lam.data
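def _join_rename_sketch():
    # Illustrative sketch with hypothetical data (the '.'-separated scoped
    # column name is an assumption about VARIABLE_SEPARATOR): as in
    # JoinVariable.execute above, the joiner's column is renamed to the scoped
    # name before being joined on the scope column.
    import pandas as pd
    lam_data = pd.DataFrame({'person': ['a', 'b']})
    joiner_data = pd.DataFrame({'age': [30, 40]}, index=['a', 'b'])
    to_join = joiner_data[['age']].rename(columns={'age': 'person.age'})
    return lam_data.join(to_join, on='person')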
| 34.195767 | 116 | 0.597865 | 6,279 | 0.97153 | 0 | 0 | 57 | 0.008819 | 0 | 0 | 620 | 0.095931 |
818cc866ba9ab18156dde4bf30880a18007fbc03 | 3,318 | py | Python | src/parserindexer/tika_parser.py | yyzhuang1991/parser-indexer-py | 66847a6373d1fbc771e8ed9172a068731f9cd4da | [
"Apache-2.0"
]
| 8 | 2016-07-20T19:50:00.000Z | 2020-09-15T01:56:51.000Z | src/parserindexer/tika_parser.py | yyzhuang1991/parser-indexer-py | 66847a6373d1fbc771e8ed9172a068731f9cd4da | [
"Apache-2.0"
]
| 27 | 2016-08-01T13:39:25.000Z | 2021-08-18T17:47:23.000Z | src/parserindexer/tika_parser.py | yyzhuang1991/parser-indexer-py | 66847a6373d1fbc771e8ed9172a068731f9cd4da | [
"Apache-2.0"
]
| 3 | 2016-07-20T19:55:49.000Z | 2021-08-06T16:19:48.000Z | from __future__ import print_function
import os
import sys
import json
import tika
from tqdm import tqdm
from utils import LogUtil
from parser import Parser
from ioutils import read_lines
from tika import parser as tk_parser
class TikaParser(Parser):
""" The TikaParser class is wrapper of the Apache TIKA parse """
def __init__(self, tika_server_url):
super(TikaParser, self).__init__('tika_parser')
if tika_server_url:
os.environ['TIKA_CLIENT_ONLY'] = 'True'
os.environ['TIKA_SERVER_ENDPOINT'] = tika_server_url
print("Tika Server Endpoint %s" %
os.environ['TIKA_SERVER_ENDPOINT'])
tika.initVM()
def parse(self, file_path):
""" Parse one PDF file using Apache TIKA parser
Args:
file_path (str): Path to a PDF file
Return:
parsed content stored in a dictionary
"""
if not os.path.exists(file_path):
raise RuntimeError('%s error. File not found: %s' %
(self.parse_name, os.path.abspath(file_path)))
try:
tika_dict = tk_parser.from_file(file_path)
except Exception:
raise RuntimeError('Internal TIKA error occurred while parsing the '
'file: %s' % os.path.abspath(file_path))
tika_dict['file'] = os.path.abspath(file_path)
return tika_dict
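def _tika_parser_sketch():
    # Hedged sketch (hypothetical server URL and file path): parse one file
    # against a locally running Tika server and return the raw Tika dict.
    tp = TikaParser('http://localhost:9998')
    return tp.parse('/path/to/document.pdf')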
def process(in_file, in_list, out_file, log_file, tika_server_url):
# Log input parameters
logger = LogUtil('lpsc-parser', log_file)
logger.info('Input parameters')
logger.info('in_file: %s' % in_file)
logger.info('in_list: %s' % in_list)
logger.info('out_file: %s' % out_file)
logger.info('tika_server_url: %s' % tika_server_url)
if in_file and in_list:
print('[ERROR] in_file and in_list cannot be provided simultaneously')
sys.exit(1)
tika_parser = TikaParser(tika_server_url)
if in_file:
files = [in_file]
else:
files = read_lines(in_list)
    out_f = open(out_file, 'w', 1)  # line-buffered text mode; JSON strings are written below
for f in tqdm(files):
try:
tika_dict = tika_parser.parse(f)
out_f.write(json.dumps(tika_dict))
out_f.write('\n')
except Exception as e:
logger.info('TIKA parser failed: %s' % os.path.abspath(f))
logger.error(e)
out_f.close()
def main():
import argparse
parser = argparse.ArgumentParser()
input_parser = parser.add_mutually_exclusive_group(required=True)
input_parser.add_argument('-i', '--in_file', help='Path to input file')
input_parser.add_argument('-li', '--in_list', help='Path to input list')
parser.add_argument('-o', '--out_file', required=True,
help='Path to output JSON file')
parser.add_argument('-l', '--log_file', default='./tika-parser-log.txt',
help='Log file that contains processing information. '
'It is default to ./tika-parser-log.txt unless '
'otherwise specified.')
parser.add_argument('-p', '--tika_server_url', required=False,
help='Tika server URL')
args = parser.parse_args()
process(**vars(args))
if __name__ == '__main__':
main()
| 31.6 | 80 | 0.611814 | 1,203 | 0.362568 | 0 | 0 | 0 | 0 | 0 | 0 | 983 | 0.296263 |
818d2b5226021a3473fd95143600b3a63ac484e1 | 869 | py | Python | checkov/cloudformation/checks/resource/aws/DocDBAuditLogs.py | niradler/checkov | 2628c6f28a5604efe3877d6eacc3044d2b66b7b1 | [
"Apache-2.0"
]
| null | null | null | checkov/cloudformation/checks/resource/aws/DocDBAuditLogs.py | niradler/checkov | 2628c6f28a5604efe3877d6eacc3044d2b66b7b1 | [
"Apache-2.0"
]
| 2 | 2022-03-07T07:15:32.000Z | 2022-03-21T07:21:17.000Z | checkov/cloudformation/checks/resource/aws/DocDBAuditLogs.py | niradler/checkov | 2628c6f28a5604efe3877d6eacc3044d2b66b7b1 | [
"Apache-2.0"
]
| null | null | null | from checkov.cloudformation.checks.resource.base_resource_check import BaseResourceCheck
from checkov.common.parsers.node import DictNode
from checkov.common.models.enums import CheckResult, CheckCategories
class DocDBAuditLogs(BaseResourceCheck):
def __init__(self) -> None:
name = "Ensure DocDB has audit logs enabled"
id = "CKV_AWS_104"
supported_resources = ["AWS::DocDB::DBClusterParameterGroup"]
categories = [CheckCategories.LOGGING]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf: DictNode) -> CheckResult:
params = conf.get("Properties", {}).get("Parameters", {})
if params.get("audit_logs") == "enabled":
return CheckResult.PASSED
return CheckResult.FAILED
check = DocDBAuditLogs()
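def _docdb_check_sketch():
    # Minimal sketch (hypothetical resource conf): enabling ``audit_logs`` in
    # the parameter group's Parameters makes the check pass.
    conf = {"Properties": {"Parameters": {"audit_logs": "enabled"}}}
    assert check.scan_resource_conf(conf) == CheckResult.PASSED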
| 36.208333 | 106 | 0.721519 | 632 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 132 | 0.151899 |
818de49075c87063860cf616c4fbba1c27c95106 | 584 | py | Python | py/server/deephaven/server/plugin/__init__.py | lbooker42/deephaven-core | 2d04563f18ae914754b28041475c02770e57af15 | [
"MIT"
]
| null | null | null | py/server/deephaven/server/plugin/__init__.py | lbooker42/deephaven-core | 2d04563f18ae914754b28041475c02770e57af15 | [
"MIT"
]
| null | null | null | py/server/deephaven/server/plugin/__init__.py | lbooker42/deephaven-core | 2d04563f18ae914754b28041475c02770e57af15 | [
"MIT"
]
| null | null | null | #
# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
#
import jpy
_JCallbackAdapter = jpy.get_type('io.deephaven.server.plugin.python.CallbackAdapter')
def initialize_all_and_register_into(callback: _JCallbackAdapter):
try:
from . import register
except ModuleNotFoundError as e:
# deephaven.plugin is an optional dependency, so if it can't be found, there are no Deephaven python plugins
# to register
if e.name == 'deephaven.plugin':
return
raise e
register.initialize_all_and_register_into(callback)
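def _optional_plugin_sketch():
    # Minimal sketch of the optional-dependency pattern used above: a missing
    # deephaven.plugin module is swallowed, any other import error propagates.
    try:
        import deephaven.plugin  # noqa: F401
    except ModuleNotFoundError as e:
        if e.name != 'deephaven.plugin':
            raise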
| 30.736842 | 116 | 0.717466 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 256 | 0.438356 |
818e56826eb6b882afcd422dcb192ea6b72a334b | 3,133 | py | Python | mypy/test/testoutput.py | TimSimpsonR/mypy | 5e6fd6335e0662b0477e1d678269f33e6f4194ba | [
"PSF-2.0"
]
| 1 | 2019-06-27T11:34:27.000Z | 2019-06-27T11:34:27.000Z | mypy/test/testoutput.py | silky/mypy | de6a8d3710df9f49109cb682f2092e4967bfb92c | [
"PSF-2.0"
]
| null | null | null | mypy/test/testoutput.py | silky/mypy | de6a8d3710df9f49109cb682f2092e4967bfb92c | [
"PSF-2.0"
]
| null | null | null | """Tests for parse tree pretty printing that preserves formatting
Test case descriptions are in file test/data/output.test.
"""
import os.path
import re
from mypy import build
from mypy.myunit import Suite, run_test
from mypy.test.helpers import assert_string_arrays_equal
from mypy.test.data import parse_test_cases
from mypy.test.config import test_data_prefix, test_temp_dir
from mypy.parse import parse
from mypy.output import OutputVisitor
from mypy.errors import CompileError
# Files which contain test case descriptions.
output_files = ['output.test']
class OutputSuite(Suite):
def cases(self):
c = []
for f in output_files:
c += parse_test_cases(os.path.join(test_data_prefix, f),
test_output, test_temp_dir, True)
return c
def test_output(testcase):
"""Perform an identity source code transformation test case."""
expected = testcase.output
if expected == []:
expected = testcase.input
try:
src = '\n'.join(testcase.input)
# Parse and semantically analyze the source program.
# Test case names with a special suffix get semantically analyzed. This
# lets us test that semantic analysis does not break source code pretty
# printing.
if testcase.name.endswith('_SemanticAnalyzer'):
result = build.build('main',
target=build.SEMANTIC_ANALYSIS,
program_text=src,
flags=[build.TEST_BUILTINS],
alt_lib_path=test_temp_dir)
files = result.files
else:
files = {'main': parse(src, 'main')}
a = []
first = True
# Produce an output containing the pretty-printed forms (with original
# formatting) of all the relevant source files.
for fnam in sorted(files.keys()):
f = files[fnam]
# Omit the builtins and files marked for omission.
if (not f.path.endswith(os.sep + 'builtins.py') and
'-skip.' not in f.path):
# Add file name + colon for files other than the first.
if not first:
a.append('{}:'.format(fix_path(remove_prefix(
f.path, test_temp_dir))))
v = OutputVisitor()
f.accept(v)
s = v.output()
if s != '':
a += s.split('\n')
first = False
except CompileError as e:
a = e.messages
assert_string_arrays_equal(
expected, a, 'Invalid source code output ({}, line {})'.format(
testcase.file, testcase.line))
def remove_prefix(path, prefix):
regexp = '^' + prefix.replace('\\', '\\\\')
np = re.sub(regexp, '', path)
if np.startswith(os.sep):
np = np[1:]
return np
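def _remove_prefix_sketch():
    # Minimal sketch (illustrative): the prefix and any leading path separator
    # are stripped from the result.
    assert remove_prefix(os.path.join('tmp', 'main.py'), 'tmp') == 'main.py'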
def fix_path(path):
return path.replace('\\', '/')
if __name__ == '__main__':
import sys
run_test(OutputSuite(), sys.argv[1:])
| 31.969388 | 79 | 0.578998 | 246 | 0.078519 | 0 | 0 | 0 | 0 | 0 | 0 | 823 | 0.262688 |
81909aed2c8da07e00d68c36e16753bcc2a5f66d | 2,395 | py | Python | tests/test_SklearnDecisionTreeConverters.py | c-bata/sklearn-onnx | ff3e2eb204991b5799fc606c265b2c283dbfc25c | [
"MIT"
]
| 1 | 2019-05-06T20:54:02.000Z | 2019-05-06T20:54:02.000Z | tests/test_SklearnDecisionTreeConverters.py | PossieMP/https-github.com-onnx-sklearn-onnx | 48c60398d38e6937897d7c1506a8dcfcf28830a2 | [
"MIT"
]
| null | null | null | tests/test_SklearnDecisionTreeConverters.py | PossieMP/https-github.com-onnx-sklearn-onnx | 48c60398d38e6937897d7c1506a8dcfcf28830a2 | [
"MIT"
]
| 1 | 2020-04-09T07:46:52.000Z | 2020-04-09T07:46:52.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
from sklearn.tree import DecisionTreeClassifier
from sklearn.tree import DecisionTreeRegressor
from skl2onnx.common.data_types import onnx_built_with_ml
from test_utils import (
dump_one_class_classification,
dump_binary_classification,
dump_multiple_classification,
)
from test_utils import dump_multiple_regression, dump_single_regression
class TestSklearnDecisionTreeModels(unittest.TestCase):
@unittest.skipIf(not onnx_built_with_ml(),
reason="Requires ONNX-ML extension.")
def test_decision_tree_classifier(self):
model = DecisionTreeClassifier()
dump_one_class_classification(
model,
# Operator cast-1 is not implemented in onnxruntime
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.3') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
dump_binary_classification(
model,
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.3') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
dump_multiple_classification(
model,
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.3') or "
"StrictVersion(onnxruntime.__version__)"
" <= StrictVersion('0.2.1')",
)
def test_decision_tree_regressor(self):
model = DecisionTreeRegressor()
dump_single_regression(
model,
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2')",
)
dump_multiple_regression(
model,
allow_failure="StrictVersion(onnx.__version__)"
" < StrictVersion('1.2')",
)
if __name__ == "__main__":
unittest.main()
| 38.015873 | 76 | 0.562004 | 1,663 | 0.694363 | 0 | 0 | 1,168 | 0.487683 | 0 | 0 | 901 | 0.3762 |
8190b06cbd6a99f76c275c7c5d6181dfe355ab0d | 7,005 | py | Python | tests/test_inference.py | MihailSalnikov/microscopeimagequality | 22d1f7c7f6793a0ba6f64c2aea2bf3270c32301b | [
"Apache-2.0"
]
| 77 | 2017-10-30T19:34:06.000Z | 2022-01-20T17:15:10.000Z | tests/test_inference.py | MihailSalnikov/microscopeimagequality | 22d1f7c7f6793a0ba6f64c2aea2bf3270c32301b | [
"Apache-2.0"
]
| 3 | 2020-07-02T22:20:46.000Z | 2021-08-25T14:39:43.000Z | tests/test_inference.py | MihailSalnikov/microscopeimagequality | 22d1f7c7f6793a0ba6f64c2aea2bf3270c32301b | [
"Apache-2.0"
]
| 40 | 2017-12-22T01:21:13.000Z | 2022-03-13T22:28:59.000Z | import logging
import os
import tempfile
import PIL.Image
import numpy
import tensorflow
import microscopeimagequality.constants
import microscopeimagequality.data_provider
import microscopeimagequality.evaluation
import microscopeimagequality.prediction
class Inference(tensorflow.test.TestCase):
def setUp(self):
        self.input_directory = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "data")
        self.test_data_directory = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "data")
self.test_dir = tempfile.mkdtemp()
self.glob_images = os.path.join(self.input_directory, 'images_for_glob_test/*')
self.patch_width = 84
self.num_classes = 11
def testPatchValuesToMask(self):
values = numpy.round(
numpy.array([[0.2, 0.4, 0.5], [1.0, 0.0, 0.3]]) *
numpy.iinfo(numpy.uint16).max).astype(numpy.uint16)
mask = microscopeimagequality.prediction.patch_values_to_mask(values, self.patch_width)
self.assertEquals((168, 252), mask.shape)
self.assertEquals(numpy.iinfo(numpy.uint16).max, numpy.max(mask))
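    def _patch_mask_shape_sketch(self):
        # Illustrative helper, not a test: each value expands to a
        # patch_width x patch_width tile, so a 2x3 grid of values yields a
        # mask of shape (2 * patch_width, 3 * patch_width).
        values = numpy.zeros((2, 3), dtype=numpy.uint16)
        mask = microscopeimagequality.prediction.patch_values_to_mask(
            values, self.patch_width)
        assert mask.shape == (2 * self.patch_width, 3 * self.patch_width)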
def testSaveMasksAndAnnotatedVisualization(self):
test_filename = 'BBBC006_z_aligned__a01__s1__w1_10.png'
orig_name = os.path.join(self.test_data_directory, test_filename)
prediction = 1
certainties = {name: 0.3 for name in microscopeimagequality.evaluation.CERTAINTY_NAMES}
num_patches = 4
np_images = numpy.ones((num_patches, self.patch_width, self.patch_width, 1))
np_probabilities = numpy.ones(
(num_patches, self.num_classes)) / self.num_classes
np_probabilities[0, :] = 0
np_probabilities[0, 1] = 1.0
np_probabilities[1, :] = 0
np_probabilities[1, 2] = 0.4
np_probabilities[1, -1] = 0.6
np_labels = 2 * numpy.ones(num_patches)
image_height = int(numpy.sqrt(num_patches)) * self.patch_width
image_width = image_height
microscopeimagequality.prediction.save_masks_and_annotated_visualization(
orig_name, self.test_dir, prediction, certainties, np_images,
np_probabilities, np_labels, self.patch_width, image_height,
image_width)
# Check that output has been generated and is the correct shape.
expected_size = PIL.Image.open(orig_name, 'r').size
expected_visualization_path = os.path.join(
self.test_dir,
'actual2_pred1_mean_certainty=0.300orig_name=%s' % test_filename)
expected_predictions_path = os.path.join(self.test_dir,
microscopeimagequality.constants.PREDICTIONS_MASK_FORMAT %
test_filename)
expected_certainties_path = os.path.join(self.test_dir,
microscopeimagequality.constants.CERTAINTY_MASK_FORMAT %
test_filename)
expected_valid_path = os.path.join(self.test_dir,
microscopeimagequality.constants.VALID_MASK_FORMAT %
test_filename)
img = PIL.Image.open(expected_visualization_path, 'r')
self.assertEquals(expected_size, img.size)
img = PIL.Image.open(expected_predictions_path, 'r')
self.assertEquals(expected_size, img.size)
img = PIL.Image.open(expected_certainties_path, 'r')
self.assertEquals(expected_size, img.size)
img = PIL.Image.open(expected_valid_path, 'r')
self.assertEquals(expected_size, img.size)
def testSaveMasksAndAnnotatedVisualizationTif(self):
test_filename = ('00_mcf-z-stacks-03212011_k06_s2_w12667264a'
'-6432-4f7e-bf58-625a1319a1c9.tif')
orig_name = os.path.join(self.test_data_directory, test_filename)
prediction = 1
certainties = {name: 0.3 for name in microscopeimagequality.evaluation.CERTAINTY_NAMES}
num_patches = 4
np_images = numpy.ones((num_patches, self.patch_width, self.patch_width, 1))
np_probabilities = numpy.ones(
(num_patches, self.num_classes)) / self.num_classes
image_height = int(numpy.sqrt(num_patches)) * self.patch_width
image_width = image_height
np_labels = 2 * numpy.ones(num_patches)
microscopeimagequality.prediction.save_masks_and_annotated_visualization(
orig_name, self.test_dir, prediction, certainties, np_images,
np_probabilities, np_labels, self.patch_width, image_height,
image_width)
mask_formats = [
microscopeimagequality.constants.CERTAINTY_MASK_FORMAT, microscopeimagequality.constants.PREDICTIONS_MASK_FORMAT,
microscopeimagequality.constants.VALID_MASK_FORMAT
]
for mask_format in mask_formats:
orig_name_png = os.path.splitext(os.path.basename(orig_name))[0] + '.png'
expected_file = os.path.join(self.test_dir,
mask_format % orig_name_png)
self.assertTrue(os.path.isfile(expected_file))
def testRunModelInferenceFirstHalfRuns(self):
batch_size = 1
num_classes = 11
model_patch_width = 84
image_width = 84
image_height = 84
tfexamples_tfrecord = microscopeimagequality.prediction.build_tfrecord_from_pngs(
[self.glob_images],
use_unlabeled_data=True,
num_classes=num_classes,
eval_directory=self.test_dir,
image_background_value=0,
image_brightness_scale=1,
shard_num=0,
num_shards=1,
image_width=image_width,
image_height=image_height)
num_samples = microscopeimagequality.data_provider.get_num_records(tfexamples_tfrecord %
microscopeimagequality.prediction._SPLIT_NAME)
logging.info('TFRecord has %g samples.', num_samples)
g = tensorflow.Graph()
with g.as_default():
images, one_hot_labels, _, _ = microscopeimagequality.data_provider.provide_data(
tfexamples_tfrecord,
split_name=microscopeimagequality.prediction._SPLIT_NAME,
batch_size=batch_size,
num_classes=num_classes,
image_width=84,
image_height=84,
patch_width=model_patch_width,
randomize=False,
num_threads=1)
labels = microscopeimagequality.evaluation.get_model_and_metrics(
images,
num_classes=num_classes,
one_hot_labels=one_hot_labels,
is_training=False).labels
self.assertEquals(batch_size, labels.get_shape())
| 44.056604 | 125 | 0.63469 | 6,745 | 0.962884 | 0 | 0 | 0 | 0 | 0 | 0 | 312 | 0.04454 |
8190c488725fd5780c71f8986d5214f9a0371832 | 498 | py | Python | config.py | hiankun/qb_test | ab031d74d903cdb8845a033d290611b444a5abdb | [
"MIT"
]
| 4 | 2015-03-07T19:15:28.000Z | 2021-01-24T15:13:30.000Z | config.py | hiankun/qb_test | ab031d74d903cdb8845a033d290611b444a5abdb | [
"MIT"
]
| null | null | null | config.py | hiankun/qb_test | ab031d74d903cdb8845a033d290611b444a5abdb | [
"MIT"
]
| 1 | 2017-07-03T06:55:24.000Z | 2017-07-03T06:55:24.000Z | """
QuickBot wiring config.
Specifies which pins are used for motor control, IR sensors and wheel encoders.
"""
# Motor pins: (dir1_pin, dir2_pin, pwd_pin)
RIGHT_MOTOR_PINS = 'P8_12', 'P8_10', 'P9_14'
LEFT_MOTOR_PINS = 'P8_14', 'P8_16', 'P9_16'
# IR sensors (clock-wise, starting with the rear left sensor):
# rear-left, front-left, front, front-right, rear-right
IR_PINS = ('P9_38', 'P9_40', 'P9_36', 'P9_35', 'P9_33')
# Wheel encoder sensors: (left, right)
ENC_PINS = ('P9_39', 'P9_37')
| 23.714286 | 79 | 0.690763 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 401 | 0.805221 |
8191a9d3234f49c843978a8688358673f859017f | 8,912 | py | Python | tools/tests/skimage_self_test.py | yinquan529/platform-external-skia | 1adfb847fe565e53d2e26e35b04c8dc112b7513a | [
"BSD-3-Clause"
]
| 1 | 2016-05-04T10:08:50.000Z | 2016-05-04T10:08:50.000Z | tools/tests/skimage_self_test.py | yinquan529/platform-external-skia | 1adfb847fe565e53d2e26e35b04c8dc112b7513a | [
"BSD-3-Clause"
]
| null | null | null | tools/tests/skimage_self_test.py | yinquan529/platform-external-skia | 1adfb847fe565e53d2e26e35b04c8dc112b7513a | [
"BSD-3-Clause"
]
| 1 | 2020-01-16T03:34:53.000Z | 2020-01-16T03:34:53.000Z | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Self-test for skimage.
import filecmp
import os
import subprocess
import sys
import tempfile
class BinaryNotFoundException(Exception):
def __str__ (self):
return ("Could not find binary!\n"
"Did you forget to build the tools project?\n"
"Self tests failed")
# Find a path to the binary to use. Iterates through a list of possible
# locations the binary may be.
def PickBinaryPath(base_dir):
POSSIBLE_BINARY_PATHS = [
'out/Debug/skimage',
'out/Release/skimage',
'xcodebuild/Debug/skimage',
'xcodebuild/Release/skimage',
]
for binary in POSSIBLE_BINARY_PATHS:
binary_full_path = os.path.join(base_dir, binary)
if (os.path.exists(binary_full_path)):
return binary_full_path
raise BinaryNotFoundException
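# For example, PickBinaryPath("/src/skia") returns "/src/skia/out/Debug/skimage"
# when that is the first candidate that exists (illustrative path only).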
# Quit early if two files have different content.
def DieIfFilesMismatch(expected, actual):
if not filecmp.cmp(expected, actual):
print 'Error: file mismatch! expected=%s , actual=%s' % (
expected, actual)
exit(1)
def test_invalid_file(file_dir, skimage_binary):
""" Test the return value of skimage when an invalid file is decoded.
If there is no expectation file, or the file expects a particular
result, skimage should return nonzero indicating failure.
If the file has no expectation, or ignore-failure is set to true,
skimage should return zero indicating success. """
invalid_file = os.path.join(file_dir, "skimage", "input", "bad-images",
"invalid.png")
# No expectations file:
args = [skimage_binary, "--readPath", invalid_file]
result = subprocess.call(args)
if 0 == result:
print "'%s' should have reported failure!" % " ".join(args)
exit(1)
# Directory holding all expectations files
expectations_dir = os.path.join(file_dir, "skimage", "input", "bad-images")
# Expectations file expecting a valid decode:
incorrect_expectations = os.path.join(expectations_dir,
"incorrect-results.json")
args = [skimage_binary, "--readPath", invalid_file,
"--readExpectationsPath", incorrect_expectations]
result = subprocess.call(args)
if 0 == result:
print "'%s' should have reported failure!" % " ".join(args)
exit(1)
# Empty expectations:
empty_expectations = os.path.join(expectations_dir, "empty-results.json")
output = subprocess.check_output([skimage_binary, "--readPath", invalid_file,
"--readExpectationsPath",
empty_expectations],
stderr=subprocess.STDOUT)
if not "Missing" in output:
# Another test (in main()) tests to ensure that "Missing" does not appear
# in the output. That test could be passed if the output changed so
# "Missing" never appears. This ensures that an error is not missed if
# that happens.
print "skimage output changed! This may cause other self tests to fail!"
exit(1)
# Ignore failure:
ignore_expectations = os.path.join(expectations_dir, "ignore-results.json")
output = subprocess.check_output([skimage_binary, "--readPath", invalid_file,
"--readExpectationsPath",
ignore_expectations],
stderr=subprocess.STDOUT)
if not "failures" in output:
# Another test (in main()) tests to ensure that "failures" does not
# appear in the output. That test could be passed if the output changed
# so "failures" never appears. This ensures that an error is not missed
# if that happens.
print "skimage output changed! This may cause other self tests to fail!"
exit(1)
def test_incorrect_expectations(file_dir, skimage_binary):
""" Test that comparing to incorrect expectations fails, unless
ignore-failures is set to true. """
valid_file = os.path.join(file_dir, "skimage", "input",
"images-with-known-hashes",
"1209453360120438698.png")
expectations_dir = os.path.join(file_dir, "skimage", "input",
"images-with-known-hashes")
incorrect_results = os.path.join(expectations_dir,
"incorrect-results.json")
args = [skimage_binary, "--readPath", valid_file, "--readExpectationsPath",
incorrect_results]
result = subprocess.call(args)
if 0 == result:
print "'%s' should have reported failure!" % " ".join(args)
exit(1)
ignore_results = os.path.join(expectations_dir, "ignore-failures.json")
subprocess.check_call([skimage_binary, "--readPath", valid_file,
"--readExpectationsPath", ignore_results])
def main():
# Use the directory of this file as the out directory
file_dir = os.path.abspath(os.path.dirname(__file__))
trunk_dir = os.path.normpath(os.path.join(file_dir, os.pardir, os.pardir))
# Find the binary
skimage_binary = PickBinaryPath(trunk_dir)
print "Running " + skimage_binary
# Generate an expectations file from known images.
images_dir = os.path.join(file_dir, "skimage", "input",
"images-with-known-hashes")
expectations_path = os.path.join(file_dir, "skimage", "output-actual",
"create-expectations", "expectations.json")
subprocess.check_call([skimage_binary, "--readPath", images_dir,
"--createExpectationsPath", expectations_path])
# Make sure the expectations file was generated correctly.
golden_expectations = os.path.join(file_dir, "skimage", "output-expected",
"create-expectations",
"expectations.json")
DieIfFilesMismatch(expected=golden_expectations, actual=expectations_path)
# Tell skimage to read back the expectations file it just wrote, and
# confirm that the images in images_dir match it.
output = subprocess.check_output([skimage_binary, "--readPath", images_dir,
"--readExpectationsPath",
expectations_path],
stderr=subprocess.STDOUT)
# Although skimage succeeded, it would have reported success if the file
# was missing from the expectations file. Consider this a failure, since
# the expectations file was created from this same image. (It will print
# "Missing" in this case before listing the missing expectations).
if "Missing" in output:
print "Expectations file was missing expectations!"
print output
exit(1)
# Again, skimage would succeed if there were known failures (and print
# "failures"), but there should be no failures, since the file just
# created did not include failures to ignore.
if "failures" in output:
print "Image failed!"
print output
exit(1)
test_incorrect_expectations(file_dir=file_dir,
skimage_binary=skimage_binary)
# Generate an expectations file from an empty directory.
empty_dir = tempfile.mkdtemp()
expectations_path = os.path.join(file_dir, "skimage", "output-actual",
"empty-dir", "expectations.json")
subprocess.check_call([skimage_binary, "--readPath", empty_dir,
"--createExpectationsPath", expectations_path])
golden_expectations = os.path.join(file_dir, "skimage", "output-expected",
"empty-dir", "expectations.json")
DieIfFilesMismatch(expected=golden_expectations, actual=expectations_path)
os.rmdir(empty_dir)
# Generate an expectations file from a nonexistent directory.
expectations_path = os.path.join(file_dir, "skimage", "output-actual",
"nonexistent-dir", "expectations.json")
subprocess.check_call([skimage_binary, "--readPath", "/nonexistent/dir",
"--createExpectationsPath", expectations_path])
golden_expectations = os.path.join(file_dir, "skimage", "output-expected",
"nonexistent-dir", "expectations.json")
DieIfFilesMismatch(expected=golden_expectations, actual=expectations_path)
test_invalid_file(file_dir=file_dir, skimage_binary=skimage_binary)
# Done with all tests.
print "Self tests succeeded!"
if __name__ == "__main__":
main()
| 44.78392 | 81 | 0.632518 | 208 | 0.023339 | 0 | 0 | 0 | 0 | 0 | 0 | 3,980 | 0.446589 |
8193d66190779e7816666311c0493c349ff06765 | 121 | py | Python | pymachine/condition.py | landrew31/pymachine | 117ad7aa3694a1ccb0be207cc931de8c2c345567 | [
"MIT"
]
| 1 | 2018-02-21T08:57:08.000Z | 2018-02-21T08:57:08.000Z | pymachine/condition.py | landrew31/pymachine | 117ad7aa3694a1ccb0be207cc931de8c2c345567 | [
"MIT"
]
| null | null | null | pymachine/condition.py | landrew31/pymachine | 117ad7aa3694a1ccb0be207cc931de8c2c345567 | [
"MIT"
]
| null | null | null | from collections import namedtuple
Condition = namedtuple(
'Condition',
['current_state', 'input_character'],
)
| 17.285714 | 41 | 0.719008 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.355372 |
8195c711df03d29790fdcc4e7f130ef66986f549 | 788 | py | Python | examples/simple_lakehouse/simple_lakehouse/assets.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
]
| 2 | 2021-06-21T17:50:26.000Z | 2021-06-21T19:14:23.000Z | examples/simple_lakehouse/simple_lakehouse/assets.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
]
| 7 | 2022-03-16T06:55:04.000Z | 2022-03-18T07:03:25.000Z | examples/simple_lakehouse/simple_lakehouse/assets.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
]
| 1 | 2021-08-18T17:21:57.000Z | 2021-08-18T17:21:57.000Z | """Asset definitions for the simple_lakehouse example."""
import pandas as pd
from lakehouse import Column, computed_table, source_table
from pyarrow import date32, float64, string
sfo_q2_weather_sample_table = source_table(
path="data", columns=[Column("tmpf", float64()), Column("valid_date", string())],
)
@computed_table(
input_assets=[sfo_q2_weather_sample_table],
columns=[Column("valid_date", date32()), Column("max_tmpf", float64())],
)
def daily_temperature_highs_table(sfo_q2_weather_sample: pd.DataFrame) -> pd.DataFrame:
"""Computes the temperature high for each day"""
sfo_q2_weather_sample["valid_date"] = pd.to_datetime(sfo_q2_weather_sample["valid"])
return sfo_q2_weather_sample.groupby("valid_date").max().rename(columns={"tmpf": "max_tmpf"})
| 41.473684 | 97 | 0.757614 | 0 | 0 | 0 | 0 | 471 | 0.597716 | 0 | 0 | 198 | 0.251269 |
8196db5a9a3e9b1ef0fc71ca07363d90aa3c3237 | 4,386 | py | Python | aindex/demo.py | ad3002/Lyrebird | 8c0a186e32d61189f073401152c52a89bfed46ed | [
"MIT"
]
| null | null | null | aindex/demo.py | ad3002/Lyrebird | 8c0a186e32d61189f073401152c52a89bfed46ed | [
"MIT"
]
| null | null | null | aindex/demo.py | ad3002/Lyrebird | 8c0a186e32d61189f073401152c52a89bfed46ed | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#@created: 07.01.2018
#@author: Aleksey Komissarov
#@contact: [email protected]
from aindex import *
settings = {
"index_prefix": "tests/kmers.23",
"aindex_prefix": "tests/kmers.23",
"reads_file": "tests/reads.reads",
}
index = load_aindex(settings)
k = 23
sequence = "TAAGTTATTATTTAGTTAATACTTTTAACAATATTATTAAGGTATTTAAAAAATACTATTATAGTATTTAACATAGTTAAATACCTTCCTTAATACTGTTAAATTATATTCAATCAATACATATATAATATTATTAAAATACTTGATAAGTATTATTTAGATATTAGACAAATACTAATTTTATATTGCTTTAATACTTAATAAATACTACTTATGTATTAAGTAAATATTACTGTAATACTAATAACAATATTATTACAATATGCTAGAATAATATTGCTAGTATCAATAATTACTAATATAGTATTAGGAAAATACCATAATAATATTTCTACATAATACTAAGTTAATACTATGTGTAGAATAATAAATAATCAGATTAAAAAAATTTTATTTATCTGAAACATATTTAATCAATTGAACTGATTATTTTCAGCAGTAATAATTACATATGTACATAGTACATATGTAAAATATCATTAATTTCTGTTATATATAATAGTATCTATTTTAGAGAGTATTAATTATTACTATAATTAAGCATTTATGCTTAATTATAAGCTTTTTATGAACAAAATTATAGACATTTTAGTTCTTATAATAAATAATAGATATTAAAGAAAATAAAAAAATAGAAATAAATATCATAACCCTTGATAACCCAGAAATTAATACTTAATCAAAAATGAAAATATTAATTAATAAAAGTGAATTGAATAAAATTTTGAAAAAAATGAATAACGTTATTATTTCCAATAACAAAATAAAACCACATCATTCATATTTTTTAATAGAGGCAAAAGAAAAAGAAATAAACTTTTATGCTAACAATGAATACTTTTCTGTCAAATGTAATTTAAATAAAAATATTGATATTCTTGAACAAGGCTCCTTAATTGTTAAAGGAAAAATTTTTAACGATCTTATTAATGGCATAAAAGAAGAGATTATTACTATTCAAGAAAAAGATCAAACACTTTTGGTTAAAACAAAAAAAACAAGTATTAATTTAAACACAATTAATGTGAATGAATTTCCAAGAATAAGGTTTAATGAAAAAAACGATTTAAGTGAATTTAATCAATTCAAAATAAATTATTCACTTTTAGTAAAAGGCATTAAAAAAATTTTTCACTCAGTTTCAAATAATCGTGAAATATCTTCTAAATTTAATGGAGTAAATTTCAATGGATCCAATGGAAAAGAAATATTTTTAGAAGCTTCTGACACTTATAAACTATCTGTTTTTGAGATAAAGCAAGAAACAGAACCATTTGATTTCATTTTGGAGAGTAATTTACTTAGTTTCATTAATTCTTTTAATCCTGAAGAAGATAAATCTATTGTTTTTTATTACAGAAAAGATAATAAAGATAGCTTTAGTACAGAAATGTTGATTTCAATGGATAACTTTATGATTAGTTACACATCGGTTAATGAAAAATTTCCAGAGGTAAACTACTTTTTTGAATTTGAACCTGAAACTAAAATAGTTGTTCAAAAAAATGAATTAAAAGATGCACTTCAAAGAATTCAAACTTTGGCTCAAAATGAAAGAACTTTTTTATGCGATATGCAAATTAACAGTTCTGAATTAAAAATAAGAGCTATTGTTAATAATATCGGAAATTCTCTTGAGGAAATTTCTTGTCTTAAATTTGAAGGTTATAAACTTAATATTTCTTTTAACCCAAGTTCTCTATTAGATCACATAGAGTCTTTTGAATCAAATGAAATAAATTTTGATTTCCAAGGAAATAGTAAGTATTTTTTGATAACCTCTAAAAGTGAACCTGAACTTAAGCAAATATTGGTTCCTTCAAGATAATGAATCTTTACGATCTTTTAGAACTACCAACTACAGCATCAATAAAAGAAATAAAAATTGCTTATAAAAGATTAGCAAAGCGTTATCACCCTGATGTAAATAAATTAGGTTCGCAAACTTTTGTTGAAATTAATAATGCTTATTCAATATTAAGTGATCCTAACCAAAAGGAAAAATATGATTCAATGCTGAAAGTTAATGATTTTCAAAATCGCATCAAAAATTTAGATATTAGTGTTAGATGACATGAAAATTTCATGGAAGAACTCGAACTTCGTAAGAACTGAGAATTTGATTTTTTTTCATCTGATGAAGATTTCTTTTATTCTCCATTTACAAAAA"
test_kmer = "TAAGTTATTATTTAGTTAATACT"
right_kmer = "AGTTAATACTTTTAACAATATTA"
print("Task 1. Get kmer frequency")
# raw_input("\nReady?")
for i in range(len(sequence)-k+1):
kmer = sequence[i:i+k]
print("Position %s kmer %s freq = %s" % (i, kmer, index[kmer]))
print("Task 2. Iter read by read, print the first 20 reads")
# raw_input("\nReady?")
for i, read in enumerate(index.iter_reads()):
if i == 20:
break
print(i, read)
print("Task 3. Iter reads by kmer, returs (start, next_read_start, read, pos_if_uniq|None, all_poses)")
# raw_input("\nReady?")
for read in iter_reads_by_kmer(test_kmer, index):
print(read)
print("Task 4. Get distances in reads for two kmers, returns a list of (rid, left_kmer_pos, right_kmer_pos) tuples.")
# raw_input("\nReady?")
print(get_left_right_distances(test_kmer, right_kmer, index))
print("Task 5. Get layout for kmer, returns (max_pos, reads, lefts, rights, rids, starts), for details see source code")
# raw_input("\nReady?")
max_pos, reads, lefts, rights, rids, starts = get_layout_for_kmer(right_kmer, index)
print("Central layout:")
for read in reads:
print(read)
print("Left flanks:")
print(lefts)
print("Right flanks:")
print(rights)
print("Task 6. Iter reads by sequence, returтs (start, next_read_start, read, pos_if_uniq|None, all_poses)")
# raw_input("\nReady?")
sequence = "AATATTATTAAGGTATTTAAAAAATACTATTATAGTATTTAACATA"
for read in iter_reads_by_sequence(sequence, index):
print(read)
print("Task 7. Iter reads by kmer with reads as SE, returns (start, next_read_start, subread, kmere_pos, -1|0|1 for spring_pos, was_reversed, poses_in_read)")
# raw_input("\nReady?")
user_reads = set()
sequence = "AATATTATTAAGGTATTTAAAAAATACTATTATAGTATTTAACATA"
for rid, nextrid, read, pos, spring_pos, was_reversed, poses in get_reads_se_by_kmer(kmer, index, user_reads, k=23):
print(rid, read, pos)
| 58.48 | 2,183 | 0.858413 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,422 | 0.780032 |
81972bcd61b6a76b4f3ac7d2990ee8f85d6af35f | 1,013 | py | Python | tests/testresources/pytest_resource_path_ini.py | yukihiko-shinoda/pytest-resource-path | bc56c4b5f2c8f3138baeac7f145717f6a70af7b6 | [
"MIT"
]
| 5 | 2020-09-06T01:54:28.000Z | 2021-06-14T11:10:09.000Z | tests/testresources/pytest_resource_path_ini.py | yukihiko-shinoda/pytest-resource-path | bc56c4b5f2c8f3138baeac7f145717f6a70af7b6 | [
"MIT"
]
| 5 | 2020-05-23T08:30:01.000Z | 2021-05-01T04:58:06.000Z | tests/testresources/pytest_resource_path_ini.py | yukihiko-shinoda/pytest-resource-path | bc56c4b5f2c8f3138baeac7f145717f6a70af7b6 | [
"MIT"
]
| null | null | null | """Implements test for pytest-resource-path Fixtures with pytest.ini."""
from pathlib import Path
import pytest
def test_resource_path_ini(resource_path, request):
"""Fixture resource_path should be following absolute path."""
assert resource_path == Path(str(request.fspath)).parents[1] / Path(
"data/test_package/test_module_something/test_resource_path_ini"
)
def test_resource_path_root_ini(resource_path_root, request):
"""Fixture resource_path_root should be following absolute path."""
assert resource_path_root == Path(str(request.fspath)).parents[1] / Path("data")
@pytest.fixture(scope="package")
def resource_path_root_scope_package_ini(resource_path_root):
yield resource_path_root
# Reason: To define fixture in same module. pylint: disable=redefined-outer-name
def test_resource_path_root_scope_package_ini(resource_path_root_scope_package_ini, request):
assert resource_path_root_scope_package_ini == Path(str(request.fspath)).parents[1] / Path("data")
| 37.518519 | 102 | 0.785785 | 0 | 0 | 90 | 0.088845 | 123 | 0.121422 | 0 | 0 | 366 | 0.361303 |
8197395414f35f5a57891af7ddfab20969d9cd9f | 301 | py | Python | 17-files/read-file-with-try-block.py | johnehunt/Python3Intro | 2a41ce488aac11bb3928ea81e57be1c2c8acdac2 | [
"Apache-2.0"
]
| 1 | 2020-11-03T19:46:25.000Z | 2020-11-03T19:46:25.000Z | 14-files/read-file-with-try-block.py | johnehunt/PythonIntroDS | 7e9d5c5494191cd68bc71e140df5fb30290a8da6 | [
"Apache-2.0"
]
| null | null | null | 14-files/read-file-with-try-block.py | johnehunt/PythonIntroDS | 7e9d5c5494191cd68bc71e140df5fb30290a8da6 | [
"Apache-2.0"
]
| 1 | 2019-09-21T08:24:46.000Z | 2019-09-21T08:24:46.000Z | # Illustrates combining exception / error handling
# with file access
print('Start')
try:
with open('myfile2.txt', 'r') as f:
lines = f.readlines()
for line in lines:
print(line, end='')
except FileNotFoundError as err:
print('oops')
print(err)
print('Done')
| 20.066667 | 50 | 0.61794 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 105 | 0.348837 |
81977d254cadb7ee5093cb2ff32e221394f8fe36 | 8,455 | py | Python | Miscellaneous/test_script_pymc3/multinominal.py | junpenglao/Planet_Sakaar_Data_Science | 73d9605b91b774a56d18c193538691521f679f16 | [
"MIT"
]
| 51 | 2018-04-08T19:53:15.000Z | 2021-11-24T21:08:25.000Z | Miscellaneous/test_script_pymc3/multinominal.py | junpenglao/Planet_Sakaar_Data_Science | 73d9605b91b774a56d18c193538691521f679f16 | [
"MIT"
]
| 2 | 2018-05-29T20:50:37.000Z | 2020-09-12T07:14:08.000Z | Miscellaneous/test_script_pymc3/multinominal.py | junpenglao/Planet_Sakaar_Data_Science | 73d9605b91b774a56d18c193538691521f679f16 | [
"MIT"
]
| 13 | 2018-07-21T09:53:10.000Z | 2021-06-07T19:06:26.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 11 13:30:53 2017
@author: laoj
"""
import numpy as np
import pymc3 as pm
import theano.tensor as tt
from pymc3.distributions.distribution import Discrete, draw_values, generate_samples, infer_shape
from pymc3.distributions.dist_math import bound, logpow, factln, Cholesky
from pymc3.math import tround
#%% n scaler, p 1D
#n = 183
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
p = np.array([[ 0.21245365, 0.41223126, 0.37531509],
[ 0.13221011, 0.50537169, 0.3624182 ],
[ 0.08813779, 0.54447146, 0.36739075],
[ 0.18932804, 0.4630365, 0.34763546],
[ 0.11006472, 0.49227755, 0.39765773],
[ 0.17886852, 0.41098834, 0.41014314]])
# p = np.array([ 0.21245365, 0.41223126, 0.37531509])
n = tt.as_tensor_variable(n)
p = tt.as_tensor_variable(p)
n = np.squeeze(n)
n = tt.shape_padright(n) if n.ndim == 1 else tt.as_tensor_variable(n)
n.ndim
n * p
#%%
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
#n = 183
p = np.array([[ 0.21245365, 0.41223126, 0.37531509],
[ 0.13221011, 0.50537169, 0.3624182 ],
[ 0.08813779, 0.54447146, 0.36739075],
[ 0.18932804, 0.4630365, 0.34763546],
[ 0.11006472, 0.49227755, 0.39765773],
[ 0.17886852, 0.41098834, 0.41014314]])
#p = np.array([[ 0.21245365, 0.41223126, 0.37531509]])
#n = tt.as_tensor_variable(n)
p = tt.as_tensor_variable(p)
#%%
class Multinomial(Discrete):
def __init__(self, n, p, *args, **kwargs):
super(Multinomial, self).__init__(*args, **kwargs)
p = p / tt.sum(p, axis=-1, keepdims=True)
n = np.squeeze(n) # works also if n is a tensor
if len(self.shape) > 1:
m = self.shape[-2]
try:
assert n.shape == (m,)
except (AttributeError, AssertionError):
n = n * tt.ones(m)
self.n = tt.shape_padright(n)
self.p = p if p.ndim > 1 else tt.shape_padleft(p)
elif n.ndim == 1:
self.n = tt.shape_padright(n)
self.p = p if p.ndim > 1 else tt.shape_padleft(p)
else:
# n is a scalar, p is a 1d array
self.n = tt.as_tensor_variable(n)
self.p = tt.as_tensor_variable(p)
self.mean = self.n * self.p
mode = tt.cast(tt.round(self.mean), 'int32')
diff = self.n - tt.sum(mode, axis=-1, keepdims=True)
inc_bool_arr = tt.abs_(diff) > 0
mode = tt.inc_subtensor(mode[inc_bool_arr.nonzero()],
diff[inc_bool_arr.nonzero()])
self.mode = mode
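        # self.mode is the rounded mean with one entry per row adjusted so the
        # row again sums exactly to n.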
def _random(self, n, p, size=None):
original_dtype = p.dtype
# Set float type to float64 for numpy. This change is related to numpy issue #8317 (https://github.com/numpy/numpy/issues/8317)
p = p.astype('float64')
# Now, re-normalize all of the values in float64 precision. This is done inside the conditionals
if size == p.shape:
size = None
if (p.ndim == 1) and (n.ndim == 0):
p = p / p.sum()
randnum = np.random.multinomial(n, p.squeeze(), size=size)
else:
p = p / p.sum(axis=1, keepdims=True)
if n.shape[0] > p.shape[0]:
randnum = np.asarray([
np.random.multinomial(nn, p.squeeze(), size=size)
for nn in n
])
elif n.shape[0] < p.shape[0]:
randnum = np.asarray([
np.random.multinomial(n.squeeze(), pp, size=size)
for pp in p
])
else:
randnum = np.asarray([
np.random.multinomial(nn, pp, size=size)
for (nn, pp) in zip(n, p)
])
return randnum.astype(original_dtype)
def random(self, point=None, size=None):
n, p = draw_values([self.n, self.p], point=point)
samples = generate_samples(self._random, n, p,
dist_shape=self.shape,
size=size)
return samples
def logp(self, x):
n = self.n
p = self.p
return bound(
tt.sum(factln(n)) - tt.sum(factln(x)) + tt.sum(x * tt.log(p)),
tt.all(x >= 0),
tt.all(tt.eq(tt.sum(x, axis=-1, keepdims=True), n)),
tt.all(p <= 1),
tt.all(tt.eq(tt.sum(p, axis=-1), 1)),
tt.all(tt.ge(n, 0)),
broadcast_conditions=False
)
Multinomial.dist(1,np.ones(3)/3,shape=(6, 3)).mode.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).p.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).n.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).mean.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).random()
#%%
counts =np.asarray([[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]])
Multinomial.dist(n,p,shape=(6, 3)).logp(x=counts).eval()
#%%
with pm.Model() as model:
like = Multinomial('obs_ABC', n, p, observed=counts, shape=counts.shape)
#%%
paramall = (
[[.25, .25, .25, .25], 4, 2],
[[.25, .25, .25, .25], (1, 4), 3],
# 3: expect to fail
# [[.25, .25, .25, .25], (10, 4)],
[[.25, .25, .25, .25], (10, 1, 4), 5],
# 5: expect to fail
# [[[.25, .25, .25, .25]], (2, 4), [7, 11]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (2, 4), 13],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (2, 4), [17, 19]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (1, 2, 4), [23, 29]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (10, 2, 4), [31, 37]],
)
for p, shape, n in paramall:
with pm.Model() as model:
m = Multinomial('m', n=n, p=np.asarray(p), shape=shape)
print(m.random().shape)
#%%
counts =np.asarray([[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]])
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
sparsity=1 #not zero
beta=np.ones(counts.shape) #input for dirichlet
with pm.Model() as model:
theta=pm.Dirichlet('theta',beta/sparsity, shape = counts.shape)
transition=pm.Multinomial('transition',n,theta,observed=counts)
trace=pm.sample(1000)
#%%
import numpy as np
import pymc3 as pm
import theano.tensor as tt
def norm_simplex(p):
"""Sum-to-zero transformation."""
return (p.T / p.sum(axis=-1)).T
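# norm_simplex row-normalises p so each row sums to 1,
# e.g. [[1., 3.]] -> [[0.25, 0.75]].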
def ccmodel(beta, x):
"""Community composition model."""
return norm_simplex(tt.exp(tt.dot(x, tt.log(beta))))
class DirichletMultinomial(pm.Discrete):
"""Dirichlet Multinomial Model
"""
def __init__(self, alpha, *args, **kwargs):
super(DirichletMultinomial, self).__init__(*args, **kwargs)
self.alpha = alpha
def logp(self, x):
alpha = self.alpha
n = tt.sum(x, axis=-1)
sum_alpha = tt.sum(alpha, axis=-1)
const = (tt.gammaln(n + 1) + tt.gammaln(sum_alpha)) - tt.gammaln(n + sum_alpha)
series = tt.gammaln(x + alpha) - (tt.gammaln(x + 1) + tt.gammaln(alpha))
result = const + tt.sum(series, axis=-1)
return result
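    # logp above computes the Dirichlet-multinomial log-pmf:
    #   gammaln(n + 1) - sum_i gammaln(x_i + 1)
    #   + gammaln(sum_i alpha_i) - gammaln(n + sum_i alpha_i)
    #   + sum_i [gammaln(x_i + alpha_i) - gammaln(alpha_i)]
    # with n = sum_i x_i (computed row-wise when x is a matrix).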
def as_col(x):
if isinstance(x, tt.TensorVariable):
return x.dimshuffle(0, 'x')
else:
return np.asarray(x).reshape(-1, 1)
def as_row(x):
if isinstance(x, tt.TensorVariable):
return x.dimshuffle('x', 0)
else:
return np.asarray(x).reshape(1, -1)
n, k, r = 25, 10, 2
x = np.random.randint(0, 1000, size=(n, k))
y = np.random.randint(0, 1000, size=n)
design = np.vstack((np.ones(25), np.random.randint(2, size=n))).T
with pm.Model() as model:
# Community composition
pi = pm.Dirichlet('pi', np.ones(k), shape=(r, k))
comp = pm.Deterministic('comp', ccmodel(pi, design))
# Inferred population density of observed taxa (hierarchical model)
rho = pm.Normal('rho', shape=r)
tau = pm.Lognormal('tau')
dens = pm.Lognormal('dens', tt.dot(design, rho), tau=tau, shape=n)
# Community composition *with* the spike
expected_recovery = as_col(1 / dens)
_comp = norm_simplex(tt.concatenate((comp, expected_recovery), axis=1))
# Variability
mu = pm.Lognormal('mu')
# Data
obs = DirichletMultinomial('obs', _comp * mu,
observed=tt.concatenate((x, as_col(y)), axis=1))
pm.sample(1000) | 30.970696 | 135 | 0.543465 | 3,704 | 0.438084 | 0 | 0 | 0 | 0 | 0 | 0 | 1,082 | 0.127972 |
819a475b581f4721e5c8b8ee781500a5749d808c | 8,054 | py | Python | transformation_fnc.py | usrmaia/transformation-fnc | 37ef77708892417ac985bb6f1cf62285834560d8 | [
"MIT"
]
| null | null | null | transformation_fnc.py | usrmaia/transformation-fnc | 37ef77708892417ac985bb6f1cf62285834560d8 | [
"MIT"
]
| null | null | null | transformation_fnc.py | usrmaia/transformation-fnc | 37ef77708892417ac985bb6f1cf62285834560d8 | [
"MIT"
]
| null | null | null | from useful import *
from os import system
def remove_implication(formula):
while ">" in formula:
operator = formula.find(">")
print(formula, operator)
subform_left = get_subform_left(formula, operator)
subform_right = get_subform_right(formula, operator)
formula = get_remove_implication(formula, subform_left, subform_right, operator)
return formula
def get_remove_implication(formula, subform_left, subform_right, operator):
# ...(A>B)... |-> ...(-A#B)...
no_modification_right = formula[operator + len(subform_right) + 1:]
no_modification_left = formula[:operator - len(subform_left)]
return f"{no_modification_left}-{subform_left}#{subform_right}{no_modification_right}"
def morgan_law(formula):
while "-(" in formula:
index = formula.find("-(")
print(formula, index)
operator = get_operator(formula, index + 1)
subform_left = get_subform_left(formula, operator)
subform_right = get_subform_right(formula, operator)
formula = get_morgan_law(formula, subform_left, subform_right, operator)
return formula
def get_morgan_law(formula, subform_left, subform_right, operator):
# ...-(A&B)... |-> ...(-A#-B)...
# ...-(A#B)... |-> ...(-A&-B)...
match formula[operator]:
case "#":
new_operator = "&"
case "&":
new_operator = "#"
no_modification_right = formula[operator + len(subform_right) + 1:]
no_modification_left = formula[:operator - len(subform_left) - 1 - 1]
return f"{no_modification_left}(-{subform_left}{new_operator}-{subform_right}{no_modification_right}"
def remove_double_negation(formula):
# --A |-> A
formula = formula.replace("--", "")
return formula
def distributivity(formula):
index = 0
while index < len(formula):
# Existir "#(" ou ")#" é apenas a primeira condição para se aplicar a distributividade
# A segunda condição é existir "#(A&B)" ou "(A&B)#"
if "#(" in formula[index:index + 2]: # "#("
operator_and = get_operator(formula, index + 1)
if formula[operator_and] == "&": # "#(A&B)"
print(formula, index, operator_and)
formula, index = get_distributivity_lr(formula, index, operator_and)
if ")#" in formula[index:index + 2]: # "(#"
len_subform_left = len(get_subform_left(formula, index + 1))
operator_and = get_operator(formula, index + 1 - len_subform_left)
if formula[operator_and] == "&": # "(A&B)#"
print(formula, index + 1, operator_and)
formula, index = get_distributivity_rl(formula, index + 1, operator_and)
index += 1
return formula
def get_distributivity_lr(formula, operator_or, operator_and):
# ...(A#(B&C))... |-> ...((A#B)&(A#C))...
    # Outer parentheses of the formula
subform_left = get_subform_left(formula, operator_or)
no_modification_left = formula[:operator_or - len(subform_left)]
subform_right = get_subform_right(formula, operator_or)
no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parentheses of the formula
subform_middle = get_subform_left(formula, operator_and)
subform_right = get_subform_right(formula, operator_and)
return f"{no_modification_left}({subform_left}#{subform_middle})&({subform_left}#{subform_right}){no_modification_right}", 0
def get_distributivity_rl(formula, operator_or, operator_and):
# ...((A&B)#C)... |-> ...((A#C)&(B#C))...
    # Outer parentheses of the formula
subform_left = get_subform_left(formula, operator_or)
no_modification_left = formula[:operator_or - len(subform_left)]
subform_right = get_subform_right(formula, operator_or)
no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parentheses of the formula
subform_left = get_subform_left(formula, operator_and)
subform_middle = get_subform_right(formula, operator_and)
return f"{no_modification_left}({subform_left}#{subform_right})&({subform_middle}#{subform_right}){no_modification_right}", 0
def distributivity_new_atom(formula):
index = 0
while index < len(formula):
# Existir "#(" ou ")#" é apenas a primeira condição para se aplicar a distributividade
# A segunda condição é existir "#(A&B)" ou "(A&B)#"
if "#(" in formula[index:index + 2]: # "#("
operator_and = get_operator(formula, index + 1)
if formula[operator_and] == "&": # "#(A&B)"
print(formula, index, operator_and)
formula, index = get_distributivity_new_atom_lr(formula, index, operator_and)
if ")#" in formula[index:index + 2]: # "(#"
len_subform_left = len(get_subform_left(formula, index + 1))
operator_and = get_operator(formula, index + 1 - len_subform_left)
if formula[operator_and] == "&": # "(A&B)#"
print(formula, index + 1, operator_and)
formula, index = get_distributivity_new_atom_rl(formula, index + 1, operator_and)
index += 1
return formula
def get_distributivity_new_atom_lr(formula, operator_or, operator_and):
# ...(A#(B&C))... |-> ...(((A#p)&((¬p#B)&(¬p#C)))&((¬B#¬C)#p))...
    # Outer parentheses of the formula
subform_left = get_subform_left(formula, operator_or)
no_modification_left = formula[:operator_or - len(subform_left)]
subform_right = get_subform_right(formula, operator_or)
no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parentheses of the formula
subform_middle = get_subform_left(formula, operator_and)
subform_right = get_subform_right(formula, operator_and)
new_operator = get_unprecedented(formula)
return f"{no_modification_left}(({subform_left}#{new_operator})&((¬{new_operator}#{subform_middle})&(¬{new_operator}#{subform_right})))&((¬{subform_middle}#¬{subform_right})#{new_operator}){no_modification_right}", 0
#return f"{no_modification_left}({subform_left}#{new_operator})&(¬{new_operator}#{subform_middle})&(¬{new_operator}#{subform_right})&(¬{subform_middle}#¬{subform_right}#{new_operator}){no_modification_right}", 0
def get_distributivity_new_atom_rl(formula, operator_or, operator_and):
# ...((A&B)#C)... |-> ...(((C#p)&((¬p#A)&(¬p#B)))&((¬A#¬B)#p))...
    # Outer parentheses of the formula
subform_left = get_subform_left(formula, operator_or)
no_modification_left = formula[:operator_or - len(subform_left)]
subform_right = get_subform_right(formula, operator_or)
no_modification_right = formula[operator_or + len(subform_right) + 1:]
    # Inner parentheses of the formula
subform_left = get_subform_left(formula, operator_and)
subform_middle = get_subform_right(formula, operator_and)
new_operator = get_unprecedented(formula)
return f"{no_modification_left}(({subform_right}#{new_operator})&((¬{new_operator}#{subform_left})&(¬{new_operator}#{subform_middle})))&((¬{subform_left}#¬{subform_middle})#{new_operator}){no_modification_right}", 0
#return f"{no_modification_left}({subform_right}#{new_operator})&(¬{new_operator}#{subform_left})&(¬{new_operator}#{subform_middle})&(¬{subform_left}#¬{subform_middle}#{new_operator}){no_modification_right}", 0
if __name__ == "__main__":
system("cls")
#system("clear")
while(True):
formula = input("Fórmula: ")
if formula == 'q': break
print(formula)
print("Removendo implicações: ")
A1 = remove_implication(formula)
print(A1)
print("Aplicando Lei de Morgan: ")
A2 = morgan_law(A1)
print(A2)
print("Removendo dupla negação: ")
A3 = remove_double_negation(A2)
print(A3)
print("Aplicando distributividade: ")
A4 = distributivity(A3)
print(A4)
print("Aplicando distributividade com novo átomo: ")
A5 = distributivity_new_aton(A3)
print(A5)
system("pause") | 47.099415 | 220 | 0.661286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,461 | 0.303677 |
819a9225919d23f1c377fd7749da7d7ea0d1e851 | 2,896 | py | Python | src/implant/commands/__init__.py | diefans/debellator | 44203174ef8e0702be577a9e08dedde40e3ce1fe | [
"Apache-2.0"
]
| 2 | 2018-09-08T00:04:43.000Z | 2021-07-20T01:31:09.000Z | src/implant/commands/__init__.py | diefans/implant | 44203174ef8e0702be577a9e08dedde40e3ce1fe | [
"Apache-2.0"
]
| null | null | null | src/implant/commands/__init__.py | diefans/implant | 44203174ef8e0702be577a9e08dedde40e3ce1fe | [
"Apache-2.0"
]
| null | null | null | # Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Core features
"""
import asyncio
import concurrent
import logging
import os
import time
from implant import core
log = logging.getLogger(__name__)
class Echo(core.Command):
"""Demonstrate the basic command API."""
data = core.Parameter(default='ping', description='Meaningful data.')
async def local(self, context):
# custom protocol
# first: send
await context.channel.send_iteration("send to remote")
# second: receive
from_remote = []
async for x in context.channel:
from_remote.append(x)
log.debug("************ receiving from remote: %s", from_remote)
# third: wait for remote to finish and return result
remote_result = await context.remote_future
result = {
'local_data': self.data,
'from_remote': ''.join(from_remote),
}
result.update(remote_result)
return result
remote = core.CommandRemote('implant.commands.remotes.Echo')
class SystemLoad(core.Command):
async def local(self, context):
t, load = await context.remote_future
return t, load
async def remote(self, context):
t, load = time.time(), os.getloadavg()
return t, load
class Copy(core.Command):
src = core.Parameter(description='Source file at local side.')
    dest = core.Parameter(description='Destination file at remote side.')
def __init__(self, *args, **kwargs):
super(Copy, self).__init__(*args, **kwargs)
self.executor = concurrent.futures.ThreadPoolExecutor()
self.loop = asyncio.get_event_loop()
def __del__(self):
self.executor.shutdown(wait=True)
async def local(self, context):
with open(self.src, "rb") as f:
while True:
data = await self.loop.run_in_executor(self.executor, f.read, 0x8000)
if not data:
context.channel.send(StopAsyncIteration())
break
await context.channel.send(data)
result = await context.remote_future
return result
async def remote(self, context):
with open(self.dest, "wb") as f:
async for data in context.channel:
await self.loop.run_in_executor(self.executor, f.write, data)
| 28.96 | 85 | 0.648481 | 2,155 | 0.74413 | 0 | 0 | 0 | 0 | 1,445 | 0.498964 | 932 | 0.321823 |
819b2d29e2f76cc772587cf27c82ea7f7e151615 | 73 | py | Python | trainer/utils/__init__.py | chriszhou0916/czai4art | 2fecc9b808fc16cb74c4c29be8a8770ffe247b15 | [
"MIT"
]
| null | null | null | trainer/utils/__init__.py | chriszhou0916/czai4art | 2fecc9b808fc16cb74c4c29be8a8770ffe247b15 | [
"MIT"
]
| null | null | null | trainer/utils/__init__.py | chriszhou0916/czai4art | 2fecc9b808fc16cb74c4c29be8a8770ffe247b15 | [
"MIT"
]
| null | null | null | from trainer.utils.losses import *
from trainer.utils import custom_ssim
| 24.333333 | 37 | 0.835616 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
819bd18a4722e9a3211561882e51cf2324399bde | 1,693 | py | Python | src/Testing/ZopeTestCase/__init__.py | tseaver/Zope-RFA | 08634f39b0f8b56403a2a9daaa6ee4479ef0c625 | [
"ZPL-2.1"
]
| 2 | 2015-12-21T10:34:56.000Z | 2017-09-24T11:07:58.000Z | src/Testing/ZopeTestCase/__init__.py | MatthewWilkes/Zope | 740f934fc9409ae0062e8f0cd6dcfd8b2df00376 | [
"ZPL-2.1"
]
| null | null | null | src/Testing/ZopeTestCase/__init__.py | MatthewWilkes/Zope | 740f934fc9409ae0062e8f0cd6dcfd8b2df00376 | [
"ZPL-2.1"
]
| null | null | null | ##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Names exported by the ZopeTestCase package
"""
import ZopeLite as Zope2
import utils
import layer
from ZopeLite import hasProduct
from ZopeLite import installProduct
from ZopeLite import hasPackage
from ZopeLite import installPackage
from ZopeLite import _print
from ZopeTestCase import folder_name
from ZopeTestCase import user_name
from ZopeTestCase import user_password
from ZopeTestCase import user_role
from ZopeTestCase import standard_permissions
from ZopeTestCase import ZopeTestCase
from ZopeTestCase import FunctionalTestCase
from PortalTestCase import portal_name
from PortalTestCase import PortalTestCase
from sandbox import Sandboxed
from functional import Functional
from base import TestCase
from base import app
from base import close
from warnhook import WarningsHook
from unittest import main
from zopedoctest import ZopeDocTestSuite
from zopedoctest import ZopeDocFileSuite
from zopedoctest import FunctionalDocTestSuite
from zopedoctest import FunctionalDocFileSuite
import zopedoctest as doctest
import transaction
import placeless
Zope = Zope2
| 29.189655 | 78 | 0.759598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 650 | 0.383934 |
819c97d3aababb49d55ab8197540259628b81475 | 3,132 | py | Python | CIM14/CPSM/Equipment/LoadModel/SubLoadArea.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
]
| 58 | 2015-04-22T10:41:03.000Z | 2022-03-29T16:04:34.000Z | CIM14/CPSM/Equipment/LoadModel/SubLoadArea.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
]
| 12 | 2015-08-26T03:57:23.000Z | 2020-12-11T20:14:42.000Z | CIM14/CPSM/Equipment/LoadModel/SubLoadArea.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
]
| 35 | 2015-01-10T12:21:03.000Z | 2020-09-09T08:18:16.000Z | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CPSM.Equipment.LoadModel.EnergyArea import EnergyArea
class SubLoadArea(EnergyArea):
"""The class is the second level in a hierarchical structure for grouping of loads for the purpose of load flow load scaling.
"""
def __init__(self, LoadGroups=None, LoadArea=None, *args, **kw_args):
"""Initialises a new 'SubLoadArea' instance.
@param LoadGroups: The Loadgroups in the SubLoadArea.
@param LoadArea: The LoadArea where the SubLoadArea belongs.
"""
self._LoadGroups = []
self.LoadGroups = [] if LoadGroups is None else LoadGroups
self._LoadArea = None
self.LoadArea = LoadArea
super(SubLoadArea, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["LoadGroups", "LoadArea"]
_many_refs = ["LoadGroups"]
def getLoadGroups(self):
"""The Loadgroups in the SubLoadArea.
"""
return self._LoadGroups
def setLoadGroups(self, value):
for x in self._LoadGroups:
x.SubLoadArea = None
for y in value:
y._SubLoadArea = self
self._LoadGroups = value
LoadGroups = property(getLoadGroups, setLoadGroups)
def addLoadGroups(self, *LoadGroups):
for obj in LoadGroups:
obj.SubLoadArea = self
def removeLoadGroups(self, *LoadGroups):
for obj in LoadGroups:
obj.SubLoadArea = None
def getLoadArea(self):
"""The LoadArea where the SubLoadArea belongs.
"""
return self._LoadArea
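    # setLoadArea below keeps the bidirectional association consistent: it
    # removes this SubLoadArea from the previous LoadArea's _SubLoadAreas list
    # and appends it to the new one's.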
def setLoadArea(self, value):
if self._LoadArea is not None:
filtered = [x for x in self.LoadArea.SubLoadAreas if x != self]
self._LoadArea._SubLoadAreas = filtered
self._LoadArea = value
if self._LoadArea is not None:
if self not in self._LoadArea._SubLoadAreas:
self._LoadArea._SubLoadAreas.append(self)
LoadArea = property(getLoadArea, setLoadArea)
| 36 | 129 | 0.685185 | 1,964 | 0.627075 | 0 | 0 | 0 | 0 | 0 | 0 | 1,542 | 0.492337 |
819e28662a5c7fe45f90d593ee3b3db086815aa5 | 369 | py | Python | bin/clean_pdb.py | caixiuhong/Stable-MCCE | 186bdafdf1d631994b2cdd6ec6a548383f559929 | [
"MIT"
]
| null | null | null | bin/clean_pdb.py | caixiuhong/Stable-MCCE | 186bdafdf1d631994b2cdd6ec6a548383f559929 | [
"MIT"
]
| null | null | null | bin/clean_pdb.py | caixiuhong/Stable-MCCE | 186bdafdf1d631994b2cdd6ec6a548383f559929 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
import sys
HATOMS = ["HG", "HD", "HE", "HH"]
lines = open(sys.argv[1]).readlines()
for line in lines:
if line[:6] == "ATOM " or line[:6] == "HETATM":
if line[17:20] == "WAT":
continue
if line[13] == "H":
continue
if line[12:14] in HATOMS:
continue
print(line.strip("\n"))
| 23.0625 | 52 | 0.490515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 65 | 0.176152 |
819e7c6d0a67bfff73dacc1d420e6cd83f55ee9f | 7,693 | py | Python | old_logen/pylogen/OutputBook.py | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | [
"Apache-2.0"
]
| 14 | 2015-10-16T11:35:30.000Z | 2021-05-12T15:31:16.000Z | old_logen/pylogen/OutputBook.py | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | [
"Apache-2.0"
]
| null | null | null | old_logen/pylogen/OutputBook.py | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | [
"Apache-2.0"
]
| 5 | 2015-10-16T12:44:41.000Z | 2019-10-02T02:45:38.000Z | import Pmw
import os
import re
from FastIndex import FastIndex, timer
from PrologFrame import PrologFrame
from TerminalFrame import TerminalFrame
class OutputBook(Pmw.NoteBook):
def __init__(self, master=None):
self.app = master
Pmw.NoteBook.__init__(self, self.app.bottom,createcommand=self.create_page)
self.spec_page = self.add('Specialised File')
self.memo_page = self.add('Memo Table')
self.gx_page = self.add('Generating Extension')
self.output_page = self.add('Output')
self.console_page = self.add('Console')
#self.new_terminal_page = self.add('Terminal 1')
#spec file
self.output_spec = PrologFrame(self.spec_page,"",app=self.app)
self.output_spec.pack(side="bottom", fill="both", expand="yes")
#memo file
self.output_memo = PrologFrame(self.memo_page,"",app=self.app)
self.output_memo.pack(side="bottom", fill="both", expand="yes")
#gx file
self.output_gx = PrologFrame(self.gx_page,"",app=self.app)
self.output_gx.pack(side="bottom", fill="both", expand="yes")
#output
self.output_out = PrologFrame(self.output_page,"",app=self.app)
self.output_out.pack(side="bottom", fill="both", expand="yes")
#console
self.output_console = PrologFrame(self.console_page,"", app=self.app)
self.output_console.pack(side="bottom", fill="both", expand="yes")
self.pack(side="bottom", fill="both", expand="yes")
self.output_spec.text.tag_bind("nametag", "<Motion>", self.mouse_over)
self.output_spec.text.tag_bind("nametag", "<Leave>", self.mouse_leave)
self.terminal_pages = []
self.terminals = []
self.term_count = 0
self.create_new_terminal()
def create_page(self, pagename):
if pagename.startswith("Terminal"):
i = self.terminal_pages.index(pagename)
page = self.page(self.index(pagename))
self.terminals[i] = TerminalFrame(page, app=self.app, id=pagename)
self.terminals[i].pack(side="bottom", fill="both", expand="yes")
self.app.update_completions()
#self.selectpage(pagename)
def get_console_stream(self):
pass
def write_to_console(self,string):
self.output_console.text.config(state="normal")
self.output_console.text.insert("end", string)
self.output_console.text.config(state="disabled")
def set_font(self, font):
self.output_spec.text["font"] = font
self.output_memo.text["font"] = font
self.output_gx.text["font"] = font
self.output_out.text["font"] = font
def view_spec_output(self, filename):
(root, ext) = os.path.splitext(filename)
fast_idx = self.output_spec.load_source(root + ".spec")
self.output_memo.load_source(root + ".memo")
self.output_gx.load_source(root + ".gx")
self.output_out.clear()
funcs = self.get_spec_funcs()
spec_lines = self.output_spec.text.get(1.0, 'end')
pos = 0
while 1:
id = next_id(spec_lines, pos)
if id == ():
break
(start, end) = id
pos = end
(start_idx, end_idx) = fast_idx.get_two_tk_indices_same_line(start,
end)
self.output_spec.text.tag_add("nametag", start_idx, end_idx)
def reset_output(self, gx=None):
if gx is None:
pass
#self.output_gx.clear()
elif os.path.exists(gx):
self.output_gx.load_source(gx)
else:
self.output_gx.clear()
self.output_memo.clear()
self.output_spec.clear()
self.output_out.clear()
self.selectpage('Specialised File')
def set_output_from_file(self, filename):
self.output_out.load_source(filename)
def set_output(self, text):
self.output_out.set_text(text)
def view_output(self):
self.selectpage('Output')
def get_spec_funcs(self):
memo_text = self.output_memo.text.get(1.0, 'end').split('\n')
funcs = {}
for line in memo_text:
if line.startswith('table'):
(orig, i) = up_to_comma(line[6:])
(pattern, _) = up_to_comma(line[i + 7:])
i = pattern.find('(')
if i > 0:
name = pattern[:i]
else:
name = pattern
funcs[name] = orig + " --> " + pattern
self.funcs = funcs
def get_tag_position(self, x, y):
index = self.output_spec.text.index("@"+str(x)+","+str(y)+" + 1 char")
return self.output_spec.text.tag_prevrange("nametag", index)
start = None
def mouse_over(self, event):
(start, end) = self.get_tag_position(event.x, event.y)
predicate = self.output_spec.text.get(start, end)
#print "over " + start + ", " + end + " : " + predicate
#print self.funcs[predicate]
if self.start != start:
self.app.balloon.configure(relmouse="both",yoffset=15)
self.app.balloon._showBalloon(self.output_spec.text,
self.funcs[predicate], False)
self.start = start
def mouse_leave(self, event):
self.app.balloon.configure(relmouse="none",yoffset=1)
self.app.balloon.withdraw()
self.start = None
def create_new_terminal(self):
self.term_count += 1
self.terminal_pages.append('Terminal ' + str(self.term_count))
page = self.add(self.terminal_pages[-1])
self.terminals.append(None)
def quit(self):
for t in self.terminals:
if t is not None:
t.quit()
def kill_terminal(self, term_str):
i = self.terminal_pages.index(term_str)
self.terminals[i].quit()
self.delete(term_str)
self.terminals.pop(i)
self.terminal_pages.pop(i)
def reset_completions(self):
for t in self.terminals:
if t is not None:
t.reset_completions()
def add_completions(self, completions):
for t in self.terminals:
if t is not None:
t.add_completions(completions)
def up_to_comma(str):
bracket_stack = []
i = 0
current_char = str[i]
in_string = False
in_double_string = False
ret_string = ''
while len(bracket_stack) > 0 or current_char != ',' or in_string or in_double_string:
if current_char == '(' or current_char == '[' or current_char == '{':
bracket_stack.append(current_char)
elif current_char == ')' or current_char == ']' or current_char == '}':
bracket_stack = bracket_stack[:-1]
elif current_char == '"':
if in_double_string:
in_double_string = False
elif not in_string:
in_double_string = True
elif current_char == "'":
if in_string:
in_string = False
elif not in_double_string:
in_string = True
ret_string = ret_string + current_char
i = i + 1
current_char = str[i]
return (ret_string.strip(), i)
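# For example: up_to_comma("foo(a,b), rest") == ("foo(a,b)", 8) -- it scans up
# to the first comma outside any brackets or quotes, and raises IndexError if
# there is no such comma.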
regexp = re.compile('[a-zA-Z0-9_]+__[0-9]+')
def next_id(string, pos):
match = regexp.search(string[pos:])
if match is None:
return ()
else:
return (match.start() + pos, match.end() + pos)
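# For example: next_id("call p__3(X).", 0) == (5, 9) -- the span of the first
# specialised-predicate name of the form <name>__<number>.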
| 34.191111 | 100 | 0.573508 | 6,352 | 0.825686 | 0 | 0 | 0 | 0 | 0 | 0 | 715 | 0.092942 |
819ee9aceebbd56d2a4d4ed207f6ae47bb68ff70 | 5,468 | py | Python | back/db.py | belshoff/Agenda | a9d3d1a80d6b3c00e4d0055847d5ed2bb6c6d7d1 | [
"Apache-2.0"
]
| null | null | null | back/db.py | belshoff/Agenda | a9d3d1a80d6b3c00e4d0055847d5ed2bb6c6d7d1 | [
"Apache-2.0"
]
| null | null | null | back/db.py | belshoff/Agenda | a9d3d1a80d6b3c00e4d0055847d5ed2bb6c6d7d1 | [
"Apache-2.0"
]
| null | null | null | import sqlite3
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS Produtos (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
price REAL,
compra_id INTEGER,
FOREIGN KEY (compra_id) REFERENCES Compras(id)
);
"""
)
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS Compras (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
date TEXT NOT NULL
);
"""
)
class Produto(object):
def getAll(self):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute("SELECT * FROM Produtos;")
return [
{
"id": items[0],
"name": items[1],
"price": items[2],
"compra_id": items[3]
} for items in cursor.fetchall()
]
def getByCompra(self, compraId):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
print(f"SELECT * FROM Produtos WHERE compra_id = {compraId}")
cursor.execute(f"SELECT * FROM Produtos WHERE compra_id = {compraId}")
return [
{
"id": items[0],
"name": items[1],
"price": items[2],
} for items in cursor.fetchall()
]
def insert(self, *args):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
print(f"INSERT INTO Produtos (name, price, compra_id) VALUES ('{args[0]}', {args[1]}, {args[2]})")
cursor.execute(f"INSERT INTO Produtos (name, price, compra_id) VALUES ('{args[0]}', {args[1]}, {args[2]})")
def getById(self, id):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute(f"SELECT * FROM Produtos WHERE id = {id} ;")
return [
{
"id": items[0],
"name": items[1],
"price": items[2]
} for items in cursor.fetchall()
][0]
def update(self, id, *args):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute(f"UPDATE Produtos SET name = {args[0]}, price = {args[1]}, compra_id = {args[2]} WHERE id = {id};")
def delete(self, id):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute(f"DELETE FROM Produtos WHERE id = {id}")
def deleteByCompra(self, compraId):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
print(f"DELETE FROM Produtos WHERE compra_id = {compraId}")
cursor.execute(f"DELETE FROM Produtos WHERE compra_id = {compraId}")
class Compra(object):
def __init__(self):
self.produto = Produto()
def getAll(self):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
print("SELECT * FROM Compras;")
cursor.execute("SELECT * FROM Compras;")
return [
{
"id": items[0],
"date": items[1],
"produtos": self.produto.getByCompra(items[0])
} for items in cursor.fetchall()
]
def insert(self, *args):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
print(f"INSERT INTO Compras (date) VALUES ('{args[0]}')")
cursor.execute(f"INSERT INTO Compras (date) VALUES ('{args[0]}')")
c = self.getAll()[-1]
ps = list(args[1])
for p in ps:
self.produto.insert(str(p["name"]), p["price"], c["id"])
# return self.getById(c.id)
def getById(self, id):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute(f"SELECT * FROM Compras WHERE id = {id} ;")
return [
{
"id": items[0],
"date": items[1],
"produtos": self.produto.getByCompra(id)
} for items in cursor.fetchall()
][0]
def getByDate(self, date):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute(f"SELECT * FROM Compras WHERE date = '{date}' ;")
return [
{
"id": items[0],
"date": items[1],
"produtos": self.produto.getByCompra(items[0])
} for items in cursor.fetchall()
]
    def update(self, id, *args):
        with sqlite3.connect('storage.db') as conn:
            cursor = conn.cursor()
            # Compras only has (id, date) columns, so only date is updatable.
            cursor.execute(
                """
                UPDATE Compras
                SET date = ?
                WHERE id = ?;
                """, (*args, id)
            )
def delete(self, id):
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
self.produto.deleteByCompra(self.getById(id)["id"])
print(f"DELETE FROM Compras WHERE id = {id}")
cursor.execute(f"DELETE FROM Compras WHERE id = {id}") | 35.277419 | 126 | 0.492502 | 4,869 | 0.890454 | 0 | 0 | 0 | 0 | 0 | 0 | 1,818 | 0.33248 |
819ef83975adf31b6a1082bbb314f1833657807e | 246 | bzl | Python | tools/build_rules/gtk_dependent.bzl | Ewpratten/frc_971_mirror | 3a8a0c4359f284d29547962c2b4c43d290d8065c | [
"BSD-2-Clause"
]
| 39 | 2021-06-18T03:22:30.000Z | 2022-03-21T15:23:43.000Z | tools/build_rules/gtk_dependent.bzl | Ewpratten/frc_971_mirror | 3a8a0c4359f284d29547962c2b4c43d290d8065c | [
"BSD-2-Clause"
]
| 10 | 2021-06-18T03:22:19.000Z | 2022-03-18T22:14:15.000Z | tools/build_rules/gtk_dependent.bzl | Ewpratten/frc_971_mirror | 3a8a0c4359f284d29547962c2b4c43d290d8065c | [
"BSD-2-Clause"
]
| 4 | 2021-08-19T19:20:04.000Z | 2022-03-08T07:33:18.000Z | disable_gtk_binaries = True
def gtk_dependent_cc_library(**kwargs):
if not disable_gtk_binaries:
native.cc_library(**kwargs)
def gtk_dependent_cc_binary(**kwargs):
if not disable_gtk_binaries:
native.cc_binary(**kwargs)
| 24.6 | 39 | 0.739837 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
819fb9161fe72a4b226194ee2bfb3c7088844885 | 743 | py | Python | vectorize.py | tomohiroando/recipe_gan | 00f7418dfc5ee732e6d2de33d9c397e24f304864 | [
"MIT"
]
| null | null | null | vectorize.py | tomohiroando/recipe_gan | 00f7418dfc5ee732e6d2de33d9c397e24f304864 | [
"MIT"
]
| null | null | null | vectorize.py | tomohiroando/recipe_gan | 00f7418dfc5ee732e6d2de33d9c397e24f304864 | [
"MIT"
]
| null | null | null | import sys
from gensim import models
from gensim.models.doc2vec import LabeledSentence
import pickle
def corpus_to_sentences(corpus):
sentences = []
for idx, (name, doc) in enumerate(corpus.items()):
        sys.stdout.write('\rPreprocessing {}/{}'.format(idx, len(corpus)))
sentence = LabeledSentence(words=doc, tags=[name])
sentences.append(sentence)
return sentences
with open('corpus_text', 'rb') as f:
corpus = pickle.load(f)
sentences = corpus_to_sentences(corpus)
model = models.Doc2Vec(vector_size=400, window=15, alpha=.025, min_alpha=.025, min_count=1, sample=1e-6)
model.build_vocab(sentences)
print(len(corpus))
model.train(sentences, total_examples=len(corpus), epochs=20)
model.save('doc2vec.model') | 28.576923 | 104 | 0.722746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 54 | 0.071904 |
81a0eab42248c18facd3ce51c6e6f97ebfbcd166 | 6,206 | py | Python | lxman/registry.py | stuxcrystal/lxman | ea0b44a8b9424b3489e393591f5384a986f583a3 | [
"MIT"
]
| 1 | 2017-12-04T18:48:21.000Z | 2017-12-04T18:48:21.000Z | lxman/registry.py | stuxcrystal/lxman | ea0b44a8b9424b3489e393591f5384a986f583a3 | [
"MIT"
]
| null | null | null | lxman/registry.py | stuxcrystal/lxman | ea0b44a8b9424b3489e393591f5384a986f583a3 | [
"MIT"
]
| null | null | null | # -*- encoding: utf-8 -*-
from collections import UserDict
from itertools import count
import shutil
import winreg
import uuid
PATH = "Software\\Microsoft\\Windows\\CurrentVersion\\Lxss"
KEY = winreg.HKEY_CURRENT_USER
class RegistryDescriptor(object):
def __init__(self, name):
self.name = name
def __get__(self, instance, clazz):
key = instance._key()
if key is not None:
return self._get_value_by_registry(key)
return self._get_value_by_vartable(instance, key)
def __set__(self, instance, value):
key = instance._key("", winreg.KEY_WRITE)
if key is not None:
return self._set_value_by_registry(key, value)
def _get_value_by_registry(self, key):
with key as k:
try:
value, _ = winreg.QueryValueEx(k, self.name)
except FileNotFoundError:
return None
return value
def _set_value_by_registry(self, key, value):
if isinstance(value, int):
type = winreg.REG_DWORD
elif isinstance(value, (list, tuple)):
type = winreg.REG_MULTI_SZ
else:
type = winreg.REG_SZ
with key as k:
winreg.SetValueEx(k, self.name, 0, type, value)
    def _get_value_by_vartable(self, instance, key):
        # Fallback when no registry key is available (empty guid): read the
        # attribute from the instance's own __dict__ under the descriptor name.
        return vars(instance).get(self.name)
class EnvironmentVariables(UserDict):
def __init__(self, distribution):
super(EnvironmentVariables, self).__init__()
self.distribution = distribution
self.reload()
def _save_values(self):
return (f"{v[0]}={v[1]}" for v in self.data.items())
def save(self):
self.distribution.default_environment = list(self._save_values())
def reload(self):
self.clear()
self.update(dict(
v.split("=", 1) for v in self.distribution.default_environment
))
class Distribution(object):
@classmethod
def create(cls, name, source_path):
guid = "{%s}"%uuid.uuid4()
with winreg.CreateKey(KEY, f"{PATH}\\{guid}") as k:
winreg.SetValueEx(k, 'State', 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(k, 'DistributionName', 0, winreg.REG_SZ, name)
winreg.SetValueEx(k, 'BasePath', 0, winreg.REG_SZ, source_path)
winreg.SetValueEx(k, 'DefaultUid', 0, winreg.REG_DWORD, 0)
winreg.SetValueEx(k, 'Version', 0, winreg.REG_DWORD, 1)
winreg.SetValueEx(k, 'KernelCommandLine', 0, winreg.REG_SZ, 'BOOT_IMAGE=/kernel init=/init ro')
winreg.SetValueEx(k, 'DefaultEnvironment', 0, winreg.REG_MULTI_SZ, [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
])
return cls(guid)
def __init__(self, guid=""):
self.guid = guid
def _key(self, sub="", privileges=winreg.KEY_READ):
if not self.guid:
return None
if sub:
sub = "\\" + sub
return winreg.OpenKey(KEY, PATH+f"\\{self.guid}"+sub, 0, privileges)
name = RegistryDescriptor("DistributionName")
base_path = RegistryDescriptor("BasePath")
default_user = RegistryDescriptor("DefaultUid")
default_environment = RegistryDescriptor("DefaultEnvironment")
cmdline = RegistryDescriptor("KernelCommandLine")
flags = RegistryDescriptor("Flags")
package_family_name = RegistryDescriptor("PackageFamilyName")
_state = RegistryDescriptor("State")
version = RegistryDescriptor("Version")
@property
def environment(self):
return EnvironmentVariables(self)
def launch_params(self, params=("/bin/bash",)):
return [shutil.which("wsl.exe"), f"{self.guid}"] + list(params)
def __repr__(self):
return f"<Distribution '{self.name}' guid:{self.guid}>"
def delete(self):
with Lxss._key('', winreg.KEY_WRITE) as k:
winreg.DeleteKey(k, self.guid)
@property
def state(self):
st = self._state
if st == 1:
return "Ready"
elif st == 3:
return "Installing"
return "Unknown:" + str(st)
@state.setter
def state(self, value):
if isinstance(value, int):
self._state = value
return
value = value.lower()
if value == "ready":
self._state = 1
elif value == "installing":
self._state = 3
else:
self._state = value
def __enter__(self):
self._state = 3
return self
def __exit__(self, *exc):
self._state = 1
return False
class _Lxss(object):
def _key(self, sub="", privileges=winreg.KEY_READ):
if sub:
sub = "\\" + sub
return winreg.OpenKey(KEY, PATH+sub, 0, privileges)
default_distribution = RegistryDescriptor("DefaultDistribution")
@property
def default(self):
return Distribution(self.default_distribution)
@default.setter
def default(self, value):
self.default_distribution = value.guid
def __iter__(self):
for i in count():
with self._key() as k:
try:
name = winreg.EnumKey(k, i)
except OSError as e:
if e.winerror != 259:
raise
break
yield Distribution(name)
def get(self, value, default=None):
for distribution in self:
if value.startswith("{") and value.endswith("}"):
if distribution.guid.lower() == value.lower():
return distribution
else:
if distribution.name == value:
return distribution
return default
def __getitem__(self, value):
value = self.get(value, None)
if value is None:
raise KeyError("Unknown distribution")
return value
Lxss = _Lxss()
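if __name__ == "__main__":
    # Hedged usage sketch, not part of the original module: only meaningful on
    # a Windows host with WSL distributions registered under HKCU\...\Lxss.
    for distribution in Lxss:
        print(distribution)
    print("Default:", Lxss.default.name)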
| 30.875622 | 108 | 0.563165 | 5,932 | 0.955849 | 330 | 0.053174 | 1,631 | 0.26281 | 0 | 0 | 650 | 0.104737 |
81a34a9a29a2bcf516df9f355478686bebdaa96b | 25,235 | py | Python | virtual/lib/python3.6/site-packages/debian/changelog.py | marknesh/pitches | 0a480d9bc2beafaefa0121393b1502cc05edab89 | [
"MIT"
]
| null | null | null | virtual/lib/python3.6/site-packages/debian/changelog.py | marknesh/pitches | 0a480d9bc2beafaefa0121393b1502cc05edab89 | [
"MIT"
]
| 10 | 2020-03-08T21:13:29.000Z | 2021-04-08T19:41:14.000Z | flask/lib/python3.6/site-packages/debian/changelog.py | JOFLIX/grapevines | 34576e01184570d79cc140b42ffb71d322132da6 | [
"MIT",
"Unlicense"
]
| null | null | null | # changelog.py -- Python module for Debian changelogs
# Copyright (C) 2006-7 James Westby <[email protected]>
# Copyright (C) 2008 Canonical Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# The parsing code is based on that from dpkg which is:
# Copyright 1996 Ian Jackson
# Copyright 2005 Frank Lichtenheld <[email protected]>
# and licensed under the same license as above.
"""This module implements facilities to deal with Debian changelogs."""
from __future__ import absolute_import
import os
import pwd
import re
import socket
import warnings
import sys
import six
from debian import debian_support
# Python 3 doesn't have StandardError, but let's avoid changing our
# exception inheritance hierarchy for Python 2.
try:
_base_exception_class = StandardError
except NameError:
_base_exception_class = Exception
class ChangelogParseError(_base_exception_class):
"""Indicates that the changelog could not be parsed"""
is_user_error = True
def __init__(self, line):
self._line=line
def __str__(self):
return "Could not parse changelog: "+self._line
class ChangelogCreateError(_base_exception_class):
"""Indicates that changelog could not be created, as all the information
required was not given"""
class VersionError(_base_exception_class):
"""Indicates that the version does not conform to the required format"""
is_user_error = True
def __init__(self, version):
        self._version = version
def __str__(self):
return "Could not parse version: "+self._version
# TODO(jsw): Remove this in favor of using debian_support.Version directly. I
# don't think we gain anything by using this empty subclass.
class Version(debian_support.Version):
"""Represents a version of a Debian package."""
# debian_support.Version now has all the functionality we need
class ChangeBlock(object):
"""Holds all the information about one block from the changelog."""
def __init__(self, package=None, version=None, distributions=None,
urgency=None, urgency_comment=None, changes=None,
author=None, date=None, other_pairs=None, encoding='utf-8'):
self._raw_version = None
self._set_version(version)
self.package = package
self.distributions = distributions
self.urgency = urgency or "unknown"
self.urgency_comment = urgency_comment or ''
self._changes = changes
self.author = author
self.date = date
self._trailing = []
self.other_pairs = other_pairs or {}
self._encoding = encoding
self._no_trailer = False
        self._trailer_separator = "  "
def _set_version(self, version):
if version is not None:
self._raw_version = str(version)
def _get_version(self):
return Version(self._raw_version)
version = property(_get_version, _set_version)
def other_keys_normalised(self):
norm_dict = {}
        for (key, value) in self.other_pairs.items():
key = key[0].upper() + key[1:].lower()
m = xbcs_re.match(key)
if m is None:
key = "XS-%s" % key
norm_dict[key] = value
return norm_dict
def changes(self):
return self._changes
def add_trailing_line(self, line):
self._trailing.append(line)
def add_change(self, change):
if self._changes is None:
self._changes = [change]
else:
            # Bit of trickery to keep the formatting nicer with a blank
            # line at the end if there is one
changes = self._changes
changes.reverse()
added = False
for i in range(len(changes)):
m = blankline.match(changes[i])
if m is None:
changes.insert(i, change)
added = True
break
changes.reverse()
if not added:
changes.append(change)
self._changes = changes
def _get_bugs_closed_generic(self, type_re):
changes = six.u(' ').join(self._changes)
bugs = []
for match in type_re.finditer(changes):
closes_list = match.group(0)
for match in re.finditer(r"\d+", closes_list):
bugs.append(int(match.group(0)))
return bugs
@property
def bugs_closed(self):
return self._get_bugs_closed_generic(closes)
@property
def lp_bugs_closed(self):
return self._get_bugs_closed_generic(closeslp)
def _format(self):
# TODO(jsw): Switch to StringIO or a list to join at the end.
block = ""
if self.package is None:
raise ChangelogCreateError("Package not specified")
block += self.package + " "
if self._raw_version is None:
raise ChangelogCreateError("Version not specified")
block += "(" + self._raw_version + ") "
if self.distributions is None:
raise ChangelogCreateError("Distribution not specified")
block += self.distributions + "; "
if self.urgency is None:
raise ChangelogCreateError("Urgency not specified")
block += "urgency=" + self.urgency + self.urgency_comment
for (key, value) in self.other_pairs.items():
block += ", %s=%s" % (key, value)
block += '\n'
if self.changes() is None:
raise ChangelogCreateError("Changes not specified")
for change in self.changes():
block += change + "\n"
if not self._no_trailer:
if self.author is None:
raise ChangelogCreateError("Author not specified")
if self.date is None:
raise ChangelogCreateError("Date not specified")
block += " -- " + self.author + self._trailer_separator \
+ self.date + "\n"
for line in self._trailing:
block += line + "\n"
return block
if sys.version >= '3':
__str__ = _format
def __bytes__(self):
return str(self).encode(self._encoding)
else:
__unicode__ = _format
def __str__(self):
return unicode(self).encode(self._encoding)
topline = re.compile(r'^(\w%(name_chars)s*) \(([^\(\) \t]+)\)'
r'((\s+%(name_chars)s+)+)\;'
% {'name_chars': '[-+0-9a-z.]'},
re.IGNORECASE)
blankline = re.compile(r'^\s*$')
change = re.compile(r'^\s\s+.*$')
endline = re.compile(r'^ -- (.*) <(.*)>(  ?)((\w+\,\s*)?\d{1,2}\s+\w+\s+'
                     r'\d{4}\s+\d{1,2}:\d\d:\d\d\s+[-+]\d{4}\s*)$')
endline_nodetails = re.compile(r'^ --(?: (.*) <(.*)>(  ?)((\w+\,\s*)?\d{1,2}'
                               r'\s+\w+\s+\d{4}\s+\d{1,2}:\d\d:\d\d\s+[-+]\d{4}'
                               r'))?\s*$')
keyvalue= re.compile(r'^([-0-9a-z]+)=\s*(.*\S)$', re.IGNORECASE)
value_re = re.compile(r'^([-0-9a-z]+)((\s+.*)?)$', re.IGNORECASE)
xbcs_re = re.compile('^X[BCS]+-', re.IGNORECASE)
emacs_variables = re.compile(r'^(;;\s*)?Local variables:', re.IGNORECASE)
vim_variables = re.compile('^vim:', re.IGNORECASE)
cvs_keyword = re.compile(r'^\$\w+:.*\$')
comments = re.compile(r'^\# ')
more_comments = re.compile(r'^/\*.*\*/')
closes = re.compile(r'closes:\s*(?:bug)?\#?\s?\d+(?:,\s*(?:bug)?\#?\s?\d+)*',
re.IGNORECASE)
closeslp = re.compile(r'lp:\s+\#\d+(?:,\s*\#\d+)*', re.IGNORECASE)
old_format_re1 = re.compile(r'^(\w+\s+\w+\s+\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}'
r'\s+[\w\s]*\d{4})\s+(.*)\s+(<|\()(.*)(\)|>)')
old_format_re2 = re.compile(r'^(\w+\s+\w+\s+\d{1,2},?\s*\d{4})\s+(.*)'
r'\s+(<|\()(.*)(\)|>)')
old_format_re3 = re.compile(r'^(\w[-+0-9a-z.]*) \(([^\(\) \t]+)\)\;?',
re.IGNORECASE)
old_format_re4 = re.compile(r'^([\w.+-]+)(-| )(\S+) Debian (\S+)',
re.IGNORECASE)
old_format_re5 = re.compile('^Changes from version (.*) to (.*):',
re.IGNORECASE)
old_format_re6 = re.compile(r'^Changes for [\w.+-]+-[\w.+-]+:?\s*$',
re.IGNORECASE)
old_format_re7 = re.compile(r'^Old Changelog:\s*$', re.IGNORECASE)
old_format_re8 = re.compile(r'^(?:\d+:)?\w[\w.+~-]*:?\s*$')
class Changelog(object):
"""Represents a debian/changelog file."""
# TODO(jsw): Avoid masking the 'file' built-in.
def __init__(self, file=None, max_blocks=None,
allow_empty_author=False, strict=False, encoding='utf-8'):
"""Initializer.
Args:
file: The contents of the changelog, either as a str, unicode object,
or an iterator of lines (each of which is either a str or unicode)
max_blocks: The maximum number of blocks to parse from the input.
(Default: no limit)
allow_empty_author: Whether to allow an empty author in the trailer
line of a change block. (Default: False)
strict: Whether to raise an exception if there are errors. (Default:
use a warning)
encoding: If the input is a str or iterator of str, the encoding to
use when interpreting the input.
"""
self._encoding = encoding
self._blocks = []
self.initial_blank_lines = []
if file is not None:
self.parse_changelog(file, max_blocks=max_blocks,
allow_empty_author=allow_empty_author,
strict=strict)
def _parse_error(self, message, strict):
if strict:
raise ChangelogParseError(message)
else:
warnings.warn(message)
def parse_changelog(self, file, max_blocks=None,
allow_empty_author=False, strict=True, encoding=None):
first_heading = "first heading"
        next_heading_or_eof = "next heading or EOF"
start_of_change_data = "start of change data"
more_changes_or_trailer = "more change data or trailer"
slurp_to_end = "slurp to end"
encoding = encoding or self._encoding
if file is None:
self._parse_error('Empty changelog file.', strict)
return
self._blocks = []
self.initial_blank_lines = []
current_block = ChangeBlock(encoding=encoding)
changes = []
state = first_heading
old_state = None
if isinstance(file, bytes):
file = file.decode(encoding)
if isinstance(file, six.string_types):
# Make sure the changelog file is not empty.
if len(file.strip()) == 0:
self._parse_error('Empty changelog file.', strict)
return
file = file.splitlines()
for line in file:
if not isinstance(line, six.text_type):
line = line.decode(encoding)
# Support both lists of lines without the trailing newline and
# those with trailing newlines (e.g. when given a file object
# directly)
line = line.rstrip('\n')
if state == first_heading or state == next_heading_or_eof:
top_match = topline.match(line)
blank_match = blankline.match(line)
if top_match is not None:
if (max_blocks is not None
and len(self._blocks) >= max_blocks):
return
current_block.package = top_match.group(1)
current_block._raw_version = top_match.group(2)
current_block.distributions = top_match.group(3).lstrip()
pairs = line.split(";", 1)[1]
all_keys = {}
other_pairs = {}
for pair in pairs.split(','):
pair = pair.strip()
kv_match = keyvalue.match(pair)
if kv_match is None:
self._parse_error("Invalid key-value "
"pair after ';': %s" % pair, strict)
continue
key = kv_match.group(1)
value = kv_match.group(2)
if key.lower() in all_keys:
self._parse_error("Repeated key-value: "
"%s" % key.lower(), strict)
all_keys[key.lower()] = value
if key.lower() == "urgency":
val_match = value_re.match(value)
if val_match is None:
self._parse_error("Badly formatted "
"urgency value: %s" % value, strict)
else:
current_block.urgency = val_match.group(1)
comment = val_match.group(2)
if comment is not None:
current_block.urgency_comment = comment
else:
other_pairs[key] = value
current_block.other_pairs = other_pairs
state = start_of_change_data
elif blank_match is not None:
if state == first_heading:
self.initial_blank_lines.append(line)
else:
self._blocks[-1].add_trailing_line(line)
else:
emacs_match = emacs_variables.match(line)
vim_match = vim_variables.match(line)
cvs_match = cvs_keyword.match(line)
comments_match = comments.match(line)
more_comments_match = more_comments.match(line)
if ((emacs_match is not None or vim_match is not None)
and state != first_heading):
self._blocks[-1].add_trailing_line(line)
old_state = state
state = slurp_to_end
continue
if (cvs_match is not None or comments_match is not None
or more_comments_match is not None):
if state == first_heading:
self.initial_blank_lines.append(line)
else:
self._blocks[-1].add_trailing_line(line)
continue
if ((old_format_re1.match(line) is not None
or old_format_re2.match(line) is not None
or old_format_re3.match(line) is not None
or old_format_re4.match(line) is not None
or old_format_re5.match(line) is not None
or old_format_re6.match(line) is not None
or old_format_re7.match(line) is not None
or old_format_re8.match(line) is not None)
and state != first_heading):
self._blocks[-1].add_trailing_line(line)
old_state = state
state = slurp_to_end
continue
self._parse_error("Unexpected line while looking "
"for %s: %s" % (state, line), strict)
if state == first_heading:
self.initial_blank_lines.append(line)
else:
self._blocks[-1].add_trailing_line(line)
elif (state == start_of_change_data
or state == more_changes_or_trailer):
change_match = change.match(line)
end_match = endline.match(line)
end_no_details_match = endline_nodetails.match(line)
blank_match = blankline.match(line)
if change_match is not None:
changes.append(line)
state = more_changes_or_trailer
elif end_match is not None:
                    if end_match.group(3) != '  ':
self._parse_error("Badly formatted trailer "
"line: %s" % line, strict)
current_block._trailer_separator = end_match.group(3)
current_block.author = "%s <%s>" \
% (end_match.group(1), end_match.group(2))
current_block.date = end_match.group(4)
current_block._changes = changes
self._blocks.append(current_block)
changes = []
current_block = ChangeBlock(encoding=encoding)
state = next_heading_or_eof
elif end_no_details_match is not None:
if not allow_empty_author:
self._parse_error("Badly formatted trailer "
"line: %s" % line, strict)
continue
current_block._changes = changes
self._blocks.append(current_block)
changes = []
current_block = ChangeBlock(encoding=encoding)
state = next_heading_or_eof
elif blank_match is not None:
changes.append(line)
else:
cvs_match = cvs_keyword.match(line)
comments_match = comments.match(line)
more_comments_match = more_comments.match(line)
if (cvs_match is not None or comments_match is not None
or more_comments_match is not None):
changes.append(line)
continue
self._parse_error("Unexpected line while looking "
"for %s: %s" % (state, line), strict)
changes.append(line)
elif state == slurp_to_end:
if old_state == next_heading_or_eof:
self._blocks[-1].add_trailing_line(line)
else:
changes.append(line)
else:
assert False, "Unknown state: %s" % state
if ((state != next_heading_or_eof and state != slurp_to_end)
or (state == slurp_to_end and old_state != next_heading_or_eof)):
self._parse_error("Found eof where expected %s" % state,
strict)
current_block._changes = changes
current_block._no_trailer = True
self._blocks.append(current_block)
def get_version(self):
"""Return a Version object for the last version"""
return self._blocks[0].version
def set_version(self, version):
"""Set the version of the last changelog block
version can be a full version string, or a Version object
"""
self._blocks[0].version = Version(version)
    version = property(get_version, set_version,
                       doc="Version object for last changelog block")
### For convenience, let's expose some of the version properties
full_version = property(lambda self: self.version.full_version)
epoch = property(lambda self: self.version.epoch)
debian_version = property(lambda self: self.version.debian_revision)
debian_revision = property(lambda self: self.version.debian_revision)
upstream_version = property(lambda self: self.version.upstream_version)
def get_package(self):
"""Returns the name of the package in the last version."""
return self._blocks[0].package
def set_package(self, package):
self._blocks[0].package = package
package = property(get_package, set_package,
doc="Name of the package in the last version")
def get_versions(self):
"""Returns a list of version objects that the package went through."""
return [block.version for block in self._blocks]
versions = property(get_versions,
doc="List of version objects the package went through")
def _raw_versions(self):
return [block._raw_version for block in self._blocks]
def _format(self):
pieces = []
pieces.append(six.u('\n').join(self.initial_blank_lines))
for block in self._blocks:
pieces.append(six.text_type(block))
return six.u('').join(pieces)
if sys.version >= '3':
__str__ = _format
def __bytes__(self):
return str(self).encode(self._encoding)
else:
__unicode__ = _format
def __str__(self):
return unicode(self).encode(self._encoding)
def __iter__(self):
return iter(self._blocks)
def __getitem__(self, n):
""" select a changelog entry by number, version string, or Version
:param n: integer or str representing a version or Version object
"""
if type(n) is int:
return self._blocks[n]
elif type(n) is str:
return self[Version(n)]
return self._blocks[self.versions.index(n)]
def __len__(self):
return len(self._blocks)
def set_distributions(self, distributions):
self._blocks[0].distributions = distributions
distributions = property(lambda self: self._blocks[0].distributions,
set_distributions)
def set_urgency(self, urgency):
self._blocks[0].urgency = urgency
urgency = property(lambda self: self._blocks[0].urgency, set_urgency)
def add_change(self, change):
self._blocks[0].add_change(change)
def set_author(self, author):
self._blocks[0].author = author
author = property(lambda self: self._blocks[0].author, set_author)
def set_date(self, date):
self._blocks[0].date = date
date = property(lambda self: self._blocks[0].date, set_date)
def new_block(self, **kwargs):
kwargs.setdefault('encoding', self._encoding)
block = ChangeBlock(**kwargs)
block.add_trailing_line('')
self._blocks.insert(0, block)
def write_to_open_file(self, file):
file.write(self.__str__())
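# Minimal round-trip sketch (illustrative, made-up changelog entry; not part
# of the vendored module): parse, inspect, then serialise back to text.
_EXAMPLE_CHANGELOG = (
    "example (1.0-1) unstable; urgency=low\n"
    "  * Initial release. (Closes: #123456)\n"
    " -- Jane Doe <[email protected]>  Mon, 01 Jan 2018 00:00:00 +0000\n"
)
def _demo_changelog():
    changelog = Changelog(_EXAMPLE_CHANGELOG)
    assert changelog.package == "example"
    assert str(changelog.version) == "1.0-1"
    assert changelog[0].bugs_closed == [123456]
    return str(changelog)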
def get_maintainer():
"""Get the maintainer information in the same manner as dch.
This function gets the information about the current user for
the maintainer field using environment variables of gecos
informations as approriate.
It uses the same methods as dch to get the information, namely
DEBEMAIL, DEBFULLNAME, EMAIL, NAME, /etc/mailname and gecos.
:returns: a tuple of the full name, email pair as strings.
Either of the pair may be None if that value couldn't
be determined.
"""
env = os.environ
regex = re.compile(r"^(.*)\s+<(.*)>$")
# Split email and name
if 'DEBEMAIL' in env:
match_obj = regex.match(env['DEBEMAIL'])
if match_obj:
if not 'DEBFULLNAME' in env:
env['DEBFULLNAME'] = match_obj.group(1)
env['DEBEMAIL'] = match_obj.group(2)
if 'DEBEMAIL' not in env or 'DEBFULLNAME' not in env:
if 'EMAIL' in env:
match_obj = regex.match(env['EMAIL'])
if match_obj:
if not 'DEBFULLNAME' in env:
env['DEBFULLNAME'] = match_obj.group(1)
env['EMAIL'] = match_obj.group(2)
# Get maintainer's name
if 'DEBFULLNAME' in env:
maintainer = env['DEBFULLNAME']
elif 'NAME' in env:
maintainer = env['NAME']
else:
# Use password database if no data in environment variables
try:
maintainer = re.sub(r',.*', '', pwd.getpwuid(os.getuid()).pw_gecos)
except (KeyError, AttributeError):
maintainer = None
# Get maintainer's mail address
if 'DEBEMAIL' in env:
email = env['DEBEMAIL']
elif 'EMAIL' in env:
email = env['EMAIL']
else:
addr = None
if os.path.exists('/etc/mailname'):
f = open('/etc/mailname')
try:
addr = f.readline().strip()
finally:
f.close()
if not addr:
addr = socket.getfqdn()
if addr:
user = pwd.getpwuid(os.getuid()).pw_name
if not user:
addr = None
else:
addr = "%s@%s" % (user, addr)
if addr:
email = addr
else:
email = None
return (maintainer, email)
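def _demo_get_maintainer():
    # Environment-dependent by design; seeding DEBFULLNAME/DEBEMAIL (made-up
    # values) is the simplest way to get a deterministic answer.
    os.environ["DEBFULLNAME"] = "Jane Doe"
    os.environ["DEBEMAIL"] = "[email protected]"
    return get_maintainer()  # -> ('Jane Doe', '[email protected]')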
| 39.553292 | 80 | 0.559263 | 19,358 | 0.767109 | 0 | 0 | 183 | 0.007252 | 0 | 0 | 6,145 | 0.243511 |
81a35f7c896207540f74045284e195d4e4fb7b21 | 667 | py | Python | Median.py | fatih-iver/Intro-to-Computer-Science-with-Python | 7b8127681415dfd100a0e70fe8a672cec696bbb7 | [
"MIT"
]
| null | null | null | Median.py | fatih-iver/Intro-to-Computer-Science-with-Python | 7b8127681415dfd100a0e70fe8a672cec696bbb7 | [
"MIT"
]
| null | null | null | Median.py | fatih-iver/Intro-to-Computer-Science-with-Python | 7b8127681415dfd100a0e70fe8a672cec696bbb7 | [
"MIT"
]
| null | null | null | # Define a procedure, median, that takes three
# numbers as its inputs, and returns the median
# of the three numbers.
# Make sure your procedure has a return statement.
def bigger(a,b):
if a > b:
return a
else:
return b
def biggest(a,b,c):
return bigger(a,bigger(b,c))
def median(a, b ,c):
if (b >= a and a >= c) or (c >= a and a >= b):
return a
if (a >= b and b >= c) or (c >= b and b >= a):
return b
if (a >= c and c >= b) or (b >= c and c >= a):
return c
print(median(1,2,3))
#>>> 2
print(median(9,3,6))
#>>> 6
print(median(7,8,7))
#>>> 7 | 20.212121 | 51 | 0.493253 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 190 | 0.284858 |
81a49f3c33e13f260a839b1cda7b2ffc5e26f768 | 5,089 | py | Python | miss_islington/util.py | webknjaz/miss-islington | 51c38793b9ff95ecf4a3e98755a291120e7240cc | [
"Apache-2.0"
]
| null | null | null | miss_islington/util.py | webknjaz/miss-islington | 51c38793b9ff95ecf4a3e98755a291120e7240cc | [
"Apache-2.0"
]
| 1 | 2021-02-27T14:15:59.000Z | 2021-02-27T14:15:59.000Z | miss_islington/util.py | webknjaz/miss-islington | 51c38793b9ff95ecf4a3e98755a291120e7240cc | [
"Apache-2.0"
]
| null | null | null | import requests
import os
import subprocess
import gidgethub
from gidgethub import sansio
AUTOMERGE_LABEL = ":robot: automerge"
def comment_on_pr(issue_number, message):
"""
Leave a comment on a PR/Issue
"""
request_headers = sansio.create_headers(
"miss-islington", oauth_token=os.getenv("GH_AUTH")
)
issue_comment_url = (
f"https://api.github.com/repos/python/cpython/issues/{issue_number}/comments"
)
data = {"body": message}
response = requests.post(issue_comment_url, headers=request_headers, json=data)
if response.status_code == requests.codes.created:
print(f"Commented at {response.json()['html_url']}, message: {message}")
else:
print(response.status_code)
print(response.text)
return response
def assign_pr_to_core_dev(issue_number, coredev_login):
"""
Assign the PR to a core dev. Should be done when miss-islington failed
to backport.
"""
request_headers = sansio.create_headers(
"miss-islington", oauth_token=os.getenv("GH_AUTH")
)
edit_issue_url = (
f"https://api.github.com/repos/python/cpython/issues/{issue_number}"
)
data = {"assignees": [coredev_login]}
response = requests.patch(edit_issue_url, headers=request_headers, json=data)
    if response.status_code == requests.codes.ok:
print(f"Assigned PR {issue_number} to {coredev_login}")
else:
print(response.status_code)
print(response.text)
return response
async def leave_comment(gh, pr_number, message):
"""
Leave a comment on a PR/Issue
"""
issue_comment_url = f"/repos/python/cpython/issues/{pr_number}/comments"
data = {"body": message}
await gh.post(issue_comment_url, data=data)
def is_cpython_repo():
cmd = "git log -r 7f777ed95a19224294949e1b4ce56bbffcb1fe9f"
try:
subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
except subprocess.SubprocessError:
return False
return True
async def get_gh_participants(gh, pr_number):
pr_url = f"/repos/python/cpython/pulls/{pr_number}"
pr_result = await gh.getitem(pr_url)
created_by = pr_result["user"]["login"]
merged_by = None
if pr_result["merged_by"] and pr_result["merged_by"]["login"] != "miss-islington":
merged_by = pr_result["merged_by"]["login"]
participants = ""
if created_by == merged_by or merged_by is None:
participants = f"@{created_by}"
else:
participants = f"@{created_by} and @{merged_by}"
return participants
def get_participants(created_by, merged_by):
participants = ""
if created_by == merged_by or merged_by == "miss-islington":
participants = f"@{created_by}"
else:
participants = f"@{created_by} and @{merged_by}"
return participants
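# Tiny illustrative checks (usernames are made up): the merger is credited
# only when distinct from both the author and the bot.
def _demo_get_participants():
    assert get_participants("alice", "alice") == "@alice"
    assert get_participants("alice", "miss-islington") == "@alice"
    assert get_participants("alice", "bob") == "@alice and @bob"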
def normalize_title(title, body):
"""Normalize the title if it spills over into the PR's body."""
if not (title.endswith("…") and body.startswith("…")):
return title
else:
# Being paranoid in case \r\n is used.
return title[:-1] + body[1:].partition("\r\n")[0]
def normalize_message(body):
"""Normalize the message body to make it commit-worthy.
Mostly this just means removing HTML comments, but also removes unwanted
leading or trailing whitespace.
Returns the normalized body.
"""
while "<!--" in body:
body = body[: body.index("<!--")] + body[body.index("-->") + 3 :]
return "\n\n" + body.strip()
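# Tiny illustrative checks (made-up inputs): the "…" marker glues a spilled
# title back together, and HTML comments are cut out of the body.
def _demo_normalize():
    assert normalize_title("Fix the frobni…", "…cator\r\nDetails") == "Fix the frobnicator"
    assert normalize_message("<!-- template -->Fix it.") == "\n\nFix it."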
# Copied over from https://github.com/python/bedevere
async def is_core_dev(gh, username):
"""Check if the user is a CPython core developer."""
org_teams = "/orgs/python/teams"
team_name = "python core"
async for team in gh.getiter(org_teams):
if team["name"].lower() == team_name:
break
else:
raise ValueError(f"{team_name!r} not found at {org_teams!r}")
# The 'teams' object only provides a URL to a deprecated endpoint,
# so manually construct the URL to the non-deprecated team membership
# endpoint.
membership_url = f"/teams/{team['id']}/memberships/{username}"
try:
await gh.getitem(membership_url)
except gidgethub.BadRequest as exc:
if exc.status_code == 404:
return False
raise
else:
return True
def pr_is_awaiting_merge(pr_labels):
label_names = [label["name"] for label in pr_labels]
if (
"DO-NOT-MERGE" not in label_names
and "awaiting merge" in label_names
and "CLA signed" in label_names
):
return True
return False
def pr_is_automerge(pr_labels):
for label in pr_labels:
if label["name"] == AUTOMERGE_LABEL:
return True
return False
async def get_pr_for_commit(gh, sha):
prs_for_commit = await gh.getitem(
f"/search/issues?q=type:pr+repo:python/cpython+sha:{sha}"
)
if prs_for_commit["total_count"] > 0: # there should only be one
pr_for_commit = prs_for_commit["items"][0]
return pr_for_commit
return None
| 29.760234 | 86 | 0.660051 | 0 | 0 | 0 | 0 | 0 | 0 | 1,892 | 0.37149 | 1,771 | 0.347732 |
81a62c5963fc9b6615ab457b4e524e86f8ffa6af | 1,918 | py | Python | src/plugins/database.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
]
| 4 | 2022-01-24T05:33:34.000Z | 2022-03-25T06:29:19.000Z | src/plugins/database.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
]
| null | null | null | src/plugins/database.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
]
| 3 | 2022-02-08T13:24:59.000Z | 2022-03-13T06:42:40.000Z | import aiosqlite
import sqlite3
import asyncio
import nonebot
from nonebot.log import logger
driver: nonebot.Driver = nonebot.get_driver()
config: nonebot.config.Config = driver.config
@driver.on_startup
async def init_db():
config.db = await aiosqlite.connect("src/static/Kiba.db")
logger.info("Kiba Kernel -> Starting to Create \"Kiba Database\"")
try:
await config.db.executescript(
"create table group_poke_table (group_id bigint primary key not null, last_trigger_time int, triggered int, disabled bit, strategy text);"
"create table user_poke_table (user_id bigint, group_id bigint, triggered int);"
"create table guess_table (group_id bigint, enabled bit);"
"create table waiting_table (shop text, location text, wait int, updated text);"
"create table plp_table (id bigint, user_id bigint, nickname text, message text, is_picture bit, view bigint, reply bigint);"
"create table plp_reply_table (id bigint, plpid bigint, userid bigint, nickname text, message text);"
"create table group_plp_table (group_id bigint, disableinsert int, disabletake int, disablereply int, slowmode int, limit int, time bigint);"
"create table plp_blacklist_table (id bigint, lastbanner bigint, disableinsert int, disabletake int, disablereply int);"
"create table gld_table (qq bigint, uid bigint);"
"create table sign_table (no bigint, id bigint, day int);"
"create table acard_table (id bigint, times int, six int, five int, four int, three int, two int, one int);"
)
logger.info("Kiba Kernel -> Create \"Kiba Database\" successfully")
except Exception as e:
logger.info(f"Kiba Kernel --Skip-> Database Created....Skipped Creating Databases. \n[SKIP ERR]{e}")
pass
@driver.on_shutdown
async def free_db():
await config.db.close() | 53.277778 | 153 | 0.696038 | 0 | 0 | 0 | 0 | 1,728 | 0.900938 | 1,689 | 0.880605 | 1,272 | 0.663191 |
81a67c33ee26043b2ddf70e19e7da7c69207a707 | 5,492 | py | Python | dexp/processing/utils/scatter_gather_i2i.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
]
| 16 | 2021-04-21T14:09:19.000Z | 2022-03-22T02:30:59.000Z | dexp/processing/utils/scatter_gather_i2i.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
]
| 28 | 2021-04-15T17:43:08.000Z | 2022-03-29T16:08:35.000Z | dexp/processing/utils/scatter_gather_i2i.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
]
| 3 | 2022-02-08T17:41:30.000Z | 2022-03-18T15:32:27.000Z | from typing import Callable, Optional, Sequence, Tuple, Union
import numpy
from dexp.processing.utils.nd_slice import nd_split_slices, remove_margin_slice
from dexp.processing.utils.normalise import Normalise
from dexp.utils import xpArray
from dexp.utils.backends import Backend
def scatter_gather_i2i(
function: Callable,
image: xpArray,
tiles: Union[int, Tuple[int, ...]],
margins: Optional[Union[int, Tuple[int, ...]]] = None,
normalise: bool = False,
clip: bool = False,
to_numpy: bool = True,
internal_dtype: Optional[numpy.dtype] = None,
) -> xpArray:
"""
Image-2-image scatter-gather.
'Scatters' computation of a given unary function by splitting the input array into tiles,
computing using a given backend, and reassembling the tiles into a single array of same
shape as the inpout that is either backed by the same backend than that of the input image,
or that is backed by numpy -- usefull when the compute backend cannot hold the whole input and output
images in memory.
Parameters
----------
function : unary function
image : input image (can be any backend, numpy )
tiles : tile sizes to cut input image into, can be a single integer or a tuple of integers.
margins : margins to add to each tile, can be a single integer or a tuple of integers.
if None, no margins are added.
normalise : normalises the input image.
clip : clip after normalisation/denormalisation
to_numpy : should the result be a numpy array? Very usefull when the compute backend
cannot hold the whole input and output images in memory.
internal_dtype : internal dtype for computation
Returns
-------
Result of applying the unary function to the input image, if to_numpy==True then the image is
"""
if internal_dtype is None:
internal_dtype = image.dtype
if type(tiles) == int:
tiles = (tiles,) * image.ndim
    # If None is passed for a tile we don't tile along that axis; we also clip the tile size:
tiles = tuple((length if tile is None else min(length, tile)) for tile, length in zip(tiles, image.shape))
if margins is None:
margins = (0,) * image.ndim
if type(margins) == int:
margins = (margins,) * image.ndim
if to_numpy:
result = numpy.empty(shape=image.shape, dtype=internal_dtype)
else:
result = Backend.get_xp_module(image).empty_like(image, dtype=internal_dtype)
# Normalise:
norm = Normalise(Backend.to_backend(image), do_normalise=normalise, clip=clip, quantile=0.005)
# image shape:
shape = image.shape
# We compute the slices objects to cut the input and target images into batches:
tile_slices = list(nd_split_slices(shape, chunks=tiles, margins=margins))
tile_slices_no_margins = list(nd_split_slices(shape, chunks=tiles))
# Zipping together slices with and without margins:
slices = zip(tile_slices, tile_slices_no_margins)
# Number of tiles:
number_of_tiles = len(tile_slices)
if number_of_tiles == 1:
# If there is only one tile, let's not be complicated about it:
result = norm.backward(function(norm.forward(image)))
if to_numpy:
result = Backend.to_numpy(result, dtype=internal_dtype)
else:
result = Backend.to_backend(result, dtype=internal_dtype)
else:
_scatter_gather_loop(
norm.backward, function, image, internal_dtype, norm.forward, result, shape, slices, to_numpy
)
return result
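def _example_scatter_gather_blur():
    # Hedged usage sketch, not part of the original module: tile a 2D image
    # into four 256x256 chunks with 8-pixel margins so a 5x5 mean filter
    # matches the un-tiled result at tile seams. Assumes scipy and dexp's
    # NumpyBackend are importable in this environment.
    from scipy.ndimage import uniform_filter
    from dexp.utils.backends import NumpyBackend
    with NumpyBackend():
        image = numpy.random.rand(512, 512).astype(numpy.float32)
        blurred = scatter_gather_i2i(
            lambda tile: uniform_filter(tile, size=5), image, tiles=256, margins=8
        )
    assert blurred.shape == image.shape
    return blurred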
def _scatter_gather_loop(
denorm_fun: Callable,
function: Callable,
image: xpArray,
internal_dtype: numpy.dtype,
norm_fun: Callable,
result: Callable,
shape: Tuple[int, ...],
slices: Sequence[Tuple[slice, ...]],
to_numpy: bool,
) -> None:
for tile_slice, tile_slice_no_margins in slices:
image_tile = image[tile_slice]
image_tile = Backend.to_backend(image_tile, dtype=internal_dtype)
image_tile = denorm_fun(function(norm_fun(image_tile)))
if to_numpy:
image_tile = Backend.to_numpy(image_tile, dtype=internal_dtype)
else:
image_tile = Backend.to_backend(image_tile, dtype=internal_dtype)
remove_margin_slice_tuple = remove_margin_slice(shape, tile_slice, tile_slice_no_margins)
image_tile = image_tile[remove_margin_slice_tuple]
result[tile_slice_no_margins] = image_tile
# Dask turned out not to work well here: HUGE overhead compared to the light approach above.
# def scatter_gather_dask(backend: Backend,
# function,
# image,
# chunks,
# margins=None):
# boundary=None
# trim=True
# align_arrays=True
#
# image_d = from_array(image, chunks=chunks, asarray=False)
#
# def function_numpy(_image):
# print(_image.shape)
# return backend.to_numpy(function(_image))
#
# #func, *args, depth=None, boundary=None, trim=True, align_arrays=True, **kwargs
# computation= map_overlap(function_numpy,
# image_d,
# depth=margins,
# boundary=boundary,
# trim=trim,
# align_arrays=align_arrays,
# dtype=image.dtype
# )
#
# #computation.visualize(filename='transpose.png')
# result = computation.compute()
#
# return result
| 35.205128 | 110 | 0.667334 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,539 | 0.462309 |
81a7268b47b548089b30e84d12ff883fa4b80a6d | 58 | py | Python | http_shadow/__init__.py | abador/http-shadow | 040935b0715f983714f38005f8ae97c255dae3e0 | [
"MIT"
]
| null | null | null | http_shadow/__init__.py | abador/http-shadow | 040935b0715f983714f38005f8ae97c255dae3e0 | [
"MIT"
]
| null | null | null | http_shadow/__init__.py | abador/http-shadow | 040935b0715f983714f38005f8ae97c255dae3e0 | [
"MIT"
]
| 2 | 2018-09-27T15:20:35.000Z | 2020-10-02T08:38:31.000Z | from .backend import Backend
from .thread import HttpPool
| 19.333333 | 28 | 0.827586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
81a85ad97e42bc868e9ae369e44cafe2a61e536a | 107 | py | Python | Problems/sample.py | HKuz/Test_Code | 798efc9fc668ef021736a6d9699ef4713cf8b718 | [
"MIT"
]
| 1 | 2020-06-14T20:10:04.000Z | 2020-06-14T20:10:04.000Z | Problems/sample.py | makramjandar/Test_Code | 798efc9fc668ef021736a6d9699ef4713cf8b718 | [
"MIT"
]
| null | null | null | Problems/sample.py | makramjandar/Test_Code | 798efc9fc668ef021736a6d9699ef4713cf8b718 | [
"MIT"
]
| 1 | 2019-12-09T12:48:05.000Z | 2019-12-09T12:48:05.000Z | #!/usr/local/bin/python3
def main():
# Test suite
return
if __name__ == '__main__':
main()
| 9.727273 | 26 | 0.579439 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 46 | 0.429907 |
81a8ec6fd77ecb2b6c41666fc9f2b8378760daa6 | 3,759 | py | Python | setup.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
]
| 2 | 2020-06-17T19:54:09.000Z | 2020-06-18T20:10:26.000Z | setup.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
]
| null | null | null | setup.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
]
| null | null | null | import fnmatch
import glob
import os
import sys
import setuptools.command.build_ext
APPLE = sys.platform == 'darwin'
BASE_DIR = os.path.dirname(__file__)
ABOUT = {}
def _read_about():
with open(os.path.join(BASE_DIR, 'omnibus', '__about__.py'), 'rb') as f:
src = f.read()
if sys.version_info[0] > 2:
src = src.decode('UTF-8')
exec(src, ABOUT)
_read_about()
EXCLUDED_STATIC_FILE_PATHS = [
'*.py',
'*/__pycache__/*',
'*/tests/*',
'*/_ext/cc/*',
'*/_ext/cy/*',
'*/_ext/rs/*',
]
def _get_static_files(path):
return [filepath
for (dirpath, dirnames, filenames) in os.walk(path, followlinks=True)
for filename in filenames
for filepath in [os.path.join(dirpath, filename)]
if not any(fnmatch.fnmatch(filepath, pat) for pat in EXCLUDED_STATIC_FILE_PATHS)]
PACKAGE_DATA = [
'.revision',
] + _get_static_files('omnibus')
INSTALL_REQUIRES = [
'toolz>=0.9.0',
]
EXTRAS_REQUIRE = {
'bintrees': ['bintrees>=0.2.7'],
'cytoolz': ['cytoolz>=0.9.0'],
'docker': ['docker>=3.7.0'],
'sortedcontainers': ['sortedcontainers>=2.1.0'],
}
DEBUG = 'DEBUG' in os.environ
EXT_MODULES = []
try:
import Cython
except ImportError:
pass
else:
import Cython.Build
import Cython.Compiler.Options
EXT_MODULES.extend([
*[
setuptools.Extension(
'omnibus._ext.cc.' + os.path.basename(fpath).rpartition('.')[0],
sources=[fpath]
)
for fpath in glob.glob('omnibus/_ext/cc/*.cc')
],
*Cython.Build.cythonize(
[
setuptools.Extension(
'omnibus._ext.cy.' + os.path.basename(fpath).rpartition('.')[0],
sources=[fpath],
language='c++',
)
for fpath in glob.glob('omnibus/_ext/cy/**/*.pyx', recursive=True)
],
language_level=3,
gdb_debug=DEBUG,
compiler_directives={
**Cython.Compiler.Options.get_directive_defaults(),
'embedsignature': True,
'binding': True,
},
),
])
if APPLE:
EXT_MODULES.extend([
setuptools.Extension(
'omnibus._ext.m.' + os.path.basename(fpath).rpartition('.')[0],
sources=[fpath],
extra_link_args=[
'-framework', 'AppKit',
'-framework', 'CoreFoundation',
]
)
for fpath in glob.glob('omnibus/_ext/m/*.m')
])
if __name__ == '__main__':
setuptools.setup(
name=ABOUT['__title__'],
version=ABOUT['__version__'],
description=ABOUT['__description__'],
author=ABOUT['__author__'],
url=ABOUT['__url__'],
python_requires='>=3.7',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: ' + '.'.join(map(str, sys.version_info[:2])),
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python',
],
# zip_safe=True,
setup_requires=['setuptools'],
packages=setuptools.find_packages(
include=['omnibus', 'omnibus.*'],
exclude=['tests', '*.tests', '*.tests.*'],
),
py_modules=['omnibus'],
package_data={'omnibus': PACKAGE_DATA},
include_package_data=True,
entry_points={},
install_requires=INSTALL_REQUIRES,
extras_require=EXTRAS_REQUIRE,
ext_modules=EXT_MODULES,
)
| 24.096154 | 93 | 0.536313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 856 | 0.22772 |
81aab8159848ee0e48d169bcc15f7002773f809e | 689 | py | Python | day1/test_day1.py | Sundin/advent-of-code-2019 | 7ba5971ab5deeec61c60e6acbe1ac223876e77fe | [
"MIT"
]
| null | null | null | day1/test_day1.py | Sundin/advent-of-code-2019 | 7ba5971ab5deeec61c60e6acbe1ac223876e77fe | [
"MIT"
]
| null | null | null | day1/test_day1.py | Sundin/advent-of-code-2019 | 7ba5971ab5deeec61c60e6acbe1ac223876e77fe | [
"MIT"
]
| null | null | null | from day1 import *
import unittest
def test_get_fuel_requirements_for_mass():
assert get_fuel_requirements_for_mass(12) == 2
assert get_fuel_requirements_for_mass(14) == 2
assert get_fuel_requirements_for_mass(1969) == 654
assert get_fuel_requirements_for_mass(100756) == 33583
def test_sum_fuel_requirements_for_all_modules_on_spacecraft():
assert sum_fuel_requirements_for_all_modules_on_spacecraft([12, 14, 1969, 100756]) == 34241
def test_get_fuel_requirements_for_mass_recursive():
assert get_fuel_requirements_for_mass_recursive(14) == 2
assert get_fuel_requirements_for_mass_recursive(1969) == 966
assert get_fuel_requirements_for_mass_recursive(100756) == 50346
| 40.529412 | 93 | 0.835994 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
81ac62bd68434ff2ce3767d63cce77c07cbf51c7 | 305 | py | Python | PyObjCTest/test_nspdfinfo.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
]
| null | null | null | PyObjCTest/test_nspdfinfo.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
]
| null | null | null | PyObjCTest/test_nspdfinfo.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
]
| null | null | null | import AppKit
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSPDFInfo(TestCase):
@min_os_level("10.9")
def testMethods(self):
self.assertResultIsBOOL(AppKit.NSPDFInfo.isFileExtensionHidden)
self.assertArgIsBOOL(AppKit.NSPDFInfo.setFileExtensionHidden_, 0)
| 30.5 | 73 | 0.783607 | 229 | 0.75082 | 0 | 0 | 194 | 0.636066 | 0 | 0 | 6 | 0.019672 |
81acfe851d89593a12e5f0cfee315b25fd2a0d5f | 1,636 | py | Python | gap/src/util/data_iterator.py | cosmozhang/autoencoding_parsing | 2e8f4811ca6032f4f89195cd019a4fce4b399dcc | [
"BSD-3-Clause"
]
| null | null | null | gap/src/util/data_iterator.py | cosmozhang/autoencoding_parsing | 2e8f4811ca6032f4f89195cd019a4fce4b399dcc | [
"BSD-3-Clause"
]
| null | null | null | gap/src/util/data_iterator.py | cosmozhang/autoencoding_parsing | 2e8f4811ca6032f4f89195cd019a4fce4b399dcc | [
"BSD-3-Clause"
]
| null | null | null | from collections import OrderedDict, defaultdict
import numpy as np
'''
generate an id-to-length dict
'''
def gen_sid_len(sentences):
sid2len = OrderedDict()
for i, sent in enumerate(sentences):
sid2len[i] = len(sent)
return sid2len
def batch_slice(data, batch_size):
# data is a list of sentences of the same length
batch_num = int(np.ceil(len(data) / float(batch_size)))
    for i in range(batch_num):
cur_batch_size = batch_size if i < batch_num - 1 else len(data) - batch_size * i
# cur_batch_size is the end-point of the batch
sents = data[i * batch_size: i * batch_size + cur_batch_size]
yield sents
def data_iter(sents_id2length_dic, batch_size, shuffle=True):
"""
randomly permute data, then sort by source length, and partition into batches
ensure that the length of source sentences in each batch is decreasing
"""
buckets = defaultdict(list)
    for (sent_id, sent_len) in sents_id2length_dic.items():
buckets[sent_len].append(sent_id)
batched_data = []
    for (sent_len, sent_ids_smlen) in buckets.items():
# sent_ids_smlen is a list of sentences of the same length
if shuffle:
np.random.shuffle(sent_ids_smlen)
        # 'extend' consumes the batch_slice generator eagerly, adding every
        # batch for this length bucket.
batched_data.extend(list(batch_slice(sent_ids_smlen, batch_size)))
if shuffle:
np.random.shuffle(batched_data)
for batch in batched_data:
"""
sent_ids in the same batch are of the same length
"""
yield batch
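def _demo_data_iter():
    # Illustrative only (toy sentences): ids are bucketed by sentence length,
    # so every yielded batch is homogeneous in length,
    # e.g. [[0, 1], [2, 3]] for the data below with shuffle=False.
    sents = [[1, 2], [3, 4], [5, 6, 7], [8, 9, 10]]
    sid2len = gen_sid_len(sents)
    return list(data_iter(sid2len, batch_size=2, shuffle=False))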
| 31.461538 | 88 | 0.665037 | 0 | 0 | 1,379 | 0.84291 | 0 | 0 | 0 | 0 | 522 | 0.319071 |
81ad78394864e547b6c74d97fef7b7beb9ca5228 | 1,793 | py | Python | recipe_parser/recipes/thehappyfoodie.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
]
| null | null | null | recipe_parser/recipes/thehappyfoodie.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
]
| null | null | null | recipe_parser/recipes/thehappyfoodie.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
]
| null | null | null | from ._schema import DefaultSchema
from ._utils import get_minutes, get_yields, normalize_string
class TheHappyFoodie(DefaultSchema):
@classmethod
def host(cls):
return "thehappyfoodie.co.uk"
def title(self):
return self.soup.find("h1", {"class": "main-title"}).get_text()
def total_time(self):
return get_minutes(
self.soup.find("div", {"class": "recipe__data__total-time"})
) or sum(
[
get_minutes(
self.soup.find("div", {"class": "recipe__data__prep-time"})
),
get_minutes(
self.soup.find("div", {"class": "recipe__data__cook-time"})
),
]
)
def yields(self):
return get_yields(
self.soup.find("div", {"class": "recipe__data__yield"}).get_text()
)
def ingredients(self):
ingredients = self.soup.find(
"table", {"class": "recipe__ingredients-table"}
).findAll("tr")
ingredients = [
(
ingredient.find(
"td", {"class": "recipe__ingredients__amount"}
).get_text(),
ingredient.find(
"td", {"class": "recipe__ingredients__name"}
).get_text(),
)
for ingredient in ingredients
]
return [
normalize_string("{} {}".format(amount, name))
for amount, name in ingredients
]
def instructions(self):
instructions = self.soup.find("div", {"class": "recipe__instructions"}).findAll(
"p"
)
return "\n".join(
normalize_string(instruction.get_text()) for instruction in instructions
)
| 28.919355 | 88 | 0.516453 | 1,693 | 0.944228 | 0 | 0 | 69 | 0.038483 | 0 | 0 | 361 | 0.201339 |
81ae2fd5d9a2f3d3be912f6bccda2599695dd505 | 2,866 | py | Python | Scopuli/WEB/DebugToolbar/Toolbar.py | MaxOnNet/scopuli-core-web | 66a2c31b36d7fc05be36ba5d5b141644459b4aba | [
"Apache-2.0"
]
| null | null | null | Scopuli/WEB/DebugToolbar/Toolbar.py | MaxOnNet/scopuli-core-web | 66a2c31b36d7fc05be36ba5d5b141644459b4aba | [
"Apache-2.0"
]
| null | null | null | Scopuli/WEB/DebugToolbar/Toolbar.py | MaxOnNet/scopuli-core-web | 66a2c31b36d7fc05be36ba5d5b141644459b4aba | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright [2017] Tatarnikov Viktor [[email protected]]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from urllib.parse import unquote
except ImportError:
from urllib import unquote
from flask import url_for, current_app
from werkzeug.utils import import_string
class DebugToolbar(object):
_cached_panel_classes = {}
def __init__(self, request, jinja_env):
self.jinja_env = jinja_env
self.request = request
self.panels = []
self.template_context = {
'static_path': '/static/debug_panel/'
}
self.create_panels()
def create_panels(self):
"""
Populate debug panels
"""
activated = self.request.cookies.get('fldt_active', '')
activated = unquote(activated).split(';')
for panel_class in self._iter_panels(current_app):
panel_instance = panel_class(jinja_env=self.jinja_env,
context=self.template_context)
if panel_instance.dom_id() in activated:
panel_instance.is_active = True
self.panels.append(panel_instance)
def render_toolbar(self):
context = self.template_context.copy()
context.update({'panels': self.panels})
template = self.jinja_env.get_template('base.html')
return template.render(**context)
@classmethod
def load_panels(cls, app):
for panel_class in cls._iter_panels(app):
# just loop to make sure they've been loaded
pass
@classmethod
def _iter_panels(cls, app):
for panel_path in app._config.get("web", "debug-toolbar", "panels", "").split(";"):
panel_class = cls._import_panel(app, "WEB.DebugToolbar.Panels.{}".format(panel_path))
if panel_class is not None:
yield panel_class
@classmethod
def _import_panel(cls, app, path):
cache = cls._cached_panel_classes
try:
return cache[path]
except KeyError:
pass
try:
panel_class = import_string(path)
except ImportError as e:
app.logger.warning('Disabled %s due to ImportError: %s', path, e)
panel_class = None
cache[path] = panel_class
return panel_class
| 29.854167 | 97 | 0.63887 | 2,033 | 0.709351 | 291 | 0.101535 | 919 | 0.320656 | 0 | 0 | 896 | 0.312631 |
81ae89733812f04310f2f41f0ec35c6af756fb98 | 8,372 | py | Python | CalculateLods.py | makeling/AGSSmartVectorTileTools | 009d925f883729f98c0d0744e1d466062dc260e6 | [
"Apache-2.0"
]
| 3 | 2019-02-19T06:14:03.000Z | 2020-01-06T07:57:12.000Z | CalculateLods.py | makeling/AGSSmartVectorTileTools | 009d925f883729f98c0d0744e1d466062dc260e6 | [
"Apache-2.0"
]
| null | null | null | CalculateLods.py | makeling/AGSSmartVectorTileTools | 009d925f883729f98c0d0744e1d466062dc260e6 | [
"Apache-2.0"
]
| 2 | 2019-03-25T09:43:30.000Z | 2019-11-28T03:52:56.000Z | # -*- coding: utf-8 -*-
# !/usr/bin/python
__author__ = 'ma_keling'
# Version : 1.0.0
# Start Time : 2018-11-29
# Update Time :
# Change Log :
## 1.
## 2.
## 3.
import time
import arcpy
import math
def express_arcpy_error():
severity = arcpy.GetMaxSeverity()
if severity == 2:
# If the tool returned an error
arcpy.AddError("Error occurred \n{0}".format(arcpy.GetMessages(2)))
elif severity == 1:
# If the tool returned no errors, but returned a warning
arcpy.AddWarning("Warning raised \n{0}".format(arcpy.GetMessages(1)))
else:
# If the tool did not return an error or a warning
arcpy.AddMessage(arcpy.GetMessages())
# Description: Loop layers and calculate lod for every feature in the layer.
def calculate_lods_for_feature(in_layers, fieldName):
try:
startTime = time.time()
timeStampName = time.strftime('%Y_%m_%d %H:%M:%S', time.localtime(time.time()))
arcpy.AddMessage("Start compute lods at: {0}".format(timeStampName))
for layer in in_layers:
arcpy.AddMessage("Calculating lod for layer : {0}.".format(layer))
add_field(layer, fieldName, 9)
cursor = arcpy.da.UpdateCursor(layer, ['SHAPE@', 'OID@', fieldName])
lyr_path = layer.dataSource
desc = arcpy.Describe(lyr_path)
extent = desc.extent
arcpy.AddMessage("Original dataset extent:" + str(desc.extent))
ext_wm = extent.projectAs(arcpy.SpatialReference(102100))
arcpy.AddMessage("New WebMercator extent:" + str(ext_wm))
start_level, start_compute_resolution = confirm_level(ext_wm)
if desc.shapeType == "Polygon":
baselength, basearea = get_length_area_from_pixel(96, 295828764)
lod_area = basearea / math.pow(4, start_level - 1)
arcpy.AddMessage("start lod area: " + str(lod_area))
for row in cursor:
lod = calculate_lod_for_polygon(row[0], baselength, lod_area,start_level)
row[2] = lod
cursor.updateRow(row)
elif desc.shapeType == "Point":
count = get_count(layer)
arcpy.AddMessage("Total Points:" + str(count))
                if count < 200000:
                    arcpy.AddMessage("Input point set too small to optimise, skipping!")
                    continue
else:
n = math.ceil(math.log(count / (512 * 512 / 16), 4))
arcpy.AddMessage("n:" + str(n))
for row in cursor:
oid = row[1]
lod = calculate_lod_for_point(oid,start_level,n)
row[2] = lod
cursor.updateRow(row)
elif desc.shapeType == 'Polyline':
baselength = get_length_from_pixel(96, 295828764)
lod_length = baselength / math.pow(2, start_level - 1)
for row in cursor:
lod = calculate_lod_for_polyline(row[0],lod_length,start_level)
row[2] = lod
cursor.updateRow(row)
endTime = time.time()
print("Compute finished, elapsed: {0} Seconds.eter..".format(str(endTime - startTime)))
arcpy.AddMessage("Compute finished, elapsed: {0} Seconds.eter..".format(str(endTime - startTime)))
print("\n")
arcpy.AddMessage("\n")
except arcpy.ExecuteError:
express_arcpy_error()
# Description: Count the total number of features in a feature class
def get_count(layer):
fields = ['SHAPE@']
count = 0
with arcpy.da.SearchCursor(layer, fields) as cursor:
for row in cursor:
count += 1
return count
# Description: get the start level based on layer extent
def confirm_level(extent):
width = extent.width
height = extent.height
arcpy.AddMessage("width:"+str(width) +" height:"+ str(height))
length = max(width, height)
base_resolution = 78271.516964011724
base_tile_resolution = base_resolution * 512
for level in range(21):
start_compute_resolution = base_tile_resolution
if length >= base_tile_resolution:
arcpy.AddMessage("level:" + str(level))
arcpy.AddMessage("base tile resolution:" + str(base_tile_resolution))
return level, start_compute_resolution
else:
base_tile_resolution /= 2
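# (Added note) The 78271.516964... m/px constant in confirm_level() is the
# level-0 web-mercator resolution for 512 px tiles: the equatorial
# circumference 2 * pi * 6378137 m divided by 512. Also note that if the
# extent is smaller than even the level-20 tile span, the loop above falls
# through and the function implicitly returns None, which the caller would
# then fail to unpack.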
# Description: Add a new field with name lod to a table
def add_field(inFeatures,fieldName,fieldPrecision):
try:
startTime = time.time()
timeStampName = time.strftime('%Y_%m_%d %H:%M:%S', time.localtime(time.time()))
print("start add new field for: ", inFeatures, " at: ", timeStampName)
arcpy.AddMessage("start add new field for: {0} at: {1}".format(str(inFeatures), str(timeStampName)))
# Execute AddField for new field
arcpy.AddField_management(inFeatures, fieldName, "Text", fieldPrecision,
field_alias=fieldName, field_is_nullable="NULLABLE")
endTime = time.time()
print(inFeatures, "Add field:", fieldName, "finished, elapsed: ", str(endTime - startTime) + ' Seconds.eter..')
arcpy.AddMessage(
"Add field: {0} finished, elapsed: {1} Seconds.eter..".format(fieldName, str(endTime - startTime)))
print("\n")
arcpy.AddMessage("\n")
except arcpy.ExecuteError:
express_arcpy_error()
# Description: Compute the ground length and area covered by one pixel for a given dpi and map scale
def get_length_area_from_pixel(dpi,scale):
pixel = 1 / dpi * 0.025
length = scale * pixel
area = length * length
return length,area
# Description: Compute the ground length covered by one pixel for a given dpi and map scale
def get_length_from_pixel(dpi,scale):
pixel = 1 / dpi * 0.025
length = scale * pixel
return length
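# (Added worked example) With the values used above -- 96 dpi and the
# 1:295,828,764 reference scale -- one pixel is (1 / 96) * 0.025 m on paper
# (0.025 approximates 0.0254 m per inch), which maps to
# (1 / 96) * 0.025 * 295828764 ~= 77,039 m on the ground: the same order as
# the 78271.5 m level-0 resolution used in confirm_level().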
# Description: Calculate lod for every polygon
def calculate_lod_for_polygon(feature,baselength, basearea, start_level):
try:
if feature:
area = feature.getArea("GEODESIC", "SQUAREMETERS")
# length = feature.getLength("GEODESIC", "METERS")
lod = start_level
for i in range(20):
# arcpy.AddMessage(str(i) + ":" + str(basearea) + "___"+str(area))
# arcpy.AddMessage(str(i) + ":" + str(baselength) + "___" + str(length))
if area >= basearea :
return str(lod)
else:
lod += 1
basearea /= 4
baselength /= 2
return str(lod)
else:
print(type(feature))
return "19"
except arcpy.ExecuteError:
express_arcpy_error()
# Description: Calculate lod for every point
def calculate_lod_for_point(id, start_level, n):
try:
for i in range(n):
if id % math.pow(4, n - i) == 0:
return start_level
else:
start_level += 1
return start_level
except arcpy.ExecuteError:
express_arcpy_error()
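# (Added worked example) With n = 3 and base level L, calculate_lod_for_point
# tags OIDs divisible by 4**3 = 64 with L, OIDs divisible by 16 (but not 64)
# with L + 1, OIDs divisible by 4 (but not 16) with L + 2, and everything
# else with L + 3 -- i.e. roughly 1/64, 3/64, 12/64 and 48/64 of the points
# per successive level, so the cumulative visible count quadruples with each
# deeper zoom level.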
# Description: Calculate lod for every polyline
def calculate_lod_for_polyline(feature,baselength, start_level):
try:
if feature:
length = feature.getLength("GEODESIC", "METERS")
lod = start_level
for i in range(20):
# arcpy.AddMessage(str(i) + ":" + str(basearea) + "___"+str(area))
# arcpy.AddMessage(str(i) + ":" + str(baselength) + "___" + str(length))
if length >= baselength:
return lod
else:
lod += 1
baselength /= 2
return lod
else:
print(type(feature))
except arcpy.ExecuteError:
express_arcpy_error()
def execute():
in_map = arcpy.GetParameter(0)
arcpy.AddMessage("Input map : {0}.".format(in_map))
in_layers = arcpy.GetParameter(1)
field_name = "lod"
calculate_lods_for_feature(in_layers, field_name)
# execute()
| 36.086207 | 121 | 0.568562 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,972 | 0.235547 |
81aeea522ff7190fbf314844be2ef81f5a72b4cb | 3,894 | py | Python | python/one-liner/cluster_of_non_0.py | Hamng/python-sources | 0cc5a5d9e576440d95f496edcfd921ae37fcd05a | [
"Unlicense"
]
| null | null | null | python/one-liner/cluster_of_non_0.py | Hamng/python-sources | 0cc5a5d9e576440d95f496edcfd921ae37fcd05a | [
"Unlicense"
]
| 1 | 2019-02-23T18:30:51.000Z | 2019-02-23T18:30:51.000Z | python/one-liner/cluster_of_non_0.py | Hamng/python-sources | 0cc5a5d9e576440d95f496edcfd921ae37fcd05a | [
"Unlicense"
]
| null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 8 07:38:05 2020
@author: Ham
Self Challenge: Count Cluster of Non-0s
Given a 1-dimensional array of integers,
determine how many 'clusters' of non-0s are in the array.
A 'cluster' is a group of consecutive non-0 values.
Scoring: a solution needs to be a 1-liner;
i.e. NO point if implementing with a traditional 'for' loop!
Sample Input (see STDIN_SIO)
A : [
9, 0, 0, 22, 0, 0, 39, 11, 3, 0, \
0, 24, 1, 0, 50, 23, 3, 44, 0, 23, \
25, 6, 36, 19, 10, 23, 0, 37, 4, 1, \
7, 12, 0, 0, 49
]
Expected Output:
8
"""
import itertools
STDIN_SIO = """
9, 0, 0, 22, 0, 0, 39, 11, 3, 0, \
0, 24, 1, 0, 50, 23, 3, 44, 0, 23, \
2, 8, 20, 35, 0, 40, 34, 26, 36, 0, \
35, 19, 20, 18, 11, 43, 19, 21, 40, 0, \
14, 0, 14, 0, 0, 25, 35, 24, 49, 15, \
13, 3, 0, 10, 31, 25, 27, 37, 27, 43, \
44, 27, 8, 43, 0, 0, 33, 25, 19, 47, \
0, 29, 5, 2, 12, 8, 7, 0, 16, 36, \
0, 6, 17, 35, 36, 21, 0, 9, 1, 0, \
43, 29, 39, 15, 18, 0, 34, 26, 48, 0, \
34, 35, 7, 10, 0, 0, 15, 5, 12, 26, \
0, 37, 30, 33, 27, 34, 9, 37, 22, 0, \
0, 24, 30, 0, 0, 38, 23, 25, 0, 30, \
39, 24, 31, 0, 6, 19, 25, 0, 28, 15, \
8, 0, 48, 0, 35, 41, 0, 24, 1, 41, \
31, 0, 35, 21, 15, 26, 15, 27, 4, 0, \
8, 4, 0, 0, 2, 42, 18, 0, 28, 18, \
49, 34, 5, 10, 41, 48, 26, 14, 45, 44, \
9, 0, 49, 50, 24, 0, 0, 0, 23, 0, \
17, 0, 47, 31, 0, 42, 0, 0, 0, 40, \
46, 22, 50, 32, 20, 3, 44, 22, 0, 37, \
25, 0, 19, 26, 14, 23, 27, 41, 0, 1, \
13, 0, 48, 20, 37, 8, 0, 18, 0, 26, \
12, 19, 32, 19, 22, 0, 0, 0, 0, 0, \
16, 0, 0, 43, 0, 10, 5, 0, 6, 26, \
0, 24, 40, 29, 0, 43, 18, 27, 0, 0, \
37, 0, 46, 35, 17, 0, 20, 44, 29, 29, \
40, 33, 22, 27, 0, 0, 38, 21, 4, 0, \
0, 15, 31, 48, 36, 10, 0, 41, 0, 45, \
39, 0, 11, 9, 3, 38, 16, 0, 11, 22, \
37, 0, 3, 44, 10, 12, 47, 22, 32, 7, \
24, 1, 0, 22, 25, 0, 14, 0, 0, 0, \
23, 0, 36, 1, 42, 46, 0, 48, 0, 33, \
5, 27, 45, 0, 15, 29, 0, 50, 2, 31, \
25, 6, 36, 19, 10, 23, 0, 37, 4, 1, \
7, 12, 0, 0, 49
""".strip()
def count_non_0_clusters_1(arr):
"""Translate each non-0 to an 'A' char, and 0 to a space.
Then join together to become a string.
Then split(), then return number of tokens.
"""
return len("".join(["A" if e else " " for e in arr]).split())
def count_non_0_clusters_2(arr):
"""groupby() partitions into groups as:
[[True , [list of non-0]],
[False, [list of 0s]],
[True , [list of non-0]],
[False, [list of 0s]],
...
[True , [list of non-0]]]
    (Old) Next, the list comprehension iterates through each tuple,
    then collects the 1st element of each tuple if it is True.
    Finally, return the len/count of the Trues:
        return len([t[0] for t in itertools.groupby(...) if t[0]])
    (Current) Next, the list comprehension iterates through each tuple,
    then collects the 1st element of each tuple.
    Then return the count() of the True elements.
"""
return [t[0] for t in itertools.groupby(arr, lambda e: bool(e))].count(True)
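def count_non_0_clusters_3(arr):
    """Added numpy sketch (assumes numpy is available; not part of the
    original challenge entries). Mask the non-0s as 1s, prepend a 0, then
    count the 0 -> 1 rising edges: each rising edge marks the start of
    exactly one cluster.
    """
    import numpy as np
    return int(np.count_nonzero(np.diff(np.concatenate(([0], np.asarray(arr) != 0))) == 1))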
if __name__ == '__main__':
a = list(map(int, STDIN_SIO.split(",")))
# Nicely print it, 10 entries per line, with continuation
# so can copy-n-paste back into STDIN_SIO
#print(len(a))
#for i in range(0, (len(a) // 10) * 10, 10):
# print("%3u," * 10 % tuple(a[i:i+10]), end=" \\\n")
#j = a[(len(a) // 10) * 10:]
#print("%3u," * (len(j) - 1) % tuple(j[:-1]), end="")
#print("%3u" % j[-1])
print("count_*_1() returns", count_non_0_clusters_1(a), "clusters of non-0")
print("count_*_2() returns", count_non_0_clusters_2(a), "clusters of non-0")
| 34.157895 | 81 | 0.48793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,427 | 0.880072 |
81aef189550b2b54e321b9991b292a9ac7b3bfcb | 5,133 | py | Python | cardano-node-tests/cardano_node_tests/tests/test_configuration.py | MitchellTesla/Cardano-SCK | f394506eb0875622093805c009951f6905261778 | [
"Apache-2.0"
]
| 6 | 2021-08-30T00:49:12.000Z | 2022-01-27T07:07:53.000Z | cardano-node-tests/cardano_node_tests/tests/test_configuration.py | c-spider/Cardano-SCK | 1accb0426289489e371eb67422ccb19ffaab5f3c | [
"Apache-2.0"
]
| 17 | 2021-08-31T23:27:44.000Z | 2022-03-25T20:35:16.000Z | cardano-node-tests/cardano_node_tests/tests/test_configuration.py | c-spider/Cardano-SCK | 1accb0426289489e371eb67422ccb19ffaab5f3c | [
"Apache-2.0"
]
| 3 | 2021-05-20T08:26:00.000Z | 2022-03-27T22:31:36.000Z | """Tests for node configuration."""
import json
import logging
import time
from pathlib import Path
import allure
import pytest
from _pytest.tmpdir import TempdirFactory
from cardano_clusterlib import clusterlib
from cardano_node_tests.utils import cluster_management
from cardano_node_tests.utils import cluster_nodes
from cardano_node_tests.utils import configuration
from cardano_node_tests.utils import helpers
LOGGER = logging.getLogger(__name__)
@pytest.fixture(scope="module")
def create_temp_dir(tmp_path_factory: TempdirFactory):
"""Create a temporary dir."""
p = Path(tmp_path_factory.getbasetemp()).joinpath(helpers.get_id_for_mktemp(__file__)).resolve()
p.mkdir(exist_ok=True, parents=True)
return p
@pytest.fixture
def temp_dir(create_temp_dir: Path):
"""Change to a temporary dir."""
with helpers.change_cwd(create_temp_dir):
yield create_temp_dir
# use the "temp_dir" fixture for all tests automatically
pytestmark = pytest.mark.usefixtures("temp_dir")
@pytest.fixture(scope="module")
def epoch_length_start_cluster(tmp_path_factory: TempdirFactory) -> Path:
"""Update *epochLength* to 1200."""
pytest_globaltemp = helpers.get_pytest_globaltemp(tmp_path_factory)
# need to lock because this same fixture can run on several workers in parallel
with helpers.FileLockIfXdist(f"{pytest_globaltemp}/startup_files_epoch_1200.lock"):
destdir = pytest_globaltemp / "startup_files_epoch_1200"
destdir.mkdir(exist_ok=True)
# return existing script if it is already generated by other worker
destdir_ls = list(destdir.glob("start-cluster*"))
if destdir_ls:
return destdir_ls[0]
startup_files = cluster_nodes.get_cluster_type().cluster_scripts.copy_scripts_files(
destdir=destdir
)
with open(startup_files.genesis_spec) as fp_in:
genesis_spec = json.load(fp_in)
genesis_spec["epochLength"] = 1500
with open(startup_files.genesis_spec, "w") as fp_out:
json.dump(genesis_spec, fp_out)
return startup_files.start_script
@pytest.fixture(scope="module")
def slot_length_start_cluster(tmp_path_factory: TempdirFactory) -> Path:
"""Update *slotLength* to 0.3."""
pytest_globaltemp = helpers.get_pytest_globaltemp(tmp_path_factory)
# need to lock because this same fixture can run on several workers in parallel
with helpers.FileLockIfXdist(f"{pytest_globaltemp}/startup_files_slot_03.lock"):
destdir = pytest_globaltemp / "startup_files_slot_03"
destdir.mkdir(exist_ok=True)
# return existing script if it is already generated by other worker
destdir_ls = list(destdir.glob("start-cluster*"))
if destdir_ls:
return destdir_ls[0]
startup_files = cluster_nodes.get_cluster_type().cluster_scripts.copy_scripts_files(
destdir=destdir
)
with open(startup_files.genesis_spec) as fp_in:
genesis_spec = json.load(fp_in)
genesis_spec["slotLength"] = 0.3
with open(startup_files.genesis_spec, "w") as fp_out:
json.dump(genesis_spec, fp_out)
return startup_files.start_script
@pytest.fixture
def cluster_epoch_length(
cluster_manager: cluster_management.ClusterManager, epoch_length_start_cluster: Path
) -> clusterlib.ClusterLib:
return cluster_manager.get(
singleton=True, cleanup=True, start_cmd=str(epoch_length_start_cluster)
)
@pytest.fixture
def cluster_slot_length(
cluster_manager: cluster_management.ClusterManager, slot_length_start_cluster: Path
) -> clusterlib.ClusterLib:
return cluster_manager.get(
singleton=True, cleanup=True, start_cmd=str(slot_length_start_cluster)
)
def check_epoch_length(cluster_obj: clusterlib.ClusterLib) -> None:
    end_sec = 15
    end_sec_padded = end_sec + 15  # padded to make sure tip got updated
    cluster_obj.wait_for_new_epoch()
    epoch = cluster_obj.get_epoch()
    # sleep until `end_sec` before the end of the current epoch; the epoch
    # number must not have changed yet
    sleep_time = cluster_obj.epoch_length_sec - end_sec
    time.sleep(sleep_time)
    assert epoch == cluster_obj.get_epoch()
    # after the padded wait we must have crossed into the next epoch
    time.sleep(end_sec_padded)
    assert epoch + 1 == cluster_obj.get_epoch()
@pytest.mark.run(order=3)
@pytest.mark.skipif(
bool(configuration.TX_ERA),
reason="different TX eras doesn't affect this test, pointless to run",
)
class TestBasic:
"""Basic tests for node configuration."""
@allure.link(helpers.get_vcs_link())
def test_epoch_length(self, cluster_epoch_length: clusterlib.ClusterLib):
"""Test the *epochLength* configuration."""
cluster = cluster_epoch_length
assert cluster.slot_length == 0.2
assert cluster.epoch_length == 1500
check_epoch_length(cluster)
@allure.link(helpers.get_vcs_link())
@pytest.mark.run(order=2)
def test_slot_length(self, cluster_slot_length: clusterlib.ClusterLib):
"""Test the *slotLength* configuration."""
cluster = cluster_slot_length
assert cluster.slot_length == 0.3
assert cluster.epoch_length == 1000
check_epoch_length(cluster)
| 33.116129 | 100 | 0.728034 | 756 | 0.147282 | 149 | 0.029028 | 4,108 | 0.800312 | 0 | 0 | 984 | 0.191701 |
81afed5d2a7be68d968744aa55c07d3f1c78d48b | 241,016 | py | Python | output/myresults.py | jacobseiler/rsage | b3b0a3fa3c676eab188991e37d06894396bfc74f | [
"MIT"
]
| 1 | 2019-05-23T04:11:32.000Z | 2019-05-23T04:11:32.000Z | output/myresults.py | jacobseiler/rsage | b3b0a3fa3c676eab188991e37d06894396bfc74f | [
"MIT"
]
| 7 | 2018-08-17T05:04:57.000Z | 2019-01-16T05:40:16.000Z | output/myresults.py | jacobseiler/rsage | b3b0a3fa3c676eab188991e37d06894396bfc74f | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
from __future__ import print_function
import matplotlib
matplotlib.use('Agg')
import os
import heapq
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.colors as colors
import matplotlib.cm as cm
from numpy import *
from random import sample, seed, randint
from os.path import getsize as getFileSize
import math
import random
import csv
from cycler import cycler
from io import StringIO
#np.set_printoptions(threshold=np.nan)
from collections import Counter
from matplotlib.colors import LogNorm
from mpl_toolkits.axes_grid1 import AxesGrid
from astropy import units as u
from astropy import cosmology
import matplotlib.ticker as mtick
import PlotScripts
import ReadScripts
import AllVars
import GalaxyPhotoion as photo
import ObservationalData as Obs
import gnedin_analytic as ga
from mpi4py import MPI
import sys
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
AllVars.Set_Params_Kali()
AllVars.Set_Constants()
PlotScripts.Set_Params_Plot()
output_format = ".png"
# For the Tiamat extended results there is a weird hump when calculating the escape fraction.
# This hump occurs at a log10 halo mass of approximately 10.3 (Msun).
# The calculation of fesc skips this hump range (defined from kink_low to kink_high).
kink_low = 10.3
kink_high = 10.30000001
m_low = 7.0 # We only sum the photons coming from halos within the mass range m_low < Halo Mass < m_high
m_high = 15.0
m_gal_low = 3.0
m_gal_high = 12.0
m_low_SAGE = pow(10, m_low)/1.0e10 * AllVars.Hubble_h
m_high_SAGE = pow(10, m_high)/1.0e10 * AllVars.Hubble_h
bin_width = 0.2
NB = int((m_high - m_low) / bin_width)
NB_gal = int((m_gal_high - m_gal_low) / bin_width)
fej_low = 0.0
fej_high = 1.0
fej_bin_width = 0.05
NB_fej = int((fej_high - fej_low) / fej_bin_width)
def raise_list_power(my_list, n):
    # Element-wise x**n for every x in the list.
    return [pow(x, n) for x in my_list]
def raise_power_list(my_list, n):
    # Element-wise n**x for every x in the list.
    return [pow(n, x) for x in my_list]
def calculate_beta(MUV, z):
'''
Calculation of the dust attenuation parameter Beta. Fit values are from Bouwens (2015) ApJ 793, 115.
    For z = 5 and 6, Bouwens uses a piece-wise linear relationship; a single linear relationship is used at higher redshifts.
Parameters
----------
MUV : `float'
A value of the absolute magnitude in the UV (generally M1600) in the AB magnitude system.
z : `float'
Redshift the attenuation is calculated at.
Returns
------
beta : `float'
        Value of the UV continuum parameter beta.
'''
if (z >= 4.5 and z < 5.5): # z = 5 fits.
if (MUV > -18.8):
dB = -0.08
else:
dB = -0.17
B = -2.05
offset = 18.8
elif (z >= 5.5 and z < 6.5): # z = 6 fits.
if (MUV > -18.8):
dB = -0.08
else:
dB = -0.24
B = -2.22
offset = 18.8
elif (z >= 6.5 and z < 7.5): # z = 7 fits.
dB = -0.20
B = -2.05
offset = 19.5
elif (z >= 7.5 and z < 8.5): # z = 8 fits.
dB = -0.15
B = -2.13
offset = 19.5
elif (z >= 8.5 and z < 9.5): # z = 9 fits.
dB = -0.16
B = -2.19
offset = 19.5
elif (z >= 9.5 and z < 10.5): # z = 10 fits.
dB = -0.16
B = -2.16
offset = 19.5
beta = dB * (MUV + offset) + B
return beta
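# (Added sanity-check sketch; not called anywhere in the pipeline.)
# For M_UV = -20 at z = 7 the fit above gives
#     beta = -0.20 * (-20 + 19.5) - 2.05 = -1.95,
# i.e. slightly redder than the z = 7 pivot value B = -2.05.
def _beta_example():
    assert abs(calculate_beta(-20.0, 7.0) - (-1.95)) < 1e-10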
def multiply(array):
'''
    Performs element-wise multiplication of all entries in the array.
    Parameters
    ----------
    array : `~numpy.ndarray'
The array to be multiplied.
Returns
-------
total : `float'
Total of the elements multiplied together.
'''
total = 1
for i in range(0, len(array)):
total *= array[i]
return total
##
def Sum_Log(array):
'''
    Performs an element-wise sum of an array whose elements are in log-space.
Parameters
----------
array : array
Array with elements in log-space.
Returns
------
sum_total : float
Value of the elements taken to the power of 10 and summed.
Units
-----
All units are kept the same as the inputs.
'''
sum_total = 0.0
for i in range(0, len(array)):
sum_total += 10**array[i]
return sum_total
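# (Added note) Sum_Log is the scalar-loop form of the vectorised expression
# np.sum(10.0 ** np.asarray(array)); both return the linear-space total of
# log10 inputs.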
##
def Std_Log(array, mean):
'''
Calculates the standard deviation of an array with elements in log-space.
Parameters
----------
array : array
Array with elements in log-space.
mean : float
Mean of the array (not in log).
Returns
------
std : float
Standard deviation of the input array taken to the power of 10.
Units
-----
All units are kept the same as the inputs.
'''
sum_total = 0.0
for i in range(0, len(array)):
sum_total += (10**array[i] - mean)**2
sum_total *= 1.0/len(array)
std = np.sqrt(sum_total)
return std
###
def collect_across_tasks(mean_per_task, std_per_task, N_per_task, SnapList,
BinSnapList=[], binned=False, m_bin_low=0.0,
m_bin_high=0.0, my_bin_width=bin_width):
"""
Reduces arrays that are unique to each task onto the master task.
    The dimensions of the input arrays will change slightly if we are collecting a statistic
that is binned across e.g., halo mass or galaxy stellar mass.
Parameters
----------
mean_per_task, std_per_task, N_per_task: Nested 2D (or 3D if binned == True) arrays of floats.
Outer length is equal to the number of models.
Inner length is equal to the number of snapshots the data has been calculated for.
Most inner length is equal to the number of bins.
Contains the mean/standard deviation/number of objects unique for each task.
SnapList: Nested 2D arrays of integers. Outer length is equal to the number of models.
Contains the snapshot numbers the data has been calculated for each model.
BinSnapList: Nested 2D arrays of integers. Outer length is equal to the number of models.
Often statistics are calculated for ALL snapshots but we only wish to plot for a subset of snapshots.
This variable allows the binned data to be collected for only a subset of the snapshots.
binned: Boolean.
Dictates whether the collected data is a 2D or 3D array with the inner-most array being binned across e.g., halo mass.
Returns
----------
master_mean, master_std, master_N: Nested 2D (or 3D if binned == True) arrays of floats.
Shape is identical to the input mean_per_task etc.
If rank == 0 these contain the collected statistics.
Otherwise these will be none.
master_bin_middle: Array of floats.
Contains the location of the middle of the bins for the data.
"""
master_mean = []
master_std = []
master_N = []
master_bin_middle = []
for model_number in range(0, len(SnapList)):
master_mean.append([])
master_std.append([])
master_N.append([])
master_bin_middle.append([])
# If we're collecting a binned statistic (e.g., binned across halo mass), then we need to perform the collecting per snapshot.
if binned:
count = 0
for snapshot_idx in range(len(SnapList[model_number])):
if SnapList[model_number][snapshot_idx] == BinSnapList[model_number][count]:
master_mean[model_number], master_std[model_number], master_N[model_number] = calculate_pooled_stats(master_mean[model_number], master_std[model_number], master_N[model_number], mean_per_task[model_number][snapshot_idx], std_per_task[model_number][snapshot_idx], N_per_task[model_number][snapshot_idx])
master_bin_middle[model_number].append(np.arange(m_bin_low,
m_bin_high+my_bin_width,
my_bin_width)[:-1]
+ my_bin_width* 0.5)
count += 1
if count == len(BinSnapList[model_number]):
break
else:
master_mean[model_number], master_std[model_number], master_N[model_number] = calculate_pooled_stats(master_mean[model_number], master_std[model_number], master_N[model_number],
mean_per_task[model_number], std_per_task[model_number],
N_per_task[model_number])
if rank == 0:
master_mean[model_number] = master_mean[model_number][0]
master_std[model_number] = master_std[model_number][0]
master_N[model_number] = master_N[model_number][0]
return master_mean, master_std, master_N, master_bin_middle
###
def calculate_pooled_stats(mean_pool, std_pool, N_pool, mean_local, std_local, N_local):
'''
Calculates the pooled mean and standard deviation from multiple processors and appends it to an input array.
Formulae taken from https://en.wikipedia.org/wiki/Pooled_variance
As we only care about these stats on the rank 0 process, we make use of junk inputs/outputs for other ranks.
    NOTE: The input data may be an array (e.g. pooling the mean/std for a stellar mass function).
Parameters
----------
mean_pool, std_pool, N_pool : array of floats.
Arrays that contain the current pooled means/standard deviation/number of data points (for rank 0) or just a junk input (for other ranks).
    mean_local, std_local : float or array of floats.
The non-pooled mean and standard deviation unique for each process.
N_local : floating point number or array of floating point numbers.
Number of data points used to calculate the mean/standard deviation that is going to be added to the pool.
NOTE: Use floating point here so we can use MPI.DOUBLE for all MPI functions.
Returns
-------
mean_pool, std_pool : array of floats.
Original array with the new pooled mean/standard deviation appended (for rank 0) or the new pooled mean/standard deviation only (for other ranks).
Units
-----
All units are the same as the input.
All inputs MUST BE real-space (not log-space).
'''
if isinstance(mean_local, list) == True:
if len(mean_local) != len(std_local):
print("len(mean_local) = {0} \t len(std_local) = {1}".format(len(mean_local), len(std_local)))
raise ValueError("Lengths of mean_local and std_local should be equal")
if ((type(mean_local).__module__ == np.__name__) == True or (isinstance(mean_local, list) == True)): # Checks to see if we are dealing with arrays.
N_times_mean_local = np.multiply(N_local, mean_local)
N_times_var_local = np.multiply(N_local, np.multiply(std_local, std_local))
N_local = np.array(N_local).astype(float)
N_times_mean_local = np.array(N_times_mean_local).astype(np.float32)
if rank == 0: # Only rank 0 holds the final arrays so only it requires proper definitions.
N_times_mean_pool = np.zeros_like(N_times_mean_local)
N_pool_function = np.zeros_like(N_local)
N_times_var_pool = np.zeros_like(N_times_var_local)
N_times_mean_pool = N_times_mean_pool.astype(np.float64) # Recast everything to double precision then use MPI.DOUBLE.
N_pool_function = N_pool_function.astype(np.float64)
N_times_var_pool = N_times_var_pool.astype(np.float64)
else:
N_times_mean_pool = None
N_pool_function = None
N_times_var_pool = None
comm.Barrier()
N_times_mean_local = N_times_mean_local.astype(np.float64)
N_local = N_local.astype(np.float64)
N_times_var_local = N_times_var_local.astype(np.float64)
comm.Reduce([N_times_mean_local, MPI.DOUBLE], [N_times_mean_pool, MPI.DOUBLE], op = MPI.SUM, root = 0) # Sum the arrays across processors.
comm.Reduce([N_local, MPI.DOUBLE],[N_pool_function, MPI.DOUBLE], op = MPI.SUM, root = 0)
comm.Reduce([N_times_var_local, MPI.DOUBLE], [N_times_var_pool, MPI.DOUBLE], op = MPI.SUM, root = 0)
else:
N_times_mean_local = N_local * mean_local
N_times_var_local = N_local * std_local * std_local
N_times_mean_pool = comm.reduce(N_times_mean_local, op = MPI.SUM, root = 0)
N_pool_function = comm.reduce(N_local, op = MPI.SUM, root = 0)
N_times_var_pool = comm.reduce(N_times_var_local, op = MPI.SUM, root = 0)
if rank == 0:
mean_pool_function = np.zeros((len(N_pool_function)))
std_pool_function = np.zeros((len(N_pool_function)))
for i in range(0, len(N_pool_function)):
if N_pool_function[i] == 0:
mean_pool_function[i] = 0.0
else:
mean_pool_function[i] = np.divide(N_times_mean_pool[i], N_pool_function[i])
if N_pool_function[i] < 3:
std_pool_function[i] = 0.0
else:
std_pool_function[i] = np.sqrt(np.divide(N_times_var_pool[i], N_pool_function[i]))
mean_pool.append(mean_pool_function)
std_pool.append(std_pool_function)
N_pool.append(N_pool_function)
return mean_pool, std_pool, N_pool
else:
return mean_pool, std_pool, N_pool_function # Junk return because non-rank 0 doesn't care.
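# (Added illustration; never called by the pipeline.) The pooling above
# implements mean = sum(N_i * mu_i) / sum(N_i) and
# std = sqrt(sum(N_i * sigma_i**2) / sum(N_i)). Note this reproduces the
# standard deviation of the concatenated data only when the per-task means
# agree; the between-task scatter of the means is not included.
def _pooled_stats_example():
    N = np.array([100.0, 300.0])
    mu = np.array([2.0, 4.0])
    sigma = np.array([1.0, 2.0])
    assert np.isclose(np.sum(N * mu) / np.sum(N), 3.5)
    assert np.isclose(np.sqrt(np.sum(N * sigma**2) / np.sum(N)), np.sqrt(3.25))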
##
def StellarMassFunction(SnapList, SMF, simulation_norm, FirstFile, LastFile, NumFile, ResolutionLimit_mean, model_tags, observations, paper_plot, output_tag):
'''
    Calculates the stellar mass function for given galaxies with the option to overplot observations by Song et al. (2016) at z = 6, 7, 8 and/or Baldry et al. (2008) at z = 0.1.
Parallel compatible.
NOTE: The plotting assumes the redshifts we are plotting at are (roughly) the same for each model.
Parameters
---------
SnapList : Nested 'array-like`, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots that we plot the stellar mass function at for each model.
SMF : Nested 2-dimensional array, SMF[model_number0][snapshot0] = [bin0galaxies, ..., binNgalaxies], with length equal to the number of bins (NB_gal).
The count of galaxies within each stellar mass bin. Bounds are given by 'm_gal_low' and 'm_gal_high' in bins given by 'bin_width'.
simulation_norm : array with length equal to the number of models.
Denotes which simulation each model uses.
0 : MySim
1 : Mini-Millennium
2 : Tiamat (down to z = 5)
3 : Extended Tiamat (down to z = 1.6ish).
4 : Britton's Simulation
5 : Kali
FirstFile, LastFile, NumFile : array of integers with length equal to the number of models.
The file numbers for each model that were read in (defined by the range between [FirstFile, LastFile] inclusive) and the TOTAL number of files for this model (we may only be plotting a subset of the volume).
ResolutionLimit_mean : array of floats with the same shape as SMF.
This is the mean stellar mass for a halo with len (number of N-body simulation particles) between 'stellar_mass_halolen_lower' and 'stellar_mass_halolen_upper'.
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
observations : int
Denotes whether we want to overplot observational results.
0 : Don't plot anything.
1 : Plot Song et al. (2016) at z = 6, 7, 8.
2 : Plot Baldry et al. (2008) at z = 0.1.
3 : Plot both of these.
paper_plot : int
Denotes whether we want to split the plotting over three panels (z = 6, 7, 8) for the paper or keep it all to one figure.
output_tag : string
        Name of the file that will be generated. File will be saved in the current directory with the output format defined by the 'output_format' variable at the beginning of the file.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
Stellar Mass is in units of log10(Msun).
'''
## Empty array initialization ##
title = []
normalization_array = []
redshift_labels = []
counts_array = []
bin_middle_array = []
for model_number in range(0, len(SnapList)):
counts_array.append([])
bin_middle_array.append([])
redshift_labels.append([])
####
for model_number in range(0, len(SnapList)): # Does this for each of the models.
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
box_factor = (LastFile[model_number] - FirstFile[model_number] + 1.0)/(NumFile[model_number]) # This factor allows us to take a sub-volume of the box and scale the results to represent the entire box.
print("We are creating the stellar mass function using {0:.4f} of the box's volume.".format(box_factor))
norm = pow(AllVars.BoxSize,3) / pow(AllVars.Hubble_h, 3) * bin_width * box_factor
normalization_array.append(norm)
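        # (Added note) Dividing the raw bin counts by `norm` -- the comoving
        # volume (BoxSize / h)^3 in Mpc^3, times the dex bin width, scaled by
        # the sub-volume fraction -- turns counts into Phi in Mpc^-3 dex^-1,
        # matching the y-axis label used in the plotting section below.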
####
for snapshot_idx in range(0, len(SnapList[model_number])): # Loops for each snapshot in each model.
tmp = 'z = %.2f' %(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]) # Assigns a redshift label.
redshift_labels[model_number].append(tmp)
## We perform the plotting on Rank 0 so only this rank requires the final counts array. ##
if rank == 0:
counts_total = np.zeros_like(SMF[model_number][snapshot_idx])
else:
counts_total = None
comm.Reduce([SMF[model_number][snapshot_idx], MPI.FLOAT], [counts_total, MPI.FLOAT], op = MPI.SUM, root = 0) # Sum all the stellar mass and pass to Rank 0.
if rank == 0:
counts_array[model_number].append(counts_total)
bin_middle_array[model_number].append(np.arange(m_gal_low, m_gal_high+bin_width, bin_width)[:-1] + bin_width * 0.5)
####
## Plotting ##
if rank == 0: # Plot only on rank 0.
if paper_plot == 0:
f = plt.figure()
ax = plt.subplot(111)
for model_number in range(0, len(SnapList)):
for snapshot_idx in range(0, len(SnapList[model_number])):
if model_number == 0: # We assume the redshifts for each model are the same, we only want to put a legend label for each redshift once.
title = redshift_labels[model_number][snapshot_idx]
else:
title = ''
plt.plot(bin_middle_array[model_number][snapshot_idx], counts_array[model_number][snapshot_idx] / normalization_array[model_number], color = PlotScripts.colors[snapshot_idx], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = title, linewidth = PlotScripts.global_linewidth)
#print(np.min(np.log10(ResolutionLimit_mean)))
#ax.axvline(np.max(np.log10(ResolutionLimit_mean)), color = 'k', linewidth = PlotScripts.global_linewidth, linestyle = '--')
#ax.text(np.max(np.log10(ResolutionLimit_mean)) + 0.1, 1e-3, "Resolution Limit", color = 'k')
for model_number in range(0, len(SnapList)): # Place legend labels for each of the models. NOTE: Placed after previous loop for proper formatting of labels.
plt.plot(1e100, 1e100, color = 'k', linestyle = PlotScripts.linestyles[model_number], label = model_tags[model_number], rasterized=True, linewidth = PlotScripts.global_linewidth)
## Adjusting axis labels/limits. ##
plt.yscale('log', nonposy='clip')
plt.axis([6, 11.5, 1e-6, 1e-0])
ax.set_xlabel(r'$\log_{10}\ m_{\mathrm{*}} \:[M_{\odot}]$', fontsize = PlotScripts.global_fontsize)
ax.set_ylabel(r'$\Phi\ [\mathrm{Mpc}^{-3}\: \mathrm{dex}^{-1}]$', fontsize = PlotScripts.global_fontsize)
ax.xaxis.set_minor_locator(plt.MultipleLocator(0.25))
ax.set_xticks(np.arange(6.0, 12.0))
if (observations == 1 or observations == 3): # If we wanted to plot Song.
Obs.Get_Data_SMF()
delta = 0.05
caps = 5
## Song (2016) Plotting ##
plt.errorbar(Obs.Song_SMF_z6[:,0], 10**Obs.Song_SMF_z6[:,1], yerr= (10**Obs.Song_SMF_z6[:,1] - 10**Obs.Song_SMF_z6[:,3], 10**Obs.Song_SMF_z6[:,2] - 10**Obs.Song_SMF_z6[:,1]), xerr = 0.25, capsize = caps, elinewidth = PlotScripts.global_errorwidth, alpha = 1.0, lw=2.0, marker='o', ls='none', label = 'Song 2015, z = 6', color = PlotScripts.colors[0], rasterized=True)
plt.errorbar(Obs.Song_SMF_z7[:,0], 10**Obs.Song_SMF_z7[:,1], yerr= (10**Obs.Song_SMF_z7[:,1] - 10**Obs.Song_SMF_z7[:,3], 10**Obs.Song_SMF_z7[:,2] - 10**Obs.Song_SMF_z7[:,1]), xerr = 0.25, capsize = caps, alpha=0.75, elinewidth = PlotScripts.global_errorwidth, lw=1.0, marker='o', ls='none', label = 'Song 2015, z = 7', color = PlotScripts.colors[1], rasterized=True)
plt.errorbar(Obs.Song_SMF_z8[:,0], 10**Obs.Song_SMF_z8[:,1], yerr= (10**Obs.Song_SMF_z8[:,1] - 10**Obs.Song_SMF_z8[:,3], 10**Obs.Song_SMF_z8[:,2] - 10**Obs.Song_SMF_z8[:,1]), xerr = 0.25, capsize = caps, alpha=0.75, elinewidth = PlotScripts.global_errorwidth, lw=1.0, marker='o', ls='none', label = 'Song 2015, z = 8', color = PlotScripts.colors[2], rasterized=True)
####
if ((observations == 2 or observations == 3) and rank == 0): # If we wanted to plot Baldry.
Baldry_xval = np.log10(10 ** Obs.Baldry_SMF_z0[:, 0] /AllVars.Hubble_h/AllVars.Hubble_h)
Baldry_xval = Baldry_xval - 0.26 # convert back to Chabrier IMF
Baldry_yvalU = (Obs.Baldry_SMF_z0[:, 1]+Obs.Baldry_SMF_z0[:, 2]) * AllVars.Hubble_h*AllVars.Hubble_h*AllVars.Hubble_h
Baldry_yvalL = (Obs.Baldry_SMF_z0[:, 1]-Obs.Baldry_SMF_z0[:, 2]) * AllVars.Hubble_h*AllVars.Hubble_h*AllVars.Hubble_h
plt.fill_between(Baldry_xval, Baldry_yvalU, Baldry_yvalL,
facecolor='purple', alpha=0.25, label='Baldry et al. 2008 (z=0.1)')
####
leg = plt.legend(loc='lower left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile = './%s%s' %(output_tag, output_format)
plt.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close()
if (paper_plot == 1):
fig, ax = plt.subplots(nrows=1, ncols=3, sharex=False, sharey=True, figsize=(16, 6))
delta_fontsize = 0
caps = 5
ewidth = 1.5
for model_number in range(0, len(SnapList)):
for count in range(len(SnapList[model_number])):
w = np.where((counts_array[model_number][count] > 0))[0]
ax[count].plot(bin_middle_array[model_number][count][w], counts_array[model_number][count][w]
/ normalization_array[model_number], color = PlotScripts.colors[model_number],
linestyle = PlotScripts.linestyles[model_number], rasterized = True,
label = r"$\mathbf{SAGE}$", linewidth = PlotScripts.global_linewidth)
tick_locs = np.arange(6.0, 12.0)
ax[count].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs], fontsize = PlotScripts.global_fontsize)
ax[count].set_xlim([6.8, 10.3])
ax[count].tick_params(which = 'both', direction='in',
width = PlotScripts.global_tickwidth)
ax[count].tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax[count].tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
ax[count].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
fontsize = PlotScripts.global_labelsize - delta_fontsize)
ax[count].xaxis.set_minor_locator(plt.MultipleLocator(0.25))
#ax[count].set_xticks(np.arange(6.0, 12.0))
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax[count].spines[axis].set_linewidth(PlotScripts.global_axiswidth)
# Since y-axis is shared, only need to do this once.
ax[0].set_yscale('log', nonposy='clip')
ax[0].set_yticklabels([r"$\mathbf{10^{-5}}$",r"$\mathbf{10^{-5}}$",r"$\mathbf{10^{-4}}$", r"$\mathbf{10^{-3}}$",
r"$\mathbf{10^{-2}}$",r"$\mathbf{10^{-1}}$"])
ax[0].set_ylim([1e-5, 1e-1])
#ax[0].set_ylabel(r'\mathbf{$\log_{10} \Phi\ [\mathrm{Mpc}^{-3}\: \mathrm{dex}^{-1}]}$',
ax[0].set_ylabel(r'$\mathbf{log_{10} \: \Phi\ [Mpc^{-3}\: dex^{-1}]}$',
fontsize = PlotScripts.global_labelsize - delta_fontsize)
Obs.Get_Data_SMF()
PlotScripts.Plot_SMF_z6(ax[0], errorwidth=ewidth, capsize=caps)
PlotScripts.Plot_SMF_z7(ax[1], errorwidth=ewidth, capsize=caps)
PlotScripts.Plot_SMF_z8(ax[2], errorwidth=ewidth, capsize=caps)
####
ax[0].text(0.7, 0.9, r"$\mathbf{z = 6}$", transform = ax[0].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
ax[1].text(0.7, 0.9, r"$\mathbf{z = 7}$", transform = ax[1].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
ax[2].text(0.7, 0.9, r"$\mathbf{z = 8}$", transform = ax[2].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
#leg = ax[0,0].legend(loc=2, bbox_to_anchor = (0.2, -0.5), numpoints=1, labelspacing=0.1)
leg = ax[0].legend(loc='lower left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize - 2)
plt.tight_layout()
outputFile = "{0}_paper{1}".format(output_tag, output_format)
plt.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close()
##
def plot_fesc_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_galaxy_fesc, std_galaxy_fesc, N_galaxy_fesc,
mean_halo_fesc, std_halo_fesc, N_halo_fesc,
ResolutionLimit_mean, model_tags, paper_plots,
mass_global, fesc_global, Ngamma_global, output_tag):
"""
Plots the escape fraction as a function of stellar/halo mass.
Parallel compatible.
Accepts 3D arrays of the escape fraction binned into Stellar Mass bins to plot the escape fraction for multiple models.
Mass units are log(Msun)
Parameters
---------
SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots for each model.
simulation_norm : array with length equal to the number of models.
Denotes which simulation each model uses.
0 : MySim
1 : Mini-Millennium
2 : Tiamat (down to z = 5)
3 : Extended Tiamat (down to z = 1.6ish).
4 : Britton's Simulation
5 : Kali
mean_galaxy_fesc, std_galaxy_fesc, N_galaxy_fesc : Nested 3-dimensional array, mean_galaxy_fesc[model_number0][snapshot0] = [bin0_meanfesc, ..., binN_meanfesc], with length equal to the number of models.
Mean/Standard deviation for fesc in each stellar mass bin, for each [model_number] and [snapshot_number]. N_galaxy_fesc is the number of galaxies placed into each mass bin.
mean_halo_fesc, std_halo_fesc, N_halo_fesc Nested 3-dimensional array, mean_halo_fesc[model_number0][snapshot0] = [bin0_meanfesc, ..., binN_meanfesc], with length equal to the number of models.
Identical to previous except using the halo virial mass for the binning rather than stellar mass.
ResolutionLimit_mean : array of floats with the same shape as mean_galaxy_fesc.
This is the mean stellar mass for a halo with len (number of N-body simulation particles) between 'stellar_mass_halolen_lower' and 'stellar_mass_halolen_upper'.
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
paper_plots: Integer.
Flag to denote whether we should plot a full, 4 panel plot for the
RSAGE paper.
output_tag : string
Name of the file that will be generated.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
Mass units are log(Msun).
"""
def adjust_stellarmass_plot(ax):
#ax.axhline(0.20, 0, 100, color ='k', linewidth = PlotScripts.global_linewidth, linestyle = '-.')
#ax.text(7.8, 0.22, r"$f_\mathrm{esc, base}$", color = 'k',
# size = PlotScripts.global_fontsize)
ax.set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax.set_ylabel(r'$\mathbf{\langle f_{esc}\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax.set_xlim([6.8, 10])
ax.set_ylim([0.05, 0.45])
#ax.axhline(0.35, 0, 100, color ='k', linewidth = PlotScripts.global_linewidth, linestyle = '-.')
#ax.text(9.1, 0.37, r"$f_\mathrm{esc} = 0.35$", color = 'k',
# size = PlotScripts.global_fontsize)
ax.xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax.yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax.tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax.tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax.tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax.spines[axis].set_linewidth(PlotScripts.global_axiswidth)
tick_locs = np.arange(6.0, 11.0)
ax.set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
tick_locs = np.arange(0.0, 0.80, 0.10)
ax.set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
'''
labels = ax.yaxis.get_ticklabels()
locs = ax.yaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
'''
leg = ax.legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
def adjust_paper_plots(ax, model_tags):
ax[1,0].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[1,1].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[0,0].set_ylabel(r'$\mathbf{\langle f_{esc}\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax[1,0].set_ylabel(r'$\mathbf{\langle f_{esc}\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax_x = [0, 0, 1, 1]
ax_y = [0, 1, 0, 1]
for count, (x, y) in enumerate(zip(ax_x, ax_y)):
ax[x,y].set_xlim([4.8, 10.4])
ax[x,y].set_ylim([0.00, 0.68])
ax[x,y].yaxis.set_major_locator(mtick.MultipleLocator(0.1))
ax[x,y].xaxis.set_major_locator(mtick.MultipleLocator(1.0))
ax[x,y].yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax[x,y].xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax[x,y].tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax[x,y].tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax[x,y].tick_params(which = 'minor',
length = PlotScripts.global_ticklength - 2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax[x,y].spines[axis].set_linewidth(PlotScripts.global_axiswidth)
print(model_tags[count])
label = model_tags[count]
ax[x,y].text(0.05, 0.65, label, transform = ax[x,y].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
tick_locs = np.arange(4.0, 11.0)
ax[1,0].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
ax[1,1].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
tick_locs = np.arange(-0.1, 0.80, 0.10)
ax[0,0].set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
ax[1,0].set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
print("x")
labels = ax[1,0].xaxis.get_ticklabels()
locs = ax[1,0].xaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
print("y")
labels = ax[1,0].yaxis.get_ticklabels()
locs = ax[1,0].yaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
print("Plotting fesc as a function of stellar mass.")
## Array initialization ##
master_mean_fesc_stellar, master_std_fesc_stellar, master_N_fesc_stellar, master_bin_middle_stellar = \
collect_across_tasks(mean_galaxy_fesc, std_galaxy_fesc, N_galaxy_fesc,
SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
if rank == 0:
if paper_plots == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
else:
fig, ax = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
fig2, ax2 = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
delta_fontsize = 0
caps = 5
ewidth = 1.5
count_x = 0
for count, model_number in enumerate(range(0, len(SnapList))):
if count == 2:
count_x += 1
print("There were a total of {0} galaxies over the entire redshift range.".format(sum(N_halo_fesc[model_number])))
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
plot_count = 0
for snapshot_idx in range(0, len(SnapList[model_number])):
if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
if (model_number == 0):
label = r"$\mathbf{z = " + \
str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
"}$"
else:
label = ""
## Plots as a function of stellar mass ##
                    w = np.where((master_N_fesc_stellar[model_number][snapshot_idx] < 4))[0]  # Bins with fewer than 4 galaxies are masked out of the plot.
master_mean_fesc_stellar[model_number][snapshot_idx][w] = np.nan
if paper_plots == 0:
print(master_mean_fesc_stellar[model_number][snapshot_idx])
ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_fesc_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
else:
ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_fesc_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[0],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
#w = np.random.randint(0,
# len(mass_global[model_number][snapshot_idx][0]),
# size=500)
#sc = ax2[count_x, count%2].scatter(mass_global[model_number][snapshot_idx][0][w],
# fesc_global[model_number][snapshot_idx][0][w],
# c=np.log10(Ngamma_global[model_number][snapshot_idx][0][w]*1.0e50),
# alpha = 0.5,cmap='plasma')
#plt.colorbar(sc)
#ax2[count_x, count%2].hexbin(mass_global[model_number][snapshot_idx],
# fesc_global[model_number][snapshot_idx],
# C=Ngamma_global[model_number][snapshot_idx])
plot_count += 1
if (plot_count == len(PlotSnapList[model_number])):
break
## Stellar Mass plots ##
if paper_plots == 0:
adjust_stellarmass_plot(ax1)
else:
adjust_paper_plots(ax, model_tags)
leg = ax[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
plt.tight_layout()
plt.subplots_adjust(wspace = 0.0, hspace = 0.0)
#leg = ax2[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
#leg.draw_frame(False) # Don't want a box frame
#for t in leg.get_texts(): # Reduce the size of the text
# t.set_fontsize('medium')
plt.tight_layout()
plt.subplots_adjust(wspace = 0.0, hspace = 0.0)
## Output ##
outputFile = './%s%s' %(output_tag, output_format)
fig.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig)
if paper_plots == 1:
outputFile = './%s_scatter%s' %(output_tag, output_format)
fig2.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig2)
##
def plot_reionmod_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_galaxy_reionmod, std_galaxy_reionmod, N_galaxy_reionmod,
mean_galaxy_reionmod_gnedin, std_galaxy_reionmod_gnedin,
model_tags, paper_plots, output_tag):
"""
"""
def adjust_paper_plots(ax, model_tags):
ax[1,0].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[1,1].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[0,0].set_ylabel(r'$\mathbf{\langle ReionMod\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax[1,0].set_ylabel(r'$\mathbf{\langle ReionMod\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax_x = [0, 0, 1, 1]
ax_y = [0, 1, 0, 1]
for count, (x, y) in enumerate(zip(ax_x, ax_y)):
ax[x,y].set_xlim([4.8, 10.4])
ax[x,y].set_ylim([0.00, 1.05])
#ax[x,y].yaxis.set_major_locator(mtick.MultipleLocator(0.1))
ax[x,y].xaxis.set_major_locator(mtick.MultipleLocator(1.0))
#ax[x,y].yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax[x,y].xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax[x,y].tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax[x,y].tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax[x,y].tick_params(which = 'minor',
length = PlotScripts.global_ticklength - 2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax[x,y].spines[axis].set_linewidth(PlotScripts.global_axiswidth)
print(model_tags[count])
label = model_tags[count]
ax[x,y].text(0.05, 0.65, label, transform = ax[x,y].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
tick_locs = np.arange(4.0, 11.0)
ax[1,0].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
ax[1,1].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
#tick_locs = np.arange(-0.1, 0.80, 0.10)
#ax[0,0].set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
#fontsize = PlotScripts.global_fontsize)
#ax[1,0].set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
def adjust_redshift_panels(ax, redshift_tags):
ax[1,0].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[1,1].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[0,0].set_ylabel(r'$\mathbf{\langle ReionMod\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax[1,0].set_ylabel(r'$\mathbf{\langle ReionMod\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax_x = [0, 0, 1, 1]
ax_y = [0, 1, 0, 1]
for count, (x, y) in enumerate(zip(ax_x, ax_y)):
ax[x,y].set_xlim([4.8, 10.4])
ax[x,y].set_ylim([0.00, 1.05])
#ax[x,y].yaxis.set_major_locator(mtick.MultipleLocator(0.1))
ax[x,y].xaxis.set_major_locator(mtick.MultipleLocator(1.0))
#ax[x,y].yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax[x,y].xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax[x,y].tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax[x,y].tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax[x,y].tick_params(which = 'minor',
length = PlotScripts.global_ticklength - 2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax[x,y].spines[axis].set_linewidth(PlotScripts.global_axiswidth)
label = redshift_tags[count]
ax[x,y].text(0.05, 0.65, label, transform = ax[x,y].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
tick_locs = np.arange(4.0, 11.0)
ax[1,0].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
ax[1,1].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
print("Reionization Modifier as a function of stellar mass.")
## Array initialization ##
master_mean_reionmod_stellar, master_std_reionmod_stellar, master_N_reionmod_stellar, master_bin_middle_stellar = \
collect_across_tasks(mean_galaxy_reionmod, std_galaxy_reionmod, N_galaxy_reionmod,
SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
master_mean_reionmod_gnedin_stellar, master_std_reionmod_gnedin_stellar, master_N_reionmod_gnedin_stellar, master_bin_middle_stellar = \
collect_across_tasks(mean_galaxy_reionmod_gnedin, std_galaxy_reionmod_gnedin, N_galaxy_reionmod,
SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
if rank == 0:
if paper_plots == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
else:
fig, ax = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
fig2, ax2 = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
delta_fontsize = 0
caps = 5
ewidth = 1.5
count_x = 0
for count, model_number in enumerate(range(0, len(SnapList))):
if count == 2:
count_x += 1
plot_count = 0
for snapshot_idx in range(0, len(SnapList[model_number])):
if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
if (model_number == 0):
label = r"$\mathbf{z = " + \
str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
"}$"
else:
label = ""
## Plots as a function of stellar mass ##
                    w = np.where((master_N_reionmod_stellar[model_number][snapshot_idx] < 4))[0]  # Bins with fewer than 4 galaxies are masked out of the plot.
master_mean_reionmod_stellar[model_number][snapshot_idx][w] = np.nan
master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx][w] = np.nan
if paper_plots == 0:
ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_reionmod_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
else:
ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_reionmod_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[0],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[1],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
plot_count += 1
if (plot_count == len(PlotSnapList[model_number])):
break
z_labels = []
for model_number in range(0, len(SnapList)):
count_x = 0
plot_count = 0
for count, snapshot_idx in enumerate(range(len(SnapList[model_number]))):
if count == 2:
count_x += 1
if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
label = model_tags[model_number]
if (model_number == 0):
z_label = r"$\mathbf{z = " + \
str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
"}$"
z_labels.append(z_label)
## Plots as a function of stellar mass ##
                    w = np.where((master_N_reionmod_stellar[model_number][snapshot_idx] < 4))[0]  # Bins with fewer than 4 galaxies are masked out of the plot.
master_mean_reionmod_stellar[model_number][snapshot_idx][w] = np.nan
master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx][w] = np.nan
if (model_number == 0):
print(master_mean_reionmod_stellar[model_number][snapshot_idx])
ax2[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_reionmod_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[model_number],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
if (model_number == 0):
ax2[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx],
color = 'k',
ls = '--',
rasterized = True, label = "Gnedin",
lw = PlotScripts.global_linewidth)
plot_count += 1
if (plot_count == len(PlotSnapList[model_number])):
break
## Stellar Mass plots ##
if paper_plots == 0:
adjust_stellarmass_plot(ax1)
else:
adjust_paper_plots(ax, model_tags)
print(z_labels)
adjust_redshift_panels(ax2, z_labels)
leg = ax[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
leg = ax2[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
plt.tight_layout()
plt.subplots_adjust(wspace = 0.0, hspace = 0.0)
## Output ##
outputFile = "{0}{1}".format(output_tag, output_format)
fig.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig)
outputFile2 = "{0}_redshiftpanels{1}".format(output_tag, output_format)
fig2.savefig(outputFile2, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile2))
plt.close(fig2)
##
def plot_nion_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_Ngamma_galaxy, std_Ngamma_galaxy, N_Ngamma_galaxy,
model_tags, paper_plots, output_tag):
"""
Plots the number of ionizing photons emitted (not necessarily escaped) as a
function of galaxy stellar mass.
Parallel compatible.
Accepts 3D arrays of the ionizing photon rate binned into stellar mass bins to plot it for multiple models.
Mass units are log(Msun)
Parameters
----------
SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots for each model.
simulation_norm : array with length equal to the number of models.
Denotes which simulation each model uses.
0 : MySim
1 : Mini-Millennium
2 : Tiamat (down to z = 5)
3 : Extended Tiamat (down to z = 1.6ish).
4 : Britton's Simulation
5 : Kali
mean_Ngamma_galaxy, std_Ngamma_galaxy, N_Ngamma_galaxy : Nested
3-dimensional array, mean_Ngamma_galaxy[model_number0][snapshot0] = [bin0_meanNgamma, ..., binN_meanNgamma], with length equal to the number of models.
Mean/Standard deviation for Ngamma in each stellar mass bin, for each
[model_number] and [snapshot_number]. N_Ngamma_galaxy is the number
of galaxies placed into each mass bin.
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
paper_plots: Integer.
Flag to denote whether we should plot a full, 4 panel plot for the
RSAGE paper.
output_tag : string
Name of the file that will be generated.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
Mass units are log(Msun).
Ngamma units are 1.0e50 photons/s.
"""
def adjust_stellarmass_plot(ax):
#ax.axhline(0.20, 0, 100, color ='k', linewidth = PlotScripts.global_linewidth, linestyle = '-.')
#ax.text(7.8, 0.22, r"$f_\mathrm{esc, base}$", color = 'k',
# size = PlotScripts.global_fontsize)
ax.set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax.set_ylabel(r'$\mathbf{\log_{10}\langle f_{esc} N_\gamma\rangle_{M_*}}$',
size = PlotScripts.global_labelsize)
ax.set_xlim([6.8, 10])
#ax.set_ylim([0.05, 0.45])
#ax.axhline(0.35, 0, 100, color ='k', linewidth = PlotScripts.global_linewidth, linestyle = '-.')
#ax.text(9.1, 0.37, r"$f_\mathrm{esc} = 0.35$", color = 'k',
# size = PlotScripts.global_fontsize)
ax.xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
#ax.yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax.tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax.tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax.tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax.spines[axis].set_linewidth(PlotScripts.global_axiswidth)
tick_locs = np.arange(6.0, 11.0)
ax.set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
#tick_locs = np.arange(0.0, 0.80, 0.10)
#ax.set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
'''
labels = ax.yaxis.get_ticklabels()
locs = ax.yaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
'''
leg = ax.legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
def adjust_paper_plots(ax, z_tags):
ax[1,0].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[1,1].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax[0,0].set_ylabel(r'$\mathbf{\Sigma log_{10}\langle f_{esc} N_\gamma\rangle_{M_*}}$',
size = PlotScripts.global_labelsize - 10)
ax[1,0].set_ylabel(r'$\mathbf{\Sigma log_{10}\langle f_{esc} N_\gamma\rangle_{M_*}}$',
size = PlotScripts.global_labelsize - 10)
ax_x = [0, 0, 1, 1]
ax_y = [0, 1, 0, 1]
for count, (x, y) in enumerate(zip(ax_x, ax_y)):
ax[x,y].set_xlim([4.8, 10.4])
ax[x,y].set_ylim([47, 55])
#ax[x,y].yaxis.set_major_locator(mtick.MultipleLocator(0.1))
ax[x,y].xaxis.set_major_locator(mtick.MultipleLocator(1.0))
#ax[x,y].yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax[x,y].xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax[x,y].tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax[x,y].tick_params(which = 'major', length = PlotScripts.global_ticklength)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax[x,y].spines[axis].set_linewidth(PlotScripts.global_axiswidth)
print(z_tags[count])
label = r"$\mathbf{z = " + \
str(int(round(float(z_tags[count])))) +\
"}$"
ax[x,y].text(0.7, 0.8, label, transform = ax[x,y].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
tick_locs = np.arange(4.0, 11.0)
ax[1,0].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
ax[1,1].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
#tick_locs = np.arange(0.0, 0.80, 0.10)
#ax[0,0].set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
#ax[1,0].set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
print("x")
labels = ax[1,0].xaxis.get_ticklabels()
locs = ax[1,0].xaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
print("y")
labels = ax[1,0].yaxis.get_ticklabels()
locs = ax[1,0].yaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
print("Plotting Ngamma*fesc as a function of stellar mass.")
## Array initialization ##
master_mean_Ngamma_stellar, master_std_Ngamma_stellar, master_N_Ngamma_stellar, master_bin_middle_stellar = \
collect_across_tasks(mean_Ngamma_galaxy, std_Ngamma_galaxy, N_Ngamma_galaxy,
SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
if rank == 0:
if paper_plots == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
else:
fig, ax = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
delta_fontsize = 0
caps = 5
ewidth = 1.5
z_tags = np.zeros_like(model_tags, dtype=np.float32)
for model_number in range(0, len(SnapList)):
count_x = 0
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
plot_count = 0
for count, snapshot_idx in enumerate(range(0, len(SnapList[model_number]))):
if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
if count == 2:
count_x += 1
label = model_tags[model_number]
z_tags[count] = float(AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
## Plots as a function of stellar mass ##
w = np.where((master_N_Ngamma_stellar[model_number][snapshot_idx] < 4))[0] # Mask bins with fewer than 4 galaxies so they aren't plotted.
master_mean_Ngamma_stellar[model_number][snapshot_idx][w] = np.nan
if paper_plots == 0:
ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
np.log10(master_mean_Ngamma_stellar[model_number][snapshot_idx]*1.0e50),
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
else:
ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
np.log10(master_mean_Ngamma_stellar[model_number][snapshot_idx]*1.0e50),
color = PlotScripts.colors[model_number],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
plot_count += 1
if (plot_count == len(PlotSnapList[model_number])):
break
## Stellar Mass plots ##
if paper_plots == 0:
adjust_stellarmass_plot(ax1)
else:
adjust_paper_plots(ax, z_tags)
leg = ax[0,0].legend(loc="upper left", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
plt.tight_layout()
plt.subplots_adjust(wspace = 0.0, hspace = 0.0)
## Output ##
outputFile = './%s%s' %(output_tag, output_format)
fig.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig)
##
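# Illustrative sketch (not called by the pipeline above): the repeated
# simulation_norm if/elif chains in these plotting routines could be
# collapsed into a dispatch table.  The integer -> setter mapping below
# mirrors the branches used throughout this file and assumes the AllVars
# module imported at the top of the script; treat it as a sketch, not a
# drop-in replacement.
def _set_sim_params_sketch(sim_norm):
    setters = {0: AllVars.Set_Params_Mysim,
               1: AllVars.Set_Params_MiniMill,
               2: AllVars.Set_Params_Tiamat,
               3: AllVars.Set_Params_Tiamat_extended,
               4: AllVars.Set_Params_Britton,
               5: AllVars.Set_Params_Kali}
    try:
        return setters[sim_norm]()  # Returns the cosmology, matching the existing call sites.
    except KeyError:
        raise ValueError("Simulation norm {0} has not been implemented yet.".format(sim_norm))
# Hypothetical usage: cosmo = _set_sim_params_sketch(simulation_norm[model_number])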
def plot_photo_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_photo_galaxy, std_photo_galaxy, N_photo_galaxy,
model_tags, paper_plots, output_tag):
"""
Plots the photoionization rate as a function of galaxy stellar mass.
Parallel compatible.
Accepts 3D arrays of the photoionization rate binned into stellar mass bins to plot it for multiple models.
Mass units are log(Msun)
Parameters
----------
SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots for each model.
simulation_norm : array with length equal to the number of models.
Denotes which simulation each model uses.
0 : MySim
1 : Mini-Millennium
2 : Tiamat (down to z = 5)
3 : Extended Tiamat (down to z = 1.6ish).
4 : Britton's Simulation
5 : Kali
mean_photo_galaxy, std_photo_galaxy, N_photo_galaxy : Nested
3-dimensional array, mean_photo_galaxy[model_number0][snapshot0] =
[bin0_meanphoto, ..., binN_meanphoto], with length equal to the number of models.
Mean/Standard deviation for Photionization Rate in each stellar mass
bin, for each [model_number] and [snapshot_number]. N_photo_galaxy is
the number of galaxies placed into each mass bin.
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
paper_plots: Integer.
Flag to denote whether we should plot a full, 4 panel plot for the
RSAGE paper.
output_tag : string
Name of the file that will be generated.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
Mass units are log(Msun).
Photoionization rate units are s^-1.
"""
def adjust_stellarmass_plot(ax):
ax.set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
size = PlotScripts.global_fontsize)
ax.set_ylabel(r'$\mathbf{log_{10} \: \Gamma \: [s^{-1}]}$',
size = PlotScripts.global_labelsize)
ax.set_xlim([4.8, 10])
#ax.set_ylim([0.05, 0.45])
ax.xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
#ax.yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax.tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax.tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax.tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax.spines[axis].set_linewidth(PlotScripts.global_axiswidth)
#tick_locs = np.arange(4.0, 11.0)
#ax.set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
#tick_locs = np.arange(0.0, 0.80, 0.10)
#ax.set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
'''
labels = ax.yaxis.get_ticklabels()
locs = ax.yaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
'''
leg = ax.legend(loc="lower right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
print("Plotting photoionization rate as a function of stellar mass.")
## Array initialization ##
master_mean_photo_stellar, master_std_photo_stellar, master_N_photo_stellar, master_bin_middle_stellar = \
collect_across_tasks(mean_photo_galaxy, std_photo_galaxy, N_photo_galaxy,
SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
if rank == 0:
if paper_plots == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
else:
pass
for model_number in range(0, len(SnapList)):
count_x = 0
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
plot_count = 0
for count, snapshot_idx in enumerate(range(0, len(SnapList[model_number]))):
if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
if (model_number == 0):
label = r"$\mathbf{z = " + \
str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
"}$"
else:
label = ""
## Plots as a function of stellar mass ##
w = np.where((master_N_photo_stellar[model_number][snapshot_idx] < 4))[0] # Mask bins with fewer than 4 galaxies so they aren't plotted.
master_mean_photo_stellar[model_number][snapshot_idx][w] = np.nan
if paper_plots == 0:
ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
np.log10(master_mean_photo_stellar[model_number][snapshot_idx]),
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
else:
pass
plot_count += 1
if (plot_count == len(PlotSnapList[model_number])):
break
for model_number in range(0, len(SnapList)):
ax1.plot(np.nan, np.nan, color = 'k',
label = model_tags[model_number],
lw = PlotScripts.global_linewidth,
ls = PlotScripts.linestyles[model_number])
## Stellar Mass plots ##
if paper_plots == 0:
adjust_stellarmass_plot(ax1)
else:
pass
## Output ##
outputFile = './%s%s' %(output_tag, output_format)
fig.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig)
##
##
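# Illustrative sketch of the low-count masking used throughout these
# routines: bins containing fewer than 4 galaxies are blanked with NaN
# before plotting so matplotlib leaves a gap in the line.  A hypothetical
# helper, not part of the original code:
def _mask_low_count_bins_sketch(mean_bins, N_bins, min_count=4):
    import numpy as np
    mean_bins = np.array(mean_bins, dtype=np.float64)  # Copy so the caller's array is untouched.
    w = np.where(np.asarray(N_bins) < min_count)[0]  # Indices of poorly sampled bins.
    mean_bins[w] = np.nan  # NaN values are skipped when the line is drawn.
    return mean_bins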
def plot_sfr_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_galaxy_sfr, std_galaxy_sfr,
mean_galaxy_ssfr, std_galaxy_ssfr,
N_galaxy, model_tags, output_tag):
"""
Plots the star formation rate (SFR) and specific star formation rate (sSFR) as functions of stellar mass.
Parallel compatible.
Accepts 3D arrays of the SFR and sSFR binned into stellar mass bins.
Mass units log(Msun).
Parameters
----------
SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots for each model.
simulation_norm : array with length equal to the number of models.
Denotes which simulation each model uses.
0 : MySim
1 : Mini-Millennium
2 : Tiamat (down to z = 5)
3 : Extended Tiamat (down to z = 1.6ish).
4 : Britton's Simulation
5 : Kali
mean_galaxy_sfr, std_galaxy_sfr, mean_galaxy_ssfr, std_galaxy_ssfr, N_galaxy : Nested 3-dimensional arrays,
mean_galaxy_sfr[model_number0][snapshot0] = [bin0_meansfr, ..., binN_meansfr],
with length equal to the number of models.
Mean/Standard deviation for the SFR and sSFR in each stellar mass bin, for each [model_number] and [snapshot_number].
N_galaxy is the number of galaxies placed into each mass bin.
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
output_tag : string
Name of the file that will be generated.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
Mass units are log(Msun).
"""
def adjust_sfr_plot(ax):
ax.set_xlabel(r'$\log_{10}\ M_*\ [M_{\odot}]$',
size = PlotScripts.global_fontsize)
ax.set_ylabel(r'$\mathbf{\langle \mathrm{SFR}\rangle_{M_*}\:[M_\odot\mathrm{yr}^{-1}]}$',
size = PlotScripts.global_labelsize)
ax.set_xlim([4.8, 10])
ax.set_ylim([-3, 2])
ax.xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax.yaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax.tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax.tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax.tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax.spines[axis].set_linewidth(PlotScripts.global_axiswidth)
tick_locs = np.arange(6.0, 11.0)
ax.set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
#tick_locs = np.arange(0.0, 0.80, 0.10)
#ax.set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
labels = ax.yaxis.get_ticklabels()
locs = ax.yaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
leg = ax.legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
def adjust_ssfr_plot(ax):
ax.set_xlabel(r'$\log_{10}\ M_*\ [M_{\odot}]$',
size = PlotScripts.global_fontsize)
ax.set_ylabel(r'$\mathbf{\langle\mathrm{sSFR}\rangle_{M_*}\:[\mathrm{yr^{-1}}]}$',
size = PlotScripts.global_labelsize)
ax.set_xlim([4.8, 10])
ax.set_ylim([-9, -4])
ax.xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax.yaxis.set_minor_locator(mtick.MultipleLocator(0.1))
ax.tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax.tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax.tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax.spines[axis].set_linewidth(PlotScripts.global_axiswidth)
tick_locs = np.arange(6.0, 11.0)
ax.set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
fontsize = PlotScripts.global_fontsize)
#tick_locs = np.arange(0.0, 0.80, 0.10)
#ax.set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
labels = ax.yaxis.get_ticklabels()
locs = ax.yaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
leg = ax.legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
print("Plotting sSFR as a function of stellar mass.")
## Array initialization ##
master_mean_sfr_stellar, master_std_sfr_stellar, master_N_sfr_stellar, master_bin_middle_stellar = \
collect_across_tasks(mean_galaxy_sfr, std_galaxy_sfr, N_galaxy,
SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
master_mean_ssfr_stellar, master_std_ssfr_stellar, master_N_ssfr_stellar, master_bin_middle_stellar = \
collect_across_tasks(mean_galaxy_ssfr, std_galaxy_ssfr, N_galaxy,
SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
if rank == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
fig2 = plt.figure()
ax2 = fig2.add_subplot(111)
for model_number in range(0, len(SnapList)):
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
plot_count = 0
for snapshot_idx in range(0, len(SnapList[model_number])):
if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
if (model_number == 0):
label = r"$\mathbf{z = " + \
str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
"}$"
else:
label = ""
## Plots as a function of stellar mass ##
ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_sfr_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
ax2.plot(master_bin_middle_stellar[model_number][snapshot_idx],
master_mean_ssfr_stellar[model_number][snapshot_idx],
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
plot_count += 1
if (plot_count == len(PlotSnapList[model_number])):
break
#for model_number in range(0, len(SnapList)): # Just plot some garbage to get the legend labels correct.
#ax1.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)
#ax3.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)
## Stellar Mass plots ##
adjust_sfr_plot(ax1)
adjust_ssfr_plot(ax2)
## Output ##
outputFile = "./{0}SFR{1}".format(output_tag, output_format)
fig.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
outputFile = "./{0}sSFR{1}".format(output_tag, output_format)
fig2.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig)
plt.close(fig2)
##
##
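# Illustrative note on the quantities plotted above: the specific SFR is
# sSFR = SFR / M_*, so for the log quantities used in these panels,
# log10(sSFR / yr^-1) = log10(SFR / (Msun yr^-1)) - log10(M_* / Msun).
# A minimal sketch of that relation (hypothetical helper):
def _log_ssfr_sketch(log10_sfr, log10_mstar):
    return log10_sfr - log10_mstar  # sSFR in log10(yr^-1).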
def plot_fej_Ngamma(SnapList, PlotSnapList, simulation_norm,
mean_Ngamma_fej, std_Ngamma_fej,
N_fej, model_tags, output_tag):
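"""
Plots the mean ionizing photon rate as a function of the ejected gas
fraction (f_ej), binned across processors via collect_across_tasks().
"""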
def adjust_plot(ax):
ax.set_xlabel(r'$\mathbf{f_\mathrm{ej}}$',
size = PlotScripts.global_fontsize)
ax.set_ylabel(r'$\mathbf{\log_{10}\langle N_\gamma\rangle_{f_{ej}}}$',
size = PlotScripts.global_labelsize)
ax.set_xlim([0.0, 1.0])
#ax.set_ylim([0.05, 0.45])
#ax.axhline(0.35, 0, 100, color ='k', linewidth = PlotScripts.global_linewidth, linestyle = '-.')
#ax.text(9.1, 0.37, r"$f_\mathrm{esc} = 0.35$", color = 'k',
# size = PlotScripts.global_fontsize)
ax.xaxis.set_minor_locator(mtick.MultipleLocator(0.10))
#ax.yaxis.set_minor_locator(mtick.MultipleLocator(0.05))
ax.tick_params(which = 'both', direction='in', width =
PlotScripts.global_tickwidth)
ax.tick_params(which = 'major', length = PlotScripts.global_ticklength)
ax.tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
for axis in ['top','bottom','left','right']: # Adjust axis thickness.
ax.spines[axis].set_linewidth(PlotScripts.global_axiswidth)
#tick_locs = np.arange(6.0, 11.0)
#ax.set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
#tick_locs = np.arange(0.0, 0.80, 0.10)
#ax.set_yticklabels([r"$\mathbf{%.2f}$" % x for x in tick_locs],
# fontsize = PlotScripts.global_fontsize)
labels = ax.xaxis.get_ticklabels()
locs = ax.xaxis.get_ticklocs()
for label, loc in zip(labels, locs):
print("{0} {1}".format(label, loc))
leg = ax.legend(loc="upper right", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
## Array initialization ##
master_mean_Ngamma_fej, master_std_Ngamma_fej, master_N_Ngamma_fej, master_bin_middle_fej = \
collect_across_tasks(mean_Ngamma_fej, std_Ngamma_fej, N_fej,
SnapList, PlotSnapList, True, fej_low, fej_high,
fej_bin_width)
if rank == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
ax2 = ax1.twinx()
for model_number in range(0, len(SnapList)):
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
plot_count = 0
for snapshot_idx in range(0, len(SnapList[model_number])):
if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
label = model_tags[model_number]
w = np.where((master_N_Ngamma_fej[model_number][snapshot_idx] < 4))[0] # Mask bins with fewer than 4 galaxies so they aren't plotted.
master_mean_Ngamma_fej[model_number][snapshot_idx][w] = np.nan
ax1.plot(master_bin_middle_fej[model_number][snapshot_idx],
np.log10(master_mean_Ngamma_fej[model_number][snapshot_idx]*1.0e50),
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
#ax1.plot(master_bin_middle_fej[model_number][snapshot_idx],
# np.log10(master_mean_Ngamma_fej[model_number][snapshot_idx]*1.0e50
# * master_N_Ngamma_fej[model_number][snapshot_idx]),
# color = PlotScripts.colors[plot_count],
# ls = PlotScripts.linestyles[model_number],
# rasterized = True, label = label,
#lw = PlotScripts.global_linewidth)
'''
ax2.plot(master_bin_middle_fej[model_number][snapshot_idx],
np.log10(master_N_Ngamma_fej[model_number][snapshot_idx]),
color = PlotScripts.colors[plot_count],
ls = PlotScripts.linestyles[model_number],
rasterized = True, label = label,
lw = PlotScripts.global_linewidth)
'''
plot_count += 1
if (plot_count == len(PlotSnapList[model_number])):
break
adjust_plot(ax1)
leg = ax1.legend(loc="upper center", numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
plt.tight_layout()
## Output ##
outputFile = './%s%s' %(output_tag, output_format)
fig.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig)
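# Illustrative sketch of the bin centres that these binned statistics are
# tabulated at (cf. the explicit construction in plot_mvir_Ngamma below):
# edges run from `low` to `high` in steps of `width`, and each centre sits
# half a bin above its left edge.  Hypothetical helper:
def _bin_centres_sketch(low, high, width):
    import numpy as np
    return np.arange(low, high + width, width)[:-1] + width * 0.5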
def plot_ejectedfraction(SnapList, PlotSnapList, simulation_norm, mean_mvir_ejected,
std_mvir_ejected, N_ejected, mean_ejected_z,
std_ejected_z, N_z, model_tags, output_tag):
'''
Plots the ejected fraction as a function of the halo mass.
Parallel compatible.
Accepts a 3D array of the ejected fraction so we can plot for multiple models and redshifts.
Parameters
----------
SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots for each model.
mean_mvir_ejected, std_mvir_ejected, N_ejected : Nested 3-dimensional array, mean_mvir_ejected[model_number0][snapshot0] = [bin0_meanejected, ..., binN_meanejected], with length equal to the number of models.
Mean/Standard deviation for the ejected fraction binned into halo mass bins. N_ejected is the number of data points in each bin. Bounds are given by 'm_low' and 'm_high' in bins given by 'bin_width'.
mean_ejected_z, std_ejected_z, N_z : Nested arrays with length equal to the number of models.
Mean/Standard deviation of the ejected fraction across all galaxies at each snapshot (used for the redshift panel); N_z is the number of galaxies at each snapshot.
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
output_tag : string
Name of the file that will be generated.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
Halo Mass is in units of log10(Msun).
'''
print("Plotting the Ejected Fraction as a function of halo mass.")
master_mean_ejected_halo, master_std_ejected_halo, master_N_ejected_halo, master_bin_middle_halo = \
collect_across_tasks(mean_mvir_ejected, std_mvir_ejected, N_ejected, SnapList,
PlotSnapList, True, m_low, m_high)
master_mean_ejected_z, master_std_ejected_z, master_N_ejected_z, _ = \
collect_across_tasks(mean_ejected_z, std_ejected_z, N_z, SnapList)
if rank == 0:
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
fig2 = plt.figure()
ax2 = fig2.add_subplot(111)
for model_number in range(0, len(SnapList)):
if(simulation_norm[model_number] == 1):
cosmo = AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
cosmo = AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
cosmo = AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
cosmo = AllVars.Set_Params_Kali()
for snapshot_idx in range(0, len(PlotSnapList[model_number])):
label = AllVars.SnapZ[PlotSnapList[model_number][snapshot_idx]]
ax1.plot(master_bin_middle_halo[model_number][snapshot_idx],
master_mean_ejected_halo[model_number][snapshot_idx],
color = PlotScripts.colors[snapshot_idx],
linestyle = PlotScripts.linestyles[model_number],
label = label, lw = PlotScripts.global_linewidth)
ax2.plot((AllVars.t_BigBang - AllVars.Lookback_Time[SnapList[model_number]]) * 1.0e3,
master_mean_ejected_z[model_number],
color = PlotScripts.colors[model_number],
label = model_tags[model_number],
ls = PlotScripts.linestyles[model_number],
lw = PlotScripts.global_linewidth)
for model_number in range(0, len(SnapList)): # Just plot some garbage to get the legend labels correct.
ax1.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)
ax1.set_xlabel(r'$\log_{10}\ M_{\mathrm{vir}}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
ax1.set_ylabel(r'$\mathrm{Ejected \: Fraction}$', size = PlotScripts.global_fontsize)
ax1.set_xlim([8.0, 12])
ax1.set_ylim([-0.05, 1.0])
ax1.xaxis.set_minor_locator(mtick.MultipleLocator(0.1))
ax1.yaxis.set_minor_locator(mtick.MultipleLocator(0.025))
leg = ax1.legend(loc=1, numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
outputFile = "./{0}{1}".format(output_tag, output_format)
fig1.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close(fig1)
ax2.set_xlabel(r"$\mathbf{Time \: since \: Big \: Bang \: [Myr]}$", fontsize = PlotScripts.global_labelsize)
tick_locs = np.arange(200.0, 1000.0, 100.0)
tick_labels = [r"$\mathbf{%d}$" % x for x in tick_locs]
ax2.xaxis.set_major_locator(mtick.MultipleLocator(100))
ax2.set_xticklabels(tick_labels, fontsize = PlotScripts.global_fontsize)
ax2.set_xlim(PlotScripts.time_xlim)
ax2.set_ylabel(r'$\mathbf{Mean \: f_{ej}}$', fontsize = PlotScripts.global_labelsize)
ax3 = ax2.twiny()
t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3 # Corresponding Time values on the bottom.
z_labels = ["$\mathbf{%d}$" % x for x in PlotScripts.z_plot] # Properly Latex-ize the labels.
ax3.set_xlabel(r"$\mathbf{z}$", fontsize = PlotScripts.global_labelsize)
ax3.set_xlim(PlotScripts.time_xlim)
ax3.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax3.set_xticklabels(z_labels, fontsize = PlotScripts.global_fontsize) # But label them as redshifts.
leg = ax2.legend(loc='lower right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile2 = "./{0}_z{1}".format(output_tag, output_format)
fig2.savefig(outputFile2, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile2))
plt.close(fig2)
##
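# Illustrative sketch of the twin redshift axis built in
# plot_ejectedfraction() above: tick positions are times since the Big
# Bang corresponding to the redshifts in PlotScripts.z_plot, but they are
# labelled as redshifts.  Assumes the AllVars/PlotScripts modules and an
# astropy-style cosmology with lookback_time(), as used above:
def _add_redshift_axis_sketch(ax, cosmo):
    t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3  # Myr.
    ax_z = ax.twiny()
    ax_z.set_xlabel(r"$z$", fontsize=PlotScripts.global_labelsize)
    ax_z.set_xlim(PlotScripts.time_xlim)
    ax_z.set_xticks(t_plot)  # Ticks at the matching times...
    ax_z.set_xticklabels([r"$%d$" % z for z in PlotScripts.z_plot])  # ...labelled as redshifts.
    return ax_z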
def plot_mvir_fesc(SnapList, mass_central, fesc, model_tags, output_tag):
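"""
Plots the mean escape fraction as a function of halo virial mass for
multiple models and redshifts. Parallel compatible.
"""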
title = []
redshift_labels = []
mean_fesc_array = []
std_fesc_array = []
mean_halomass_array = []
std_halomass_array = []
bin_middle_array = []
for model_number in range(0, len(SnapList)):
redshift_labels.append([])
mean_fesc_array.append([])
std_fesc_array.append([])
mean_halomass_array.append([])
std_halomass_array.append([])
bin_middle_array.append([])
print("Plotting fesc against Mvir")
binwidth = 0.1
Frequency = 1
for model_number in range(0, len(SnapList)):
for snapshot_idx in range(0, len(SnapList[model_number])):
print("Doing Snapshot {0}".format(SnapList[model_number][snapshot_idx]))
tmp = 'z = %.2f' %(AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
redshift_labels[model_number].append(tmp)
minimum_mass = np.floor(min(mass_central[model_number][snapshot_idx])) - 10*binwidth
maximum_mass = np.floor(max(mass_central[model_number][snapshot_idx])) + 10*binwidth
minimum_mass = 6.0 # Overrides the per-task range computed above with a fixed binning range.
maximum_mass = 12.0
binning_minimum = comm.allreduce(minimum_mass, op = MPI.MIN)
binning_maximum = comm.allreduce(maximum_mass, op = MPI.MAX)
halomass_nonlog = [10**x for x in mass_central[model_number][snapshot_idx]]
(mean_fesc, std_fesc, N, bin_middle) = AllVars.Calculate_2D_Mean(mass_central[model_number][snapshot_idx], fesc[model_number][snapshot_idx], binwidth, binning_minimum, binning_maximum)
mean_fesc_array[model_number], std_fesc_array[model_number] = calculate_pooled_stats(mean_fesc_array[model_number], std_fesc_array[model_number], mean_fesc, std_fesc, N)
mean_halomass_array[model_number], std_halomass_array[model_number] = calculate_pooled_stats(mean_halomass_array[model_number], std_halomass_array[model_number], np.mean(halomass_nonlog), np.std(halomass_nonlog), len(mass_central[model_number][snapshot_idx]))
## If want to do mean/etc of halo mass need to update script. ##
bin_middle_array[model_number].append(bin_middle)
mean_halomass_array[model_number] = np.log10(mean_halomass_array[model_number])
if rank == 0:
f = plt.figure()
ax1 = plt.subplot(111)
for model_number in range(0, len(SnapList)):
for snapshot_idx in range(0, len(SnapList[model_number])):
if model_number == 0:
title = redshift_labels[model_number][snapshot_idx]
else:
title = ''
mean = mean_fesc_array[model_number][snapshot_idx]
std = std_fesc_array[model_number][snapshot_idx]
bin_middle = bin_middle_array[model_number][snapshot_idx]
ax1.plot(bin_middle, mean, color = PlotScripts.colors[snapshot_idx], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = title)
#ax1.scatter(mean_halomass_array[model_number][snapshot_idx], np.mean(~np.isnan(mean)), color = colors[snapshot_idx], marker = 'o', rasterized = True, s = 40, lw = 3)
if (len(SnapList) == 1):
ax1.fill_between(bin_middle, np.subtract(mean,std), np.add(mean,std), color = PlotScripts.colors[snapshot_idx], alpha = 0.25)
ax1.set_xlabel(r'$\log_{10}\ M_{\mathrm{vir}}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
ax1.set_ylabel(r'$f_\mathrm{esc}$', size = PlotScripts.global_fontsize)
#ax1.set_xlim([8.5, 12])
#ax1.set_ylim([0.0, 1.0])
ax1.xaxis.set_minor_locator(mtick.MultipleLocator(0.1))
# ax1.yaxis.set_minor_locator(mtick.MultipleLocator(0.1))
# ax1.set_yscale('log', nonposy='clip')
# for model_number in range(0, len(SnapList)):
# ax1.plot(1e100, 1e100, color = 'k', ls = linestyles[model_number], label = model_tags[model_number], rasterized=True)
leg = ax1.legend(loc='upper left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
outputFile = './' + output_tag + output_format
plt.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close()
##
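# Illustrative sketch of pooled statistics: combining per-task
# (mean, std, N) triples into a single estimate is presumably what
# calculate_pooled_stats(), used above, implements across MPI tasks.
# The standard formulas for a single bin, shown under that assumption:
def _pooled_mean_std_sketch(means, stds, Ns):
    import numpy as np
    means = np.asarray(means, dtype=np.float64)
    stds = np.asarray(stds, dtype=np.float64)
    Ns = np.asarray(Ns, dtype=np.float64)
    N_tot = Ns.sum()
    pooled_mean = np.sum(Ns * means) / N_tot
    # Pool the second moment, then variance = E[x^2] - E[x]^2.
    pooled_var = np.sum(Ns * (stds**2 + means**2)) / N_tot - pooled_mean**2
    return pooled_mean, np.sqrt(pooled_var)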
def plot_mvir_Ngamma(SnapList, mean_mvir_Ngamma, std_mvir_Ngamma, N_Ngamma, model_tags, output_tag,fesc_prescription=None, fesc_normalization=None, fitpath=None):
'''
Plots the number of ionizing photons (pure ngamma times fesc) as a function of halo mass.
Parallel compatible.
The input data has been binned as a function of halo virial mass (Mvir), with the bins defined at the top of the file (m_low, m_high, bin_width).
Accepts 3D arrays to plot ngamma for multiple models.
Parameters
----------
SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots for each model.
mean_mvir_Ngamma, std_mvir_Ngamma, N_Ngamma : Nested 2-dimensional array, mean_mvir_Ngamma[model_number0][snapshot0] = [bin0_meanNgamma, ..., binN_meanNgamma], with length equal to the number of bins.
Mean/Standard deviation/number of data points in each halo mass (Mvir) bin.
The number of photons is in units of 1.0e50 s^-1.
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
output_tag : string
Name of the file that will be generated.
fesc_prescription : int (optional)
If this parameter is defined, we will save the Mvir-Ngamma results in a text file (not needed if not saving).
Number that controls what escape fraction prescription was used to generate the escape fractions.
0 : Constant, fesc = Constant.
1 : Scaling with Halo Mass, fesc = A*Mh^B.
2 : Scaling with ejected fraction, fesc = fej*A + B.
fesc_normalization : float (if fesc_prescription == 0) or `numpy.ndarray' with length 2 (if fesc_prescription == 1 or == 2) (optional).
If this parameter is defined, we will save the Mvir-Ngamma results in a text file (not needed if not saving).
Parameter not needed if you're not saving the Mvir-Ngamma results.
If fesc_prescription == 0, gives the constant value for the escape fraction.
If fesc_prescription == 1 or == 2, gives A and B with the form [A, B].
fitpath : string (optional)
If this parameter is defined, we will save the Mvir-Ngamma results in a text file (not needed if not saving).
Defines the base path for where we are saving the results.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
Ngamma is in units of 1.0e50 s^-1.
'''
print("Plotting ngamma*fesc against the halo mass")
## Array initialization. ##
title = []
redshift_labels = []
mean_ngammafesc_array = []
std_ngammafesc_array = []
mean_halomass_array = []
std_halomass_array = []
bin_middle_array = []
for model_number in range(0, len(SnapList)):
redshift_labels.append([])
mean_ngammafesc_array.append([])
std_ngammafesc_array.append([])
mean_halomass_array.append([])
std_halomass_array.append([])
bin_middle_array.append([])
for model_number in range(0, len(SnapList)):
for snapshot_idx in range(0, len(SnapList[model_number])):
print("Doing Snapshot {0}".format(SnapList[model_number][snapshot_idx]))
tmp = 'z = %.2f' %(AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
redshift_labels[model_number].append(tmp)
N = N_Ngamma[model_number][snapshot_idx]
mean_ngammafesc_array[model_number], std_ngammafesc_array[model_number] = calculate_pooled_stats(mean_ngammafesc_array[model_number], std_ngammafesc_array[model_number], mean_mvir_Ngamma[model_number][snapshot_idx], std_mvir_Ngamma[model_number][snapshot_idx], N) # Collate the values from all processors.
bin_middle_array[model_number].append(np.arange(m_low, m_high+bin_width, bin_width)[:-1] + bin_width * 0.5)
if rank == 0:
f = plt.figure()
ax1 = plt.subplot(111)
for model_number in range(0, len(SnapList)):
count = 0
for snapshot_idx in range(0, len(SnapList[model_number])):
if model_number == 0:
title = redshift_labels[model_number][snapshot_idx]
else:
title = ''
mean = np.zeros((len(mean_ngammafesc_array[model_number][snapshot_idx])), dtype = np.float32)
std = np.zeros((len(mean_ngammafesc_array[model_number][snapshot_idx])), dtype=np.float32)
for i in range(0, len(mean)):
if(mean_ngammafesc_array[model_number][snapshot_idx][i] < 1e-10):
mean[i] = np.nan
std[i] = np.nan
else:
mean[i] = np.log10(mean_ngammafesc_array[model_number][snapshot_idx][i] * 1.0e50) # Remember that the input data is in units of 1.0e50 s^-1.
std[i] = 0.434 * std_ngammafesc_array[model_number][snapshot_idx][i] / mean_ngammafesc_array[model_number][snapshot_idx][i] # We're plotting in log space, where the standard deviation becomes 0.434*std/mean (1/ln(10) ~= 0.434).
bin_middle = bin_middle_array[model_number][snapshot_idx]
if (count < 4): # Only plot at most 4 lines.
ax1.plot(bin_middle, mean, color = PlotScripts.colors[snapshot_idx], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = title, linewidth = PlotScripts.global_linewidth)
count += 1
## In this block we save the Mvir-Ngamma results to a file. ##
if (fesc_prescription is None or fesc_normalization is None or fitpath is None):
raise ValueError("You've specified you want to save the Mvir-Ngamma results but haven't provided an escape fraction prescription, normalization and base path name")
# Note: All the checks that escape fraction normalization was written correctly were performed in 'calculate_fesc()', hence it will be correct by this point and we don't need to double check.
if (fesc_prescription[model_number] == 0): # Slightly different naming scheme for the constant case (it only has a float for fesc_normalization).
fname = "%s/fesc%d_%.3f_z%.3f.txt" %(fitpath, fesc_prescription[model_number], fesc_normalization[model_number], AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
elif (fesc_prescription[model_number] == 1 or fesc_prescription[model_number] == 2):
fname = "%s/fesc%d_A%.3eB%.3f_z%.3f.txt" %(fitpath, fesc_prescription[model_number], fesc_normalization[model_number][0], fesc_normalization[model_number][1], AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
f = open(fname, "w+")
if not os.access(fname, os.W_OK):
print("The filename is {0}".format(fname))
raise ValueError("Can't write to this file.")
for i in range(0, len(bin_middle)):
f.write("%.4f %.4f %.4f %d\n" %(bin_middle[i], mean[i], std[i], N_Ngamma[model_number][snapshot_idx][i]))
f.close()
print("Wrote successfully to file {0}".format(fname))
##
for model_number in range(0, len(SnapList)): # Just plot some garbage to get the legend labels correct.
ax1.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)
ax1.set_xlabel(r'$\log_{10}\ M_{\mathrm{vir}}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
ax1.set_ylabel(r'$\log_{10}\ \dot{N}_\gamma \: f_\mathrm{esc} \: [\mathrm{s}^{-1}]$', size = PlotScripts.global_fontsize)
ax1.set_xlim([8.5, 12])
ax1.xaxis.set_minor_locator(mtick.MultipleLocator(0.1))
leg = ax1.legend(loc='upper left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize('medium')
outputFile = './' + output_tag + output_format
plt.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close()
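# Illustrative note on the 0.434 factor used in plot_mvir_Ngamma() above:
# for y = log10(x), error propagation gives sigma_y = sigma_x / (x ln 10),
# and 1 / ln(10) ~= 0.434.  Minimal sketch:
def _std_to_dex_sketch(mean_linear, std_linear):
    return 0.434 * std_linear / mean_linear  # Standard deviation in dex.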
def bin_Simfast_halos(RedshiftList, SnapList, halopath, fitpath, fesc_prescription, fesc_normalization, GridSize, output_tag):
for model_number in range(0, len(fesc_prescription)):
for halo_z_idx in range(0, len(RedshiftList)):
snapshot_idx = min(range(len(SnapList)), key=lambda i: abs(SnapList[i]-RedshiftList[halo_z_idx])) # This finds the index of the simulation redshift that most closely matches the Halo redshift.
print("Binning Halo redshift {0}".format(RedshiftList[halo_z_idx]))
print("For the Halo redshift {0:.3f} the nearest simulation redshift is {1:.3f}".format(RedshiftList[halo_z_idx], SnapList[snapshot_idx]))
if (fesc_prescription[model_number] == 0):
fname = "%s/fesc%d_%.3f_z%.3f.txt" %(fitpath, fesc_prescription[model_number], fesc_normalization[model_number], AllVars.SnapZ[snapshot_idx])
elif (fesc_prescription[model_number] == 1 or fesc_prescription[model_number] == 2):
fname = "%s/fesc%d_A%.3eB%.3f_z%.3f.txt" %(fitpath, fesc_prescription[model_number], fesc_normalization[model_number][0], fesc_normalization[model_number][1], AllVars.SnapZ[snapshot_idx])
print("Reading in file {0}".format(fname))
## Here we read in the results from the Mvir-Ngamma binning. ##
f = open(fname, 'r')
fit_mvir, fit_mean, fit_std, fit_N = np.loadtxt(f, unpack = True)
f.close()
## Here we read in the halos created by Simfast21 ##
# The data file has the structure:
# long int N_halos
# Then an entry for each halo:
# float Mass
# float x, y, z positions.
# NOTE: The x,y,z positions are the grid indices but are still floats (because Simfast21 is weird like that).
Halodesc_full = [
('Halo_Mass', np.float32),
('Halo_x', np.float32),
('Halo_y', np.float32),
('Halo_z', np.float32)
]
names = [Halodesc_full[i][0] for i in range(len(Halodesc_full))]
formats = [Halodesc_full[i][1] for i in range(len(Halodesc_full))]
Halo_Desc = np.dtype({'names':names, 'formats':formats}, align=True)
fname = "%s/halonl_z%.3f_N%d_L100.0.dat.catalog" %(halopath, RedshiftList[halo_z_idx], GridSize)
f = open(fname, 'rb')
N_Halos = np.fromfile(f, count = 1, dtype = np.int64)[0] # The catalogue begins with a single long int giving the halo count.
Halos = np.fromfile(f, count = N_Halos, dtype = Halo_Desc)
binned_nion = np.zeros((GridSize*GridSize*GridSize), dtype = np.float32) # This grid will contain the ionizing photons that result from the binning.
binned_Halo_Mass = np.digitize(np.log10(Halos['Halo_Mass']), fit_mvir) # Places the Simfast21 halos into the correct halo mass bins defined by the Mvir-Ngamma results.
binned_Halo_Mass[binned_Halo_Mass == len(fit_mvir)] = len(fit_mvir) - 1 # Fixes up the edge case.
## For each halo we now assign it an ionizing flux. ##
# This flux is determined by drawing a random number from a normal distribution with mean and standard deviation given by the Mvir-Ngamma results.
# NOTE: Remember the Mvir-Ngamma results are in units of log10(s^-1).
fit_nan = 0
for i in range(0, N_Halos):
if(np.isnan(fit_mean[binned_Halo_Mass[i]]) == True or np.isnan(fit_std[binned_Halo_Mass[i]]) == True): # This halo had mass that was not covered by the Mvir-Ngamma fits.
fit_nan += 1
continue
nion_halo = np.random.normal(fit_mean[binned_Halo_Mass[i]], fit_std[binned_Halo_Mass[i]])
## Because of how Simfast21 does their binning, we have some cases where the Halos are technically outside the box. Just fix them up. ##
x_grid = int(Halos['Halo_x'][i])
if x_grid >= GridSize:
x_grid = GridSize - 1
if x_grid < 0:
x_grid = 0
y_grid = int(Halos['Halo_y'][i])
if y_grid >= GridSize:
y_grid = GridSize - 1
if y_grid < 0:
y_grid = 0
z_grid = int(Halos['Halo_z'][i])
if z_grid >= GridSize:
z_grid = GridSize - 1
if z_grid < 0:
z_grid = 0
idx = x_grid * GridSize*GridSize + y_grid * GridSize + z_grid
binned_nion[idx] += pow(10, nion_halo)/1.0e50
# print"We had %d halos (out of %d, so %.4f fraction) that had halo mass that was not covered by the Mvir-Ngamma results." %(fit_nan, N_Halos, float(fit_nan)/float(N_Halos))
# print "There were %d cells with a non-zero ionizing flux." %(len(binned_nion[binned_nion != 0]))
binned_nion = binned_nion.reshape((GridSize,GridSize,GridSize))
cut_slice = 0
cut_width = 512
nion_slice = binned_nion[:,:, cut_slice:cut_slice+cut_width].mean(axis=-1)*1.0e50
ax1 = plt.subplot(211)
im = ax1.imshow(np.log10(nion_slice), interpolation='bilinear', origin='lower', extent =[0,AllVars.BoxSize,0,AllVars.BoxSize], cmap = 'Purples', vmin = 48, vmax = 53)
cbar = plt.colorbar(im, ax = ax1)
cbar.set_label(r'$\mathrm{log}_{10}N_{\gamma} [\mathrm{s}^{-1}]$')
ax1.set_xlabel(r'$\mathrm{x} (h^{-1}Mpc)$')
ax1.set_ylabel(r'$\mathrm{y} (h^{-1}Mpc)$')
ax1.set_xlim([0.0, AllVars.BoxSize])
ax1.set_ylim([0.0, AllVars.BoxSize])
title = r"$z = %.3f$" %(RedshiftList[halo_z_idx])
ax1.set_title(title)
ax2 = plt.subplot(212)
w = np.where((Halos['Halo_z'][:] > cut_slice) & (Halos['Halo_z'][:] <= cut_slice + cut_width))[0]
x_plot = Halos['Halo_x'] * float(AllVars.BoxSize)/float(GridSize)
y_plot = Halos['Halo_y'] * float(AllVars.BoxSize)/float(GridSize)
z_plot = Halos['Halo_z'][w] * float(AllVars.BoxSize)/float(GridSize)
ax2.scatter(x_plot[w], y_plot[w], s = 2, alpha = 0.5)
ax2.set_xlabel(r'$\mathrm{x} (h^{-1}Mpc)$')
ax2.set_ylabel(r'$\mathrm{y} (h^{-1}Mpc)$')
ax2.set_xlim([0.0, AllVars.BoxSize])
ax2.set_ylim([0.0, AllVars.BoxSize])
tmp = "z%.3f" %(RedshiftList[halo_z_idx])
plt.tight_layout()
outputFile = './' + output_tag + tmp + output_format
plt.savefig(outputFile) # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close()
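# Illustrative sketch of the grid flattening used in bin_Simfast_halos()
# above: cell (x, y, z) maps to the flat index x*G*G + y*G + z (C-ordered
# raveling), which is why the nion grid can later be reshaped to (G, G, G).
def _flat_grid_index_sketch(x_grid, y_grid, z_grid, GridSize):
    import numpy as np
    idx = x_grid * GridSize * GridSize + y_grid * GridSize + z_grid
    # Sanity check of the ordering against numpy's own raveling.
    assert idx == np.ravel_multi_index((x_grid, y_grid, z_grid),
                                       (GridSize, GridSize, GridSize))
    return idx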
def plot_photoncount(SnapList, sum_nion, simulation_norm, FirstFile, LastFile, NumFiles, model_tags, output_tag):
'''
Plots the ionizing emissivity as a function of redshift.
The emissivity is normalized to a comoving Mpc^-3 volume; this function accounts for reading in only a subset of the simulation files.
Parallel compatible.
Parameters
----------
SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots for each model, defines the x-axis we plot against.
sum_nion : Nested 1-dimensional array, sum_nion[model_number0] = [nion_snap0, ..., nion_snapN], with length equal to the number of models.
Number of escaping ionizing photons (i.e., the photon rate times the local escape fraction) at each snapshot.
In units of 1.0e50 s^-1.
simulation_norm : array of ints with length equal to the number of models.
Denotes which simulation each model uses.
0 : MySim
1 : Mini-Millennium
2 : Tiamat (down to z = 5)
3 : Extended Tiamat (down to z = 1.6ish).
4 : Britton's Simulation
FirstFile, LastFile, NumFiles : arrays of integers with length equal to the number of models.
The file numbers for each model that were read in (defined by the range between [FirstFile, LastFile] inclusive) and the TOTAL number of files for this model (we may only be plotting a subset of the volume).
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
output_tag : string
Name of the file that will be generated.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
sum_nion is in units of 1.0e50 s^-1.
'''
print("Plotting the ionizing emissivity.")
sum_array = []
for model_number in range(0, len(SnapList)):
if(simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif(simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
else:
print("Simulation norm was set to {0}.".format(simulation_norm[model_number]))
raise ValueError("This option has been implemented yet. Get your head in the game Jacob!")
sum_array.append([])
for snapshot_idx in range(0, len(SnapList[model_number])):
nion_sum_snapshot = comm.reduce(sum_nion[model_number][snapshot_idx], op = MPI.SUM, root = 0)
if rank == 0:
sum_array[model_number].append(nion_sum_snapshot * 1.0e50 / (pow(AllVars.BoxSize / AllVars.Hubble_h,3) * (float(LastFile[model_number] - FirstFile[model_number] + 1) / float(NumFiles[model_number]))))
if (rank == 0):
ax1 = plt.subplot(111)
for model_number in range(0, len(SnapList)):
if(simulation_norm[model_number] == 0):
cosmo = AllVars.Set_Params_Mysim()
elif(simulation_norm[model_number] == 1):
cosmo = AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
cosmo = AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
cosmo = AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
cosmo = AllVars.Set_Params_Kali()
else:
print("Simulation norm was set to {0}.".format(simulation_norm[model_number]))
raise ValueError("This option has been implemented yet. Get your head in the game Jacob!")
t = np.empty(len(SnapList[model_number]))
for snapshot_idx in range(0, len(SnapList[model_number])):
t[snapshot_idx] = (AllVars.t_BigBang - cosmo.lookback_time(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]).value) * 1.0e3
t = [t for t, N in zip(t, sum_array[model_number]) if N > 1.0]
sum_array[model_number] = [x for x in sum_array[model_number] if x > 1.0]
print("The total number of ionizing photons for model {0} is {1} s^1 Mpc^-3".format(model_number, sum(sum_array[model_number])))
print(np.log10(sum_array[model_number]))
ax1.plot(t, np.log10(sum_array[model_number]), color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[model_number], label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)
#ax1.fill_between(t, np.subtract(mean,std), np.add(mean,std), color = colors[model_number], alpha = 0.25)
ax1.xaxis.set_minor_locator(mtick.MultipleLocator(PlotScripts.time_tickinterval))
#ax1.yaxis.set_minor_locator(mtick.MultipleLocator(0.025))
ax1.set_xlim(PlotScripts.time_xlim)
ax1.set_ylim([48.5, 51.5])
ax2 = ax1.twiny()
t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3 # Corresponding Time values on the bottom.
z_labels = ["$%d$" % x for x in PlotScripts.z_plot] # Properly Latex-ize the labels.
ax2.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax2.set_xlim(PlotScripts.time_xlim)
ax2.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax2.set_xticklabels(z_labels) # But label them as redshifts.
ax1.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_fontsize)
ax1.set_ylabel(r'$\sum f_\mathrm{esc}\dot{N}_\gamma \: [\mathrm{s}^{-1}\mathrm{Mpc}^{-3}]$', fontsize = PlotScripts.global_fontsize)
plot_time = 1
bouwens_z = np.arange(6,16) # Redshift range for the observations.
bouwens_t = (AllVars.t_BigBang - cosmo.lookback_time(bouwens_z).value) * 1.0e3 # Corresponding values for what we will plot on the x-axis.
bouwens_1sigma_lower = [50.81, 50.73, 50.60, 50.41, 50.21, 50.00, 49.80, 49.60, 49.39, 49.18] # 68% confidence intervals for the ionizing emissivity from Bouwens et al. (2015).
bouwens_1sigma_upper = [51.04, 50.85, 50.71, 50.62, 50.56, 50.49, 50.43, 50.36, 50.29, 50.23]
bouwens_2sigma_lower = [50.72, 50.69, 50.52, 50.27, 50.01, 49.75, 49.51, 49.24, 48.99, 48.74] # 95% CI.
bouwens_2sigma_upper = [51.11, 50.90, 50.74, 50.69, 50.66, 50.64, 50.61, 50.59, 50.57, 50.55]
if plot_time == 1:
ax1.fill_between(bouwens_t, bouwens_1sigma_lower, bouwens_1sigma_upper, color = 'k', alpha = 0.2)
ax1.fill_between(bouwens_t, bouwens_2sigma_lower, bouwens_2sigma_upper, color = 'k', alpha = 0.4, label = r"$\mathrm{Bouwens \: et \: al. \: (2015)}$")
else:
ax1.fill_between(bouwens_z, bouwens_1sigma_lower, bouwens_1sigma_upper, color = 'k', alpha = 0.2)
ax1.fill_between(bouwens_z, bouwens_2sigma_lower, bouwens_2sigma_upper, color = 'k', alpha = 0.4, label = r"$\mathrm{Bouwens \: et \: al. \: (2015)}$")
# ax1.text(0.075, 0.965, '(a)', horizontalalignment='center', verticalalignment='center', transform = ax.transAxes)
ax1.text(350, 50.0, r"$68\%$", horizontalalignment='center', verticalalignment = 'center', fontsize = PlotScripts.global_labelsize)
ax1.text(350, 50.8, r"$95\%$", horizontalalignment='center', verticalalignment = 'center', fontsize = PlotScripts.global_labelsize)
leg = ax1.legend(loc='lower right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
plt.tight_layout()
outputFile = './{0}{1}'.format(output_tag, output_format)
plt.savefig(outputFile) # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close()
##
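# Illustrative sketch of the volume normalization applied in
# plot_photoncount() above: the summed photon rate (stored in units of
# 1.0e50 s^-1) is converted to a comoving number density and scaled by
# the fraction of simulation files actually read.  Assumes the AllVars
# module imported at the top of this script:
def _normalize_emissivity_sketch(nion_sum, first_file, last_file, num_files):
    volume = pow(AllVars.BoxSize / AllVars.Hubble_h, 3)  # Comoving volume in Mpc^3 (h removed).
    file_fraction = float(last_file - first_file + 1) / float(num_files)
    return nion_sum * 1.0e50 / (volume * file_fraction)  # Photons s^-1 Mpc^-3.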
def plot_singleSFR(galaxies_filepath_array, merged_galaxies_filepath_array, number_snapshots, simulation_norm, model_tags, output_tag):
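"""
Plots the star formation, outflow and infall histories for an ensemble of
galaxies, together with tracks for individual galaxies selected by halo number.
"""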
SFR_gal = []
SFR_ensemble = []
ejected_gal = []
ejected_ensemble = []
infall_gal = []
infall_ensemble = []
ejectedmass_gal = []
ejectedmass_ensemble = []
N_random = 1
ax1 = plt.subplot(111)
# ax3 = plt.subplot(122)
#ax5 = plt.subplot(133)
look_for_alive = 1
#idx_array = [20004, 20005, 20016]
#halonr_array = [7381]
halonr_array = [389106]
#halonr_array = [36885]
for model_number in range(0, len(model_tags)):
if(simulation_norm[model_number] == 0):
cosmo = AllVars.Set_Params_Mysim()
elif(simulation_norm[model_number] == 1):
cosmo = AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
cosmo = AllVars.Set_Params_Tiamat_extended()
else:
print("Simulation norm was set to {0}.".format(simulation_norm[model_number]))
raise ValueError("This option has been implemented yet. Get your head in the game Jacob!")
SFR_gal.append([])
SFR_ensemble.append([])
ejected_gal.append([])
ejected_ensemble.append([])
infall_gal.append([])
infall_ensemble.append([])
ejectedmass_gal.append([])
ejectedmass_ensemble.append([])
GG, Gal_Desc = ReadScripts.ReadGals_SAGE_DelayedSN(galaxies_filepath_array[model_number], 0, number_snapshots[model_number], comm) # Read in the correct galaxy file.
G_Merged, Merged_Desc = ReadScripts.ReadGals_SAGE_DelayedSN(merged_galaxies_filepath_array[model_number], 0, number_snapshots[model_number], comm) # Also need the merged galaxies.
G = ReadScripts.Join_Arrays(GG, G_Merged, Gal_Desc) # Then join them together for all galaxies that existed at this Redshift.
if look_for_alive == 1:
G.GridHistory[G.GridHistory >= 0] = 1
G.GridHistory[G.GridHistory < 0] = 0
alive = np.sum(G.GridHistory, axis = 1)
# print "The galaxy that was present in the most snapshots is %d which was in %d snaps" %(np.argmax(alive), np.amax(alive))
most_alive = alive.argsort()[-10:][::-1] # Finds the 10 galaxies alive for the most snapshots. Taken from https://stackoverflow.com/questions/6910641/how-to-get-indices-of-n-maximum-values-in-a-numpy-array
# print G.HaloNr[most_alive]
t = np.empty((number_snapshots[model_number]))
for snapshot_idx in range(0, number_snapshots[model_number]):
w = np.where((G.GridHistory[:, snapshot_idx] != -1) & (G.GridStellarMass[:, snapshot_idx] > 0.0) & (G.GridStellarMass[:, snapshot_idx] < 1e5) & (G.GridFoFMass[:, snapshot_idx] >= m_low_SAGE) & (G.GridFoFMass[:, snapshot_idx] <= m_high_SAGE))[0] # Only include those galaxies that existed at the current snapshot, had positive (but not infinite) stellar/Halo mass and Star formation rate.
SFR_ensemble[model_number].append(np.mean(G.GridSFR[w,snapshot_idx]))
ejected_ensemble[model_number].append(np.mean(G.GridOutflowRate[w, snapshot_idx]))
infall_ensemble[model_number].append(np.mean(G.GridInfallRate[w, snapshot_idx]))
t[snapshot_idx] = (AllVars.t_BigBang - cosmo.lookback_time(AllVars.SnapZ[snapshot_idx]).value) * 1.0e3
for p in range(0, N_random):
random_idx = (np.where((G.HaloNr == halonr_array[p]))[0])[0]
SFR_gal[model_number].append(G.GridSFR[random_idx]) # Remember the star formation rate history of the galaxy.
ejected_gal[model_number].append(G.GridOutflowRate[random_idx])
infall_gal[model_number].append(G.GridInfallRate[random_idx])
ejectedmass_gal[model_number].append(G.GridEjectedMass[random_idx])
#SFR_gal[model_number][p][SFR_gal[model_number][p] < 1.0e-15] = 1
for snapshot_idx in range(0, number_snapshots[model_number]):
if snapshot_idx == 0:
pass
elif(G.GridHistory[random_idx, snapshot_idx] == -1):
SFR_gal[model_number][p][snapshot_idx] = SFR_gal[model_number][p][snapshot_idx - 1]
# SFR_ensemble[model_number] = np.nan_to_num(SFR_ensemble[model_number])
# SFR_ensemble[model_number][SFR_ensemble[model_number] < 1.0e-15] = 1
# ejected_ensemble[model_number][ejected_ensemble[model_number] < 1.0e-15] = 1
ax1.plot(t, SFR_ensemble[model_number], color = PlotScripts.colors[0], linestyle = PlotScripts.linestyles[model_number], label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)
ax1.plot(t, ejected_ensemble[model_number], color = PlotScripts.colors[1], linestyle = PlotScripts.linestyles[model_number], linewidth = PlotScripts.global_linewidth, alpha = 1.0)
#ax5.plot(t, infall_ensemble[model_number], color = PlotScripts.colors[2], linestyle = PlotScripts.linestyles[model_number], linewidth = PlotScripts.global_linewidth, alpha = 1.0)
#ax5.plot(t, ejectedmass_ensemble[model_number], color = PlotScripts.colors[2], linestyle = PlotScripts.linestyles[model_number], linewidth = PlotScripts.global_linewidth, alpha = 1.0)
for p in range(0, N_random):
ax1.plot(t, SFR_gal[model_number][p], color = PlotScripts.colors[0], linestyle = PlotScripts.linestyles[model_number], alpha = 0.5, linewidth = 1)
ax1.plot(t, ejected_gal[model_number][p], color = PlotScripts.colors[1], linestyle = PlotScripts.linestyles[model_number], alpha = 0.5, linewidth = 1)
#ax5.plot(t, infall_gal[model_number][p], color = PlotScripts.colors[2], linestyle = PlotScripts.linestyles[model_number], alpha = 0.5, linewidth = 1)
#ax5.plot(t, ejectedmass_gal[model_number][p], color = PlotScripts.colors[2], linestyle = PlotScripts.linestyles[model_number], alpha = 0.5, linewidth = 1)
#ax1.plot(t, SFR_gal[model_number][p], color = PlotScripts.colors[0], linestyle = PlotScripts.linestyles[model_number], alpha = 1.0, linewidth = 1, label = model_tags[model_number])
#ax1.plot(t, ejected_gal[model_number][p], color = PlotScripts.colors[1], linestyle = PlotScripts.linestyles[model_number], alpha = 1.0, linewidth = 1, label = model_tags[model_number])
ax1.plot(np.nan, np.nan, color = 'r', linestyle = '-', label = "SFR")
ax1.plot(np.nan, np.nan, color = 'b', linestyle = '-', label = "Outflow")
# exit()
#ax1.plot(np.nan, np.nan, color = PlotScripts.colors[0], label = 'SFR')
#ax1.plot(np.nan, np.nan, color = PlotScripts.colors[1], label = 'Outflow')
ax1.set_yscale('log', nonposy='clip')
ax1.set_ylabel(r"$\mathrm{Mass \: Flow} \: [\mathrm{M}_\odot \mathrm{yr}^{-1}]$")
ax1.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_fontsize)
ax1.set_xlim(PlotScripts.time_xlim)
ax1.set_ylim([1e-6, 1e3])
'''
ax3.set_yscale('log', nonposy='clip')
ax3.set_ylabel(r"$\mathrm{Outflow \: Rate} \: [\mathrm{M}_\odot \mathrm{yr}^{-1}]$")
ax3.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_fontsize)
ax3.set_xlim(PlotScripts.time_xlim)
ax3.set_ylim([1e-8, 1e3])
ax5.set_yscale('log', nonposy='clip')
#ax5.set_ylabel(r"$\mathrm{Infall \: Rate} \: [\mathrm{M}_\odot \mathrm{yr}^{-1}]$")
ax5.set_ylabel(r"$\mathrm{Ejected Mass} [\mathrm{M}_\odot]$")
ax5.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_fontsize)
ax5.set_xlim(PlotScripts.time_xlim)
#ax5.set_ylim([1e-8, 1e3])
ax5.set_ylim([1e6, 1e10])
'''
ax2 = ax1.twiny()
#ax4 = ax3.twiny()
#ax6 = ax5.twiny()
t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3 # Corresponding Time values on the bottom.
z_labels = ["$%d$" % x for x in PlotScripts.z_plot] # Properly Latex-ize the labels.
ax2.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax2.set_xlim(PlotScripts.time_xlim)
ax2.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax2.set_xticklabels(z_labels) # But label them as redshifts.
'''
ax4.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax4.set_xlim(PlotScripts.time_xlim)
ax4.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax4.set_xticklabels(z_labels) # But label them as redshifts.
ax6.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax6.set_xlim(PlotScripts.time_xlim)
ax6.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax6.set_xticklabels(z_labels) # But label them as redshifts.
'''
plt.tight_layout()
leg = ax1.legend(loc='lower right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile = './Halo%d_mlow%.2f_%s%s' %(halonr_array[0], m_low_SAGE, output_tag, output_format)
plt.savefig(outputFile, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile))
plt.close()
##
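# plot_singleSFR() above carries the previous SFR forward, snapshot by
# snapshot, whenever a galaxy drops out of the trees (GridHistory == -1).
# Below is a hedged, vectorized NumPy equivalent of that loop; it is an
# illustrative sketch only and is not called anywhere in this script.
def forward_fill_missing(values, history):
    '''Replace entries of `values` where `history` == -1 with the most
    recent valid entry (leading invalid entries keep element 0, matching
    the behaviour of the explicit loop above).'''
    values = np.asarray(values, dtype=float).copy()
    history = np.asarray(history)
    valid = history != -1
    # Index of the most recent valid snapshot at each position (0 if none yet).
    idx = np.maximum.accumulate(np.where(valid, np.arange(len(values)), 0))
    return values[idx]
##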
def plot_quasars_count(SnapList, PlotList, N_quasars_z, N_quasars_boost_z, N_gal_z, mean_quasar_activity, std_quasar_activity, N_halo, N_merger_halo, N_gal, N_merger_galaxy, fesc_prescription, simulation_norm, FirstFile, LastFile, NumFile, model_tags, output_tag):
'''
Parameters
----------
SnapList : Nested 'array-like` of ints, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
Snapshots that we plot the quasar density at for each model.
PlotList : Nested array of ints, PlotList[model_number0]= [plotsnapshot0_model0, ..., plotsnapshotN_model0], with length equal to the number of models.
Snapshots that will be plotted for the quasar activity as a function of halo mass.
N_quasars_z : Nested array of floats, N_quasars_z[model_number0] = [N_quasars_z0, N_quasars_z1, ..., N_quasars_zN]. Outer array has length equal to the number of models, inner array has length equal to length of the model's SnapList.
Number of quasars, THAT WENT OFF, during the given redshift.
N_quasars_boost_z : Nested array of floats, N_quasars_boost_z[model_number0] = [N_quasars_boost_z0, N_quasars_boost_z1, ..., N_quasars_boost_zN]. Outer array has length equal to the number of models, inner array has length equal to length of the model's SnapList.
Number of galaxies that had their escape fraction boosted by quasar activity.
N_gal_z : Nested array of floats, N_gal_z[model_number0] = [N_gal_z0, N_gal_z1, ..., N_gal_zN]. Outer array has length equal to the number of models, inner array has length equal to length of the model's SnapList.
Number of galaxies at each redshift.
mean_quasar_activity, std_quasar_activity : Nested 2-dimensional array of floats, mean_quasar_activity[model_number0][snapshot0] = [bin0quasar_activity, ..., binNquasar_activity]. Outer array has length equal to the number of models, inner array has length equal to the length of the model's snaplist and most inner array has length equal to the number of halo bins (NB).
Mean/std fraction of galaxies that had quasar go off during each snapshot as a function of halo mass.
NOTE : This is for quasars going off, not for galaxies that have their escape fraction being boosted.
fesc_prescription : Array with length equal to the number of models.
Denotes what escape fraction prescription each model used. Quasars are only tracked when fesc_prescription == 3.
simulation_norm : array with length equal to the number of models.
Denotes which simulation each model uses.
0 : MySim
1 : Mini-Millennium
2 : Tiamat (down to z = 5)
3 : Extended Tiamat (down to z = 1.6ish).
4 : Britton's Simulation
5 : Kali
FirstFile, LastFile, NumFile : array of integers with length equal to the number of models.
The file numbers for each model that were read in (defined by the range between [FirstFile, LastFile] inclusive) and the TOTAL number of files for this model (we may only be plotting a subset of the volume).
model_tags : array of strings with length equal to the number of models.
Strings that contain the tag for each model. Will be placed on the plot.
output_tag : string
Name of the file that will be generated. File will be saved in the current directory with the output format defined by the 'output_format' variable at the beginning of the file.
Returns
-------
No returns.
Generates and saves the plot (named via output_tag).
Units
-----
No relevant units.
'''
print("Plotting quasar count/density")
if rank == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
ax6 = ax1.twinx()
fig2 = plt.figure()
ax3 = fig2.add_subplot(111)
ax5 = ax3.twinx()
fig3 = plt.figure()
ax7 = fig3.add_subplot(111)
fig4 = plt.figure()
ax50 = fig4.add_subplot(111)
fig5 = plt.figure()
ax55 = fig5.add_subplot(111)
fig6 = plt.figure()
ax56 = fig6.add_subplot(111)
mean_quasar_activity_array = []
std_quasar_activity_array = []
N_quasar_activity_array = []
N_gal_halo_array = []
N_gal_array = []
merger_counts_halo_array = []
merger_counts_galaxy_array = []
bin_middle_halo_array = []
bin_middle_galaxy_array = []
for model_number in range(0, len(SnapList)): # Does this for each of the models.
if (fesc_prescription[model_number] != 3): # Want to skip the models that didn't count quasars.
continue
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif (simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
mean_quasar_activity_array.append([])
std_quasar_activity_array.append([])
N_quasar_activity_array.append([])
N_gal_halo_array.append([])
N_gal_array.append([])
merger_counts_halo_array.append([])
merger_counts_galaxy_array.append([])
bin_middle_halo_array.append([])
bin_middle_galaxy_array.append([])
box_factor = (LastFile[model_number] - FirstFile[model_number] + 1.0)/(NumFile[model_number]) # This factor allows us to take a sub-volume of the box and scale the results to represent the entire box.
print("We are plotting the quasar density using {0:.4f} of the box's volume.".format(box_factor))
norm = pow(AllVars.BoxSize,3) / pow(AllVars.Hubble_h, 3) * box_factor
####
## We perform the plotting on Rank 0 so only this rank requires the final counts array. ##
if rank == 0:
quasars_total = np.zeros_like((N_quasars_z[model_number]))
boost_total = np.zeros_like(N_quasars_boost_z[model_number])
gal_count_total = np.zeros_like(N_gal_z[model_number])
else:
quasars_total = None
boost_total = None
gal_count_total = None
N_quasars_tmp = np.array((N_quasars_z[model_number])) # So we can use MPI.Reduce()
comm.Reduce([N_quasars_tmp, MPI.DOUBLE], [quasars_total, MPI.DOUBLE], op = MPI.SUM, root = 0) # Sum the number of quasars and passes back to rank 0.
N_quasars_boost_tmp = np.array(N_quasars_boost_z[model_number]) # So we can use MPI.Reduce()
comm.Reduce([N_quasars_boost_tmp, MPI.DOUBLE], [boost_total, MPI.DOUBLE], op = MPI.SUM, root = 0) # Sum the number of galaxies that had their fesc boosted.
N_gal_tmp = np.array(N_gal_z[model_number]) # So we can use MPI.Reduce()
comm.Reduce([N_gal_tmp, MPI.DOUBLE], [gal_count_total, MPI.DOUBLE], op = MPI.SUM, root = 0) # Sum the number of total galaxies.
for snapshot_idx in range(len(SnapList[model_number])):
mean_quasar_activity_array[model_number], std_quasar_activity_array[model_number], N_quasar_activity_array[model_number] = calculate_pooled_stats(mean_quasar_activity_array[model_number], std_quasar_activity_array[model_number], N_quasar_activity_array[model_number], mean_quasar_activity[model_number][snapshot_idx], std_quasar_activity[model_number][snapshot_idx], N_halo[model_number][snapshot_idx])
if rank == 0:
merger_count_halo_total = np.zeros_like((N_merger_halo[model_number][snapshot_idx]))
N_gal_halo_total = np.zeros_like((N_halo[model_number][snapshot_idx]))
merger_count_galaxy_total = np.zeros_like((N_merger_galaxy[model_number][snapshot_idx]))
N_gal_total = np.zeros_like((N_gal[model_number][snapshot_idx]))
else:
merger_count_halo_total = None
N_gal_halo_total = None
merger_count_galaxy_total = None
N_gal_total = None
comm.Reduce([N_merger_halo[model_number][snapshot_idx], MPI.FLOAT], [merger_count_halo_total, MPI.FLOAT], op = MPI.SUM, root = 0) # Sum the halo merger counts and pass to Rank 0.
comm.Reduce([N_halo[model_number][snapshot_idx], MPI.FLOAT], [N_gal_halo_total, MPI.FLOAT], op = MPI.SUM, root = 0) # Sum the per-halo-bin galaxy counts and pass to Rank 0.
comm.Reduce([N_merger_galaxy[model_number][snapshot_idx], MPI.FLOAT], [merger_count_galaxy_total, MPI.FLOAT], op = MPI.SUM, root = 0) # Sum the galaxy merger counts and pass to Rank 0.
comm.Reduce([N_gal[model_number][snapshot_idx], MPI.FLOAT], [N_gal_total, MPI.FLOAT], op = MPI.SUM, root = 0) # Sum the per-stellar-mass-bin galaxy counts and pass to Rank 0.
if rank == 0:
merger_counts_halo_array[model_number].append(merger_count_halo_total)
N_gal_halo_array[model_number].append(N_gal_halo_total)
merger_counts_galaxy_array[model_number].append(merger_count_galaxy_total)
N_gal_array[model_number].append(N_gal_total)
bin_middle_halo_array[model_number].append(np.arange(m_low, m_high+bin_width, bin_width)[:-1] + bin_width * 0.5)
bin_middle_galaxy_array[model_number].append(np.arange(m_gal_low, m_gal_high+bin_width, bin_width)[:-1] + bin_width * 0.5)
if rank == 0:
plot_count = 0
stop_plot = 0
title = model_tags[model_number]
t = np.empty(len(SnapList[model_number]))
ZZ = np.empty(len(SnapList[model_number]))
for snapshot_idx in range(0, len(SnapList[model_number])):
t[snapshot_idx] = (AllVars.t_BigBang - AllVars.Lookback_Time[SnapList[model_number][snapshot_idx]]) * 1.0e3
ZZ[snapshot_idx] = AllVars.SnapZ[SnapList[model_number][snapshot_idx]]
if (stop_plot == 0):
# print("Snapshot {0} PlotSnapshot "
#"{1}".format(SnapList[model_number][snapshot_idx], PlotList[model_number][plot_count]))
if (SnapList[model_number][snapshot_idx] == PlotList[model_number][plot_count]):
label = "z = {0:.2f}".format(AllVars.SnapZ[PlotList[model_number][plot_count]])
ax7.plot(bin_middle_halo_array[model_number][snapshot_idx], mean_quasar_activity_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
#ax50.plot(bin_middle_halo_array[model_number][snapshot_idx], merger_counts_array[model_number][snapshot_idx] / gal_count_total[snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
ax50.plot(bin_middle_halo_array[model_number][snapshot_idx], merger_counts_halo_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
#ax50.plot(bin_middle_halo_array[model_number][snapshot_idx], merger_counts_array[model_number][snapshot_idx] / N_gal_halo_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
#ax55.plot(bin_middle_galaxy_array[model_number][snapshot_idx], merger_counts_galaxy_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
ax55.plot(bin_middle_galaxy_array[model_number][snapshot_idx],
merger_counts_galaxy_array[model_number][snapshot_idx] / N_gal_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
print("plot_count = {0} len(PlotList) = {1}".format(plot_count,
len(PlotList[model_number])))
plot_count += 1
print("plot_count = {0} len(PlotList) = {1}".format(plot_count,
len(PlotList[model_number])))
if (plot_count == len(PlotList[model_number])):
stop_plot = 1
print("For Snapshot {0} at t {3} there were {1} total mergers compared to {2} total galaxies.".format(snapshot_idx, np.sum(merger_counts_galaxy_array[model_number][snapshot_idx]), np.sum(gal_count_total[snapshot_idx]), t[snapshot_idx]))
if (np.sum(gal_count_total[snapshot_idx]) > 0.0 and np.sum(merger_counts_galaxy_array[model_number][snapshot_idx]) > 0.0):
ax56.scatter(t[snapshot_idx], np.sum(merger_counts_galaxy_array[model_number][snapshot_idx]) / np.sum(gal_count_total[snapshot_idx]), color = 'r', rasterized = True)
#ax56.scatter(t[snapshot_idx], quasars_total[snapshot_idx] / np.sum(gal_count_total[snapshot_idx]), color = 'r', rasterized = True)
ax1.plot(t, quasars_total / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[0], rasterized = True, linewidth = PlotScripts.global_linewidth)
p = np.where((ZZ < 15))[0]
#ax1.plot(ZZ[p], quasars_total[p] / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[0], rasterized = True, linewidth = PlotScripts.global_linewidth)
ax3.plot(t, boost_total, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[0], rasterized = True, label = title, linewidth = PlotScripts.global_linewidth)
w = np.where((gal_count_total > 0.0))[0] # Since we're doing a division, need to only plot those redshifts that actually have galaxies.
ax5.plot(t[w], np.divide(boost_total[w], gal_count_total[w]), color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[1], rasterized = True, linewidth = PlotScripts.global_linewidth)
ax6.plot(t[w], gal_count_total[w] / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[1], rasterized = True, linewidth = PlotScripts.global_linewidth)
#ax6.plot(ZZ[p], gal_count_total[p] / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[1], rasterized = True, linewidth = PlotScripts.global_linewidth)
ax1.plot(np.nan, np.nan, color = PlotScripts.colors[0], linestyle = PlotScripts.linestyles[0], label = "Quasar Ejection Density")
ax1.plot(np.nan, np.nan, color = PlotScripts.colors[0], linestyle = PlotScripts.linestyles[1], label = "Galaxy Density")
ax3.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[0], label = "Count")
ax3.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[1], label = "Fraction of Galaxies")
ax7.set_xlabel(r'$\log_{10}\ M_\mathrm{vir}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
ax7.set_ylabel(r'$\mathrm{Mean \: Quasar \: Activity}$', size = PlotScripts.global_fontsize)
ax50.set_xlabel(r'$\log_{10}\ M_\mathrm{vir}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
#ax50.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
ax50.set_ylabel(r'$\mathrm{Number \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
ax55.set_xlabel(r'$\log_{10}\ M_\mathrm{*}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
ax55.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
#ax55.set_ylabel(r'$\mathrm{Number \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
ax56.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_labelsize)
ax56.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
#ax56.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Quasar \: Activity}$', size = PlotScripts.global_fontsize)
ax56.set_yscale('log', nonposy='clip')
ax50.axvline(np.log10(32.0*AllVars.PartMass / AllVars.Hubble_h), color = 'k', linewidth = PlotScripts.global_linewidth, linestyle = '-.')
ax1.xaxis.set_minor_locator(mtick.MultipleLocator(PlotScripts.time_tickinterval))
ax1.set_xlim(PlotScripts.time_xlim)
ax1.set_yscale('log', nonposy='clip')
ax3.xaxis.set_minor_locator(mtick.MultipleLocator(PlotScripts.time_tickinterval))
ax3.set_xlim(PlotScripts.time_xlim)
ax3.set_yscale('log', nonposy='clip')
## Create a second axis at the top that contains the corresponding redshifts. ##
## The redshift defined in the variable 'z_plot' will be displayed. ##
ax2 = ax1.twiny()
ax4 = ax3.twiny()
ax57 = ax56.twiny()
t_plot = (AllVars.t_BigBang - AllVars.cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3 # Corresponding time values on the bottom.
z_labels = ["$%d$" % x for x in PlotScripts.z_plot] # Properly Latex-ize the labels.
ax2.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax2.set_xlim(PlotScripts.time_xlim)
ax2.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax2.set_xticklabels(z_labels) # But label them as redshifts.
ax4.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax4.set_xlim(PlotScripts.time_xlim)
ax4.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax4.set_xticklabels(z_labels) # But label them as redshifts.
ax57.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax57.set_xlim(PlotScripts.time_xlim)
ax57.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax57.set_xticklabels(z_labels) # But label them as redshifts.
ax1.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_labelsize)
#ax1.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
ax1.set_ylabel(r'$N_\mathrm{Quasars} \: [\mathrm{Mpc}^{-3}]$', fontsize = PlotScripts.global_fontsize)
ax6.set_ylabel(r'$N_\mathrm{Gal} \: [\mathrm{Mpc}^{-3}]$', fontsize = PlotScripts.global_fontsize)
ax3.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_labelsize)
ax3.set_ylabel(r'$N_\mathrm{Boosted}$', fontsize = PlotScripts.global_fontsize)
ax5.set_ylabel(r'$\mathrm{Fraction \: Boosted}$', fontsize = PlotScripts.global_fontsize)
leg = ax1.legend(loc='lower right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
leg = ax3.legend(loc='lower left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
leg = ax7.legend(loc='upper left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
leg = ax50.legend(loc='upper right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
leg = ax55.legend(loc='upper right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
fig.tight_layout()
fig2.tight_layout()
fig3.tight_layout()
fig4.tight_layout()
fig5.tight_layout()
fig6.tight_layout()
outputFile1 = './{0}_quasardensity{1}'.format(output_tag, output_format)
outputFile2 = './{0}_boostedcount{1}'.format(output_tag, output_format)
outputFile3 = './{0}_quasar_activity_halo{1}'.format(output_tag, output_format)
outputFile4 = './{0}_mergercount_global{1}'.format(output_tag, output_format)
outputFile5 = './{0}_mergercount_global_stellarmass{1}'.format(output_tag, output_format)
outputFile6 = './{0}_mergercount_total{1}'.format(output_tag, output_format)
fig.savefig(outputFile1) # Save the figure
fig2.savefig(outputFile2) # Save the figure
fig3.savefig(outputFile3) # Save the figure
fig4.savefig(outputFile4) # Save the figure
fig5.savefig(outputFile5) # Save the figure
fig6.savefig(outputFile6) # Save the figure
print("Saved to {0}".format(outputFile1))
print("Saved to {0}".format(outputFile2))
print("Saved to {0}".format(outputFile3))
print("Saved to {0}".format(outputFile4))
print("Saved to {0}".format(outputFile5))
print("Saved to {0}".format(outputFile6))
plt.close(fig)
plt.close(fig2)
plt.close(fig3)
plt.close(fig4)
plt.close(fig5)
plt.close(fig6)
##
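# The routines above repeat one MPI idiom: allocate the receive buffer on
# rank 0 only, then comm.Reduce() the per-task arrays with MPI.SUM.  The
# helper below is a hedged sketch of that idiom (never called in this file);
# it assumes the mpi4py objects comm, rank and MPI already used throughout,
# and that `datatype` matches the dtype of `local_array`, as in the calls above.
def reduce_to_root(local_array, datatype = MPI.DOUBLE):
    '''Sum `local_array` across all MPI tasks; returns the total on rank 0
    and None on every other rank.'''
    local_array = np.asarray(local_array)
    total = np.zeros_like(local_array) if rank == 0 else None
    comm.Reduce([local_array, datatype], [total, datatype], op = MPI.SUM, root = 0)
    return total
##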
def plot_photon_quasar_fraction(snapshot, filenr, output_tag, QuasarFractionalPhoton, QuasarActivityToggle, NumSubsteps):
ax1 = plt.subplot(111)
counts, bin_edges, bin_middle = AllVars.Calculate_Histogram(QuasarFractionalPhoton, 0.05, 0, 0, 1)
ax1.plot(bin_middle, counts, lw = PlotScripts.global_linewidth, color = 'r')
ax1.axvline(np.mean(QuasarFractionalPhoton[QuasarFractionalPhoton != 0]), lw = 0.5, ls = '-')
ax1.set_yscale('log', nonposy='clip')
ax1.set_xlabel(r"$\mathrm{Fractional \: Photon \: Boost}$")
ax1.set_ylabel(r"$\mathrm{Count}$")
ax1.set_ylim([1e1, 1e5])
outputFile1 = './photonfraction/file{0}_snap{1}_{2}{3}'.format(filenr, snapshot, output_tag, output_format)
plt.tight_layout()
plt.savefig(outputFile1)
print("Saved to {0}".format(outputFile1))
plt.close()
###
def plot_quasar_substep(snapshot, filenr, output_tag, substep):
ax1 = plt.subplot(111)
counts, bin_edges, bin_middle = AllVars.Calculate_Histogram(substep, 0.1, 0, 0, 10)
ax1.plot(bin_middle, counts, lw = PlotScripts.global_linewidth, color = 'r')
ax1.axvline(np.mean(substep[substep != -1]), lw = 0.5, ls = '-')
ax1.set_yscale('log', nonposy='clip')
ax1.set_xlabel(r"$\mathrm{Substep \: Quasar \: Activity}$")
ax1.set_ylabel(r"$\mathrm{Count}$")
# ax1.set_ylim([1e1, 1e5])
outputFile1 = './substep_activity/file{0}_snap{1}_{2}{3}'.format(filenr, snapshot, output_tag, output_format)
plt.tight_layout()
plt.savefig(outputFile1)
print("Saved to {0}".format(outputFile1))
plt.close()
###
def plot_post_quasar_SFR(PlotSnapList, model_number, G, output_tag):
ax1 = plt.subplot(111)
ax2 = ax1.twinx()
count = 0
snapshot_thickness = 20 # How many snapshots before/after the quasar event do we want to track?
for snapshot_idx in PlotSnapList[model_number]:
w = np.where((G.QuasarActivity[:, snapshot_idx] == 1) & (G.LenHistory[:, snapshot_idx] > 200.0) & (G.GridStellarMass[:, snapshot_idx] > 0.001))[0]
w_slice_gridhistory = G.GridHistory[w,snapshot_idx-snapshot_thickness:snapshot_idx+snapshot_thickness]
potential_gal = []
for i in range(len(w_slice_gridhistory)):
ww = np.where((w_slice_gridhistory[i] >= 0))[0]
if (len(ww) == snapshot_thickness * 2):
potential_gal.append(w[i])
if (len(potential_gal) == 0):
return
count += 1
print("There were {0} galaxies that had an energetic quasar wind event at snapshot {1} (z = {2:.3f})".format(len(potential_gal), snapshot_idx, AllVars.SnapZ[snapshot_idx]))
chosen_gal = potential_gal[1]
lenhistory_array = np.empty((int(snapshot_thickness*2 + 1)))
SFR_array = np.empty((int(snapshot_thickness*2 + 1)))
gridhistory_array = np.empty((int(snapshot_thickness*2 + 1)))
coldgas_array = np.empty((int(snapshot_thickness*2 + 1)))
t = np.empty((int(snapshot_thickness*2 + 1)))
for i in range(-snapshot_thickness, snapshot_thickness+1):
#print("SFR {0} {1}".format(snapshot_idx + i, G.GridSFR[chosen_gal, snapshot_idx+i]))
#print("ColdGas {0} {1}".format(snapshot_idx + i, G.GridColdGas[chosen_gal, snapshot_idx+i]))
lenhistory_array[i+snapshot_thickness] = (G.LenHistory[chosen_gal, snapshot_idx+i])
SFR_array[i+snapshot_thickness] = (G.GridSFR[chosen_gal, snapshot_idx+i]) #- (G.GridSFR[chosen_gal, snapshot_idx])
gridhistory_array[i+snapshot_thickness] = (G.GridHistory[chosen_gal, snapshot_idx+i])
coldgas_array[i+snapshot_thickness] = (G.GridColdGas[chosen_gal, snapshot_idx+i] * 1.0e10 / AllVars.Hubble_h) #- (G.GridColdGas[chosen_gal, snapshot_idx])
t[i+snapshot_thickness] = (-AllVars.Lookback_Time[snapshot_idx+i] + AllVars.Lookback_Time[snapshot_idx]) * 1.0e3
print("Len History {0}".format(lenhistory_array))
print("Grid History {0}".format(gridhistory_array))
print("Cold Gas {0}".format(coldgas_array))
print("SFR {0}".format(SFR_array))
stellarmass_text = r"$log M_* = {0:.2f} \: M_\odot$".format(np.log10(G.GridStellarMass[chosen_gal, snapshot_idx] * 1.0e10 / AllVars.Hubble_h))
Ndym_text = "Dynamical Time = {0:.2f} Myr".format(G.DynamicalTime[chosen_gal, snapshot_idx])
z_text = "z = {0:.2f}".format(AllVars.SnapZ[snapshot_idx])
ax1.text(0.05, 0.95, z_text, transform = ax1.transAxes, fontsize = PlotScripts.global_fontsize - 4)
ax1.text(0.05, 0.9, stellarmass_text, transform = ax1.transAxes, fontsize = PlotScripts.global_fontsize - 4)
ax1.text(0.05, 0.85, Ndym_text, transform = ax1.transAxes, fontsize = PlotScripts.global_fontsize - 4)
ax1.plot(t, SFR_array, color = 'r', lw = PlotScripts.global_linewidth)
ax2.plot(t, coldgas_array, color = 'b', lw = PlotScripts.global_linewidth)
ax1.set_xlabel(r"$\mathrm{Time \: Since \: Quasar \: Event \: [Myr]}$", size = PlotScripts.global_labelsize - 10)
# ax1.set_ylabel(r"$\mathrm{Fractional \: SFR \: Relative \: To \: SFR_{Quasar}}$", size = PlotScripts.global_labelsize - 10)
# ax2.set_ylabel(r"$\mathrm{Difference \: Cold \: Gas \: Mass \: Relative \: To \: Cold_{Quasar}}$", size = PlotScripts.global_labelsize - 10)
ax1.set_ylabel(r"$\mathrm{SFR} \: [\mathrm{M}_\odot \mathrm{yr}^{-1}]$", size = PlotScripts.global_labelsize - 10)
ax2.set_ylabel(r"$\mathrm{Cold \: Gas \: Mass \: [\mathrm{M}_\odot]}$",size = PlotScripts.global_labelsize - 10)
ax1.set_yscale('log', nonposy='clip')
ax2.set_yscale('log', nonposy='clip')
ax1.plot(np.nan, np.nan, color = 'r', label = r"$\mathrm{SFR}$")
ax1.plot(np.nan, np.nan, color = 'b', label = r"$\mathrm{Cold \: Gas}$")
leg = ax1.legend(loc='upper right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile = "{0}_galaxy{2}{1}".format(output_tag, output_format, chosen_gal)
plt.tight_layout()
plt.savefig(outputFile)
print("Saved to {0}".format(outputFile))
plt.close()
exit()
###
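# plot_post_quasar_SFR() loops over candidate galaxies to keep only those
# with GridHistory >= 0 at every snapshot of the +/- snapshot_thickness
# window.  The function below is a hedged, vectorized sketch of that same
# selection (illustrative only, not called by the script).
def select_always_present(w, w_slice_gridhistory):
    '''Keep the galaxies in `w` whose GridHistory slice is non-negative at
    every snapshot in the window.'''
    # np.all collapses the snapshot axis, so the mask selects exactly the
    # rows the explicit loop keeps.
    present_everywhere = np.all(np.asarray(w_slice_gridhistory) >= 0, axis = 1)
    return np.asarray(w)[present_everywhere]
###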
def plot_stellarmass_blackhole(SnapList, simulation_norm, mean_galaxy_BHmass,
std_galaxy_BHmass, N_galaxy_BHmass, FirstFile,
LastFile, NumFile, model_tags, output_tag):
master_mean_SMBH, master_std_SMBH, master_N, master_bin_middle = \
collect_across_tasks(mean_galaxy_BHmass, std_galaxy_BHmass,
N_galaxy_BHmass, SnapList, SnapList, True,
m_gal_low, m_gal_high)
if rank == 0:
fig = plt.figure()
ax1 = fig.add_subplot(111)
fig2 = plt.figure()
ax2 = fig2.add_subplot(111)
for model_number in range(0, len(SnapList)):
## Normalization for each model. ##
if (simulation_norm[model_number] == 0):
AllVars.Set_Params_Mysim()
elif (simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif (simulation_norm[model_number] == 2):
AllVars.Set_Params_Tiamat()
elif (simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif (simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
box_factor = (LastFile[model_number] - FirstFile[model_number] + 1.0)/(NumFile[model_number]) # This factor allows us to take a sub-volume of the box and scale the results to represent the entire box.
norm = pow(AllVars.BoxSize,3) / pow(AllVars.Hubble_h, 3) * bin_width * box_factor
for snapshot_idx in range(0, len(SnapList[model_number])):
w = np.where((master_N[model_number][snapshot_idx] > 0.0))[0]
mean = np.log10(master_mean_SMBH[model_number][snapshot_idx][w])
upper = np.log10(np.add(master_mean_SMBH[model_number][snapshot_idx][w],
master_std_SMBH[model_number][snapshot_idx][w]))
lower = np.log10(np.subtract(master_mean_SMBH[model_number][snapshot_idx][w],
master_std_SMBH[model_number][snapshot_idx][w]))
label = "z = {0:.2f}" \
.format(AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
ax1.plot(master_bin_middle[model_number][snapshot_idx][w],
mean, label = label, color = PlotScripts.colors[snapshot_idx],
ls = PlotScripts.linestyles[model_number],
lw = PlotScripts.global_linewidth, rasterized = True)
#ax1.fill_between(bin_middle_stellar_array[model_number][snapshot_idx][w], lower, upper, color = PlotScripts.colors[model_number], alpha = 0.25)
ax2.plot(master_bin_middle[model_number][snapshot_idx][w],
master_N[model_number][snapshot_idx][w] / norm,
label = label, ls = PlotScripts.linestyles[model_number],
lw = PlotScripts.global_linewidth, rasterized = True)
Obs.Get_Data_SMBH()
PlotScripts.plot_SMBH_z8(ax1)
ax1.set_xlabel(r"$\log_{10}\mathrm{M}_* [\mathrm{M}_\odot]$", size = PlotScripts.global_fontsize)
ax1.set_ylabel(r"$\log_{10}\mathrm{M}_\mathrm{BH} [\mathrm{M}_\odot]$", size = PlotScripts.global_fontsize)
ax2.set_xlabel(r"$\log_{10}\mathrm{M}_\mathrm{BH} [\mathrm{M}_\odot]$", size = PlotScripts.global_fontsize)
ax2.set_ylabel(r'$\Phi\ [\mathrm{Mpc}^{-3}\: \mathrm{dex}^{-1}]$', fontsize = PlotScripts.global_fontsize)
ax2.set_yscale('log', nonposy='clip')
ax1.set_xticks(np.arange(7.0, 12.0))
ax1.set_yticks(np.arange(3.0, 12.0))
ax1.xaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax1.yaxis.set_minor_locator(mtick.MultipleLocator(0.25))
ax1.set_xlim([7.0, 10.25])
ax1.set_ylim([3.0, 8.0])
leg = ax1.legend(loc='upper left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
leg = ax2.legend(loc='lower left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile = "{0}{1}".format(output_tag, output_format)
plt.tight_layout()
fig.savefig(outputFile)
print("Saved to {0}".format(outputFile))
plt.close(fig)
outputFile2 = "{0}_MF{1}".format(output_tag, output_format)
plt.tight_layout()
fig2.savefig(outputFile2)
print("Saved to {0}".format(outputFile2))
plt.close(fig2)
###
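# Both plot_quasars_count() and plot_stellarmass_blackhole() build the same
# normalisation: comoving box volume scaled by the fraction of files read.
# The helper below is a hedged refactoring sketch of that calculation (not
# called anywhere in this file); it assumes AllVars.Set_Params_*() has
# already been run so that BoxSize and Hubble_h are set.
def comoving_volume_norm(first_file, last_file, num_file, bin_width = None):
    '''Comoving volume in Mpc^3 covered by the files read in; optionally
    multiplied by the bin width for per-dex mass-function normalisation.'''
    box_factor = (last_file - first_file + 1.0) / num_file
    norm = pow(AllVars.BoxSize, 3) / pow(AllVars.Hubble_h, 3) * box_factor
    if bin_width is not None:
        norm *= bin_width
    return norm
###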
def plot_reionmod(PlotSnapList, SnapList, simulation_norm, mean_reionmod_halo,
std_reionmod_halo, N_halo, mean_reionmod_z, std_reionmod_z,
N_reionmod, plot_z, model_tags, output_tag):
"""
Plot the reionization modifier as a function of halo mass and redshift.
Parameters
----------
PlotSnapList, SnapList: 2D Nested arrays of integers. Outer length is equal to the number of models and inner length is number of snapshots we're plotting/calculated for.
PlotSnapList contains the snapshots for each model we will plot for the halo mass figure.
SnapList contains the snapshots for each model that we have performed calculations for. These aren't equal because we don't want to plot halo curves for ALL redshifts.
simulation_norm: Array of integers. Length is equal to the number of models.
Contains the simulation identifier for each model. Used to set the parameters of each model.
mean_reionmod_halo, std_reionmod_halo: 3D Nested arrays of floats. Most outer length is equal to the number of models, next length is number of snapshots for each model, then inner-most length is the number of halo mass- bins (given by NB).
Contains the mean/standard deviation values for the reionization modifier as a function of halo mass.
NOTE: These are unique for each task.
N_halo: 3D Nested arrays of floats. Lengths are identical to mean_reionmod_halo.
Contains the number of halos in each halo mass bin.
NOTE: These are unique for each task.
mean_reionmod_z, std_reionmod_z: 2D Nested arrays of floats. Outer length is equal to the number of models, inner length is the number of snapshots for each model. NOTE: This inner length can be different to the length of PlotSnapList as we don't necessarily need to plot for every snapshot we calculate.
Contains the mean/standard deviation values for the reionization modifier as a function of redshift.
NOTE: These are unique for each task.
N_reionmod: 2D Nested arrays of floats. Lengths are identical to mean_reionmod_z.
Contains the number of galaxies at each redshift that have non-negative reionization modifier. A negative reionization modifier is a galaxy who didn't have infall/stripping during the snapshot.
NOTE: These are unique for each task.
plot_z: Boolean.
Denotes whether we want to plot the reionization modifier as a function
of redshift. Useful because we often only calculate statistics for a
subset of the snapshots to decrease computation time. For these runs,
we don't want to plot for something that requires ALL snapshots.
model_tags: Array of strings. Length is equal to the number of models.
Contains the legend labels for each model.
output_tag: String.
The prefix for the output file.
Returns
----------
None. Plot is saved in current directory as "./<output_tag>.<output_format>"
"""
master_mean_reionmod_halo, master_std_reionmod_halo, \
master_N_reionmod_halo, master_bin_middle = collect_across_tasks(mean_reionmod_halo,
std_reionmod_halo,
N_halo, SnapList,
PlotSnapList, True,
m_low, m_high)
if plot_z:
master_mean_reionmod_z, master_std_reionmod_z, master_N_reionmod_z, _ = collect_across_tasks(mean_reionmod_z,
std_reionmod_z,
N_reionmod)
if rank == 0:
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
if plot_z:
fig2 = plt.figure()
ax10 = fig2.add_subplot(111)
for model_number in range(len(PlotSnapList)):
if(simulation_norm[model_number] == 1):
cosmo = AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
cosmo = AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
cosmo = AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
cosmo = AllVars.Set_Params_Kali()
for snapshot_idx in range(len((PlotSnapList[model_number]))):
if snapshot_idx == 0:
label = model_tags[model_number]
else:
label = ""
nonzero_bins = np.where(master_N_reionmod_halo[model_number][snapshot_idx] > 0.0)[0]
ax1.plot(master_bin_middle[model_number][snapshot_idx][nonzero_bins],
master_mean_reionmod_halo[model_number][snapshot_idx][nonzero_bins],
label = label, ls = PlotScripts.linestyles[model_number],
color = PlotScripts.colors[snapshot_idx])
if plot_z:
ax10.plot((AllVars.t_BigBang - AllVars.Lookback_Time[SnapList[model_number]])*1.0e3, master_mean_reionmod_z[model_number], color = PlotScripts.colors[model_number], label = model_tags[model_number], ls = PlotScripts.linestyles[model_number], lw = 3)
for count, snapshot_idx in enumerate(PlotSnapList[model_number]):
#label = r"$\mathbf{z = " + str(int(round(AllVars.SnapZ[snapshot_idx]))) + "}$"
label = r"$\mathbf{z = " + str(AllVars.SnapZ[snapshot_idx]) + "}$"
ax1.plot(np.nan, np.nan, ls = PlotScripts.linestyles[0], color =
PlotScripts.colors[count], label = label)
ax1.set_xlim([8.5, 11.5])
ax1.set_ylim([0.0, 1.05])
ax1.set_xlabel(r'$\mathbf{log_{10} \: M_{vir} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
ax1.set_ylabel(r'$\mathbf{Mean ReionMod}$', fontsize = PlotScripts.global_labelsize)
leg = ax1.legend(loc='lower right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile1 = "./{0}_halo{1}".format(output_tag, output_format)
fig1.savefig(outputFile1, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile1))
plt.close(fig1)
if plot_z:
ax10.set_xlabel(r"$\mathbf{Time \: since \: Big \: Bang \: [Myr]}$", fontsize = PlotScripts.global_labelsize)
tick_locs = np.arange(200.0, 1000.0, 100.0)
tick_labels = [r"$\mathbf{%d}$" % x for x in tick_locs]
ax10.xaxis.set_major_locator(mtick.MultipleLocator(100))
ax10.set_xticklabels(tick_labels, fontsize = PlotScripts.global_fontsize)
ax10.set_xlim(PlotScripts.time_xlim)
ax10.set_ylabel(r'$\mathbf{Mean ReionMod}$', fontsize = PlotScripts.global_labelsize)
ax11 = ax10.twiny()
t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3 # Corresponding Time values on the bottom.
z_labels = ["$\mathbf{%d}$" % x for x in PlotScripts.z_plot] # Properly Latex-ize the labels.
ax11.set_xlabel(r"$\mathbf{z}$", fontsize = PlotScripts.global_labelsize)
ax11.set_xlim(PlotScripts.time_xlim)
ax11.set_xticks(t_plot) # Set the ticks according to the time values on the bottom,
ax11.set_xticklabels(z_labels, fontsize = PlotScripts.global_fontsize) # But label them as redshifts.
leg = ax10.legend(loc='lower right', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile2 = "./{0}_z{1}".format(output_tag, output_format)
fig2.savefig(outputFile2, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile2))
plt.close(fig2)
##
def plot_dust(PlotSnapList, SnapList, simulation_norm, mean_dust_galaxy, std_dust_galaxy,
N_galaxy, mean_dust_halo, std_dust_halo, N_halo, plot_z,
model_tags, output_tag):
"""
"""
master_mean_dust_galaxy, master_std_dust_galaxy, master_N_dust_galaxy, master_bin_middle_galaxy = \
collect_across_tasks(mean_dust_galaxy, std_dust_galaxy, N_galaxy, SnapList,
PlotSnapList, True, m_gal_low, m_gal_high)
master_mean_dust_halo, master_std_dust_halo, master_N_dust_halo, master_bin_middle_halo = \
collect_across_tasks(mean_dust_halo, std_dust_halo, N_halo, SnapList,
PlotSnapList, True, m_low, m_high)
if rank == 0:
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
fig2 = plt.figure()
ax2 = fig2.add_subplot(111)
for model_number in range(len(PlotSnapList)):
if(simulation_norm[model_number] == 1):
cosmo = AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
cosmo = AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
cosmo = AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
cosmo = AllVars.Set_Params_Kali()
for snapshot_idx in range(len((PlotSnapList[model_number]))):
if snapshot_idx == 0:
label = model_tags[model_number]
else:
label = ""
nonzero_bins = np.where(master_N_dust_galaxy[model_number][snapshot_idx] > 0.0)[0]
ax1.plot(master_bin_middle_galaxy[model_number][snapshot_idx][nonzero_bins],
master_mean_dust_galaxy[model_number][snapshot_idx][nonzero_bins],
label = label, ls = PlotScripts.linestyles[model_number],
color = PlotScripts.colors[snapshot_idx])
nonzero_bins = np.where(master_N_dust_halo[model_number][snapshot_idx] > 0.0)[0]
ax2.plot(master_bin_middle_halo[model_number][snapshot_idx][nonzero_bins],
master_mean_dust_halo[model_number][snapshot_idx][nonzero_bins],
label = label, ls = PlotScripts.linestyles[model_number],
color = PlotScripts.colors[snapshot_idx])
print(master_mean_dust_halo[model_number][snapshot_idx])
for count, snapshot_idx in enumerate(PlotSnapList[model_number]):
#label = r"$\mathbf{z = " + str(int(round(AllVars.SnapZ[snapshot_idx]))) + "}$"
label = r"$\mathbf{z = " + str(AllVars.SnapZ[snapshot_idx]) + "}$"
ax1.plot(np.nan, np.nan, ls = PlotScripts.linestyles[0], color =
PlotScripts.colors[count], label = label)
ax2.plot(np.nan, np.nan, ls = PlotScripts.linestyles[0], color =
PlotScripts.colors[count], label = label)
ax1.set_xlim([2.0, 10.5])
#ax1.set_ylim([1.0, 6.0])
ax1.set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
ax1.set_ylabel(r'$\mathbf{log_{10} \: \langle M_{Dust}\rangle_{M*}}$', fontsize = PlotScripts.global_labelsize)
leg = ax1.legend(loc='upper left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile1 = "./{0}_galaxy{1}".format(output_tag, output_format)
fig1.savefig(outputFile1, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile1))
plt.close(fig1)
ax2.set_xlim([6.8, 11.5])
#ax2.set_ylim([1.0, 6.0])
ax2.set_xlabel(r'$\mathbf{log_{10} \: M_{vir} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
ax2.set_ylabel(r'$\mathbf{log_{10} \: \langle M_{Dust}\rangle_{Mvir}}$', fontsize = PlotScripts.global_labelsize)
leg = ax2.legend(loc='upper left', numpoints=1, labelspacing=0.1)
leg.draw_frame(False) # Don't want a box frame
for t in leg.get_texts(): # Reduce the size of the text
t.set_fontsize(PlotScripts.global_legendsize)
outputFile2 = "./{0}_halo{1}".format(output_tag, output_format)
fig2.savefig(outputFile2, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile2))
plt.close(fig2)
def plot_dust_scatter(SnapList, mass_gal, mass_halo, mass_dust, output_tag):
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
fig2 = plt.figure()
ax2 = fig2.add_subplot(111)
fig3 = plt.figure()
ax3 = fig3.add_subplot(111, projection='3d')
fig4 = plt.figure()
ax4 = fig4.add_subplot(111)
ax1.scatter(mass_gal, mass_dust)
ax2.scatter(mass_halo, mass_dust)
#ax3.scatter(mass_gal, mass_halo, mass_dust)
hb = ax4.hexbin(mass_halo, mass_dust, bins='log', cmap='inferno')
ax1.set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
ax1.set_ylabel(r'$\mathbf{log_{10} \: M_{Dust}}$', fontsize = PlotScripts.global_labelsize)
ax2.set_xlabel(r'$\mathbf{log_{10} \: M_{vir} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
ax2.set_ylabel(r'$\mathbf{log_{10} \: M_{Dust}}$', fontsize = PlotScripts.global_labelsize)
ax4.set_xlabel(r'$\mathbf{log_{10} \: M_{vir} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
ax4.set_ylabel(r'$\mathbf{log_{10} \: M_{Dust}}$', fontsize = PlotScripts.global_labelsize)
cb = fig4.colorbar(hb, ax=ax4)
cb.set_label('log10(N)')
outputFile1 = "./{0}_galaxy{1}".format(output_tag, output_format)
fig1.savefig(outputFile1, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile1))
plt.close(fig1)
outputFile2 = "./{0}_halo{1}".format(output_tag, output_format)
fig2.savefig(outputFile2, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile2))
plt.close(fig2)
#outputFile3 = "./{0}_3D{1}".format(output_tag, output_format)
#fig3.savefig(outputFile3, bbox_inches='tight') # Save the figure
#print('Saved file to {0}'.format(outputFile3))
#plt.close(fig3)
outputFile4 = "./{0}_hexbin{1}".format(output_tag, output_format)
fig4.savefig(outputFile4, bbox_inches='tight') # Save the figure
print('Saved file to {0}'.format(outputFile4))
plt.close(fig4)
### Here ends the plotting functions. ###
### Here begins the functions that calculate various properties for the galaxies (fesc, Magnitude etc). ###
def Calculate_HaloPartStellarMass(halo_part, stellar_mass, bound_low, bound_high):
'''
Calculates the stellar mass for galaxies whose host halos contain a specified number of particles.
Parameters
----------
halo_part : array
Array containing the number of particles inside each halo.
stellar_mass : array
Array containing the Stellar Mass for each galaxy (entries align with HaloPart). Units of log10(Msun).
bound_low, bound_high : int
We calculate the stellar mass of galaxies whose host halo satisfies bound_low <= halo_part <= bound_high.
Returns
-------
mass, mass_std : float
Mean and standard deviation stellar mass of galaxies whose host halo has number of particles between the specified bounds. Units of log10(Msun)
Units
-----
Input Stellar Mass is in units of log10(Msun).
Output mean/std Stellar Mass is in units of log10(Msun).
'''
w = np.where((halo_part >= bound_low) & (halo_part <= bound_high))[0] # Find the halos with particle number between the bounds.
mass = np.mean(10**(stellar_mass[w]))
mass_std = np.std(10**(stellar_mass[w]))
return np.log10(mass), np.log10(mass_std)
##
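# Hedged usage sketch for Calculate_HaloPartStellarMass() with toy,
# hypothetical numbers (not read from any model output); never called here.
def _example_halopart_stellarmass():
    halo_part = np.array([30, 40, 100, 500])
    stellar_mass = np.array([7.5, 8.0, 9.0, 10.0])  # log10(Msun)
    # Only the 40- and 100-particle halos fall inside [32, 120], so the mean
    # is log10((10**8.0 + 10**9.0) / 2) ~= 8.74.
    mean_mass, std_mass = Calculate_HaloPartStellarMass(halo_part, stellar_mass, 32, 120)
    print("Mean stellar mass = {0:.2f} log10(Msun)".format(mean_mass))
##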
def calculate_UV_extinction(z, L, M):
'''
Calculates the observed UV magnitude after dust extinction is accounted for.
Parameters
----------
z : float
Redshift we are calculating the extinction at.
L, M : array, length equal to the number of galaxies at this snapshot.
Array containing the UV luminosities and magnitudes.
Returns
-------
M_UV_obs : array, length equal to the number of galaxies at this snapshot.
Array containing the observed UV magnitudes.
Units
-----
Luminosities are in units of log10(erg s^-1 A^-1).
Magnitudes are in the AB system.
'''
M_UV_bins = np.arange(-24, -16, 0.1)
A_mean = np.zeros((len(M_UV_bins))) # A_mean is the average UV extinction for a given UV bin.
for j in range(0, len(M_UV_bins)):
beta = calculate_beta(M_UV_bins[j], z) # Fits the beta parameter for this redshift/UV bin.
dist = np.random.normal(beta, 0.34, 10000) # Generates a normal distribution with mean beta and standard deviation of 0.34.
A = 4.43 + 1.99*dist # Meurer et al. (1999) relation between UV extinction and beta.
A[A < 0] = 0 # Negative extinctions don't make sense.
A_mean[j] = np.mean(A)
indices = np.clip(np.digitize(M, M_UV_bins), 0, len(M_UV_bins) - 1) # Bins the simulation magnitude into the M_UV bins (clipped so magnitudes outside the bin range don't index past the array). Note that digitize defines an index i if bin[i-1] <= x < bin[i] whereas I prefer bin[i] <= x < bin[i+1].
dust = A_mean[indices]
flux = AllVars.Luminosity_to_Flux(L, 10.0) # Calculate the flux from a distance of 10 parsec, units of log10(erg s^-1 A^-1 cm^-2).
flux_observed = flux - 0.4*dust
f_nu = AllVars.spectralflux_wavelength_to_frequency(10**flux_observed, 1600) # Spectral flux density in Jansky.
M_UV_obs = -2.5 * np.log10(f_nu) + 8.90 # AB Magnitude from http://www.astro.ljmu.ac.uk/~ikb/convert-units/node2.html
return M_UV_obs
##
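# calculate_UV_extinction() above Monte-Carlos the Meurer et al. (1999)
# relation A_UV = 4.43 + 1.99*beta over a Gaussian beta with sigma = 0.34.
# As a hedged sanity check (illustrative only, never called here): whenever
# the clipping at A = 0 removes a negligible tail, the sample mean should
# approach the analytic expectation 4.43 + 1.99*mean(beta).
def _check_meurer_extinction(beta_mean, sigma = 0.34, n_draws = 10000):
    dist = np.random.normal(beta_mean, sigma, n_draws)
    A = 4.43 + 1.99 * dist
    A[A < 0] = 0  # Negative extinctions are unphysical, as above.
    return np.mean(A), 4.43 + 1.99 * beta_mean  # (Monte-Carlo, analytic)
##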
def update_cumulative_stats(mean_pool, std_pool, N_pool, mean_local, std_local, N_local):
'''
Update the cumulative statistics (such as Stellar Mass Function, Mvir-Ngamma, fesc-z) that are saved across files.
Pooled mean formulae taken : from https://www.ncbi.nlm.nih.gov/books/NBK56512/
Pooled variance formulae taken from : https://en.wikipedia.org/wiki/Pooled_variance
Parameters
----------
mean_pool, std_pool, N_pool : array of floats with length equal to the number of bins (e.g. the mass bins for the Stellar Mass Function).
The current mean, standard deviation and number of data points within in each bin. This is the array that will be updated in this function.
mean_local, std_local, N_local : array of floats with length equal to the number of bins.
The mean, standard deviation and number of data points within in each bin that will be added to the pool.
Returns
-------
mean_pool, std_pool : (See above)
The updated arrays with the local values added and accounted for within the pools.
Units
-----
All units are kept the same as the input units.
Values are in real-space (not log-space).
'''
N_times_mean_local = np.multiply(N_local, mean_local)
N_times_var_local = np.multiply(N_local - 1, np.multiply(std_local, std_local)) # N - 1 because of Bessel's correction (https://en.wikipedia.org/wiki/Bessel%27s_correction).
N_times_mean_pool = np.add(N_times_mean_local, np.multiply(N_pool, mean_pool))
N_times_var_pool = np.add(N_times_var_local, np.multiply(N_pool - 1, np.multiply(std_pool, std_pool)))
N_pool = np.add(N_local, N_pool)
if isinstance(mean_local, (list, np.ndarray)): # Checks to see if we are dealing with arrays rather than scalars.
for i in range(0, len(N_pool)):
if(N_pool[i] == 0): # This case is when we have no data points in the bin.
mean_pool[i] = 0.0
else:
mean_pool[i] = N_times_mean_pool[i]/N_pool[i]
if(N_pool[i] < 3): # In this instance we don't have enough data points to properly calculate the standard deviation.
std_pool[i] = 0.0
else:
std_pool[i] = np.sqrt(N_times_var_pool[i] / (N_pool[i] - 2)) # We subtract 2 because the pooled numerator contains two 'N - 1' (Bessel) terms, one from each sample.
else:
mean_pool = N_times_mean_pool / N_pool
if(N_pool < 3):
std_pool = 0.0
else:
std_pool = np.sqrt(N_times_var_pool / (N_pool - 2))
return mean_pool, std_pool
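##
# Hedged sanity check for update_cumulative_stats(): pooling two batches must
# reproduce the mean of the concatenated sample.  Illustrative only, with toy
# hypothetical arrays; never called by the script.
def _check_pooled_mean():
    a = np.array([1.0, 2.0, 3.0, 4.0])
    b = np.array([10.0, 20.0, 30.0])
    # ddof = 1 so the input standard deviations carry Bessel's correction,
    # matching the 'N - 1' terms used inside update_cumulative_stats().
    mean_pool, std_pool = update_cumulative_stats(np.mean(a), np.std(a, ddof = 1), len(a),
                                                  np.mean(b), np.std(b, ddof = 1), len(b))
    # 70 / 7 == 10.0; np.isclose guards against floating-point noise.
    assert np.isclose(mean_pool, np.mean(np.concatenate([a, b])))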
### Here ends the functions that deal with galaxy data manipulation. ###
#################################
if __name__ == '__main__':
np.seterr(divide='ignore')
number_models = 4
galaxies_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/galaxies/const_0.3_z5.782"
merged_galaxies_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/galaxies/const_0.3_MergedGalaxies"
photo_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/grids/cifog/const_0.3_photHI"
zreion_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/grids/cifog/const_0.3_reionization_redshift"
galaxies_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/galaxies/fej_alpha0.40_beta0.05_z5.782"
merged_galaxies_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/galaxies/fej_alpha0.40_beta0.05_MergedGalaxies"
photo_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/grids/cifog/fej_alpha0.40_beta0.05_photHI"
zreion_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/grids/cifog/fej_alpha0.40_beta0.05_reionization_redshift"
galaxies_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/galaxies/MHneg_1e8_1e12_0.99_0.05_z5.782"
merged_galaxies_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/galaxies/MHneg_1e8_1e12_0.99_0.05_MergedGalaxies"
photo_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/grids/cifog/MHneg_1e8_1e12_0.99_0.05_photHI"
zreion_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/grids/cifog/MHneg_1e8_1e12_0.99_0.05_reionization_redshift"
galaxies_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/galaxies/MHpos_1e8_1e12_0.01_0.50_z5.782"
merged_galaxies_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/galaxies/MHpos_1e8_1e12_0.01_0.50_MergedGalaxies"
photo_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/grids/cifog/MHpos_1e8_1e12_0.01_0.50_photHI"
zreion_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/grids/cifog/MHpos_1e8_1e12_0.01_0.50_reionization_redshift"
galaxies_filepath_array = [galaxies_model1,
galaxies_model2,
galaxies_model3,
galaxies_model4]
photo_array = [photo_model1,
photo_model2,
photo_model3,
photo_model4]
zreion_array = [zreion_model1,
zreion_model2,
zreion_model3,
zreion_model4]
GridSize_array = [256,
256,
256,
256]
precision_array = [2,
2,
2,
2]
merged_galaxies_filepath_array = [merged_galaxies_model1,
merged_galaxies_model2,
merged_galaxies_model3,
merged_galaxies_model4]
number_substeps = [10, 10, 10, 10] # How many substeps does each model have (specified by STEPS variable within SAGE).
number_snapshots = [99, 99, 99, 99] # Number of snapshots in the simulation (we don't have to do calculations for ALL snapshots).
# Tiamat extended has 164 snapshots.
FirstFile = [0, 0, 0, 0] # The first file number THAT WE ARE PLOTTING.
#LastFile = [63, 63, 63, 63] # The last file number THAT WE ARE PLOTTING.
LastFile = [0, 0, 0, 0] # The last file number THAT WE ARE PLOTTING.
NumFile = [64, 64, 64, 64] # The number of files for this simulation (plotting a subset of these files is allowed).
same_files = [0, 0, 0, 0] # In the case that model 1 and model 2 (index 0 and 1) have the same files, we don't want to read them in a second time.
# This array will tell us if we should keep the files for the next model or otherwise throw them away.
# The files will be kept until same_files[current_model_number] = 0.
# For example, if we were plotting 5 models where models 1, 2 and 3 shared the same files and models 4 and 5 shared different files,
# then same_files = [1, 1, 0, 1, 0] would be the correct values.
done_model = np.zeros((number_models)) # We use this to keep track of if we have done a model already.
model_tags = [r"$\mathbf{f_\mathrm{esc} \: Constant}$",
r"$\mathbf{f_\mathrm{esc} \: \propto \: f_\mathrm{ej}}$",
r"$\mathbf{f_\mathrm{esc} \: \propto \: M_\mathrm{H}^{-1}}$",
r"$\mathbf{f_\mathrm{esc} \: \propto \: M_\mathrm{H}}$"]
## Constants used for each model. ##
# Need to add an entry for EACH model. #
halo_cut = [32, 32, 32, 32] # Only calculate properties for galaxies whose host halos have at least this many particles.
# For Tiamat, z = [6, 7, 8] are snapshots [78, 64, 51]
# For Kali, z = [6, 7, 8] are snapshots [93, 76, 64]
#SnapList = [np.arange(0,99), np.arange(0,99)] # These are the snapshots over which the properties are calculated. NOTE: If the escape fraction is selected (fesc_prescription == 3) then this should be ALL the snapshots in the simulation as this prescription is temporally important.
#SnapList = [np.arange(20,99), np.arange(20, 99), np.arange(20, 99)]
SnapList = [[33, 50, 76, 93],
[33, 50, 76, 93],
[33, 50, 76, 93],
[33, 50, 76, 93]]
#SnapList = [[64],
# [64],
# [64],
# [64]]
#SnapList = [[33, 50, 64, 76, 93]]
#SnapList = [[64], [64]]
#SnapList = [np.arange(20,99)]
#PlotSnapList = [[30, 50, 64, 76, 93]]
#PlotSnapList = [[93, 76, 64], [93, 76, 64]]
#SnapList = [[93, 76, 64], [93, 76, 64]]
PlotSnapList = SnapList
simulation_norm = [5, 5, 5, 5] # Changes the constants (cosmology, snapshot -> redshift mapping etc) for each simulation.
# 0 for MySim (Manodeep's old one).
# 1 for Mini-Millennium.
# 2 for Tiamat (up to z =5).
# 3 for extended Tiamat (down to z = 1.6ish).
# 4 for Britton's Sim Pip
# 5 for Manodeep's new simulation Kali.
stellar_mass_halolen_lower = [32, 95, 95, 95] # These limits are for the number of particles in a halo.
stellar_mass_halolen_upper = [50, 105, 105, 105] # We calculate the average stellar mass for galaxies whose host halos have particle count between these limits.
calculate_observed_LF = [0, 0, 0, 0] # Determines whether we want to account for dust extinction when calculating the luminosity function of each model.
paper_plots = 1
##############################################################################################################
## Do a few checks to ensure all the arrays were specified properly. ##
for model_number in range(0,number_models):
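# Each MPI rank must receive at least one file, since files are striped across ranks (FirstFile + rank, stepping by size) in the read loop below.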
assert(LastFile[model_number] - FirstFile[model_number] + 1 >= size)
if(simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
else:
print("Simulation norm was set to {0}.".format(simulation_norm[model_number]))
raise ValueError("This option has been implemented yet. Get your head in the game Jacob!")
if (number_snapshots[model_number] != len(AllVars.SnapZ)): # Here we do a check to ensure that the simulation we've defined correctly matches the number of snapshots we have also defined.
print("The number_snapshots array is {0}".format(number_snapshots))
print("The simulation_norm array is {0}".format(simulation_norm))
print("The number of snapshots for model_number {0} has {1} but you've said there is only {2}".format(model_number, len(AllVars.SnapZ), number_snapshots[model_number]))
raise ValueError("Check either that the number of snapshots has been defined properly and that the normalization option is correct.")
######################################################################
##################### SETTING UP ARRAYS ##############################
######################################################################
### The arrays are set up in a 3 part process. ###
### This is because our arrays are 3D nested to account for the model number and snapshots. ###
# First set up the outer most array. #
## Arrays for functions of stellar mass. ##
SMF = [] # Stellar Mass Function.
mean_fesc_galaxy_array = [] # Mean escape fraction as a function of stellar mass.
std_fesc_galaxy_array = [] # Same as above but standard deviation.
N_galaxy_array = [] # Number of galaxies as a function of stellar mass.
mean_BHmass_galaxy_array = [] # Black hole mass as a function of stellar mass.
std_BHmass_galaxy_array = [] # Same as above but standard deviation.
mergers_galaxy_array = [] # Number of mergers as a function of halo mass.
mean_dust_galaxy_array = [] # Mean dust mass as a function of stellar mass.
std_dust_galaxy_array = [] # Same as above but standard deviation.
mean_sfr_galaxy_array = [] # Mean star formation rate as a
# function of stellar mass
std_sfr_galaxy_array = [] # Same as above but standard deviation.
mean_ssfr_galaxy_array = [] # Mean specific star formation rate as a
# function of stellar mass
std_ssfr_galaxy_array = [] # Same as above but standard deviation.
mean_Ngamma_galaxy_array = [] # Mean number of ionizing photons emitted as
# a function of stellar mass.
std_Ngamma_galaxy_array = [] # Same as above but standard deviation.
mean_photo_galaxy_array = [] # Mean photoionization rate.
std_photo_galaxy_array = [] # Std photoionization rate.
mean_reionmod_galaxy_array = [] # Mean reionization modifier using RSAGE.
std_reionmod_galaxy_array = [] # Std.
mean_gnedin_reionmod_galaxy_array = [] # Mean reionization modifier using Gnedin analytic prescription.
std_gnedin_reionmod_galaxy_array = [] # Std.
## Arrays for functions of halo mass. ##
mean_ejected_halo_array = [] # Mean ejected fractions as a function of halo mass.
std_ejected_halo_array = [] # Same as above but standard deviation.
mean_fesc_halo_array = [] # Mean escape fraction as a function of halo mass.
std_fesc_halo_array = [] # Same as above but standard deviation.
mean_Ngamma_halo_array = [] # Mean number of ionizing photons THAT ESCAPE as a function of halo mass.
std_Ngamma_halo_array = [] # Same as above but standard deviation.
N_halo_array = [] # Number of galaxies as a function of halo mass.
mergers_halo_array = [] # Number of mergers as a function of halo mass.
mean_quasar_activity_array = [] # Mean fraction of galaxies that have quasar activity as a function of halo mass.
std_quasar_activity_array = [] # Same as above but standard deviation.
mean_reionmod_halo_array = [] # Mean reionization modifier as a function of halo mass.
std_reionmod_halo_array = [] # Same as above but for standard deviation.
mean_dust_halo_array = [] # Mean dust mass as a function of halo mass.
std_dust_halo_array = [] # Same as above but standard deviation.
## Arrays for functions of redshift. ##
sum_Ngamma_z_array = [] # Total number of ionizing photons THAT ESCAPE as a function of redshift.
mean_fesc_z_array = [] # Mean number of ionizing photons THAT ESCAPE as a function of redshift.
std_fesc_z_array = [] # Same as above but standard deviation.
N_z = [] # Number of galaxies as a function of redshift.
galaxy_halo_mass_mean = [] # Mean galaxy mass as a function of redshift.
N_quasars_z = [] # This tracks how many quasars went off during a specified snapshot.
N_quasars_boost_z = [] # This tracks how many galaxies are having their escape fraction boosted by quasar activity.
dynamicaltime_quasars_mean_z = [] # Mean dynamical time of galaxies that have a quasar event as a function of redshift.
dynamicaltime_quasars_std_z = [] # Same as above but standard deviation.
dynamicaltime_all_mean_z = [] # Mean dynamical time of all galaxies.
dynamicaltime_all_std_z = [] # Same as above but standard deviation.
mean_reionmod_z = [] # Mean reionization modifier as a function of redshift.
std_reionmod_z = [] # Same as above but for standard deviation.
N_reionmod_z = [] # Number of galaxies with a valid (> -1) reionization modifier.
mean_ejected_z = [] # Mean ejected fraction as a function of redshift.
std_ejected_z = [] # Same as above but for standard deviation.
## Arrays that aren't functions of other variables. ##
Ngamma_global = []
mass_global = []
fesc_global = []
## Arrays as a function of fej ##
mean_Ngamma_fej = []
std_Ngamma_fej = []
N_fej = []
## Now the outer arrays have been defined, set up the next nest level for the number of models. ##
for model_number in range(0,number_models):
## Galaxy Arrays ##
SMF.append([])
mean_fesc_galaxy_array.append([])
std_fesc_galaxy_array.append([])
N_galaxy_array.append([])
mean_BHmass_galaxy_array.append([])
std_BHmass_galaxy_array.append([])
mergers_galaxy_array.append([])
mean_dust_galaxy_array.append([])
std_dust_galaxy_array.append([])
mean_sfr_galaxy_array.append([])
std_sfr_galaxy_array.append([])
mean_ssfr_galaxy_array.append([])
std_ssfr_galaxy_array.append([])
mean_Ngamma_galaxy_array.append([])
std_Ngamma_galaxy_array.append([])
mean_photo_galaxy_array.append([])
std_photo_galaxy_array.append([])
mean_reionmod_galaxy_array.append([])
std_reionmod_galaxy_array.append([])
mean_gnedin_reionmod_galaxy_array.append([])
std_gnedin_reionmod_galaxy_array.append([])
## Halo arrays. ##
mean_ejected_halo_array.append([])
std_ejected_halo_array.append([])
mean_fesc_halo_array.append([])
std_fesc_halo_array.append([])
mean_Ngamma_halo_array.append([])
std_Ngamma_halo_array.append([])
N_halo_array.append([])
mergers_halo_array.append([])
mean_quasar_activity_array.append([])
std_quasar_activity_array.append([])
mean_reionmod_halo_array.append([])
std_reionmod_halo_array.append([])
mean_dust_halo_array.append([])
std_dust_halo_array.append([])
## Redshift arrays. ##
sum_Ngamma_z_array.append([])
mean_fesc_z_array.append([])
std_fesc_z_array.append([])
N_z.append([])
galaxy_halo_mass_mean.append([])
N_quasars_z.append([])
N_quasars_boost_z.append([])
dynamicaltime_quasars_mean_z.append([])
dynamicaltime_quasars_std_z.append([])
dynamicaltime_all_mean_z.append([])
dynamicaltime_all_std_z.append([])
mean_reionmod_z.append([])
std_reionmod_z.append([])
N_reionmod_z.append([])
mean_ejected_z.append([])
std_ejected_z.append([])
## Arrays that aren't functions ##
Ngamma_global.append([])
mass_global.append([])
fesc_global.append([])
## Arrays as a function of fej ##
mean_Ngamma_fej.append([])
std_Ngamma_fej.append([])
N_fej.append([])
## And then finally set up the inner most arrays ##
## NOTE: We do the counts as float so we can keep consistency when we're calling MPI operations (just use MPI.FLOAT rather than deciding if we need to use MPI.INT)
for snapshot_idx in range(len(SnapList[model_number])):
## For the arrays that are functions of stellar/halo mass, the inner most level will be an array with the statistic binned across mass ##
## E.g. SMF[model_number][snapshot_idx] will return an array whereas N_z[model_number][snapshot_idx] will return a float. ##
## Functions of stellar mass arrays. ##
SMF[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_fesc_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_fesc_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
N_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_BHmass_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_BHmass_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mergers_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_dust_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_dust_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_sfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_sfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_ssfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_ssfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_Ngamma_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_Ngamma_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_photo_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_photo_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_gnedin_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_gnedin_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
## Function of halo mass arrays. ##
mean_ejected_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_ejected_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_fesc_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_fesc_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_Ngamma_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_Ngamma_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
N_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mergers_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_quasar_activity_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_quasar_activity_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_reionmod_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_reionmod_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_dust_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_dust_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
## Function of Redshift arrays. ##
sum_Ngamma_z_array[model_number].append(0.0)
mean_fesc_z_array[model_number].append(0.0)
std_fesc_z_array[model_number].append(0.0)
N_z[model_number].append(0.0)
galaxy_halo_mass_mean[model_number].append(0.0)
N_quasars_z[model_number].append(0.0)
N_quasars_boost_z[model_number].append(0.0)
dynamicaltime_quasars_mean_z[model_number].append(0.0)
dynamicaltime_quasars_std_z[model_number].append(0.0)
dynamicaltime_all_mean_z[model_number].append(0.0)
dynamicaltime_all_std_z[model_number].append(0.0)
mean_reionmod_z[model_number].append(0.0)
std_reionmod_z[model_number].append(0.0)
N_reionmod_z[model_number].append(0.0)
mean_ejected_z[model_number].append(0.0)
std_ejected_z[model_number].append(0.0)
Ngamma_global[model_number].append([])
mass_global[model_number].append([])
fesc_global[model_number].append([])
## Arrays as a function of fej. ##
mean_Ngamma_fej[model_number].append(np.zeros((NB_fej), dtype = np.float32))
std_Ngamma_fej[model_number].append(np.zeros((NB_fej), dtype = np.float32))
N_fej[model_number].append(np.zeros((NB_fej), dtype = np.float32))
######################################################################
#################### ALL ARRAYS SETUP ################################
######################################################################
## Now it's (finally) time to read in all the data and do the actual work. ##
for model_number in range(number_models):
if(simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
else:
print("Simulation norm was set to {0}.".format(simulation_norm[model_number]))
raise ValueError("This option has been implemented yet. Get your head in the game Jacob!")
if (done_model[model_number] == 1): # If we have already done this model (i.e., we kept the files and skipped this loop), move along.
assert(FirstFile[model_number] == FirstFile[model_number - 1])
assert(LastFile[model_number] == LastFile[model_number - 1])
continue
for fnr in range(FirstFile[model_number] + rank, LastFile[model_number]+1, size): # Divide up the input files across the processors.
GG, Gal_Desc = ReadScripts.ReadGals_SAGE(galaxies_filepath_array[model_number], fnr, number_snapshots[model_number], comm) # Read galaxies
G_Merged, _ = ReadScripts.ReadGals_SAGE(merged_galaxies_filepath_array[model_number], fnr, number_snapshots[model_number], comm) # Also need the merged galaxies.
G = ReadScripts.Join_Arrays(GG, G_Merged, Gal_Desc) # Then join them together for all galaxies.
keep_files = 1 # Flips to 0 when we are done with this file.
current_model_number = model_number # Used to differentiate between outer model_number and the inner model_number because we can keep files across model_numbers.
while(keep_files == 1):
## Just a few definitions to cut down the clutter a smidge. ##
current_halo_cut = halo_cut[current_model_number]
NumSubsteps = number_substeps[current_model_number]
do_observed_LF = calculate_observed_LF[current_model_number]
for snapshot_idx in range(0, len(SnapList[current_model_number])): # Now let's calculate stats for each required redshift.
current_snap = SnapList[current_model_number][snapshot_idx] # Get rid of some clutter.
w_gal = np.where((G.GridHistory[:, current_snap] != -1) & (G.GridStellarMass[:, current_snap] > 0.0) & (G.LenHistory[:, current_snap] > current_halo_cut) & (G.GridSFR[:, current_snap] >= 0.0) & (G.GridFoFMass[:, current_snap] >= 0.0))[0] # Only include galaxies that existed at the current snapshot, have positive stellar mass, non-negative SFR and FoF mass, and reside in a halo that is sufficiently resolved.
w_merged_gal = np.where((G_Merged.GridHistory[:, current_snap] != -1) & (G_Merged.GridStellarMass[:, current_snap] > 0.0) & (G_Merged.LenHistory[:, current_snap] > current_halo_cut) & (G_Merged.GridSFR[:, current_snap] >= 0.0) & (G_Merged.GridFoFMass[:, current_snap] >= 0.0) & (G_Merged.LenMergerGal[:,current_snap] > current_halo_cut))[0]
print("There were {0} galaxies for snapshot {1} (Redshift {2:.3f}) model {3}.".format(len(w_gal), current_snap, AllVars.SnapZ[current_snap], current_model_number))
if (len(w_gal) == 0):
continue
mass_gal = np.log10(G.GridStellarMass[w_gal, current_snap] * 1.0e10 / AllVars.Hubble_h) # Msun. Log Units.
w_SFR = w_gal[np.where((G.GridSFR[w_gal, current_snap] > 0.0))[0]]
mass_SFR_gal = np.log10(G.GridStellarMass[w_SFR, current_snap] * \
1.0e10 / AllVars.Hubble_h)
SFR_gal = np.log10(G.GridSFR[w_SFR,current_snap])
sSFR_gal = SFR_gal - mass_SFR_gal
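# sSFR computed in log space: log10(SFR) - log10(M*) = log10(SFR/M*).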
halo_part_count = G.LenHistory[w_gal, current_snap]
metallicity_gal = G.GridZ[w_gal, current_snap]
metallicity_tremonti_gal = np.log10(G.GridZ[w_gal, current_snap] / 0.02) + 9.0 # Using the Tremonti relationship for metallicity.
mass_central = np.log10(G.GridFoFMass[w_gal, current_snap] * 1.0e10 / AllVars.Hubble_h) # Msun. Log Units.
ejected_fraction = G.EjectedFraction[w_gal, current_snap]
w_dust = np.where(((G.GridDustColdGas[w_gal, current_snap]
+G.GridDustHotGas[w_gal, current_snap]
+G.GridDustEjectedMass[w_gal, current_snap]) > 0.0)
& (G.GridType[w_gal, current_snap] == 0))[0]
total_dust_gal = np.log10((G.GridDustColdGas[w_gal[w_dust], current_snap]
+G.GridDustHotGas[w_gal[w_dust], current_snap]
+G.GridDustEjectedMass[w_gal[w_dust], current_snap])
* 1.0e10 / AllVars.Hubble_h)
mass_gal_dust = np.log10(G.GridStellarMass[w_gal[w_dust], current_snap]
* 1.0e10 / AllVars.Hubble_h)
mass_centralgal_dust = np.log10(G.GridFoFMass[w_gal[w_dust], current_snap]
* 1.0e10 / AllVars.Hubble_h)
fesc = G.Gridfesc[w_gal, current_snap]
fesc[fesc < 0.0] = 0.0
Ngamma_gal = G.GridNgamma_HI[w_gal, current_snap] # 1.0e50
# photons/s.
if model_number < 3:
Ngamma_gal += 50.0 # Old versions of SAGE incorrectly
# subtracted 50.
Ngamma_gal *= fesc
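# Ngamma_gal now holds the ESCAPING ionizing photon rate (still in units of 1.0e50 photons/s).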
reionmod = G.GridReionMod[w_gal, current_snap]
mass_reionmod_central = mass_central[reionmod > -1]
mass_reionmod_gal = mass_gal[reionmod > -1]
reionmod = reionmod[reionmod > -1] # Some satellite galaxies that don't have HotGas and hence won't be stripped. As a result reionmod = -1 for these. Ignore them.
mass_BH = G.GridBHMass[w_gal, current_snap] * 1.0e10 / AllVars.Hubble_h # Msun. Not log units.
L_UV = SFR_gal + 39.927 # Using relationship from STARBURST99, units of erg s^-1 A^-1. Log Units.
M_UV = AllVars.Luminosity_to_ABMag(L_UV, 1600)
if (do_observed_LF == 1): # Calculate the UV extinction if requested.
M_UV_obs = calculate_UV_extinction(AllVars.SnapZ[current_snap], L_UV, M_UV)
galaxy_halo_mass_mean_local, galaxy_halo_mass_std_local = Calculate_HaloPartStellarMass(halo_part_count, mass_gal, stellar_mass_halolen_lower[current_model_number], stellar_mass_halolen_upper[current_model_number]) # This is the average stellar mass for galaxies whose halos have the specified number of particles.
galaxy_halo_mass_mean[current_model_number][snapshot_idx] += pow(10, galaxy_halo_mass_mean_local) / (LastFile[current_model_number] + 1) # Adds to the average of the mean.
photofield_path = "{0}_{1:03d}".format(photo_array[current_model_number],
current_snap)
#photo_gal = photo.calc_gal_photoion(G.GridHistory[w_gal, current_snap],
# photofield_path,
# GridSize_array[current_model_number],
# precision_array[current_model_number])
#zreion_path = "{0}".format(zreion_array[current_model_number])
#zreion_gal = photo.calc_gal_zreion(G.GridHistory[w_gal, current_snap],
# zreion_path,
# GridSize_array[current_model_number],
# precision_array[current_model_number])
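# Analytic (Gnedin-style) reionization suppression. Since gnedin_mfilt and mass_central are log10 masses,
# 10**(gnedin_mfilt - mass_central) is the filtering-to-halo mass ratio, giving reionmod = [1 + 0.26*(M_filt/M_vir)]**-3.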
z_0 = 8.0
z_r = 7.0
gnedin_mfilt = ga.get_filter_mass(np.array(AllVars.SnapZ[current_snap]),
z_0, z_r)
gnedin_reionmod_gal = 1.0 / pow(1.0 + 0.26*pow(10, gnedin_mfilt - mass_central), 3.0)
###########################################
######## BASE PROPERTIES CALCULATED #######
###########################################
# Time to calculate relevant statistics.
### Functions of Galaxies/Stellar Mass ###
## Stellar Mass Function ##
(counts_local, bin_edges, bin_middle) = AllVars.Calculate_Histogram(mass_gal, bin_width, 0, m_gal_low, m_gal_high) # Bin the Stellar Mass
SMF[current_model_number][snapshot_idx] += counts_local
## Escape Fraction ##
(mean_fesc_galaxy_local, std_fesc_galaxy_local, N_local, sum_fesc_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(mass_gal, fesc, bin_width, m_gal_low, m_gal_high)
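# update_cumulative_stats folds this file's local (mean, std, N) into the running totals, so the binned statistics accumulate correctly across files.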
(mean_fesc_galaxy_array[current_model_number][snapshot_idx], std_fesc_galaxy_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_fesc_galaxy_array[current_model_number][snapshot_idx], std_fesc_galaxy_array[current_model_number][snapshot_idx], N_galaxy_array[current_model_number][snapshot_idx], mean_fesc_galaxy_local, std_fesc_galaxy_local, N_local)
## Black Hole Mass ##
(mean_BHmass_galaxy_local, std_BHmass_galaxy_local, N_local, sum_BHmass_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(mass_gal, mass_BH, bin_width, m_gal_low, m_gal_high)
(mean_BHmass_galaxy_array[current_model_number][snapshot_idx], std_BHmass_galaxy_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_BHmass_galaxy_array[current_model_number][snapshot_idx], std_BHmass_galaxy_array[current_model_number][snapshot_idx], N_galaxy_array[current_model_number][snapshot_idx], mean_BHmass_galaxy_local, std_BHmass_galaxy_local, N_local)
## Total Dust Mass ##
(mean_dust_galaxy_local, std_dust_galaxy_local, N_local,
sum_dust_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal_dust, total_dust_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_dust_galaxy_array[current_model_number][snapshot_idx],
std_dust_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_dust_galaxy_array[current_model_number][snapshot_idx],
std_dust_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_dust_galaxy_local,
std_dust_galaxy_local,
N_local)
## Star Formation Rate ##
(mean_sfr_galaxy_local, std_sfr_galaxy_local, N_local,
sum_sfr_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(
mass_SFR_gal, SFR_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_sfr_galaxy_array[current_model_number][snapshot_idx],
std_sfr_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_sfr_galaxy_array[current_model_number][snapshot_idx],
std_sfr_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_sfr_galaxy_local,
std_sfr_galaxy_local,
N_local)
## Specific Star Formation Rate ##
(mean_ssfr_galaxy_local, std_ssfr_galaxy_local, N_local,
sum_ssfr_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(
mass_SFR_gal, sSFR_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_ssfr_galaxy_array[current_model_number][snapshot_idx],
std_ssfr_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_ssfr_galaxy_array[current_model_number][snapshot_idx],
std_ssfr_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_ssfr_galaxy_local,
std_ssfr_galaxy_local,
N_local)
## Number of Ionizing Photons ##
(mean_Ngamma_galaxy_local, std_Ngamma_galaxy_local, N_local,
sum_Ngamma_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal, Ngamma_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_Ngamma_galaxy_array[current_model_number][snapshot_idx],
std_Ngamma_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_Ngamma_galaxy_array[current_model_number][snapshot_idx],
std_Ngamma_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_Ngamma_galaxy_local,
std_Ngamma_galaxy_local,
N_local)
## Photoionization rate ##
'''
(mean_photo_galaxy_local, std_photo_galaxy_local, N_local,
sum_photo_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal, photo_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_photo_galaxy_array[current_model_number][snapshot_idx],
std_photo_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_photo_galaxy_array[current_model_number][snapshot_idx],
std_photo_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_photo_galaxy_local,
std_photo_galaxy_local,
N_local)
'''
## RSAGE Reionization Modifier ##
(mean_reionmod_galaxy_local, std_reionmod_galaxy_local, N_local,
sum_reionmod_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_reionmod_gal, reionmod,
bin_width, m_gal_low,
m_gal_high)
(mean_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_reionmod_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_reionmod_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_reionmod_galaxy_local,
std_reionmod_galaxy_local,
N_local)
## Gnedin Reionization Modifier ##
(mean_gnedin_reionmod_galaxy_local, std_gnedin_reionmod_galaxy_local, N_local,
sum_gnedin_reionmod_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal, gnedin_reionmod_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_gnedin_reionmod_galaxy_local,
std_gnedin_reionmod_galaxy_local,
N_local)
N_galaxy_array[current_model_number][snapshot_idx] += N_local
### Functions of Halos/Halo Mass ###
## Ejected Fraction ##
(mean_ejected_halo_local, std_ejected_halo_local, N_local, sum_ejected_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_central, ejected_fraction, bin_width, m_low, m_high)
(mean_ejected_halo_array[current_model_number][snapshot_idx], std_ejected_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_ejected_halo_array[current_model_number][snapshot_idx], std_ejected_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_ejected_halo_local, std_ejected_halo_local, N_local) # Then update the running total.
## Quasar Fraction ##
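# G.QuasarActivity is a 0/1 flag, so its binned mean is the fraction of galaxies with an active quasar in each halo-mass bin.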
(mean_quasar_activity_local, std_quasar_activity_local,N_local, sum_quasar_activity_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_central, G.QuasarActivity[w_gal, current_snap], bin_width, m_low, m_high)
(mean_quasar_activity_array[current_model_number][snapshot_idx], std_quasar_activity_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_quasar_activity_array[current_model_number][snapshot_idx], std_quasar_activity_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_quasar_activity_local, std_quasar_activity_local, N_local) # Then update the running total.
## fesc Value ##
(mean_fesc_halo_local, std_fesc_halo_local, N_local, sum_fesc_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_central, fesc, bin_width, m_low, m_high)
(mean_fesc_halo_array[current_model_number][snapshot_idx], std_fesc_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_fesc_halo_array[current_model_number][snapshot_idx], std_fesc_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_fesc_halo_local, std_fesc_halo_local, N_local) # Then update the running total.
## Ngamma ##
#(mean_Ngamma_halo_local, std_Ngamma_halo_local, N_local, sum_Ngamma_halo, bin_middle) \
#= AllVars.Calculate_2D_Mean(mass_central, ionizing_photons, bin_width, m_low, m_high)
#mean_Ngamma_halo_local = np.divide(mean_Ngamma_halo_local, 1.0e50) ## Divide out a constant to keep the numbers manageable.
#std_Ngamma_halo_local = np.divide(std_Ngamma_halo_local, 1.0e50)
#(mean_Ngamma_halo_array[current_model_number][snapshot_idx], std_Ngamma_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_Ngamma_halo_array[current_model_number][snapshot_idx], std_Ngamma_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_Ngamma_halo_local, std_Ngamma_halo_local, N_local) # Then update the running total.
## Reionization Modifier ##
(mean_reionmod_halo_local, std_reionmod_halo_local, N_local, sum_reionmod_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_reionmod_central, reionmod, bin_width, m_low, m_high)
(mean_reionmod_halo_array[current_model_number][snapshot_idx], std_reionmod_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_reionmod_halo_array[current_model_number][snapshot_idx], std_reionmod_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_reionmod_halo_local, std_reionmod_halo_local, N_local) # Then update the running total.
## Total Dust Mass ##
(mean_dust_halo_local, std_dust_halo_local, N_local,
sum_dust_halo, bin_middle) = AllVars.Calculate_2D_Mean(
mass_centralgal_dust, total_dust_gal,
bin_width, m_low,
m_high)
(mean_dust_halo_array[current_model_number][snapshot_idx],
std_dust_halo_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_dust_halo_array[current_model_number][snapshot_idx],
std_dust_halo_array[current_model_number][snapshot_idx],
N_halo_array[current_model_number][snapshot_idx],
mean_dust_halo_local,
std_dust_halo_local,
N_local)
N_halo_array[current_model_number][snapshot_idx] += N_local
### Functions of redshift ###
## Ngamma ##
#sum_Ngamma_z_array[current_model_number][snapshot_idx] += np.sum(np.divide(ionizing_photons, 1.0e50)) # Remember that we're dividing out a constant!
## fesc Value ##
(mean_fesc_z_array[current_model_number][snapshot_idx], std_fesc_z_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_fesc_z_array[current_model_number][snapshot_idx], std_fesc_z_array[current_model_number][snapshot_idx], N_z[current_model_number][snapshot_idx], np.mean(fesc), np.std(fesc), len(w_gal)) # Updates the mean escape fraction for this redshift.
## Reionization Modifier ##
(mean_reionmod_z[current_model_number][snapshot_idx], std_reionmod_z[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_reionmod_z[current_model_number][snapshot_idx], std_reionmod_z[current_model_number][snapshot_idx], N_reionmod_z[current_model_number][snapshot_idx], np.mean(reionmod), np.std(reionmod), len(reionmod))
N_reionmod_z[current_model_number][snapshot_idx] += len(reionmod)
## Ejected Fraction ##
(mean_ejected_z[current_model_number][snapshot_idx],std_ejected_z[current_model_number][snapshot_idx]) \
= update_cumulative_stats(mean_ejected_z[current_model_number][snapshot_idx],
std_ejected_z[current_model_number][snapshot_idx],
N_z[current_model_number][snapshot_idx],
np.mean(ejected_fraction),
np.std(ejected_fraction),
len(w_gal))
N_z[current_model_number][snapshot_idx] += len(w_gal)
#### Arrays that are just kept across snapshots ##
Ngamma_global[current_model_number][snapshot_idx].append(Ngamma_gal)
mass_global[current_model_number][snapshot_idx].append(mass_gal)
fesc_global[current_model_number][snapshot_idx].append(fesc)
#### Arrays that are function of fej ##
(mean_Ngamma_fej_local, std_Ngamma_fej_local, N_local,
sum_Ngamma_fej_local, bin_middle) = AllVars.Calculate_2D_Mean(
ejected_fraction, Ngamma_gal,
fej_bin_width, fej_low, fej_high)
(mean_Ngamma_fej[current_model_number][snapshot_idx],
std_Ngamma_fej[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_Ngamma_fej[current_model_number][snapshot_idx],
std_Ngamma_fej[current_model_number][snapshot_idx],
N_fej[current_model_number][snapshot_idx],
mean_Ngamma_fej_local,
std_Ngamma_fej_local,
N_local)
N_fej[current_model_number][snapshot_idx] += N_local
done_model[current_model_number] = 1
if (current_model_number < number_models):
keep_files = same_files[current_model_number] # Decide if we want to keep the files loaded or throw them out.
current_model_number += 1 # Update the inner loop model number.
#StellarMassFunction(PlotSnapList, SMF, simulation_norm, FirstFile,
# LastFile, NumFile, galaxy_halo_mass_mean, model_tags,
# 1, paper_plots, "wtf")
#plot_reionmod(PlotSnapList, SnapList, simulation_norm, mean_reionmod_halo_array,
#std_reionmod_halo_array, N_halo_array, mean_reionmod_z,
#std_reionmod_z, N_reionmod_z, False, model_tags,
#"reionmod_selfcon")
#plot_dust_scatter(SnapList, mass_gal_dust, mass_centralgal_dust, total_dust_gal,
# "dust_scatter")
#plot_dust(PlotSnapList, SnapList, simulation_norm, mean_dust_galaxy_array,
# std_dust_galaxy_array, N_galaxy_array, mean_dust_halo_array,
# std_dust_halo_array, N_halo_array, False, model_tags,
# "dustmass_total")
#plot_stellarmass_blackhole(PlotSnapList, simulation_norm, mean_BHmass_galaxy_array,
# std_BHmass_galaxy_array, N_galaxy_array,
# FirstFile, LastFile, NumFile,
# model_tags, "StellarMass_BHMass")
#plot_ejectedfraction(SnapList, PlotSnapList, simulation_norm,
# mean_ejected_halo_array, std_ejected_halo_array,
# N_halo_array, mean_ejected_z, std_ejected_z, N_z,
# model_tags, "ejectedfraction")
#plot_quasars_count(SnapList, PlotSnapList, N_quasars_z, N_quasars_boost_z, N_z, mean_quasar_activity_array, std_quasar_activity_array, N_halo_array, mergers_halo_array, SMF, mergers_galaxy_array, fesc_prescription, simulation_norm, FirstFile, LastFile, NumFile, model_tags, "SN_Prescription")
plot_fesc_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_fesc_galaxy_array, std_fesc_galaxy_array,
N_galaxy_array, mean_fesc_halo_array,
std_fesc_halo_array, N_halo_array,
galaxy_halo_mass_mean, model_tags,
paper_plots, mass_global, fesc_global, Ngamma_global,
"fesc_paper")
plot_reionmod_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_reionmod_galaxy_array, std_reionmod_galaxy_array,
N_galaxy_array, mean_gnedin_reionmod_galaxy_array,
std_gnedin_reionmod_galaxy_array,
model_tags, paper_plots, "reionmod")
exit()
#plot_nion_galaxy(SnapList, PlotSnapList, simulation_norm,
# mean_Ngamma_galaxy_array, std_Ngamma_galaxy_array,
# N_galaxy_array, model_tags,
# paper_plots, "Ngamma")
'''
plot_photo_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_photo_galaxy_array, std_photo_galaxy_array,
N_galaxy_array, model_tags,
paper_plots, "photo")
'''
plot_sfr_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_sfr_galaxy_array, std_sfr_galaxy_array,
mean_ssfr_galaxy_array, std_ssfr_galaxy_array,
N_galaxy_array, model_tags, "sSFR")
#plot_fej_Ngamma(SnapList, PlotSnapList, simulation_norm,
# mean_Ngamma_fej, std_Ngamma_fej,
# N_fej, model_tags, "Ngamma_fej")
#plot_photoncount(SnapList, sum_Ngamma_z_array, simulation_norm, FirstFile, LastFile, NumFile, model_tags, "Ngamma_test") ## PARALLEL COMPATIBLE
#plot_mvir_Ngamma(SnapList, mean_Ngamma_halo_array, std_Ngamma_halo_array, N_halo_array, model_tags, "Mvir_Ngamma_test", fesc_prescription, fesc_normalization, "/lustre/projects/p004_swin/jseiler/tiamat/halo_ngamma/") ## PARALLEL COMPATIBLE
| 51.258188 | 474 | 0.610478 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 82,255 | 0.341284 |
81b112f3d2024fef2d743717dabaf4db53727b51 | 661 | py | Python | tests/functional_tests/test_camera.py | accessai/access-face-vision | 04469ebc03ac9644a44bbdb90951f1821dca0f6d | ["Apache-2.0"] | 3 | 2019-07-19T17:59:19.000Z | 2019-07-21T16:07:43.000Z | tests/functional_tests/test_camera.py | accessai/access-face-vision | 04469ebc03ac9644a44bbdb90951f1821dca0f6d | ["Apache-2.0"] | 9 | 2019-07-19T17:38:11.000Z | 2022-03-11T23:53:13.000Z | tests/functional_tests/test_camera.py | accessai/access-face-vision | 04469ebc03ac9644a44bbdb90951f1821dca0f6d | ["Apache-2.0"] | 1 | 2019-07-21T16:07:54.000Z | 2019-07-21T16:07:54.000Z |
from multiprocessing import Queue, Value
from time import sleep
from access_face_vision.source.camera import Camera
from access_face_vision.utils import create_parser
from access_face_vision import access_logger
LOG_LEVEL = 'debug'
logger, log_que, que_listener = access_logger.set_main_process_logger(LOG_LEVEL)
def test_camera():
logger.info('Starting Camera test')
cmd_args = create_parser()
camera = Camera(cmd_args, Queue(), log_que, LOG_LEVEL, Value('i',0), draw_frames=True)
camera.start()
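# Let the capture loop run for a minute before shutting down.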
sleep(60)
camera.stop()
logger.info('Camera test completed')
que_listener.stop()
if __name__ == '__main__':
test_camera()
| 26.44 | 90 | 0.754917 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 65 | 0.098336 |
81b28caa54d539dfc14006299c0cf1e06133e78c | 1,537 | py | Python | utils/deserializer/__tests__/test_protobuf_deserializer.py | Mouse-BB-Team/Bot-Detection | 4438d8ccec1baaa22f3357213e6d52a62ff6d618 | ["MIT"] | 5 | 2020-09-30T16:58:59.000Z | 2021-11-30T22:34:10.000Z | utils/deserializer/__tests__/test_protobuf_deserializer.py | Mouse-BB-Team/Bot-Detection | 4438d8ccec1baaa22f3357213e6d52a62ff6d618 | ["MIT"] | null | null | null | utils/deserializer/__tests__/test_protobuf_deserializer.py | Mouse-BB-Team/Bot-Detection | 4438d8ccec1baaa22f3357213e6d52a62ff6d618 | ["MIT"] | null | null | null |
from utils.deserializer.protobuf_deserializer import ProtoLoader
from pathlib import Path
import pandas as pd
import pytest
PROTOFILES_DIR_PATH = Path(__file__).parent.joinpath("protofilesdir").absolute().__str__()
INVALID_PATH = "some/wrong/path"
@pytest.mark.parametrize('filepath', ["test_file.pb", "test_file_1.txt", "test_file_2.xml"])
def test_should_return_single_df_sequence_regardless_file_extension(filepath):
loader = ProtoLoader(PROTOFILES_DIR_PATH)
sequence = loader.get_single_sequence(filepath)
assert isinstance(sequence, pd.DataFrame)
def test_should_return_not_none_when_directory_not_empty():
loader = ProtoLoader(PROTOFILES_DIR_PATH)
seq_list = loader.get_list_of_sequences()
assert seq_list is not None
def test_should_return_correct_length_of_seq_list():
loader = ProtoLoader(PROTOFILES_DIR_PATH)
seq_list = loader.get_list_of_sequences()
assert len(seq_list) == 3
def test_should_return_empty_list_when_directory_empty():
loader = ProtoLoader(PROTOFILES_DIR_PATH + INVALID_PATH)
seq_list = loader.get_list_of_sequences()
assert len(seq_list) == 0
def test_should_check_for_list_when_directory_empty():
loader = ProtoLoader(PROTOFILES_DIR_PATH + INVALID_PATH)
seq_list = loader.get_list_of_sequences()
assert isinstance(seq_list, list)
def test_should_return_list_of_sequences():
loader = ProtoLoader(PROTOFILES_DIR_PATH)
seq_list = loader.get_list_of_sequences()
for seq in seq_list:
assert isinstance(seq, pd.DataFrame)
| 33.413043 | 92 | 0.791802 | 0 | 0 | 0 | 0 | 315 | 0.204945 | 0 | 0 | 90 | 0.058556 |
81b2cfe5a1a59f76e8e712dc7fabc6c32050694c | 18,966 | py | Python | wisdem/assemblies/turbinese/turbine_se_seam.py | dzalkind/WISDEM | 724a7bf9c19bad3ca7e18c240628d1a75b07e3f0 | ["Apache-2.0"] | 1 | 2020-01-22T17:48:30.000Z | 2020-01-22T17:48:30.000Z | wisdem/assemblies/turbinese/turbine_se_seam.py | dzalkind/WISDEM | 724a7bf9c19bad3ca7e18c240628d1a75b07e3f0 | ["Apache-2.0"] | 17 | 2019-09-13T22:21:15.000Z | 2019-10-25T20:04:26.000Z | wisdem/assemblies/turbinese/turbine_se_seam.py | dzalkind/WISDEM | 724a7bf9c19bad3ca7e18c240628d1a75b07e3f0 | ["Apache-2.0"] | 2 | 2019-03-21T10:38:05.000Z | 2021-01-08T18:49:53.000Z |
#!/usr/bin/env python
# encoding: utf-8
"""
turbine.py
Created by Andrew Ning and Katherine Dykes on 2014-01-13.
Copyright (c) NREL. All rights reserved.
"""
from openmdao.main.api import Assembly, Component
from openmdao.main.datatypes.api import Float, Array, Enum, Bool, Int
from openmdao.lib.drivers.api import FixedPointIterator
import numpy as np
#from rotorse.rotor import RotorSE
#from towerse.tower import TowerSE
#from commonse.rna import RNAMass, RotorLoads
from drivewpact.drive import DriveWPACT
from drivewpact.hub import HubWPACT
from commonse.csystem import DirectionVector
from commonse.utilities import interp_with_deriv, hstack, vstack
from drivese.drive import Drive4pt, Drive3pt
from drivese.drivese_utils import blade_moment_transform, blade_force_transform
from drivese.hub import HubSE, Hub_System_Adder_drive
from SEAMLoads.SEAMLoads import SEAMLoads
from SEAMTower.SEAMTower import SEAMTower
from SEAMAero.SEAM_AEP import SEAM_PowerCurve
from SEAMRotor.SEAMRotor import SEAMBladeStructure
# from SEAMGeometry.SEAMGeometry import SEAMGeometry
def connect_io(top, cls):
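# Auto-wire matching variable names between the top-level assembly and the component; names without a top-level counterpart are skipped silently.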
cls_name = cls.name
for name in cls.list_inputs():
try:
top.connect(name, cls_name + '.%s' % name)
except:
# print 'failed connecting', cls_name, name
pass
for name in cls.list_outputs():
try:
top.connect(cls_name + '.%s' % name, name)
except:
pass
def configure_turbine(assembly, with_new_nacelle=True, flexible_blade=False, with_3pt_drive=False):
"""a stand-alone configure method to allow for flatter assemblies
Parameters
----------
assembly : Assembly
an openmdao assembly to be configured
with_new_nacelle : bool
False uses the default implementation, True uses an experimental implementation designed
to smooth out discontinities making in amenable for gradient-based optimization
flexible_blade : bool
if True, internally solves the coupled aero/structural deflection using fixed point iteration.
Note that the coupling is currently only in the flapwise deflection, and is primarily
only important for highly flexible blades. If False, the aero loads are passed
to the structure but there is no further iteration.
"""
#SEAM variables ----------------------------------
#d2e = Float(0.73, iotype='in', desc='Dollars to Euro ratio'
assembly.add('rated_power',Float(3000., iotype='in', units='kW', desc='Turbine rated power', group='Global'))
assembly.add('hub_height', Float(100., iotype='in', units='m', desc='Hub height', group='Global'))
assembly.add('rotor_diameter', Float(110., iotype='in', units='m', desc='Rotor diameter', group='Global'))
# assembly.add('site_type',Enum('onshore', values=('onshore', 'offshore'), iotype='in', desc='Site type', group='Global'))
assembly.add('tower_bottom_diameter', Float(4., iotype='in', desc='Tower bottom diameter', group='Global'))
assembly.add('tower_top_diameter', Float(2., iotype='in', desc='Tower top diameter', group='Global'))
assembly.add('project_lifetime', Float(iotype = 'in', desc='Operating years', group='Global'))
assembly.add('rho_steel', Float(7.8e3, iotype='in', desc='density of steel', group='Tower'))
assembly.add('lifetime_cycles', Float(1.e7, iotype='in', desc='Equivalent lifetime cycles', group='Rotor'))
assembly.add('stress_limit_extreme_tower', Float(iotype='in', units='MPa', desc='Tower ultimate strength', group='Tower'))
assembly.add('stress_limit_fatigue_tower', Float(iotype='in', units='MPa', desc='Tower fatigue strength', group='Tower'))
assembly.add('safety_factor_tower', Float(iotype='in', desc='Tower loads safety factor', group='Tower'))
assembly.add('PMtarget_tower', Float(1., iotype='in', desc='', group='Tower'))
assembly.add('wohler_exponent_tower', Float(4., iotype='in', desc='Tower fatigue Wohler exponent', group='Tower'))
assembly.add('tower_z', Array(iotype='out', desc='Tower discretization'))
assembly.add('tower_wall_thickness', Array(iotype='out', units='m', desc='Tower wall thickness'))
assembly.add('tower_mass', Float(iotype='out', units='kg', desc='Tower mass'))
assembly.add('tsr', Float(iotype='in', units='m', desc='Design tip speed ratio', group='Aero'))
assembly.add('F', Float(iotype='in', desc='Rotor power loss factor', group='Aero'))
assembly.add('wohler_exponent_blade_flap', Float(iotype='in', desc='Wohler Exponent blade flap', group='Rotor'))
assembly.add('nSigma4fatFlap', Float(iotype='in', desc='', group='Loads'))
assembly.add('nSigma4fatTower', Float(iotype='in', desc='', group='Loads'))
assembly.add('dLoad_dU_factor_flap', Float(iotype='in', desc='', group='Loads'))
assembly.add('dLoad_dU_factor_tower', Float(iotype='in', desc='', group='Loads'))
assembly.add('blade_edge_dynload_factor_ext', Float(iotype='in', desc='Extreme dynamic edgewise loads factor', group='Loads'))
assembly.add('blade_edge_dynload_factor_fat', Float(iotype='in', desc='Fatigue dynamic edgewise loads factor', group='Loads'))
assembly.add('PMtarget_blades', Float(1., iotype='in', desc='', group='Rotor'))
assembly.add('max_tipspeed', Float(iotype='in', desc='Maximum tip speed', group='Aero'))
assembly.add('n_wsp', Int(iotype='in', desc='Number of wind speed bins', group='Aero'))
assembly.add('min_wsp', Float(0.0, iotype = 'in', units = 'm/s', desc = 'min wind speed', group='Aero'))
assembly.add('max_wsp', Float(iotype = 'in', units = 'm/s', desc = 'max wind speed', group='Aero'))
assembly.add('turbulence_int', Float(iotype='in', desc='Reference turbulence intensity', group='Plant_AEP'))
# assembly.add('WeibullInput', Bool(True, iotype='in', desc='Flag for Weibull input', group='AEP'))
assembly.add('weibull_C', Float(iotype = 'in', units='m/s', desc = 'Weibull scale factor', group='AEP'))
assembly.add('weibull_k', Float(iotype = 'in', desc='Weibull shape or form factor', group='AEP'))
assembly.add('blade_sections', Int(iotype='in', desc='number of sections along blade', group='Rotor'))
assembly.add('wohler_exponent_blade_flap', Float(iotype='in', desc='Blade flap fatigue Wohler exponent', group='Rotor'))
assembly.add('MaxChordrR', Float(iotype='in', units='m', desc='Spanwise position of maximum chord', group='Rotor'))
assembly.add('tif_blade_root_flap_ext', Float(1., iotype='in', desc='Technology improvement factor flap extreme', group='Rotor'))
assembly.add('tif_blade_root_edge_ext', Float(1., iotype='in', desc='Technology improvement factor edge extreme', group='Rotor'))
assembly.add('tif_blade_root_flap_fat', Float(1., iotype='in', desc='Technology improvement factor flap LEQ', group='Rotor'))
assembly.add('sc_frac_flap', Float(iotype='in', desc='spar cap fraction of chord', group='Rotor'))
assembly.add('sc_frac_edge', Float(iotype='in', desc='spar cap fraction of thickness', group='Rotor'))
assembly.add('safety_factor_blade', Float(iotype='in', desc='Blade loads safety factor', group='Rotor'))
assembly.add('stress_limit_extreme_blade', Float(iotype='in', units='MPa', desc='Blade ultimate strength', group='Rotor'))
assembly.add('stress_limit_fatigue_blade', Float(iotype='in', units='MPa', desc='Blade fatigue strength', group='Rotor'))
assembly.add('AddWeightFactorBlade', Float(iotype='in', desc='Additional weight factor for blade shell', group='Rotor'))
assembly.add('blade_material_density', Float(iotype='in', units='kg/m**3', desc='Average density of blade materials', group='Rotor'))
assembly.add('blade_mass', Float(iotype = 'out', units = 'kg', desc = 'Blade mass'))
# assembly.add('mean_wsp', Float(iotype = 'in', units = 'm/s', desc = 'mean wind speed', group='Aero')) # [m/s]
assembly.add('air_density', Float(iotype = 'in', units = 'kg/m**3', desc = 'density of air', group='Plant_AEP')) # [kg / m^3]
assembly.add('max_Cp', Float(iotype = 'in', desc = 'max CP', group='Aero'))
assembly.add('gearloss_const', Float(iotype = 'in', desc = 'Gear loss constant', group='Drivetrain'))
assembly.add('gearloss_var', Float(iotype = 'in', desc = 'Gear loss variable', group='Drivetrain'))
assembly.add('genloss', Float(iotype = 'in', desc = 'Generator loss', group='Drivetrain'))
assembly.add('convloss', Float(iotype = 'in', desc = 'Converter loss', group='Drivetrain'))
# Outputs
assembly.add('rated_wind_speed', Float(units = 'm / s', iotype='out', desc='wind speed for rated power'))
assembly.add('ideal_power_curve', Array(iotype='out', units='kW', desc='total power before losses and turbulence'))
assembly.add('power_curve', Array(iotype='out', units='kW', desc='total power including losses and turbulence'))
assembly.add('wind_curve', Array(iotype='out', units='m/s', desc='wind curve associated with power curve'))
assembly.add('aep', Float(iotype = 'out', units='mW*h', desc='Annual energy production in mWh'))
assembly.add('total_aep', Float(iotype = 'out', units='mW*h', desc='AEP for total years of production'))
# END SEAM Variables ----------------------
# Add SEAM components and connections
assembly.add('loads', SEAMLoads())
assembly.add('tower_design', SEAMTower(21))
assembly.add('blade_design', SEAMBladeStructure())
assembly.add('aep_calc', SEAM_PowerCurve())
assembly.driver.workflow.add(['loads', 'tower_design', 'blade_design', 'aep_calc'])
assembly.connect('loads.tower_bottom_moment_max', 'tower_design.tower_bottom_moment_max')
assembly.connect('loads.tower_bottom_moment_leq', 'tower_design.tower_bottom_moment_leq')
assembly.connect('loads.blade_root_flap_max', 'blade_design.blade_root_flap_max')
assembly.connect('loads.blade_root_edge_max', 'blade_design.blade_root_edge_max')
assembly.connect('loads.blade_root_flap_leq', 'blade_design.blade_root_flap_leq')
assembly.connect('loads.blade_root_edge_leq', 'blade_design.blade_root_edge_leq')
connect_io(assembly, assembly.aep_calc)
connect_io(assembly, assembly.loads)
connect_io(assembly, assembly.tower_design)
connect_io(assembly, assembly.blade_design)
# End SEAM add components and connections -------------
if with_new_nacelle:
assembly.add('hub',HubSE())
assembly.add('hubSystem',Hub_System_Adder_drive())
if with_3pt_drive:
assembly.add('nacelle', Drive3pt())
else:
assembly.add('nacelle', Drive4pt())
else:
assembly.add('nacelle', DriveWPACT())
assembly.add('hub', HubWPACT())
assembly.driver.workflow.add(['hub', 'nacelle'])
if with_new_nacelle:
assembly.driver.workflow.add(['hubSystem'])
# connections to hub and hub system
assembly.connect('blade_design.blade_mass', 'hub.blade_mass')
assembly.connect('loads.blade_root_flap_max', 'hub.rotor_bending_moment')
assembly.connect('rotor_diameter', ['hub.rotor_diameter'])
assembly.connect('blade_design.blade_root_diameter', 'hub.blade_root_diameter')
assembly.add('blade_number',Int(3,iotype='in',desc='number of blades', group='Aero'))
assembly.connect('blade_number', 'hub.blade_number')
if with_new_nacelle:
assembly.connect('rated_power','hub.machine_rating')
assembly.connect('rotor_diameter', ['hubSystem.rotor_diameter'])
assembly.connect('nacelle.MB1_location','hubSystem.MB1_location') # TODO: bearing locations
assembly.connect('nacelle.L_rb','hubSystem.L_rb')
assembly.add('rotor_tilt', Float(5.0, iotype='in', desc='rotor tilt', group='Rotor'))
assembly.connect('rotor_tilt','hubSystem.shaft_angle')
assembly.connect('hub.hub_diameter','hubSystem.hub_diameter')
assembly.connect('hub.hub_thickness','hubSystem.hub_thickness')
assembly.connect('hub.hub_mass','hubSystem.hub_mass')
assembly.connect('hub.spinner_mass','hubSystem.spinner_mass')
assembly.connect('hub.pitch_system_mass','hubSystem.pitch_system_mass')
# connections to nacelle #TODO: fatigue option variables
assembly.connect('rotor_diameter', 'nacelle.rotor_diameter')
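# The 1.5 factor on rated torque below is presumably a design margin applied before drivetrain sizing (not documented in the source).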
assembly.connect('1.5 * aep_calc.rated_torque', 'nacelle.rotor_torque')
assembly.connect('loads.max_thrust', 'nacelle.rotor_thrust')
assembly.connect('aep_calc.rated_speed', 'nacelle.rotor_speed')
assembly.connect('rated_power', 'nacelle.machine_rating')
assembly.add('generator_speed',Float(1173.7,iotype='in',units='rpm',desc='speed of generator', group='Drivetrain')) # - should be in nacelle
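# Gear ratio follows from generator speed divided by rated rotor speed (the expression connect below).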
assembly.connect('generator_speed/aep_calc.rated_speed', 'nacelle.gear_ratio')
assembly.connect('tower_top_diameter', 'nacelle.tower_top_diameter')
assembly.connect('blade_number * blade_design.blade_mass + hub.hub_system_mass', 'nacelle.rotor_mass') # assuming not already in rotor force / moments
# variable connections for new nacelle
if with_new_nacelle:
assembly.connect('blade_number','nacelle.blade_number')
assembly.connect('rotor_tilt','nacelle.shaft_angle')
assembly.connect('333.3 * rated_power / 1000.0','nacelle.shrink_disc_mass')
assembly.connect('blade_design.blade_root_diameter','nacelle.blade_root_diameter')
#moments - ignoring for now (nacelle will use internal defaults)
#assembly.connect('rotor.Mxyz_0','moments.b1')
#assembly.connect('rotor.Mxyz_120','moments.b2')
#assembly.connect('rotor.Mxyz_240','moments.b3')
#assembly.connect('rotor.Pitch','moments.pitch_angle')
#assembly.connect('rotor.TotalCone','moments.cone_angle')
assembly.connect('1.5 * aep_calc.rated_torque','nacelle.rotor_bending_moment_x') #accounted for in ratedConditions.Q
#assembly.connect('moments.My','nacelle.rotor_bending_moment_y')
#assembly.connect('moments.Mz','nacelle.rotor_bending_moment_z')
#forces - ignoring for now (nacelle will use internal defaults)
#assembly.connect('rotor.Fxyz_0','forces.b1')
#assembly.connect('rotor.Fxyz_120','forces.b2')
#assembly.connect('rotor.Fxyz_240','forces.b3')
#assembly.connect('rotor.Pitch','forces.pitch_angle')
#assembly.connect('rotor.TotalCone','forces.cone_angle')
assembly.connect('loads.max_thrust','nacelle.rotor_force_x')
#assembly.connect('forces.Fy','nacelle.rotor_force_y')
#assembly.connect('forces.Fz','nacelle.rotor_force_z')
class Turbine_SE_SEAM(Assembly):
def configure(self):
configure_turbine(self)
if __name__ == '__main__':
turbine = Turbine_SE_SEAM()
#=========== SEAM inputs
turbine.AddWeightFactorBlade = 1.2
turbine.blade_material_density = 2100.0
turbine.tower_bottom_diameter = 6.
turbine.tower_top_diameter = 3.78
turbine.blade_edge_dynload_factor_ext = 2.5
turbine.blade_edge_dynload_factor_fat = 0.75
turbine.F = 0.777
turbine.MaxChordrR = 0.2
turbine.project_lifetime = 20.0
turbine.lifetime_cycles = 10000000.0
turbine.blade_sections = 21
turbine.PMtarget_tower = 1.0
turbine.PMtarget_blades = 1.0
turbine.safety_factor_blade = 1.1
turbine.safety_factor_tower = 1.5
turbine.stress_limit_extreme_tower = 235.0
turbine.stress_limit_fatigue_tower = 14.885
turbine.stress_limit_extreme_blade = 200.0
turbine.stress_limit_fatigue_blade = 27.0
turbine.tif_blade_root_flap_ext = 1.0
turbine.tif_blade_root_flap_fat = 1.0
turbine.tif_blade_root_edge_ext = 1.0
turbine.weibull_C = 11.0
turbine.weibull_k = 2.0
turbine.wohler_exponent_blade_flap = 10.0
turbine.wohler_exponent_tower = 4.0
turbine.dLoad_dU_factor_flap = 0.9
turbine.dLoad_dU_factor_tower = 0.8
turbine.hub_height = 90.0
turbine.max_tipspeed = 80.0
turbine.n_wsp = 26
turbine.min_wsp = 0.0
turbine.max_wsp = 25.0
turbine.nSigma4fatFlap = 1.2
turbine.nSigma4fatTower = 0.8
turbine.rated_power = 5000.0
turbine.rho_steel = 7800.0
turbine.rotor_diameter = 126.0
turbine.sc_frac_edge = 0.8
turbine.sc_frac_flap = 0.3
turbine.tsr = 8.0
turbine.air_density = 1.225
turbine.turbulence_int = 0.16
turbine.max_Cp = 0.49
turbine.gearloss_const = 0.01 # Fraction
turbine.gearloss_var = 0.014 # Fraction
turbine.genloss = 0.03 # Fraction
turbine.convloss = 0.03 # Fraction
#==============
# === nacelle ======
turbine.blade_number = 3 # turbine level that must be added for SEAM
turbine.rotor_tilt = 5.0 # turbine level that must be added for SEAM
turbine.generator_speed = 1173.7
turbine.nacelle.L_ms = 1.0 # (Float, m): main shaft length downwind of main bearing in low-speed shaft
turbine.nacelle.L_mb = 2.5 # (Float, m): main shaft length in low-speed shaft
turbine.nacelle.h0_front = 1.7 # (Float, m): height of Ibeam in bedplate front
turbine.nacelle.h0_rear = 1.35 # (Float, m): height of Ibeam in bedplate rear
turbine.nacelle.drivetrain_design = 'geared'
turbine.nacelle.crane = True # (Bool): flag for presence of crane
turbine.nacelle.bevel = 0 # (Int): Flag for the presence of a bevel stage - 1 if present, 0 if not
    turbine.nacelle.gear_configuration = 'eep' # (Str): string that represents the configuration of the gearbox (stage number and types)
turbine.nacelle.Np = [3, 3, 1] # (Array): number of planets in each stage
turbine.nacelle.ratio_type = 'optimal' # (Str): optimal or empirical stage ratios
turbine.nacelle.shaft_type = 'normal' # (Str): normal or short shaft length
    #turbine.nacelle.shaft_angle = 5.0 # (Float, deg): Angle of the LSS inclination with respect to the horizontal
turbine.nacelle.shaft_ratio = 0.10 # (Float): Ratio of inner diameter to outer diameter. Leave zero for solid LSS
turbine.nacelle.carrier_mass = 8000.0 # estimated for 5 MW
turbine.nacelle.mb1Type = 'CARB' # (Str): Main bearing type: CARB, TRB or SRB
turbine.nacelle.mb2Type = 'SRB' # (Str): Second bearing type: CARB, TRB or SRB
turbine.nacelle.yaw_motors_number = 8.0 # (Float): number of yaw motors
turbine.nacelle.uptower_transformer = True
turbine.nacelle.flange_length = 0.5 #m
turbine.nacelle.gearbox_cm = 0.1
turbine.nacelle.hss_length = 1.5
turbine.nacelle.overhang = 5.0 #TODO - should come from turbine configuration level
turbine.nacelle.check_fatigue = 0 #0 if no fatigue check, 1 if parameterized fatigue check, 2 if known loads inputs
# =================
# === run ===
turbine.run()
print 'mass rotor blades (kg) =', turbine.blade_number * turbine.blade_design.blade_mass
print 'mass hub system (kg) =', turbine.hubSystem.hub_system_mass
print 'mass nacelle (kg) =', turbine.nacelle.nacelle_mass
print 'mass tower (kg) =', turbine.tower_design.tower_mass
# =================
| 54.188571 | 154 | 0.703048 | 90 | 0.004745 | 0 | 0 | 0 | 0 | 0 | 0 | 9,374 | 0.494253 |
81b36615a4cceca74102543564ca1a7f49b62e92 | 2,880 | py | Python | src/triage/component/results_schema/alembic/versions/5dd2ba8222b1_add_run_type.py | josephbajor/triage_NN | cbaee6e5a06e597c91fec372717d89a2b5f34fa5 | [
"MIT"
]
| 160 | 2017-06-13T09:59:59.000Z | 2022-03-21T22:00:35.000Z | src/triage/component/results_schema/alembic/versions/5dd2ba8222b1_add_run_type.py | josephbajor/triage_NN | cbaee6e5a06e597c91fec372717d89a2b5f34fa5 | [
"MIT"
]
| 803 | 2016-10-21T19:44:02.000Z | 2022-03-29T00:02:33.000Z | src/triage/component/results_schema/alembic/versions/5dd2ba8222b1_add_run_type.py | josephbajor/triage_NN | cbaee6e5a06e597c91fec372717d89a2b5f34fa5 | [
"MIT"
]
| 59 | 2017-01-31T22:10:22.000Z | 2022-03-19T12:35:03.000Z | """add run_type
Revision ID: 5dd2ba8222b1
Revises: 079a74c15e8b
Create Date: 2021-07-22 23:53:04.043651
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '5dd2ba8222b1'
down_revision = '079a74c15e8b'
branch_labels = None
depends_on = None
def upgrade():
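    # Generalize experiment_runs into triage_runs: tag each row with a run_type,
    # rename experiment_hash to run_hash, and add retrain bookkeeping tables.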
op.add_column('experiment_runs', sa.Column('run_type', sa.Text(), nullable=True), schema='triage_metadata')
op.execute("UPDATE triage_metadata.experiment_runs SET run_type='experiment' WHERE run_type IS NULL")
op.alter_column('experiment_runs', 'experiment_hash', nullable=True, new_column_name='run_hash', schema='triage_metadata')
op.drop_constraint('experiment_runs_experiment_hash_fkey', 'experiment_runs', type_='foreignkey', schema='triage_metadata')
op.execute("ALTER TABLE triage_metadata.experiment_runs RENAME TO triage_runs")
op.create_table('retrain',
sa.Column('retrain_hash', sa.Text(), nullable=False),
sa.Column('config', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('prediction_date', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('retrain_hash'),
schema='triage_metadata',
)
op.alter_column('models', 'built_in_experiment_run', nullable=False, new_column_name='built_in_triage_run', schema='triage_metadata')
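    # preserve the existing built_by_experiment values in a side table before dropping the column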
op.execute("CREATE TABLE triage_metadata.deprecated_models_built_by_experiment AS SELECT model_id, model_hash, built_by_experiment FROM triage_metadata.models")
op.drop_column('models', 'built_by_experiment', schema='triage_metadata')
op.create_table('retrain_models',
sa.Column('retrain_hash', sa.String(), nullable=False),
sa.Column('model_hash', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['retrain_hash'], ['triage_metadata.retrain.retrain_hash'], ),
sa.PrimaryKeyConstraint('retrain_hash', 'model_hash'),
schema='triage_metadata'
)
def downgrade():
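    # Reverse of upgrade(). Note: built_by_experiment is re-added as a nullable
    # column but its values are not repopulated from the deprecated side table.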
op.execute("ALTER TABLE triage_metadata.triage_runs RENAME TO experiment_runs")
op.drop_column('experiment_runs', 'run_type', schema='triage_metadata')
op.alter_column('experiment_runs', 'run_hash', nullable=True, new_column_name='experiment_hash', schema='triage_metadata')
op.create_foreign_key('experiment_runs_experiment_hash_fkey', 'experiment_runs', 'experiments', ['experiment_hash'], ['experiment_hash'], source_schema='triage_metadata', referent_schema='triage_metadata')
op.drop_table('retrain_models', schema='triage_metadata')
op.drop_table('retrain', schema='triage_metadata')
op.add_column('models', sa.Column('built_by_experiment', sa.Text(), nullable=True), schema='triage_metadata')
op.alter_column('models', 'built_in_triage_run', nullable=False, new_column_name='built_in_experiment_run', schema='triage_metadata')
| 48.813559 | 209 | 0.755556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,487 | 0.516319 |
81b43298bda18b704f77ed56a530bc20370af1bf | 126 | py | Python | projects/PanopticFCN_cityscapes/panopticfcn/__init__.py | fatihyildiz-cs/detectron2 | 700b1e6685ca95a60e27cb961f363a2ca7f30d3c | [
"Apache-2.0"
]
| 166 | 2020-12-01T18:34:47.000Z | 2021-03-27T04:20:15.000Z | panopticfcn/__init__.py | ywcmaike/PanopticFCN | 9201b06d871df128547ce36b80f6caceb105465d | [
"Apache-2.0"
]
| 28 | 2021-05-20T08:59:05.000Z | 2022-03-18T13:17:35.000Z | panopticfcn/__init__.py | ywcmaike/PanopticFCN | 9201b06d871df128547ce36b80f6caceb105465d | [
"Apache-2.0"
]
| 33 | 2021-05-23T14:09:19.000Z | 2022-03-30T14:27:55.000Z | from .config import add_panopticfcn_config
from .panoptic_seg import PanopticFCN
from .build_solver import build_lr_scheduler
| 31.5 | 44 | 0.880952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
81b626c300ff32d3e035f1c56e66bff3d7d1f4dc | 592 | py | Python | 03_lecture_Django/lecture3/hello/views.py | MoStgt/CS50 | 62bd6eb38bea745c6356e1a8f03adb6ab70e2a37 | [
"MIT"
]
| null | null | null | 03_lecture_Django/lecture3/hello/views.py | MoStgt/CS50 | 62bd6eb38bea745c6356e1a8f03adb6ab70e2a37 | [
"MIT"
]
| null | null | null | 03_lecture_Django/lecture3/hello/views.py | MoStgt/CS50 | 62bd6eb38bea745c6356e1a8f03adb6ab70e2a37 | [
"MIT"
]
| null | null | null | from http.client import HTTPResponse
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
# def index(request):
# return HttpResponse("Hello World!")
def index(request):
return render(request, "hello/index.html")
def brian(request):
return HttpResponse("Hello Brian")
def david(request):
return HttpResponse("Hello David")
# def greet(request, name):
# return HttpResponse(f"Hello, {name.capitalize()}!")
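# 'name' is captured from the URL pattern (assumed: path("greet/<str:name>", views.greet) in urls.py)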
def greet(request, name):
return render(request, "hello/greet.html", {
"name": name.capitalize()
}) | 24.666667 | 57 | 0.706081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 239 | 0.403716 |
81b65c798718e0eb1c455ac640017cba04a70ce8 | 19,426 | py | Python | txdav/common/datastore/upgrade/test/test_migrate.py | backwardn/ccs-calendarserver | 13c706b985fb728b9aab42dc0fef85aae21921c3 | [
"Apache-2.0"
]
| 462 | 2016-08-14T17:43:24.000Z | 2022-03-17T07:38:16.000Z | txdav/common/datastore/upgrade/test/test_migrate.py | backwardn/ccs-calendarserver | 13c706b985fb728b9aab42dc0fef85aae21921c3 | [
"Apache-2.0"
]
| 72 | 2016-09-01T23:19:35.000Z | 2020-02-05T02:09:26.000Z | txdav/common/datastore/upgrade/test/test_migrate.py | backwardn/ccs-calendarserver | 13c706b985fb728b9aab42dc0fef85aae21921c3 | [
"Apache-2.0"
]
| 171 | 2016-08-16T03:50:30.000Z | 2022-03-26T11:49:55.000Z | ##
# Copyright (c) 2010-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Tests for L{txdav.common.datastore.upgrade.migrate}.
"""
from twext.enterprise.adbapi2 import Pickle
from twext.enterprise.dal.syntax import Delete
from twext.python.filepath import CachingFilePath
from txweb2.http_headers import MimeType
from twisted.internet.defer import inlineCallbacks, Deferred, returnValue
from twisted.internet.protocol import Protocol
from twisted.protocols.amp import AMP, Command, String
from twisted.python.modules import getModule
from twisted.python.reflect import qual, namedAny
from twisted.trial.unittest import TestCase
from twistedcaldav import customxml, caldavxml
from twistedcaldav.config import config
from twistedcaldav.ical import Component
from txdav.base.propertystore.base import PropertyName
from txdav.caldav.datastore.test.common import CommonTests
from txdav.carddav.datastore.test.common import CommonTests as ABCommonTests
from txdav.common.datastore.file import CommonDataStore
from txdav.common.datastore.sql_tables import schema
from txdav.common.datastore.test.util import SQLStoreBuilder
from txdav.common.datastore.test.util import (
populateCalendarsFrom, StubNotifierFactory, resetCalendarMD5s,
populateAddressBooksFrom, resetAddressBookMD5s, deriveValue,
withSpecialValue, CommonCommonTests
)
from txdav.common.datastore.upgrade.migrate import UpgradeToDatabaseStep, \
StoreSpawnerService, swapAMP
from txdav.xml import element
import copy
class CreateStore(Command):
"""
Create a store in a subprocess.
"""
arguments = [('delegateTo', String())]
class PickleConfig(Command):
"""
Unpickle some configuration in a subprocess.
"""
arguments = [('delegateTo', String()),
('config', Pickle())]
class StoreCreator(AMP):
"""
Helper protocol.
"""
@CreateStore.responder
def createStore(self, delegateTo):
"""
Create a store and pass it to the named delegate class.
"""
swapAMP(self, namedAny(delegateTo)(SQLStoreBuilder.childStore()))
return {}
@PickleConfig.responder
def pickleConfig(self, config, delegateTo):
# from twistedcaldav.config import config as globalConfig
# globalConfig._data = config._data
swapAMP(self, namedAny(delegateTo)(config))
return {}
class StubSpawner(StoreSpawnerService):
"""
Stub spawner service which populates the store forcibly.
"""
def __init__(self, config=None):
super(StubSpawner, self).__init__()
self.config = config
@inlineCallbacks
def spawnWithStore(self, here, there):
"""
        'here' and 'there' are the helper protocol classes; 'there' expects to
        be constructed with a store instance in the child process.
"""
master = yield self.spawn(AMP(), StoreCreator)
yield master.callRemote(CreateStore, delegateTo=qual(there))
returnValue(swapAMP(master, here))
@inlineCallbacks
def spawnWithConfig(self, config, here, there):
"""
Similar to spawnWithStore except the child process gets a configuration
object instead.
"""
master = yield self.spawn(AMP(), StoreCreator)
subcfg = copy.deepcopy(self.config)
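        # strip post-update hooks before pickling; they may hold callables the child cannot unpickle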
del subcfg._postUpdateHooks[:]
yield master.callRemote(PickleConfig, config=subcfg,
delegateTo=qual(there))
returnValue(swapAMP(master, here))
class HomeMigrationTests(CommonCommonTests, TestCase):
"""
Tests for L{UpgradeToDatabaseStep}.
"""
av1 = Component.fromString("""BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//calendarserver.org//Zonal//EN
BEGIN:VAVAILABILITY
ORGANIZER:mailto:[email protected]
UID:[email protected]
DTSTAMP:20061005T133225Z
DTEND:20140101T000000Z
BEGIN:AVAILABLE
UID:[email protected]
DTSTAMP:20061005T133225Z
SUMMARY:Monday to Friday from 9:00 to 17:00
DTSTART:20130101T090000Z
DTEND:20130101T170000Z
RRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
END:AVAILABLE
END:VAVAILABILITY
END:VCALENDAR
""")
@inlineCallbacks
def setUp(self):
"""
Set up two stores to migrate between.
"""
yield super(HomeMigrationTests, self).setUp()
yield self.buildStoreAndDirectory(
extraUids=(
u"home1",
u"home2",
u"home3",
u"home_defaults",
u"home_no_splits",
u"home_splits",
u"home_splits_shared",
)
)
self.sqlStore = self.store
# Add some files to the file store.
self.filesPath = CachingFilePath(self.mktemp())
self.filesPath.createDirectory()
fileStore = self.fileStore = CommonDataStore(
self.filesPath, {"push": StubNotifierFactory()}, self.directory, True, True
)
self.upgrader = UpgradeToDatabaseStep(self.fileStore, self.sqlStore)
requirements = CommonTests.requirements
extras = deriveValue(self, "extraRequirements", lambda t: {})
requirements = self.mergeRequirements(requirements, extras)
yield populateCalendarsFrom(requirements, fileStore)
md5s = CommonTests.md5s
yield resetCalendarMD5s(md5s, fileStore)
self.filesPath.child("calendars").child(
"__uids__").child("ho").child("me").child("home1").child(
".some-extra-data").setContent("some extra data")
requirements = ABCommonTests.requirements
yield populateAddressBooksFrom(requirements, fileStore)
md5s = ABCommonTests.md5s
yield resetAddressBookMD5s(md5s, fileStore)
self.filesPath.child("addressbooks").child(
"__uids__").child("ho").child("me").child("home1").child(
".some-extra-data").setContent("some extra data")
# Add some properties we want to check get migrated over
txn = self.fileStore.newTransaction()
home = yield txn.calendarHomeWithUID("home_defaults")
cal = yield home.calendarWithName("calendar_1")
props = cal.properties()
props[PropertyName.fromElement(caldavxml.SupportedCalendarComponentSet)] = caldavxml.SupportedCalendarComponentSet(
caldavxml.CalendarComponent(name="VEVENT"),
caldavxml.CalendarComponent(name="VTODO"),
)
props[PropertyName.fromElement(element.ResourceType)] = element.ResourceType(
element.Collection(),
caldavxml.Calendar(),
)
props[PropertyName.fromElement(customxml.GETCTag)] = customxml.GETCTag.fromString("foobar")
inbox = yield home.calendarWithName("inbox")
props = inbox.properties()
props[PropertyName.fromElement(customxml.CalendarAvailability)] = customxml.CalendarAvailability.fromString(str(self.av1))
props[PropertyName.fromElement(caldavxml.ScheduleDefaultCalendarURL)] = caldavxml.ScheduleDefaultCalendarURL(
element.HRef.fromString("/calendars/__uids__/home_defaults/calendar_1"),
)
yield txn.commit()
def mergeRequirements(self, a, b):
"""
Merge two requirements dictionaries together, modifying C{a} and
returning it.
@param a: Some requirements, in the format of
L{CommonTests.requirements}.
@type a: C{dict}
@param b: Some additional requirements, to be merged into C{a}.
@type b: C{dict}
@return: C{a}
@rtype: C{dict}
"""
for homeUID in b:
homereq = a.setdefault(homeUID, {})
homeExtras = b[homeUID]
for calendarUID in homeExtras:
calreq = homereq.setdefault(calendarUID, {})
calendarExtras = homeExtras[calendarUID]
calreq.update(calendarExtras)
return a
@withSpecialValue(
"extraRequirements",
{
"home1": {
"calendar_1": {
"bogus.ics": (
getModule("twistedcaldav").filePath.sibling("zoneinfo")
.child("EST.ics").getContent(),
CommonTests.metadata1
)
}
}
}
)
@inlineCallbacks
def test_unknownTypeNotMigrated(self):
"""
The only types of calendar objects that should get migrated are VEVENTs
and VTODOs. Other component types, such as free-standing VTIMEZONEs,
don't have a UID and can't be stored properly in the database, so they
should not be migrated.
"""
yield self.upgrader.stepWithResult(None)
txn = self.sqlStore.newTransaction()
self.addCleanup(txn.commit)
self.assertIdentical(
None,
(yield (yield (yield (
yield txn.calendarHomeWithUID("home1")
).calendarWithName("calendar_1"))
).calendarObjectWithName("bogus.ics"))
)
@inlineCallbacks
def test_upgradeCalendarHomes(self):
"""
L{UpgradeToDatabaseService.startService} will do the upgrade, then
start its dependent service by adding it to its service hierarchy.
"""
# Create a fake directory in the same place as a home, but with a non-existent uid
fake_dir = self.filesPath.child("calendars").child("__uids__").child("ho").child("me").child("foobar")
fake_dir.makedirs()
# Create a fake file in the same place as a home,with a name that matches the hash uid prefix
fake_file = self.filesPath.child("calendars").child("__uids__").child("ho").child("me").child("home_file")
fake_file.setContent("")
yield self.upgrader.stepWithResult(None)
txn = self.sqlStore.newTransaction()
self.addCleanup(txn.commit)
for uid in CommonTests.requirements:
if CommonTests.requirements[uid] is not None:
self.assertNotIdentical(
None, (yield txn.calendarHomeWithUID(uid))
)
# Successfully migrated calendar homes are deleted
self.assertFalse(self.filesPath.child("calendars").child(
"__uids__").child("ho").child("me").child("home1").exists())
# Want metadata preserved
home = (yield txn.calendarHomeWithUID("home1"))
calendar = (yield home.calendarWithName("calendar_1"))
for name, metadata, md5 in (
("1.ics", CommonTests.metadata1, CommonTests.md5Values[0]),
("2.ics", CommonTests.metadata2, CommonTests.md5Values[1]),
("3.ics", CommonTests.metadata3, CommonTests.md5Values[2]),
):
object = (yield calendar.calendarObjectWithName(name))
self.assertEquals(object.getMetadata(), metadata)
self.assertEquals(object.md5(), md5)
@withSpecialValue(
"extraRequirements",
{
"nonexistent": {
"calendar_1": {
}
}
}
)
@inlineCallbacks
def test_upgradeCalendarHomesMissingDirectoryRecord(self):
"""
Test an upgrade where a directory record is missing for a home;
the original home directory will remain on disk.
"""
yield self.upgrader.stepWithResult(None)
txn = self.sqlStore.newTransaction()
self.addCleanup(txn.commit)
for uid in CommonTests.requirements:
if CommonTests.requirements[uid] is not None:
self.assertNotIdentical(
None, (yield txn.calendarHomeWithUID(uid))
)
self.assertIdentical(None, (yield txn.calendarHomeWithUID(u"nonexistent")))
# Skipped calendar homes are not deleted
self.assertTrue(self.filesPath.child("calendars").child(
"__uids__").child("no").child("ne").child("nonexistent").exists())
@inlineCallbacks
def test_upgradeExistingHome(self):
"""
L{UpgradeToDatabaseService.startService} will skip migrating existing
homes.
"""
startTxn = self.sqlStore.newTransaction("populate empty sample")
yield startTxn.calendarHomeWithUID("home1", create=True)
yield startTxn.commit()
yield self.upgrader.stepWithResult(None)
vrfyTxn = self.sqlStore.newTransaction("verify sample still empty")
self.addCleanup(vrfyTxn.commit)
home = yield vrfyTxn.calendarHomeWithUID("home1")
# The default calendar is still there.
self.assertNotIdentical(None, (yield home.calendarWithName("calendar")))
# The migrated calendar isn't.
self.assertIdentical(None, (yield home.calendarWithName("calendar_1")))
@inlineCallbacks
def test_upgradeAttachments(self):
"""
L{UpgradeToDatabaseService.startService} upgrades calendar attachments
as well.
"""
# Need to tweak config and settings to setup dropbox to work
self.patch(config, "EnableDropBox", True)
self.patch(config, "EnableManagedAttachments", False)
self.sqlStore.enableManagedAttachments = False
txn = self.sqlStore.newTransaction()
cs = schema.CALENDARSERVER
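        # remove the MANAGED-ATTACHMENTS marker row so the store looks like a pre-managed-attachments deployment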
yield Delete(
From=cs,
Where=cs.NAME == "MANAGED-ATTACHMENTS"
).on(txn)
yield txn.commit()
txn = self.fileStore.newTransaction()
committed = []
def maybeCommit():
if not committed:
committed.append(True)
return txn.commit()
self.addCleanup(maybeCommit)
@inlineCallbacks
def getSampleObj():
home = (yield txn.calendarHomeWithUID("home1"))
calendar = (yield home.calendarWithName("calendar_1"))
object = (yield calendar.calendarObjectWithName("1.ics"))
returnValue(object)
inObject = yield getSampleObj()
someAttachmentName = "some-attachment"
someAttachmentType = MimeType.fromString("application/x-custom-type")
attachment = yield inObject.createAttachmentWithName(
someAttachmentName,
)
transport = attachment.store(someAttachmentType)
someAttachmentData = "Here is some data for your attachment, enjoy."
transport.write(someAttachmentData)
yield transport.loseConnection()
yield maybeCommit()
yield self.upgrader.stepWithResult(None)
committed = []
txn = self.sqlStore.newTransaction()
outObject = yield getSampleObj()
outAttachment = yield outObject.attachmentWithName(someAttachmentName)
allDone = Deferred()
class SimpleProto(Protocol):
data = ''
def dataReceived(self, data):
self.data += data
def connectionLost(self, reason):
allDone.callback(self.data)
self.assertEquals(outAttachment.contentType(), someAttachmentType)
outAttachment.retrieve(SimpleProto())
allData = yield allDone
self.assertEquals(allData, someAttachmentData)
@inlineCallbacks
def test_upgradeAddressBookHomes(self):
"""
L{UpgradeToDatabaseService.startService} will do the upgrade, then
start its dependent service by adding it to its service hierarchy.
"""
yield self.upgrader.stepWithResult(None)
txn = self.sqlStore.newTransaction()
self.addCleanup(txn.commit)
for uid in ABCommonTests.requirements:
if ABCommonTests.requirements[uid] is not None:
self.assertNotIdentical(
None, (yield txn.addressbookHomeWithUID(uid))
)
# Successfully migrated addressbook homes are deleted
self.assertFalse(self.filesPath.child("addressbooks").child(
"__uids__").child("ho").child("me").child("home1").exists())
# Want metadata preserved
home = (yield txn.addressbookHomeWithUID("home1"))
adbk = (yield home.addressbookWithName("addressbook"))
for name, md5 in (
("1.vcf", ABCommonTests.md5Values[0]),
("2.vcf", ABCommonTests.md5Values[1]),
("3.vcf", ABCommonTests.md5Values[2]),
):
object = (yield adbk.addressbookObjectWithName(name))
self.assertEquals(object.md5(), md5)
@inlineCallbacks
def test_upgradeProperties(self):
"""
L{UpgradeToDatabaseService.startService} will do the upgrade, then
start its dependent service by adding it to its service hierarchy.
"""
yield self.upgrader.stepWithResult(None)
txn = self.sqlStore.newTransaction()
self.addCleanup(txn.commit)
# Want metadata preserved
home = (yield txn.calendarHomeWithUID("home_defaults"))
cal = (yield home.calendarWithName("calendar_1"))
inbox = (yield home.calendarWithName("inbox"))
# Supported components
self.assertEqual(cal.getSupportedComponents(), "VEVENT")
self.assertTrue(cal.properties().get(PropertyName.fromElement(caldavxml.SupportedCalendarComponentSet)) is None)
# Resource type removed
self.assertTrue(cal.properties().get(PropertyName.fromElement(element.ResourceType)) is None)
# Ctag removed
self.assertTrue(cal.properties().get(PropertyName.fromElement(customxml.GETCTag)) is None)
# Availability
self.assertEquals(str(home.getAvailability()), str(self.av1))
self.assertTrue(inbox.properties().get(PropertyName.fromElement(customxml.CalendarAvailability)) is None)
# Default calendar
self.assertTrue(home.isDefaultCalendar(cal))
self.assertTrue(inbox.properties().get(PropertyName.fromElement(caldavxml.ScheduleDefaultCalendarURL)) is None)
def test_fileStoreFromPath(self):
"""
Verify that fileStoreFromPath() will return a CommonDataStore if
the given path contains either "calendars" or "addressbooks"
sub-directories. Otherwise it returns None
"""
# No child directories
docRootPath = CachingFilePath(self.mktemp())
docRootPath.createDirectory()
step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
self.assertEquals(step, None)
# "calendars" child directory exists
childPath = docRootPath.child("calendars")
childPath.createDirectory()
step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
self.assertTrue(isinstance(step, CommonDataStore))
childPath.remove()
# "addressbooks" child directory exists
childPath = docRootPath.child("addressbooks")
childPath.createDirectory()
step = UpgradeToDatabaseStep.fileStoreFromPath(docRootPath)
self.assertTrue(isinstance(step, CommonDataStore))
childPath.remove()
| 37.357692 | 130 | 0.653197 | 17,380 | 0.894677 | 13,024 | 0.670442 | 14,301 | 0.736178 | 0 | 0 | 5,561 | 0.286266 |
81b69499f86483624239f156b1fed165ba08aee8 | 1,770 | py | Python | generated-libraries/python/netapp/fcp/aliases_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
]
| 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/fcp/aliases_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
]
| null | null | null | generated-libraries/python/netapp/fcp/aliases_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
]
| null | null | null | from netapp.netapp_object import NetAppObject
class AliasesInfo(NetAppObject):
"""
A list of WWPNs and their aliases generated according
to the input - alias, WWPN or nothing.
"""
_vserver = None
@property
def vserver(self):
"""
Vserver containing the alias
"""
return self._vserver
@vserver.setter
def vserver(self, val):
if val != None:
self.validate('vserver', val)
self._vserver = val
_aliases_wwpn = None
@property
def aliases_wwpn(self):
"""
The FCP WWPN for which the alias is given
"""
return self._aliases_wwpn
@aliases_wwpn.setter
def aliases_wwpn(self, val):
if val != None:
self.validate('aliases_wwpn', val)
self._aliases_wwpn = val
_aliases_alias = None
@property
def aliases_alias(self):
"""
The 32-character alias for a given FCP WWPN
"""
return self._aliases_alias
@aliases_alias.setter
def aliases_alias(self, val):
if val != None:
self.validate('aliases_alias', val)
self._aliases_alias = val
@staticmethod
def get_api_name():
return "aliases-info"
@staticmethod
def get_desired_attrs():
return [
'vserver',
'aliases-wwpn',
'aliases-alias',
]
def describe_properties(self):
return {
'vserver': { 'class': basestring, 'is_list': False, 'required': 'optional' },
'aliases_wwpn': { 'class': basestring, 'is_list': False, 'required': 'required' },
'aliases_alias': { 'class': basestring, 'is_list': False, 'required': 'required' },
}
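
# Minimal usage sketch (assumed; not taken from the SDK docs): populate the
# desired attributes on an instance before issuing the corresponding API call.
#   info = AliasesInfo()
#   info.vserver = 'vs1'
#   info.aliases_wwpn = '10:00:00:00:c9:30:15:6a'
#   info.aliases_alias = 'host1_port1'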
| 26.818182 | 95 | 0.565537 | 1,722 | 0.972881 | 0 | 0 | 1,089 | 0.615254 | 0 | 0 | 532 | 0.300565 |
81b7304301ce82e40f227f18f20c21206d2e4a7b | 5,745 | py | Python | imagernn/generic_batch_generator.py | OnlyBelter/learn_neuralTalk | 53c78962960e142bbd149eb92405029b795845ed | [
"MIT"
]
| 7 | 2019-03-18T10:19:11.000Z | 2021-11-10T02:10:55.000Z | imagernn/generic_batch_generator.py | AllenMas/learn_neuralTalk | 53c78962960e142bbd149eb92405029b795845ed | [
"MIT"
]
| null | null | null | imagernn/generic_batch_generator.py | AllenMas/learn_neuralTalk | 53c78962960e142bbd149eb92405029b795845ed | [
"MIT"
]
| 8 | 2017-11-17T08:54:51.000Z | 2021-05-29T15:08:34.000Z | import numpy as np
import code
from imagernn.utils import merge_init_structs, initw, accumNpDicts
from imagernn.lstm_generator import LSTMGenerator
from imagernn.rnn_generator import RNNGenerator
def decodeGenerator(generator):
  if generator == 'lstm':
    return LSTMGenerator
  elif generator == 'rnn':
    return RNNGenerator
  else:
    raise Exception('generator %s is not yet supported' % (generator,))
class GenericBatchGenerator:
"""
Base batch generator class.
This class is aware of the fact that we are generating
sentences from images.
"""
@staticmethod
def init(params, misc):
# inputs
image_encoding_size = params.get('image_encoding_size', 128)
word_encoding_size = params.get('word_encoding_size', 128)
hidden_size = params.get('hidden_size', 128)
generator = params.get('generator', 'lstm')
vocabulary_size = len(misc['wordtoix'])
output_size = len(misc['ixtoword']) # these should match though
image_size = 4096 # size of CNN vectors hardcoded here
if generator == 'lstm':
assert image_encoding_size == word_encoding_size, 'this implementation does not support different sizes for these parameters'
# initialize the encoder models
model = {}
model['We'] = initw(image_size, image_encoding_size) # image encoder
model['be'] = np.zeros((1,image_encoding_size))
model['Ws'] = initw(vocabulary_size, word_encoding_size) # word encoder
update = ['We', 'be', 'Ws']
regularize = ['We', 'Ws']
init_struct = { 'model' : model, 'update' : update, 'regularize' : regularize}
# descend into the specific Generator and initialize it
    # look up the concrete Generator class from the config string (kept here so init stays self-contained)
Generator = decodeGenerator(generator)
generator_init_struct = Generator.init(word_encoding_size, hidden_size, output_size)
merge_init_structs(init_struct, generator_init_struct)
return init_struct
@staticmethod
def forward(batch, model, params, misc, predict_mode = False):
""" iterates over items in the batch and calls generators on them """
# we do the encoding here across all images/words in batch in single matrix
# multiplies to gain efficiency. The RNNs are then called individually
# in for loop on per-image-sentence pair and all they are concerned about is
# taking single matrix of vectors and doing the forward/backward pass without
# knowing anything about images, sentences or anything of that sort.
# encode all images
# concatenate as rows. If N is number of image-sentence pairs,
# F will be N x image_size
F = np.row_stack(x['image']['feat'] for x in batch)
We = model['We']
be = model['be']
Xe = F.dot(We) + be # Xe becomes N x image_encoding_size
# decode the generator we wish to use
generator_str = params.get('generator', 'lstm')
Generator = decodeGenerator(generator_str)
# encode all words in all sentences (which exist in our vocab)
wordtoix = misc['wordtoix']
Ws = model['Ws']
gen_caches = []
Ys = [] # outputs
for i,x in enumerate(batch):
# take all words in this sentence and pluck out their word vectors
# from Ws. Then arrange them in a single matrix Xs
# Note that we are setting the start token as first vector
# and then all the words afterwards. And start token is the first row of Ws
ix = [0] + [ wordtoix[w] for w in x['sentence']['tokens'] if w in wordtoix ]
Xs = np.row_stack( [Ws[j, :] for j in ix] )
Xi = Xe[i,:]
# forward prop through the RNN
gen_Y, gen_cache = Generator.forward(Xi, Xs, model, params, predict_mode = predict_mode)
gen_caches.append((ix, gen_cache))
Ys.append(gen_Y)
# back up information we need for efficient backprop
cache = {}
if not predict_mode:
# ok we need cache as well because we'll do backward pass
cache['gen_caches'] = gen_caches
cache['Xe'] = Xe
cache['Ws_shape'] = Ws.shape
cache['F'] = F
cache['generator_str'] = generator_str
return Ys, cache
@staticmethod
def backward(dY, cache):
Xe = cache['Xe']
generator_str = cache['generator_str']
dWs = np.zeros(cache['Ws_shape'])
gen_caches = cache['gen_caches']
F = cache['F']
dXe = np.zeros(Xe.shape)
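    # dXe accumulates, per batch item, the gradients flowing back into the encoded image vectors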
Generator = decodeGenerator(generator_str)
# backprop each item in the batch
grads = {}
for i in xrange(len(gen_caches)):
ix, gen_cache = gen_caches[i] # unpack
local_grads = Generator.backward(dY[i], gen_cache)
dXs = local_grads['dXs'] # intercept the gradients wrt Xi and Xs
del local_grads['dXs']
dXi = local_grads['dXi']
del local_grads['dXi']
accumNpDicts(grads, local_grads) # add up the gradients wrt model parameters
# now backprop from dXs to the image vector and word vectors
dXe[i,:] += dXi # image vector
for n,j in enumerate(ix): # and now all the other words
dWs[j,:] += dXs[n,:]
# finally backprop into the image encoder
dWe = F.transpose().dot(dXe)
dbe = np.sum(dXe, axis=0, keepdims = True)
accumNpDicts(grads, { 'We':dWe, 'be':dbe, 'Ws':dWs })
return grads
@staticmethod
def predict(batch, model, params, **kwparams):
""" some code duplication here with forward pass, but I think we want the freedom in future """
F = np.row_stack(x['image']['feat'] for x in batch)
We = model['We']
be = model['be']
Xe = F.dot(We) + be # Xe becomes N x image_encoding_size
generator_str = params['generator']
Generator = decodeGenerator(generator_str)
Ys = []
for i,x in enumerate(batch):
gen_Y = Generator.predict(Xe[i, :], model, model['Ws'], params, **kwparams)
Ys.append(gen_Y)
return Ys
| 37.064516 | 131 | 0.673281 | 5,323 | 0.926545 | 0 | 0 | 5,149 | 0.896258 | 0 | 0 | 2,323 | 0.404352 |