ext: stringclasses (9 values)
sha: stringlengths (40 to 40)
content: stringlengths (3 to 1.04M)
py
1a40596ddf8fdd4a8e7f064a95c5bb1bf81e0726
from dataclasses import dataclass, field
from typing import List

from xsdata.models.datatype import XmlPeriod

__NAMESPACE__ = "http://xstest-tns/schema11_D3_3_14_v01"


@dataclass
class Root:
    class Meta:
        name = "root"
        namespace = "http://xstest-tns/schema11_D3_3_14_v01"

    el_date: List[XmlPeriod] = field(
        default_factory=list,
        metadata={
            "name": "elDate",
            "type": "Element",
            "namespace": "",
            "min_occurs": 1,
            "min_inclusive": XmlPeriod("---16+13:00"),
        }
    )
py
1a405c514486d51ed80afb5b7fd2450aac3d256e
""" SEASON GLOBAL """ from season.models import Fixture from season.core.weeks import getAllWeeks __all__ = ['endOfSeason','midSeasonWeek','midSeason'] def endOfSeason(): """ indicates whether the season is finished i.e. all fixtures have a final score """ return Fixture.objects.filter(score_a=None,score_b=None).count() == 0 def midSeasonWeek(): """ returns the number of the week of the mid season """ try: #mid season week return int(int(getAllWeeks()[-1])/2) except: pass #should not get here return 0 def midSeason(): """ indicates whether the season has reached end of first half i.e. all first half fixtures have a final score """ return Fixture.objects.filter(week__lte=midSeasonWeek(),score_a=None,score_b=None).count() == 0
py
1a405c94212d5318732505cf246ceb9d8ac19ad3
import pyquil.quil as pq
import pyquil.api as api
from pyquil.gates import *
from grove.amplification.grover import Grover
import numpy as np
from grove.utils.utility_programs import ControlledProgramBuilder
import grove.amplification.oracles as oracle


def grovers(n, s):
    """
    generates a pyquil program for grover search
    :param n: number of qubits
    :param s: number to search for (0 <= s <= 2^(n)-1)
    :return: quantum program
    """
    # Construct program
    grover = pq.Program()

    # set up minus
    grover.inst(X(n))
    grover.inst(H(n))
    # grover_r = Grover()

    for i in range(n):
        grover.inst(H(i))

    # BUILD UF (ONLY WORKS FOR 0 AS OF NOW)
    U_f = np.identity(2**(n+1))
    flip = s
    U_f[flip][flip] = 0
    U_f[2**(n+1)-1][flip] = 1
    U_f[flip][2**(n+1)-1] = 1
    U_f[2**(n+1)-1][2**(n+1)-1] = 0
    grover.defgate('Uf', U_f)

    string = ""
    for i in range(n + 1):
        string += " " + str(i)

    string2 = ""
    for i in range(n):
        string2 += " " + str(i)

    second = -1 * np.identity(2 ** n)
    second[0][0] = 1
    grover.defgate('second', second)

    # for _ in range(int((np.pi * 2**(n/2)) / 4)):
    for _ in range(int(2**(n+2))):
        # apply Uf
        grover.inst('Uf' + string)
        # grover.inst(SWAP(s, n+1))
        for i in range(n):
            grover.inst(H(i))
        grover.inst("second" + string2)
        for i in range(n):
            grover.inst(H(i))

    for i in range(n):
        grover.measure(i)

    return grover


if __name__ == "__main__":
    qvm = api.SyncConnection()
    for i in range(50):
        p = grovers(6, 0)
        # results = qvm.run(p, classical_addresses=[])
        results = qvm.wavefunction(p)
        print(results)
py
1a405cfe92ed3034133ae048ce018db5393c8306
class Data:
    def __init__(self, a, b):
        self.test_data = a + b
py
1a405e16e0a01b9ca16ae811378ffbba387c4c54
# -*- coding: utf-8 -*- # snapshottest: v1 - https://goo.gl/zC4yUc from __future__ import unicode_literals from snapshottest import GenericRepr, Snapshot snapshots = Snapshot() snapshots['test_read_diff_file 1'] = [ [ 'change', '', [ None, { '_id': 'crag4l7j', 'abbreviation': 'ABTV', 'created_at': GenericRepr('datetime.datetime(2019, 11, 27, 4, 54, 6, 180000)'), 'imported': True, 'isolates': [ { 'default': True, 'id': '4e8amg20', 'sequences': [ { '_id': 'czwypki5', 'accession': 'NC_010317', 'definition': 'Abaca bunchy top virus DNA-M, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': '5w3oz9pl' }, 'segment': '', 'sequence': 'GGGGCTGGGGCTTATTATTACCCCCAGCCCCGGAACGGGACATCACGTGCCTCCCTGTACACGCACGTGACATCAAATGGCTTGCTGYGCAAGATAGAAAGGCTTCCTGCGGAAGCCAGGCATCATATATGGGTTGTAGAGGATCAGCCGACTCTATAAATATAGGGAGGCGTGGTCATGGGATTGACAGGAGAACGAGTGAAACAATTCTTTGAATGGTTTCTGTTCTTCTCAGCAATATTTGTTGCAATAACAATAATATATATATTGCTGGCAGTTCTCCTTGAACTACCGAAGTATATTAAGGGTTTAGTACAATACGTAGTGGAGTATATTACTAGACGACGGGTATGGACACGGAGGACCCAATTGACAGAGGCAACCGGAGGAGGCGATATAGAAGCTGTAGGGCATGACAGTCAGGCGTATACGCATACTGTTATGCCATSTGTTCCACCAGTTAGCGCTCCTATATCAAATAGGAGAGCTGATCAGCCTCTTCGATCAAGCGCCGGACCAATGTTTTAAATACCCGTGATATTTAATATGCAAGTGTATAAATACCCATTGTAGATCTGTTTGTAACCTGAATATGCAAAGTATATAATACTTTGTTGTAATGTATAAGTACATTAATAATATACGAAGTATAATGTTGATGCGATGTCTTCGGAAAATGAAGTATACCCAAATACACAAAAAAACATATATGTGGTGTATACTTATTGTTAAGTATAATAAAATTATAATACAAACAAATATATTGTGTATTATAATACATAAAAGAAGACAGAGCTGTGAAGTTAAGTAAGAAAGCGACGGATTCGTATTGGATAATATGATTCGCGGAGCATTACTTAACGGCGAAGTAAAGCATCAGACAAGAATATGACAGCTGTCATATCAACTAAAAAGCATAGCTTGCCGAGCACGCTATGCAATATAAGGGAAAAATGAAATAATGACCTGTCACGTTTACACTTTTCGTAAAGATGTAGGGCCGAAGGCCCTAATGACGCGTGTCATATTCTCTATAGTGGTGGGTCATATGTCCCGAGTTAGTGCGCCACGTAA' }, { '_id': 'qod5uj1p', 'accession': 'NC_010318', 'definition': 'Abaca bunchy top virus DNA-C, complete sequence', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'fl6r8ylz' }, 'segment': '', 'sequence': 'GGCAGGGGGGCTTATTATTACCCCCCCTGCCCGGGACGGGACATCACGTGCGCCTATAAAGAAGCACGTGATGATGTGACATATGTTTGCCGTAAGCTGTACAAAAGCGAATGCTTTATTGCTTTATTCGTTGCTTAGCCCCTAAGCAACCTGGTAGTGGGATGTGGTCCCTGATGCGAAGCAGCTTTGTTGTTGCTATTTATATCTGTGTTCGCCATGTAAAATGCGAAATACAACGCTGATCAGAATGGAGTTCTGGAATTCGGAAGCATTTTGCGACGATGTGAAGCGTGTCATTAAGCAAAAATATTGGGAAGAGCGGATGAAATCTCTATTTATAGAGAAGGTGAGTGGTTATGTTCGAAGGATTCTTGTTTATGGTAATCTTGATGATACCATATATGCGGTTCAGCAAATGAAGACTTCTATAGTTCAGTGTGCTGAACGTTTCGGTAAAGCCTGTGTGGTGGTATATAATGGTTTGGATCCATCAATAGGTTTCCGATTACACACTATGGCGTTCTTCTTCGAAGAATATGTTGAGGAAGTGAGTACTGCAGATCCAATGGCGGTTCAGTTATTTTGTGATGAAGAAATAGAAGAATTCTCAAATTCTGATGTACGCCTTATTAAAAATGTTATTATGGCATCGACAGATGCATCGATTGATGTAGGAAATTGTATTCAGATAATAATATCTGATAATGTAATAACATTCTATATATGTTAACTTCATTTATACATAAGAATGAATGAAGTGGTTTATTTATGATTTTTAGAATATAATCATAAATGGTAAACCTTAAGCAAAACCACCCTAAAACAAATAAACCTCTCTGACATACAACCCTCTATAAAATAAAGCCCATGTAAGATTCAAATTTAGAATGAAAAATGGGCCGAAGGCCCATATAAATGCATTTAAGGCCGAAGGCCTTATAATTGCAGGAAGAAAAGAACACGGTTTTGCTTACGTGGCCTGTGGGCCATATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': '92dhxina', 'accession': 'NC_010316', 'definition': 'Abaca bunchy top virus DNA-S, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'heprpw4w' }, 'segment': '', 'sequence': 
'GGCAGGGGGGCTTATTATTACCCCCCCTGCCCGGGACGGGACATTCTGTGATGGGCTGGGCTTTATGCGGCCAAATAAGCCCATAAAGCCAGATCTGGGCCCATTTAAGGGCCCGTGGTTTGAAAATGTCGCGTTCCCGCCAAAATAGTTGCTTGCTCTGCAAGCAAACTATATCTATTATAAATACCAGATGTAATGGTTGCCTGAGAATGAAAATAAAAGGATGGCGAGGTATCCGAAGAAGGCGCTAAAGAAGAGGAAGGCGGTACGCCGTAAGTATGGAAGTAAAGCTACGACCAGTCATGATTATGCTGTTGATACTTCATTTATTGTTCCTGAAAATACTATTAAGCTGTATCGTATTGAGCCTACTGATAAAACATTACCCAGATATTTTATCTGGAAAATGTTTATGTTGTTGGTATGCAAAGTCAGACCAGGGCGTATACTTCATTGGGCGATGATTAAGAGCTCTTGGGATGTGAAGGATCCAACAGTTGTTCTTGAAGCGCCTGGATTGTTTATCAAGCCAGCAAACAGCCATCTGGTTAAACTGGTATGCAGTGGCGAGTTAGAAGCTCCAGTAGGAGGAGGGACTTCAGAGGTTGAATGTCTTCTACGGAAGACAACTTTACTTCGTAGGAATGTTACAGAATTGGATTTCTTGTATTTGGCGTTTTATTGTTCTTCTGGAGTTACAATCAACTACCAGAACAGGATTACATATCATGTATAAACCACATAAAATAAATGTGGTGTTGCAGGCATGGGAAGAATAAAACAATGTTTGCCTACGAAATATTTTGGTAAAGTGAAATTATGACCTGTCAGAATTAAGTTTAGAATGAACTGAGGCCGAAGGCCTCACCGAGGCCGAAGGCCGTCAAGTTGGATGAATAAAATACAAGGTATAAGTACGAAGAGCGGTATAATATCTGAAAGGAAATAATAATAATATAATAAAATATTATGATGTCCCAAAATAGCAGAATGCTAAAGGAACAAAAGGATGCTCTAAGTACAGGGTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': 'f8oj78sm', 'accession': 'NC_010319', 'definition': 'Abaca bunchy top virus DNA-R, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'j0g7sfp4' }, 'segment': '', 'sequence': 'GGCAGGGGGGCTTATTATTACCCCCCCTGCCCGGGACGGGACATTTGCATCTATAAATAGAAGCGCCCTCGCTCAACCAGATCAGGCGCTGCAATGGCTAGATATGTCGTATGTTGGATGTTCACCATCAACAATCCCGAAGCTCTTCCAGAGATGAGGGAAGAATACAAATACCTGGTTTACCAGGTGGAGCGAGGCGAAAGCGGTACACGACATGTGCAGGGCTATGTTGAAATGAAGAGACGAAGTTCTCTGAAACAAATGAGGGCTTTAATTCCTGGTGCCCATCTCGAAAAGAGAAGGGGCACACAGGAAGAAGCTAGAGCTTATTGTATGAAGGCAGATACGAGAGTCGAAGGTCCCTTCGAGTTTGGTCTTTTCAAAGTATCATGTAATGATAATTTGTTTGATGTCATACAGGATATGAGAGAAACGCACAAACGGCCGATTGAGTATTTATACGACTGTCCTAATACCTTCGATAGAAGTAAGGATACATTATACAGGGTACAAGCGGAAATGAATAAAATGCAAGCTATGATGTCGTGGTCGGAAACCTATGGTTGCTGGACGAAGGAAGTGGAGGAACTAATGGCGGAGCCATGTCACCGACGGATTATTTGGGTCTATGGCCCAAATGGTGGTGAAGGTAAAACAACCTATGCGAAGCATCTAATCAAGACCAGAAATGCATTTTATACACCTGGCGGAAAGACACTGGATATATGTAGGCTGTATAATTATGAGGGAATTGTAATATTTGATATTCCCAGATGCAAAGAGGATTACTTGAATTACGGAATTCTTGAGGAATTCAAGAATGGCATCATTCAGAGCGGGAAATATGAACCAGTTTTAAAAATTGTAGAGTATGTGGAGGTCATTGTCATGGCTAACTTCCTGCCGAAGGAAGGAATATTCTCGGAAGACCGAATAAAGCTTGTAACTTGTTGAACACGCTATGCAATAAAGGGGAAAAATGCAATTATGACCTGTCACGTTTACACTTTTCGTAAAGATGTAGGGCCGAAGGCCCTAATGACGCGTGTCATATTCTCTATAGTGGTGGGTCATATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': 'fdepnu5q', 'accession': 'NC_010314', 'definition': 'Abaca bunchy top virus DNA-N, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 't2ppu39d' }, 'segment': '', 'sequence': 
'AGCAGGGGGGCTTATTATTACCCCCCCTGCTCGGGGCGGGACATTCTGTGATGGGCTGGGCTTTATGCGGCCAAATAAGCCCATAAAGCCAGATCTGGGCCCATTTAAGGGCCCGTGGTTTGAAAATGTCGCGTTCCCGCCTAAATTGTTTGCTTGCCCTGCAAGGAAACGAAAACTCTATAAATAGGGTTGTTCTCTGCTTGTTTAATACATCAGGCGCAAATCTTTTGCAACGATGGATTGGATGGAATCACAATTCAAGACATGTACGCATGGCTGCGACTGGAAGGCGATAGCTCCAGAAGCACAAGATAATATACAGGTAATTACATGTTCCGATTCAGGTTACGGAAGAAAGAACCCTCGTAAGGTTCTTCTGAGGAGTATTCAGATAGGGTTCAATGGAAGCTTCAGAGGAAGTAATAGAAATGTTCGAGGCTTCATATACGTGTCTGTAAGACAGGATGATGGCCAAATGAGACCAATTATGGTCGTTCCATTCGGAGGGTATGGATATCATAACGACTACTATTATTTTGAAGGACAATCCAGTACGAATTGTGAGATAGTGTCGGACTATATTCCGGCCGGTCAAGACTGGAGCAGAGATATGGAGATAAGTATAAGTAACAGCAACAATTGTAATCAAGAGTGCGATATCAAGTGTTATGTAGTATGTAATTTAAGAATTAAGGAATAAWATTGTTGCCGAAGGTCTGTTATTTGAATGTTGAGATAAGGAAAGGGGCGGCGAAGCATGTGTGTATAATAACATATAACACACTATTATATATTTTGTAAAGAATAAAATTATGACCTGTCAGATTAAGTTTAGAATGAACTGAGGCCGAAGGCCTCACCGAGGCCGAAGGCCGTCAGGATGGTTTTACAAAATAATTATAAGCACCTGTACTAAGTACGAAGAGCGGTATAATATCTGAAAGGAAAAAATAATAATATAATAAAAATATTATGATGTCCCAAAATAGCAGAATGCTAAAGGAACAAAAGGATGCTCTAAGTACAGGGTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCCACGTC' }, { '_id': '7wkiibpp', 'accession': 'NC_010315', 'definition': 'Abaca bunchy top virus segment 2, complete sequence', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'wzpj56v7' }, 'segment': '', 'sequence': 'AGCAGGGGGGCTTATTATTACCCCCCCTGCTGGGACGGGACATCCGAATAGTAATGGGCTTAATACATAAATGGGCCGCCAAATAAGCGTACAGTTCAGTATCTTCGTTTTGGGCCTCGGCCCAAAATTAAGAGAACGTGTGCGCTTGTTTGGGTTGGACCGTAGGTCCGGTTCGAATGAGGAATATTCCTTGCTTCCATAGGAAGGAACAGCGAAATAAAATAAATCGCTGTTACACGTGTAAGAATACTATTAGTTCGCAGAGTTTATAAATACCCTTCAAGTTGAAGGGTGTGGTGCTCTCTCTCTTCTGTCAGACGGTGTGCCATGATGAAGCTCTCCGGGAGGAAAGAAGGGCGGAGGTGGAGGAGCCGCCTCGCCCGTCCCTCCGACGAAGTATACGAAGTCGTAGGTATGTCAGGGTATTTATAGAAGCGTCGTATACTAAACGCGCCTGTATCATAATTGTATTTGTGTATTTAAATATTTAAATACCAAACCTTAAGGAATAAAATATATGTTTAATAAAATAACACATGACAATAAAAGAAAACAATAATACAATAATAAAATTGTATTATGCAAACGTTGTATAATTAAGGTCTGTTTAATATACATATATATATGTATATTACAGTATTGTTTAAAATAAATGACTTGGAAGGAAATAATAATATTAATGATAAGCAATAAAATATTCCGAATACATAAAAAGGGGAAAATGCAATTAAGACCTGTCACGTTTACATATTTCGAATATATTAAGGGCCGAAGGCCCGTCAGTATGCAGGTACATCAGTGATTGCTTCGTGACGAAGCAAGGGTATTATTGTAAATAAGAAAGCATCTGACAACTTTAATAGTGGTCCCCTATACAGCTGTCATATGACAGCTGGCAAAGGATCATTGGGCGGACTCCAAATATATATTAAATATAACATATAAAATATATAAGGTATAGATACTATTATTAATTAAAAGAGGCGGGAAAGAGGACACGTGCGCCGCACGCCACTTTAGTGGTGGGCCATATGTCCCGAGTTAGTGCGCTTACGTC' } ], 'source_name': 'Q767', 'source_type': 'isolate' }, { 'default': False, 'id': 'mzojjjmz', 'sequences': [ { '_id': '1a9ngyao', 'accession': 'EF546804.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-S, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': '0owj7oas' }, 'segment': '', 'sequence': 
'GGCAGGGGGGCATATTATTACCCCCCCTGCCCGGGACGGGACATTCTGTAATGGATTGGGCTTAATGATGCCAAAGGCCCATTAAGCGGAAGCGCGGCCCATTCAATGGCCTGTGCGCTGGATTCAAATGTCGCGTTCCCGCCTATTTAGTTGCTTGCTCTGCAAGCAACAACATCTATTATAAATACCAGGATGGAAGGCAGTCTGATATGCAAGAAACAAATGGCGAGGTATCCCAAGAAATCGTTGAAGAAAAGAAAGGCGGTTCGCCGGAAGTATGGAAGTAAGGCTACTACCAGTCATGATTATGCTATTGACACTTCATTTATTGTTCCCGAAACAACTATTAAGTTGTATCGTATTGAGCCTACTGATAAAACATTACCCAGATATTTTATCTGGAAAATGTTTATGCTATTAGTATGCAAAGTGAGACCTGGTCGTATACTTCATTGGGCGATGATTAAGAGTTCATGGGATATACATCAACCAGAGGCAGTGTTAGAAGCCCCTGGCCTGTTTATAAAGCCTGCAAATAGCCATTTGGTGAAACTGGTATGCAGTGGCGAGTTAGAGGCTCCAGTTGGCGGAGGGACAACAGATGTGGAATGTCTTCTACGGAAGACCACATTACTTCGTAGGAATGTAACAGAGTTGGATTTCCTGTATCTGGCGTTTTATTGTACTTCTGGAGTTACAATTAACTACCAGAATCGGATAACATATCATGTATAAACCCCTATATAATAATGGGGTGATGTACATATGTGAAATAAAACAATGATATGCCTACGAAATATTTTGGGAAAATGCAATTATGACCTGTCAGATTGTGTATATGAATGAACTTACGGCCGAAGGCCGTAAGTAAGAATATGCCTAGAATAAAATACAAGGTATATGTACGAAGAGATGTATATAACCGAAAGGGTATGAACAAAAACATAATATTATAAAATAATAAGGGGCAATGAAAGCAGAATGCGTATGTAACAAAATGATGCTCTGAGTACAGGCTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': 'uf8ccce0', 'accession': 'EF546807.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-R, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'cpm1g5h4' }, 'segment': '', 'sequence': 'GGGGCTGGGGCTTATTATTACCCCCAGCCCCGGGACGGGACATCTTGCATCTATAAATAGATTGCCTCCTCGCACTACCAGTGCAGGCAATGCAATGGCTAGGTATGTGATATGTTGGATGTTCACCATCAACAATCCCGAAACCCTTCCACCAATGAGGGATGAATACAAATACATGGTATATCAAGTAGAGCGAGGAGAAAGCGGTACTCGGCATGTGCAGGGATACGTGGAGATGAAGAGACGCACGTCTCTGAAACAAATGAGAGCTTTAATTCCTGGTGCCCATCTCGAGAAGAGAAAGGGCACACAGGAAGAAGCTAGATCATACTGTATGAAGGCGGATACAAGAATCGAAGGACCCTTCGAGTACGGTATGTTCAAAATATCATGTAATGATAATTTGTATGATGTCATACAGGATATGAGAGAAACGAACAAAAGGCCGATTGAGTATTTATATGATTGTCCTAATACCTTCGATAGAAGTAGAGATACATTATACAGGGTTCAGGCAGAACTCAATAAAATGCAAGCAATGAAGTCGTGGTCGGAATCCTATGGTTGTTGGACGAAGGAAGTGGAGGAAATAATGGCGGAGCCATGTCACCGACGGATCATTTGGGTCTATGGACCAAATGGTGGTGAAGGAAAAACAACCTATGCGAAGCATTTAATCAAGACCAGAAATGCATTTTATACACCTGGCGGGAAATCACTGGATATATGTAGGTTGTATAATTATGAGTCTATTGTAATATTCGATATTCCAAGATGCAAAGAGGAATATTTGAATTACGGAATAGTTGAGGAATTCAAAAATGGCATCATCCAGAGCGGGAAATATGAACCTGTTCTGAAAGTAGTACAGTATGTGGAAGTCATTGTCATGGCTAACTTCCTTCCGAAGGAAGGAATCTTCTCAGAAGATAGGATAAAGCTTGTAACTTGTTGAACACGCTATGACAAAAAGGGAAAAATGCAATTATGACCTGTCAGATTGTTTTATAGGACCGACCGAGGCCGCAGGCCGTCAGTATGCCACCTGTACTTACTTTAATGTGGTGGGTCATATGTCCCGAGTTAGTGCGCCACGTA' }, { '_id': '951z8646', 'accession': 'EF546802.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-N, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'fgdkm0kp' }, 'segment': '', 'sequence': 
'GGCGGGGGGGCATATTATTACCCCCCCCGCCCGGGACGGGACATTCTGTAATGGATTGGGCTTAATGATGCCAAAGGCCCATTAAGCGGAAGCGCGGCCCATTCGATGGCCTGTGCGCTGGTTTCAAATGTCGCGTTCCCGCCTATTTAGTTGCTTGCCCTGCAAGCAATTAACTAGTCTATAAATAAGGTGGTTCTCTGCGAGTCCAATACAGCAGTTGCGATAACATAGCAACAATGGATTGGTTTGAATCACAATTCAAGACGTGTACGCATGGATGCGACTGGAAGGCGATAGCCCCAGATGCAAGAGACAATATACAGGTAATTACTTGTATCGATTCAGGTTATGGAAGAAAGAACCCTCGTAGGGTTCTTCTAAGAAGTATTCAGATAGGGTTCAATGGTAGCTTCAAAGGAAACAATAGAAACGTTCGAGGATTCCTCTACGTTTCTATAAGGCAAGATGATGGGGAATTGAGACCTATCATGGTTGTGCCATTTGGAGGATATGGATATCATAATGACTATTACTATTTCGAAGGGCAATCAAGTACGAATTGTGAGATAGGGTCGGATTATATCCCGGCCGGTGAAGACTGGAGCAGAGACATGGAAATCAGTATAAGCAACAGCAACAATTGTACTCAAGAGTGTGATATAAAATGCTATGTGGTATGTAACTTAAGAATTAAGGAATAATATGTTGCTGAAAGATCTTATTTAATGTTTAGATAATGATGTTGTGCTGTGAAGTATCTGTTTATATTTATTATAAACACGCTATGCCAATAAGGGAAAAATGCAATTAAGACCTGTCAGATTGTGTATATGAATGAATATACGGCCGAAGGCCGTAAGTATGAAATTAGAAATAATTTATAAGCACTGGTTAAATGTACGAAGAGATGTATATAACCGAAAGGGTATGAACAATAACATAATACCATAAAATAATAAGGGGCAATGAAAGCAGAATGCGTATGTAACAAAATGATGCTCTGAGTACAGGCTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCTAGTGG' }, { '_id': '7domcyti', 'accession': 'EF546806.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-C, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'iz8ant3f' }, 'segment': '', 'sequence': 'GGCGGGGGGGCATATTATTACCCCCCCCGCCCGGGACGGGACATCACGTGATGCCTTTATAAATCACGTGGGCATATAACTTTTGTATGCCGTAAGCTGTATAAAAGCGGCAGCTTTATGTTTTATTCGTTGCTTCATCGATAAGCAATGTGATAATGGATGGTGGTCCTGTTGCAAAGCAGGTGACTGGTTTGCTATTTATATCAGTTGTTGTGCAGGGTTTTGGTGTATACAACGCTGATCAGAATGGAATTCTGGAATTCGGAAGCATTCTGCGACGATGTGAAGCGTTGTATCAAGCAAAAATATTGGGAAGAGCGGATGAAATCTCTATTTATAGAGAGGGTAAGTGGTTATGTAAGAAGGATTTGTGGTTATGGTAATCTTGATGATGCAATATATGCTGTTCAGCAATTGAAGACTTCTATAGTTCAGTGTGCTGAACGATATGGGAAAGCTTGTGTGGTTGTATATTCGGGTACTGATAAATTAATGGTCTATAGACTACATACAATGGCTTTCTTCTTCGAAGAATATGTTGAGGAACTGAAGACTGCAGATCCATTAACTGTTCAGCTATTCTGTGATGAGGATATATCAGAATTCGTAGAATCTGATGTACGTTATTGTAAGAATGTTATTATGGCATCGACAGATGAGCATGTATCTATAAACAAATGTATTCAGATAGTAATATCTGATAATGTAATAACATTCTATTAATGTAATCTTCATTTCAGCGTAAGTCTATAATGAAGTGGTTGATTTATGATTTTTAGAATATAATCATAAATGGTCAACTAGACCAAATAATACCTTAAGAAATATAACACACTATGCCAATAAGGGAAAAATGCAATTAAGACCTCTAAGATTCTATTTAGAATAATATAAATGTGCCGAAGGCACATATAAATAATATTAGGCCGAAGGCCTAATAATTTGTAAGTGAATGACCATCTGTACGCTTACGTGGAATGTGGGTCATATGTCCCGAGTTAGTGCGCCAGTGG' }, { '_id': 'oh5ouqi2', 'accession': 'EF546805.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-M, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'pqm6etvy' }, 'segment': '', 'sequence': 
'GGCGGGGGGGCATATTATTACCCCCCCCGCCCGGGACGGGACATCACGTGCGCCTATAGTAATGCACGTGATAATACATGGCTTGCAGCGCAAGATATATGGCTTCCGCAGGAAGCCTTGCAATATATGGGTTTGTCTAAGACCTGCCGACTCTATAAAAAGAGGCGGCGAGTTCATGGCTTTGACAGGAGAACGAGTGAAACAATTCTTTGAATGGTTTCTGTTCTTCGCAGCAATATTTGTTGCTATATCAATAATATATATATTGTTGGCAGTTCTCCTTGAACTACCTAAGTATATTAAGGGCGTGGTAAAATACATAGTGGAGTATGTAACCAGACGGCGAGTATGGATGCGACGAACACAGTTGACGGAGGCAACTGGAGGTGGCGAGATAGAAGGAGATCGGCATGACAGTCATATTACTGTTATGCCTTCTGTTCCACCGGTAAGTGCTCCTATATCCAATAGGAGAGGTGATCAGGGTCTTCGACCAAGCACGGGACCAATGTTTTAAATACTCGTGTAATATAATATACGAGAATACAATACCCAATGATCTGTTGAATGTAATATGTAATGCAAAATATAATATAGTTTGCTGTTACATGTTGTTGTACAATTCAAAATATATGTGGTGTCGAAGTCTTCTTATATGGAAGTATACCTAATAACTAAAATATAAGATTGGTGTATACTTCATGACAAGGATATTAAACTATACAATACAACCACATAAATACAATGTATTGTATGAATTAAATAAGAAGACAGAGAAGTAAAGTGAAGTCTGACAGCGACAGATAAGGACAGGAGAATATGATTGGTGGAGCAAGACTTAACGGTTAAGCAAAGCATATGGATAAGAATCTTTCTTCCGAAGGAAGGAATATTCGCGGAGGATAGAATAAAGATTGTAATCTGAAAAACACACTATGACAGAAAGGGAAAAATGCAATTATGACCTGTCAGATTGTGTTATAGGACCGACGGAGGCCGAAGGCCGAAGGCCTGAAGGCCGATCAGTATGGCGCCTGTATTATTCTTTATTATGGTGGGCCATATGTCCCGAGTTAGTGCGTTAGTGG' }, { '_id': '4vbmnj24', 'accession': 'EF546803.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-U3, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'vza1fxor' }, 'segment': '', 'sequence': 'GGGGCTGGGGCATATTATTACYCCCAGCCCCGGAACGGGACATCTGAATAGTATTGGGCTCATTATATATAATGGGCCGACAAATAAGCGTACAGTTCAGTATCTTCGTTTTGGGCCTTGGCCCAAAATTAAGAGAACGTGTGCGCTTGTTTGGTTAGACCGAAGGTCCGGTTCGAATAAGGTATATTCTTTGCTTCCGAAGGAAGGAACAACGAAATATTCAATTTCGTTGTGACAGGTGTACGTTTACTATTGGTTCGCAATGTTTATAAATACCCGCGAAGCTCAAGGGTATGGTGCTCTCTCTCTTCTGTCATATGCGACTGYAAGGCGATCATATGCGACTGCAAGGCGATACTCGCCGTGAGTAAAGAAGGGCGGAGGTGGAGGAGCCGCCTCGCCCGTCCTCTCAGACGACGTAGTATACGAAGTCGTGTGTAGGTAAGGGTATTTATAGAAGCGCGTGTACTATATACGCGCCTGTATATTACATGTATTTGTGTATTTAATTATTTAAATACTAAACCTTAATGAGGAGAAGTGTTCATATAACAAATAAAATATAACATATTTACTAGGCAACAATATTAACAATATTTGCGATCAAAAGGGTATAATTAAGGTCCTGTAATATACATATTTATATGTATATCAGAGTTCAGGTAAATAATCTGGCCTTGAAGGAACCAATAATAATACTGATAGGATAAGAAGAAATATAACACGCTATGCCAATAAGGGAAAAATAATATAATGACCTGTAAGATTGTTTTTTAGAACCGACGGAGGCCGAAGGCCGAAGGCCTGAAGGCCGATCAGTATGCCGGATCATTAGGTGTTGCTTCTCCAAGAAGCAAGGGTGTAAATGTAAATATGAAAGCATCTGACTACTTTAGTGGTGGGCCTACATACAGCTGTCATAAGACAGCTGGCAGAGGATAATTGGACGGACTTGAAATATAAATACAATACAACATATTAATATATTCCACTATCAGACAAATGATGCGCTGACGTCATGGCGTGCAATCGTGACCGTTTAAATGTCCCGCTGACGTA' } ], 'source_name': 'Q1108', 'source_type': 'isolate' } ], 'issues': None, 'last_indexed_version': None, 'lower_name': 'abaca bunchy top virus', 'name': 'Abaca bunchy top virus', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'c93ec9a9' }, 'schema': [ ], 'user': { 'id': 'igboyes' }, 'verified': True, 'version': 0 } ] ] ] snapshots['test_write_diff_file 1'] = [ [ 'change', '', [ None, { '_id': 'crag4l7j', 'abbreviation': 'ABTV', 'created_at': '2019-11-27T04:54:06.180000+00:00', 'imported': True, 'isolates': [ { 'default': True, 'id': '4e8amg20', 'sequences': [ { '_id': 'czwypki5', 'accession': 'NC_010317', 'definition': 'Abaca bunchy top virus DNA-M, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': '5w3oz9pl' }, 'segment': '', 'sequence': 
'GGGGCTGGGGCTTATTATTACCCCCAGCCCCGGAACGGGACATCACGTGCCTCCCTGTACACGCACGTGACATCAAATGGCTTGCTGYGCAAGATAGAAAGGCTTCCTGCGGAAGCCAGGCATCATATATGGGTTGTAGAGGATCAGCCGACTCTATAAATATAGGGAGGCGTGGTCATGGGATTGACAGGAGAACGAGTGAAACAATTCTTTGAATGGTTTCTGTTCTTCTCAGCAATATTTGTTGCAATAACAATAATATATATATTGCTGGCAGTTCTCCTTGAACTACCGAAGTATATTAAGGGTTTAGTACAATACGTAGTGGAGTATATTACTAGACGACGGGTATGGACACGGAGGACCCAATTGACAGAGGCAACCGGAGGAGGCGATATAGAAGCTGTAGGGCATGACAGTCAGGCGTATACGCATACTGTTATGCCATSTGTTCCACCAGTTAGCGCTCCTATATCAAATAGGAGAGCTGATCAGCCTCTTCGATCAAGCGCCGGACCAATGTTTTAAATACCCGTGATATTTAATATGCAAGTGTATAAATACCCATTGTAGATCTGTTTGTAACCTGAATATGCAAAGTATATAATACTTTGTTGTAATGTATAAGTACATTAATAATATACGAAGTATAATGTTGATGCGATGTCTTCGGAAAATGAAGTATACCCAAATACACAAAAAAACATATATGTGGTGTATACTTATTGTTAAGTATAATAAAATTATAATACAAACAAATATATTGTGTATTATAATACATAAAAGAAGACAGAGCTGTGAAGTTAAGTAAGAAAGCGACGGATTCGTATTGGATAATATGATTCGCGGAGCATTACTTAACGGCGAAGTAAAGCATCAGACAAGAATATGACAGCTGTCATATCAACTAAAAAGCATAGCTTGCCGAGCACGCTATGCAATATAAGGGAAAAATGAAATAATGACCTGTCACGTTTACACTTTTCGTAAAGATGTAGGGCCGAAGGCCCTAATGACGCGTGTCATATTCTCTATAGTGGTGGGTCATATGTCCCGAGTTAGTGCGCCACGTAA' }, { '_id': 'qod5uj1p', 'accession': 'NC_010318', 'definition': 'Abaca bunchy top virus DNA-C, complete sequence', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'fl6r8ylz' }, 'segment': '', 'sequence': 'GGCAGGGGGGCTTATTATTACCCCCCCTGCCCGGGACGGGACATCACGTGCGCCTATAAAGAAGCACGTGATGATGTGACATATGTTTGCCGTAAGCTGTACAAAAGCGAATGCTTTATTGCTTTATTCGTTGCTTAGCCCCTAAGCAACCTGGTAGTGGGATGTGGTCCCTGATGCGAAGCAGCTTTGTTGTTGCTATTTATATCTGTGTTCGCCATGTAAAATGCGAAATACAACGCTGATCAGAATGGAGTTCTGGAATTCGGAAGCATTTTGCGACGATGTGAAGCGTGTCATTAAGCAAAAATATTGGGAAGAGCGGATGAAATCTCTATTTATAGAGAAGGTGAGTGGTTATGTTCGAAGGATTCTTGTTTATGGTAATCTTGATGATACCATATATGCGGTTCAGCAAATGAAGACTTCTATAGTTCAGTGTGCTGAACGTTTCGGTAAAGCCTGTGTGGTGGTATATAATGGTTTGGATCCATCAATAGGTTTCCGATTACACACTATGGCGTTCTTCTTCGAAGAATATGTTGAGGAAGTGAGTACTGCAGATCCAATGGCGGTTCAGTTATTTTGTGATGAAGAAATAGAAGAATTCTCAAATTCTGATGTACGCCTTATTAAAAATGTTATTATGGCATCGACAGATGCATCGATTGATGTAGGAAATTGTATTCAGATAATAATATCTGATAATGTAATAACATTCTATATATGTTAACTTCATTTATACATAAGAATGAATGAAGTGGTTTATTTATGATTTTTAGAATATAATCATAAATGGTAAACCTTAAGCAAAACCACCCTAAAACAAATAAACCTCTCTGACATACAACCCTCTATAAAATAAAGCCCATGTAAGATTCAAATTTAGAATGAAAAATGGGCCGAAGGCCCATATAAATGCATTTAAGGCCGAAGGCCTTATAATTGCAGGAAGAAAAGAACACGGTTTTGCTTACGTGGCCTGTGGGCCATATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': '92dhxina', 'accession': 'NC_010316', 'definition': 'Abaca bunchy top virus DNA-S, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'heprpw4w' }, 'segment': '', 'sequence': 
'GGCAGGGGGGCTTATTATTACCCCCCCTGCCCGGGACGGGACATTCTGTGATGGGCTGGGCTTTATGCGGCCAAATAAGCCCATAAAGCCAGATCTGGGCCCATTTAAGGGCCCGTGGTTTGAAAATGTCGCGTTCCCGCCAAAATAGTTGCTTGCTCTGCAAGCAAACTATATCTATTATAAATACCAGATGTAATGGTTGCCTGAGAATGAAAATAAAAGGATGGCGAGGTATCCGAAGAAGGCGCTAAAGAAGAGGAAGGCGGTACGCCGTAAGTATGGAAGTAAAGCTACGACCAGTCATGATTATGCTGTTGATACTTCATTTATTGTTCCTGAAAATACTATTAAGCTGTATCGTATTGAGCCTACTGATAAAACATTACCCAGATATTTTATCTGGAAAATGTTTATGTTGTTGGTATGCAAAGTCAGACCAGGGCGTATACTTCATTGGGCGATGATTAAGAGCTCTTGGGATGTGAAGGATCCAACAGTTGTTCTTGAAGCGCCTGGATTGTTTATCAAGCCAGCAAACAGCCATCTGGTTAAACTGGTATGCAGTGGCGAGTTAGAAGCTCCAGTAGGAGGAGGGACTTCAGAGGTTGAATGTCTTCTACGGAAGACAACTTTACTTCGTAGGAATGTTACAGAATTGGATTTCTTGTATTTGGCGTTTTATTGTTCTTCTGGAGTTACAATCAACTACCAGAACAGGATTACATATCATGTATAAACCACATAAAATAAATGTGGTGTTGCAGGCATGGGAAGAATAAAACAATGTTTGCCTACGAAATATTTTGGTAAAGTGAAATTATGACCTGTCAGAATTAAGTTTAGAATGAACTGAGGCCGAAGGCCTCACCGAGGCCGAAGGCCGTCAAGTTGGATGAATAAAATACAAGGTATAAGTACGAAGAGCGGTATAATATCTGAAAGGAAATAATAATAATATAATAAAATATTATGATGTCCCAAAATAGCAGAATGCTAAAGGAACAAAAGGATGCTCTAAGTACAGGGTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': 'f8oj78sm', 'accession': 'NC_010319', 'definition': 'Abaca bunchy top virus DNA-R, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'j0g7sfp4' }, 'segment': '', 'sequence': 'GGCAGGGGGGCTTATTATTACCCCCCCTGCCCGGGACGGGACATTTGCATCTATAAATAGAAGCGCCCTCGCTCAACCAGATCAGGCGCTGCAATGGCTAGATATGTCGTATGTTGGATGTTCACCATCAACAATCCCGAAGCTCTTCCAGAGATGAGGGAAGAATACAAATACCTGGTTTACCAGGTGGAGCGAGGCGAAAGCGGTACACGACATGTGCAGGGCTATGTTGAAATGAAGAGACGAAGTTCTCTGAAACAAATGAGGGCTTTAATTCCTGGTGCCCATCTCGAAAAGAGAAGGGGCACACAGGAAGAAGCTAGAGCTTATTGTATGAAGGCAGATACGAGAGTCGAAGGTCCCTTCGAGTTTGGTCTTTTCAAAGTATCATGTAATGATAATTTGTTTGATGTCATACAGGATATGAGAGAAACGCACAAACGGCCGATTGAGTATTTATACGACTGTCCTAATACCTTCGATAGAAGTAAGGATACATTATACAGGGTACAAGCGGAAATGAATAAAATGCAAGCTATGATGTCGTGGTCGGAAACCTATGGTTGCTGGACGAAGGAAGTGGAGGAACTAATGGCGGAGCCATGTCACCGACGGATTATTTGGGTCTATGGCCCAAATGGTGGTGAAGGTAAAACAACCTATGCGAAGCATCTAATCAAGACCAGAAATGCATTTTATACACCTGGCGGAAAGACACTGGATATATGTAGGCTGTATAATTATGAGGGAATTGTAATATTTGATATTCCCAGATGCAAAGAGGATTACTTGAATTACGGAATTCTTGAGGAATTCAAGAATGGCATCATTCAGAGCGGGAAATATGAACCAGTTTTAAAAATTGTAGAGTATGTGGAGGTCATTGTCATGGCTAACTTCCTGCCGAAGGAAGGAATATTCTCGGAAGACCGAATAAAGCTTGTAACTTGTTGAACACGCTATGCAATAAAGGGGAAAAATGCAATTATGACCTGTCACGTTTACACTTTTCGTAAAGATGTAGGGCCGAAGGCCCTAATGACGCGTGTCATATTCTCTATAGTGGTGGGTCATATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': 'fdepnu5q', 'accession': 'NC_010314', 'definition': 'Abaca bunchy top virus DNA-N, complete genome', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 't2ppu39d' }, 'segment': '', 'sequence': 
'AGCAGGGGGGCTTATTATTACCCCCCCTGCTCGGGGCGGGACATTCTGTGATGGGCTGGGCTTTATGCGGCCAAATAAGCCCATAAAGCCAGATCTGGGCCCATTTAAGGGCCCGTGGTTTGAAAATGTCGCGTTCCCGCCTAAATTGTTTGCTTGCCCTGCAAGGAAACGAAAACTCTATAAATAGGGTTGTTCTCTGCTTGTTTAATACATCAGGCGCAAATCTTTTGCAACGATGGATTGGATGGAATCACAATTCAAGACATGTACGCATGGCTGCGACTGGAAGGCGATAGCTCCAGAAGCACAAGATAATATACAGGTAATTACATGTTCCGATTCAGGTTACGGAAGAAAGAACCCTCGTAAGGTTCTTCTGAGGAGTATTCAGATAGGGTTCAATGGAAGCTTCAGAGGAAGTAATAGAAATGTTCGAGGCTTCATATACGTGTCTGTAAGACAGGATGATGGCCAAATGAGACCAATTATGGTCGTTCCATTCGGAGGGTATGGATATCATAACGACTACTATTATTTTGAAGGACAATCCAGTACGAATTGTGAGATAGTGTCGGACTATATTCCGGCCGGTCAAGACTGGAGCAGAGATATGGAGATAAGTATAAGTAACAGCAACAATTGTAATCAAGAGTGCGATATCAAGTGTTATGTAGTATGTAATTTAAGAATTAAGGAATAAWATTGTTGCCGAAGGTCTGTTATTTGAATGTTGAGATAAGGAAAGGGGCGGCGAAGCATGTGTGTATAATAACATATAACACACTATTATATATTTTGTAAAGAATAAAATTATGACCTGTCAGATTAAGTTTAGAATGAACTGAGGCCGAAGGCCTCACCGAGGCCGAAGGCCGTCAGGATGGTTTTACAAAATAATTATAAGCACCTGTACTAAGTACGAAGAGCGGTATAATATCTGAAAGGAAAAAATAATAATATAATAAAAATATTATGATGTCCCAAAATAGCAGAATGCTAAAGGAACAAAAGGATGCTCTAAGTACAGGGTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCCACGTC' }, { '_id': '7wkiibpp', 'accession': 'NC_010315', 'definition': 'Abaca bunchy top virus segment 2, complete sequence', 'host': 'Musa sp.', 'isolate_id': '4e8amg20', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'wzpj56v7' }, 'segment': '', 'sequence': 'AGCAGGGGGGCTTATTATTACCCCCCCTGCTGGGACGGGACATCCGAATAGTAATGGGCTTAATACATAAATGGGCCGCCAAATAAGCGTACAGTTCAGTATCTTCGTTTTGGGCCTCGGCCCAAAATTAAGAGAACGTGTGCGCTTGTTTGGGTTGGACCGTAGGTCCGGTTCGAATGAGGAATATTCCTTGCTTCCATAGGAAGGAACAGCGAAATAAAATAAATCGCTGTTACACGTGTAAGAATACTATTAGTTCGCAGAGTTTATAAATACCCTTCAAGTTGAAGGGTGTGGTGCTCTCTCTCTTCTGTCAGACGGTGTGCCATGATGAAGCTCTCCGGGAGGAAAGAAGGGCGGAGGTGGAGGAGCCGCCTCGCCCGTCCCTCCGACGAAGTATACGAAGTCGTAGGTATGTCAGGGTATTTATAGAAGCGTCGTATACTAAACGCGCCTGTATCATAATTGTATTTGTGTATTTAAATATTTAAATACCAAACCTTAAGGAATAAAATATATGTTTAATAAAATAACACATGACAATAAAAGAAAACAATAATACAATAATAAAATTGTATTATGCAAACGTTGTATAATTAAGGTCTGTTTAATATACATATATATATGTATATTACAGTATTGTTTAAAATAAATGACTTGGAAGGAAATAATAATATTAATGATAAGCAATAAAATATTCCGAATACATAAAAAGGGGAAAATGCAATTAAGACCTGTCACGTTTACATATTTCGAATATATTAAGGGCCGAAGGCCCGTCAGTATGCAGGTACATCAGTGATTGCTTCGTGACGAAGCAAGGGTATTATTGTAAATAAGAAAGCATCTGACAACTTTAATAGTGGTCCCCTATACAGCTGTCATATGACAGCTGGCAAAGGATCATTGGGCGGACTCCAAATATATATTAAATATAACATATAAAATATATAAGGTATAGATACTATTATTAATTAAAAGAGGCGGGAAAGAGGACACGTGCGCCGCACGCCACTTTAGTGGTGGGCCATATGTCCCGAGTTAGTGCGCTTACGTC' } ], 'source_name': 'Q767', 'source_type': 'isolate' }, { 'default': False, 'id': 'mzojjjmz', 'sequences': [ { '_id': '1a9ngyao', 'accession': 'EF546804.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-S, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': '0owj7oas' }, 'segment': '', 'sequence': 
'GGCAGGGGGGCATATTATTACCCCCCCTGCCCGGGACGGGACATTCTGTAATGGATTGGGCTTAATGATGCCAAAGGCCCATTAAGCGGAAGCGCGGCCCATTCAATGGCCTGTGCGCTGGATTCAAATGTCGCGTTCCCGCCTATTTAGTTGCTTGCTCTGCAAGCAACAACATCTATTATAAATACCAGGATGGAAGGCAGTCTGATATGCAAGAAACAAATGGCGAGGTATCCCAAGAAATCGTTGAAGAAAAGAAAGGCGGTTCGCCGGAAGTATGGAAGTAAGGCTACTACCAGTCATGATTATGCTATTGACACTTCATTTATTGTTCCCGAAACAACTATTAAGTTGTATCGTATTGAGCCTACTGATAAAACATTACCCAGATATTTTATCTGGAAAATGTTTATGCTATTAGTATGCAAAGTGAGACCTGGTCGTATACTTCATTGGGCGATGATTAAGAGTTCATGGGATATACATCAACCAGAGGCAGTGTTAGAAGCCCCTGGCCTGTTTATAAAGCCTGCAAATAGCCATTTGGTGAAACTGGTATGCAGTGGCGAGTTAGAGGCTCCAGTTGGCGGAGGGACAACAGATGTGGAATGTCTTCTACGGAAGACCACATTACTTCGTAGGAATGTAACAGAGTTGGATTTCCTGTATCTGGCGTTTTATTGTACTTCTGGAGTTACAATTAACTACCAGAATCGGATAACATATCATGTATAAACCCCTATATAATAATGGGGTGATGTACATATGTGAAATAAAACAATGATATGCCTACGAAATATTTTGGGAAAATGCAATTATGACCTGTCAGATTGTGTATATGAATGAACTTACGGCCGAAGGCCGTAAGTAAGAATATGCCTAGAATAAAATACAAGGTATATGTACGAAGAGATGTATATAACCGAAAGGGTATGAACAAAAACATAATATTATAAAATAATAAGGGGCAATGAAAGCAGAATGCGTATGTAACAAAATGATGCTCTGAGTACAGGCTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCCACGTG' }, { '_id': 'uf8ccce0', 'accession': 'EF546807.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-R, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'cpm1g5h4' }, 'segment': '', 'sequence': 'GGGGCTGGGGCTTATTATTACCCCCAGCCCCGGGACGGGACATCTTGCATCTATAAATAGATTGCCTCCTCGCACTACCAGTGCAGGCAATGCAATGGCTAGGTATGTGATATGTTGGATGTTCACCATCAACAATCCCGAAACCCTTCCACCAATGAGGGATGAATACAAATACATGGTATATCAAGTAGAGCGAGGAGAAAGCGGTACTCGGCATGTGCAGGGATACGTGGAGATGAAGAGACGCACGTCTCTGAAACAAATGAGAGCTTTAATTCCTGGTGCCCATCTCGAGAAGAGAAAGGGCACACAGGAAGAAGCTAGATCATACTGTATGAAGGCGGATACAAGAATCGAAGGACCCTTCGAGTACGGTATGTTCAAAATATCATGTAATGATAATTTGTATGATGTCATACAGGATATGAGAGAAACGAACAAAAGGCCGATTGAGTATTTATATGATTGTCCTAATACCTTCGATAGAAGTAGAGATACATTATACAGGGTTCAGGCAGAACTCAATAAAATGCAAGCAATGAAGTCGTGGTCGGAATCCTATGGTTGTTGGACGAAGGAAGTGGAGGAAATAATGGCGGAGCCATGTCACCGACGGATCATTTGGGTCTATGGACCAAATGGTGGTGAAGGAAAAACAACCTATGCGAAGCATTTAATCAAGACCAGAAATGCATTTTATACACCTGGCGGGAAATCACTGGATATATGTAGGTTGTATAATTATGAGTCTATTGTAATATTCGATATTCCAAGATGCAAAGAGGAATATTTGAATTACGGAATAGTTGAGGAATTCAAAAATGGCATCATCCAGAGCGGGAAATATGAACCTGTTCTGAAAGTAGTACAGTATGTGGAAGTCATTGTCATGGCTAACTTCCTTCCGAAGGAAGGAATCTTCTCAGAAGATAGGATAAAGCTTGTAACTTGTTGAACACGCTATGACAAAAAGGGAAAAATGCAATTATGACCTGTCAGATTGTTTTATAGGACCGACCGAGGCCGCAGGCCGTCAGTATGCCACCTGTACTTACTTTAATGTGGTGGGTCATATGTCCCGAGTTAGTGCGCCACGTA' }, { '_id': '951z8646', 'accession': 'EF546802.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-N, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'fgdkm0kp' }, 'segment': '', 'sequence': 
'GGCGGGGGGGCATATTATTACCCCCCCCGCCCGGGACGGGACATTCTGTAATGGATTGGGCTTAATGATGCCAAAGGCCCATTAAGCGGAAGCGCGGCCCATTCGATGGCCTGTGCGCTGGTTTCAAATGTCGCGTTCCCGCCTATTTAGTTGCTTGCCCTGCAAGCAATTAACTAGTCTATAAATAAGGTGGTTCTCTGCGAGTCCAATACAGCAGTTGCGATAACATAGCAACAATGGATTGGTTTGAATCACAATTCAAGACGTGTACGCATGGATGCGACTGGAAGGCGATAGCCCCAGATGCAAGAGACAATATACAGGTAATTACTTGTATCGATTCAGGTTATGGAAGAAAGAACCCTCGTAGGGTTCTTCTAAGAAGTATTCAGATAGGGTTCAATGGTAGCTTCAAAGGAAACAATAGAAACGTTCGAGGATTCCTCTACGTTTCTATAAGGCAAGATGATGGGGAATTGAGACCTATCATGGTTGTGCCATTTGGAGGATATGGATATCATAATGACTATTACTATTTCGAAGGGCAATCAAGTACGAATTGTGAGATAGGGTCGGATTATATCCCGGCCGGTGAAGACTGGAGCAGAGACATGGAAATCAGTATAAGCAACAGCAACAATTGTACTCAAGAGTGTGATATAAAATGCTATGTGGTATGTAACTTAAGAATTAAGGAATAATATGTTGCTGAAAGATCTTATTTAATGTTTAGATAATGATGTTGTGCTGTGAAGTATCTGTTTATATTTATTATAAACACGCTATGCCAATAAGGGAAAAATGCAATTAAGACCTGTCAGATTGTGTATATGAATGAATATACGGCCGAAGGCCGTAAGTATGAAATTAGAAATAATTTATAAGCACTGGTTAAATGTACGAAGAGATGTATATAACCGAAAGGGTATGAACAATAACATAATACCATAAAATAATAAGGGGCAATGAAAGCAGAATGCGTATGTAACAAAATGATGCTCTGAGTACAGGCTTGCGTGCTCTGGACGCCACTTTAGTGGTGGGCCAGATGTCCCGAGTTAGTGCGCTAGTGG' }, { '_id': '7domcyti', 'accession': 'EF546806.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-C, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'iz8ant3f' }, 'segment': '', 'sequence': 'GGCGGGGGGGCATATTATTACCCCCCCCGCCCGGGACGGGACATCACGTGATGCCTTTATAAATCACGTGGGCATATAACTTTTGTATGCCGTAAGCTGTATAAAAGCGGCAGCTTTATGTTTTATTCGTTGCTTCATCGATAAGCAATGTGATAATGGATGGTGGTCCTGTTGCAAAGCAGGTGACTGGTTTGCTATTTATATCAGTTGTTGTGCAGGGTTTTGGTGTATACAACGCTGATCAGAATGGAATTCTGGAATTCGGAAGCATTCTGCGACGATGTGAAGCGTTGTATCAAGCAAAAATATTGGGAAGAGCGGATGAAATCTCTATTTATAGAGAGGGTAAGTGGTTATGTAAGAAGGATTTGTGGTTATGGTAATCTTGATGATGCAATATATGCTGTTCAGCAATTGAAGACTTCTATAGTTCAGTGTGCTGAACGATATGGGAAAGCTTGTGTGGTTGTATATTCGGGTACTGATAAATTAATGGTCTATAGACTACATACAATGGCTTTCTTCTTCGAAGAATATGTTGAGGAACTGAAGACTGCAGATCCATTAACTGTTCAGCTATTCTGTGATGAGGATATATCAGAATTCGTAGAATCTGATGTACGTTATTGTAAGAATGTTATTATGGCATCGACAGATGAGCATGTATCTATAAACAAATGTATTCAGATAGTAATATCTGATAATGTAATAACATTCTATTAATGTAATCTTCATTTCAGCGTAAGTCTATAATGAAGTGGTTGATTTATGATTTTTAGAATATAATCATAAATGGTCAACTAGACCAAATAATACCTTAAGAAATATAACACACTATGCCAATAAGGGAAAAATGCAATTAAGACCTCTAAGATTCTATTTAGAATAATATAAATGTGCCGAAGGCACATATAAATAATATTAGGCCGAAGGCCTAATAATTTGTAAGTGAATGACCATCTGTACGCTTACGTGGAATGTGGGTCATATGTCCCGAGTTAGTGCGCCAGTGG' }, { '_id': 'oh5ouqi2', 'accession': 'EF546805.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-M, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'pqm6etvy' }, 'segment': '', 'sequence': 
'GGCGGGGGGGCATATTATTACCCCCCCCGCCCGGGACGGGACATCACGTGCGCCTATAGTAATGCACGTGATAATACATGGCTTGCAGCGCAAGATATATGGCTTCCGCAGGAAGCCTTGCAATATATGGGTTTGTCTAAGACCTGCCGACTCTATAAAAAGAGGCGGCGAGTTCATGGCTTTGACAGGAGAACGAGTGAAACAATTCTTTGAATGGTTTCTGTTCTTCGCAGCAATATTTGTTGCTATATCAATAATATATATATTGTTGGCAGTTCTCCTTGAACTACCTAAGTATATTAAGGGCGTGGTAAAATACATAGTGGAGTATGTAACCAGACGGCGAGTATGGATGCGACGAACACAGTTGACGGAGGCAACTGGAGGTGGCGAGATAGAAGGAGATCGGCATGACAGTCATATTACTGTTATGCCTTCTGTTCCACCGGTAAGTGCTCCTATATCCAATAGGAGAGGTGATCAGGGTCTTCGACCAAGCACGGGACCAATGTTTTAAATACTCGTGTAATATAATATACGAGAATACAATACCCAATGATCTGTTGAATGTAATATGTAATGCAAAATATAATATAGTTTGCTGTTACATGTTGTTGTACAATTCAAAATATATGTGGTGTCGAAGTCTTCTTATATGGAAGTATACCTAATAACTAAAATATAAGATTGGTGTATACTTCATGACAAGGATATTAAACTATACAATACAACCACATAAATACAATGTATTGTATGAATTAAATAAGAAGACAGAGAAGTAAAGTGAAGTCTGACAGCGACAGATAAGGACAGGAGAATATGATTGGTGGAGCAAGACTTAACGGTTAAGCAAAGCATATGGATAAGAATCTTTCTTCCGAAGGAAGGAATATTCGCGGAGGATAGAATAAAGATTGTAATCTGAAAAACACACTATGACAGAAAGGGAAAAATGCAATTATGACCTGTCAGATTGTGTTATAGGACCGACGGAGGCCGAAGGCCGAAGGCCTGAAGGCCGATCAGTATGGCGCCTGTATTATTCTTTATTATGGTGGGCCATATGTCCCGAGTTAGTGCGTTAGTGG' }, { '_id': '4vbmnj24', 'accession': 'EF546803.1', 'definition': 'Abaca bunchy top virus isolate Q1108 segment DNA-U3, complete sequence', 'host': 'Musa textilis', 'isolate_id': 'mzojjjmz', 'otu_id': 'crag4l7j', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'vza1fxor' }, 'segment': '', 'sequence': 'GGGGCTGGGGCATATTATTACYCCCAGCCCCGGAACGGGACATCTGAATAGTATTGGGCTCATTATATATAATGGGCCGACAAATAAGCGTACAGTTCAGTATCTTCGTTTTGGGCCTTGGCCCAAAATTAAGAGAACGTGTGCGCTTGTTTGGTTAGACCGAAGGTCCGGTTCGAATAAGGTATATTCTTTGCTTCCGAAGGAAGGAACAACGAAATATTCAATTTCGTTGTGACAGGTGTACGTTTACTATTGGTTCGCAATGTTTATAAATACCCGCGAAGCTCAAGGGTATGGTGCTCTCTCTCTTCTGTCATATGCGACTGYAAGGCGATCATATGCGACTGCAAGGCGATACTCGCCGTGAGTAAAGAAGGGCGGAGGTGGAGGAGCCGCCTCGCCCGTCCTCTCAGACGACGTAGTATACGAAGTCGTGTGTAGGTAAGGGTATTTATAGAAGCGCGTGTACTATATACGCGCCTGTATATTACATGTATTTGTGTATTTAATTATTTAAATACTAAACCTTAATGAGGAGAAGTGTTCATATAACAAATAAAATATAACATATTTACTAGGCAACAATATTAACAATATTTGCGATCAAAAGGGTATAATTAAGGTCCTGTAATATACATATTTATATGTATATCAGAGTTCAGGTAAATAATCTGGCCTTGAAGGAACCAATAATAATACTGATAGGATAAGAAGAAATATAACACGCTATGCCAATAAGGGAAAAATAATATAATGACCTGTAAGATTGTTTTTTAGAACCGACGGAGGCCGAAGGCCGAAGGCCTGAAGGCCGATCAGTATGCCGGATCATTAGGTGTTGCTTCTCCAAGAAGCAAGGGTGTAAATGTAAATATGAAAGCATCTGACTACTTTAGTGGTGGGCCTACATACAGCTGTCATAAGACAGCTGGCAGAGGATAATTGGACGGACTTGAAATATAAATACAATACAACATATTAATATATTCCACTATCAGACAAATGATGCGCTGACGTCATGGCGTGCAATCGTGACCGTTTAAATGTCCCGCTGACGTA' } ], 'source_name': 'Q1108', 'source_type': 'isolate' } ], 'issues': None, 'last_indexed_version': None, 'lower_name': 'abaca bunchy top virus', 'name': 'Abaca bunchy top virus', 'reference': { 'id': 'i0a9tnrl' }, 'remote': { 'id': 'c93ec9a9' }, 'schema': [ ], 'user': { 'id': 'igboyes' }, 'verified': True, 'version': 0 } ] ] ]
py
1a405e34a42ad5f83f22005278f93e42d563bd8b
import datetime
import json
import os
import re
import fnmatch
import cv2
from PIL import Image
import numpy as np
from pycococreatortools import pycococreatortools

ROOT_DIR = '../'
DATA_DIR = '/media/margery/4ABB9B07DF30B9DB/pythonDemo/medical_image_segmentation/Data/data_png_png'
ANNOTATION_TUMOR_DIR = '../test_tumor_mask'
ANNOTATION_WALL_DIR = '../test_wall_mask'

INFO = {
    "description": "Rectal Cancer Dataset",
    "url": "https://github.com/waspinator/pycococreator",
    "version": "0.1.0",
    "year": 2020,
    "contributor": "PING MENG",
    "date_created": datetime.datetime.utcnow().isoformat(' ')
}

LICENSES = [
    {
        "id": 1,
        "name": "Attribution-NonCommercial-ShareAlike License",
        "url": "http://creativecommons.org/licenses/by-nc-sa/2.0/"
    }
]

# Add categories as needed
CATEGORIES = [
    {
        'id': 0,
        'name': 'Tumor',
        'supercategory': 'Tumor',
    },
    {
        'id': 1,
        'name': 'RectalWall',
        'supercategory': 'RectalWall'
    }
]


def filter_for_jpeg(root, files):
    file_types = ['*.jpeg', '*.jpg', '*.png']
    file_types = r'|'.join([fnmatch.translate(x) for x in file_types])
    files = [os.path.join(root, f) for f in files]
    files = [f for f in files if re.match(file_types, f)]
    return files


def filter_for_annotations(root, files, image_filename):
    file_types = ['*.png']
    file_types = r'|'.join([fnmatch.translate(x) for x in file_types])
    basename_no_extension = os.path.splitext(os.path.basename(image_filename))[0]
    # file_name_prefix = basename_no_extension + '.*'
    file_name_prefix = basename_no_extension
    files = [os.path.join(root, f) for f in files]
    files = [f for f in files if re.match(file_types, f)]
    files = [f for f in files if re.match(file_name_prefix, os.path.splitext(os.path.basename(f))[0][:10])]
    return files


def main():
    coco_output = {
        "info": INFO,
        "licenses": LICENSES,
        "categories": CATEGORIES,
        "images": [],
        "annotations": []
    }

    image_id = 1
    segmentation_id = 1

    # data_list = [l.strip('\n') for l in open(os.path.join(DATA_DIR, 'train.txt')).readlines()]
    # data_list = [l.strip('\n') for l in open(os.path.join(DATA_DIR, 'val.txt')).readlines()]
    data_list = [file for file in os.listdir('/media/margery/4ABB9B07DF30B9DB/pythonDemo/tools/prepare_detection_dataset/imgs_rectal')]

    for i in range(len(data_list)):
        image = Image.open(os.path.join(DATA_DIR, 'imgs', data_list[i]))
        image_info = pycococreatortools.create_image_info(
            image_id, os.path.basename(data_list[i]), image.size)
        coco_output["images"].append(image_info)

        # filter for associated png annotations
        for (root, _, files), (rootw, w_, filesw) in zip(os.walk(ANNOTATION_TUMOR_DIR), os.walk(ANNOTATION_WALL_DIR)):
            tumor_anno_files = filter_for_annotations(root, files, data_list[i])
            wall_anno_files = filter_for_annotations(rootw, filesw, data_list[i])

            # go through each associated annotation
            for tumor_anno_filename in tumor_anno_files:
                class_id = [x['id'] for x in CATEGORIES]
                t_category_info = {'id': class_id[0], 'is_crowd': 0}
                t_binary_mask = np.asarray(Image.open(tumor_anno_filename)
                                           .convert('1')).astype(np.uint8)
                t_anno_info = pycococreatortools.create_annotation_info(
                    segmentation_id, image_id, t_category_info, t_binary_mask,
                    image.size, tolerance=2)
                if t_anno_info is not None:
                    coco_output["annotations"].append(t_anno_info)

                segmentation_id = segmentation_id + 1

            for wall_anno_filename in wall_anno_files:
                class_id = [x['id'] for x in CATEGORIES]
                w_category_info = {'id': class_id[1], 'is_crowd': 0}
                w_binary_mask = np.asarray(Image.open(wall_anno_filename)
                                           .convert('1')).astype(np.uint8)
                w_anno_info = pycococreatortools.create_annotation_info(
                    segmentation_id, image_id, w_category_info, w_binary_mask,
                    image.size, tolerance=2)
                if w_anno_info is not None:
                    coco_output["annotations"].append(w_anno_info)

                segmentation_id = segmentation_id + 1

        image_id = image_id + 1

    with open('{}/rectal_seg_test.json'.format(ROOT_DIR), 'w') as output_json_file:
        json.dump(coco_output, output_json_file)


if __name__ == "__main__":
    main()
py
1a405ebf7bae981410aa5adc31b9de7af56a0424
# make sure you use grpc version 1.39.0 or later,
# because of https://github.com/grpc/grpc/issues/15880 that affected earlier versions
import grpc
import hello_pb2_grpc
import hello_pb2
from locust import events, User, task
from locust.exception import LocustError
from locust.user.task import LOCUST_STATE_STOPPING
from hello_server import start_server
import gevent
import time

# patch grpc so that it uses gevent instead of asyncio
import grpc.experimental.gevent as grpc_gevent

grpc_gevent.init_gevent()


@events.init.add_listener
def run_grpc_server(environment, **_kwargs):
    # Start the dummy server. This is not something you would do in a real test.
    gevent.spawn(start_server)


class GrpcClient:
    def __init__(self, stub):
        self._stub_class = stub.__class__
        self._stub = stub

    def __getattr__(self, name):
        func = self._stub_class.__getattribute__(self._stub, name)

        def wrapper(*args, **kwargs):
            request_meta = {
                "request_type": "grpc",
                "name": name,
                "start_time": time.time(),
                "response_length": 0,
                "exception": None,
                "context": None,
                "response": None,
            }
            start_perf_counter = time.perf_counter()
            try:
                request_meta["response"] = func(*args, **kwargs)
                request_meta["response_length"] = len(request_meta["response"].message)
            except grpc.RpcError as e:
                request_meta["exception"] = e
            request_meta["response_time"] = (time.perf_counter() - start_perf_counter) * 1000
            events.request.fire(**request_meta)
            return request_meta["response"]

        return wrapper


class GrpcUser(User):
    abstract = True

    stub_class = None

    def __init__(self, environment):
        super().__init__(environment)
        for attr_value, attr_name in ((self.host, "host"), (self.stub_class, "stub_class")):
            if attr_value is None:
                raise LocustError(f"You must specify the {attr_name}.")
        self._channel = grpc.insecure_channel(self.host)
        self._channel_closed = False
        stub = self.stub_class(self._channel)
        self.client = GrpcClient(stub)


class HelloGrpcUser(GrpcUser):
    host = "localhost:50051"
    stub_class = hello_pb2_grpc.HelloServiceStub

    @task
    def sayHello(self):
        if not self._channel_closed:
            self.client.SayHello(hello_pb2.HelloRequest(name="Test"))
        time.sleep(1)
py
1a405fdc269a9ff85e9e36a18371bc0ce3fdc543
from continual.utils import temporary_parameter


def test_temporary_parameter():
    class MyClass:
        def __init__(self) -> None:
            self.x = 0

    c = MyClass()
    assert c.x == 0

    # Existing param
    with temporary_parameter(c, "x", 42):
        assert c.x == 42

    assert c.x == 0

    # Non-existing param
    with temporary_parameter(c, "y", 42):
        assert c.y == 42

    assert not hasattr(c, "y")
py
1a4060ea797d163705feb1d27f4c27bbe8469c39
import numpy as np
import os
import sklearn
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.svm import LinearSVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.naive_bayes import MultinomialNB
from sklearn.linear_model import RidgeClassifier
from sklearn.model_selection import train_test_split, KFold
from sklearn.metrics import classification_report, confusion_matrix
from pymongo import MongoClient
import datetime

import sys
sys.path.append('../..')
import utils.dbUtils
import utils.gensimUtils

client = MongoClient('localhost', 27017)
db = client.TFE
collection = db.results5


def train_and_test(experiment_id, max_features=None):
    print("Using max features : {}".format(max_features))
    idx = collection.insert_one({'date': datetime.datetime.now(), 'corpus': 'news_cleaned',
                                 'max_features': max_features, 'experiment_id': experiment_id})

    print("Making dataset")
    train = utils.dbUtils.TokenizedIterator(
        'news_cleaned',
        filters={'type': {'$in': ['fake', 'reliable']},
                 'domain': {'$nin': ['nytimes.com', 'beforeitsnews.com']}})
    y_train = np.array([x for x in train.iterTags()])

    test = utils.dbUtils.TokenizedIterator(
        'news_cleaned',
        filters={'type': {'$in': ['fake', 'reliable']},
                 'domain': {'$in': ['nytimes.com', 'beforeitsnews.com']}})
    y_test = np.array([x for x in test.iterTags()])

    print("Fitting tf-idf")
    vectorizer = TfidfVectorizer(max_features=max_features)
    X_train = vectorizer.fit_transform([' '.join(news) for news in train])
    X_test = vectorizer.transform([' '.join(news) for news in test])

    print("Fitting LinearSVC")
    model = LinearSVC()
    model.fit(X_train, y_train)
    crp = classification_report(y_test, model.predict(X_test), labels=['fake', 'reliable'], output_dict=True)
    collection.update_one({'_id': idx.inserted_id}, {
        '$push': {'report': {
            'model': 'LinearSVC',
            'classification_report': crp,
            'train_accuracy': model.score(X_train, y_train),
            'test_accuracy': model.score(X_test, y_test),
            'confusion matrix': {
                'train': list(map(int, confusion_matrix(y_train, model.predict(X_train), labels=['fake', 'reliable']).ravel())),
                'test': list(map(int, confusion_matrix(y_test, model.predict(X_test), labels=['fake', 'reliable']).ravel()))
            }
        }}})

    print("MultinomialNB")
    model = MultinomialNB()
    model.fit(X_train, y_train)
    crp = classification_report(y_test, model.predict(X_test), labels=['fake', 'reliable'], output_dict=True)
    collection.update_one({'_id': idx.inserted_id}, {
        '$push': {'report': {
            'model': 'MultinomialNB',
            'classification_report': crp,
            'train_accuracy': model.score(X_train, y_train),
            'test_accuracy': model.score(X_test, y_test),
            'confusion matrix': {
                'train': list(map(int, confusion_matrix(y_train, model.predict(X_train), labels=['fake', 'reliable']).ravel())),
                'test': list(map(int, confusion_matrix(y_test, model.predict(X_test), labels=['fake', 'reliable']).ravel()))
            }
        }}})

    print("DecisionTreeClassifier")
    model = DecisionTreeClassifier()
    model.fit(X_train, y_train)
    crp = classification_report(y_test, model.predict(X_test), labels=['fake', 'reliable'], output_dict=True)
    collection.update_one({'_id': idx.inserted_id}, {
        '$push': {'report': {
            'model': 'DecisionTreeClassifier',
            'classification_report': crp,
            'train_accuracy': model.score(X_train, y_train),
            'test_accuracy': model.score(X_test, y_test),
            'confusion matrix': {
                'train': list(map(int, confusion_matrix(y_train, model.predict(X_train), labels=['fake', 'reliable']).ravel())),
                'test': list(map(int, confusion_matrix(y_test, model.predict(X_test), labels=['fake', 'reliable']).ravel()))
            }
        }}})

    print("RidgeClassifier")
    model = RidgeClassifier()
    model.fit(X_train, y_train)
    crp = classification_report(y_test, model.predict(X_test), labels=['fake', 'reliable'], output_dict=True)
    collection.update_one({'_id': idx.inserted_id}, {
        '$push': {'report': {
            'model': 'RidgeClassifier',
            'classification_report': crp,
            'train_accuracy': model.score(X_train, y_train),
            'test_accuracy': model.score(X_test, y_test),
            'confusion matrix': {
                'train': list(map(int, confusion_matrix(y_train, model.predict(X_train), labels=['fake', 'reliable']).ravel())),
                'test': list(map(int, confusion_matrix(y_test, model.predict(X_test), labels=['fake', 'reliable']).ravel()))
            }
        }}})


if __name__ == "__main__":
    max_features = [10000, 50000, 100000, 250000, 500000, 1000000]
    for features in max_features:
        train_and_test(13, features)
py
1a4061b9295068d7dd2caa5a01ceef8cff344630
import logging
from threading import Thread

from .mikecrm import Mikecrm


class MikeBrush():
    def __init__(self, target, proxys, count):
        '''
        Brush for voting on mike
        :param target: {"page":"", "data":""}
        :param proxys: Queue for {"type":"", "ip":"", "port":00}
        :param count: number of threadings
        '''
        self.target = target
        self.proxys = proxys
        self.count = count
        self.total = 0
        self.votes = 0

    def brush_schedule(self, index):
        proxys = self.proxys
        brush = Mikecrm(**self.target)
        logging.info('Brush thread-%d : task started!' % index)
        while not proxys.empty():
            proxy = proxys.get_nowait()
            self.total += 1
            if brush.set_proxy(*proxy).submit():
                self.votes += 1
                logging.info('Current successes count is %d / %d' % (self.votes, self.total))
        logging.info('Brush thread-%d : task complete!' % index)

    def run(self, block=True):
        tasks = []
        for index in range(self.count):
            task = Thread(name='Threading-%d' % (index + 1), target=self.brush_schedule, args=(index,))
            tasks.append(task)
            task.start()
        logging.info('Brush tasks all started!')
        if block:
            for task in tasks:
                task.join()
            logging.info('Brush tasks all complete!')
py
1a4061ffd0d7072d0f501586ef2dc702b5b5d37e
#!/usr/bin/env python

__all__ = ['soundcloud_download', 'soundcloud_download_by_id']

from ..common import *
import json
import urllib.error

client_id = 'WKcQQdEZw7Oi01KqtHWxeVSxNyRzgT8M'


def soundcloud_download_by_id(id, title=None, output_dir='.', merge=True, info_only=False):
    assert title
    url = 'https://api.soundcloud.com/tracks/{}/{}?client_id={}'.format(id, 'stream', client_id)
    type, ext, size = url_info(url)
    print_info(site_info, title, type, size)
    if not info_only:
        download_urls([url], title, ext, size, output_dir, merge=merge)


def soundcloud_i1_api(track_id):
    url = 'https://api.soundcloud.com/i1/tracks/{}/streams?client_id={}'.format(track_id, client_id)
    return json.loads(get_content(url))['http_mp3_128_url']


def soundcloud_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
    url = 'https://api.soundcloud.com/resolve.json?url={}&client_id={}'.format(url, client_id)
    metadata = get_content(url)
    info = json.loads(metadata)
    title = info["title"]

    real_url = info.get('download_url')
    if real_url is None:
        real_url = info.get('stream_url')  # fixed typo: was 'steram_url', which always returned None
    if real_url is None:
        raise Exception('Cannot get media URI for {}'.format(url))
    real_url = soundcloud_i1_api(info['id'])

    mime, ext, size = url_info(real_url)
    print_info(site_info, title, mime, size)
    if not info_only:
        download_urls([real_url], title, ext, size, output_dir, merge=merge)


site_info = "SoundCloud.com"
download = soundcloud_download
download_playlist = playlist_not_supported('soundcloud')
py
1a4062b68a8ac01634cb95fecdd40e5c51d11658
# -*- coding: utf-8 -*- """Test parsing of symbolic records """ import io import sys from vcfpy import parser __author__ = "Manuel Holtgrewe <[email protected]>" MEDIUM_HEADER = """ ##fileformat=VCFv4.3 ##fileDate=20090805 ##source=myImputationProgramV3.1 ##reference=file:///seq/references/1000GenomesPilot-NCBI36.fasta ##contig=<ID=20,length=62435964,assembly=B36,md5=f126cdf8a6e0c7f379d618ff66beb2da,species="Homo sapiens",taxonomy=x> ##phasing=partial ##INFO=<ID=NS,Number=1,Type=Integer,Description="Number of Samples With Data"> ##INFO=<ID=DP,Number=1,Type=Integer,Description="Total Depth"> ##INFO=<ID=AF,Number=A,Type=Float,Description="Allele Frequency"> ##INFO=<ID=AA,Number=1,Type=String,Description="Ancestral Allele"> ##INFO=<ID=DB,Number=0,Type=Flag,Description="dbSNP membership, build 129"> ##INFO=<ID=H2,Number=0,Type=Flag,Description="HapMap2 membership"> ##INFO=<ID=ANNO,Number=.,Type=String,Description="Additional annotation"> ##INFO=<ID=SVTYPE,Number=1,Type=String,Description="SV type"> ##INFO=<ID=END,Number=1,Type=Integer,Description="SV end position"> ##INFO=<ID=SVLEN,Number=1,Type=Integer,Description="SV length"> ##FILTER=<ID=q10,Description="Quality below 10"> ##FILTER=<ID=s50,Description="Less than 50% of samples have data"> ##FILTER=<ID=PASS,Description="All filters passed"> ##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype"> ##FORMAT=<ID=GQ,Number=1,Type=Integer,Description="Genotype Quality"> ##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Read Depth"> ##FORMAT=<ID=HQ,Number=2,Type=Integer,Description="Haplotype Quality"> ##FORMAT=<ID=FT,Number=1,Type=String,Description="Call-wise filters"> ##ALT=<ID=DUP,Description="Duplication"> ##ALT=<ID=R,Description="IUPAC code R = A/G"> #CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tNA00001\tNA00002\tNA00003 """.lstrip() def vcf_parser(lines): return parser.Parser(io.StringIO(MEDIUM_HEADER + lines), "<builtin>") def test_parse_dup(): # Setup parser with stock header and lines to parse LINES = "2\t321681\t.\tN\t<DUP>\t.\tPASS\tSVTYPE=DUP;END=324681;SVLEN=3000\tGT\t0/1\t0/0\t0/0\n" p = vcf_parser(LINES) p.parse_header() # Perform the actual test if sys.version_info < (3, 6): EXPECTED = ( "Record('2', 321681, [], 'N', [SymbolicAllele('DUP')], None, ['PASS'], " "OrderedDict([('SVTYPE', 'DUP'), ('END', 324681), ('SVLEN', 3000)]), ['GT'], [" "Call('NA00001', OrderedDict([('GT', '0/1')])), Call('NA00002', OrderedDict([('GT', '0/0')])), " "Call('NA00003', OrderedDict([('GT', '0/0')]))])" ) else: EXPECTED = ( "Record('2', 321681, [], 'N', [SymbolicAllele('DUP')], None, ['PASS'], " "{'SVTYPE': 'DUP', 'END': 324681, 'SVLEN': 3000}, ['GT'], [" "Call('NA00001', {'GT': '0/1'}), Call('NA00002', {'GT': '0/0'}), Call('NA00003', {'GT': '0/0'})])" ) rec = p.parse_next_record() assert str(rec) == EXPECTED assert rec.ALT[0].serialize() == "<DUP>" def test_parse_iupac(): # Setup parser with stock header and lines to parse LINES = "2\t321681\t.\tC\t<R>\t.\tPASS\t.\tGT\t0/1\t0/0\t0/0\n" p = vcf_parser(LINES) p.parse_header() # Perform the actual test if sys.version_info < (3, 6): EXPECTED = ( "Record('2', 321681, [], 'C', [SymbolicAllele('R')], None, ['PASS'], OrderedDict(), ['GT'], " "[Call('NA00001', OrderedDict([('GT', '0/1')])), Call('NA00002', OrderedDict([('GT', '0/0')])), " "Call('NA00003', OrderedDict([('GT', '0/0')]))])" ) else: EXPECTED = ( "Record('2', 321681, [], 'C', [SymbolicAllele('R')], None, ['PASS'], {}, ['GT'], " "[Call('NA00001', {'GT': '0/1'}), Call('NA00002', {'GT': '0/0'}), Call('NA00003', {'GT': '0/0'})])" ) rec = 
p.parse_next_record() assert str(rec) == EXPECTED assert rec.ALT[0].serialize() == "<R>"
py
1a4063eddcb84227e5a4eb2a11b26c9fb11b3392
import os

import pandas as pd
import yaml
from tqdm import tqdm


class ResLogger:

    def __init__(self, path):
        self.path = path
        if not os.path.isdir(path):
            os.mkdir(path)

        # Infer the last result computation that has been run
        if os.path.isfile(path + 'res.csv'):
            with open(path + 'res.csv', 'r') as res:
                lines = res.readlines()

                # File is empty with no header
                if len(lines) == 0:
                    self.header = False
                    self.last_run = None

                # File has header
                else:
                    first_line = lines[0]
                    last_line = lines[0]
                    self.columns = pd.Index((first_line[1:]
                                             .rstrip().split(',')))
                    self.header = True

                    # File is empty with header
                    if last_line.split(',')[0] == 0:
                        self.last_run = None

                    # Previous result computations exists
                    else:
                        self.last_run = int(lines[-1].split(',')[0])

        # If result file does not exist
        else:
            self.header = False
            self.last_run = None

    def __enter__(self):
        self.res = open(self.path + 'res.csv', 'a').__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.res.__exit__(exc_type, exc_value, traceback)

    def write_header(self, columns):
        self.columns = columns
        for column in columns:
            self.res.write(',' + column)
        self.res.write('\n')

    def write_res(self, idx, res_series):
        res_list = res_series[self.columns].values
        self.res.write(str(idx))
        for res in res_list:
            self.res.write(',' + str(res))
        self.res.write('\n')


def run_simulations(path, net, metrics, simulation_step_func, until=None, overwrite=False):
    # Load simulation inputs
    with open(path + 'input_config.yaml', 'r') as config_file:
        eq_list = yaml.safe_load(config_file)

    eq_frame_dict = {}
    for (element, quantity) in eq_list:
        eq_frame = pd.read_csv(path + f'{element}_{quantity}.csv', index_col=0)
        eq_frame_dict[(element, quantity)] = eq_frame

    # Set final simulation step
    if until is None:
        stop = len(eq_frame.index)
    else:
        stop = until

    # Logic for applying n-th inputs and running simulation step
    def set_eq_and_run(n):
        for (e_name, q_name), q_value in eq_frame_dict.items():
            q_series = pd.Series(q_value.loc[n, :], name=q_name)
            set_eq_by_element_name(net, e_name, q_series)
        return simulation_step_func(net, metrics)

    # Check progress with logger
    with ResLogger(path) as l:
        # If no header, run zeroth simulation step to infer column names
        if not l.header:
            progress = iter(tqdm(range(stop)))
            results = set_eq_and_run(next(progress))
            l.write_header(results.index)
            l.write_res(0, results)
        # If header but no last run, start from beginning
        elif not l.last_run:
            progress = tqdm(range(stop))
        # Otherwise start after last run
        else:
            progress = tqdm(range(l.last_run + 1, stop))

        # Main loop
        for n in progress:
            results = set_eq_and_run(n)
            l.write_res(n, results)


def init_simulations(path, eq_frame_dict):
    if not os.path.isdir(path):
        os.mkdir(path)

    eq_list = []
    for (element, quantity), eq_frame in eq_frame_dict.items():
        eq_frame.to_csv(path + f'{element}_{quantity}.csv')
        eq_list.append([element, quantity])

    with open(path + 'input_config.yaml', 'w') as config_file:
        yaml.dump(eq_list, config_file)


def set_eq_by_element_name(net, element, eq_series):
    pp_idx = getattr(net, element + '_name_map')[eq_series.index]
    getattr(net, element).loc[pp_idx, eq_series.name] = eq_series.values
py
1a406434556c4f79d893e17e4b7aec74c0a91dee
# Generated by Django 3.1.2 on 2020-11-02 14:24

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('book_tabl', '0005_auto_20201102_1722'),
    ]

    operations = [
        migrations.AlterField(
            model_name='book',
            name='book_name',
            field=models.CharField(default='', max_length=200, verbose_name='Название книги'),
        ),
    ]
py
1a40644dbb24e98dba8f2ee94953c842f3372947
import pandas as pd

from pipedown.nodes.metrics import MeanAbsolutePercentageError


def test_mean_absolute_percentage_error(is_close):

    y_pred = pd.Series([1.0, 2.0, 3.0, 4.0])
    y_true = pd.Series([1.0, 3.0, 5.0, 7.0])

    mape = MeanAbsolutePercentageError()
    v = mape.run(y_pred, y_true)

    assert isinstance(v, float)
    assert is_close(v, 100 * (0 + 1 / 3 + 2 / 5 + 3 / 7) / 4)
    assert mape.get_metric_name() == "mean_absolute_percentage_error"
py
1a4064b12a7271604d0dc492b4813d2337b4f41f
# -*- coding: utf-8 -*- # # Copyright (C) 2019 Chris Caron <[email protected]> # All rights reserved. # # This code is licensed under the MIT License. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files(the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and / or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions : # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import re import os import six import pytest import requests import mock from json import dumps from random import choice from string import ascii_uppercase as str_alpha from string import digits as str_num from apprise import plugins from apprise import NotifyType from apprise import NotifyBase from apprise import Apprise from apprise import AppriseAsset from apprise import AppriseAttachment from apprise.common import NotifyFormat from apprise.common import OverflowMode # Disable logging for a cleaner testing output import logging logging.disable(logging.CRITICAL) # a test UUID we can use UUID4 = '8b799edf-6f98-4d3a-9be7-2862fb4e5752' # Some exception handling we'll use REQUEST_EXCEPTIONS = ( requests.ConnectionError( 0, 'requests.ConnectionError() not handled'), requests.RequestException( 0, 'requests.RequestException() not handled'), requests.HTTPError( 0, 'requests.HTTPError() not handled'), requests.ReadTimeout( 0, 'requests.ReadTimeout() not handled'), requests.TooManyRedirects( 0, 'requests.TooManyRedirects() not handled'), ) # Attachment Directory TEST_VAR_DIR = os.path.join(os.path.dirname(__file__), 'var') TEST_URLS = ( ################################## # NotifyBoxcar ################################## ('boxcar://', { # invalid secret key 'instance': TypeError, }), # A a bad url ('boxcar://:@/', { 'instance': TypeError, }), # No secret specified ('boxcar://%s' % ('a' * 64), { 'instance': TypeError, }), # No access specified (whitespace is trimmed) ('boxcar://%%20/%s' % ('a' * 64), { 'instance': TypeError, }), # No secret specified (whitespace is trimmed) ('boxcar://%s/%%20' % ('a' * 64), { 'instance': TypeError, }), # Provide both an access and a secret ('boxcar://%s/%s' % ('a' * 64, 'b' * 64), { 'instance': plugins.NotifyBoxcar, 'requests_response_code': requests.codes.created, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'boxcar://a...a/****/', }), # Test without image set ('boxcar://%s/%s?image=True' % ('a' * 64, 'b' * 64), { 'instance': plugins.NotifyBoxcar, 'requests_response_code': requests.codes.created, # don't include an image in Asset by default 'include_image': False, }), ('boxcar://%s/%s?image=False' % ('a' * 64, 'b' * 64), { 'instance': plugins.NotifyBoxcar, 'requests_response_code': requests.codes.created, }), # our access, secret and device are all 64 
characters # which is what we're doing here ('boxcar://%s/%s/@tag1/tag2///%s/?to=tag3' % ( 'a' * 64, 'b' * 64, 'd' * 64), { 'instance': plugins.NotifyBoxcar, 'requests_response_code': requests.codes.created, }), # An invalid tag ('boxcar://%s/%s/@%s' % ('a' * 64, 'b' * 64, 't' * 64), { 'instance': plugins.NotifyBoxcar, 'requests_response_code': requests.codes.created, }), ('boxcar://%s/%s/' % ('a' * 64, 'b' * 64), { 'instance': plugins.NotifyBoxcar, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('boxcar://%s/%s/' % ('a' * 64, 'b' * 64), { 'instance': plugins.NotifyBoxcar, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('boxcar://%s/%s/' % ('a' * 64, 'b' * 64), { 'instance': plugins.NotifyBoxcar, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyClickSend ################################## ('clicksend://', { # We failed to identify any valid authentication 'instance': TypeError, }), ('clicksend://:@/', { # We failed to identify any valid authentication 'instance': TypeError, }), ('clicksend://user:pass@{}/{}/{}'.format('1' * 10, '2' * 15, 'a' * 13), { # invalid target numbers; we'll fail to notify anyone 'instance': plugins.NotifyClickSend, 'notify_response': False, }), ('clicksend://user:pass@{}?batch=yes'.format('3' * 14), { # valid number 'instance': plugins.NotifyClickSend, }), ('clicksend://user:pass@{}?batch=yes&to={}'.format('3' * 14, '6' * 14), { # valid number but using the to= variable 'instance': plugins.NotifyClickSend, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'clicksend://user:****', }), ('clicksend://user:pass@{}?batch=no'.format('3' * 14), { # valid number - no batch 'instance': plugins.NotifyClickSend, }), ('clicksend://user:pass@{}'.format('3' * 14), { 'instance': plugins.NotifyClickSend, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('clicksend://user:pass@{}'.format('3' * 14), { 'instance': plugins.NotifyClickSend, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyD7Networks ################################## ('d7sms://', { # We failed to identify any valid authentication 'instance': TypeError, }), ('d7sms://:@/', { # We failed to identify any valid authentication 'instance': TypeError, }), ('d7sms://user:pass@{}/{}/{}'.format('1' * 10, '2' * 15, 'a' * 13), { # No valid targets to notify 'instance': TypeError, }), ('d7sms://user:pass@{}?batch=yes'.format('3' * 14), { # valid number 'instance': plugins.NotifyD7Networks, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'd7sms://user:****@', }), ('d7sms://user:pass@{}?batch=yes'.format('7' * 14), { # valid number 'instance': plugins.NotifyD7Networks, # Test what happens if a batch send fails to return a messageCount 'requests_response_text': { 'data': { 'messageCount': 0, }, }, # Expected notify() response 'notify_response': False, }), ('d7sms://user:pass@{}?batch=yes&to={}'.format('3' * 14, '6' * 14), { # valid number 'instance': plugins.NotifyD7Networks, }), ('d7sms://user:pass@{}?batch=yes&from=apprise'.format('3' * 14), { # valid number, utilizing the optional from= variable 'instance': 
plugins.NotifyD7Networks, }), ('d7sms://user:pass@{}?batch=yes&source=apprise'.format('3' * 14), { # valid number, utilizing the optional source= variable (same as from) 'instance': plugins.NotifyD7Networks, }), ('d7sms://user:pass@{}?priority=invalid'.format('3' * 14), { # valid number; invalid priority 'instance': plugins.NotifyD7Networks, }), ('d7sms://user:pass@{}?priority=3'.format('3' * 14), { # valid number; adjusted priority 'instance': plugins.NotifyD7Networks, }), ('d7sms://user:pass@{}?priority=high'.format('3' * 14), { # valid number; adjusted priority (string supported) 'instance': plugins.NotifyD7Networks, }), ('d7sms://user:pass@{}?batch=no'.format('3' * 14), { # valid number - no batch 'instance': plugins.NotifyD7Networks, }), ('d7sms://user:pass@{}'.format('3' * 14), { 'instance': plugins.NotifyD7Networks, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('d7sms://user:pass@{}'.format('3' * 14), { 'instance': plugins.NotifyD7Networks, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyDiscord ################################## ('discord://', { 'instance': TypeError, }), # An invalid url ('discord://:@/', { 'instance': TypeError, }), # No webhook_token specified ('discord://%s' % ('i' * 24), { 'instance': TypeError, }), # Provide both an webhook id and a webhook token ('discord://%s/%s' % ('i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), # Provide a temporary username ('discord://l2g@%s/%s' % ('i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), # test image= field ('discord://%s/%s?format=markdown&footer=Yes&image=Yes' % ( 'i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, # don't include an image by default 'include_image': False, }), ('discord://%s/%s?format=markdown&footer=Yes&image=No' % ( 'i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), ('discord://%s/%s?format=markdown&footer=Yes&image=Yes' % ( 'i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), ('https://discord.com/api/webhooks/{}/{}'.format( '0' * 10, 'B' * 40), { # Native URL Support, support the provided discord URL from their # webpage. 
'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), ('https://discordapp.com/api/webhooks/{}/{}'.format( '0' * 10, 'B' * 40), { # Legacy Native URL Support, support the older URL (to be # decomissioned on Nov 7th 2020) 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), ('https://discordapp.com/api/webhooks/{}/{}?footer=yes'.format( '0' * 10, 'B' * 40), { # Native URL Support with arguments 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), ('discord://%s/%s?format=markdown&avatar=No&footer=No' % ( 'i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), # different format support ('discord://%s/%s?format=markdown' % ('i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), ('discord://%s/%s?format=text' % ('i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), # Test with avatar URL ('discord://%s/%s?avatar_url=http://localhost/test.jpg' % ( 'i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, }), # Test without image set ('discord://%s/%s' % ('i' * 24, 't' * 64), { 'instance': plugins.NotifyDiscord, 'requests_response_code': requests.codes.no_content, # don't include an image by default 'include_image': False, }), ('discord://%s/%s/' % ('a' * 24, 'b' * 64), { 'instance': plugins.NotifyDiscord, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('discord://%s/%s/' % ('a' * 24, 'b' * 64), { 'instance': plugins.NotifyDiscord, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('discord://%s/%s/' % ('a' * 24, 'b' * 64), { 'instance': plugins.NotifyDiscord, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyEmby ################################## # Insecure Request; no hostname specified ('emby://', { 'instance': None, }), # Secure Emby Request; no hostname specified ('embys://', { 'instance': None, }), # No user specified ('emby://localhost', { # Missing a username 'instance': TypeError, }), ('emby://:@/', { 'instance': None, }), # Valid Authentication ('emby://l2g@localhost', { 'instance': plugins.NotifyEmby, # our response will be False because our authentication can't be # tested very well using this matrix. It will resume in # in test_notify_emby_plugin() 'response': False, }), ('embys://l2g:password@localhost', { 'instance': plugins.NotifyEmby, # our response will be False because our authentication can't be # tested very well using this matrix. 
It will resume in # in test_notify_emby_plugin() 'response': False, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'embys://l2g:****@localhost', }), # The rest of the emby tests are in test_notify_emby_plugin() ################################## # NotifyEnigma2 ################################## ('enigma2://:@/', { 'instance': None, }), ('enigma2://', { 'instance': None, }), ('enigma2s://', { 'instance': None, }), ('enigma2://localhost', { 'instance': plugins.NotifyEnigma2, # This will fail because we're also expecting a server acknowledgement 'notify_response': False, }), ('enigma2://localhost', { 'instance': plugins.NotifyEnigma2, # invalid JSON response 'requests_response_text': '{', 'notify_response': False, }), ('enigma2://localhost', { 'instance': plugins.NotifyEnigma2, # False is returned 'requests_response_text': { 'result': False }, 'notify_response': False, }), ('enigma2://localhost', { 'instance': plugins.NotifyEnigma2, # With the right content, this will succeed 'requests_response_text': { 'result': True } }), ('enigma2://user@localhost', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True } }), # Set timeout ('enigma2://user@localhost?timeout=-1', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True } }), # Set timeout ('enigma2://user@localhost?timeout=-1000', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True } }), # Set invalid timeout (defaults to a set value) ('enigma2://user@localhost?timeout=invalid', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True } }), ('enigma2://user:pass@localhost', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'enigma2://user:****@localhost', }), ('enigma2://localhost:8080', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, }), ('enigma2://user:pass@localhost:8080', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, }), ('enigma2s://localhost', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, }), ('enigma2s://user:pass@localhost', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'enigma2s://user:****@localhost', }), ('enigma2s://localhost:8080/path/', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'enigma2s://localhost:8080/path/', }), ('enigma2s://user:pass@localhost:8080', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, }), ('enigma2://localhost:8080/path?-HeaderKey=HeaderValue', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, }), ('enigma2://user:pass@localhost:8081', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('enigma2://user:pass@localhost:8082', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('enigma2://user:pass@localhost:8083', { 'instance': plugins.NotifyEnigma2, 'requests_response_text': { 'result': True }, # Throws a series of 
connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyFaast ################################## ('faast://', { 'instance': TypeError, }), ('faast://:@/', { 'instance': TypeError, }), # Auth Token specified ('faast://%s' % ('a' * 32), { 'instance': plugins.NotifyFaast, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'faast://a...a', }), ('faast://%s' % ('a' * 32), { 'instance': plugins.NotifyFaast, # don't include an image by default 'include_image': False, }), ('faast://%s' % ('a' * 32), { 'instance': plugins.NotifyFaast, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('faast://%s' % ('a' * 32), { 'instance': plugins.NotifyFaast, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('faast://%s' % ('a' * 32), { 'instance': plugins.NotifyFaast, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyFlock ################################## # No token specified ('flock://', { 'instance': TypeError, }), # An invalid url ('flock://:@/', { 'instance': TypeError, }), # Provide a token ('flock://%s' % ('t' * 24), { 'instance': plugins.NotifyFlock, }), # Image handling ('flock://%s?image=True' % ('t' * 24), { 'instance': plugins.NotifyFlock, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'flock://t...t', }), ('flock://%s?image=False' % ('t' * 24), { 'instance': plugins.NotifyFlock, }), ('flock://%s?image=True' % ('t' * 24), { 'instance': plugins.NotifyFlock, # Run test when image is set to True, but one couldn't actually be # loaded from the Asset Object. 
'include_image': False, }), # Test to= ('flock://%s?to=u:%s&format=markdown' % ('i' * 24, 'u' * 12), { 'instance': plugins.NotifyFlock, }), # Provide markdown format ('flock://%s?format=markdown' % ('i' * 24), { 'instance': plugins.NotifyFlock, }), # Provide text format ('flock://%s?format=text' % ('i' * 24), { 'instance': plugins.NotifyFlock, }), # Native URL Support, take the slack URL and still build from it ('https://api.flock.com/hooks/sendMessage/{}/'.format('i' * 24), { 'instance': plugins.NotifyFlock, }), # Native URL Support with arguments ('https://api.flock.com/hooks/sendMessage/{}/?format=markdown'.format( 'i' * 24), { 'instance': plugins.NotifyFlock, }), # Bot API presumed if one or more targets are specified # Provide markdown format ('flock://%s/u:%s?format=markdown' % ('i' * 24, 'u' * 12), { 'instance': plugins.NotifyFlock, }), # Bot API presumed if one or more targets are specified # Provide text format ('flock://%s/u:%s?format=html' % ('i' * 24, 'u' * 12), { 'instance': plugins.NotifyFlock, }), # Bot API presumed if one or more targets are specified # u: is optional ('flock://%s/%s?format=text' % ('i' * 24, 'u' * 12), { 'instance': plugins.NotifyFlock, }), # Bot API presumed if one or more targets are specified # Multi-entries ('flock://%s/g:%s/u:%s?format=text' % ('i' * 24, 'g' * 12, 'u' * 12), { 'instance': plugins.NotifyFlock, }), # Bot API presumed if one or more targets are specified # Multi-entries using @ for user and # for channel ('flock://%s/#%s/@%s?format=text' % ('i' * 24, 'g' * 12, 'u' * 12), { 'instance': plugins.NotifyFlock, }), # Bot API presumed if one or more targets are specified # has bad entry ('flock://%s/g:%s/u:%s?format=text' % ('i' * 24, 'g' * 12, 'u' * 10), { 'instance': plugins.NotifyFlock, }), # Invalid user/group defined ('flock://%s/g:/u:?format=text' % ('i' * 24), { 'instance': TypeError, }), # we don't focus on the invalid length of the user/group fields. 
# As a result, the following will load and pass the data upstream ('flock://%s/g:%s/u:%s?format=text' % ('i' * 24, 'g' * 14, 'u' * 10), { # We will still instantiate the object 'instance': plugins.NotifyFlock, }), # Error Testing ('flock://%s/g:%s/u:%s?format=text' % ('i' * 24, 'g' * 12, 'u' * 10), { 'instance': plugins.NotifyFlock, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('flock://%s/' % ('t' * 24), { 'instance': plugins.NotifyFlock, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('flock://%s/' % ('t' * 24), { 'instance': plugins.NotifyFlock, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('flock://%s/' % ('t' * 24), { 'instance': plugins.NotifyFlock, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyGitter ################################## ('gitter://', { 'instance': TypeError, }), ('gitter://:@/', { 'instance': TypeError, }), # Invalid Token Length ('gitter://%s' % ('a' * 12), { 'instance': TypeError, }), # Token specified but no channel ('gitter://%s' % ('a' * 40), { 'instance': TypeError, }), # Token + channel ('gitter://%s/apprise' % ('b' * 40), { 'instance': plugins.NotifyGitter, 'response': False, }), # include image in post ('gitter://%s/apprise?image=Yes' % ('c' * 40), { 'instance': plugins.NotifyGitter, 'response': False, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'gitter://c...c/apprise', }), # Don't include image in post (this is the default anyway) ('gitter://%s/apprise?image=Yes' % ('d' * 40), { 'instance': plugins.NotifyGitter, 'response': False, # don't include an image by default 'include_image': False, }), # Don't include image in post (this is the default anyway) ('gitter://%s/apprise?image=No' % ('e' * 40), { 'instance': plugins.NotifyGitter, 'response': False, }), ('gitter://%s/apprise' % ('f' * 40), { 'instance': plugins.NotifyGitter, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('gitter://%s/apprise' % ('g' * 40), { 'instance': plugins.NotifyGitter, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('gitter://%s/apprise' % ('h' * 40), { 'instance': plugins.NotifyGitter, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyGotify ################################## ('gotify://', { 'instance': None, }), # No token specified ('gotify://hostname', { 'instance': TypeError, }), # Provide a hostname and token ('gotify://hostname/%s' % ('t' * 16), { 'instance': plugins.NotifyGotify, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'gotify://hostname/t...t', }), # Provide a hostname, path, and token ('gotify://hostname/a/path/ending/in/a/slash/%s' % ('u' * 16), { 'instance': plugins.NotifyGotify, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'gotify://hostname/a/path/ending/in/a/slash/u...u/', }), # Provide a hostname, path, and token ('gotify://hostname/a/path/not/ending/in/a/slash/%s' % ('v' * 16), { 'instance': plugins.NotifyGotify, # Our expected url(privacy=True) startswith() response: 'privacy_url': 
'gotify://hostname/a/path/not/ending/in/a/slash/v...v/', }), # Provide a priority ('gotify://hostname/%s?priority=high' % ('i' * 16), { 'instance': plugins.NotifyGotify, }), # Provide an invalid priority ('gotify://hostname:8008/%s?priority=invalid' % ('i' * 16), { 'instance': plugins.NotifyGotify, }), # An invalid url ('gotify://:@/', { 'instance': None, }), ('gotify://hostname/%s/' % ('t' * 16), { 'instance': plugins.NotifyGotify, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('gotifys://localhost/%s/' % ('t' * 16), { 'instance': plugins.NotifyGotify, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('gotify://localhost/%s/' % ('t' * 16), { 'instance': plugins.NotifyGotify, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyIFTTT - If This Than That ################################## ('ifttt://', { 'instance': TypeError, }), ('ifttt://:@/', { 'instance': TypeError, }), # No User ('ifttt://EventID/', { 'instance': TypeError, }), # A nicely formed ifttt url with 1 event and a new key/value store ('ifttt://WebHookID@EventID/?+TemplateKey=TemplateVal', { 'instance': plugins.NotifyIFTTT, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'ifttt://W...D', }), # Test to= in which case we set the host to the webhook id ('ifttt://WebHookID?to=EventID,EventID2', { 'instance': plugins.NotifyIFTTT, }), # Removing certain keys: ('ifttt://WebHookID@EventID/?-Value1=&-Value2', { 'instance': plugins.NotifyIFTTT, }), # A nicely formed ifttt url with 2 events defined: ('ifttt://WebHookID@EventID/EventID2/', { 'instance': plugins.NotifyIFTTT, }), # Support Native URL references ('https://maker.ifttt.com/use/WebHookID/', { # No EventID specified 'instance': TypeError, }), ('https://maker.ifttt.com/use/WebHookID/EventID/', { 'instance': plugins.NotifyIFTTT, }), # Native URL with arguments ('https://maker.ifttt.com/use/WebHookID/EventID/?-Value1=', { 'instance': plugins.NotifyIFTTT, }), # Test website connection failures ('ifttt://WebHookID@EventID', { 'instance': plugins.NotifyIFTTT, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('ifttt://WebHookID@EventID', { 'instance': plugins.NotifyIFTTT, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('ifttt://WebHookID@EventID', { 'instance': plugins.NotifyIFTTT, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyJoin ################################## ('join://', { 'instance': TypeError, }), # API Key + bad url ('join://:@/', { 'instance': TypeError, }), # APIkey; no device ('join://%s' % ('a' * 32), { 'instance': plugins.NotifyJoin, }), # API Key + device (using to=) ('join://%s?to=%s' % ('a' * 32, 'd' * 32), { 'instance': plugins.NotifyJoin, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'join://a...a/', }), # API Key + priority setting ('join://%s?priority=high' % ('a' * 32), { 'instance': plugins.NotifyJoin, }), # API Key + invalid priority setting ('join://%s?priority=invalid' % ('a' * 32), { 'instance': plugins.NotifyJoin, }), # API Key + priority setting (empty) ('join://%s?priority=' % 
('a' * 32), { 'instance': plugins.NotifyJoin, }), # API Key + device ('join://%s@%s?image=True' % ('a' * 32, 'd' * 32), { 'instance': plugins.NotifyJoin, }), # No image ('join://%s@%s?image=False' % ('a' * 32, 'd' * 32), { 'instance': plugins.NotifyJoin, }), # API Key + Device Name ('join://%s/%s' % ('a' * 32, 'My Device'), { 'instance': plugins.NotifyJoin, }), # API Key + device ('join://%s/%s' % ('a' * 32, 'd' * 32), { 'instance': plugins.NotifyJoin, # don't include an image by default 'include_image': False, }), # API Key + 2 devices ('join://%s/%s/%s' % ('a' * 32, 'd' * 32, 'e' * 32), { 'instance': plugins.NotifyJoin, # don't include an image by default 'include_image': False, }), # API Key + 1 device and 1 group ('join://%s/%s/%s' % ('a' * 32, 'd' * 32, 'group.chrome'), { 'instance': plugins.NotifyJoin, }), ('join://%s' % ('a' * 32), { 'instance': plugins.NotifyJoin, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('join://%s' % ('a' * 32), { 'instance': plugins.NotifyJoin, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('join://%s' % ('a' * 32), { 'instance': plugins.NotifyJoin, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyJSON ################################## ('json://:@/', { 'instance': None, }), ('json://', { 'instance': None, }), ('jsons://', { 'instance': None, }), ('json://localhost', { 'instance': plugins.NotifyJSON, }), ('json://user:pass@localhost', { 'instance': plugins.NotifyJSON, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'json://user:****@localhost', }), ('json://user@localhost', { 'instance': plugins.NotifyJSON, }), ('json://localhost:8080', { 'instance': plugins.NotifyJSON, }), ('json://user:pass@localhost:8080', { 'instance': plugins.NotifyJSON, }), ('jsons://localhost', { 'instance': plugins.NotifyJSON, }), ('jsons://user:pass@localhost', { 'instance': plugins.NotifyJSON, }), ('jsons://localhost:8080/path/', { 'instance': plugins.NotifyJSON, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'jsons://localhost:8080/path/', }), ('jsons://user:password@localhost:8080', { 'instance': plugins.NotifyJSON, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'jsons://user:****@localhost:8080', }), ('json://localhost:8080/path?-HeaderKey=HeaderValue', { 'instance': plugins.NotifyJSON, }), ('json://user:pass@localhost:8081', { 'instance': plugins.NotifyJSON, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('json://user:pass@localhost:8082', { 'instance': plugins.NotifyJSON, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('json://user:pass@localhost:8083', { 'instance': plugins.NotifyJSON, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyKavenegar ################################## ('kavenegar://', { # We failed to identify any valid authentication 'instance': TypeError, }), ('kavenegar://:@/', { # We failed to identify any valid authentication 'instance': TypeError, }), ('kavenegar://{}/{}/{}'.format('1' * 10, '2' * 15, 'a' * 13), { # No valid targets to notify 
'instance': TypeError, }), ('kavenegar://{}/{}'.format('a' * 24, '3' * 14), { # valid api key and valid number 'instance': plugins.NotifyKavenegar, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kavenegar://a...a/', }), ('kavenegar://{}?to={}'.format('a' * 24, '3' * 14), { # valid api key and valid number 'instance': plugins.NotifyKavenegar, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kavenegar://a...a/', }), ('kavenegar://{}@{}/{}'.format('1' * 14, 'b' * 24, '3' * 14), { # valid api key and valid number 'instance': plugins.NotifyKavenegar, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kavenegar://{}@b...b/'.format('1' * 14), }), ('kavenegar://{}@{}/{}'.format('a' * 14, 'b' * 24, '3' * 14), { # invalid from number 'instance': TypeError, }), ('kavenegar://{}@{}/{}'.format('3' * 4, 'b' * 24, '3' * 14), { # invalid from number 'instance': TypeError, }), ('kavenegar://{}/{}?from={}'.format('b' * 24, '3' * 14, '1' * 14), { # valid api key and valid number 'instance': plugins.NotifyKavenegar, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kavenegar://{}@b...b/'.format('1' * 14), }), ('kavenegar://{}/{}'.format('b' * 24, '4' * 14), { 'instance': plugins.NotifyKavenegar, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('kavenegar://{}/{}'.format('c' * 24, '5' * 14), { 'instance': plugins.NotifyKavenegar, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyKODI ################################## ('kodi://', { 'instance': None, }), ('kodis://', { 'instance': None, }), ('kodi://localhost', { 'instance': plugins.NotifyXBMC, }), ('kodi://192.168.4.1', { # Support IPv4 Addresses 'instance': plugins.NotifyXBMC, }), ('kodi://[2001:db8:002a:3256:adfe:05c0:0003:0006]', { # Support IPv6 Addresses 'instance': plugins.NotifyXBMC, }), ('kodi://user:pass@localhost', { 'instance': plugins.NotifyXBMC, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kodi://user:****@localhost', }), ('kodi://localhost:8080', { 'instance': plugins.NotifyXBMC, }), ('kodi://user:pass@localhost:8080', { 'instance': plugins.NotifyXBMC, }), ('kodis://localhost', { 'instance': plugins.NotifyXBMC, }), ('kodis://user:pass@localhost', { 'instance': plugins.NotifyXBMC, }), ('kodis://localhost:8080/path/', { 'instance': plugins.NotifyXBMC, }), ('kodis://user:password@localhost:8080', { 'instance': plugins.NotifyXBMC, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kodis://user:****@localhost:8080', }), ('kodi://localhost', { 'instance': plugins.NotifyXBMC, # Experement with different notification types 'notify_type': NotifyType.WARNING, }), ('kodi://localhost', { 'instance': plugins.NotifyXBMC, # Experement with different notification types 'notify_type': NotifyType.FAILURE, }), ('kodis://localhost:443', { 'instance': plugins.NotifyXBMC, # don't include an image by default 'include_image': False, }), ('kodi://:@/', { 'instance': None, }), ('kodi://user:pass@localhost:8081', { 'instance': plugins.NotifyXBMC, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('kodi://user:pass@localhost:8082', { 'instance': plugins.NotifyXBMC, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), 
('kodi://user:pass@localhost:8083', { 'instance': plugins.NotifyXBMC, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyKumulos ################################## ('kumulos://', { # No API or Server Key specified 'instance': TypeError, }), ('kumulos://:@/', { # No API or Server Key specified # We don't have strict host checking on for kumulos, so this URL # actually becomes parseable and :@ becomes a hostname. # The below errors because a second token wasn't found 'instance': TypeError, }), ('kumulos://{}/'.format(UUID4), { # No server key was specified 'instance': TypeError, }), ('kumulos://{}/{}/'.format(UUID4, 'w' * 36), { # Everything is okay 'instance': plugins.NotifyKumulos, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kumulos://8...2/w...w/', }), ('kumulos://{}/{}/'.format(UUID4, 'x' * 36), { 'instance': plugins.NotifyKumulos, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kumulos://8...2/x...x/', }), ('kumulos://{}/{}/'.format(UUID4, 'y' * 36), { 'instance': plugins.NotifyKumulos, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'kumulos://8...2/y...y/', }), ('kumulos://{}/{}/'.format(UUID4, 'z' * 36), { 'instance': plugins.NotifyKumulos, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyLametric ################################## ('lametric://', { # No APIKey or Client ID/Secret specified 'instance': TypeError, }), ('lametric://:@/', { # No APIKey or Client ID/Secret specified 'instance': TypeError, }), ('lametric://{}/'.format(UUID4), { # No APIKey or Client ID specified 'instance': TypeError, }), ('lametric://root:{}@192.168.0.5:8080/'.format(UUID4), { # Everything is okay; this would be picked up in Device Mode # We're using a default port and enforcing a special user 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://root:[email protected]/', }), ('lametric://{}@192.168.0.4:8000/'.format(UUID4), { # Everything is okay; this would be picked up in Device Mode # Port is enforced 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://[email protected]:8000/', }), ('lametric://{}@192.168.0.5/'.format(UUID4), { # Everything is okay; this would be picked up in Device Mode 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://[email protected]/', }), ('lametrics://{}@192.168.0.6/?mode=device'.format(UUID4), { # Everything is okay; Device mode forced 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametrics://[email protected]/', }), ('lametric://192.168.2.8/?mode=device&apikey=abc123', { # Everything is okay; Device mode forced 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://[email protected]/', }), ('lametrics://{}@abcd==/?mode=cloud'.format(UUID4), { # Everything is okay; Cloud mode forced 
'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://8...2@****/', }), ('lametric://_/?mode=cloud&oauth_id=abcd&oauth_secret=1234&cycles=3', { # Everything is okay; Cloud mode forced # arguments used on URL path 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://a...d@****/', }), ('lametrics://{}@abcd==/?mode=cloud&sound=knock&icon_type=info' '&priority=critical'.format(UUID4), { # Cloud mode forced, sound, icon_type, and priority not supported # with cloud mode so warnings are created 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://8...2@****/', }), ('lametrics://{}@192.168.0.7/?mode=invalid'.format(UUID4), { # Invalid Mode 'instance': TypeError, }), ('lametrics://{}@192.168.0.6/?sound=alarm1'.format(UUID4), { # Device mode with sound set to alarm1 'instance': plugins.NotifyLametric, }), ('lametrics://{}@192.168.0.7/?sound=bike'.format(UUID4), { # Device mode with sound set to bicycle using alias 'instance': plugins.NotifyLametric, # Bike is an alias, 'url_matches': r'sound=bicycle', }), ('lametrics://{}@192.168.0.8/?sound=invalid!'.format(UUID4), { # Invalid sounds just produce warnings... object still loads 'instance': plugins.NotifyLametric, }), ('lametrics://{}@192.168.0.9/?icon_type=alert'.format(UUID4), { # Icon Type Changed 'instance': plugins.NotifyLametric, # icon=alert exists somewhere on our generated URL 'url_matches': r'icon_type=alert', }), ('lametrics://{}@192.168.0.10/?icon_type=invalid'.format(UUID4), { # Invalid icon types just produce warnings... object still loads 'instance': plugins.NotifyLametric, }), ('lametric://{}@192.168.1.1/?priority=warning'.format(UUID4), { # Priority changed 'instance': plugins.NotifyLametric, }), ('lametrics://{}@192.168.1.2/?priority=invalid'.format(UUID4), { # Invalid priority just produce warnings... object still loads 'instance': plugins.NotifyLametric, }), ('lametric://{}@192.168.1.3/?cycles=2'.format(UUID4), { # Cycles changed 'instance': plugins.NotifyLametric, }), ('lametric://{}@192.168.1.4/?cycles=-1'.format(UUID4), { # Cycles changed (out of range) 'instance': plugins.NotifyLametric, }), ('lametrics://{}@192.168.1.5/?cycles=invalid'.format(UUID4), { # Invalid priority just produce warnings... 
object still loads 'instance': plugins.NotifyLametric, }), ('lametric://{}@{}/'.format( UUID4, 'YWosnkdnoYREsdogfoSDff734kjsfbweo7r434597FYODIoicosdonnreiuhvd' 'ciuhouerhohcd8sds89fdRw=='), { # Everything is okay; this would be picked up in Cloud Mode 'instance': plugins.NotifyLametric, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://8...2@****/', }), ('lametric://{}@example.com/'.format(UUID4), { 'instance': plugins.NotifyLametric, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametric://[email protected]/', }), ('lametrics://{}@example.ca/'.format(UUID4), { 'instance': plugins.NotifyLametric, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'lametrics://[email protected]/', }), ('lametrics://{}@example.net/'.format(UUID4), { 'instance': plugins.NotifyLametric, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyMailgun ################################## ('mailgun://', { 'instance': TypeError, }), ('mailgun://:@/', { 'instance': TypeError, }), # No Token specified ('mailgun://[email protected]', { 'instance': TypeError, }), # Token is valid, but no user name specified ('mailgun://localhost.localdomain/{}-{}-{}'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': TypeError, }), # Invalid from email address ('mailgun://[email protected]/{}-{}-{}'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': TypeError, }), # No To email address, but everything else is valid ('mailgun://[email protected]/{}-{}-{}'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun, }), # valid url with region specified (case insensitve) ('mailgun://[email protected]/{}-{}-{}?region=uS'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun, }), # valid url with region specified (case insensitve) ('mailgun://[email protected]/{}-{}-{}?region=EU'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun, }), # invalid url with region specified (case insensitve) ('mailgun://[email protected]/{}-{}-{}?region=invalid'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': TypeError, }), # One To Email address ('mailgun://[email protected]/{}-{}-{}/[email protected]'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun, }), ('mailgun://[email protected]/' '{}-{}-{}[email protected]'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun}), # One To Email address, a from name specified too ('mailgun://[email protected]/{}-{}-{}/' '[email protected]?name="Frodo"'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun}), ('mailgun://[email protected]/{}-{}-{}'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('mailgun://[email protected]/{}-{}-{}'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('mailgun://[email protected]/{}-{}-{}'.format( 'a' * 32, 'b' * 8, 'c' * 8), { 'instance': plugins.NotifyMailgun, # Throws a series of 
connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyMatrix ################################## ('matrix://', { 'instance': None, }), ('matrixs://', { 'instance': None, }), ('matrix://localhost?mode=off', { # treats it as a anonymous user to register 'instance': plugins.NotifyMatrix, # response is false because we have nothing to notify 'response': False, }), ('matrix://localhost', { # response is TypeError because we'll try to initialize as # a t2bot and fail (localhost is too short of a api key) 'instance': TypeError }), ('matrix://user:pass@localhost/#room1/#room2/#room3', { 'instance': plugins.NotifyMatrix, 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('matrix://user:pass@localhost/#room1/#room2/!room1', { 'instance': plugins.NotifyMatrix, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('matrix://user:pass@localhost:1234/#room', { 'instance': plugins.NotifyMatrix, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'matrix://user:****@localhost:1234/', }), # Matrix supports webhooks too; the following tests this now: ('matrix://user:token@localhost?mode=matrix&format=text', { # user and token correctly specified with webhook 'instance': plugins.NotifyMatrix, 'response': False, }), ('matrix://user:token@localhost?mode=matrix&format=html', { # user and token correctly specified with webhook 'instance': plugins.NotifyMatrix, }), ('matrix://user:token@localhost?mode=slack&format=text', { # user and token correctly specified with webhook 'instance': plugins.NotifyMatrix, }), ('matrixs://user:token@localhost?mode=SLACK&format=markdown', { # user and token specified; slack webhook still detected # despite uppercase characters 'instance': plugins.NotifyMatrix, }), # Image Reference ('matrixs://user:token@localhost?mode=slack&format=markdown&image=True', { # user and token specified; image set to True 'instance': plugins.NotifyMatrix, }), ('matrixs://user:token@localhost?mode=slack&format=markdown&image=False', { # user and token specified; image set to True 'instance': plugins.NotifyMatrix, }), ('matrixs://user@{}?mode=t2bot&format=markdown&image=True' .format('a' * 64), { # user and token specified; image set to True 'instance': plugins.NotifyMatrix}), ('matrix://user@{}?mode=t2bot&format=markdown&image=False' .format('z' * 64), { # user and token specified; image set to True 'instance': plugins.NotifyMatrix}), # This will default to t2bot because no targets were specified and no # password ('matrixs://{}'.format('c' * 64), { 'instance': plugins.NotifyMatrix, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), # Test Native URL ('https://webhooks.t2bot.io/api/v1/matrix/hook/{}/'.format('d' * 64), { # user and token specified; image set to True 'instance': plugins.NotifyMatrix, }), ('matrix://user:token@localhost?mode=On', { # invalid webhook specified (unexpected boolean) 'instance': TypeError, }), ('matrix://token@localhost/?mode=Matrix', { 'instance': plugins.NotifyMatrix, 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), 
('matrix://user:token@localhost/mode=matrix', { 'instance': plugins.NotifyMatrix, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('matrix://token@localhost:8080/?mode=slack', { 'instance': plugins.NotifyMatrix, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ('matrix://{}/?mode=t2bot'.format('b' * 64), { 'instance': plugins.NotifyMatrix, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyMatterMost ################################## ('mmost://', { 'instance': None, }), ('mmosts://', { 'instance': None, }), ('mmost://:@/', { 'instance': None, }), ('mmosts://localhost', { # Thrown because there was no webhook id specified 'instance': TypeError, }), ('mmost://localhost/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, }), ('mmost://user@localhost/3ccdd113474722377935511fc85d3dd4?channel=test', { 'instance': plugins.NotifyMatterMost, }), ('mmost://user@localhost/3ccdd113474722377935511fc85d3dd4?to=test', { 'instance': plugins.NotifyMatterMost, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'mmost://user@localhost/3...4/', }), ('mmost://localhost/3ccdd113474722377935511fc85d3dd4' '?to=test&image=True', { 'instance': plugins.NotifyMatterMost}), ('mmost://localhost/3ccdd113474722377935511fc85d3dd4' \ '?to=test&image=False', { 'instance': plugins.NotifyMatterMost}), ('mmost://localhost/3ccdd113474722377935511fc85d3dd4' \ '?to=test&image=True', { 'instance': plugins.NotifyMatterMost, # don't include an image by default 'include_image': False}), ('mmost://localhost:8080/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'mmost://localhost:8080/3...4/', }), ('mmost://localhost:8080/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, }), ('mmost://localhost:invalid-port/3ccdd113474722377935511fc85d3dd4', { 'instance': None, }), ('mmosts://localhost/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, }), # Test our paths ('mmosts://localhost/a/path/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, }), ('mmosts://localhost/////3ccdd113474722377935511fc85d3dd4///', { 'instance': plugins.NotifyMatterMost, }), ('mmost://localhost/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('mmost://localhost/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('mmost://localhost/3ccdd113474722377935511fc85d3dd4', { 'instance': plugins.NotifyMatterMost, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyMSTeams ################################## ('msteams://', { # First API Token not specified 'instance': TypeError, }), ('msteams://:@/', { # We don't have strict host checking on for msteams, so this URL # actually becomes parseable and :@ becomes a hostname. 
        # The below errors because a second token wasn't found
        'instance': TypeError,
    }),
    ('msteams://{}'.format(UUID4), {
        # Just half of one token provided
        'instance': TypeError,
    }),
    ('msteams://{}@{}/'.format(UUID4, UUID4), {
        # Just 1 token provided
        'instance': TypeError,
    }),
    ('msteams://{}@{}/{}'.format(UUID4, UUID4, 'a' * 32), {
        # Just 2 tokens provided
        'instance': TypeError,
    }),
    ('msteams://{}@{}/{}/{}?t1'.format(UUID4, UUID4, 'a' * 32, UUID4), {
        # All tokens provided - we're good
        'instance': plugins.NotifyMSTeams,
    }),
    # Support native URLs
    ('https://outlook.office.com/webhook/{}@{}/IncomingWebhook/{}/{}'
     .format(UUID4, UUID4, 'a' * 32, UUID4), {
        # All tokens provided - we're good
        'instance': plugins.NotifyMSTeams}),
    ('msteams://{}@{}/{}/{}?t2'.format(UUID4, UUID4, 'a' * 32, UUID4), {
        # All tokens provided - we're good
        'instance': plugins.NotifyMSTeams,
        # don't include an image by default
        'include_image': False,
    }),
    ('msteams://{}@{}/{}/{}?image=No'.format(UUID4, UUID4, 'a' * 32, UUID4), {
        # All tokens provided - we're good, no image
        'instance': plugins.NotifyMSTeams,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'msteams://8...2/a...a/8...2/',
    }),
    ('msteams://{}@{}/{}/{}?tx'.format(UUID4, UUID4, 'a' * 32, UUID4), {
        'instance': plugins.NotifyMSTeams,
        # force a failure
        'response': False,
        'requests_response_code': requests.codes.internal_server_error,
    }),
    ('msteams://{}@{}/{}/{}?ty'.format(UUID4, UUID4, 'a' * 32, UUID4), {
        'instance': plugins.NotifyMSTeams,
        # throw a bizarre code forcing us to fail to look it up
        'response': False,
        'requests_response_code': 999,
    }),
    ('msteams://{}@{}/{}/{}?tz'.format(UUID4, UUID4, 'a' * 32, UUID4), {
        'instance': plugins.NotifyMSTeams,
        # Throws a series of connection and transfer exceptions when this flag
        # is set and tests that we gracefully handle them
        'test_requests_exceptions': True,
    }),

    ##################################
    # NotifyNexmo
    ##################################
    ('nexmo://', {
        # No API Key specified
        'instance': TypeError,
    }),
    ('nexmo://:@/', {
        # invalid Auth key
        'instance': TypeError,
    }),
    ('nexmo://AC{}@12345678'.format('a' * 8), {
        # Just a key provided
        'instance': TypeError,
    }),
    ('nexmo://AC{}:{}@{}'.format('a' * 8, 'b' * 16, '3' * 9), {
        # key and secret provided and from but invalid from no
        'instance': TypeError,
    }),
    ('nexmo://AC{}:{}@{}/?ttl=0'.format('b' * 8, 'c' * 16, '3' * 11), {
        # Invalid ttl defined
        'instance': TypeError,
    }),
    ('nexmo://AC{}:{}@{}'.format('d' * 8, 'e' * 16, 'a' * 11), {
        # Invalid source number
        'instance': TypeError,
    }),
    ('nexmo://AC{}:{}@{}/123/{}/abcd/'.format(
        'f' * 8, 'g' * 16, '3' * 11, '9' * 15), {
        # valid everything but target numbers
        'instance': plugins.NotifyNexmo,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'nexmo://A...f:****@',
    }),
    ('nexmo://AC{}:{}@{}'.format('h' * 8, 'i' * 16, '5' * 11), {
        # using phone no with no target - we text ourselves in
        # this case
        'instance': plugins.NotifyNexmo,
    }),
    ('nexmo://_?key=AC{}&secret={}&from={}'.format(
        'a' * 8, 'b' * 16, '5' * 11), {
        # use get args to accomplish the same thing
        'instance': plugins.NotifyNexmo,
    }),
    ('nexmo://_?key=AC{}&secret={}&source={}'.format(
        'a' * 8, 'b' * 16, '5' * 11), {
        # use get args to accomplish the same thing (use source instead of from)
        'instance': plugins.NotifyNexmo,
    }),
    ('nexmo://_?key=AC{}&secret={}&from={}&to={}'.format(
        'a' * 8, 'b' * 16, '5' * 11, '7' * 13), {
        # use to=
        'instance': plugins.NotifyNexmo,
    }),
    ('nexmo://AC{}:{}@{}'.format('a' * 8, 'b' * 16, '6' * 11), {
        'instance': plugins.NotifyNexmo,
        # throw a bizarre code forcing us to fail to look it up
        'response': False,
        'requests_response_code': 999,
    }),
    ('nexmo://AC{}:{}@{}'.format('a' * 8, 'b' * 16, '6' * 11), {
        'instance': plugins.NotifyNexmo,
        # Throws a series of connection and transfer exceptions when this flag
        # is set and tests that we gracefully handle them
        'test_requests_exceptions': True,
    }),

    ##################################
    # NotifyNotica
    ##################################
    ('notica://', {
        'instance': TypeError,
    }),
    ('notica://:@/', {
        'instance': TypeError,
    }),
    # Native URL
    ('https://notica.us/?%s' % ('z' * 6), {
        'instance': plugins.NotifyNotica,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'notica://z...z/',
    }),
    # Native URL with additional arguments
    ('https://notica.us/?%s&overflow=upstream' % ('z' * 6), {
        'instance': plugins.NotifyNotica,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'notica://z...z/',
    }),
    # Token specified
    ('notica://%s' % ('a' * 6), {
        'instance': plugins.NotifyNotica,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'notica://a...a/',
    }),
    # Self-Hosted configuration
    ('notica://localhost/%s' % ('b' * 6), {
        'instance': plugins.NotifyNotica,
    }),
    ('notica://user@localhost/%s' % ('c' * 6), {
        'instance': plugins.NotifyNotica,
    }),
    ('notica://user:pass@localhost/%s/' % ('d' * 6), {
        'instance': plugins.NotifyNotica,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'notica://user:****@localhost/d...d',
    }),
    ('notica://user:pass@localhost/a/path/%s/' % ('r' * 6), {
        'instance': plugins.NotifyNotica,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'notica://user:****@localhost/a/path/r...r',
    }),
    ('notica://localhost:8080/%s' % ('a' * 6), {
        'instance': plugins.NotifyNotica,
    }),
    ('notica://user:pass@localhost:8080/%s' % ('b' * 6), {
        'instance': plugins.NotifyNotica,
    }),
    ('noticas://localhost/%s' % ('j' * 6), {
        'instance': plugins.NotifyNotica,
        'privacy_url': 'noticas://localhost/j...j',
    }),
    ('noticas://user:pass@localhost/%s' % ('e' * 6), {
        'instance': plugins.NotifyNotica,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'noticas://user:****@localhost/e...e',
    }),
    ('noticas://localhost:8080/path/%s' % ('5' * 6), {
        'instance': plugins.NotifyNotica,
        'privacy_url': 'noticas://localhost:8080/path/5...5',
    }),
    ('noticas://user:pass@localhost:8080/%s' % ('6' * 6), {
        'instance': plugins.NotifyNotica,
    }),
    ('notica://%s' % ('b' * 6), {
        'instance': plugins.NotifyNotica,
        # don't include an image by default
        'include_image': False,
    }),
    # Test Header overrides
    ('notica://localhost:8080//%s/?+HeaderKey=HeaderValue' % ('7' * 6), {
        'instance': plugins.NotifyNotica,
    }),
    ('notica://%s' % ('c' * 6), {
        'instance': plugins.NotifyNotica,
        # force a failure
        'response': False,
        'requests_response_code': requests.codes.internal_server_error,
    }),
    ('notica://%s' % ('d' * 7), {
        'instance': plugins.NotifyNotica,
        # throw a bizarre code forcing us to fail to look it up
        'response': False,
        'requests_response_code': 999,
    }),
    ('notica://%s' % ('e' * 8), {
        'instance': plugins.NotifyNotica,
        # Throws a series of connection and transfer exceptions when this flag
        # is set and tests that we gracefully handle them
        'test_requests_exceptions': True,
    }),

    ##################################
    # NotifyNotifico
    ##################################
    ('notifico://', {
        'instance': TypeError,
    }),
    ('notifico://:@/', {
        'instance': TypeError,
    }),
    ('notifico://1234', {
        # Just a project id provided (no message token)
        'instance': TypeError,
    }),
    ('notifico://abcd/ckhrjW8w672m6HG', {
        # an invalid project id provided
        'instance': TypeError,
    }),
    ('notifico://1234/ckhrjW8w672m6HG', {
        # A project id and message hook provided
        'instance': plugins.NotifyNotifico,
    }),
    ('notifico://1234/ckhrjW8w672m6HG?prefix=no', {
        # Disable our prefix
        'instance': plugins.NotifyNotifico,
    }),
    ('notifico://1234/ckhrjW8w672m6HG?color=yes', {
        'instance': plugins.NotifyNotifico,
        'notify_type': 'info',
    }),
    ('notifico://1234/ckhrjW8w672m6HG?color=yes', {
        'instance': plugins.NotifyNotifico,
        'notify_type': 'success',
    }),
    ('notifico://1234/ckhrjW8w672m6HG?color=yes', {
        'instance': plugins.NotifyNotifico,
        'notify_type': 'warning',
    }),
    ('notifico://1234/ckhrjW8w672m6HG?color=yes', {
        'instance': plugins.NotifyNotifico,
        'notify_type': 'failure',
    }),
    ('notifico://1234/ckhrjW8w672m6HG?color=yes', {
        'instance': plugins.NotifyNotifico,
        'notify_type': 'invalid',
    }),
    ('notifico://1234/ckhrjW8w672m6HG?color=no', {
        # Test our color flag by having it set to off
        'instance': plugins.NotifyNotifico,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'notifico://1...4/c...G',
    }),
    # Support Native URLs
    ('https://n.tkte.ch/h/2144/uJmKaBW9WFk42miB146ci3Kj', {
        'instance': plugins.NotifyNotifico,
    }),
    ('notifico://1234/ckhrjW8w672m6HG', {
        'instance': plugins.NotifyNotifico,
        # don't include an image by default
        'include_image': False,
    }),
    ('notifico://1234/ckhrjW8w672m6HG', {
        'instance': plugins.NotifyNotifico,
        # force a failure
        'response': False,
        'requests_response_code': requests.codes.internal_server_error,
    }),
    ('notifico://1234/ckhrjW8w672m6HG', {
        'instance': plugins.NotifyNotifico,
        # throw a bizarre code forcing us to fail to look it up
        'response': False,
        'requests_response_code': 999,
    }),
    ('notifico://1234/ckhrjW8w672m6HG', {
        'instance': plugins.NotifyNotifico,
        # Throws a series of connection and transfer exceptions when this flag
        # is set and tests that we gracefully handle them
        'test_requests_exceptions': True,
    }),

    ##################################
    # NotifyNextcloud
    ##################################
    ('ncloud://:@/', {
        'instance': None,
    }),
    ('ncloud://', {
        'instance': None,
    }),
    ('nclouds://', {
        # No hostname
        'instance': None,
    }),
    ('ncloud://localhost', {
        # No user specified
        'instance': TypeError,
    }),
    ('ncloud://localhost/admin', {
        'instance': plugins.NotifyNextcloud,
    }),
    ('ncloud://user@localhost/admin', {
        'instance': plugins.NotifyNextcloud,
    }),
    ('ncloud://user@localhost?to=user1,user2', {
        'instance': plugins.NotifyNextcloud,
    }),
    ('ncloud://user:pass@localhost/user1/user2', {
        'instance': plugins.NotifyNextcloud,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'ncloud://user:****@localhost/user1/user2',
    }),
    ('ncloud://user:pass@localhost:8080/admin', {
        'instance': plugins.NotifyNextcloud,
    }),
    ('nclouds://user:pass@localhost/admin', {
        'instance': plugins.NotifyNextcloud,
        # Our expected url(privacy=True) startswith() response:
        'privacy_url': 'nclouds://user:****@localhost/admin',
    }),
    ('nclouds://user:pass@localhost:8080/admin/', {
        'instance': plugins.NotifyNextcloud,
    }),
    ('ncloud://localhost:8080/admin?-HeaderKey=HeaderValue', {
        'instance': plugins.NotifyNextcloud,
    }),
    ('ncloud://user:pass@localhost:8081/admin', {
        'instance': plugins.NotifyNextcloud,
        # force a failure
        'response': False,
        'requests_response_code': requests.codes.internal_server_error,
    }),
    ('ncloud://user:pass@localhost:8082/admin', {
        'instance': plugins.NotifyNextcloud,
        # throw a bizarre code forcing us to fail to look it up
        'response': False,
'requests_response_code': 999, }), ('ncloud://user:pass@localhost:8083/user1/user2/user3', { 'instance': plugins.NotifyNextcloud, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyOffice365 ################################## ('o365://', { # Missing tenant, client_id, secret, and targets! 'instance': TypeError, }), ('o365://:@/', { # invalid url 'instance': TypeError, }), ('o365://{tenant}:{aid}/{cid}/{secret}/{targets}'.format( # invalid tenant tenant=',', cid='ab-cd-ef-gh', aid='[email protected]', secret='abcd/123/3343/@jack/test', targets='/'.join(['[email protected]'])), { # We're valid and good to go 'instance': TypeError, }), ('o365://{tenant}:{aid}/{cid}/{secret}/{targets}'.format( tenant='tenant', # invalid client id cid='ab.', aid='[email protected]', secret='abcd/123/3343/@jack/test', targets='/'.join(['[email protected]'])), { # We're valid and good to go 'instance': TypeError, }), ('o365://{tenant}:{aid}/{cid}/{secret}/{targets}'.format( tenant='tenant', cid='ab-cd-ef-gh', aid='[email protected]', secret='abcd/123/3343/@jack/test', targets='/'.join(['[email protected]'])), { # We're valid and good to go 'instance': plugins.NotifyOffice365, # Test what happens if a batch send fails to return a messageCount 'requests_response_text': { 'expires_in': 2000, 'access_token': 'abcd1234', }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'o365://t...t:[email protected]/a...h/' \ '****/email1%40test.ca/'}), # test our arguments ('o365://_/?oauth_id={cid}&oauth_secret={secret}&tenant={tenant}' '&to={targets}&from={aid}'.format( tenant='tenant', cid='ab-cd-ef-gh', aid='[email protected]', secret='abcd/123/3343/@jack/test', targets='[email protected]'), { # We're valid and good to go 'instance': plugins.NotifyOffice365, # Test what happens if a batch send fails to return a messageCount 'requests_response_text': { 'expires_in': 2000, 'access_token': 'abcd1234', }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'o365://t...t:[email protected]/a...h/' \ '****/email1%40test.ca/'}), # Test invalid JSON (no tenant defaults to email domain) ('o365://{tenant}:{aid}/{cid}/{secret}/{targets}'.format( tenant='tenant', cid='ab-cd-ef-gh', aid='[email protected]', secret='abcd/123/3343/@jack/test', targets='/'.join(['[email protected]'])), { # We're valid and good to go 'instance': plugins.NotifyOffice365, # invalid JSON response 'requests_response_text': '{', 'notify_response': False, }), # No Targets specified ('o365://{tenant}:{aid}/{cid}/{secret}'.format( tenant='tenant', cid='ab-cd-ef-gh', aid='[email protected]', secret='abcd/123/3343/@jack/test'), { # We're valid and good to go 'instance': plugins.NotifyOffice365, # There were no targets to notify; so we use our own email 'requests_response_text': { 'expires_in': 2000, 'access_token': 'abcd1234', }, }), ('o365://{tenant}:{aid}/{cid}/{secret}/{targets}'.format( tenant='tenant', cid='zz-zz-zz-zz', aid='[email protected]', secret='abcd/abc/dcba/@john/test', targets='/'.join(['[email protected]'])), { 'instance': plugins.NotifyOffice365, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('o365://{tenant}:{aid}/{cid}/{secret}/{targets}'.format( tenant='tenant', cid='01-12-23-34', aid='[email protected]', secret='abcd/321/4321/@test/test', targets='/'.join(['[email protected]'])), { 'instance': 
plugins.NotifyOffice365, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyProwl ################################## ('prowl://', { 'instance': TypeError, }), # bad url ('prowl://:@/', { 'instance': TypeError, }), # Invalid API Key ('prowl://%s' % ('a' * 20), { 'instance': TypeError, }), # Provider Key ('prowl://%s/%s' % ('a' * 40, 'b' * 40), { 'instance': plugins.NotifyProwl, }), # Invalid Provider Key ('prowl://%s/%s' % ('a' * 40, 'b' * 20), { 'instance': TypeError, }), # APIkey; no device ('prowl://%s' % ('a' * 40), { 'instance': plugins.NotifyProwl, }), # API Key ('prowl://%s' % ('a' * 40), { 'instance': plugins.NotifyProwl, # don't include an image by default 'include_image': False, }), # API Key + priority setting ('prowl://%s?priority=high' % ('a' * 40), { 'instance': plugins.NotifyProwl, }), # API Key + invalid priority setting ('prowl://%s?priority=invalid' % ('a' * 40), { 'instance': plugins.NotifyProwl, }), # API Key + priority setting (empty) ('prowl://%s?priority=' % ('a' * 40), { 'instance': plugins.NotifyProwl, }), # API Key + No Provider Key (empty) ('prowl://%s///' % ('w' * 40), { 'instance': plugins.NotifyProwl, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'prowl://w...w/', }), # API Key + Provider Key ('prowl://%s/%s' % ('a' * 40, 'b' * 40), { 'instance': plugins.NotifyProwl, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'prowl://a...a/b...b', }), # API Key + with image ('prowl://%s' % ('a' * 40), { 'instance': plugins.NotifyProwl, }), ('prowl://%s' % ('a' * 40), { 'instance': plugins.NotifyProwl, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('prowl://%s' % ('a' * 40), { 'instance': plugins.NotifyProwl, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('prowl://%s' % ('a' * 40), { 'instance': plugins.NotifyProwl, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyPushBullet ################################## ('pbul://', { 'instance': TypeError, }), ('pbul://:@/', { 'instance': TypeError, }), # APIkey ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # APIkey; but support attachment response ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # Test what happens if a batch send fails to return a messageCount 'requests_response_text': { 'file_name': 'cat.jpeg', 'file_type': 'image/jpeg', 'file_url': 'http://file_url', 'upload_url': 'http://upload_url', }, }), # APIkey; attachment testing that isn't an image type ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # Test what happens if a batch send fails to return a messageCount 'requests_response_text': { 'file_name': 'test.pdf', 'file_type': 'application/pdf', 'file_url': 'http://file_url', 'upload_url': 'http://upload_url', }, }), # APIkey; attachment testing were expected entry in payload is missing ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # Test what happens if a batch send fails to return a messageCount 'requests_response_text': { 'file_name': 'test.pdf', 'file_type': 'application/pdf', 'file_url': 'http://file_url', # upload_url missing }, # Our 
Notification calls associated with attachments will fail: 'attach_response': False, }), # API Key + channel ('pbul://%s/#channel/' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # API Key + channel (via to= ('pbul://%s/?to=#channel' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # API Key + 2 channels ('pbul://%s/#channel1/#channel2' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'pbul://a...a/', 'check_attachments': False, }), # API Key + device ('pbul://%s/device/' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # API Key + 2 devices ('pbul://%s/device1/device2/' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # API Key + email ('pbul://%s/[email protected]/' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # API Key + 2 emails ('pbul://%s/[email protected]/[email protected]/' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # API Key + Combo ('pbul://%s/device/#channel/[email protected]/' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, 'check_attachments': False, }), # , ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, 'check_attachments': False, }), ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, 'check_attachments': False, }), ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, 'check_attachments': False, }), ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, 'check_attachments': False, }), ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, 'check_attachments': False, }), ('pbul://%s' % ('a' * 32), { 'instance': plugins.NotifyPushBullet, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, 'check_attachments': False, }), ################################## # NotifyPushSafer ################################## ('psafer://:@/', { 'instance': TypeError, }), ('psafer://', { 'instance': TypeError, }), ('psafers://', { 'instance': TypeError, }), ('psafer://{}'.format('a' * 20), { 'instance': plugins.NotifyPushSafer, # This will fail because we're also expecting a server acknowledgement 'notify_response': False, }), ('psafer://{}'.format('b' * 20), { 'instance': plugins.NotifyPushSafer, # invalid JSON response 'requests_response_text': '{', 'notify_response': False, }), ('psafer://{}'.format('c' * 20), { 'instance': plugins.NotifyPushSafer, # A failure has status set to zero # We also expect an 'error' flag to be set 'requests_response_text': { 'status': 0, 'error': 'we failed' }, 'notify_response': False, }), ('psafers://{}'.format('d' * 20), { 'instance': plugins.NotifyPushSafer, # A failure has status set to zero # Test without an 'error' flag 
'requests_response_text': { 'status': 0, }, 'notify_response': False, }), # This will notify all users ('a') ('psafer://{}'.format('e' * 20), { 'instance': plugins.NotifyPushSafer, # A status of 1 is a success 'requests_response_text': { 'status': 1, } }), # This will notify a selected set of devices ('psafer://{}/12/24/53'.format('e' * 20), { 'instance': plugins.NotifyPushSafer, # A status of 1 is a success 'requests_response_text': { 'status': 1, } }), # Same as above, but exercises the to= argument ('psafer://{}?to=12,24,53'.format('e' * 20), { 'instance': plugins.NotifyPushSafer, # A status of 1 is a success 'requests_response_text': { 'status': 1, } }), # Set priority ('psafer://{}?priority=emergency'.format('f' * 20), { 'instance': plugins.NotifyPushSafer, 'requests_response_text': { 'status': 1, } }), # Support integer value too ('psafer://{}?priority=-1'.format('f' * 20), { 'instance': plugins.NotifyPushSafer, 'requests_response_text': { 'status': 1, } }), # Invalid priority ('psafer://{}?priority=invalid'.format('f' * 20), { # Invalid Priority 'instance': TypeError, }), # Invalid priority ('psafer://{}?priority=25'.format('f' * 20), { # Invalid Priority 'instance': TypeError, }), # Set sound ('psafer://{}?sound=ok'.format('g' * 20), { 'instance': plugins.NotifyPushSafer, 'requests_response_text': { 'status': 1, } }), # Support integer value too ('psafers://{}?sound=14'.format('g' * 20), { 'instance': plugins.NotifyPushSafer, 'requests_response_text': { 'status': 1, }, 'privacy_url': 'psafers://g...g', }), # Invalid sound ('psafer://{}?sound=invalid'.format('h' * 20), { # Invalid Sound 'instance': TypeError, }), ('psafer://{}?sound=94000'.format('h' * 20), { # Invalid Sound 'instance': TypeError, }), # Set vibration (integer only) ('psafers://{}?vibration=1'.format('h' * 20), { 'instance': plugins.NotifyPushSafer, 'requests_response_text': { 'status': 1, }, 'privacy_url': 'psafers://h...h', }), # Invalid sound ('psafer://{}?vibration=invalid'.format('h' * 20), { # Invalid Vibration 'instance': TypeError, }), # Invalid vibration ('psafer://{}?vibration=25000'.format('h' * 20), { # Invalid Vibration 'instance': TypeError, }), ('psafers://{}'.format('d' * 20), { 'instance': plugins.NotifyPushSafer, # A failure has status set to zero # Test without an 'error' flag 'requests_response_text': { 'status': 0, }, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('psafer://{}'.format('d' * 20), { 'instance': plugins.NotifyPushSafer, # A failure has status set to zero # Test without an 'error' flag 'requests_response_text': { 'status': 0, }, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('psafers://{}'.format('d' * 20), { 'instance': plugins.NotifyPushSafer, # A failure has status set to zero # Test without an 'error' flag 'requests_response_text': { 'status': 0, }, # Throws a series of connection and transfer exceptions when this # flag is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyTechulusPush ################################## ('push://', { # Missing API Key 'instance': TypeError, }), # Invalid API Key ('push://%s' % ('+' * 24), { 'instance': TypeError, }), # APIkey ('push://%s' % UUID4, { 'instance': plugins.NotifyTechulusPush, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'push://8...2/', }), # API Key + bad url ('push://:@/', { 'instance': TypeError, }), 
('push://%s' % UUID4, { 'instance': plugins.NotifyTechulusPush, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('push://%s' % UUID4, { 'instance': plugins.NotifyTechulusPush, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('push://%s' % UUID4, { 'instance': plugins.NotifyTechulusPush, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyPushed ################################## ('pushed://', { 'instance': TypeError, }), # Application Key Only ('pushed://%s' % ('a' * 32), { 'instance': TypeError, }), # Invalid URL ('pushed://:@/', { 'instance': TypeError, }), # Application Key+Secret ('pushed://%s/%s' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, }), # Application Key+Secret + channel ('pushed://%s/%s/#channel/' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, }), # Application Key+Secret + channel (via to=) ('pushed://%s/%s?to=channel' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'pushed://a...a/****/', }), # Application Key+Secret + dropped entry ('pushed://%s/%s/dropped_value/' % ('a' * 32, 'a' * 64), { # No entries validated is a fail 'instance': TypeError, }), # Application Key+Secret + 2 channels ('pushed://%s/%s/#channel1/#channel2' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, }), # Application Key+Secret + User Pushed ID ('pushed://%s/%s/@ABCD/' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, }), # Application Key+Secret + 2 devices ('pushed://%s/%s/@ABCD/@DEFG/' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, }), # Application Key+Secret + Combo ('pushed://%s/%s/@ABCD/#channel' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, }), # , ('pushed://%s/%s' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('pushed://%s/%s' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('pushed://%s/%s' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ('pushed://%s/%s' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('pushed://%s/%s' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('pushed://%s/%s/#channel' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('pushed://%s/%s/@user' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('pushed://%s/%s' % ('a' * 32, 'a' * 64), { 'instance': plugins.NotifyPushed, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully 
handle them 'test_requests_exceptions': True, }), ################################## # NotifyPushjet ################################## ('pjet://', { 'instance': None, }), ('pjets://', { 'instance': None, }), ('pjet://:@/', { 'instance': None, }), # You must specify a secret key ('pjet://%s' % ('a' * 32), { 'instance': TypeError, }), # The proper way to log in ('pjet://user:pass@localhost/%s' % ('a' * 32), { 'instance': plugins.NotifyPushjet, }), # The proper way to log in ('pjets://localhost/%s' % ('a' * 32), { 'instance': plugins.NotifyPushjet, }), # Specify your own server with login (secret= MUST be provided) ('pjet://user:pass@localhost?secret=%s' % ('a' * 32), { 'instance': plugins.NotifyPushjet, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'pjet://user:****@localhost', }), # Specify your own server with port ('pjets://localhost:8080/%s' % ('a' * 32), { 'instance': plugins.NotifyPushjet, }), ('pjets://localhost:8080/%s' % ('a' * 32), { 'instance': plugins.NotifyPushjet, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('pjets://localhost:4343/%s' % ('a' * 32), { 'instance': plugins.NotifyPushjet, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('pjet://localhost:8081/%s' % ('a' * 32), { 'instance': plugins.NotifyPushjet, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyPushover ################################## ('pover://', { 'instance': TypeError, }), # bad url ('pover://:@/', { 'instance': TypeError, }), # APIkey; no user ('pover://%s' % ('a' * 30), { 'instance': TypeError, }), # API Key + invalid sound setting ('pover://%s@%s?sound=invalid' % ('u' * 30, 'a' * 30), { 'instance': TypeError, }), # API Key + valid alternate sound picked ('pover://%s@%s?sound=spacealarm' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, }), # API Key + Valid User ('pover://%s@%s' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, # don't include an image by default 'include_image': False, }), # API Key + Valid User + 1 Device ('pover://%s@%s/DEVICE' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, }), # API Key + Valid User + 1 Device (via to=) ('pover://%s@%s?to=DEVICE' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, }), # API Key + Valid User + 2 Devices ('pover://%s@%s/DEVICE1/DEVICE2/' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'pover://[email protected]', }), # API Key + Valid User + invalid device ('pover://%s@%s/%s/' % ('u' * 30, 'a' * 30, 'd' * 30), { 'instance': plugins.NotifyPushover, # Notify will return False since there is a bad device in our list 'response': False, }), # API Key + Valid User + device + invalid device ('pover://%s@%s/DEVICE1/%s/' % ('u' * 30, 'a' * 30, 'd' * 30), { 'instance': plugins.NotifyPushover, # Notify will return False since there is a bad device in our list 'response': False, }), # API Key + priority setting ('pover://%s@%s?priority=high' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, }), # API Key + invalid priority setting ('pover://%s@%s?priority=invalid' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, }), # API Key + emergency(2) priority setting ('pover://%s@%s?priority=emergency' % ('u' * 30, 'a' * 
30), { 'instance': plugins.NotifyPushover, }), # API Key + emergency priority setting with retry and expire ('pover://%s@%s?priority=emergency&%s&%s' % ('u' * 30, 'a' * 30, 'retry=30', 'expire=300'), { 'instance': plugins.NotifyPushover, }), # API Key + emergency priority setting with text retry ('pover://%s@%s?priority=emergency&%s&%s' % ('u' * 30, 'a' * 30, 'retry=invalid', 'expire=300'), { 'instance': plugins.NotifyPushover, }), # API Key + emergency priority setting with text expire ('pover://%s@%s?priority=emergency&%s&%s' % ('u' * 30, 'a' * 30, 'retry=30', 'expire=invalid'), { 'instance': plugins.NotifyPushover, }), # API Key + emergency priority setting with invalid expire ('pover://%s@%s?priority=emergency&%s' % ('u' * 30, 'a' * 30, 'expire=100000'), { 'instance': TypeError, }), # API Key + emergency priority setting with invalid retry ('pover://%s@%s?priority=emergency&%s' % ('u' * 30, 'a' * 30, 'retry=15'), { 'instance': TypeError, }), # API Key + priority setting (empty) ('pover://%s@%s?priority=' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, }), ('pover://%s@%s' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('pover://%s@%s' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('pover://%s@%s' % ('u' * 30, 'a' * 30), { 'instance': plugins.NotifyPushover, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyRocketChat ################################## ('rocket://', { 'instance': None, }), ('rockets://', { 'instance': None, }), ('rocket://:@/', { 'instance': None, }), # No username or pass ('rocket://localhost', { 'instance': TypeError, }), # No room or channel ('rocket://user:pass@localhost', { 'instance': TypeError, }), # No valid rooms or channels ('rocket://user:pass@localhost/#/!/@', { 'instance': TypeError, }), # No user/pass combo ('rocket://user@localhost/room/', { 'instance': TypeError, }), # No user/pass combo ('rocket://localhost/room/', { 'instance': TypeError, }), # A room and port identifier ('rocket://user:pass@localhost:8080/room/', { 'instance': plugins.NotifyRocketChat, # The response text is expected to be the following on a success 'requests_response_text': { 'status': 'success', 'data': { 'authToken': 'abcd', 'userId': 'user', }, }, }), # A channel (using the to=) ('rockets://user:pass@localhost?to=#channel', { 'instance': plugins.NotifyRocketChat, # The response text is expected to be the following on a success 'requests_response_text': { 'status': 'success', 'data': { 'authToken': 'abcd', 'userId': 'user', }, }, }), # A channel ('rockets://user:pass@localhost/#channel', { 'instance': plugins.NotifyRocketChat, # The response text is expected to be the following on a success 'requests_response_text': { 'status': 'success', 'data': { 'authToken': 'abcd', 'userId': 'user', }, }, }), # Several channels ('rocket://user:pass@localhost/#channel1/#channel2/?avatar=No', { 'instance': plugins.NotifyRocketChat, # The response text is expected to be the following on a success 'requests_response_text': { 'status': 'success', 'data': { 'authToken': 'abcd', 'userId': 'user', }, }, }), # Several Rooms ('rocket://user:pass@localhost/room1/room2', { 'instance': 
plugins.NotifyRocketChat, # The response text is expected to be the following on a success 'requests_response_text': { 'status': 'success', 'data': { 'authToken': 'abcd', 'userId': 'user', }, }, }), # A room and channel ('rocket://user:pass@localhost/room/#channel?mode=basic', { 'instance': plugins.NotifyRocketChat, # The response text is expected to be the following on a success 'requests_response_text': { 'status': 'success', 'data': { 'authToken': 'abcd', 'userId': 'user', }, }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'rocket://user:****@localhost', }), # A user/pass where the pass matches a webtoken # to ensure we get the right mode, we enforce basic mode # so that web/token gets interpreted as a password ('rockets://user:pass%2Fwithslash@localhost/#channel/?mode=basic', { 'instance': plugins.NotifyRocketChat, # The response text is expected to be the following on a success 'requests_response_text': { 'status': 'success', 'data': { 'authToken': 'abcd', 'userId': 'user', }, }, }), # A room and channel ('rockets://user:pass@localhost/rooma/#channela', { # The response text is expected to be the following on a success 'requests_response_code': requests.codes.ok, 'requests_response_text': { # return something other then a success message type 'status': 'failure', }, # Exception is thrown in this case 'instance': plugins.NotifyRocketChat, # Notifications will fail in this event 'response': False, }), # A web token ('rockets://web/token@localhost/@user/#channel/roomid', { 'instance': plugins.NotifyRocketChat, }), ('rockets://user:web/token@localhost/@user/?mode=webhook', { 'instance': plugins.NotifyRocketChat, }), ('rockets://user:web/token@localhost?to=@user2,#channel2', { 'instance': plugins.NotifyRocketChat, }), ('rockets://web/token@localhost/?avatar=No', { # a simple webhook token with default values 'instance': plugins.NotifyRocketChat, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'rockets://w...n@localhost', }), ('rockets://localhost/@user/?mode=webhook&webhook=web/token', { 'instance': plugins.NotifyRocketChat, }), ('rockets://user:web/token@localhost/@user/?mode=invalid', { # invalid mode 'instance': TypeError, }), ('rocket://user:pass@localhost:8081/room1/room2', { 'instance': plugins.NotifyRocketChat, # force a failure using basic mode 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('rockets://user:web/token@localhost?to=@user3,#channel3', { 'instance': plugins.NotifyRocketChat, # force a failure using webhook mode 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('rocket://user:pass@localhost:8082/#channel', { 'instance': plugins.NotifyRocketChat, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('rocket://user:pass@localhost:8083/#chan1/#chan2/room', { 'instance': plugins.NotifyRocketChat, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyRyver ################################## ('ryver://', { 'instance': TypeError, }), ('ryver://:@/', { 'instance': TypeError, }), ('ryver://apprise', { # Just org provided (no token) 'instance': TypeError, }), ('ryver://apprise/ckhrjW8w672m6HG?mode=invalid', { # invalid mode provided 'instance': TypeError, }), ('ryver://x/ckhrjW8w672m6HG?mode=slack', { # Invalid org 'instance': TypeError, }), 
('ryver://apprise/ckhrjW8w672m6HG?mode=slack', { # No username specified; this is still okay as we use whatever # the user told the webhook to use; set our slack mode 'instance': plugins.NotifyRyver, }), ('ryver://apprise/ckhrjW8w672m6HG?mode=ryver', { # No username specified; this is still okay as we use whatever # the user told the webhook to use; set our ryver mode 'instance': plugins.NotifyRyver, }), # Legacy webhook mode setting: # Legacy webhook mode setting: ('ryver://apprise/ckhrjW8w672m6HG?webhook=slack', { # No username specified; this is still okay as we use whatever # the user told the webhook to use; set our slack mode 'instance': plugins.NotifyRyver, }), ('ryver://apprise/ckhrjW8w672m6HG?webhook=ryver', { # No username specified; this is still okay as we use whatever # the user told the webhook to use; set our ryver mode 'instance': plugins.NotifyRyver, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'ryver://apprise/c...G', }), # Support Native URLs ('https://apprise.ryver.com/application/webhook/ckhrjW8w672m6HG', { 'instance': plugins.NotifyRyver, }), # Support Native URLs with arguments ('https://apprise.ryver.com/application/webhook/ckhrjW8w672m6HG' '?webhook=ryver', { 'instance': plugins.NotifyRyver, }), ('ryver://caronc@apprise/ckhrjW8w672m6HG', { 'instance': plugins.NotifyRyver, # don't include an image by default 'include_image': False, }), ('ryver://apprise/ckhrjW8w672m6HG', { 'instance': plugins.NotifyRyver, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('ryver://apprise/ckhrjW8w672m6HG', { 'instance': plugins.NotifyRyver, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('ryver://apprise/ckhrjW8w672m6HG', { 'instance': plugins.NotifyRyver, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifySendGrid ################################## ('sendgrid://', { 'instance': None, }), ('sendgrid://:@/', { 'instance': None, }), ('sendgrid://abcd', { # Just an broken email (no api key or email) 'instance': None, }), ('sendgrid://abcd@host', { # Just an Email specified, no API Key 'instance': None, }), ('sendgrid://invalid-api-key+*-d:[email protected]', { # An invalid API Key 'instance': TypeError, }), ('sendgrid://abcd:[email protected]', { # No To/Target Address(es) specified; so we sub in the same From # address 'instance': plugins.NotifySendGrid, }), ('sendgrid://abcd:[email protected]/[email protected]', { # A good email 'instance': plugins.NotifySendGrid, }), ('sendgrid://abcd:[email protected]/[email protected]' '[email protected]', { # A good email with Blind Carbon Copy 'instance': plugins.NotifySendGrid, }), ('sendgrid://abcd:[email protected]/[email protected]' '[email protected]', { # A good email with Carbon Copy 'instance': plugins.NotifySendGrid, }), ('sendgrid://abcd:[email protected]/[email protected]' '[email protected]', { # A good email with Carbon Copy 'instance': plugins.NotifySendGrid, }), ('sendgrid://abcd:[email protected]/[email protected]' '?template={}'.format(UUID4), { # A good email with a template + no substitutions 'instance': plugins.NotifySendGrid, }), ('sendgrid://abcd:[email protected]/[email protected]' '?template={}&+sub=value&+sub2=value2'.format(UUID4), { # A good email with a template + substitutions 'instance': plugins.NotifySendGrid, # Our expected 
url(privacy=True) startswith() response: 'privacy_url': 'sendgrid://a...d:[email protected]/', }), ('sendgrid://abcd:[email protected]/[email protected]', { 'instance': plugins.NotifySendGrid, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('sendgrid://abcd:[email protected]/[email protected]', { 'instance': plugins.NotifySendGrid, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('sendgrid://abcd:[email protected]/[email protected]', { 'instance': plugins.NotifySendGrid, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifySinch ################################## ('sinch://', { # No Account SID specified 'instance': TypeError, }), ('sinch://:@/', { # invalid Auth token 'instance': TypeError, }), ('sinch://{}@12345678'.format('a' * 32), { # Just spi provided 'instance': TypeError, }), ('sinch://{}:{}@_'.format('a' * 32, 'b' * 32), { # spi and token provided but invalid from 'instance': TypeError, }), ('sinch://{}:{}@{}'.format('a' * 32, 'b' * 32, '3' * 5), { # using short-code (5 characters) without a target # We can still instantiate ourselves with a valid short code 'instance': TypeError, }), ('sinch://{}:{}@{}'.format('a' * 32, 'b' * 32, '3' * 9), { # spi and token provided and from but invalid from no 'instance': TypeError, }), ('sinch://{}:{}@{}/123/{}/abcd/'.format( 'a' * 32, 'b' * 32, '3' * 11, '9' * 15), { # valid everything but target numbers 'instance': plugins.NotifySinch, }), ('sinch://{}:{}@12345/{}'.format('a' * 32, 'b' * 32, '4' * 11), { # using short-code (5 characters) 'instance': plugins.NotifySinch, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'sinch://...aaaa:b...b@12345', }), ('sinch://{}:{}@123456/{}'.format('a' * 32, 'b' * 32, '4' * 11), { # using short-code (6 characters) 'instance': plugins.NotifySinch, }), ('sinch://{}:{}@{}'.format('a' * 32, 'b' * 32, '5' * 11), { # using phone no with no target - we text ourselves in # this case 'instance': plugins.NotifySinch, }), ('sinch://{}:{}@{}?region=eu'.format('a' * 32, 'b' * 32, '5' * 11), { # Specify a region 'instance': plugins.NotifySinch, }), ('sinch://{}:{}@{}?region=invalid'.format('a' * 32, 'b' * 32, '5' * 11), { # Invalid region 'instance': TypeError, }), ('sinch://_?spi={}&token={}&from={}'.format( 'a' * 32, 'b' * 32, '5' * 11), { # use get args to acomplish the same thing 'instance': plugins.NotifySinch, }), ('sinch://_?spi={}&token={}&source={}'.format( 'a' * 32, 'b' * 32, '5' * 11), { # use get args to acomplish the same thing (use source instead of from) 'instance': plugins.NotifySinch, }), ('sinch://_?spi={}&token={}&from={}&to={}'.format( 'a' * 32, 'b' * 32, '5' * 11, '7' * 13), { # use to= 'instance': plugins.NotifySinch, }), ('sinch://{}:{}@{}'.format('a' * 32, 'b' * 32, '6' * 11), { 'instance': plugins.NotifySinch, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('sinch://{}:{}@{}'.format('a' * 32, 'b' * 32, '6' * 11), { 'instance': plugins.NotifySinch, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifySpontit ################################## ('spontit://', { # invalid url 'instance': TypeError, }), # 
Another bad url ('spontit://:@/', { 'instance': TypeError, }), # No user specified ('spontit://%s' % ('a' * 100), { 'instance': TypeError, }), # Invalid API Key specified ('spontit://user@%%20_', { 'instance': TypeError, }), # Provide a valid user and API Key ('spontit://%s@%s' % ('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'spontit://{}@b...b/'.format('u' * 11), }), # Provide a valid user and API Key, but provide an invalid channel ('spontit://%s@%s/#!!' % ('u' * 11, 'b' * 100), { # An instance is still created, but the channel won't be notified 'instance': plugins.NotifySpontit, }), # Provide a valid user, API Key and a valid channel ('spontit://%s@%s/#abcd' % ('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, }), # Provide a valid user, API Key, and a subtitle ('spontit://%s@%s/?subtitle=Test' % ('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, }), # Provide a valid user, API Key, and a lengthy subtitle ('spontit://%s@%s/?subtitle=%s' % ('u' * 11, 'b' * 100, 'c' * 300), { 'instance': plugins.NotifySpontit, }), # Provide a valid user and API Key, but provide a valid channel (that is # not ours). # Spontit uses a slash (/) to delimite the user from the channel id when # specifying channel entries. For Apprise we need to encode this # so we convert the slash (/) into %2F ('spontit://{}@{}/#1245%2Fabcd'.format('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, }), # Provide multipe channels ('spontit://{}@{}/#1245%2Fabcd/defg'.format('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, }), # Provide multipe channels through the use of the to= variable ('spontit://{}@{}/?to=#1245/abcd'.format('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, }), ('spontit://%s@%s' % ('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('spontit://%s@%s' % ('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('spontit://%s@%s' % ('u' * 11, 'b' * 100), { 'instance': plugins.NotifySpontit, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifySimplePush ################################## ('spush://', { # No api key 'instance': TypeError, }), ('spush://{}'.format('A' * 14), { # API Key specified however expected server response # didn't have 'OK' in JSON response 'instance': plugins.NotifySimplePush, # Expected notify() response 'notify_response': False, }), ('spush://{}'.format('Y' * 14), { # API Key valid and expected response was valid 'instance': plugins.NotifySimplePush, # Set our response to OK 'requests_response_text': { 'status': 'OK', }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'spush://Y...Y/', }), ('spush://{}?event=Not%20So%20Good'.format('X' * 14), { # API Key valid and expected response was valid 'instance': plugins.NotifySimplePush, # Set our response to something that is not okay 'requests_response_text': { 'status': 'NOT-OK', }, # Expected notify() response 'notify_response': False, }), ('spush://salt:pass@{}'.format('X' * 14), { # Now we'll test encrypted messages with new salt 'instance': plugins.NotifySimplePush, # Set our response to OK 'requests_response_text': { 
'status': 'OK', }, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'spush://****:****@X...X/', }), ('spush://{}'.format('Y' * 14), { 'instance': plugins.NotifySimplePush, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, # Set a failing message too 'requests_response_text': { 'status': 'BadRequest', 'message': 'Title or message too long', }, }), ('spush://{}'.format('Z' * 14), { 'instance': plugins.NotifySimplePush, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifySlack ################################## ('slack://', { 'instance': TypeError, }), ('slack://:@/', { 'instance': TypeError, }), ('slack://T1JJ3T3L2', { # Just Token 1 provided 'instance': TypeError, }), ('slack://T1JJ3T3L2/A1BRTD4JD/', { # Just 2 tokens provided 'instance': TypeError, }), ('slack://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#hmm/#-invalid-', { # No username specified; this is still okay as we sub in # default; The one invalid channel is skipped when sending a message 'instance': plugins.NotifySlack, # There is an invalid channel that we will fail to deliver to # as a result the response type will be false 'response': False, 'requests_response_text': { 'ok': False, 'message': 'Bad Channel', }, }), ('slack://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#channel', { # No username specified; this is still okay as we sub in # default; The one invalid channel is skipped when sending a message 'instance': plugins.NotifySlack, # don't include an image by default 'include_image': False, 'requests_response_text': { 'ok': True, 'message': '', }, }), ('slack://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/+id/@id/', { # + encoded id, # @ userid 'instance': plugins.NotifySlack, 'requests_response_text': { 'ok': True, 'message': '', }, }), ('slack://username@T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/' '?to=#nuxref', { 'instance': plugins.NotifySlack, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'slack://[email protected]/A...D/T...Q/', 'requests_response_text': { 'ok': True, 'message': '', }, }), ('slack://username@T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#nuxref', { 'instance': plugins.NotifySlack, 'requests_response_text': { 'ok': True, 'message': '', }, }), # Test using a bot-token (also test footer set to no flag) ('slack://username@xoxb-1234-1234-abc124/#nuxref?footer=no', { 'instance': plugins.NotifySlack, 'requests_response_text': { 'ok': True, 'message': '', # support attachments 'file': { 'url_private': 'http://localhost/', }, }, }), ('slack://username@xoxb-1234-1234-abc124/#nuxref', { 'instance': plugins.NotifySlack, 'requests_response_text': { 'ok': True, 'message': '', }, # we'll fail to send attachments because we had no 'file' response in # our object 'response': False, }), ('slack://username@T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ', { # Missing a channel, falls back to webhook channel bindings 'instance': plugins.NotifySlack, 'requests_response_text': { 'ok': True, 'message': '', }, }), # Native URL Support, take the slack URL and still build from it ('https://hooks.slack.com/services/{}/{}/{}'.format( 'A' * 9, 'B' * 9, 'c' * 24), { 'instance': plugins.NotifySlack, 'requests_response_text': { 'ok': True, 'message': '', }, }), # Native URL Support with arguments ('https://hooks.slack.com/services/{}/{}/{}?format=text'.format( 'A' * 9, 'B' * 9, 'c' 
* 24), { 'instance': plugins.NotifySlack, 'requests_response_text': { 'ok': True, 'message': '', }, }), ('slack://username@-INVALID-/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#cool', { # invalid 1st Token 'instance': TypeError, }), ('slack://username@T1JJ3T3L2/-INVALID-/TIiajkdnlazkcOXrIdevi7FQ/#great', { # invalid 2rd Token 'instance': TypeError, }), ('slack://username@T1JJ3T3L2/A1BRTD4JD/-INVALID-/#channel', { # invalid 3rd Token 'instance': TypeError, }), ('slack://l2g@T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#usenet', { 'instance': plugins.NotifySlack, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, 'requests_response_text': { 'ok': False, 'message': '', }, }), ('slack://respect@T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#a', { 'instance': plugins.NotifySlack, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, 'requests_response_text': { 'ok': False, 'message': '', }, }), ('slack://notify@T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/#b', { 'instance': plugins.NotifySlack, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, 'requests_response_text': { 'ok': False, 'message': '', }, }), ################################## # NotifySNS (AWS) ################################## ('sns://', { 'instance': TypeError, }), ('sns://:@/', { 'instance': TypeError, }), ('sns://T1JJ3T3L2', { # Just Token 1 provided 'instance': TypeError, }), ('sns://T1JJ3TD4JD/TIiajkdnlazk7FQ/', { # Missing a region 'instance': TypeError, }), ('sns://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcevi7FQ/us-west-2/12223334444', { # we have a valid URL and one number to text 'instance': plugins.NotifySNS, }), ('sns://T1JJ3TD4JD/TIiajkdnlazk7FQ/us-west-2/12223334444/12223334445', { # Multi SNS Suppport 'instance': plugins.NotifySNS, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'sns://T...D/****/us-west-2', }), ('sns://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcOXrIdevi7FQ/us-east-1' '?to=12223334444', { # Missing a topic and/or phone No 'instance': plugins.NotifySNS, }), ('sns://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcevi7FQ/us-west-2/12223334444', { 'instance': plugins.NotifySNS, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('sns://T1JJ3T3L2/A1BRTD4JD/TIiajkdnlazkcevi7FQ/us-west-2/15556667777', { 'instance': plugins.NotifySNS, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyTelegram ################################## ('tgram://', { 'instance': None, }), # Simple Message ('tgram://123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, }), # Simple Message (no images) ('tgram://123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, # don't include an image by default 'include_image': False, }), # Simple Message with multiple chat names ('tgram://123456789:abcdefg_hijklmnop/id1/id2/', { 'instance': plugins.NotifyTelegram, }), # Simple Message with multiple chat names ('tgram://123456789:abcdefg_hijklmnop/?to=id1,id2', { 'instance': plugins.NotifyTelegram, }), # Simple Message with an invalid chat ID ('tgram://123456789:abcdefg_hijklmnop/%$/', { 'instance': plugins.NotifyTelegram, # Notify will fail 'response': False, }), # Simple Message with multiple chat ids 
('tgram://123456789:abcdefg_hijklmnop/id1/id2/23423/-30/', { 'instance': plugins.NotifyTelegram, }), # Simple Message with multiple chat ids (no images) ('tgram://123456789:abcdefg_hijklmnop/id1/id2/23423/-30/', { 'instance': plugins.NotifyTelegram, # don't include an image by default 'include_image': False, }), # Support bot keyword prefix ('tgram://bottest@123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, }), # Testing image ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?image=Yes', { 'instance': plugins.NotifyTelegram, }), # Testing invalid format (fall's back to html) ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?format=invalid', { 'instance': plugins.NotifyTelegram, }), # Testing empty format (falls back to html) ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?format=', { 'instance': plugins.NotifyTelegram, }), # Testing valid formats ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?format=markdown', { 'instance': plugins.NotifyTelegram, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?format=html', { 'instance': plugins.NotifyTelegram, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?format=text', { 'instance': plugins.NotifyTelegram, }), # Simple Message without image ('tgram://123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, # don't include an image by default 'include_image': False, }), # Invalid Bot Token ('tgram://alpha:abcdefg_hijklmnop/lead2gold/', { 'instance': None, }), # AuthToken + bad url ('tgram://:@/', { 'instance': None, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?image=Yes', { 'instance': plugins.NotifyTelegram, # force a failure without an image specified 'include_image': False, 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('tgram://123456789:abcdefg_hijklmnop/id1/id2/', { 'instance': plugins.NotifyTelegram, # force a failure with multiple chat_ids 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('tgram://123456789:abcdefg_hijklmnop/id1/id2/', { 'instance': plugins.NotifyTelegram, # force a failure without an image specified 'include_image': False, 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, # throw a bizzare code forcing us to fail to look it up without # having an image included 'include_image': False, 'response': False, 'requests_response_code': 999, }), # Test with image set ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?image=Yes', { 'instance': plugins.NotifyTelegram, # throw a bizzare code forcing us to fail to look it up without # having an image included 'include_image': True, 'response': False, 'requests_response_code': 999, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/', { 'instance': plugins.NotifyTelegram, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ('tgram://123456789:abcdefg_hijklmnop/lead2gold/?image=Yes', { 'instance': plugins.NotifyTelegram, # Throws a 
series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them without images set 'include_image': True, 'test_requests_exceptions': True, }), ################################## # NotifyTwilio ################################## ('twilio://', { # No Account SID specified 'instance': TypeError, }), ('twilio://:@/', { # invalid Auth token 'instance': TypeError, }), ('twilio://AC{}@12345678'.format('a' * 32), { # Just sid provided 'instance': TypeError, }), ('twilio://AC{}:{}@_'.format('a' * 32, 'b' * 32), { # sid and token provided but invalid from 'instance': TypeError, }), ('twilio://AC{}:{}@{}'.format('a' * 32, 'b' * 32, '3' * 5), { # using short-code (5 characters) without a target # We can still instantiate ourselves with a valid short code 'instance': TypeError, }), ('twilio://AC{}:{}@{}'.format('a' * 32, 'b' * 32, '3' * 9), { # sid and token provided and from but invalid from no 'instance': TypeError, }), ('twilio://AC{}:{}@{}/123/{}/abcd/'.format( 'a' * 32, 'b' * 32, '3' * 11, '9' * 15), { # valid everything but target numbers 'instance': plugins.NotifyTwilio, }), ('twilio://AC{}:{}@12345/{}'.format('a' * 32, 'b' * 32, '4' * 11), { # using short-code (5 characters) 'instance': plugins.NotifyTwilio, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'twilio://...aaaa:b...b@12345', }), ('twilio://AC{}:{}@123456/{}'.format('a' * 32, 'b' * 32, '4' * 11), { # using short-code (6 characters) 'instance': plugins.NotifyTwilio, }), ('twilio://AC{}:{}@{}'.format('a' * 32, 'b' * 32, '5' * 11), { # using phone no with no target - we text ourselves in # this case 'instance': plugins.NotifyTwilio, }), ('twilio://_?sid=AC{}&token={}&from={}'.format( 'a' * 32, 'b' * 32, '5' * 11), { # use get args to acomplish the same thing 'instance': plugins.NotifyTwilio, }), ('twilio://_?sid=AC{}&token={}&source={}'.format( 'a' * 32, 'b' * 32, '5' * 11), { # use get args to acomplish the same thing (use source instead of from) 'instance': plugins.NotifyTwilio, }), ('twilio://_?sid=AC{}&token={}&from={}&to={}'.format( 'a' * 32, 'b' * 32, '5' * 11, '7' * 13), { # use to= 'instance': plugins.NotifyTwilio, }), ('twilio://AC{}:{}@{}'.format('a' * 32, 'b' * 32, '6' * 11), { 'instance': plugins.NotifyTwilio, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('twilio://AC{}:{}@{}'.format('a' * 32, 'b' * 32, '6' * 11), { 'instance': plugins.NotifyTwilio, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyTwist ################################## ('twist://', { # Missing Email and Login 'instance': None, }), ('twist://:@/', { 'instance': None, }), ('twist://[email protected]/', { # No password 'instance': None, }), ('twist://[email protected]/password', { # Password acceptable as first entry in path 'instance': plugins.NotifyTwist, # Expected notify() response is False because internally we would # have failed to login 'notify_response': False, }), ('twist://password:[email protected]', { # password:login acceptable 'instance': plugins.NotifyTwist, # Expected notify() response is False because internally we would # have failed to login 'notify_response': False, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'twist://****:[email protected]', }), ('twist://password:[email protected]', { # password:login acceptable 'instance': 
plugins.NotifyTwist, # Expected notify() response is False because internally we would # have logged in, but we would have failed to look up the #General # channel and workspace. 'requests_response_text': { # Login expected response 'id': 1234, 'default_workspace': 9876, }, 'notify_response': False, }), ('twist://password:[email protected]', { 'instance': plugins.NotifyTwist, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('twist://password:[email protected]', { 'instance': plugins.NotifyTwist, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyTwitter ################################## ('twitter://', { # Missing Consumer API Key 'instance': TypeError, }), ('twitter://:@/', { 'instance': TypeError, }), ('twitter://consumer_key', { # Missing Keys 'instance': TypeError, }), ('twitter://consumer_key/consumer_secret/', { # Missing Keys 'instance': TypeError, }), ('twitter://consumer_key/consumer_secret/access_token/', { # Missing Access Secret 'instance': TypeError, }), ('twitter://consumer_key/consumer_secret/access_token/access_secret', { # No user mean's we message ourselves 'instance': plugins.NotifyTwitter, # Expected notify() response False (because we won't be able # to detect our user) 'notify_response': False, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'twitter://c...y/****/a...n/****', }), ('twitter://consumer_key/consumer_secret/access_token/access_secret' '?cache=no', { # No user mean's we message ourselves 'instance': plugins.NotifyTwitter, # However we'll be okay if we return a proper response 'requests_response_text': { 'id': 12345, 'screen_name': 'test' }, }), ('twitter://consumer_key/consumer_secret/access_token/access_secret', { # No user mean's we message ourselves 'instance': plugins.NotifyTwitter, # However we'll be okay if we return a proper response 'requests_response_text': { 'id': 12345, 'screen_name': 'test' }, }), # A duplicate of the entry above, this will cause cache to be referenced ('twitter://consumer_key/consumer_secret/access_token/access_secret', { # No user mean's we message ourselves 'instance': plugins.NotifyTwitter, # However we'll be okay if we return a proper response 'requests_response_text': { 'id': 12345, 'screen_name': 'test' }, }), # handle cases where the screen_name is missing from the response causing # an exception during parsing ('twitter://consumer_key/consumer_secret2/access_token/access_secret', { # No user mean's we message ourselves 'instance': plugins.NotifyTwitter, # However we'll be okay if we return a proper response 'requests_response_text': { 'id': 12345, }, # due to a mangled response_text we'll fail 'notify_response': False, }), ('twitter://user@consumer_key/csecret2/access_token/access_secret/-/%/', { # One Invalid User 'instance': plugins.NotifyTwitter, # Expected notify() response False (because we won't be able # to detect our user) 'notify_response': False, }), ('twitter://user@consumer_key/csecret/access_token/access_secret' '?cache=No', { # No Cache 'instance': plugins.NotifyTwitter, 'requests_response_text': [{ 'id': 12345, 'screen_name': 'user' }], }), ('twitter://user@consumer_key/csecret/access_token/access_secret', { # We're good! 
'instance': plugins.NotifyTwitter, 'requests_response_text': [{ 'id': 12345, 'screen_name': 'user' }], }), # A duplicate of the entry above, this will cause cache to be referenced # for this reason, we don't even need to return a valid response ('twitter://user@consumer_key/csecret/access_token/access_secret', { # We're identifying the same user we already sent to 'instance': plugins.NotifyTwitter, }), ('twitter://ckey/csecret/access_token/access_secret?mode=tweet', { # A Public Tweet 'instance': plugins.NotifyTwitter, }), ('twitter://user@ckey/csecret/access_token/access_secret?mode=invalid', { # An invalid mode 'instance': TypeError, }), ('twitter://usera@consumer_key/consumer_secret/access_token/' 'access_secret/user/?to=userb', { # We're good! 'instance': plugins.NotifyTwitter, 'requests_response_text': [{ 'id': 12345, 'screen_name': 'usera' }, { 'id': 12346, 'screen_name': 'userb' }, { # A garbage entry we can test exception handling on 'id': 123, }], }), ('twitter://ckey/csecret/access_token/access_secret', { 'instance': plugins.NotifyTwitter, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('twitter://ckey/csecret/access_token/access_secret', { 'instance': plugins.NotifyTwitter, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ('twitter://ckey/csecret/access_token/access_secret?mode=tweet', { 'instance': plugins.NotifyTwitter, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyMSG91 ################################## ('msg91://', { # No hostname/authkey specified 'instance': TypeError, }), ('msg91://-', { # Invalid AuthKey 'instance': TypeError, }), ('msg91://{}'.format('a' * 23), { # No number specified 'instance': TypeError, }), ('msg91://{}/123'.format('a' * 23), { # invalid phone number 'instance': TypeError, }), ('msg91://{}/abcd'.format('a' * 23), { # No number to notify 'instance': TypeError, }), ('msg91://{}/15551232000/?country=invalid'.format('a' * 23), { # invalid country 'instance': TypeError, }), ('msg91://{}/15551232000/?country=99'.format('a' * 23), { # invalid country 'instance': TypeError, }), ('msg91://{}/15551232000/?route=invalid'.format('a' * 23), { # invalid route 'instance': TypeError, }), ('msg91://{}/15551232000/?route=99'.format('a' * 23), { # invalid route 'instance': TypeError, }), ('msg91://{}/15551232000'.format('a' * 23), { # a valid message 'instance': plugins.NotifyMSG91, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'msg91://a...a/15551232000', }), ('msg91://{}/?to=15551232000'.format('a' * 23), { # a valid message 'instance': plugins.NotifyMSG91, }), ('msg91://{}/15551232000?country=91&route=1'.format('a' * 23), { # using phone no with no target - we text ourselves in # this case 'instance': plugins.NotifyMSG91, }), ('msg91://{}/15551232000'.format('a' * 23), { # use get args to acomplish the same thing 'instance': plugins.NotifyMSG91, }), ('msg91://{}/15551232000'.format('a' * 23), { 'instance': plugins.NotifyMSG91, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('msg91://{}/15551232000'.format('a' * 23), { 'instance': plugins.NotifyMSG91, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully 
handle them 'test_requests_exceptions': True, }), ################################## # NotifyMessageBird ################################## ('msgbird://', { # No hostname/apikey specified 'instance': TypeError, }), ('msgbird://{}/abcd'.format('a' * 25), { # invalid characters in source phone number 'instance': TypeError, }), ('msgbird://{}/123'.format('a' * 25), { # invalid source phone number 'instance': TypeError, }), ('msgbird://{}/15551232000'.format('a' * 25), { # target phone number becomes who we text too; all is good 'instance': plugins.NotifyMessageBird, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'msgbird://a...a/15551232000', }), ('msgbird://{}/15551232000/abcd'.format('a' * 25), { # invalid target phone number; we have no one to notify 'instance': TypeError, }), ('msgbird://{}/15551232000/123'.format('a' * 25), { # invalid target phone number 'instance': TypeError, }), ('msgbird://{}/?from=15551233000&to=15551232000'.format('a' * 25), { # reference to to= and from= 'instance': plugins.NotifyMessageBird, }), ('msgbird://{}/15551232000'.format('a' * 25), { 'instance': plugins.NotifyMessageBird, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('msgbird://{}/15551232000'.format('a' * 25), { 'instance': plugins.NotifyMessageBird, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('msgbird://{}/15551232000'.format('a' * 25), { 'instance': plugins.NotifyMessageBird, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyPopcorn (PopcornNotify) ################################## ('popcorn://', { # No hostname/apikey specified 'instance': TypeError, }), ('popcorn://{}/18001231234'.format('_' * 9), { # invalid apikey 'instance': TypeError, }), ('popcorn://{}/1232348923489234923489234289-32423'.format('a' * 9), { # invalid phone number 'instance': plugins.NotifyPopcornNotify, 'notify_response': False, }), ('popcorn://{}/abc'.format('b' * 9), { # invalid email 'instance': plugins.NotifyPopcornNotify, 'notify_response': False, }), ('popcorn://{}/15551232000/[email protected]'.format('c' * 9), { # value phone and email 'instance': plugins.NotifyPopcornNotify, }), ('popcorn://{}/15551232000/[email protected]?batch=yes'.format('w' * 9), { # value phone and email with batch mode set 'instance': plugins.NotifyPopcornNotify, }), ('popcorn://{}/?to=15551232000'.format('w' * 9), { # reference to to= 'instance': plugins.NotifyPopcornNotify, }), ('popcorn://{}/15551232000'.format('x' * 9), { 'instance': plugins.NotifyPopcornNotify, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('popcorn://{}/15551232000'.format('y' * 9), { 'instance': plugins.NotifyPopcornNotify, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('popcorn://{}/15551232000'.format('z' * 9), { 'instance': plugins.NotifyPopcornNotify, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyWebexTeams ################################## ('wxteams://', { # Teams Token missing 'instance': TypeError, }), ('wxteams://:@/', { # We don't have strict host checking on for wxteams, so this URL # actually 
becomes parseable and :@ becomes a hostname. # The below errors because a second token wasn't found 'instance': TypeError, }), ('wxteams://{}'.format('a' * 80), { # token provided - we're good 'instance': plugins.NotifyWebexTeams, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'wxteams://a...a/', }), # Support Native URLs ('https://api.ciscospark.com/v1/webhooks/incoming/{}'.format('a' * 80), { # token provided - we're good 'instance': plugins.NotifyWebexTeams, }), # Support Native URLs with arguments ('https://api.ciscospark.com/v1/webhooks/incoming/{}?format=text'.format( 'a' * 80), { # token provided - we're good 'instance': plugins.NotifyWebexTeams, }), ('wxteams://{}'.format('a' * 80), { 'instance': plugins.NotifyWebexTeams, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('wxteams://{}'.format('a' * 80), { 'instance': plugins.NotifyWebexTeams, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('wxteams://{}'.format('a' * 80), { 'instance': plugins.NotifyWebexTeams, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyKODI ################################## ('xbmc://', { 'instance': None, }), ('xbmc://localhost', { 'instance': plugins.NotifyXBMC, }), ('xbmc://localhost?duration=14', { 'instance': plugins.NotifyXBMC, }), ('xbmc://localhost?duration=invalid', { 'instance': plugins.NotifyXBMC, }), ('xbmc://localhost?duration=-1', { 'instance': plugins.NotifyXBMC, }), ('xbmc://user:pass@localhost', { 'instance': plugins.NotifyXBMC, }), ('xbmc://localhost:8080', { 'instance': plugins.NotifyXBMC, }), ('xbmc://user:pass@localhost:8080', { 'instance': plugins.NotifyXBMC, }), ('xbmc://user@localhost', { 'instance': plugins.NotifyXBMC, # don't include an image by default 'include_image': False, }), ('xbmc://localhost', { 'instance': plugins.NotifyXBMC, # Experement with different notification types 'notify_type': NotifyType.WARNING, }), ('xbmc://localhost', { 'instance': plugins.NotifyXBMC, # Experement with different notification types 'notify_type': NotifyType.FAILURE, }), ('xbmc://:@/', { 'instance': None, }), ('xbmc://user:pass@localhost:8081', { 'instance': plugins.NotifyXBMC, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('xbmc://user:pass@localhost:8082', { 'instance': plugins.NotifyXBMC, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('xbmc://user:pass@localhost:8083', { 'instance': plugins.NotifyXBMC, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyXML ################################## ('xml://:@/', { 'instance': None, }), ('xml://', { 'instance': None, }), ('xmls://', { 'instance': None, }), ('xml://localhost', { 'instance': plugins.NotifyXML, }), ('xml://user@localhost', { 'instance': plugins.NotifyXML, }), ('xml://user:pass@localhost', { 'instance': plugins.NotifyXML, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'xml://user:****@localhost', }), ('xml://localhost:8080', { 'instance': plugins.NotifyXML, }), ('xml://user:pass@localhost:8080', { 'instance': plugins.NotifyXML, }), ('xmls://localhost', { 
'instance': plugins.NotifyXML, }), ('xmls://user:pass@localhost', { 'instance': plugins.NotifyXML, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'xmls://user:****@localhost', }), ('xmls://localhost:8080/path/', { 'instance': plugins.NotifyXML, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'xmls://localhost:8080/path/', }), ('xmls://user:pass@localhost:8080', { 'instance': plugins.NotifyXML, }), ('xml://localhost:8080/path?-HeaderKey=HeaderValue', { 'instance': plugins.NotifyXML, }), ('xml://user:pass@localhost:8081', { 'instance': plugins.NotifyXML, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('xml://user:pass@localhost:8082', { 'instance': plugins.NotifyXML, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('xml://user:pass@localhost:8083', { 'instance': plugins.NotifyXML, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ################################## # NotifyZulip ################################## ('zulip://', { 'instance': TypeError, }), ('zulip://:@/', { 'instance': TypeError, }), ('zulip://apprise', { # Just org provided (no token or botname) 'instance': TypeError, }), ('zulip://botname@apprise', { # Just org and botname provided (no token) 'instance': TypeError, }), # invalid token ('zulip://botname@apprise/{}'.format('a' * 24), { 'instance': TypeError, }), # invalid botname ('zulip://....@apprise/{}'.format('a' * 32), { 'instance': TypeError, }), # Valid everything - no target so default is used ('zulip://botname@apprise/{}'.format('a' * 32), { 'instance': plugins.NotifyZulip, # Our expected url(privacy=True) startswith() response: 'privacy_url': 'zulip://botname@apprise/a...a/', }), # Valid everything - organization as hostname ('zulip://[email protected]/{}'.format('a' * 32), { 'instance': plugins.NotifyZulip, }), # Valid everything - 2 channels specified ('zulip://botname@apprise/{}/channel1/channel2'.format('a' * 32), { 'instance': plugins.NotifyZulip, }), # Valid everything - 2 channels specified (using to=) ('zulip://botname@apprise/{}/?to=channel1/channel2'.format('a' * 32), { 'instance': plugins.NotifyZulip, }), # Valid everything - 2 emails specified ('zulip://botname@apprise/{}/[email protected]/[email protected]'.format( 'a' * 32), { 'instance': plugins.NotifyZulip, }), ('zulip://botname@apprise/{}'.format('a' * 32), { 'instance': plugins.NotifyZulip, # don't include an image by default 'include_image': False, }), ('zulip://botname@apprise/{}'.format('a' * 32), { 'instance': plugins.NotifyZulip, # force a failure 'response': False, 'requests_response_code': requests.codes.internal_server_error, }), ('zulip://botname@apprise/{}'.format('a' * 32), { 'instance': plugins.NotifyZulip, # throw a bizzare code forcing us to fail to look it up 'response': False, 'requests_response_code': 999, }), ('zulip://botname@apprise/{}'.format('a' * 32), { 'instance': plugins.NotifyZulip, # Throws a series of connection and transfer exceptions when this flag # is set and tests that we gracfully handle them 'test_requests_exceptions': True, }), ) @mock.patch('requests.get') @mock.patch('requests.post') def test_rest_plugins(mock_post, mock_get): """ API: REST Based Plugins() """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Define how many characters exist per line row = 80 
    # Some variables we use to control the data we work with
    body_len = 1024
    title_len = 1024

    # Create a large body and title with random data
    body = ''.join(choice(str_alpha + str_num + ' ') for _ in range(body_len))
    body = '\r\n'.join([body[i: i + row] for i in range(0, len(body), row)])

    # Create our title using random data
    title = ''.join(choice(str_alpha + str_num) for _ in range(title_len))

    # iterate over our dictionary and test it out
    for (url, meta) in TEST_URLS:

        # Our expected instance
        instance = meta.get('instance', None)

        # Our expected server objects
        self = meta.get('self', None)

        # Our expected Query response (True, False, or exception type)
        response = meta.get('response', True)

        # Our expected Notify response (True or False)
        notify_response = meta.get('notify_response', response)

        # Our expected Notify Attachment response (True or False)
        attach_response = meta.get('attach_response', notify_response)

        # Our expected privacy url
        # Don't set this if you don't need to check its value
        privacy_url = meta.get('privacy_url')

        # Our regular expression
        url_matches = meta.get('url_matches')

        # Test attachments
        # Don't set this if you don't need to check its value
        check_attachments = meta.get('check_attachments', True)

        # Allow us to force the server response code to be something other
        # than the defaults
        requests_response_code = meta.get(
            'requests_response_code',
            requests.codes.ok if response else requests.codes.not_found,
        )

        # Allow us to force the server response text to be something other
        # than the defaults
        requests_response_text = meta.get('requests_response_text')
        if not isinstance(requests_response_text, six.string_types):
            # Convert to string
            requests_response_text = dumps(requests_response_text)

        # Allow notification type override, otherwise default to INFO
        notify_type = meta.get('notify_type', NotifyType.INFO)

        # Whether or not we should include an image with our request; unless
        # otherwise specified, we assume that images are to be included
        include_image = meta.get('include_image', True)
        if include_image:
            # a default asset
            asset = AppriseAsset()

        else:
            # Disable images
            asset = AppriseAsset(image_path_mask=False, image_url_mask=False)
            asset.image_url_logo = None

        test_requests_exceptions = meta.get(
            'test_requests_exceptions', False)

        # A request
        robj = mock.Mock()
        robj.content = u''
        mock_get.return_value = robj
        mock_post.return_value = robj

        if test_requests_exceptions is False:
            # Handle our default response
            mock_post.return_value.status_code = requests_response_code
            mock_get.return_value.status_code = requests_response_code

            # Handle our default text response
            mock_get.return_value.content = requests_response_text
            mock_post.return_value.content = requests_response_text

            # Ensure there is no side effect set
            mock_post.side_effect = None
            mock_get.side_effect = None

        else:
            # Handle exception testing; first we turn the boolean flag into
            # a list of exceptions
            test_requests_exceptions = REQUEST_EXCEPTIONS

        try:
            obj = Apprise.instantiate(
                url, asset=asset, suppress_exceptions=False)

            if obj is None:
                if instance is not None:
                    # We're done (assuming this is what we were expecting)
                    print("{} didn't instantiate itself "
                          "(we expected it to be a {})".format(url, instance))
                    assert False
                continue

            if instance is None:
                # Expected None but didn't get it
                print('%s instantiated %s (but expected None)' % (
                    url, str(obj)))
                assert False

            assert isinstance(obj, instance) is True

            if isinstance(obj, plugins.NotifyBase):
                # We loaded okay; now let's make sure we can reverse this url
                assert isinstance(obj.url(), six.string_types) is True
                # Test url() with privacy=True
                assert isinstance(
                    obj.url(privacy=True), six.string_types) is True

                # Some Simple Invalid Instance Testing
                assert instance.parse_url(None) is None
                assert instance.parse_url(object) is None
                assert instance.parse_url(42) is None

                if privacy_url:
                    # Assess that our privacy url is as expected
                    assert obj.url(privacy=True).startswith(privacy_url)

                if url_matches:
                    # Assess that our URL matches a set regex
                    assert re.search(url_matches, obj.url())

                # Instantiate the exact same object again using the URL from
                # the one that was already created properly
                obj_cmp = Apprise.instantiate(obj.url())

                # Our object should be the same instance as what we had
                # originally expected above.
                if not isinstance(obj_cmp, plugins.NotifyBase):
                    # Assert messages are hard to trace back with the way
                    # these tests work. Just printing before throwing our
                    # assertion failure makes things easier to debug later on
                    print('TEST FAIL: {} regenerated as {}'.format(
                        url, obj.url()))
                    assert False

                # Tidy our object
                del obj_cmp

            if self:
                # Iterate over our expected entries inside of our object
                for key, val in self.items():
                    # Test that our object has the desired key
                    assert hasattr(obj, key) is True
                    assert getattr(obj, key) == val

            #
            # Stage 1: with title defined
            #
            try:
                if test_requests_exceptions is False:
                    # Disable throttling
                    obj.request_rate_per_sec = 0

                    # check that we're as expected
                    assert obj.notify(
                        body=body, title=title,
                        notify_type=notify_type) == notify_response

                    # check that this doesn't change using different overflow
                    # methods
                    assert obj.notify(
                        body=body, title=title,
                        notify_type=notify_type,
                        overflow=OverflowMode.UPSTREAM) == notify_response
                    assert obj.notify(
                        body=body, title=title,
                        notify_type=notify_type,
                        overflow=OverflowMode.TRUNCATE) == notify_response
                    assert obj.notify(
                        body=body, title=title,
                        notify_type=notify_type,
                        overflow=OverflowMode.SPLIT) == notify_response

                    #
                    # Handle variations of the Asset Object missing fields
                    #

                    # First make a backup
                    app_id = asset.app_id
                    app_desc = asset.app_desc

                    # now clear records
                    asset.app_id = None
                    asset.app_desc = None

                    # Notify should still work
                    assert obj.notify(
                        body=body, title=title,
                        notify_type=notify_type) == notify_response

                    # App ID only
                    asset.app_id = app_id
                    asset.app_desc = None

                    # Notify should still work
                    assert obj.notify(
                        body=body, title=title,
                        notify_type=notify_type) == notify_response

                    # App Desc only
                    asset.app_id = None
                    asset.app_desc = app_desc

                    # Notify should still work
                    assert obj.notify(
                        body=body, title=title,
                        notify_type=notify_type) == notify_response

                    # Restore
                    asset.app_id = app_id
                    asset.app_desc = app_desc

                    if check_attachments:
                        # Test single attachment support; even if the service
                        # doesn't support attachments, it should still
                        # gracefully ignore the data
                        attach = os.path.join(TEST_VAR_DIR, 'apprise-test.gif')
                        assert obj.notify(
                            body=body, title=title,
                            notify_type=notify_type,
                            attach=attach) == attach_response

                        # Same results should apply to a list of attachments
                        attach = AppriseAttachment((
                            os.path.join(TEST_VAR_DIR, 'apprise-test.gif'),
                            os.path.join(TEST_VAR_DIR, 'apprise-test.png'),
                            os.path.join(TEST_VAR_DIR, 'apprise-test.jpeg'),
                        ))
                        assert obj.notify(
                            body=body, title=title,
                            notify_type=notify_type,
                            attach=attach) == attach_response

                else:
                    # Disable throttling
                    obj.request_rate_per_sec = 0

                    for _exception in REQUEST_EXCEPTIONS:
                        mock_post.side_effect = _exception
                        mock_get.side_effect = _exception

                        try:
                            assert obj.notify(
                                body=body, title=title,
                                notify_type=NotifyType.INFO) is False

                        except AssertionError:
                            # Don't mess with these entries
raise except Exception: # We can't handle this exception type raise except AssertionError: # Don't mess with these entries raise except Exception as e: # Check that we were expecting this exception to happen try: if not isinstance(e, response): raise e except TypeError: print('%s Unhandled response %s' % (url, type(e))) raise e # # Stage 2: without title defined # try: if test_requests_exceptions is False: # check that we're as expected assert obj.notify(body='body', notify_type=notify_type) \ == notify_response else: for _exception in REQUEST_EXCEPTIONS: mock_post.side_effect = _exception mock_get.side_effect = _exception try: assert obj.notify( body=body, notify_type=NotifyType.INFO) is False except AssertionError: # Don't mess with these entries raise except Exception: # We can't handle this exception type raise except AssertionError: # Don't mess with these entries raise except Exception as e: # Check that we were expecting this exception to happen if not isinstance(e, response): raise e # Tidy our object and allow any possible defined deconstructors to # be executed. del obj except AssertionError: # Don't mess with these entries print('%s AssertionError' % url) raise except Exception as e: # Handle our exception if instance is None: print('%s %s' % (url, str(e))) raise e if not isinstance(e, instance): print('%s %s' % (url, str(e))) raise e @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_boxcar_plugin(mock_post, mock_get): """ API: NotifyBoxcar() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Generate some generic message types device = 'A' * 64 tag = '@B' * 63 access = '-' * 64 secret = '_' * 64 # Initializes the plugin with recipients set to None plugins.NotifyBoxcar(access=access, secret=secret, targets=None) # Initializes the plugin with a valid access, but invalid access key with pytest.raises(TypeError): plugins.NotifyBoxcar(access=None, secret=secret, targets=None) # Initializes the plugin with a valid access, but invalid secret with pytest.raises(TypeError): plugins.NotifyBoxcar(access=access, secret=None, targets=None) # Initializes the plugin with recipients list # the below also tests our the variation of recipient types plugins.NotifyBoxcar( access=access, secret=secret, targets=[device, tag]) mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() mock_post.return_value.status_code = requests.codes.created mock_get.return_value.status_code = requests.codes.created # Test notifications without a body or a title p = plugins.NotifyBoxcar(access=access, secret=secret, targets=None) assert p.notify(body=None, title=None, notify_type=NotifyType.INFO) is True # Test comma, separate values device = 'a' * 64 p = plugins.NotifyBoxcar( access=access, secret=secret, targets=','.join([device, device, device])) assert len(p.device_tokens) == 3 @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_emby_plugin_login(mock_post, mock_get): """ API: NotifyEmby.login() """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Prepare Mock mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() obj = Apprise.instantiate('emby://l2g:l2gpass@localhost') assert isinstance(obj, plugins.NotifyEmby) # Test our exception handling for _exception in REQUEST_EXCEPTIONS: mock_post.side_effect = _exception mock_get.side_effect = _exception # We'll fail to log in each time assert obj.login() is False # Disable 
Exceptions mock_post.side_effect = None mock_get.side_effect = None # Our login flat out fails if we don't have proper parseable content mock_post.return_value.content = u'' mock_get.return_value.content = mock_post.return_value.content # KeyError handling mock_post.return_value.status_code = 999 mock_get.return_value.status_code = 999 assert obj.login() is False # General Internal Server Error mock_post.return_value.status_code = requests.codes.internal_server_error mock_get.return_value.status_code = requests.codes.internal_server_error assert obj.login() is False mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok obj = Apprise.instantiate('emby://l2g:l2gpass@localhost:1234') # Set a different port (outside of default) assert isinstance(obj, plugins.NotifyEmby) assert obj.port == 1234 # The login will fail because '' is not a parseable JSON response assert obj.login() is False # Disable the port completely obj.port = None assert obj.login() is False # Default port assignments obj = Apprise.instantiate('emby://l2g:l2gpass@localhost') assert isinstance(obj, plugins.NotifyEmby) assert obj.port == 8096 # The login will (still) fail because '' is not a parseable JSON response assert obj.login() is False # Our login flat out fails if we don't have proper parseable content mock_post.return_value.content = dumps({ u'AccessToken': u'0000-0000-0000-0000', }) mock_get.return_value.content = mock_post.return_value.content obj = Apprise.instantiate('emby://l2g:l2gpass@localhost') assert isinstance(obj, plugins.NotifyEmby) # The login will fail because the 'User' or 'Id' field wasn't parsed assert obj.login() is False # Our text content (we intentionally reverse the 2 locations # that store the same thing; we do this so we can test which # one it defaults to if both are present mock_post.return_value.content = dumps({ u'User': { u'Id': u'abcd123', }, u'Id': u'123abc', u'AccessToken': u'0000-0000-0000-0000', }) mock_get.return_value.content = mock_post.return_value.content obj = Apprise.instantiate('emby://l2g:l2gpass@localhost') assert isinstance(obj, plugins.NotifyEmby) # Login assert obj.login() is True assert obj.user_id == '123abc' assert obj.access_token == '0000-0000-0000-0000' # We're going to log in a second time which checks that we logout # first before logging in again. 
But this time we'll scrap the # 'Id' area and use the one found in the User area if detected mock_post.return_value.content = dumps({ u'User': { u'Id': u'abcd123', }, u'AccessToken': u'0000-0000-0000-0000', }) mock_get.return_value.content = mock_post.return_value.content # Login assert obj.login() is True assert obj.user_id == 'abcd123' assert obj.access_token == '0000-0000-0000-0000' @mock.patch('apprise.plugins.NotifyEmby.login') @mock.patch('apprise.plugins.NotifyEmby.logout') @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_emby_plugin_sessions(mock_post, mock_get, mock_logout, mock_login): """ API: NotifyEmby.sessions() """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Prepare Mock mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() # This is done so we don't obstruct our access_token and user_id values mock_login.return_value = True mock_logout.return_value = True obj = Apprise.instantiate('emby://l2g:l2gpass@localhost') assert isinstance(obj, plugins.NotifyEmby) obj.access_token = 'abc' obj.user_id = '123' # Test our exception handling for _exception in REQUEST_EXCEPTIONS: mock_post.side_effect = _exception mock_get.side_effect = _exception # We'll fail to log in each time sessions = obj.sessions() assert isinstance(sessions, dict) is True assert len(sessions) == 0 # Disable Exceptions mock_post.side_effect = None mock_get.side_effect = None # Our login flat out fails if we don't have proper parseable content mock_post.return_value.content = u'' mock_get.return_value.content = mock_post.return_value.content # KeyError handling mock_post.return_value.status_code = 999 mock_get.return_value.status_code = 999 sessions = obj.sessions() assert isinstance(sessions, dict) is True assert len(sessions) == 0 # General Internal Server Error mock_post.return_value.status_code = requests.codes.internal_server_error mock_get.return_value.status_code = requests.codes.internal_server_error sessions = obj.sessions() assert isinstance(sessions, dict) is True assert len(sessions) == 0 mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok mock_get.return_value.content = mock_post.return_value.content # Disable the port completely obj.port = None sessions = obj.sessions() assert isinstance(sessions, dict) is True assert len(sessions) == 0 # Let's get some results mock_post.return_value.content = dumps([ { u'Id': u'abc123', }, { u'Id': u'def456', }, { u'InvalidEntry': None, }, ]) mock_get.return_value.content = mock_post.return_value.content sessions = obj.sessions(user_controlled=True) assert isinstance(sessions, dict) is True assert len(sessions) == 2 # Test it without setting user-controlled sessions sessions = obj.sessions(user_controlled=False) assert isinstance(sessions, dict) is True assert len(sessions) == 2 # Triggers an authentication failure obj.user_id = None mock_login.return_value = False sessions = obj.sessions() assert isinstance(sessions, dict) is True assert len(sessions) == 0 @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_flock_plugin(mock_post, mock_get): """ API: NotifyFlock() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Initializes the plugin with an invalid token with pytest.raises(TypeError): plugins.NotifyFlock(token=None) # Whitespace also acts as an invalid token value with pytest.raises(TypeError): plugins.NotifyFlock(token=" ") def 
test_notify_gitter_plugin(): """ API: NotifyGitter() Extra Checks """ # Define our channels targets = ['apprise'] # Initializes the plugin with an invalid token with pytest.raises(TypeError): plugins.NotifyGitter(token=None, targets=targets) # Whitespace also acts as an invalid token value with pytest.raises(TypeError): plugins.NotifyGitter(token=" ", targets=targets) def test_notify_gotify_plugin(): """ API: NotifyGotify() Extra Checks """ # Initializes the plugin with an invalid token with pytest.raises(TypeError): plugins.NotifyGotify(token=None) # Whitespace also acts as an invalid token value with pytest.raises(TypeError): plugins.NotifyGotify(token=" ") def test_notify_lametric_plugin(): """ API: NotifyLametric() Extra Checks """ # Initializes the plugin with an invalid API Key with pytest.raises(TypeError): plugins.NotifyLametric(apikey=None, mode="device") # Initializes the plugin with an invalid Client Secret with pytest.raises(TypeError): plugins.NotifyLametric(client_id='valid', secret=None, mode="cloud") @mock.patch('requests.post') def test_notify_msg91_plugin(mock_post): """ API: NotifyMSG91() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Prepare our response response = requests.Request() response.status_code = requests.codes.ok # Prepare Mock mock_post.return_value = response # Initialize some generic (but valid) tokens # authkey = '{}'.format('a' * 24) target = '+1 (555) 123-3456' # No authkey specified with pytest.raises(TypeError): plugins.NotifyMSG91(authkey=None, targets=target) with pytest.raises(TypeError): plugins.NotifyMSG91(authkey=" ", targets=target) def test_notify_msteams_plugin(): """ API: NotifyMSTeams() Extra Checks """ # Initializes the plugin with an invalid token with pytest.raises(TypeError): plugins.NotifyMSTeams(token_a=None, token_b='abcd', token_c='abcd') # Whitespace also acts as an invalid token value with pytest.raises(TypeError): plugins.NotifyMSTeams(token_a=' ', token_b='abcd', token_c='abcd') with pytest.raises(TypeError): plugins.NotifyMSTeams(token_a='abcd', token_b=None, token_c='abcd') # Whitespace also acts as an invalid token value with pytest.raises(TypeError): plugins.NotifyMSTeams(token_a='abcd', token_b=' ', token_c='abcd') with pytest.raises(TypeError): plugins.NotifyMSTeams(token_a='abcd', token_b='abcd', token_c=None) # Whitespace also acts as an invalid token value with pytest.raises(TypeError): plugins.NotifyMSTeams(token_a='abcd', token_b='abcd', token_c=' ') def test_notify_prowl_plugin(): """ API: NotifyProwl() Extra Checks """ # Initializes the plugin with an invalid apikey with pytest.raises(TypeError): plugins.NotifyProwl(apikey=None) # Whitespace also acts as an invalid apikey value with pytest.raises(TypeError): plugins.NotifyProwl(apikey=' ') # Whitespace also acts as an invalid provider key with pytest.raises(TypeError): plugins.NotifyProwl(apikey='abcd', providerkey=object()) with pytest.raises(TypeError): plugins.NotifyProwl(apikey='abcd', providerkey=' ') @mock.patch('requests.post') def test_notify_sinch_plugin(mock_post): """ API: NotifySinch() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Prepare our response response = requests.Request() response.status_code = requests.codes.ok # Prepare Mock mock_post.return_value = response # Initialize some generic (but valid) tokens service_plan_id = '{}'.format('b' * 32) api_token = '{}'.format('b' * 32) source = '+1 (555) 123-3456' # No service_plan_id specified 
    with pytest.raises(TypeError):
        plugins.NotifySinch(
            service_plan_id=None, api_token=api_token, source=source)

    # No api_token specified
    with pytest.raises(TypeError):
        plugins.NotifySinch(
            service_plan_id=service_plan_id, api_token=None, source=source)

    # an error response
    response.status_code = 400
    response.content = dumps({
        'code': 21211,
        'message': "The 'To' number +1234567 is not a valid phone number.",
    })
    mock_post.return_value = response

    # Initialize our object
    obj = plugins.NotifySinch(
        service_plan_id=service_plan_id, api_token=api_token, source=source)

    # We will fail with the above error code
    assert obj.notify('title', 'body', 'info') is False


@mock.patch('requests.post')
def test_notify_twilio_plugin(mock_post):
    """
    API: NotifyTwilio() Extra Checks

    """
    # Disable Throttling to speed testing
    plugins.NotifyBase.request_rate_per_sec = 0

    # Prepare our response
    response = requests.Request()
    response.status_code = requests.codes.ok

    # Prepare Mock
    mock_post.return_value = response

    # Initialize some generic (but valid) tokens
    account_sid = 'AC{}'.format('b' * 32)
    auth_token = '{}'.format('b' * 32)
    source = '+1 (555) 123-3456'

    # No account_sid specified
    with pytest.raises(TypeError):
        plugins.NotifyTwilio(
            account_sid=None, auth_token=auth_token, source=source)

    # No auth_token specified
    with pytest.raises(TypeError):
        plugins.NotifyTwilio(
            account_sid=account_sid, auth_token=None, source=source)

    # an error response
    response.status_code = 400
    response.content = dumps({
        'code': 21211,
        'message': "The 'To' number +1234567 is not a valid phone number.",
    })
    mock_post.return_value = response

    # Initialize our object
    obj = plugins.NotifyTwilio(
        account_sid=account_sid, auth_token=auth_token, source=source)

    # We will fail with the above error code
    assert obj.notify('title', 'body', 'info') is False


@mock.patch('requests.post')
def test_notify_nexmo_plugin(mock_post):
    """
    API: NotifyNexmo() Extra Checks

    """
    # Disable Throttling to speed testing
    plugins.NotifyBase.request_rate_per_sec = 0

    # Prepare our response
    response = requests.Request()
    response.status_code = requests.codes.ok

    # Prepare Mock
    mock_post.return_value = response

    # Initialize some generic (but valid) tokens
    apikey = 'AC{}'.format('b' * 8)
    secret = '{}'.format('b' * 16)
    source = '+1 (555) 123-3456'

    # No apikey specified
    with pytest.raises(TypeError):
        plugins.NotifyNexmo(apikey=None, secret=secret, source=source)

    with pytest.raises(TypeError):
        plugins.NotifyNexmo(apikey=" ", secret=secret, source=source)

    # No secret specified
    with pytest.raises(TypeError):
        plugins.NotifyNexmo(apikey=apikey, secret=None, source=source)

    with pytest.raises(TypeError):
        plugins.NotifyNexmo(apikey=apikey, secret=" ", source=source)

    # an error response
    response.status_code = 400
    response.content = dumps({
        'code': 21211,
        'message': "The 'To' number +1234567 is not a valid phone number.",
    })
    mock_post.return_value = response

    # Initialize our object
    obj = plugins.NotifyNexmo(
        apikey=apikey, secret=secret, source=source)

    # We will fail with the above error code
    assert obj.notify('title', 'body', 'info') is False


@mock.patch('apprise.plugins.NotifyEmby.login')
@mock.patch('requests.get')
@mock.patch('requests.post')
def test_notify_emby_plugin_logout(mock_post, mock_get, mock_login):
    """
    API: NotifyEmby.logout()

    """
    # Disable Throttling to speed testing
    plugins.NotifyBase.request_rate_per_sec = 0

    # Prepare Mock
    mock_get.return_value = requests.Request()
    mock_post.return_value = requests.Request()

    # This is done so we don't obstruct our access_token and user_id values
mock_login.return_value = True obj = Apprise.instantiate('emby://l2g:l2gpass@localhost') assert isinstance(obj, plugins.NotifyEmby) obj.access_token = 'abc' obj.user_id = '123' # Test our exception handling for _exception in REQUEST_EXCEPTIONS: mock_post.side_effect = _exception mock_get.side_effect = _exception # We'll fail to log in each time obj.logout() obj.access_token = 'abc' obj.user_id = '123' # Disable Exceptions mock_post.side_effect = None mock_get.side_effect = None # Our login flat out fails if we don't have proper parseable content mock_post.return_value.content = u'' mock_get.return_value.content = mock_post.return_value.content # KeyError handling mock_post.return_value.status_code = 999 mock_get.return_value.status_code = 999 obj.logout() obj.access_token = 'abc' obj.user_id = '123' # General Internal Server Error mock_post.return_value.status_code = requests.codes.internal_server_error mock_get.return_value.status_code = requests.codes.internal_server_error obj.logout() obj.access_token = 'abc' obj.user_id = '123' mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok mock_get.return_value.content = mock_post.return_value.content # Disable the port completely obj.port = None # Perform logout obj.logout() # Calling logout on an object already logged out obj.logout() # Test Python v3.5 LookupError Bug: https://bugs.python.org/issue29288 mock_post.side_effect = LookupError() mock_get.side_effect = LookupError() obj.access_token = 'abc' obj.user_id = '123' # Tidy object del obj @mock.patch('apprise.plugins.NotifyEmby.sessions') @mock.patch('apprise.plugins.NotifyEmby.login') @mock.patch('apprise.plugins.NotifyEmby.logout') @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_emby_plugin_notify(mock_post, mock_get, mock_logout, mock_login, mock_sessions): """ API: NotifyEmby.notify() """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 req = requests.Request() req.status_code = requests.codes.ok req.content = '' mock_get.return_value = req mock_post.return_value = req # This is done so we don't obstruct our access_token and user_id values mock_login.return_value = True mock_logout.return_value = True mock_sessions.return_value = {'abcd': {}} obj = Apprise.instantiate('emby://l2g:l2gpass@localhost?modal=False') assert isinstance(obj, plugins.NotifyEmby) assert obj.notify('title', 'body', 'info') is True obj.access_token = 'abc' obj.user_id = '123' # Test Modal support obj = Apprise.instantiate('emby://l2g:l2gpass@localhost?modal=True') assert isinstance(obj, plugins.NotifyEmby) assert obj.notify('title', 'body', 'info') is True obj.access_token = 'abc' obj.user_id = '123' # Test our exception handling for _exception in REQUEST_EXCEPTIONS: mock_post.side_effect = _exception mock_get.side_effect = _exception # We'll fail to log in each time assert obj.notify('title', 'body', 'info') is False # Disable Exceptions mock_post.side_effect = None mock_get.side_effect = None # Our login flat out fails if we don't have proper parseable content mock_post.return_value.content = u'' mock_get.return_value.content = mock_post.return_value.content # KeyError handling mock_post.return_value.status_code = 999 mock_get.return_value.status_code = 999 assert obj.notify('title', 'body', 'info') is False # General Internal Server Error mock_post.return_value.status_code = requests.codes.internal_server_error mock_get.return_value.status_code = requests.codes.internal_server_error assert 
obj.notify('title', 'body', 'info') is False mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok mock_get.return_value.content = mock_post.return_value.content # Disable the port completely obj.port = None assert obj.notify('title', 'body', 'info') is True # An Empty return set (no query is made, but notification will still # succeed mock_sessions.return_value = {} assert obj.notify('title', 'body', 'info') is True # Tidy our object del obj @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_ifttt_plugin(mock_post, mock_get): """ API: NotifyIFTTT() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Initialize some generic (but valid) tokens webhook_id = 'webhook_id' events = ['event1', 'event2'] # Prepare Mock mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok mock_get.return_value.content = '{}' mock_post.return_value.content = '{}' # No webhook_id specified with pytest.raises(TypeError): plugins.NotifyIFTTT(webhook_id=None, events=None) # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Initializes the plugin with an invalid webhook id with pytest.raises(TypeError): plugins.NotifyIFTTT(webhook_id=None, events=events) # Whitespace also acts as an invalid webhook id with pytest.raises(TypeError): plugins.NotifyIFTTT(webhook_id=" ", events=events) # No events specified with pytest.raises(TypeError): plugins.NotifyIFTTT(webhook_id=webhook_id, events=None) obj = plugins.NotifyIFTTT(webhook_id=webhook_id, events=events) assert isinstance(obj, plugins.NotifyIFTTT) is True assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is True # Test the addition of tokens obj = plugins.NotifyIFTTT( webhook_id=webhook_id, events=events, add_tokens={'Test': 'ValueA', 'Test2': 'ValueB'}) assert isinstance(obj, plugins.NotifyIFTTT) is True assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is True # Invalid del_tokens entry with pytest.raises(TypeError): plugins.NotifyIFTTT( webhook_id=webhook_id, events=events, del_tokens=plugins.NotifyIFTTT.ifttt_default_title_key) assert isinstance(obj, plugins.NotifyIFTTT) is True assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is True # Test removal of tokens by a list obj = plugins.NotifyIFTTT( webhook_id=webhook_id, events=events, add_tokens={ 'MyKey': 'MyValue' }, del_tokens=( plugins.NotifyIFTTT.ifttt_default_title_key, plugins.NotifyIFTTT.ifttt_default_body_key, plugins.NotifyIFTTT.ifttt_default_type_key)) assert isinstance(obj, plugins.NotifyIFTTT) is True assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is True # Test removal of tokens as dict obj = plugins.NotifyIFTTT( webhook_id=webhook_id, events=events, add_tokens={ 'MyKey': 'MyValue' }, del_tokens={ plugins.NotifyIFTTT.ifttt_default_title_key: None, plugins.NotifyIFTTT.ifttt_default_body_key: None, plugins.NotifyIFTTT.ifttt_default_type_key: None}) assert isinstance(obj, plugins.NotifyIFTTT) is True @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_join_plugin(mock_post, mock_get): """ API: NotifyJoin() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Generate some generic message types device = 'A' * 32 group = 'group.chrome' apikey 
= 'a' * 32 # Initializes the plugin with devices set to a string plugins.NotifyJoin(apikey=apikey, targets=group) # Initializes the plugin with devices set to None plugins.NotifyJoin(apikey=apikey, targets=None) # Initializes the plugin with an invalid apikey with pytest.raises(TypeError): plugins.NotifyJoin(apikey=None) # Whitespace also acts as an invalid apikey with pytest.raises(TypeError): plugins.NotifyJoin(apikey=" ") # Initializes the plugin with devices set to a set p = plugins.NotifyJoin(apikey=apikey, targets=[group, device]) # Prepare our mock responses req = requests.Request() req.status_code = requests.codes.created req.content = '' mock_get.return_value = req mock_post.return_value = req # Test notifications without a body or a title; nothing to send # so we return False p.notify(body=None, title=None, notify_type=NotifyType.INFO) is False def test_notify_kumulos_plugin(): """ API: NotifyKumulos() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Invalid API Key with pytest.raises(TypeError): plugins.NotifyKumulos(None, None) with pytest.raises(TypeError): plugins.NotifyKumulos(" ", None) # Invalid Server Key with pytest.raises(TypeError): plugins.NotifyKumulos("abcd", None) with pytest.raises(TypeError): plugins.NotifyKumulos("abcd", " ") def test_notify_mattermost_plugin(): """ API: NotifyMatterMost() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Invalid Authorization Token with pytest.raises(TypeError): plugins.NotifyMatterMost(None) with pytest.raises(TypeError): plugins.NotifyMatterMost(" ") @mock.patch('requests.post') def test_notify_messagebird_plugin(mock_post): """ API: NotifyMessageBird() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Prepare our response response = requests.Request() response.status_code = requests.codes.ok # Prepare Mock mock_post.return_value = response # Initialize some generic (but valid) tokens # authkey = '{}'.format('a' * 24) source = '+1 (555) 123-3456' # No apikey specified with pytest.raises(TypeError): plugins.NotifyMessageBird(apikey=None, source=source) with pytest.raises(TypeError): plugins.NotifyMessageBird(apikey=" ", source=source) def test_notify_pover_plugin(): """ API: NotifyPushover() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # No token with pytest.raises(TypeError): plugins.NotifyPushover(token=None) def test_notify_ryver_plugin(): """ API: NotifyRyver() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # No token with pytest.raises(TypeError): plugins.NotifyRyver(organization="abc", token=None) with pytest.raises(TypeError): plugins.NotifyRyver(organization="abc", token=" ") # No organization with pytest.raises(TypeError): plugins.NotifyRyver(organization=None, token="abc") with pytest.raises(TypeError): plugins.NotifyRyver(organization=" ", token="abc") def test_notify_simplepush_plugin(): """ API: NotifySimplePush() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # No token with pytest.raises(TypeError): plugins.NotifySimplePush(apikey=None) with pytest.raises(TypeError): plugins.NotifySimplePush(apikey=" ") # Bad event with pytest.raises(TypeError): plugins.NotifySimplePush(apikey="abc", event=object) with pytest.raises(TypeError): plugins.NotifySimplePush(apikey="abc", event=" ") def 
test_notify_zulip_plugin(): """ API: NotifyZulip() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # must be 32 characters long token = 'a' * 32 # Invalid organization with pytest.raises(TypeError): plugins.NotifyZulip( botname='test', organization='#', token=token) @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_sendgrid_plugin(mock_post, mock_get): """ API: NotifySendGrid() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # no apikey with pytest.raises(TypeError): plugins.NotifySendGrid( apikey=None, from_email='[email protected]') # invalid from email with pytest.raises(TypeError): plugins.NotifySendGrid( apikey='abcd', from_email='!invalid') # no email with pytest.raises(TypeError): plugins.NotifySendGrid(apikey='abcd', from_email=None) # Invalid To email address plugins.NotifySendGrid( apikey='abcd', from_email='[email protected]', targets="!invalid") # Test invalid bcc/cc entries mixed with good ones assert isinstance(plugins.NotifySendGrid( apikey='abcd', from_email='[email protected]', bcc=('[email protected]', '!invalid'), cc=('[email protected]', '!invalid')), plugins.NotifySendGrid) @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_pushbullet_plugin(mock_post, mock_get): """ API: NotifyPushBullet() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Initialize some generic (but valid) tokens accesstoken = 'a' * 32 # Support strings recipients = '#chan1,#chan2,device,[email protected],,,' # Prepare Mock mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok # Invalid Access Token with pytest.raises(TypeError): plugins.NotifyPushBullet(accesstoken=None) with pytest.raises(TypeError): plugins.NotifyPushBullet(accesstoken=" ") obj = plugins.NotifyPushBullet( accesstoken=accesstoken, targets=recipients) assert isinstance(obj, plugins.NotifyPushBullet) is True assert len(obj.targets) == 4 obj = plugins.NotifyPushBullet(accesstoken=accesstoken) assert isinstance(obj, plugins.NotifyPushBullet) is True # Default is to send to all devices, so there will be a # recipient here assert len(obj.targets) == 1 obj = plugins.NotifyPushBullet(accesstoken=accesstoken, targets=set()) assert isinstance(obj, plugins.NotifyPushBullet) is True # Default is to send to all devices, so there will be a # recipient here assert len(obj.targets) == 1 @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_pushed_plugin(mock_post, mock_get): """ API: NotifyPushed() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Chat ID recipients = '@ABCDEFG, @DEFGHIJ, #channel, #channel2' # Some required input app_key = 'ABCDEFG' app_secret = 'ABCDEFG' # Prepare Mock mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok # No application Key specified with pytest.raises(TypeError): plugins.NotifyPushed( app_key=None, app_secret=app_secret, recipients=None, ) with pytest.raises(TypeError): plugins.NotifyPushed( app_key=" ", app_secret=app_secret, recipients=None, ) # No application Secret specified with pytest.raises(TypeError): plugins.NotifyPushed( app_key=app_key, app_secret=None, 
recipients=None, ) with pytest.raises(TypeError): plugins.NotifyPushed( app_key=app_key, app_secret=" ", ) # recipients list set to (None) is perfectly fine; in this case it will # notify the App obj = plugins.NotifyPushed( app_key=app_key, app_secret=app_secret, recipients=None, ) assert isinstance(obj, plugins.NotifyPushed) is True assert len(obj.channels) == 0 assert len(obj.users) == 0 obj = plugins.NotifyPushed( app_key=app_key, app_secret=app_secret, targets=recipients, ) assert isinstance(obj, plugins.NotifyPushed) is True assert len(obj.channels) == 2 assert len(obj.users) == 2 # Prepare Mock to fail mock_post.return_value.status_code = requests.codes.internal_server_error mock_get.return_value.status_code = requests.codes.internal_server_error def test_notify_pushjet_plugin(): """ API: NotifyPushjet() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # No application Key specified with pytest.raises(TypeError): plugins.NotifyPushjet(secret_key=None) with pytest.raises(TypeError): plugins.NotifyPushjet(secret_key=" ") @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_pushover_plugin(mock_post, mock_get): """ API: NotifyPushover() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Initialize some generic (but valid) tokens token = 'a' * 30 user_key = 'u' * 30 invalid_device = 'd' * 35 # Support strings devices = 'device1,device2,,,,%s' % invalid_device # Prepare Mock mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() mock_post.return_value.status_code = requests.codes.ok mock_get.return_value.status_code = requests.codes.ok # No webhook id specified with pytest.raises(TypeError): plugins.NotifyPushover(user_key=user_key, webhook_id=None) obj = plugins.NotifyPushover( user_key=user_key, token=token, targets=devices) assert isinstance(obj, plugins.NotifyPushover) is True assert len(obj.targets) == 3 # This call fails because there is 1 invalid device assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is False obj = plugins.NotifyPushover(user_key=user_key, token=token) assert isinstance(obj, plugins.NotifyPushover) is True # Default is to send to all devices, so there will be a # device defined here assert len(obj.targets) == 1 # This call succeeds because all of the devices are valid assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is True obj = plugins.NotifyPushover(user_key=user_key, token=token, targets=set()) assert isinstance(obj, plugins.NotifyPushover) is True # Default is to send to all devices, so there will be a # device defined here assert len(obj.targets) == 1 # No User Key specified with pytest.raises(TypeError): plugins.NotifyPushover(user_key=None, token="abcd") # No Access Token specified with pytest.raises(TypeError): plugins.NotifyPushover(user_key="abcd", token=None) with pytest.raises(TypeError): plugins.NotifyPushover(user_key="abcd", token=" ") @mock.patch('requests.get') @mock.patch('requests.post') def test_notify_rocketchat_plugin(mock_post, mock_get): """ API: NotifyRocketChat() Extra Checks """ # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Chat ID recipients = 'AbcD1245, @l2g, @lead2gold, #channel, #channel2' # Authentication user = 'myuser' password = 'mypass' # Prepare Mock mock_get.return_value = requests.Request() mock_post.return_value = requests.Request() mock_post.return_value.status_code = 
requests.codes.ok mock_get.return_value.status_code = requests.codes.ok mock_post.return_value.content = '' mock_get.return_value.content = '' obj = plugins.NotifyRocketChat( user=user, password=password, targets=recipients) assert isinstance(obj, plugins.NotifyRocketChat) is True assert len(obj.channels) == 2 assert len(obj.users) == 2 assert len(obj.rooms) == 1 # No Webhook specified with pytest.raises(TypeError): obj = plugins.NotifyRocketChat(webhook=None, mode='webhook') # # Logout # assert obj.logout() is True # Invalid JSON during Login mock_post.return_value.content = '{' mock_get.return_value.content = '}' assert obj.login() is False # Prepare Mock to fail mock_post.return_value.content = '' mock_get.return_value.content = '' mock_post.return_value.status_code = requests.codes.internal_server_error mock_get.return_value.status_code = requests.codes.internal_server_error # # Send Notification # assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is False assert obj._send(payload='test', notify_type=NotifyType.INFO) is False # # Logout # assert obj.logout() is False # KeyError handling mock_post.return_value.status_code = 999 mock_get.return_value.status_code = 999 # # Send Notification # assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is False assert obj._send(payload='test', notify_type=NotifyType.INFO) is False # # Logout # assert obj.logout() is False # Generate exceptions mock_get.side_effect = requests.ConnectionError( 0, 'requests.ConnectionError() not handled') mock_post.side_effect = mock_get.side_effect # # Send Notification # assert obj._send(payload='test', notify_type=NotifyType.INFO) is False # Attempt the check again but fake a successful login obj.login = mock.Mock() obj.login.return_value = True assert obj.notify( body='body', title='title', notify_type=NotifyType.INFO) is False # # Logout # assert obj.logout() is False def test_notify_overflow_truncate(): """ API: Overflow Truncate Functionality Testing """ # # A little preparation # # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Number of characters per line row = 24 # Some variables we use to control the data we work with body_len = 1024 title_len = 1024 # Create a large body and title with random data body = ''.join(choice(str_alpha + str_num + ' ') for _ in range(body_len)) body = '\r\n'.join([body[i: i + row] for i in range(0, len(body), row)]) # the new lines add a large amount to our body; lets force the content # back to being 1024 characters. body = body[0:1024] # Create our title using random data title = ''.join(choice(str_alpha + str_num) for _ in range(title_len)) # # First Test: Truncated Title # class TestNotification(NotifyBase): # Test title max length title_maxlen = 10 def __init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # We should throw an exception because our specified overflow is wrong. 
with pytest.raises(TypeError): # Load our object obj = TestNotification(overflow='invalid') # Load our object obj = TestNotification(overflow=OverflowMode.TRUNCATE) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched chunks = obj._apply_overflow(body=body, title=title, overflow=None) chunks = obj._apply_overflow( body=body, title=title, overflow=OverflowMode.SPLIT) assert len(chunks) == 1 assert body.rstrip() == chunks[0].get('body') assert title[0:TestNotification.title_maxlen] == chunks[0].get('title') # # Next Test: Line Count Control # class TestNotification(NotifyBase): # Test title max length title_maxlen = 5 # Maximum number of lines body_max_line_count = 5 def __init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # Load our object obj = TestNotification(overflow=OverflowMode.TRUNCATE) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched chunks = obj._apply_overflow(body=body, title=title) assert len(chunks) == 1 assert len(chunks[0].get('body').split('\n')) == \ TestNotification.body_max_line_count assert title[0:TestNotification.title_maxlen] == chunks[0].get('title') # # Next Test: Truncated body # class TestNotification(NotifyBase): # Test title max length title_maxlen = title_len # Enforce a body length of just 10 body_maxlen = 10 def __init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # Load our object obj = TestNotification(overflow=OverflowMode.TRUNCATE) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched chunks = obj._apply_overflow(body=body, title=title) assert len(chunks) == 1 assert body[0:TestNotification.body_maxlen] == chunks[0].get('body') assert title == chunks[0].get('title') # # Next Test: Append title to body + Truncated body # class TestNotification(NotifyBase): # Enforce no title title_maxlen = 0 # Enforce a body length of just 100 body_maxlen = 100 def __init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # Load our object obj = TestNotification(overflow=OverflowMode.TRUNCATE) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched obj.notify_format = NotifyFormat.HTML chunks = obj._apply_overflow(body=body, title=title) assert len(chunks) == 1 obj.notify_format = NotifyFormat.MARKDOWN chunks = obj._apply_overflow(body=body, title=title) assert len(chunks) == 1 obj.notify_format = NotifyFormat.TEXT chunks = obj._apply_overflow(body=body, title=title) assert len(chunks) == 1 # The below line should be read carefully... We're actually testing to see # that our title is matched against our body. Behind the scenes, the title # was appended to the body. The body was then truncated to the maxlen. # The thing is, since the title is so large, all of the body was lost # and a good chunk of the title was too. 
The message sent will just be a # small portion of the title assert len(chunks[0].get('body')) == TestNotification.body_maxlen assert title[0:TestNotification.body_maxlen] == chunks[0].get('body') def test_notify_overflow_split(): """ API: Overflow Split Functionality Testing """ # # A little preparation # # Disable Throttling to speed testing plugins.NotifyBase.request_rate_per_sec = 0 # Number of characters per line row = 24 # Some variables we use to control the data we work with body_len = 1024 title_len = 1024 # Create a large body and title with random data body = ''.join(choice(str_alpha + str_num) for _ in range(body_len)) body = '\r\n'.join([body[i: i + row] for i in range(0, len(body), row)]) # the new lines add a large amount to our body; lets force the content # back to being 1024 characters. body = body[0:1024] # Create our title using random data title = ''.join(choice(str_alpha + str_num) for _ in range(title_len)) # # First Test: Truncated Title # class TestNotification(NotifyBase): # Test title max length title_maxlen = 10 def __init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # Load our object obj = TestNotification(overflow=OverflowMode.SPLIT) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched chunks = obj._apply_overflow(body=body, title=title) assert len(chunks) == 1 assert body == chunks[0].get('body') assert title[0:TestNotification.title_maxlen] == chunks[0].get('title') # # Next Test: Line Count Control # class TestNotification(NotifyBase): # Test title max length title_maxlen = 5 # Maximum number of lines body_max_line_count = 5 def __init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # Load our object obj = TestNotification(overflow=OverflowMode.SPLIT) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched chunks = obj._apply_overflow(body=body, title=title) assert len(chunks) == 1 assert len(chunks[0].get('body').split('\n')) == \ TestNotification.body_max_line_count assert title[0:TestNotification.title_maxlen] == chunks[0].get('title') # # Next Test: Split body # class TestNotification(NotifyBase): # Test title max length title_maxlen = title_len # Enforce a body length # Wrap in int() so Python v3 doesn't convert the response into a float body_maxlen = int(body_len / 4) def __init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # Load our object obj = TestNotification(overflow=OverflowMode.SPLIT) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched chunks = obj._apply_overflow(body=body, title=title) offset = 0 assert len(chunks) == 4 for chunk in chunks: # Our title never changes assert title == chunk.get('title') # Our body is only broken up; not lost _body = chunk.get('body') assert body[offset: len(_body) + offset].rstrip() == _body offset += len(_body) # # Next Test: Append title to body + split body # class TestNotification(NotifyBase): # Enforce no title title_maxlen = 0 # Enforce a body length based on the title # Wrap in int() so Python v3 doesn't convert the response into a float body_maxlen = int(title_len / 4) def 
__init__(self, *args, **kwargs): super(TestNotification, self).__init__(**kwargs) def notify(self, *args, **kwargs): # Pretend everything is okay return True # Load our object obj = TestNotification(overflow=OverflowMode.SPLIT) assert obj is not None # Verify that we break the title to a max length of our title_max # and that the body remains untouched chunks = obj._apply_overflow(body=body, title=title) # Our final product is that our title has been appended to our body to # create one great big body. As a result we'll get quite a few lines back # now. offset = 0 # Our body will look like this in small chunks at the end of the day bulk = title + '\r\n' + body # Due to the new line added to the end assert len(chunks) == ( # wrap division in int() so Python 3 doesn't convert it to a float on # us int(len(bulk) / TestNotification.body_maxlen) + (1 if len(bulk) % TestNotification.body_maxlen else 0)) for chunk in chunks: # Our title is empty every time assert chunk.get('title') == '' _body = chunk.get('body') assert bulk[offset: len(_body) + offset] == _body offset += len(_body)
py
1a4065a91fb430057ccf6b4ccd911b5070d60b6b
from django.contrib.auth import get_user_model, authenticate
from django.utils.translation import ugettext_lazy as _

from rest_framework import serializers


class UserSerializer(serializers.ModelSerializer):
    """Serializer for the users object"""

    class Meta:
        model = get_user_model()
        fields = ('email', 'password', 'name')
        extra_kwargs = {'password': {'write_only': True, 'min_length': 5}}

    def create(self, validated_data):
        """Create a new user with encrypted password and return it"""
        return get_user_model().objects.create_user(**validated_data)

    def update(self, instance, validated_data):
        """Update a user, setting the password correctly and return it"""
        password = validated_data.pop('password', None)
        user = super().update(instance, validated_data)

        if password:
            user.set_password(password)
            user.save()

        return user


class AuthTokenSerializer(serializers.Serializer):
    """Serializer for the user authentication object"""
    email = serializers.CharField()
    password = serializers.CharField(
        style={'input_type': 'password'},
        trim_whitespace=False
    )

    def validate(self, attrs):
        """Validate and authenticate the user"""
        email = attrs.get('email')
        password = attrs.get('password')

        user = authenticate(
            request=self.context.get('request'),
            username=email,
            password=password
        )
        if not user:
            msg = _('Unable to authenticate with provided credentials')
            raise serializers.ValidationError(msg, code='authentication')

        attrs['user'] = user
        return attrs
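# Illustrative usage sketch (not part of the original module): exercising the
# serializer directly, assuming Django settings are configured and the custom
# user model exposes `email`, `password` and `name` as declared above. The
# payload values are placeholders.
#
#     serializer = UserSerializer(data={'email': 'user@example.com',
#                                       'password': 'secret123',
#                                       'name': 'Test'})
#     if serializer.is_valid():
#         user = serializer.save()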
py
1a4065e06984206f93ec7f81d8afbd3344a43f35
# coding:utf-8 import os import logging import datetime import requests import json from pagarme.config import __endpoint__, __user_agent__ from pagarme.common import merge_dict, make_url from pagarme import exceptions logger = logging.getLogger('pygarme') class PagarmeApi(object): def __init__(self, options=None, **kwargs): """`PagarmeApi`:class: Creates an API object """ kwargs = merge_dict(options or {}, kwargs) self.endpoint = kwargs.get('endpoint', self.default_endpoint) self.apikey = kwargs.get('api_key') self.encryption_key = kwargs.get('encryption_key') if not self.apikey or not self.encryption_key: raise exceptions.NullAPIKeyError('The `api_key` and `encryption_key` must be set.') @property def default_endpoint(self): """Returns the default endpoint """ return __endpoint__ @property def default_user_agent(self): """Returns the api user agent """ return __user_agent__ @property def default_headers(self): """Returns the default headers """ return { "Content-Type": "application/json", "Accept": "application/json", "User-Agent": self.default_user_agent } def request(self, url, method, data=None, headers=None): """Makes a HTTP call, formats response and does error handling. """ http_headers = merge_dict(self.default_headers, headers or {}) request_data = merge_dict({'api_key': self.apikey}, data or {}) logger.info('HTTP %s REQUEST TO %s' % (method, url)) start = datetime.datetime.now() try: response = requests.request(method=method, url=url, data=json.dumps(request_data), headers=http_headers) except exceptions.BadRequestError as e: return json.loads({'errors': e.content}) duration = datetime.datetime.now() - start logger.info('RESPONSE %s DURATION %s.%s' % (response.encoding, duration.seconds, duration.microseconds)) return json.loads(response.content) if response.content else {} def get(self, action, params=None, headers=None): """Makes a GET request """ return self.request(make_url(self.endpoint, action), method='GET', data=params, headers=headers) def post(self, action, data=None, headers=None): """Makes a GET request """ return self.request(make_url(self.endpoint, action), method='POST', data=data, headers=headers) def put(self, action, data=None, headers=None): """Makes a GET request """ return self.request(make_url(self.endpoint, action), method='PUT', data=data, headers=headers) def delete(self, action, headers=None): """Makes a GET request """ return self.request(make_url(self.endpoint, action), method='DELETE', headers=headers) __default_api__ = None def default_api(): global __default_api__ if __default_api__ is None: try: api_key = os.environ["PAGARME_API_KEY"] encryption_key = os.environ["PAGARME_ENCRYPTION_KEY"] except KeyError: raise exceptions.NullAPIKeyError("Required PAGARME_API_KEY and PAGARME_ENCRYPTION_KEY") __default_api__ = PagarmeApi(api_key=api_key, encryption_key=encryption_key) return __default_api__ def configure(**kwargs): global __default_api__ __default_api__ = PagarmeApi(**kwargs) return __default_api__
py
1a40664d402a15eaf7973f32caf28fdae1d1984d
"""Kata url: https://www.codewars.com/kata/57a55c8b72292d057b000594.""" def reverse(st: str) -> str: return ' '.join(reversed(st.split()))
py
1a4066fc5356414dfa1514b308404fb956c94bd3
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art


art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 12, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 0);
py
1a40676094d9241b79bf64eb6029968002806e10
#!/usr/bin/python # -*- coding: utf-8 -*- try: from PyQt5.QtGui import * from PyQt5.QtCore import * except ImportError: from PyQt4.QtGui import * from PyQt4.QtCore import * from libs.utils import distance import sys DEFAULT_LINE_COLOR = QColor(0, 255, 0, 128) DEFAULT_FILL_COLOR = QColor(255, 0, 0, 128) DEFAULT_SELECT_LINE_COLOR = QColor(255, 255, 255) DEFAULT_SELECT_FILL_COLOR = QColor(0, 128, 255, 155) DEFAULT_VERTEX_FILL_COLOR = QColor(0, 255, 0, 255) DEFAULT_HVERTEX_FILL_COLOR = QColor(255, 0, 0) MIN_Y_LABEL = 10 class Shape(object): P_SQUARE, P_ROUND = range(2) MOVE_VERTEX, NEAR_VERTEX = range(2) # The following class variables influence the drawing # of _all_ shape objects. line_color = DEFAULT_LINE_COLOR fill_color = DEFAULT_FILL_COLOR select_line_color = DEFAULT_SELECT_LINE_COLOR select_fill_color = DEFAULT_SELECT_FILL_COLOR vertex_fill_color = DEFAULT_VERTEX_FILL_COLOR hvertex_fill_color = DEFAULT_HVERTEX_FILL_COLOR point_type = P_ROUND point_size = 8 scale = 1.0 def __init__(self, label=None, line_color=None, difficult=False, paintLabel=False): self.label = label self.points = [] self.fill = False self.selected = False self.difficult = difficult self.paintLabel = paintLabel self._highlightIndex = None self._highlightMode = self.NEAR_VERTEX self._highlightSettings = { self.NEAR_VERTEX: (4, self.P_ROUND), self.MOVE_VERTEX: (1.5, self.P_SQUARE), } self._closed = False if line_color is not None: # Override the class line_color attribute # with an object attribute. Currently this # is used for drawing the pending line a different color. self.line_color = line_color def close(self): self._closed = True def setPoints(self, points): self.points = [] for p in points: self.points.append(QPointF(p[0],p[1])) def reachMaxPoints(self): if len(self.points) >= 4: return True return False def addPoint(self, point): if not self.reachMaxPoints(): self.points.append(point) def popPoint(self): if self.points: return self.points.pop() return None def isClosed(self): return self._closed def setOpen(self): self._closed = False def paint(self, painter): if self.points: color = self.select_line_color if self.selected else self.line_color pen = QPen(color) # Try using integer sizes for smoother drawing(?) pen.setWidth(max(1, int(round(2.0 / self.scale)))) painter.setPen(pen) line_path = QPainterPath() vrtx_path = QPainterPath() line_path.moveTo(self.points[0]) # Uncommenting the following line will draw 2 paths # for the 1st vertex, and make it non-filled, which # may be desirable. 
#self.drawVertex(vrtx_path, 0) for i, p in enumerate(self.points): line_path.lineTo(p) self.drawVertex(vrtx_path, i) if self.isClosed(): line_path.lineTo(self.points[0]) painter.drawPath(line_path) painter.drawPath(vrtx_path) painter.fillPath(vrtx_path, self.vertex_fill_color) # Draw text at the top-left if self.paintLabel: min_x = sys.maxsize min_y = sys.maxsize for point in self.points: min_x = min(min_x, point.x()) min_y = min(min_y, point.y()) if min_x != sys.maxsize and min_y != sys.maxsize: font = QFont() font.setPointSize(8) font.setBold(True) painter.setFont(font) if(self.label == None): self.label = "" if(min_y < MIN_Y_LABEL): min_y += MIN_Y_LABEL painter.drawText(min_x, min_y, self.label) if self.fill: color = self.select_fill_color if self.selected else self.fill_color painter.fillPath(line_path, color) def drawVertex(self, path, i): d = self.point_size / self.scale shape = self.point_type point = self.points[i] if i == self._highlightIndex: size, shape = self._highlightSettings[self._highlightMode] d *= size if self._highlightIndex is not None: self.vertex_fill_color = self.hvertex_fill_color else: self.vertex_fill_color = Shape.vertex_fill_color if shape == self.P_SQUARE: path.addRect(point.x() - d / 2, point.y() - d / 2, d, d) elif shape == self.P_ROUND: path.addEllipse(point, d / 2.0, d / 2.0) else: assert False, "unsupported vertex shape" def nearestVertex(self, point, epsilon): for i, p in enumerate(self.points): if distance(p - point) <= epsilon: return i return None def containsPoint(self, point): return self.makePath().contains(point) def makePath(self): path = QPainterPath(self.points[0]) for p in self.points[1:]: path.lineTo(p) return path def boundingRect(self): return self.makePath().boundingRect() def moveBy(self, offset): self.points = [p + offset for p in self.points] def moveVertexBy(self, i, offset): self.points[i] = self.points[i] + offset def highlightVertex(self, i, action): self._highlightIndex = i self._highlightMode = action def highlightClear(self): self._highlightIndex = None def copy(self): shape = Shape("%s" % self.label) shape.points = [p for p in self.points] shape.fill = self.fill shape.selected = self.selected shape._closed = self._closed if self.line_color != Shape.line_color: shape.line_color = self.line_color if self.fill_color != Shape.fill_color: shape.fill_color = self.fill_color shape.difficult = self.difficult return shape def __len__(self): return len(self.points) def __getitem__(self, key): return self.points[key] def __setitem__(self, key, value): self.points[key] = value
py
1a4067dab6e072781d43dab3a3080338b3020a6e
# !/usr/local/python/bin/python # -*- coding: utf-8 -*- # (C) Wu Dong, 2021 # All rights reserved # @Author: 'Wu Dong <[email protected]>' # @Time: '2021/12/30 2:52 下午' class TestSet: def test_sadd(self, redis): redis.sadd("SET:K1", "V1", "V1", "V2") redis.sadd("-SET:K2", "T1", "T1", "T2", "T3") assert redis.scard("SET:K1") == 2 assert redis.scard("-SET:K2") == 3 redis.delete("SET:K1", "-SET:K2") def test_sdiff(self, redis): redis.sadd("SET:K1", "V1", "V2", "V2", "V3") redis.sadd("-SET:K2", "T1", "V2", "V2", "T3") assert redis.sdiff("SET:K1", "-SET:K2") == {"V1", "V3"} assert redis.sdiff(["SET:K1", "-SET:K2"]) == {"V1", "V3"} assert redis.sdiff("-SET:K2", "SET:K1") == {"T1", "T3"} assert redis.sdiff(["-SET:K2", "SET:K1"]) == {"T1", "T3"} redis.delete("SET:K1", "-SET:K2") def test_sdiffstore(self, redis): redis.sadd("SET:K1", "V1", "V2", "V2", "V3") redis.sadd("-SET:K2", "T1", "V2", "V2", "T3") redis.sdiffstore("SET:K3", "-SET:K2", "SET:K1") redis.sdiffstore("-SET:K4", ["SET:K1", "-SET:K2"]) assert redis.smembers("SET:K3") == {"T1", "T3"} assert redis.smembers("-SET:K4") == {"V1", "V3"} redis.delete("SET:K1", "-SET:K2", "SET:K3", "-SET:K4") def test_sinter(self, redis): redis.sadd("SET:K1", "V1", "V2", "V2", "V3") redis.sadd("-SET:K2", "T1", "V2", "V2", "T3") assert redis.sinter("SET:K1", "-SET:K2") == {"V2"} assert redis.sinter(["SET:K1", "-SET:K2"]) == {"V2"} redis.delete("SET:K1", "-SET:K2") def test_sinterstore(self, redis): redis.sadd("SET:K1", "V1", "V2", "V2", "V3") redis.sadd("-SET:K2", "T1", "V2", "V2", "T3") redis.sinterstore("SET:K3", "SET:K1", "-SET:K2") redis.sinterstore("-SET:K4", ["SET:K1", "-SET:K2"]) assert redis.smembers("SET:K3") == {"V2"} assert redis.smembers("-SET:K4") == {"V2"} redis.delete("SET:K1", "-SET:K2", "SET:K3", "-SET:K4") def test_sismember(self, redis): redis.sadd("SET:K1", "V1", "V2", "V2", "V3") redis.sadd("-SET:K2", "T1", "V2", "V2", "T3") assert redis.sismember("SET:K1", "V2") is True assert redis.sismember("SET:K1", "V5") is not True assert redis.sismember("-SET:K2", "T1") is True assert redis.sismember("-SET:K2", "T55") is not True redis.delete("SET:K1", "-SET:K2") def test_smove(self, redis): redis.sadd("SET:K1", "V1", "V2", "V2", "V3") redis.sadd("-SET:K2", "T1", "V2", "V2", "T3") redis.smove("SET:K1", "-SET:K2", "V3") redis.smove("-SET:K2", "SET:K1", "T1") assert redis.smembers("SET:K1") == {"T1", "V2", "V1"} assert redis.smembers("-SET:K2") == {"V2", "V3", "T3"} redis.delete("SET:K1", "-SET:K2") def test_spop(self, redis): redis.sadd("SET:K1", "V1", "V2", "V3") redis.sadd("-SET:K2", "T1", "T2", "T3") assert redis.spop("SET:K1") in {"V1", "V2", "V3"} assert redis.scard("SET:K1") == 2 assert redis.spop("-SET:K2") in {"T1", "T2", "T3"} assert redis.scard("-SET:K2") == 2 redis.delete("SET:K1", "-SET:K2") def test_srandmember(self, redis): redis.sadd("SET:K1", "V1", "V2", "V3") redis.sadd("-SET:K2", "T1", "T2", "T3") assert len({"V1", "V2", "V3"} - set(redis.srandmember("SET:K1", 2))) == 1 assert len({"T1", "T2", "T3"} - set(redis.srandmember("-SET:K2", 2))) == 1 redis.delete("SET:K1", "-SET:K2") def test_srem(self, redis): redis.sadd("SET:K1", "V1", "V2", "V3") redis.sadd("-SET:K2", "T1", "T2", "T3") redis.srem("SET:K1", "V1", "V2") redis.srem("-SET:K2", "T1") assert redis.smembers("SET:K1") == {"V3"} assert redis.smembers("-SET:K2") == {"T2", "T3"} redis.delete("SET:K1", "-SET:K2") def test_sunion(self, redis): redis.sadd("SET:K1", "V1", "V2") redis.sadd("-SET:K2", "T1", "T2") assert redis.sunion("SET:K1", "-SET:K2") == {"V1", "V2", "T1", "T2"} 
assert redis.sunion(["SET:K1", "-SET:K2"]) == {"V1", "V2", "T1", "T2"} redis.delete("SET:K1", "-SET:K2") def test_sunionstore(self, redis): redis.sadd("SET:K1", "V1", "V2") redis.sadd("-SET:K2", "T1", "T2") redis.sunionstore("SET:K3", "SET:K1", "-SET:K2") redis.sunionstore("-SET:K4", ["SET:K1", "-SET:K2"]) assert redis.smembers("SET:K3") == {"V1", "V2", "T1", "T2"} assert redis.smembers("-SET:K4") == {"V1", "V2", "T1", "T2"} redis.delete("SET:K1", "-SET:K2", "SET:K3", "-SET:K4") def test_sscan(self, redis): redis.sadd("SET:K1", "V1", "V2", "U2", "U3") redis.sadd("-SET:K2", "T1", "T2", "U1", "U2") assert redis.sscan("SET:K1", 0, match="V*") == (0, ["V1", "V2"]) or (0, ["V2", "V1"]) assert redis.sscan("-SET:K2", 0, match="T*") == (0, ["T1", "T2"]) or (0, ["T2", "T1"]) redis.delete("SET:K1", "-SET:K2") def test_sscan_iter(self, redis): redis.sadd("SET:K1", "V1", "V2", "U2", "U3") redis.sadd("-SET:K2", "T1", "T2", "U1", "U2") for v in redis.sscan_iter("SET:K1", match="V*"): assert v in {"V1", "V2"} for v in redis.sscan_iter("-SET:K2", match="U*"): assert v in {"U1", "U2"} redis.delete("SET:K1", "-SET:K2")
py
1a4067ebdd37bf44ce451222c9f30b1e9005bcc6
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Run administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'eigadb.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
py
1a406820d2dbf5f984ab570d04a5713c6ac65d82
from setuptools import setup, find_packages

with open('README.md') as f:
    readme = f.read()

setup(
    name='midi-websocket-server',
    version='1.0.0',
    description='Python Websocket server to facilitate two-way communication with all connected MIDI devices.',
    long_description=readme,
    url='https://github.com/PeterSR/python-midi-websocket-server',
    author='Peter Severin Rasmussen',
    author_email='[email protected]',
    license='MIT',
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.7',
    ],
    packages=find_packages(exclude=('tests', 'docs')),
    install_requires=[
        'websockets>=8.1',
        'python-rtmidi>=1.4.0',
    ],
    python_requires='>=3.7',
)
py
1a40687862d2c90a116e3230d333ff7804d2cf5e
from shutil import get_terminal_size
from typing import Iterable, List

from verdandi.result import BenchmarkResult


def print_stdout(result: BenchmarkResult) -> None:
    for iter_index, iter_stdout in enumerate(result.stdout, start=1):
        if not iter_stdout:
            continue
        print_header(f"{result.name}: iteration {iter_index}", padding_symbol="-")
        print(iter_stdout)


def print_stderr(result: BenchmarkResult) -> None:
    for iter_index, iter_stderr in enumerate(result.stderr, start=1):
        if not iter_stderr:
            continue
        print_header(f"{result.name}: iteration {iter_index}", padding_symbol="-")
        print(iter_stderr)


def print_exceptions(result: BenchmarkResult) -> None:
    for iter_index, iter_exc in enumerate(result.exceptions, start=1):
        print_header(f"{result.name}: iteration {iter_index}", padding_symbol="-")
        print(f"{iter_exc.__class__.__name__}: {str(iter_exc)}")


def print_results_as_table(results: Iterable[BenchmarkResult]) -> None:
    """
    Accepts a list of BenchmarkResult, formats them and prints out a table with details.
    """
    def format_table_row(row: List[str], width: int) -> str:
        s = "".join(str(cell).ljust(width) for cell in row[:-1])
        s += str(row[-1])
        return s

    headers = ["Name", "Result", "Duration (in seconds)"]
    col_width = max(len(result.name) for result in results) + 2

    print_header("", padding_symbol="-")
    print(format_table_row(headers, col_width))
    print_header("", padding_symbol="-")

    for r in results:
        print(format_table_row([r.name, r.rtype.name, f"{r.duration_sec:.4f}"], col_width))

    print_header("", padding_symbol="-")
    print()  # Empty line


def print_header(text: str, padding_symbol: str = "=") -> None:
    """
    Prints given text padded from both sides with `padding_symbol` up to terminal width
    """
    text_length = len(text)
    columns = get_terminal_size()[0]
    padding_length = ((columns - text_length) // 2) - 1  # Subtract one whitespace from each side
    padding = padding_symbol * padding_length

    if text:
        print(f"{padding} {text} {padding}")
    else:
        print(padding * 2)
py
1a4068a4003a6a6f42c889673a3aad28ff1bfce3
# Copyright 2021 BlackRock, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import cast

import numpy as np
import numpy.typing as npt


def shubert(x1: float, x2: float) -> float:
    """https://www.sfu.ca/~ssurjano/shubert.html."""
    factor_1 = np.sum([i * np.cos((i + 1) * x1 + i) for i in range(1, 6)])
    factor_2 = np.sum([i * np.cos((i + 1) * x2 + i) for i in range(1, 6)])
    return cast(float, factor_1 * factor_2)


def shubert_np(x: npt.NDArray[np.floating]) -> float:
    if len(x) != 2:
        raise AssertionError("Exactly 2 items expected")
    return shubert(x[0], x[1])
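# Illustrative usage sketch (not part of the original module). The 2-D Shubert
# function has many local minima; a commonly cited global minimum value is about
# -186.7309, reached near points such as (-1.42513, -0.80032) -- treat those
# coordinates as an approximation rather than a guarantee.
if __name__ == "__main__":
    print(shubert(0.0, 0.0))
    print(shubert_np(np.array([-1.42513, -0.80032])))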
py
1a406a9e846114eb1998785ce951822a138bdff4
"""114. Flatten Binary Tree to Linked List""" # Definition for a binary tree node. # class TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right class Solution(object): def flatten(self, root): """ :type root: TreeNode :rtype: None Do not return anything, modify root in-place instead. """ ## Practice: self.dfs(root) def dfs(self, node): if not node: return self.dfs(node.left) self.dfs(node.right) left = node.left right = node.right node.right = left node.left = None while node.right: node = node.right node.right = right ### self.prev = None self.dfs(root) return root def dfs(self, node): if not node: #base case return self.dfs(node.left) self.dfs(node.right) # 后序遍历位置 # 1 左右子树已经被拉平成一条链表 left = node.left right = node.right # 2 将左子树作为右子树 node.left = None node.right = left # 3 将原先的右子树接到当前右子树的末端 while node.right: node = node.right node.right = right ### if not root: return None self.flatten(root.right) self.flatten(root.left) root.right = self.prev root.left = None self.prev = root
py
1a406b3f8ec0199e9e25a39510dfe247b3975759
#!/usr/bin/env python """ wikipedia.py - Phenny Wikipedia Module Copyright 2008-9, Sean B. Palmer, inamidst.com Licensed under the Eiffel Forum License 2. http://inamidst.com/phenny/ """ import re, urllib, gzip, StringIO import web wikiuri = 'http://%s.wikipedia.org/wiki/%s' # wikisearch = 'http://%s.wikipedia.org/wiki/Special:Search?' \ # + 'search=%s&fulltext=Search' r_tr = re.compile(r'(?ims)<tr[^>]*>.*?</tr>') r_paragraph = re.compile(r'(?ims)<p[^>]*>.*?</p>|<li(?!n)[^>]*>.*?</li>') r_tag = re.compile(r'<(?!!)[^>]+>') r_whitespace = re.compile(r'[\t\r\n ]+') r_redirect = re.compile( r'(?ims)class=.redirectText.>\s*<a\s*href=./wiki/([^"/]+)' ) abbrs = ['etc', 'ca', 'cf', 'Co', 'Ltd', 'Inc', 'Mt', 'Mr', 'Mrs', 'Dr', 'Ms', 'Rev', 'Fr', 'St', 'Sgt', 'pron', 'approx', 'lit', 'syn', 'transl', 'sess', 'fl', 'Op', 'Dec', 'Brig', 'Gen'] \ + list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') \ + list('abcdefghijklmnopqrstuvwxyz') t_sentence = r'^.{5,}?(?<!\b%s)(?:\.(?=[\[ ][A-Z0-9]|\Z)|\Z)' r_sentence = re.compile(t_sentence % r')(?<!\b'.join(abbrs)) def unescape(s): s = s.replace('&gt;', '>') s = s.replace('&lt;', '<') s = s.replace('&amp;', '&') s = s.replace('&#160;', ' ') return s def text(html): html = r_tag.sub('', html) html = r_whitespace.sub(' ', html) return unescape(html).strip() def search(term): try: import search except ImportError, e: print e return term if isinstance(term, unicode): term = term.encode('utf-8') else: term = term.decode('utf-8') term = term.replace('_', ' ') try: uri = search.google_search('site:en.wikipedia.org %s' % term) except IndexError: return term if uri: return uri[len('http://en.wikipedia.org/wiki/'):] else: return term def wikipedia(term, language='en', last=False): global wikiuri if not '%' in term: if isinstance(term, unicode): t = term.encode('utf-8') else: t = term q = urllib.quote(t) u = wikiuri % (language, q) bytes = web.get(u) else: bytes = web.get(wikiuri % (language, term)) if bytes.startswith('\x1f\x8b\x08\x00\x00\x00\x00\x00'): f = StringIO.StringIO(bytes) f.seek(0) gzip_file = gzip.GzipFile(fileobj=f) bytes = gzip_file.read() gzip_file.close() f.close() bytes = r_tr.sub('', bytes) if not last: r = r_redirect.search(bytes[:4096]) if r: term = urllib.unquote(r.group(1)) return wikipedia(term, language=language, last=True) paragraphs = r_paragraph.findall(bytes) if not paragraphs: if not last: term = search(term) return wikipedia(term, language=language, last=True) return None # Pre-process paragraphs = [para for para in paragraphs if (para and 'technical limitations' not in para and 'window.showTocToggle' not in para and 'Deletion_policy' not in para and 'Template:AfD_footer' not in para and not (para.startswith('<p><i>') and para.endswith('</i></p>')) and not 'disambiguation)"' in para) and not '(images and media)' in para and not 'This article contains a' in para and not 'id="coordinates"' in para and not 'class="thumb' in para] # and not 'style="display:none"' in para] for i, para in enumerate(paragraphs): para = para.replace('<sup>', '|') para = para.replace('</sup>', '|') paragraphs[i] = text(para).strip() # Post-process paragraphs = [para for para in paragraphs if (para and not (para.endswith(':') and len(para) < 150))] para = text(paragraphs[0]) m = r_sentence.match(para) if not m: if not last: term = search(term) return wikipedia(term, language=language, last=True) return None sentence = m.group(0) maxlength = 275 if len(sentence) > maxlength: sentence = sentence[:maxlength] words = sentence[:-5].split(' ') words.pop() sentence = ' '.join(words) + ' 
[...]' if (('using the Article Wizard if you wish' in sentence) or ('or add a request for it' in sentence) or ('in existing articles' in sentence)): if not last: term = search(term) return wikipedia(term, language=language, last=True) return None sentence = '"' + sentence.replace('"', "'") + '"' sentence = sentence.decode('utf-8').encode('utf-8') wikiuri = wikiuri.decode('utf-8').encode('utf-8') term = term.decode('utf-8').encode('utf-8') return sentence + ' - ' + (wikiuri % (language, term)) def wik(phenny, input): origterm = input.groups()[1] if not origterm: return phenny.say('Perhaps you meant ".wik Zen"?') origterm = origterm.encode('utf-8') term = urllib.unquote(origterm) language = 'en' if term.startswith(':') and (' ' in term): a, b = term.split(' ', 1) a = a.lstrip(':') if a.isalpha(): language, term = a, b term = term[0].upper() + term[1:] term = term.replace(' ', '_') try: result = wikipedia(term, language) except IOError: args = (language, wikiuri % (language, term)) error = "Can't connect to %s.wikipedia.org (%s)" % args return phenny.say(error) if result is not None: phenny.say(result) else: phenny.say('Can\'t find anything in Wikipedia for "%s".' % origterm) wik.commands = ['wik'] wik.priority = 'high' if __name__ == '__main__': print __doc__.strip()
py
1a406b459a991f83f96b88faa9786c6933c0a34f
from vector2D import Vector2D as vec
from typing import List, Tuple

Point = Tuple[int, int]


def ear_clipping(polygon: List[Point]) -> List[List[Point]]:
    if len(polygon) > 3:
        polygon = vec.convert(polygon)
        total_triangles = len(polygon) - 2
        triangles = []
        while len(triangles) < total_triangles:
            for ind, center_point in enumerate(polygon):
                right_point = polygon[(ind + 1) % len(polygon)]
                left_point = polygon[(ind - 1) % len(polygon)]
                if left_point.cross(right_point, origin=center_point) > 0:
                    temp_triangle = (left_point, center_point, right_point)
                    check_triangle_validity = lambda point: point not in temp_triangle and point.in_polygon(temp_triangle)
                    if not any(filter(check_triangle_validity, polygon)):
                        triangles.append(temp_triangle)
                        polygon.pop(ind)
        return triangles
    return polygon
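# Illustrative usage sketch (not part of the original file). It assumes the local
# vector2D module providing Vector2D.convert / cross / in_polygon is importable
# next to this script, since ear_clipping relies on it for polygons with more
# than three vertices; vertices are given in counter-clockwise order.
if __name__ == "__main__":
    square = [(0, 0), (4, 0), (4, 4), (0, 4)]
    print(ear_clipping(square))  # a convex quadrilateral should decompose into 2 triangles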
py
1a406be9f83e069465383dddcd034db5b38b5d57
from config import HNConfig as Config import numpy as np # type: ignore from matplotlib import pyplot as plt # type: ignore from matplotlib import cm # type: ignore from matplotlib import colors # type: ignore import pandas as pd # type: ignore import util window_size = 5 dpi = 100 iter_lim = 1000 record_moment = np.arange(0, iter_lim, 10) record = True delta_t = 0.01 noise = 0.001 u_0 = 0.02 param_a = 1.0 param_b = 1.0 param_c = 2.0 param_d = 1.0 @np.vectorize def sigmoid(input: float) -> float: sigmoid_range = 34.538776394910684 if input <= -sigmoid_range: return 1e-15 if input >= sigmoid_range: return 1.0 - 1e-15 return 1.0 / (1.0 + np.exp(-input / u_0)) def kronecker_delta(i: int, j: int) -> float: if i == j: return 1.0 return 0.0 def calc_weight_matrix(city_array: np.array) -> np.array: city_num: int = city_array.shape[0] n: int = city_num ** 2 tmp: np.array = np.zeros((n, n)) for s0 in range(n): x: int = int(s0 / city_num) i: int = s0 % city_num for s1 in range(n): y: int = int(s1 / city_num) j: int = s1 % city_num dxy: float = util.dist(city_array[x, :], city_array[y, :]) tmp[s0, s1] = ( -param_a * kronecker_delta(x, y) * (1.0 - kronecker_delta(i, j)) - param_b * kronecker_delta(i, j) * (1.0 - kronecker_delta(x, y)) - param_c - param_d * dxy * ( kronecker_delta(j, (i - 1) % city_num) + kronecker_delta(j, (i + 1) % city_num) ) ) return tmp def calc_bias(city_array: np.array) -> np.array: city_num: int = city_array.shape[0] n: int = city_num ** 2 tmp: np.array = param_c * city_num * np.ones(n) return tmp def update_inner_vals( nodes_array: np.matrix, inner_vals: np.matrix, weight_matrix: np.matrix, biases: np.matrix, ) -> np.matrix: tau = 1.0 asdf: np.matrix = np.matmul(weight_matrix, nodes_array) delta: np.matrix = (-inner_vals / tau + asdf + biases) * delta_t return inner_vals + delta def hp_begin( inner_vals_array: np.matrix, nodes_array: np.matrix, weights_matrix: np.matrix, biases_array: np.matrix, ) -> None: if record: dir_name: str = util.make_directory(Config) for i in range(iter_lim): if i in record_moment: filename: str = "iteration-" + str(i) + ".png" file_path: str = dir_name + filename plt.savefig(file_path) inner_vals_array = update_inner_vals( nodes_array, inner_vals_array, weights_matrix, biases_array ) nodes_array = sigmoid(inner_vals_array) plt.title("iteration=" + str(i + 1)) mat_visual.set_data(np.reshape(nodes_array, (city_num, city_num))) plt.pause(0.0001) else: i = 1 # while plt.get_fignums(): # inner_vals_array = update_inner_vals(nodes_array, inner_vals_array, weights_matrix, biases_array) # nodes_array = sigmoid(inner_vals_array) # plt.title("iteration=" + str(i)) # mat_visual.set_data(np.reshape(nodes_array, (city_num, city_num))) # i += 1 # plt.pause(.01) while plt.get_fignums(): # print(nodes_array.shape, inner_vals_array.shape, weights_matrix.shape, biases_array.shape) inner_vals_array = update_inner_vals( nodes_array, inner_vals_array, weights_matrix, biases_array ) nodes_array = sigmoid(inner_vals_array) plt.title("iteration=" + str(i)) mat_visual.set_data(np.reshape(nodes_array, (city_num, city_num))) i += 1 plt.pause(0.0001) if __name__ == "__main__": if Config.read_file: np_cities = np.genfromtxt(Config.file_path + Config.city_file, delimiter=",") city_num = np_cities.shape[0] # width_x = (np.max(np_cities[:, 0]) - np.min(np_cities[:, 0])) # width_y = (np.max(np_cities[:, 1]) - np.min(np_cities[:, 1])) # width = np.amax([width_x, width_y]) # np_cities[:, 0] -= np.min(np_cities[:, 0]) # np_cities[:, 0] /= width # np_cities[:, 1] -= np.min(np_cities[:, 
1]) # np_cities[:, 1] /= width # center_x = np.average(np_cities[:, 0]) # center_y = np.average(np_cities[:, 1]) figsize = (window_size, window_size) else: city_num = Config.city_num # “continuous uniform” distribution random np_cities = np.random.random((city_num, 2)) center_x = 0.5 center_y = 0.5 figsize = (window_size, window_size) inner_vals = np.matrix((np.random.random((city_num ** 2)) - 0.5) * noise).T nodes = np.matrix(sigmoid(inner_vals)) weights = np.matrix(calc_weight_matrix(np_cities)) df = pd.DataFrame(weights) df.to_csv("weigths.csv", header=False, index=False) biases = np.matrix(calc_bias(np_cities)).T fig = plt.figure(figsize=figsize, dpi=dpi) mat_visual = plt.matshow( np.reshape(nodes, (city_num, city_num)), fignum=0, cmap=cm.Greys, norm=colors.Normalize(vmin=0.0, vmax=1.0), ) fig.colorbar(mat_visual) plt.title("iteration=" + str(0)) plt.pause(0.0001) hp_begin(inner_vals, nodes, weights, biases)
py
1a406c0160aad44e8a2aca5f922ff80aa1b59bed
import flask_restplus
import marshmallow

from znail.netem.disciplines import PacketReordering
from znail.netem.tc import Tc
from znail.ui import api
from znail.ui.util import NoneAttributes, json_request_handler


class PacketReorderingSchema(marshmallow.Schema):
    milliseconds = marshmallow.fields.Integer(required=True, validate=lambda n: n > 0)
    percent = marshmallow.fields.Float(required=True, validate=lambda n: n >= 0 and n <= 100)


packet_reordering_schema = PacketReorderingSchema()
packet_reordering_model = api.model(
    'PacketReordering', {
        'milliseconds': flask_restplus.fields.Integer(min=0),
        'percent': flask_restplus.fields.Float(min=0, max=100),
    })


@api.route('/api/disciplines/packet_reordering')
class PacketReorderingResource(flask_restplus.Resource):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.tc = Tc.adapter('eth1')

    @api.response(200, 'Success', packet_reordering_model)
    def get(self):
        reordering = self.tc.disciplines.get('reorder', NoneAttributes)
        return {
            'milliseconds': reordering.milliseconds,
            'percent': reordering.percent,
        }, 200

    @json_request_handler(packet_reordering_schema, packet_reordering_model)
    def post(self, data):
        disciplines = self.tc.disciplines
        disciplines['reorder'] = PacketReordering(data['percent'], data['milliseconds'])
        self.tc.apply(disciplines)


@api.route('/api/disciplines/packet_reordering/clear')
class ClearPacketReorderingResource(flask_restplus.Resource):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.tc = Tc.adapter('eth1')

    @json_request_handler()
    def post(self, data):
        disciplines = self.tc.disciplines
        if 'reorder' in disciplines:
            del disciplines['reorder']
        self.tc.apply(disciplines)
py
1a406c15b4b077c937c83e457136ed5f136a3574
from OHA.Defaults import Defaults
from OHA.helpers.converters.LengthConverter import LengthConverter

__author__ = 'indrajit'
__email__ = '[email protected]'


class HeightConverter(LengthConverter):
    def _default_from_unit(self):
        return Defaults.height_unit

    def _default_to_unit(self):
        return Defaults.height_unit
py
1a406c981d5719d888eb2034a60f1bc8801c6824
import json
from os.path import join

data = join('data', 'ThemeProtoSet.json')

with open(data, encoding='UTF-8') as file:
    data = json.load(file)

data = data['dataArray']['Array']['data']

theme_ionheight = {}

for theme in data:
    theme_ionheight[theme['DisplayName']] = theme['IonHeight']

print(json.dumps(theme_ionheight, ensure_ascii=False, indent=2))
py
1a406ce95a90781e949bffcd38142c0bf0f3c6be
from time import sleep
from picamera import PiCamera

camera = PiCamera()
camera.resolution = (1024, 768)
camera.capture('foo.jpg')
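# Illustrative variant (not part of the original script): the picamera docs
# recommend letting the sensor warm up before capturing, which is presumably why
# `sleep` is imported above even though it is unused here. A typical pattern:
#
#     camera.start_preview()
#     sleep(2)
#     camera.capture('foo.jpg')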
py
1a406d743b7cfac58cbafd9233b0c09d60e8b13e
"""Support for exposing Concord232 elements as sensors.""" import datetime import logging import requests import voluptuous as vol from homeassistant.components.binary_sensor import ( BinarySensorDevice, PLATFORM_SCHEMA, DEVICE_CLASSES, ) from homeassistant.const import CONF_HOST, CONF_PORT import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) CONF_EXCLUDE_ZONES = "exclude_zones" CONF_ZONE_TYPES = "zone_types" DEFAULT_HOST = "localhost" DEFAULT_NAME = "Alarm" DEFAULT_PORT = "5007" DEFAULT_SSL = False SCAN_INTERVAL = datetime.timedelta(seconds=10) ZONE_TYPES_SCHEMA = vol.Schema({cv.positive_int: vol.In(DEVICE_CLASSES)}) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_EXCLUDE_ZONES, default=[]): vol.All( cv.ensure_list, [cv.positive_int] ), vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Concord232 binary sensor platform.""" from concord232 import client as concord232_client host = config.get(CONF_HOST) port = config.get(CONF_PORT) exclude = config.get(CONF_EXCLUDE_ZONES) zone_types = config.get(CONF_ZONE_TYPES) sensors = [] try: _LOGGER.debug("Initializing client") client = concord232_client.Client(f"http://{host}:{port}") client.zones = client.list_zones() client.last_zone_update = dt_util.utcnow() except requests.exceptions.ConnectionError as ex: _LOGGER.error("Unable to connect to Concord232: %s", str(ex)) return False # The order of zones returned by client.list_zones() can vary. # When the zones are not named, this can result in the same entity # name mapping to different sensors in an unpredictable way. Sort # the zones by zone number to prevent this. 
client.zones.sort(key=lambda zone: zone["number"]) for zone in client.zones: _LOGGER.info("Loading Zone found: %s", zone["name"]) if zone["number"] not in exclude: sensors.append( Concord232ZoneSensor( hass, client, zone, zone_types.get(zone["number"], get_opening_type(zone)), ) ) add_entities(sensors, True) def get_opening_type(zone): """Return the result of the type guessing from name.""" if "MOTION" in zone["name"]: return "motion" if "KEY" in zone["name"]: return "safety" if "SMOKE" in zone["name"]: return "smoke" if "WATER" in zone["name"]: return "water" return "opening" class Concord232ZoneSensor(BinarySensorDevice): """Representation of a Concord232 zone as a sensor.""" def __init__(self, hass, client, zone, zone_type): """Initialize the Concord232 binary sensor.""" self._hass = hass self._client = client self._zone = zone self._number = zone["number"] self._zone_type = zone_type @property def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return self._zone_type @property def should_poll(self): """No polling needed.""" return True @property def name(self): """Return the name of the binary sensor.""" return self._zone["name"] @property def is_on(self): """Return true if the binary sensor is on.""" # True means "faulted" or "open" or "abnormal state" return bool(self._zone["state"] != "Normal") def update(self): """Get updated stats from API.""" last_update = dt_util.utcnow() - self._client.last_zone_update _LOGGER.debug("Zone: %s ", self._zone) if last_update > datetime.timedelta(seconds=1): self._client.zones = self._client.list_zones() self._client.last_zone_update = dt_util.utcnow() _LOGGER.debug("Updated from zone: %s", self._zone["name"]) if hasattr(self._client, "zones"): self._zone = next( (x for x in self._client.zones if x["number"] == self._number), None )
py
1a406e8dcb32ea5889b26933cf11ca80a3a13aed
import argparse import json import os import pickle import sys import sagemaker_containers import pandas as pd import numpy as np import torch import torch.nn as nn import torch.optim as optim import torch.utils.data from model import LSTMClassifier from utils import review_to_words, convert_and_pad def model_fn(model_dir): """Load the PyTorch model from the `model_dir` directory.""" print("Loading model.") # First, load the parameters used to create the model. model_info = {} model_info_path = os.path.join(model_dir, 'model_info.pth') with open(model_info_path, 'rb') as f: model_info = torch.load(f) print("model_info: {}".format(model_info)) # Determine the device and construct the model. device = torch.device("cuda" if torch.cuda.is_available() else "cpu") model = LSTMClassifier(model_info['embedding_dim'], model_info['hidden_dim'], model_info['vocab_size']) # Load the store model parameters. model_path = os.path.join(model_dir, 'model.pth') with open(model_path, 'rb') as f: model.load_state_dict(torch.load(f)) # Load the saved word_dict. word_dict_path = os.path.join(model_dir, 'word_dict.pkl') with open(word_dict_path, 'rb') as f: model.word_dict = pickle.load(f) model.to(device).eval() print("Done loading model.") return model def input_fn(serialized_input_data, content_type): print('Deserializing the input data.') if content_type == 'text/plain': data = serialized_input_data.decode('utf-8') return data raise Exception('Requested unsupported ContentType in content_type: ' + content_type) def output_fn(prediction_output, accept): print('Serializing the generated output.') return str(prediction_output) def predict_fn(input_data, model): print('Inferring sentiment of input data.') device = torch.device("cuda" if torch.cuda.is_available() else "cpu") if model.word_dict is None: raise Exception('Model has not been loaded properly, no word_dict.') # TODO: Process input_data so that it is ready to be sent to our model. # You should produce two variables: # data_X - A sequence of length 500 which represents the converted review # data_len - The length of the review review_words = review_to_words(input_data) data_X, data_len = convert_and_pad(model.word_dict, review_words) # Using data_X and data_len we construct an appropriate input tensor. Remember # that our model expects input data of the form 'len, review[500]'. # Solution: data_pack = np.hstack((data_len, data_X)) data_pack = data_pack.reshape(1, -1) data = torch.from_numpy(data_pack) data = data.to(device) # Make sure to put the model into evaluation mode model.eval() # TODO: Compute the result of applying the model to the input data. The variable `result` should # be a numpy array which contains a single integer which is either 1 or 0 # Solution: with torch.no_grad(): output = model.forward(data) return np.round(output.numpy()).astype(np.int)
py
1a406eb00daf7e9f48ee527ca1282b0b0dc2f97c
# coding=utf-8 # Copyright 2020-present the HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Torch utilities for the Trainer class. """ import math import warnings from contextlib import contextmanager from dataclasses import dataclass from typing import Iterator, List, Optional, Union import numpy as np import torch from torch.utils.data.dataset import Dataset from torch.utils.data.distributed import DistributedSampler from torch.utils.data.sampler import RandomSampler, Sampler from .file_utils import is_sagemaker_distributed_available, is_torch_tpu_available from .utils import logging if is_sagemaker_distributed_available(): import smdistributed.dataparallel.torch.distributed as dist else: import torch.distributed as dist if is_torch_tpu_available(): import torch_xla.core.xla_model as xm # this is used to supress an undesired warning emitted by pytorch versions 1.4.2-1.7.0 try: from torch.optim.lr_scheduler import SAVE_STATE_WARNING except ImportError: SAVE_STATE_WARNING = "" logger = logging.get_logger(__name__) def torch_pad_and_concatenate(tensor1, tensor2, padding_index=-100): """Concatenates `tensor1` and `tensor2` on first axis, applying padding on the second if necessary.""" if len(tensor1.shape) == 1 or tensor1.shape[1] == tensor2.shape[1]: return torch.cat((tensor1, tensor2), dim=0) # Let's figure out the new shape new_shape = (tensor1.shape[0] + tensor2.shape[0], max(tensor1.shape[1], tensor2.shape[1])) + tensor1.shape[2:] # Now let's fill the result tensor result = tensor1.new_full(new_shape, padding_index) result[: tensor1.shape[0], : tensor1.shape[1]] = tensor1 result[tensor1.shape[0] :, : tensor2.shape[1]] = tensor2 return result def numpy_pad_and_concatenate(array1, array2, padding_index=-100): """Concatenates `array1` and `array2` on first axis, applying padding on the second if necessary.""" if len(array1.shape) == 1 or array1.shape[1] == array2.shape[1]: return np.concatenate((array1, array2), dim=0) # Let's figure out the new shape new_shape = (array1.shape[0] + array2.shape[0], max(array1.shape[1], array2.shape[1])) + array1.shape[2:] # Now let's fill the result tensor result = np.full_like(array1, padding_index, shape=new_shape) result[: array1.shape[0], : array1.shape[1]] = array1 result[array1.shape[0] :, : array2.shape[1]] = array2 return result def nested_concat(tensors, new_tensors, padding_index=-100): """ Concat the `new_tensors` to `tensors` on the first dim and pad them on the second if needed. Works for tensors or nested list/tuples of tensors. """ assert type(tensors) == type( new_tensors ), f"Expected `tensors` and `new_tensors` to have the same type but found {type(tensors)} and {type(new_tensors)}." 
if isinstance(tensors, (list, tuple)): return type(tensors)(nested_concat(t, n, padding_index=padding_index) for t, n in zip(tensors, new_tensors)) elif isinstance(tensors, torch.Tensor): return torch_pad_and_concatenate(tensors, new_tensors, padding_index=padding_index) elif isinstance(tensors, np.ndarray): return numpy_pad_and_concatenate(tensors, new_tensors, padding_index=padding_index) else: raise TypeError(f"Unsupported type for concatenation: got {type(tensors)}") def nested_numpify(tensors): "Numpify `tensors` (even if it's a nested list/tuple of tensors)." if isinstance(tensors, (list, tuple)): return type(tensors)(nested_numpify(t) for t in tensors) return tensors.cpu().numpy() def nested_detach(tensors): "Detach `tensors` (even if it's a nested list/tuple of tensors)." if isinstance(tensors, (list, tuple)): return type(tensors)(nested_detach(t) for t in tensors) return tensors.detach() def nested_xla_mesh_reduce(tensors, name): if is_torch_tpu_available(): import torch_xla.core.xla_model as xm if isinstance(tensors, (list, tuple)): return type(tensors)(nested_xla_mesh_reduce(t, f"{name}_{i}") for i, t in enumerate(tensors)) return xm.mesh_reduce(name, tensors, torch.cat) else: raise ImportError("Torch xla must be installed to use `nested_xla_mesh_reduce`") def distributed_concat(tensor: "torch.Tensor", num_total_examples: Optional[int] = None) -> torch.Tensor: try: if isinstance(tensor, (tuple, list)): return type(tensor)(distributed_concat(t, num_total_examples) for t in tensor) output_tensors = [tensor.clone() for _ in range(dist.get_world_size())] dist.all_gather(output_tensors, tensor) concat = torch.cat(output_tensors, dim=0) # truncate the dummy elements added by SequentialDistributedSampler if num_total_examples is not None: concat = concat[:num_total_examples] return concat except AssertionError: raise AssertionError("Not currently using distributed training") def distributed_broadcast_scalars( scalars: List[Union[int, float]], num_total_examples: Optional[int] = None ) -> torch.Tensor: try: tensorized_scalar = torch.tensor(scalars).cuda() output_tensors = [tensorized_scalar.clone() for _ in range(dist.get_world_size())] dist.all_gather(output_tensors, tensorized_scalar) concat = torch.cat(output_tensors, dim=0) # truncate the dummy elements added by SequentialDistributedSampler if num_total_examples is not None: concat = concat[:num_total_examples] return concat except AssertionError: raise AssertionError("Not currently using distributed training") def reissue_pt_warnings(caught_warnings): # Reissue warnings that are not the SAVE_STATE_WARNING if len(caught_warnings) > 1: for w in caught_warnings: if w.category != UserWarning or w.message != SAVE_STATE_WARNING: warnings.warn(w.message, w.category) @contextmanager def torch_distributed_zero_first(local_rank: int): """ Decorator to make all processes in distributed training wait for each local_master to do something. Args: local_rank (:obj:`int`): The rank of the local process. """ if local_rank not in [-1, 0]: dist.barrier() yield if local_rank == 0: dist.barrier() class SequentialDistributedSampler(Sampler): """ Distributed Sampler that subsamples indices sequentially, making it easier to collate all results at the end. Even though we only use this sampler for eval and predict (no training), which means that the model params won't have to be synced (i.e. 
will not hang for synchronization even if varied number of forward passes), we still add extra samples to the sampler to make it evenly divisible (like in `DistributedSampler`) to make it easy to `gather` or `reduce` resulting tensors at the end of the loop. """ def __init__(self, dataset, num_replicas=None, rank=None): if num_replicas is None: if not dist.is_available(): raise RuntimeError("Requires distributed package to be available") num_replicas = dist.get_world_size() if rank is None: if not dist.is_available(): raise RuntimeError("Requires distributed package to be available") rank = dist.get_rank() self.dataset = dataset self.num_replicas = num_replicas self.rank = rank self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas)) self.total_size = self.num_samples * self.num_replicas def __iter__(self): indices = list(range(len(self.dataset))) # add extra samples to make it evenly divisible indices += indices[: (self.total_size - len(indices))] assert ( len(indices) == self.total_size ), f"Indices length {len(indices)} and total size {self.total_size} mismatched" # subsample indices = indices[self.rank * self.num_samples : (self.rank + 1) * self.num_samples] assert ( len(indices) == self.num_samples ), f"Indices length {len(indices)} and sample number {self.num_samples} mismatched" return iter(indices) def __len__(self): return self.num_samples def get_tpu_sampler(dataset: torch.utils.data.dataset.Dataset): if xm.xrt_world_size() <= 1: return RandomSampler(dataset) return DistributedSampler(dataset, num_replicas=xm.xrt_world_size(), rank=xm.get_ordinal()) def nested_new_like(arrays, num_samples, padding_index=-100): """ Create the same nested structure as `arrays` with a first dimension always at `num_samples`.""" if isinstance(arrays, (list, tuple)): return type(arrays)(nested_new_like(x, num_samples) for x in arrays) return np.full_like(arrays, padding_index, shape=(num_samples, *arrays.shape[1:])) def nested_expand_like(arrays, new_seq_length, padding_index=-100): """ Expand the `arrays` so that the second dimension grows to `new_seq_length`. Uses `padding_index` for padding.""" if isinstance(arrays, (list, tuple)): return type(arrays)(nested_expand_like(x, new_seq_length, padding_index=padding_index) for x in arrays) result = np.full_like(arrays, padding_index, shape=(arrays.shape[0], new_seq_length) + arrays.shape[2:]) result[:, : arrays.shape[1]] = arrays return result def nested_truncate(tensors, limit): "Truncate `tensors` at `limit` (even if it's a nested list/tuple of tensors)." if isinstance(tensors, (list, tuple)): return type(tensors)(nested_truncate(t, limit) for t in tensors) return tensors[:limit] def _get_first_shape(arrays): """Return the shape of the first array found in the nested struct `arrays`.""" if isinstance(arrays, (list, tuple)): return _get_first_shape(arrays[0]) return arrays.shape class DistributedTensorGatherer: """ A class responsible for properly gathering tensors (or nested list/tuple of tensors) on the CPU by chunks. If our dataset has 16 samples with a batch size of 2 on 3 processes and we gather then transfer on CPU at every step, our sampler will generate the following indices: :obj:`[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 1]` to get something of size a multiple of 3 (so that each process gets the same dataset length). 
Then process 0, 1 and 2 will be responsible of making predictions for the following samples: - P0: :obj:`[0, 1, 2, 3, 4, 5]` - P1: :obj:`[6, 7, 8, 9, 10, 11]` - P2: :obj:`[12, 13, 14, 15, 0, 1]` The first batch treated on each process will be - P0: :obj:`[0, 1]` - P1: :obj:`[6, 7]` - P2: :obj:`[12, 13]` So if we gather at the end of the first batch, we will get a tensor (nested list/tuple of tensor) corresponding to the following indices: :obj:`[0, 1, 6, 7, 12, 13]` If we directly concatenate our results without taking any precautions, the user will then get the predictions for the indices in this order at the end of the prediction loop: :obj:`[0, 1, 6, 7, 12, 13, 2, 3, 8, 9, 14, 15, 4, 5, 10, 11, 0, 1]` For some reason, that's not going to roll their boat. This class is there to solve that problem. Args: world_size (:obj:`int`): The number of processes used in the distributed training. num_samples (:obj:`int`): The number of samples in our dataset. make_multiple_of (:obj:`int`, `optional`): If passed, the class assumes the datasets passed to each process are made to be a multiple of this argument (by adding samples). padding_index (:obj:`int`, `optional`, defaults to -100): The padding index to use if the arrays don't all have the same sequence length. """ def __init__(self, world_size, num_samples, make_multiple_of=None, padding_index=-100): self.world_size = world_size self.num_samples = num_samples total_size = world_size if make_multiple_of is None else world_size * make_multiple_of self.total_samples = int(np.ceil(num_samples / total_size)) * total_size self.process_length = self.total_samples // world_size self._storage = None self._offsets = None self.padding_index = padding_index def add_arrays(self, arrays): """ Add :obj:`arrays` to the internal storage, Will initialize the storage to the full size at the first arrays passed so that if we're bound to get an OOM, it happens at the beginning. """ if arrays is None: return if self._storage is None: self._storage = nested_new_like(arrays, self.total_samples, padding_index=self.padding_index) self._offsets = list(range(0, self.total_samples, self.process_length)) else: storage_shape = _get_first_shape(self._storage) arrays_shape = _get_first_shape(arrays) if len(storage_shape) > 1 and storage_shape[1] < arrays_shape[1]: # If we get new arrays that are too big too fit, we expand the shape fo the storage self._storage = nested_expand_like(self._storage, arrays_shape[1], padding_index=self.padding_index) slice_len = self._nested_set_tensors(self._storage, arrays) for i in range(self.world_size): self._offsets[i] += slice_len def _nested_set_tensors(self, storage, arrays): if isinstance(arrays, (list, tuple)): for x, y in zip(storage, arrays): slice_len = self._nested_set_tensors(x, y) return slice_len assert ( arrays.shape[0] % self.world_size == 0 ), f"Arrays passed should all have a first dimension multiple of {self.world_size}, found {arrays.shape[0]}." slice_len = arrays.shape[0] // self.world_size for i in range(self.world_size): if len(arrays.shape) == 1: storage[self._offsets[i] : self._offsets[i] + slice_len] = arrays[i * slice_len : (i + 1) * slice_len] else: storage[self._offsets[i] : self._offsets[i] + slice_len, : arrays.shape[1]] = arrays[ i * slice_len : (i + 1) * slice_len ] return slice_len def finalize(self): """ Return the properly gathered arrays and truncate to the number of samples (since the sampler added some extras to get each process a dataset of the same length). 
""" if self._storage is None: return if self._offsets[0] != self.process_length: logger.warn("Not all data has been set. Are you sure you passed all values?") return nested_truncate(self._storage, self.num_samples) @dataclass class LabelSmoother: """ Adds label-smoothing on a pre-computed output from a Transformers model. Args: epsilon (:obj:`float`, `optional`, defaults to 0.1): The label smoothing factor. ignore_index (:obj:`int`, `optional`, defaults to -100): The index in the labels to ignore when computing the loss. """ epsilon: float = 0.1 ignore_index: int = -100 def __call__(self, model_output, labels): logits = model_output["logits"] if isinstance(model_output, dict) else model_output[0] log_probs = -torch.nn.functional.log_softmax(logits, dim=-1) if labels.dim() == log_probs.dim() - 1: labels = labels.unsqueeze(-1) padding_mask = labels.eq(self.ignore_index) # In case the ignore_index is -100, the gather will fail, so we replace labels by 0. The padding_mask # will ignore them in any case. labels.clamp_min_(0) nll_loss = log_probs.gather(dim=-1, index=labels) smoothed_loss = log_probs.sum(dim=-1, keepdim=True) nll_loss.masked_fill_(padding_mask, 0.0) smoothed_loss.masked_fill_(padding_mask, 0.0) # Take the mean over the label dimensions, then divide by the number of active elements (i.e. not-padded): num_active_elements = padding_mask.numel() - padding_mask.long().sum() nll_loss = nll_loss.sum() / num_active_elements smoothed_loss = smoothed_loss.sum() / (num_active_elements * log_probs.shape[-1]) return (1 - self.epsilon) * nll_loss + self.epsilon * smoothed_loss def get_length_grouped_indices(lengths, batch_size, mega_batch_mult=None, generator=None): """ Return a list of indices so that each slice of :obj:`batch_size` consecutive indices correspond to elements of similar lengths. To do this, the indices are: - randomly permuted - grouped in mega-batches of size :obj:`mega_batch_mult * batch_size` - sorted by length in each mega-batch The result is the concatenation of all mega-batches, with the batch of :obj:`batch_size` containing the element of maximum length placed first, so that an OOM happens sooner rather than later. """ # Default for mega_batch_mult: 50 or the number to get 4 megabatches, whichever is smaller. if mega_batch_mult is None: mega_batch_mult = min(len(lengths) // (batch_size * 4), 50) # Just in case, for tiny datasets if mega_batch_mult == 0: mega_batch_mult = 1 # We need to use torch for the random part as a distributed sampler will set the random seed for torch. indices = torch.randperm(len(lengths), generator=generator) megabatch_size = mega_batch_mult * batch_size megabatches = [indices[i : i + megabatch_size].tolist() for i in range(0, len(lengths), megabatch_size)] megabatches = [list(sorted(megabatch, key=lambda i: lengths[i], reverse=True)) for megabatch in megabatches] # The rest is to get the biggest batch first. # Since each megabatch is sorted by descending length, the longest element is the first megabatch_maximums = [lengths[megabatch[0]] for megabatch in megabatches] max_idx = torch.argmax(torch.tensor(megabatch_maximums)).item() # Switch to put the longest element in first position megabatches[0][0], megabatches[max_idx][0] = megabatches[max_idx][0], megabatches[0][0] return sum(megabatches, []) class LengthGroupedSampler(Sampler): r""" Sampler that samples indices in a way that groups together features of the dataset of roughly the same length while keeping a bit of randomness. 
""" def __init__(self, dataset: Dataset, batch_size: int, lengths: Optional[List[int]] = None): self.dataset = dataset self.batch_size = batch_size if lengths is None: if not isinstance(dataset[0], dict) or "input_ids" not in dataset[0]: raise ValueError( "Can only automatically infer lengths for datasets whose items are dictionaries with an " "'input_ids' key." ) lengths = [len(feature["input_ids"]) for feature in dataset] self.lengths = lengths def __len__(self): return len(self.lengths) def __iter__(self): indices = get_length_grouped_indices(self.lengths, self.batch_size) return iter(indices) class DistributedLengthGroupedSampler(DistributedSampler): r""" Distributed Sampler that samples indices in a way that groups together features of the dataset of roughly the same length while keeping a bit of randomness. """ # Copied and adapted from PyTorch DistributedSampler. def __init__( self, dataset: Dataset, batch_size: int, num_replicas: Optional[int] = None, rank: Optional[int] = None, seed: int = 0, drop_last: bool = False, lengths: Optional[List[int]] = None, ): if num_replicas is None: if not dist.is_available(): raise RuntimeError("Requires distributed package to be available") num_replicas = dist.get_world_size() if rank is None: if not dist.is_available(): raise RuntimeError("Requires distributed package to be available") rank = dist.get_rank() self.dataset = dataset self.batch_size = batch_size self.num_replicas = num_replicas self.rank = rank self.epoch = 0 self.drop_last = drop_last # If the dataset length is evenly divisible by # of replicas, then there # is no need to drop any data, since the dataset will be split equally. if self.drop_last and len(self.dataset) % self.num_replicas != 0: # Split to nearest available length that is evenly divisible. # This is to ensure each rank receives the same amount of data when # using this Sampler. self.num_samples = math.ceil((len(self.dataset) - self.num_replicas) / self.num_replicas) else: self.num_samples = math.ceil(len(self.dataset) / self.num_replicas) self.total_size = self.num_samples * self.num_replicas self.seed = seed if lengths is None: if not isinstance(dataset[0], dict) or "input_ids" not in dataset[0]: raise ValueError( "Can only automatically infer lengths for datasets whose items are dictionaries with an " "'input_ids' key." ) lengths = [len(feature["input_ids"]) for feature in dataset] self.lengths = lengths def __iter__(self) -> Iterator: # Deterministically shuffle based on epoch and seed g = torch.Generator() g.manual_seed(self.seed + self.epoch) indices = get_length_grouped_indices(self.lengths, self.batch_size, generator=g) if not self.drop_last: # add extra samples to make it evenly divisible indices += indices[: (self.total_size - len(indices))] else: # remove tail of data to make it evenly divisible. indices = indices[: self.total_size] assert len(indices) == self.total_size # subsample indices = indices[self.rank : self.total_size : self.num_replicas] assert len(indices) == self.num_samples return iter(indices)
py
1a406f2545dd08564298f0a19efc320be77969f2
# (C) Copyright 2005-2021 Enthought, Inc., Austin, TX # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in LICENSE.txt and may be redistributed only under # the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # # Thanks for using Enthought open source! import unittest from traits.api import HasTraits, Instance, Str, Any, Property class Foo(HasTraits): s = Str class ClassWithAny(HasTraits): x = Property _x = Any def _get_x(self): return self._x def _set_x(self, x): self._x = x class ClassWithInstance(HasTraits): x = Property _x = Instance(Foo) def _get_x(self): return self._x def _set_x(self, x): self._x = x class ClassWithClassAttribute(HasTraits): name = "class defined name" foo = Str class BazAny(HasTraits): other = Any class BarAny(HasTraits): other = Any class BazInstance(HasTraits): # A BarInstance owned by this object. other = Instance("BarInstance") # A Foo owned by this object and not referenced by others. unique = Instance(Foo) # A Foo owned by this object and referenced by others. shared = Instance(Foo) # A Foo not owned by this object, may or may not be shared with other # objects found via owned references (e.g. other.ref). For the tests, # ref will always reference a Foo that is not owned by any of the objects # reachable via owned references, and therefore, that Foo object should # not be cloned. ref = Instance(Foo, copy="ref") class BarInstance(HasTraits): # used as circular reference back to owning BazInstance # NOTE: Setting copy to 'ref' will mean that when BarInstance is cloned, # the 'other' trait will not be copied, and will still point to the # 'other' attribute of the original BarInstance. other = Instance("BazInstance", copy="ref") # A Foo owned by this object and not referenced by others. unique = Instance(Foo) # A Foo owned by the 'other' object and referenced by this object. shared = Instance(Foo) # A Foo not owned by this object, may or may not be shared with other # objects found via owned references (e.g. other.ref). For the tests, # ref will always reference a Foo that is not owned by any of the objects # reachable via owned references, and therefore, that Foo object should # not be cloned. ref = Instance(Foo, copy="ref") class CloneTestCase(unittest.TestCase): """ Test cases for traits clone """ def test_any(self): b = ClassWithAny() f = Foo() f.s = "the f" b.x = f bc = b.clone_traits(traits="all", copy="deep") self.assertNotEqual(id(bc.x), id(f), "Foo x not cloned") def test_instance(self): b = ClassWithInstance() f = Foo() f.s = "the f" b.x = f bc = b.clone_traits(traits="all", copy="deep") self.assertNotEqual(id(bc.x), id(f), "Foo x not cloned") def test_class_attribute_missing(self): """ This test demonstrates a problem with Traits objects with class attributes. A change to the value of a class attribute via one instance causes the attribute to be removed from other instances. 
AttributeError: 'ClassWithClassAttribute' object has no attribute 'name' """ s = "class defined name" c = ClassWithClassAttribute() self.assertEqual(s, c.name) c2 = ClassWithClassAttribute() self.assertEqual(s, c.name) self.assertEqual(s, c2.name) s2 = "name class attribute changed via clone" c2.name = s2 self.assertEqual(s2, c2.name) # this is failing with AttributeError: 'ClassWithClassAttribute' # object has no attribute 'name' self.assertEqual(s, c.name) def test_Any_circular_references(self): # Demonstrates that Any traits default to copy='ref' bar = BarAny() baz = BazAny() bar.other = baz baz.other = bar bar_copy = bar.clone_traits() self.assertIsNot(bar_copy, bar) self.assertIs(bar_copy.other, baz) self.assertIs(bar_copy.other.other, bar) def test_Any_circular_references_deep(self): # Demonstrates that Any traits can be forced to deep copy. bar = BarAny() baz = BazAny() bar.other = baz baz.other = bar bar_copy = bar.clone_traits(copy="deep") self.assertIsNot(bar_copy, bar) self.assertIsNot(bar_copy.other, baz) self.assertIsNot(bar_copy.other.other, bar) self.assertIs(bar_copy.other.other, bar_copy) def test_Instance_circular_references(self): ref = Foo(s="ref") bar_unique = Foo(s="bar.foo") shared = Foo(s="shared") baz_unique = Foo(s="baz.unique") baz = BazInstance() baz.unique = baz_unique baz.shared = shared baz.ref = ref bar = BarInstance() bar.unique = bar_unique bar.shared = shared bar.ref = ref bar.other = baz baz.other = bar baz_copy = baz.clone_traits() # Check Baz and Baz attributes.... self.assertIsNot(baz_copy, baz) self.assertIsNot(baz_copy.other, bar) self.assertIsNot(baz_copy.unique, baz.unique) self.assertIsNot(baz_copy.shared, baz.shared) self.assertIs(baz_copy.ref, ref) # Check Bar and Bar attributes.... bar_copy = baz_copy.other # Check the Bar owned object self.assertIsNot(bar_copy.unique, bar.unique) # Check the Bar reference to an object 'outside' the cloned graph. self.assertIs(bar_copy.ref, ref) # Check references to objects that where cloned, they should reference # the new clones not the original objects, except when copy is set # to 'ref' (as in the case of the 'other' trait). # When copy is set to ref, the trait does not get cloned. Therefore, # baz_copy.other.other is baz (and not baz_copy). self.assertIsNot(bar_copy.other, baz_copy) self.assertIs(bar_copy.other, baz) # 'shared' does not have copy set to 'ref', and so bar_copy.shared # should reference the new clone. # should reference the new clones self.assertIsNot(bar_copy.shared, baz.shared) self.assertIs(bar_copy.shared, baz_copy.shared) def test_Instance_circular_references_deep(self): ref = Foo(s="ref") bar_unique = Foo(s="bar.foo") shared = Foo(s="shared") baz_unique = Foo(s="baz.unique") baz = BazInstance() baz.unique = baz_unique baz.shared = shared baz.ref = ref bar = BarInstance() bar.unique = bar_unique bar.shared = shared bar.ref = ref bar.other = baz baz.other = bar baz_copy = baz.clone_traits(copy="deep") # Check Baz and Baz attributes.... self.assertIsNot(baz_copy, baz) self.assertIsNot(baz_copy.other, bar) self.assertIsNot(baz_copy.unique, baz.unique) self.assertIsNot(baz_copy.shared, baz.shared) # baz_copy.ref is checked below with bar_copy.ref. # Check Bar and Bar attributes.... bar_copy = baz_copy.other # Check the Bar owned object self.assertIsNot(bar_copy.unique, bar.unique) # Since the two original 'ref' links were to a shared object, # the cloned links should be to a shared object. Also, the shared # object should be the original 'ref' object, since copy was set to # 'ref'. 
self.assertIs(baz_copy.ref, bar_copy.ref) self.assertIs(bar_copy.ref, ref) # Check references to objects that where cloned, they should reference # the new clones not the original objects, except when copy is set # to 'ref' (as in the case of the 'other' trait). That is, the 'deep' # flag on clone_traits should not override the 'copy' metadata on # the trait. self.assertIsNot(bar_copy.other, baz_copy) self.assertIs(bar_copy.other, baz) # 'shared' does not have copy set to 'ref', and so bar_copy.shared # should reference the new clone. self.assertIsNot(bar_copy.shared, baz.shared) self.assertIs(bar_copy.shared, baz_copy.shared)
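# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original test module: the behaviour the
# tests above assert, shown directly on the classes defined in this file.
if __name__ == "__main__":
    original = ClassWithInstance()
    original.x = Foo(s="payload")
    clone = original.clone_traits(traits="all", copy="deep")
    print(clone.x is original.x)      # expected: False - the Foo instance itself is copied
    print(clone.x.s)                  # expected: "payload" - its state carries over

    bar, baz = BarAny(), BazAny()
    bar.other = baz
    shallow = bar.clone_traits()      # Any traits default to copy='ref'
    print(shallow.other is baz)       # expected: True - the clone shares the original BazAny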
py
1a406fc07121265ca171670d985fa1f6f3d071cb
# coding: utf-8 from django.conf.urls import include, url from customers.cbv_base.CreateView import CreateViewCustom from customers.cbv_base.DeleteView import DeleteViewCustom from customers.cbv_base.UpdateView import UpdateViewCustom from customers.cbv_base.ListView import ListViewCustomOrderBy from customers.cbv_base.DetailView import DetailViewCustom from .models import Task from .cbv import TaskCreate from .forms import TaskForm urlpatterns = [ url(r'^(list)?$', ListViewCustomOrderBy.as_view( model = Task, cbv_order_by = "created", url_delete_name = "tasks:delete", url_update_name = "tasks:update", url_create_name = "tasks:create", url_list_name = "tasks:list", url_detail_name = "tasks:detail", template_name = "cbv/ListViewCustom.html", ), name = 'list' ), url(r'^create$', TaskCreate.as_view( model=Task, success_url = "tasks:list", url_name = "tasks:create", template_name = "cbv/CreateViewCustom.html", form_class = TaskForm, ), name='create' ), url(r'^update-(?P<pk>\d+)$', UpdateViewCustom.as_view( model=Task, success_url="tasks:list", url_name="tasks:update", template_name="cbv/UpdateViewCustom.html", form_class = TaskForm, ), name='update' ), url(r'^delete-(?P<pk>\d+)$', DeleteViewCustom.as_view( model=Task, url_name="tasks:delete", success_url="tasks:list", template_name="cbv/DeleteViewCustom.html" ), name='delete' ), url(r'^detail-(?P<pk>\d+)$', DetailViewCustom.as_view( model=Task, url_name="tasks:detail", template_name="tasks/detail.html" ), name='detail' ), ]
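# ---------------------------------------------------------------------------
# Illustrative note, not part of the original module: with these route names,
# other parts of the project can resolve URLs through the namespace (the
# "tasks:" prefix used in the view configuration above implies the app is
# included under a "tasks" namespace):
#
#   from django.urls import reverse
#   reverse("tasks:update", kwargs={"pk": 3})  # -> ".../update-3"
#   reverse("tasks:detail", kwargs={"pk": 3})  # -> ".../detail-3"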
py
1a407045f9799fd14de7b584bf3af4515130fa1e
# Plot the weekly standard deviation of `df` (an existing pandas DataFrame with a DatetimeIndex).
df.resample("W").std().plot();
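# ---------------------------------------------------------------------------
# Illustrative, self-contained variant of the snippet above (not part of the
# original): builds a dummy daily series so the weekly resample has something
# to aggregate. The generated data is purely for demonstration; plotting needs
# matplotlib installed.
import numpy as np
import pandas as pd

idx = pd.date_range("2021-01-01", periods=120, freq="D")
demo = pd.DataFrame({"value": np.random.randn(120).cumsum()}, index=idx)
demo.resample("W").std().plot()  # one point per week: std of that week's daily values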
py
1a40719d1c03fdfdca4d867db66a80fbf434d083
import collections
from typing import List  # needed for the List[...] annotations below


class Solution:
    def pacificAtlantic(self, matrix: List[List[int]]) -> List[List[int]]:
        m = len(matrix)
        if m == 0:
            return []
        n = len(matrix[0])
        if n == 0:
            return []
        visitedTimes = [[0] * n for _ in range(m)]

        def bfs(start):
            Q = collections.deque(start)
            visited = [[False] * n for _ in range(m)]
            for row, col, height in start:
                visited[row][col] = True
                visitedTimes[row][col] += 1
            while Q:
                row, col, height = Q.popleft()
                for nr, nc in (row - 1, col), (row + 1, col), (row, col - 1), (row, col + 1):
                    if 0 <= nr < m and 0 <= nc < n and not visited[nr][nc] and matrix[nr][nc] >= height:
                        visited[nr][nc] = True
                        visitedTimes[nr][nc] += 1
                        Q.append((nr, nc, matrix[nr][nc]))

        # BFS "uphill" from all Pacific-touching cells (top row, left column), then from
        # all Atlantic-touching cells (bottom row, right column); cells reached by both
        # passes can drain to both oceans.
        bfs([[i, 0, matrix[i][0]] for i in range(m)] + [[0, j, matrix[0][j]] for j in range(1, n)])
        bfs([[i, n - 1, matrix[i][n - 1]] for i in range(m)] + [[m - 1, j, matrix[m - 1][j]] for j in range(n - 1)])
        return [[row, col] for row in range(m) for col in range(n) if visitedTimes[row][col] == 2]
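# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the original snippet, using the
# classic example grid for this problem. The Pacific touches the top/left
# edges, the Atlantic the bottom/right edges.
if __name__ == "__main__":
    heights = [
        [1, 2, 2, 3, 5],
        [3, 2, 3, 4, 4],
        [2, 4, 5, 3, 1],
        [6, 7, 1, 4, 5],
        [5, 1, 1, 2, 4],
    ]
    print(Solution().pacificAtlantic(heights))
    # expected: [[0, 4], [1, 3], [1, 4], [2, 2], [3, 0], [3, 1], [4, 0]]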
py
1a4071d42ee626e9f34180b81855b63da0d52acc
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import re from distutils.command.build_ext import build_ext from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError import setuptools import sys cext = setuptools.Extension( "pyignite._cutils", sources=[ "./cext/cutils.c" ], include_dirs=["./cext"] ) if sys.platform == 'win32': ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError, ValueError) else: ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError) class BuildFailed(Exception): pass class ve_build_ext(build_ext): # This class allows C extension building to fail. def run(self): try: build_ext.run(self) except DistutilsPlatformError: raise BuildFailed() def build_extension(self, ext): try: build_ext.build_extension(self, ext) except ext_errors: raise BuildFailed() def is_a_requirement(line): return not any([ line.startswith('#'), line.startswith('-r'), len(line) == 0, ]) install_requirements = [] with open('requirements/install.txt', 'r', encoding='utf-8') as requirements_file: for line in requirements_file.readlines(): line = line.strip('\n') if is_a_requirement(line): install_requirements.append(line) with open('README.md', 'r', encoding='utf-8') as readme_file: long_description = readme_file.read() with open('pyignite/__init__.py', 'r') as fd: version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) if not version: raise RuntimeError('Cannot find version information') def run_setup(with_binary=True): if with_binary: kw = dict( ext_modules=[cext], cmdclass=dict(build_ext=ve_build_ext), ) else: kw = dict() setuptools.setup( name='pyignite', version=version, python_requires='>=3.6', author='The Apache Software Foundation', author_email='[email protected]', description='Apache Ignite binary client Python API', long_description=long_description, long_description_content_type='text/markdown', url='https://github.com/apache/ignite-python-thin-client', packages=setuptools.find_packages(), install_requires=install_requirements, license="Apache License 2.0", license_files=('LICENSE', 'NOTICE'), classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3 :: Only', 'Intended Audience :: Developers', 'Topic :: Database :: Front-Ends', 'Topic :: Software Development :: Libraries :: Python Modules', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', ], **kw ) try: run_setup() except BuildFailed: BUILD_EXT_WARNING = ("WARNING: The C extension could not be compiled, " "speedups are not enabled.") print('*' * 
75) print(BUILD_EXT_WARNING) print("Failure information, if any, is above.") print("I'm retrying the build without the C extension now.") print('*' * 75) run_setup(False) print('*' * 75) print(BUILD_EXT_WARNING) print("Plain python installation succeeded.") print('*' * 75)
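# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original setup.py: the consumer-side
# counterpart of an optional C extension built with this fallback pattern.
# The flag name and fallback path are hypothetical; only the extension name
# pyignite._cutils comes from the setup script above.
try:
    from pyignite import _cutils  # present only when the C extension compiled
    HAS_C_SPEEDUPS = True
except ImportError:
    _cutils = None                # pure-python code paths are used instead
    HAS_C_SPEEDUPS = False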
py
1a4072d366aa881bcbd57167eb8d43d465d0a17c
from __future__ import division import array import os import subprocess from tempfile import TemporaryFile, NamedTemporaryFile import wave import sys import struct from .logging_utils import log_conversion, log_subprocess_output from .utils import mediainfo_json, fsdecode import base64 from collections import namedtuple try: from StringIO import StringIO except: from io import StringIO from io import BytesIO try: from itertools import izip except: izip = zip from .utils import ( _fd_or_path_or_tempfile, db_to_float, ratio_to_db, get_encoder_name, get_array_type, audioop, ) from .exceptions import ( TooManyMissingFrames, InvalidDuration, InvalidID3TagVersion, InvalidTag, CouldntDecodeError, CouldntEncodeError, MissingAudioParameter, ) if sys.version_info >= (3, 0): basestring = str xrange = range StringIO = BytesIO class ClassPropertyDescriptor(object): def __init__(self, fget, fset=None): self.fget = fget self.fset = fset def __get__(self, obj, klass=None): if klass is None: klass = type(obj) return self.fget.__get__(obj, klass)() def __set__(self, obj, value): if not self.fset: raise AttributeError("can't set attribute") type_ = type(obj) return self.fset.__get__(obj, type_)(value) def setter(self, func): if not isinstance(func, (classmethod, staticmethod)): func = classmethod(func) self.fset = func return self def classproperty(func): if not isinstance(func, (classmethod, staticmethod)): func = classmethod(func) return ClassPropertyDescriptor(func) AUDIO_FILE_EXT_ALIASES = { "m4a": "mp4", "wave": "wav", } WavSubChunk = namedtuple('WavSubChunk', ['id', 'position', 'size']) WavData = namedtuple('WavData', ['audio_format', 'channels', 'sample_rate', 'bits_per_sample', 'raw_data']) def extract_wav_headers(data): # def search_subchunk(data, subchunk_id): pos = 12 # The size of the RIFF chunk descriptor subchunks = [] while pos + 8 <= len(data) and len(subchunks) < 10: subchunk_id = data[pos:pos + 4] subchunk_size = struct.unpack_from('<I', data[pos + 4:pos + 8])[0] subchunks.append(WavSubChunk(subchunk_id, pos, subchunk_size)) if subchunk_id == b'data': # 'data' is the last subchunk break pos += subchunk_size + 8 return subchunks def read_wav_audio(data, headers=None): if not headers: headers = extract_wav_headers(data) fmt = [x for x in headers if x.id == b'fmt '] if not fmt or fmt[0].size < 16: raise CouldntDecodeError("Couldn't find fmt header in wav data") fmt = fmt[0] pos = fmt.position + 8 audio_format = struct.unpack_from('<H', data[pos:pos + 2])[0] if audio_format != 1 and audio_format != 0xFFFE: raise CouldntDecodeError("Unknown audio format 0x%X in wav data" % audio_format) channels = struct.unpack_from('<H', data[pos + 2:pos + 4])[0] sample_rate = struct.unpack_from('<I', data[pos + 4:pos + 8])[0] bits_per_sample = struct.unpack_from('<H', data[pos + 14:pos + 16])[0] data_hdr = headers[-1] if data_hdr.id != b'data': raise CouldntDecodeError("Couldn't find data header in wav data") pos = data_hdr.position + 8 return WavData(audio_format, channels, sample_rate, bits_per_sample, data[pos:pos + data_hdr.size]) def fix_wav_headers(data): headers = extract_wav_headers(data) if not headers or headers[-1].id != b'data': return # TODO: Handle huge files in some other way if len(data) > 2**32: raise CouldntDecodeError("Unable to process >4GB files") # Set the file size in the RIFF chunk descriptor data[4:8] = struct.pack('<I', len(data) - 8) # Set the data size in the data subchunk pos = headers[-1].position data[pos + 4:pos + 8] = struct.pack('<I', len(data) - pos - 8) class 
AudioSegment(object): """ AudioSegments are *immutable* objects representing segments of audio that can be manipulated using python code. AudioSegments are slicable using milliseconds. for example: a = AudioSegment.from_mp3(mp3file) first_second = a[:1000] # get the first second of an mp3 slice = a[5000:10000] # get a slice from 5 to 10 seconds of an mp3 """ converter = get_encoder_name() # either ffmpeg or avconv # TODO: remove in 1.0 release # maintain backwards compatibility for ffmpeg attr (now called converter) @classproperty def ffmpeg(cls): return cls.converter @ffmpeg.setter def ffmpeg(cls, val): cls.converter = val DEFAULT_CODECS = { "ogg": "libvorbis" } def __init__(self, data=None, *args, **kwargs): self.sample_width = kwargs.pop("sample_width", None) self.frame_rate = kwargs.pop("frame_rate", None) self.channels = kwargs.pop("channels", None) audio_params = (self.sample_width, self.frame_rate, self.channels) if isinstance(data, array.array): try: data = data.tobytes() except: data = data.tostring() # prevent partial specification of arguments if any(audio_params) and None in audio_params: raise MissingAudioParameter("Either all audio parameters or no parameter must be specified") # all arguments are given elif self.sample_width is not None: if len(data) % (self.sample_width * self.channels) != 0: raise ValueError("data length must be a multiple of '(sample_width * channels)'") self.frame_width = self.channels * self.sample_width self._data = data # keep support for 'metadata' until audio params are used everywhere elif kwargs.get('metadata', False): # internal use only self._data = data for attr, val in kwargs.pop('metadata').items(): setattr(self, attr, val) else: # normal construction try: data = data if isinstance(data, (basestring, bytes)) else data.read() except(OSError): d = b'' reader = data.read(2 ** 31 - 1) while reader: d += reader reader = data.read(2 ** 31 - 1) data = d wav_data = read_wav_audio(data) if not wav_data: raise CouldntDecodeError("Couldn't read wav audio from data") self.channels = wav_data.channels self.sample_width = wav_data.bits_per_sample // 8 self.frame_rate = wav_data.sample_rate self.frame_width = self.channels * self.sample_width self._data = wav_data.raw_data if self.sample_width == 1: # convert from unsigned integers in wav self._data = audioop.bias(self._data, 1, -128) # Convert 24-bit audio to 32-bit audio. # (stdlib audioop and array modules do not support 24-bit data) if self.sample_width == 3: byte_buffer = BytesIO() # Workaround for python 2 vs python 3. _data in 2.x are length-1 strings, # And in 3.x are ints. pack_fmt = 'BBB' if isinstance(self._data[0], int) else 'ccc' # This conversion maintains the 24 bit values. The values are # not scaled up to the 32 bit range. Other conversions could be # implemented. 
i = iter(self._data) padding = {False: b'\x00', True: b'\xFF'} for b0, b1, b2 in izip(i, i, i): byte_buffer.write(padding[b2 > b'\x7f'[0]]) old_bytes = struct.pack(pack_fmt, b0, b1, b2) byte_buffer.write(old_bytes) self._data = byte_buffer.getvalue() self.sample_width = 4 self.frame_width = self.channels * self.sample_width super(AudioSegment, self).__init__(*args, **kwargs) @property def raw_data(self): """ public access to the raw audio data as a bytestring """ return self._data def get_array_of_samples(self, array_type_override=None): """ returns the raw_data as an array of samples """ if array_type_override is None: array_type_override = self.array_type return array.array(array_type_override, self._data) @property def array_type(self): return get_array_type(self.sample_width * 8) def __len__(self): """ returns the length of this audio segment in milliseconds """ return round(1000 * (self.frame_count() / self.frame_rate)) def __eq__(self, other): try: return self._data == other._data except: return False def __hash__(self): return hash(AudioSegment) ^ hash((self.channels, self.frame_rate, self.sample_width, self._data)) def __ne__(self, other): return not (self == other) def __iter__(self): return (self[i] for i in xrange(len(self))) def __getitem__(self, millisecond): if isinstance(millisecond, slice): if millisecond.step: return ( self[i:i + millisecond.step] for i in xrange(*millisecond.indices(len(self))) ) start = millisecond.start if millisecond.start is not None else 0 end = millisecond.stop if millisecond.stop is not None \ else len(self) start = min(start, len(self)) end = min(end, len(self)) else: start = millisecond end = millisecond + 1 start = self._parse_position(start) * self.frame_width end = self._parse_position(end) * self.frame_width data = self._data[start:end] # ensure the output is as long as the requester is expecting expected_length = end - start missing_frames = (expected_length - len(data)) // self.frame_width if missing_frames: if missing_frames > self.frame_count(ms=2): raise TooManyMissingFrames( "You should never be filling in " " more than 2 ms with silence here, " "missing frames: %s" % missing_frames) silence = audioop.mul(data[:self.frame_width], self.sample_width, 0) data += (silence * missing_frames) return self._spawn(data) def get_sample_slice(self, start_sample=None, end_sample=None): """ Get a section of the audio segment by sample index. NOTE: Negative indices do *not* address samples backword from the end of the audio segment like a python list. This is intentional. """ max_val = int(self.frame_count()) def bounded(val, default): if val is None: return default if val < 0: return 0 if val > max_val: return max_val return val start_i = bounded(start_sample, 0) * self.frame_width end_i = bounded(end_sample, max_val) * self.frame_width data = self._data[start_i:end_i] return self._spawn(data) def __add__(self, arg): if isinstance(arg, AudioSegment): return self.append(arg, crossfade=0) else: return self.apply_gain(arg) def __radd__(self, rarg): """ Permit use of sum() builtin with an iterable of AudioSegments """ if rarg == 0: return self raise TypeError("Gains must be the second addend after the " "AudioSegment") def __sub__(self, arg): if isinstance(arg, AudioSegment): raise TypeError("AudioSegment objects can't be subtracted from " "each other") else: return self.apply_gain(-arg) def __mul__(self, arg): """ If the argument is an AudioSegment, overlay the multiplied audio segment. 
If it's a number, just use the string multiply operation to repeat the audio. The following would return an AudioSegment that contains the audio of audio_seg eight times `audio_seg * 8` """ if isinstance(arg, AudioSegment): return self.overlay(arg, position=0, loop=True) else: return self._spawn(data=self._data * arg) def _spawn(self, data, overrides={}): """ Creates a new audio segment using the metadata from the current one and the data passed in. Should be used whenever an AudioSegment is being returned by an operation that would alters the current one, since AudioSegment objects are immutable. """ # accept lists of data chunks if isinstance(data, list): data = b''.join(data) if isinstance(data, array.array): try: data = data.tobytes() except: data = data.tostring() # accept file-like objects if hasattr(data, 'read'): if hasattr(data, 'seek'): data.seek(0) data = data.read() metadata = { 'sample_width': self.sample_width, 'frame_rate': self.frame_rate, 'frame_width': self.frame_width, 'channels': self.channels } metadata.update(overrides) return self.__class__(data=data, metadata=metadata) @classmethod def _sync(cls, *segs): channels = max(seg.channels for seg in segs) frame_rate = max(seg.frame_rate for seg in segs) sample_width = max(seg.sample_width for seg in segs) return tuple( seg.set_channels(channels).set_frame_rate(frame_rate).set_sample_width(sample_width) for seg in segs ) def _parse_position(self, val): if val < 0: val = len(self) - abs(val) val = self.frame_count(ms=len(self)) if val == float("inf") else \ self.frame_count(ms=val) return int(val) @classmethod def empty(cls): return cls(b'', metadata={ "channels": 1, "sample_width": 1, "frame_rate": 1, "frame_width": 1 }) @classmethod def silent(cls, duration=1000, frame_rate=11025): """ Generate a silent audio segment. duration specified in milliseconds (default duration: 1000ms, default frame_rate: 11025). 
""" frames = int(frame_rate * (duration / 1000.0)) data = b"\0\0" * frames return cls(data, metadata={"channels": 1, "sample_width": 2, "frame_rate": frame_rate, "frame_width": 2}) @classmethod def from_mono_audiosegments(cls, *mono_segments): if not len(mono_segments): raise ValueError("At least one AudioSegment instance is required") segs = cls._sync(*mono_segments) if segs[0].channels != 1: raise ValueError( "AudioSegment.from_mono_audiosegments requires all arguments are mono AudioSegment instances") channels = len(segs) sample_width = segs[0].sample_width frame_rate = segs[0].frame_rate frame_count = max(int(seg.frame_count()) for seg in segs) data = array.array( segs[0].array_type, b'\0' * (frame_count * sample_width * channels) ) for i, seg in enumerate(segs): data[i::channels] = seg.get_array_of_samples() return cls( data, channels=channels, sample_width=sample_width, frame_rate=frame_rate, ) @classmethod def from_file_using_temporary_files(cls, file, format=None, codec=None, parameters=None, **kwargs): orig_file = file file, close_file = _fd_or_path_or_tempfile(file, 'rb', tempfile=False) if format: format = format.lower() format = AUDIO_FILE_EXT_ALIASES.get(format, format) def is_format(f): f = f.lower() if format == f: return True if isinstance(orig_file, basestring): return orig_file.lower().endswith(".{0}".format(f)) if isinstance(orig_file, bytes): return orig_file.lower().endswith((".{0}".format(f)).encode('utf8')) return False if is_format("wav"): try: obj = cls._from_safe_wav(file) if close_file: file.close() return obj except: file.seek(0) elif is_format("raw") or is_format("pcm"): sample_width = kwargs['sample_width'] frame_rate = kwargs['frame_rate'] channels = kwargs['channels'] metadata = { 'sample_width': sample_width, 'frame_rate': frame_rate, 'channels': channels, 'frame_width': channels * sample_width } obj = cls(data=file.read(), metadata=metadata) if close_file: file.close() return obj input_file = NamedTemporaryFile(mode='wb', delete=False) try: input_file.write(file.read()) except(OSError): input_file.flush() input_file.close() input_file = NamedTemporaryFile(mode='wb', delete=False, buffering=2 ** 31 - 1) if close_file: file.close() close_file = True file = open(orig_file, buffering=2 ** 13 - 1, mode='rb') reader = file.read(2 ** 31 - 1) while reader: input_file.write(reader) reader = file.read(2 ** 31 - 1) input_file.flush() if close_file: file.close() output = NamedTemporaryFile(mode="rb", delete=False) conversion_command = [cls.converter, '-y', # always overwrite existing files ] # If format is not defined # ffmpeg/avconv will detect it automatically if format: conversion_command += ["-f", format] if codec: # force audio decoder conversion_command += ["-acodec", codec] conversion_command += [ "-i", input_file.name, # input_file options (filename last) "-vn", # Drop any video streams if there are any "-f", "wav", # output options (filename last) output.name ] if parameters is not None: # extend arguments with arbitrary set conversion_command.extend(parameters) log_conversion(conversion_command) with open(os.devnull, 'rb') as devnull: p = subprocess.Popen(conversion_command, stdin=devnull, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags=0x08000000) p_out, p_err = p.communicate() log_subprocess_output(p_out) log_subprocess_output(p_err) try: if p.returncode != 0: raise CouldntDecodeError( "Decoding failed. 
ffmpeg returned error code: {0}\n\nOutput from ffmpeg/avlib:\n\n{1}".format( p.returncode, p_err.decode(errors='ignore') )) obj = cls._from_safe_wav(output) finally: input_file.close() output.close() os.unlink(input_file.name) os.unlink(output.name) return obj @classmethod def from_file(cls, file, format=None, codec=None, parameters=None, **kwargs): orig_file = file try: filename = fsdecode(file) except TypeError: filename = None file, close_file = _fd_or_path_or_tempfile(file, 'rb', tempfile=False) if format: format = format.lower() format = AUDIO_FILE_EXT_ALIASES.get(format, format) def is_format(f): f = f.lower() if format == f: return True if filename: return filename.lower().endswith(".{0}".format(f)) return False if is_format("wav"): try: return cls._from_safe_wav(file) except: file.seek(0) elif is_format("raw") or is_format("pcm"): sample_width = kwargs['sample_width'] frame_rate = kwargs['frame_rate'] channels = kwargs['channels'] metadata = { 'sample_width': sample_width, 'frame_rate': frame_rate, 'channels': channels, 'frame_width': channels * sample_width } return cls(data=file.read(), metadata=metadata) conversion_command = [cls.converter, '-y', # always overwrite existing files ] # If format is not defined # ffmpeg/avconv will detect it automatically if format: conversion_command += ["-f", format] if codec: # force audio decoder conversion_command += ["-acodec", codec] read_ahead_limit = kwargs.get('read_ahead_limit', -1) if filename: conversion_command += ["-i", filename] stdin_parameter = subprocess.DEVNULL stdin_data = None else: if cls.converter == 'ffmpeg': conversion_command += ["-read_ahead_limit", str(read_ahead_limit), "-i", "cache:pipe:0"] else: conversion_command += ["-i", "-"] stdin_parameter = subprocess.PIPE stdin_data = file.read() if codec: info = None else: info = mediainfo_json(orig_file, read_ahead_limit=read_ahead_limit) if info: audio_streams = [x for x in info['streams'] if x['codec_type'] == 'audio'] # This is a workaround for some ffprobe versions that always say # that mp3/mp4/aac/webm/ogg files contain fltp samples audio_codec = audio_streams[0].get('codec_name') if (audio_streams[0].get('sample_fmt') == 'fltp' and audio_codec in ['mp3', 'mp4', 'aac', 'webm', 'ogg']): bits_per_sample = 16 else: bits_per_sample = audio_streams[0]['bits_per_sample'] if bits_per_sample == 8: acodec = 'pcm_u8' else: acodec = 'pcm_s%dle' % bits_per_sample conversion_command += ["-acodec", acodec] conversion_command += [ "-vn", # Drop any video streams if there are any "-f", "wav", # output options (filename last) "-" ] if parameters is not None: # extend arguments with arbitrary set conversion_command.extend(parameters) log_conversion(conversion_command) p = subprocess.Popen(conversion_command, stdin=stdin_parameter, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags=0x08000000) p_out, p_err = p.communicate(input=stdin_data) if p.returncode != 0 or len(p_out) == 0: if close_file: file.close() raise CouldntDecodeError( "Decoding failed. 
ffmpeg returned error code: {0}\n\nOutput from ffmpeg/avlib:\n\n{1}".format( p.returncode, p_err.decode(errors='ignore') )) p_out = bytearray(p_out) fix_wav_headers(p_out) obj = cls._from_safe_wav(BytesIO(p_out)) if close_file: file.close() return obj @classmethod def from_mp3(cls, file, parameters=None): return cls.from_file(file, 'mp3', parameters=parameters) @classmethod def from_flv(cls, file, parameters=None): return cls.from_file(file, 'flv', parameters=parameters) @classmethod def from_ogg(cls, file, parameters=None): return cls.from_file(file, 'ogg', parameters=parameters) @classmethod def from_wav(cls, file, parameters=None): return cls.from_file(file, 'wav', parameters=parameters) @classmethod def from_raw(cls, file, **kwargs): return cls.from_file(file, 'raw', sample_width=kwargs['sample_width'], frame_rate=kwargs['frame_rate'], channels=kwargs['channels']) @classmethod def _from_safe_wav(cls, file): file, close_file = _fd_or_path_or_tempfile(file, 'rb', tempfile=False) file.seek(0) obj = cls(data=file) if close_file: file.close() return obj def export(self, out_f=None, format='mp3', codec=None, bitrate=None, parameters=None, tags=None, id3v2_version='4', cover=None): """ Export an AudioSegment to a file with given options out_f (string): Path to destination audio file. Also accepts os.PathLike objects on python >= 3.6 format (string) Format for destination audio file. ('mp3', 'wav', 'raw', 'ogg' or other ffmpeg/avconv supported files) codec (string) Codec used to encode the destination file. bitrate (string) Bitrate used when encoding destination file. (64, 92, 128, 256, 312k...) Each codec accepts different bitrate arguments so take a look at the ffmpeg documentation for details (bitrate usually shown as -b, -ba or -a:b). parameters (list of strings) Aditional ffmpeg/avconv parameters tags (dict) Set metadata information to destination files usually used as tags. ({title='Song Title', artist='Song Artist'}) id3v2_version (string) Set ID3v2 version for tags. (default: '4') cover (file) Set cover for audio file from image file. 
(png or jpg) """ id3v2_allowed_versions = ['3', '4'] if format == "raw" and (codec is not None or parameters is not None): raise AttributeError( 'Can not invoke ffmpeg when export format is "raw"; ' 'specify an ffmpeg raw format like format="s16le" instead ' 'or call export(format="raw") with no codec or parameters') out_f, _ = _fd_or_path_or_tempfile(out_f, 'wb+') out_f.seek(0) if format == "raw": out_f.write(self._data) out_f.seek(0) return out_f # wav with no ffmpeg parameters can just be written directly to out_f easy_wav = format == "wav" and codec is None and parameters is None if easy_wav: data = out_f else: data = NamedTemporaryFile(mode="wb", delete=False) pcm_for_wav = self._data if self.sample_width == 1: # convert to unsigned integers for wav pcm_for_wav = audioop.bias(self._data, 1, 128) wave_data = wave.open(data, 'wb') wave_data.setnchannels(self.channels) wave_data.setsampwidth(self.sample_width) wave_data.setframerate(self.frame_rate) # For some reason packing the wave header struct with # a float in python 2 doesn't throw an exception wave_data.setnframes(int(self.frame_count())) wave_data.writeframesraw(pcm_for_wav) wave_data.close() # for easy wav files, we're done (wav data is written directly to out_f) if easy_wav: return out_f output = NamedTemporaryFile(mode="w+b", delete=False) # build converter command to export conversion_command = [ self.converter, '-y', # always overwrite existing files "-f", "wav", "-i", data.name, # input options (filename last) ] if codec is None: codec = self.DEFAULT_CODECS.get(format, None) if cover is not None: if cover.lower().endswith(('.png', '.jpg', '.jpeg', '.bmp', '.tif', '.tiff')) and format == "mp3": conversion_command.extend(["-i", cover, "-map", "0", "-map", "1", "-c:v", "mjpeg"]) else: raise AttributeError( "Currently cover images are only supported by MP3 files. The allowed image formats are: .tif, .jpg, .bmp, .jpeg and .png.") if codec is not None: # force audio encoder conversion_command.extend(["-acodec", codec]) if bitrate is not None: conversion_command.extend(["-b:a", bitrate]) if parameters is not None: # extend arguments with arbitrary set conversion_command.extend(parameters) if tags is not None: if not isinstance(tags, dict): raise InvalidTag("Tags must be a dictionary.") else: # Extend converter command with tags # print(tags) for key, value in tags.items(): conversion_command.extend( ['-metadata', '{0}={1}'.format(key, value)]) if format == 'mp3': # set id3v2 tag version if id3v2_version not in id3v2_allowed_versions: raise InvalidID3TagVersion( "id3v2_version not allowed, allowed versions: %s" % id3v2_allowed_versions) conversion_command.extend([ "-id3v2_version", id3v2_version ]) if sys.platform == 'darwin' and codec == 'mp3': conversion_command.extend(["-write_xing", "0"]) conversion_command.extend([ "-f", format, output.name, # output options (filename last) ]) log_conversion(conversion_command) # read stdin / write stdout with open(os.devnull, 'rb') as devnull: p = subprocess.Popen(conversion_command, stdin=devnull, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags=0x08000000) p_out, p_err = p.communicate() log_subprocess_output(p_out) log_subprocess_output(p_err) if p.returncode != 0: raise CouldntEncodeError( "Encoding failed. 
ffmpeg/avlib returned error code: {0}\n\nCommand:{1}\n\nOutput from ffmpeg/avlib:\n\n{2}".format( p.returncode, conversion_command, p_err.decode(errors='ignore') )) output.seek(0) out_f.write(output.read()) data.close() output.close() os.unlink(data.name) os.unlink(output.name) out_f.seek(0) return out_f def get_frame(self, index): frame_start = index * self.frame_width frame_end = frame_start + self.frame_width return self._data[frame_start:frame_end] def frame_count(self, ms=None): """ returns the number of frames for the given number of milliseconds, or if not specified, the number of frames in the whole AudioSegment """ if ms is not None: return ms * (self.frame_rate / 1000.0) else: return float(len(self._data) // self.frame_width) def set_sample_width(self, sample_width): if sample_width == self.sample_width: return self frame_width = self.channels * sample_width return self._spawn( audioop.lin2lin(self._data, self.sample_width, sample_width), overrides={'sample_width': sample_width, 'frame_width': frame_width} ) def set_frame_rate(self, frame_rate): if frame_rate == self.frame_rate: return self if self._data: converted, _ = audioop.ratecv(self._data, self.sample_width, self.channels, self.frame_rate, frame_rate, None) else: converted = self._data return self._spawn(data=converted, overrides={'frame_rate': frame_rate}) def set_channels(self, channels): if channels == self.channels: return self if channels == 2 and self.channels == 1: fn = audioop.tostereo frame_width = self.frame_width * 2 fac = 1 converted = fn(self._data, self.sample_width, fac, fac) elif channels == 1 and self.channels == 2: fn = audioop.tomono frame_width = self.frame_width // 2 fac = 0.5 converted = fn(self._data, self.sample_width, fac, fac) elif channels == 1: channels_data = [seg.get_array_of_samples() for seg in self.split_to_mono()] frame_count = int(self.frame_count()) converted = array.array( channels_data[0].typecode, b'\0' * (frame_count * self.sample_width) ) for raw_channel_data in channels_data: for i in range(frame_count): converted[i] += raw_channel_data[i] // self.channels frame_width = self.frame_width // self.channels elif self.channels == 1: dup_channels = [self for iChannel in range(channels)] return AudioSegment.from_mono_audiosegments(*dup_channels) else: raise ValueError( "AudioSegment.set_channels only supports mono-to-multi channel and multi-to-mono channel conversion") return self._spawn(data=converted, overrides={ 'channels': channels, 'frame_width': frame_width}) def split_to_mono(self): if self.channels == 1: return [self] samples = self.get_array_of_samples() mono_channels = [] for i in range(self.channels): samples_for_current_channel = samples[i::self.channels] try: mono_data = samples_for_current_channel.tobytes() except AttributeError: mono_data = samples_for_current_channel.tostring() mono_channels.append( self._spawn(mono_data, overrides={"channels": 1, "frame_width": self.sample_width}) ) return mono_channels @property def rms(self): return audioop.rms(self._data, self.sample_width) @property def dBFS(self): rms = self.rms if not rms: return -float("infinity") return ratio_to_db(self.rms / self.max_possible_amplitude) @property def max(self): return audioop.max(self._data, self.sample_width) @property def max_possible_amplitude(self): bits = self.sample_width * 8 max_possible_val = (2 ** bits) # since half is above 0 and half is below the max amplitude is divided return max_possible_val / 2 @property def max_dBFS(self): return ratio_to_db(self.max, self.max_possible_amplitude) 
@property def duration_seconds(self): return self.frame_rate and self.frame_count() / self.frame_rate or 0.0 def get_dc_offset(self, channel=1): """ Returns a value between -1.0 and 1.0 representing the DC offset of a channel (1 for left, 2 for right). """ if not 1 <= channel <= 2: raise ValueError("channel value must be 1 (left) or 2 (right)") if self.channels == 1: data = self._data elif channel == 1: data = audioop.tomono(self._data, self.sample_width, 1, 0) else: data = audioop.tomono(self._data, self.sample_width, 0, 1) return float(audioop.avg(data, self.sample_width)) / self.max_possible_amplitude def remove_dc_offset(self, channel=None, offset=None): """ Removes DC offset of given channel. Calculates offset if it's not given. Offset values must be in range -1.0 to 1.0. If channel is None, removes DC offset from all available channels. """ if channel and not 1 <= channel <= 2: raise ValueError("channel value must be None, 1 (left) or 2 (right)") if offset and not -1.0 <= offset <= 1.0: raise ValueError("offset value must be in range -1.0 to 1.0") if offset: offset = int(round(offset * self.max_possible_amplitude)) def remove_data_dc(data, off): if not off: off = audioop.avg(data, self.sample_width) return audioop.bias(data, self.sample_width, -off) if self.channels == 1: return self._spawn(data=remove_data_dc(self._data, offset)) left_channel = audioop.tomono(self._data, self.sample_width, 1, 0) right_channel = audioop.tomono(self._data, self.sample_width, 0, 1) if not channel or channel == 1: left_channel = remove_data_dc(left_channel, offset) if not channel or channel == 2: right_channel = remove_data_dc(right_channel, offset) left_channel = audioop.tostereo(left_channel, self.sample_width, 1, 0) right_channel = audioop.tostereo(right_channel, self.sample_width, 0, 1) return self._spawn(data=audioop.add(left_channel, right_channel, self.sample_width)) def apply_gain(self, volume_change): return self._spawn(data=audioop.mul(self._data, self.sample_width, db_to_float(float(volume_change)))) def overlay(self, seg, position=0, loop=False, times=None, gain_during_overlay=None): """ Overlay the provided segment on to this segment starting at the specificed position and using the specfied looping beahvior. seg (AudioSegment): The audio segment to overlay on to this one. position (optional int): The position to start overlaying the provided segment in to this one. loop (optional bool): Loop seg as many times as necessary to match this segment's length. Overrides loops param. times (optional int): Loop seg the specified number of times or until it matches this segment's length. 1 means once, 2 means twice, ... 0 would make the call a no-op gain_during_overlay (optional int): Changes this segment's volume by the specified amount during the duration of time that seg is overlaid on top of it. When negative, this has the effect of 'ducking' the audio under the overlay. """ if loop: # match loop=True's behavior with new times (count) mechinism. 
times = -1 elif times is None: # no times specified, just once through times = 1 elif times == 0: # it's a no-op, make a copy since we never mutate return self._spawn(self._data) output = StringIO() seg1, seg2 = AudioSegment._sync(self, seg) sample_width = seg1.sample_width spawn = seg1._spawn output.write(seg1[:position]._data) # drop down to the raw data seg1 = seg1[position:]._data seg2 = seg2._data pos = 0 seg1_len = len(seg1) seg2_len = len(seg2) while times: remaining = max(0, seg1_len - pos) if seg2_len >= remaining: seg2 = seg2[:remaining] seg2_len = remaining # we've hit the end, we're done looping (if we were) and this # is our last go-around times = 1 if gain_during_overlay: seg1_overlaid = seg1[pos:pos + seg2_len] seg1_adjusted_gain = audioop.mul(seg1_overlaid, self.sample_width, db_to_float(float(gain_during_overlay))) output.write(audioop.add(seg1_adjusted_gain, seg2, sample_width)) else: output.write(audioop.add(seg1[pos:pos + seg2_len], seg2, sample_width)) pos += seg2_len # dec times to break our while loop (eventually) times -= 1 output.write(seg1[pos:]) return spawn(data=output) def append(self, seg, crossfade=100): seg1, seg2 = AudioSegment._sync(self, seg) if not crossfade: return seg1._spawn(seg1._data + seg2._data) elif crossfade > len(self): raise ValueError("Crossfade is longer than the original AudioSegment ({}ms > {}ms)".format( crossfade, len(self) )) elif crossfade > len(seg): raise ValueError("Crossfade is longer than the appended AudioSegment ({}ms > {}ms)".format( crossfade, len(seg) )) xf = seg1[-crossfade:].fade(to_gain=-120, start=0, end=float('inf')) xf *= seg2[:crossfade].fade(from_gain=-120, start=0, end=float('inf')) output = TemporaryFile() output.write(seg1[:-crossfade]._data) output.write(xf._data) output.write(seg2[crossfade:]._data) output.seek(0) obj = seg1._spawn(data=output) output.close() return obj def fade(self, to_gain=0, from_gain=0, start=None, end=None, duration=None): """ Fade the volume of this audio segment. 
to_gain (float): resulting volume_change in db start (int): default = beginning of the segment when in this segment to start fading in milliseconds end (int): default = end of the segment when in this segment to start fading in milliseconds duration (int): default = until the end of the audio segment the duration of the fade """ if None not in [duration, end, start]: raise TypeError('Only two of the three arguments, "start", ' '"end", and "duration" may be specified') # no fade == the same audio if to_gain == 0 and from_gain == 0: return self start = min(len(self), start) if start is not None else None end = min(len(self), end) if end is not None else None if start is not None and start < 0: start += len(self) if end is not None and end < 0: end += len(self) if duration is not None and duration < 0: raise InvalidDuration("duration must be a positive integer") if duration: if start is not None: end = start + duration elif end is not None: start = end - duration else: duration = end - start from_power = db_to_float(from_gain) output = [] # original data - up until the crossfade portion, as is before_fade = self[:start]._data if from_gain != 0: before_fade = audioop.mul(before_fade, self.sample_width, from_power) output.append(before_fade) gain_delta = db_to_float(to_gain) - from_power # fades longer than 100ms can use coarse fading (one gain step per ms), # shorter fades will have audible clicks so they use precise fading # (one gain step per sample) if duration > 100: scale_step = gain_delta / duration for i in range(duration): volume_change = from_power + (scale_step * i) chunk = self[start + i] chunk = audioop.mul(chunk._data, self.sample_width, volume_change) output.append(chunk) else: start_frame = self.frame_count(ms=start) end_frame = self.frame_count(ms=end) fade_frames = end_frame - start_frame scale_step = gain_delta / fade_frames for i in range(int(fade_frames)): volume_change = from_power + (scale_step * i) sample = self.get_frame(int(start_frame + i)) sample = audioop.mul(sample, self.sample_width, volume_change) output.append(sample) # original data after the crossfade portion, at the new volume after_fade = self[end:]._data if to_gain != 0: after_fade = audioop.mul(after_fade, self.sample_width, db_to_float(to_gain)) output.append(after_fade) return self._spawn(data=output) def fade_out(self, duration): return self.fade(to_gain=-120, duration=duration, end=float('inf')) def fade_in(self, duration): return self.fade(from_gain=-120, duration=duration, start=0) def reverse(self): return self._spawn( data=audioop.reverse(self._data, self.sample_width) ) def _repr_html_(self): src = """ <audio controls> <source src="data:audio/mpeg;base64,{base64}" type="audio/mpeg"/> Your browser does not support the audio element. </audio> """ fh = self.export() data = base64.b64encode(fh.read()).decode('ascii') return src.format(base64=data) from . import effects
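# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the original module: the basic
# AudioSegment workflow defined above - load, slice by milliseconds, adjust
# gain, append with a crossfade, export. "input.wav" and "output.mp3" are
# placeholder paths; ffmpeg or avconv must be on PATH for the mp3 export.
if __name__ == "__main__":
    song = AudioSegment.from_wav("input.wav")
    intro = song[:5000]                      # first five seconds
    rest = (song[5000:] - 6).fade_in(250)    # 6 dB quieter, with a short fade-in
    combined = intro.append(rest, crossfade=100)
    combined.export("output.mp3", format="mp3", bitrate="192k")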
py
1a40743dd454101291a953687eee6dab7ce7d515
""" The :mod:`sklearn.metrics.cluster` submodule contains evaluation metrics for cluster analysis results. There are two forms of evaluation: - supervised, which uses a ground truth class values for each sample. - unsupervised, which does not and measures the 'quality' of the model itself. """ from .supervised import adjusted_mutual_info_score from .supervised import normalized_mutual_info_score from .supervised import adjusted_rand_score from .supervised import completeness_score from .supervised import contingency_matrix from .supervised import expected_mutual_information from .supervised import homogeneity_completeness_v_measure from .supervised import homogeneity_score from .supervised import mutual_info_score from .supervised import v_measure_score from .supervised import fowlkes_mallows_score from .supervised import entropy from .unsupervised import silhouette_samples from .unsupervised import silhouette_score from .unsupervised import calinski_harabasz_score from .unsupervised import calinski_harabaz_score from .unsupervised import davies_bouldin_score from .bicluster import consensus_score __all__ = ["adjusted_mutual_info_score", "normalized_mutual_info_score", "adjusted_rand_score", "completeness_score", "contingency_matrix", "expected_mutual_information", "homogeneity_completeness_v_measure", "homogeneity_score", "mutual_info_score", "v_measure_score", "fowlkes_mallows_score", "entropy", "silhouette_samples", "silhouette_score", "calinski_harabaz_score", "calinski_harabasz_score", "davies_bouldin_score", "consensus_score"]
py
1a4074aa58e6718bbf214ad9bc7032b60afec597
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
test_luda
----------------------------------

Tests for `luda` module.
"""

import pytest

# from contextlib import contextmanager
from click.testing import CliRunner

# from luda import luda
from luda import cli


@pytest.fixture
def response():
    """Sample pytest fixture.

    See more at: http://doc.pytest.org/en/latest/fixture.html
    """
    # import requests
    # return requests.get('https://github.com/audreyr/cookiecutter-pypackage')


def test_content(response):
    """Sample pytest test function with the pytest fixture as an argument."""
    # from bs4 import BeautifulSoup
    # assert 'GitHub' in BeautifulSoup(response.content).title.string


def test_command_line_interface():
    runner = CliRunner()
    result = runner.invoke(cli.main)
    assert result.exit_code == 0
    assert 'luda.cli.main' in result.output
    help_result = runner.invoke(cli.main, ['--help'])
    assert help_result.exit_code == 0
    assert '--help Show this message and exit.' in help_result.output
py
1a407520f8d9e317e43b8a8bb0eda87ffb1bbd0b
#!/usr/bin/env python3

import ipaddress

net6 = ipaddress.ip_network('12:3456:78:90ab:cd:ef01:23:30/125')

for ip in net6:
    print(ip)
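A hedged addendum to the script above: a /125 prefix leaves three host bits, so the loop prints the eight addresses 12:3456:78:90ab:cd:ef01:23:30 through 12:3456:78:90ab:cd:ef01:23:37. The same count is available directly:

print(net6.num_addresses)  # 8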
py
1a40757ce05f29fb3e28a5aaf5e2790b43e699c0
#! /usr/bin/env python3

# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from botorch.acquisition.acquisition import (
    AcquisitionFunction,
    OneShotAcquisitionFunction,
)
from botorch.utils.testing import BotorchTestCase


class TestAcquisitionFunction(BotorchTestCase):
    def test_abstract_raises(self):
        with self.assertRaises(TypeError):
            AcquisitionFunction()


class TestOneShotAcquisitionFunction(BotorchTestCase):
    def test_abstract_raises(self):
        with self.assertRaises(TypeError):
            OneShotAcquisitionFunction()
bzl
1a4075986f7e6abda4537594bf5b3394fb450c07
load("@rules_pkg//:pkg.bzl", "pkg_zip") def copy_file(name, src, out): native.genrule( name = name, srcs = [src], outs = [out], cmd = "cp $< $@" ) def pkg_asset(name, srcs = [], **kwargs): """Package MediaPipe assets This task renames asset files so that they can be added to an AssetBundle (e.g. x.tflte -> x.bytes) and zip them. Args: name: the name of the output zip file srcs: files to be packaged """ rename_target = "normalize_%s_exts" % name _normalize_exts(name = rename_target, srcs = srcs) pkg_zip( name = name, srcs = [":" + rename_target], **kwargs, ) def _normalize_exts_impl(ctx): output_files = [] for src in ctx.files.srcs: ext = "bytes" if src.extension in ctx.attr.bytes_exts else ("txt" if src.extension in ctx.attr.txt_exts else src.extension) if ext == src.extension: output_files.append(src) else: dest = ctx.actions.declare_file(src.path[:-1 * len(src.extension)] + ext) ctx.actions.run_shell( inputs = [src], outputs = [dest], arguments = [src.path, dest.path], command = "test $1 != $2 && cp $1 $2", progress_message = "Copying {} to {}...".format(src.path, dest.path), ) output_files.append(dest) return [ DefaultInfo(files = depset(output_files)), ] _normalize_exts = rule( implementation = _normalize_exts_impl, attrs = { "srcs": attr.label_list(allow_files = True), "bytes_exts": attr.string_list(default = ["jpg", "png", "tflite", "uuu"]), "txt_exts": attr.string_list(default = ["pbtxt"]), }, )
py
1a4075b92daa3e48737ca505c50aaadc9215714f
from celery import shared_task
from checkerapp.models import AlertPlugin
from checkerapp.models import AlertSent
from django.db import models
from django.db.models import Q

from .telegrambot import send_alert


class TelegramAlertPlugin(AlertPlugin):
    url = "accounts:telegram_plugin:telegram_pluginview"
    telegram_id = models.CharField(max_length=50)


@shared_task
def send_alert_task(task_obj):
    check_obj = task_obj["base_check_obj"]
    message = str(check_obj.content_object) + " is down"
    users = list(check_obj.service_set.first().users.all())
    for user in users:
        telegram_user_obj = TelegramAlertPlugin.objects.filter(
            Q(alert_receiver=user) & Q(active_status=True)
        ).first()
        if not telegram_user_obj:
            print("Inactive")
            break
        send_alert(message, telegram_user_obj)
        AlertSent.objects.create(check_obj=check_obj)
    return "Success !"
py
1a4075e4236d60f7aba93489b1c33a5741aa7965
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- import os import tempfile import unittest import mock from msrestazure.azure_exceptions import CloudError from azure.cli.core.util import CLIError from azure.cli.core.commands.validators import DefaultStr from azure.cli.core.keys import is_valid_ssh_rsa_public_key from azure.cli.command_modules.vm._validators import (validate_ssh_key, _figure_out_storage_source, _validate_admin_username, _validate_admin_password, _parse_image_argument, process_disk_or_snapshot_create_namespace, _validate_vmss_create_subnet, _get_next_subnet_addr_suffix, _validate_vm_vmss_msi) class TestActions(unittest.TestCase): def test_generate_specfied_ssh_key_files(self): _, private_key_file = tempfile.mkstemp() public_key_file = private_key_file + '.pub' args = mock.MagicMock() args.ssh_key_value = public_key_file args.generate_ssh_keys = True # 1 verify we generate key files if not existing validate_ssh_key(args) generated_public_key_string = args.ssh_key_value self.assertTrue(bool(args.ssh_key_value)) self.assertTrue(is_valid_ssh_rsa_public_key(generated_public_key_string)) self.assertTrue(os.path.isfile(private_key_file)) # 2 verify we load existing key files # for convinience we will reuse the generated file in the previous step args2 = mock.MagicMock() args2.ssh_key_value = generated_public_key_string args2.generate_ssh_keys = False validate_ssh_key(args2) # we didn't regenerate self.assertEqual(generated_public_key_string, args.ssh_key_value) # 3 verify we do not generate unless told so _, private_key_file2 = tempfile.mkstemp() public_key_file2 = private_key_file2 + '.pub' args3 = mock.MagicMock() args3.ssh_key_value = public_key_file2 args3.generate_ssh_keys = False with self.assertRaises(CLIError): validate_ssh_key(args3) # 4 verify file naming if the pub file doesn't end with .pub _, public_key_file4 = tempfile.mkstemp() public_key_file4 += '1' # make it nonexisting args4 = mock.MagicMock() args4.ssh_key_value = public_key_file4 args4.generate_ssh_keys = True validate_ssh_key(args4) self.assertTrue(os.path.isfile(public_key_file4 + '.private')) self.assertTrue(os.path.isfile(public_key_file4)) def test_figure_out_storage_source(self): test_data = 'https://av123images.blob.core.windows.net/images/TDAZBET.vhd' src_blob_uri, src_disk, src_snapshot = _figure_out_storage_source('tg1', test_data) self.assertFalse(src_disk) self.assertFalse(src_snapshot) self.assertEqual(src_blob_uri, test_data) def test_source_storage_account_err_case(self): np = mock.MagicMock() np.source_storage_account_id = '/subscriptions/123/resourceGroups/ygsrc/providers/Microsoft.Storage/storageAccounts/s123' np.source = '/subscriptions/123/resourceGroups/yugangw/providers/Microsoft.Compute/disks/d2' # action (should throw) with self.assertRaises(CLIError): process_disk_or_snapshot_create_namespace(np) # with blob uri, should be fine np.source = 'https://s1.blob.core.windows.net/vhds/s1.vhd' process_disk_or_snapshot_create_namespace(np) def test_validate_admin_username_linux(self): # pylint: disable=line-too-long err_invalid_char = r'admin user name cannot contain upper case character A-Z, special characters \/"[]:|<>+=;,?*@#()! 
or start with $ or -' self._verify_username_with_ex('!@#', 'linux', err_invalid_char) self._verify_username_with_ex('gue[', 'linux', err_invalid_char) self._verify_username_with_ex('Aguest', 'linux', err_invalid_char) self._verify_username_with_ex('-gguest', 'linux', err_invalid_char) self._verify_username_with_ex('', 'linux', 'admin user name can not be empty') self._verify_username_with_ex('guest', 'linux', "This user name 'guest' meets the general requirements, but is specifically disallowed for this image. Please try a different value.") _validate_admin_username('g-uest1', 'linux') _validate_admin_username('guest1', 'linux') _validate_admin_username('guest1.', 'linux') def test_validate_admin_username_windows(self): # pylint: disable=line-too-long err_invalid_char = r'admin user name cannot contain special characters \/"[]:|<>+=;,?*@# or ends with .' self._verify_username_with_ex('!@#', 'windows', err_invalid_char) self._verify_username_with_ex('gue[', 'windows', err_invalid_char) self._verify_username_with_ex('dddivid.', 'windows', err_invalid_char) self._verify_username_with_ex('backup', 'windows', "This user name 'backup' meets the general requirements, but is specifically disallowed for this image. Please try a different value.") _validate_admin_username('AGUEST', 'windows') _validate_admin_username('g-uest1', 'windows') _validate_admin_username('guest1', 'windows') def test_validate_admin_password_linux(self): # pylint: disable=line-too-long err_length = 'The password length must be between 12 and 72' err_variety = 'Password must have the 3 of the following: 1 lower case character, 1 upper case character, 1 number and 1 special character' self._verify_password_with_ex('te', 'linux', err_length) self._verify_password_with_ex('P12' + '3' * 70, 'linux', err_length) self._verify_password_with_ex('te12312312321', 'linux', err_variety) _validate_admin_password('Password22345', 'linux') _validate_admin_password('Password12!@#', 'linux') def test_validate_admin_password_windows(self): # pylint: disable=line-too-long err_length = 'The password length must be between 12 and 123' err_variety = 'Password must have the 3 of the following: 1 lower case character, 1 upper case character, 1 number and 1 special character' self._verify_password_with_ex('P1', 'windows', err_length) self._verify_password_with_ex('te14' + '3' * 120, 'windows', err_length) self._verify_password_with_ex('te12345678997', 'windows', err_variety) _validate_admin_password('Password22!!!', 'windows') _validate_admin_password('Pas' + '1' * 70, 'windows') def _verify_username_with_ex(self, admin_username, is_linux, expected_err): with self.assertRaises(CLIError) as context: _validate_admin_username(admin_username, is_linux) self.assertTrue(expected_err in str(context.exception)) def _verify_password_with_ex(self, admin_password, is_linux, expected_err): with self.assertRaises(CLIError) as context: _validate_admin_password(admin_password, is_linux) self.assertTrue(expected_err in str(context.exception)) @mock.patch('azure.cli.command_modules.vm._validators._compute_client_factory', autospec=True) def test_parse_image_argument(self, client_factory_mock): compute_client = mock.MagicMock() image = mock.MagicMock() image.plan.name = 'plan1' image.plan.product = 'product1' image.plan.publisher = 'publisher1' compute_client.virtual_machine_images.get.return_value = image client_factory_mock.return_value = compute_client np = mock.MagicMock() np.location = 'some region' np.plan_name, np.plan_publisher, np.plan_product = '', '', '' 
np.image = 'publisher1:offer1:sku1:1.0.0' # action _parse_image_argument(np) # assert self.assertEqual('plan1', np.plan_name) self.assertEqual('product1', np.plan_product) self.assertEqual('publisher1', np.plan_publisher) @mock.patch('azure.cli.command_modules.vm._validators._compute_client_factory', autospec=True) @mock.patch('azure.cli.command_modules.vm._validators.logger.warning', autospec=True) def test_parse_staging_image_argument(self, logger_mock, client_factory_mock): compute_client = mock.MagicMock() resp = mock.MagicMock() resp.status_code = 404 resp.text = '{"Message": "Not Found"}' compute_client.virtual_machine_images.get.side_effect = CloudError(resp, error='image not found') client_factory_mock.return_value = compute_client np = mock.MagicMock() np.location = 'some region' np.image = 'publisher1:offer1:sku1:1.0.0' np.plan_name, np.plan_publisher, np.plan_product = '', '', '' # action _parse_image_argument(np) # assert logger_mock.assert_called_with("Querying the image of '%s' failed for an error '%s'. " "Configuring plan settings will be skipped", 'publisher1:offer1:sku1:1.0.0', 'image not found') def test_get_next_subnet_addr_suffix(self): result = _get_next_subnet_addr_suffix('10.0.0.0/16', '10.0.0.0/24', 24) self.assertEqual(result, '10.0.1.0/24') # for 254~510 instances VMSS result = _get_next_subnet_addr_suffix('10.0.0.0/16', '10.0.0.0/23', 24) self.assertEqual(result, '10.0.2.0/24') # +1 overflows, so we go with -1 result = _get_next_subnet_addr_suffix('12.0.0.0/16', '12.0.255.0/24', 24) self.assertEqual(result, '12.0.254.0/24') # handle carry bits to the next section result = _get_next_subnet_addr_suffix('12.0.0.0/15', '12.0.255.0/24', 24) self.assertEqual(result, '12.1.0.0/24') # error cases with self.assertRaises(CLIError): _get_next_subnet_addr_suffix('12.0.0.0/16', '12.0.255.0/15', 24) with self.assertRaises(CLIError): _get_next_subnet_addr_suffix('12.0.0.0/16', '12.1.0.0/16', 24) with self.assertRaises(CLIError): _get_next_subnet_addr_suffix('12.0.0.0/22', '12.0.0.0/22', 24) # verify end to end np_mock = mock.MagicMock() np_mock.vnet_type = 'new' np_mock.vnet_address_prefix = '10.0.0.0/16' np_mock.subnet_address_prefix = None np_mock.instance_count = 1000 np_mock.app_gateway_type = 'new' np_mock.app_gateway_subnet_address_prefix = None _validate_vmss_create_subnet(np_mock) self.assertEqual(np_mock.app_gateway_subnet_address_prefix, '10.0.4.0/24') @mock.patch('azure.cli.command_modules.vm._validators._resolve_role_id', autospec=True) @mock.patch('azure.cli.core.commands.client_factory.get_subscription_id', autospec=True) def test_validate_msi_on_create(self, mock_get_subscription, mock_resolve_role_id): # check throw on : az vm/vmss create --assign-identity --role reader --scope "" np_mock = mock.MagicMock() np_mock.assign_identity = True np_mock.identity_scope = None np_mock.identity_role = 'reader' with self.assertRaises(CLIError) as err: _validate_vm_vmss_msi(np_mock) self.assertTrue("usage error: '--role reader' is not applicable as the '--scope' is " "not provided" in str(err.exception)) # check throw on : az vm/vmss create --scope "some scope" np_mock = mock.MagicMock() np_mock.assign_identity = False np_mock.identity_scope = 'foo-scope' with self.assertRaises(CLIError) as err: _validate_vm_vmss_msi(np_mock) self.assertTrue('usage error: --assign-identity [--scope SCOPE] [--role ROLE]' in str(err.exception)) # check throw on : az vm/vmss create --role "reader" np_mock = mock.MagicMock() np_mock.assign_identity = False np_mock.identity_role = 'reader' with 
self.assertRaises(CLIError) as err: _validate_vm_vmss_msi(np_mock) self.assertTrue('usage error: --assign-identity [--scope SCOPE] [--role ROLE]' in str(err.exception)) # check we set right role id np_mock = mock.MagicMock() np_mock.assign_identity = True np_mock.identity_scope = 'foo-scope' np_mock.identity_role = 'reader' mock_resolve_role_id.return_value = 'foo-role-id' _validate_vm_vmss_msi(np_mock) self.assertEqual(np_mock.identity_role_id, 'foo-role-id') self.assertEqual(np_mock.identity_role, 'reader') mock_resolve_role_id.assert_called_with('reader', 'foo-scope') @mock.patch('azure.cli.command_modules.vm._validators._resolve_role_id', autospec=True) def test_validate_msi_on_assign_identity_command(self, mock_resolve_role_id): # check throw on : az vm/vmss assign-identity --role reader --scope "" np_mock = mock.MagicMock() np_mock.identity_scope = '' np_mock.identity_role = 'reader' with self.assertRaises(CLIError) as err: _validate_vm_vmss_msi(np_mock, from_set_command=True) self.assertTrue("usage error: '--role reader' is not applicable as the '--scope' is set to None", str(err.exception)) # check we set right role id np_mock = mock.MagicMock() np_mock.identity_scope = 'foo-scope' np_mock.identity_role = 'reader' mock_resolve_role_id.return_value = 'foo-role-id' _validate_vm_vmss_msi(np_mock, from_set_command=True) self.assertEqual(np_mock.identity_role_id, 'foo-role-id') mock_resolve_role_id.assert_called_with('reader', 'foo-scope') if __name__ == '__main__': unittest.main()
py
1a40764b11ead567fdf69be070d79187d03eea15
# Copyright (C) 2013 by Ben Morris ([email protected]) # Based on Bio.Nexus, copyright 2005-2008 by Frank Kauff & Cymon J. Cox # and Bio.Phylo.Newick, copyright 2009 by Eric Talevich. # All rights reserved. # # This file is part of the Biopython distribution and governed by your # choice of the "Biopython License Agreement" or the "BSD 3-Clause License". # Please see the LICENSE file that should have been included as part of this # package. """I/O function wrappers for the RDF/CDAO file format. This is an RDF format that conforms to the Comparative Data Analysis Ontology (CDAO). See: http://evolutionaryontology.org/cdao This module requires the librdf Python bindings (http://www.librdf.org) The CDAOIO.Parser, in addition to parsing text files, can also parse directly from a triple store that implements the Redland storage interface; similarly, the CDAOIO.Writer can store triples in a triple store instead of serializing them to a file. """ import os from Bio._py3k import StringIO from Bio import MissingPythonDependencyError from Bio.Phylo import CDAO from ._cdao_owl import cdao_namespaces, resolve_uri # import of cdao_elements from ._cdao_owl removed in Biopython 1.74 try: import rdflib rdfver = rdflib.__version__ if rdfver[0] in ["1", "2"] or (rdfver in ["3.0.0", "3.1.0", "3.2.0"]): raise MissingPythonDependencyError( "Support for CDAO tree format requires RDFlib v3.2.1 or later." ) except ImportError: raise MissingPythonDependencyError("Support for CDAO tree format requires RDFlib.") RDF_NAMESPACES = { "owl": "http://www.w3.org/2002/07/owl#", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "rdfs": "http://www.w3.org/2000/01/rdf-schema#", } RDF_NAMESPACES.update(cdao_namespaces) # pad node ids with zeroes until they're at least this length ZEROES = 8 def qUri(x): """Resolve URI for librdf.""" return resolve_uri(x, namespaces=RDF_NAMESPACES) def format_label(x): """Format label for librdf.""" return x.replace("_", " ") # --------------------------------------------------------- # Public API def parse(handle, **kwargs): """Iterate over the trees in a CDAO file handle. :returns: generator of Bio.Phylo.CDAO.Tree objects. """ return Parser(handle).parse(**kwargs) def write(trees, handle, plain=False, **kwargs): """Write a trees in CDAO format to the given file handle. :returns: number of trees written. 
""" return Writer(trees).write(handle, plain=plain, **kwargs) # --------------------------------------------------------- # Input class Parser(object): """Parse a CDAO tree given a file handle.""" def __init__(self, handle=None): """Initialize CDAO tree parser.""" self.handle = handle self.graph = None self.node_info = None self.children = {} self.rooted = False @classmethod def from_string(cls, treetext): """Instantiate the class from the given string.""" handle = StringIO(treetext) return cls(handle) def parse(self, **kwargs): """Parse the text stream this object was initialized with.""" self.parse_handle_to_graph(**kwargs) return self.parse_graph() def parse_handle_to_graph( self, rooted=False, parse_format="turtle", context=None, **kwargs ): """Parse self.handle into RDF model self.model.""" if self.graph is None: self.graph = rdflib.Graph() graph = self.graph for k, v in RDF_NAMESPACES.items(): graph.bind(k, v) self.rooted = rooted if "base_uri" in kwargs: base_uri = kwargs["base_uri"] else: # Windows style slashes cannot be used in an RDF URI base_uri = "file://" + os.path.abspath(self.handle.name).replace("\\", "/") graph.parse(file=self.handle, publicID=base_uri, format=parse_format) return self.parse_graph(graph, context=context) def parse_graph(self, graph=None, context=None): """Iterate over RDF model yielding CDAO.Tree instances.""" if graph is None: graph = self.graph # look up branch lengths/TUs for all nodes self.get_node_info(graph, context=context) for root_node in self.tree_roots: clade = self.parse_children(root_node) yield CDAO.Tree(root=clade, rooted=self.rooted) def new_clade(self, node): """Return a CDAO.Clade object for a given named node.""" result = self.node_info[node] kwargs = {} if "branch_length" in result: kwargs["branch_length"] = result["branch_length"] if "label" in result: kwargs["name"] = result["label"].replace("_", " ") if "confidence" in result: kwargs["confidence"] = result["confidence"] clade = CDAO.Clade(**kwargs) return clade def get_node_info(self, graph, context=None): """Create a dictionary containing information about all nodes in the tree.""" self.node_info = {} self.obj_info = {} self.children = {} self.nodes = set() self.tree_roots = set() assignments = { qUri("cdao:has_Parent"): "parent", qUri("cdao:belongs_to_Edge_as_Child"): "edge", qUri("cdao:has_Annotation"): "annotation", qUri("cdao:has_Value"): "value", qUri("cdao:represents_TU"): "tu", qUri("rdfs:label"): "label", qUri("cdao:has_Support_Value"): "confidence", } for s, v, o in graph: # process each RDF triple in the graph sequentially s, v, o = str(s), str(v), str(o) if s not in self.obj_info: self.obj_info[s] = {} this = self.obj_info[s] try: # if the predicate is one we care about, store information for # later this[assignments[v]] = o except KeyError: pass if v == qUri("rdf:type"): if o in (qUri("cdao:AncestralNode"), qUri("cdao:TerminalNode")): # this is a tree node; store it in set of all nodes self.nodes.add(s) if v == qUri("cdao:has_Root"): # this is a tree; store its root in set of all tree roots self.tree_roots.add(o) for node in self.nodes: # for each node, look up all information needed to create a # CDAO.Clade self.node_info[node] = {} node_info = self.node_info[node] obj = self.obj_info[node] if "edge" in obj: # if this object points to an edge, we need a branch length from # the annotation on that edge edge = self.obj_info[obj["edge"]] if "annotation" in edge: annotation = self.obj_info[edge["annotation"]] if "value" in annotation: node_info["branch_length"] = 
float(annotation["value"]) if "tu" in obj: # if this object points to a TU, we need the label of that TU tu = self.obj_info[obj["tu"]] if "label" in tu: node_info["label"] = tu["label"] if "parent" in obj: # store this node as a child of its parent, if it has one, # so that the tree can be traversed from parent to children parent = obj["parent"] if parent not in self.children: self.children[parent] = [] self.children[parent].append(node) def parse_children(self, node): """Traverse the tree to create a nested clade structure. Return a CDAO.Clade, and calls itself recursively for each child, traversing the entire tree and creating a nested structure of CDAO.Clade objects. """ clade = self.new_clade(node) children = self.children[node] if node in self.children else [] clade.clades = [self.parse_children(child_node) for child_node in children] return clade # --------------------------------------------------------- # Output class Writer(object): """Based on the writer in Bio.Nexus.Trees (str, to_string).""" prefixes = RDF_NAMESPACES def __init__(self, trees): """Initialize parameters for writing a CDAO tree.""" self.trees = trees self.node_counter = 0 self.edge_counter = 0 self.tu_counter = 0 self.tree_counter = 0 def write( self, handle, tree_uri="", record_complete_ancestry=False, rooted=False, **kwargs ): """Write this instance's trees to a file handle.""" self.rooted = rooted self.record_complete_ancestry = record_complete_ancestry if tree_uri and not tree_uri.endswith("/"): tree_uri += "/" trees = self.trees if tree_uri: handle.write("@base <%s>\n" % tree_uri) for k, v in self.prefixes.items(): handle.write("@prefix %s: <%s> .\n" % (k, v)) handle.write("<%s> a owl:Ontology .\n" % self.prefixes["cdao"]) for tree in trees: self.tree_counter += 1 self.tree_uri = "tree%s" first_clade = tree.clade statements = self.process_clade(first_clade, root=tree) for stmt in statements: self.add_stmt_to_handle(handle, stmt) def add_stmt_to_handle(self, handle, stmt): """Add URI prefix to handle.""" # apply URI prefixes stmt_strings = [] for n, part in enumerate(stmt): if isinstance(part, rdflib.URIRef): node_uri = str(part) changed = False for prefix, uri in self.prefixes.items(): if node_uri.startswith(uri): node_uri = node_uri.replace(uri, "%s:" % prefix, 1) if node_uri == "rdf:type": node_uri = "a" changed = True if changed or ":" in node_uri: stmt_strings.append(node_uri) else: stmt_strings.append("<%s>" % node_uri) elif isinstance(part, rdflib.Literal): stmt_strings.append(part.n3()) else: stmt_strings.append(str(part)) handle.write("%s .\n" % " ".join(stmt_strings)) def process_clade(self, clade, parent=None, root=False): """Recursively generate triples describing a tree of clades.""" self.node_counter += 1 clade.uri = "node%s" % str(self.node_counter).zfill(ZEROES) if parent: clade.ancestors = parent.ancestors + [parent.uri] else: clade.ancestors = [] def nUri(s): # nUri = lambda s: rdflib.URIRef(s) return rdflib.URIRef(s) def pUri(s): # pUri = lambda s: rdflib.URIRef(qUri(s)) return rdflib.URIRef(qUri(s)) tree_id = nUri("") statements = [] if root is not False: # create a cdao:RootedTree with reference to the tree root tree_type = ( pUri("cdao:RootedTree") if self.rooted else pUri("cdao:UnrootedTree") ) statements += [ (tree_id, pUri("rdf:type"), tree_type), (tree_id, pUri("cdao:has_Root"), nUri(clade.uri)), ] try: tree_attributes = root.attributes except AttributeError: tree_attributes = [] for predicate, obj in tree_attributes: statements.append((tree_id, predicate, obj)) if clade.name: # 
create TU self.tu_counter += 1 tu_uri = "tu%s" % str(self.tu_counter).zfill(ZEROES) statements += [ (nUri(tu_uri), pUri("rdf:type"), pUri("cdao:TU")), (nUri(clade.uri), pUri("cdao:represents_TU"), nUri(tu_uri)), ( nUri(tu_uri), pUri("rdfs:label"), rdflib.Literal(format_label(clade.name)), ), ] try: tu_attributes = clade.tu_attributes except AttributeError: tu_attributes = [] for predicate, obj in tu_attributes: yield (nUri(tu_uri), predicate, obj) # create this node node_type = "cdao:TerminalNode" if clade.is_terminal() else "cdao:AncestralNode" statements += [ (nUri(clade.uri), pUri("rdf:type"), pUri(node_type)), (nUri(clade.uri), pUri("cdao:belongs_to_Tree"), tree_id), ] if parent is not None: # create edge from the parent node to this node self.edge_counter += 1 edge_uri = "edge%s" % str(self.edge_counter).zfill(ZEROES) statements += [ (nUri(edge_uri), pUri("rdf:type"), pUri("cdao:DirectedEdge")), (nUri(edge_uri), pUri("cdao:belongs_to_Tree"), tree_id), (nUri(edge_uri), pUri("cdao:has_Parent_Node"), nUri(parent.uri)), (nUri(edge_uri), pUri("cdao:has_Child_Node"), nUri(clade.uri)), ( nUri(clade.uri), pUri("cdao:belongs_to_Edge_as_Child"), nUri(edge_uri), ), (nUri(clade.uri), pUri("cdao:has_Parent"), nUri(parent.uri)), ( nUri(parent.uri), pUri("cdao:belongs_to_Edge_as_Parent"), nUri(edge_uri), ), ] if hasattr(clade, "confidence") and clade.confidence is not None: confidence = rdflib.Literal( clade.confidence, datatype="http://www.w3.org/2001/XMLSchema#decimal", ) statements += [ (nUri(clade.uri), pUri("cdao:has_Support_Value"), confidence) ] if self.record_complete_ancestry and len(clade.ancestors) > 0: statements += [ (nUri(clade.uri), pUri("cdao:has_Ancestor"), nUri(ancestor)) for ancestor in clade.ancestors ] if clade.branch_length is not None: # add branch length edge_ann_uri = "edge_annotation%s" % str(self.edge_counter).zfill( ZEROES ) branch_length = rdflib.Literal( clade.branch_length, datatype=rdflib.URIRef("http://www.w3.org/2001/XMLSchema#decimal"), ) statements += [ (nUri(edge_ann_uri), pUri("rdf:type"), pUri("cdao:EdgeLength")), (nUri(edge_uri), pUri("cdao:has_Annotation"), nUri(edge_ann_uri)), (nUri(edge_ann_uri), pUri("cdao:has_Value"), branch_length), ] try: edge_attributes = clade.edge_attributes except AttributeError: edge_attributes = [] for predicate, obj in edge_attributes: yield (nUri(edge_uri), predicate, obj) for stmt in statements: yield stmt try: clade_attributes = clade.attributes except AttributeError: clade_attributes = [] for predicate, obj in clade_attributes: yield (nUri(clade.uri), predicate, obj) if not clade.is_terminal(): for new_clade in clade.clades: for stmt in self.process_clade(new_clade, parent=clade, root=False): yield stmt
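A minimal, hedged usage sketch for this module via the Bio.Phylo front end; 'example.cdao' is a hypothetical CDAO (turtle) file, and the RDFlib dependency noted in the module docstring is assumed to be installed.

from Bio import Phylo

# parse() and write() dispatch to the Parser and Writer classes defined above
trees = list(Phylo.parse("example.cdao", "cdao"))
Phylo.write(trees, "roundtrip.cdao", "cdao")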
py
1a4076c43b508004a729cb92ac912ed52818339f
#!/bin/python
# Simple script, which gathers information about GPUs and puts it into a file to expose it for node-exporter
from __future__ import print_function
from pynvml import *
import sys, traceback

try:
    nvmlInit()
    try:
        driverVersion = nvmlSystemGetDriverVersion()
        try:
            gpuNum = nvmlDeviceGetCount()
            for i in range(gpuNum):
                try:
                    filename = "/etc/node-exporter/gpu_%d.prom" % (i,)
                    with open(filename, "w") as file:
                        handle = nvmlDeviceGetHandleByIndex(i)
                        info = nvmlDeviceGetMemoryInfo(handle)
                        device_name = nvmlDeviceGetName(handle)
                        try:
                            utilization = nvmlDeviceGetUtilizationRates(handle)
                            file.write("node_gpu_%d_util{device_name=\"%s\", device_id=\"%d\"} %d\n" % (i, device_name, i, utilization.gpu))
                        except NVMLError as error:
                            traceback.print_exc(file=sys.stderr)
                            print("Failed nvmlDeviceGetUtilizationRates", file=sys.stderr)
                        file.write("node_gpu_%d_total_memory{device_name=\"%s\", device_id=\"%d\"} %d\n" % (i, device_name, i, info.total))
                        file.write("node_gpu_%d_used_memory{device_name=\"%s\", device_id=\"%d\"} %d\n" % (i, device_name, i, info.used))
                        file.write("node_gpu_%d_core_temp{device_name=\"%s\", device_id=\"%d\"} %d\n" % (i, device_name, i, nvmlDeviceGetTemperature(handle, NVML_TEMPERATURE_GPU)))
                except NVMLError as error:
                    traceback.print_exc(file=sys.stderr)
                    with open("/etc/node-exporter/gpu_issues.prom", "w") as file:
                        file.write("node_gpu_errors 1\n")
                    exit(1)
            with open("/etc/node-exporter/gpu_issues.prom", "w") as file:
                file.write("node_gpu_errors 0\n")
            exit(0)
        except NVMLError as error:
            traceback.print_exc(file=sys.stderr)
            with open("/etc/node-exporter/gpu_issues.prom", "w") as file:
                file.write("node_gpu_errors 1\n")
            exit(1)
    except NVMLError as error:
        traceback.print_exc(file=sys.stderr)
        with open("/etc/node-exporter/gpu_issues.prom", "w") as file:
            file.write("node_gpu_errors 1\n")
        exit(1)
except NVMLError as error:
    traceback.print_exc(file=sys.stderr)
    with open("/etc/node-exporter/gpu_issues.prom", "w") as file:
        file.write("node_gpu_errors 1\n")
    exit(1)
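Illustrative only, with made-up values: after a successful run the script above leaves files that node-exporter's textfile collector can scrape, shaped roughly like

node_gpu_0_util{device_name="Tesla V100", device_id="0"} 42
node_gpu_0_total_memory{device_name="Tesla V100", device_id="0"} 16945512448
node_gpu_0_used_memory{device_name="Tesla V100", device_id="0"} 1048576
node_gpu_0_core_temp{device_name="Tesla V100", device_id="0"} 53

in /etc/node-exporter/gpu_0.prom, plus "node_gpu_errors 0" in /etc/node-exporter/gpu_issues.prom.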
py
1a4077fa83d9d93b61f9249f138584da9b67aa1a
from __future__ import print_function import random __author__ = "Konrad Zemek" __copyright__ = """(C) 2015 ACK CYFRONET AGH, This software is released under the MIT license cited in 'LICENSE.txt'.""" import os import sys from threading import Thread from multiprocessing import Pool import time import pytest from stat import * script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(0, os.path.dirname(script_dir)) from test_common import * # noinspection PyUnresolvedReferences from environment import appmock, common, docker # noinspection PyUnresolvedReferences import fslogic # noinspection PyUnresolvedReferences from proto import messages_pb2, fuse_messages_pb2, event_messages_pb2, \ common_messages_pb2, stream_messages_pb2 SYNCHRONIZE_BLOCK_PRIORITY_IMMEDIATE = 32 @pytest.fixture def endpoint(appmock_client): return appmock_client.tcp_endpoint(443) @pytest.fixture def fl(endpoint): return fslogic.FsLogicProxy(endpoint.ip, endpoint.port) @pytest.fixture def uuid(): return random_str() @pytest.fixture def parentUuid(): return random_str() @pytest.fixture def stat(endpoint, fl, uuid): response = prepare_attr_response(uuid, fuse_messages_pb2.REG) with reply(endpoint, response): return fl.getattr(uuid) @pytest.fixture def parentStat(endpoint, fl, parentUuid): response = prepare_attr_response(parentUuid, fuse_messages_pb2.REG) with reply(endpoint, response): return fl.getattr(parentUuid) def prepare_file_blocks(blocks=[]): file_blocks = [] for file_block in blocks: block = common_messages_pb2.FileBlock() if len(file_block) == 2: offset, block_size = file_block else: offset, block_size, storage_id, file_id = file_block block.storage_id = storage_id block.file_id = file_id block.offset = offset block.size = block_size file_blocks.append(block) return file_blocks def prepare_sync_response(uuid, data, blocks): location = prepare_location(uuid, blocks) server_response = messages_pb2.ServerMessage() server_response.fuse_response.file_location_changed.file_location.CopyFrom(location) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def prepare_partial_sync_response(uuid, data, blocks, start, end): location = prepare_location(uuid, blocks) server_response = messages_pb2.ServerMessage() server_response.fuse_response.file_location_changed.file_location.CopyFrom(location) server_response.fuse_response.file_location_changed.change_beg_offset = start server_response.fuse_response.file_location_changed.change_end_offset = end server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def prepare_sync_eagain_response(uuid, data, blocks): location = prepare_location(uuid, blocks) server_response = messages_pb2.ServerMessage() server_response.fuse_response.file_location.CopyFrom(location) server_response.fuse_response.status.code = common_messages_pb2.Status.eagain return server_response def prepare_sync_request(offset, size): block = common_messages_pb2.FileBlock() block.offset = offset block.size = size req = fuse_messages_pb2.SynchronizeBlockAndComputeChecksum() req.uuid = 'uuid1' req.block.CopyFrom(block) client_request = messages_pb2.ClientMessage() client_request.fuse_request.synchronize_block_and_compute_checksum.CopyFrom(req) return client_request def prepare_attr_response(uuid, filetype, size=None): repl = fuse_messages_pb2.FileAttr() repl.uuid = uuid repl.name = 'filename' repl.mode = random.randint(0, 1023) repl.uid = random.randint(0, 20000) repl.gid = random.randint(0, 20000) repl.mtime = 
int(time.time()) - random.randint(0, 1000000) repl.atime = repl.mtime - random.randint(0, 1000000) repl.ctime = repl.atime - random.randint(0, 1000000) repl.type = filetype repl.size = size if size else random.randint(0, 1000000000) repl.owner_id = '' repl.provider_id = '' server_response = messages_pb2.ServerMessage() server_response.fuse_response.file_attr.CopyFrom(repl) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def prepare_helper_response(): repl = fuse_messages_pb2.HelperParams() repl.helper_name = 'null' server_response = messages_pb2.ServerMessage() server_response.fuse_response.helper_params.CopyFrom(repl) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def prepare_location(uuid, blocks=[]): file_blocks = prepare_file_blocks(blocks) repl = fuse_messages_pb2.FileLocation() repl.uuid = uuid repl.space_id = 'space1' repl.storage_id = 'storage1' repl.file_id = 'file1' repl.provider_id = 'provider1' repl.blocks.extend(file_blocks) repl.version = 1 return repl def prepare_location_response(uuid, blocks=[]): location = prepare_location(uuid, blocks) server_response = messages_pb2.ServerMessage() server_response.fuse_response.file_location.CopyFrom(location) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def prepare_rename_response(new_uuid): repl = fuse_messages_pb2.FileRenamed() repl.new_uuid = new_uuid server_response = messages_pb2.ServerMessage() server_response.fuse_response.file_renamed.CopyFrom(repl) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def prepare_processing_status_response(status): repl = messages_pb2.ProcessingStatus() repl.code = status server_response = messages_pb2.ServerMessage() server_response.processing_status.CopyFrom(repl) return server_response def prepare_open_response(handle_id='handle_id'): repl = fuse_messages_pb2.FileOpened() repl.handle_id = handle_id server_response = messages_pb2.ServerMessage() server_response.fuse_response.file_opened.CopyFrom(repl) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def prepare_file_children_attr_response(uuid, prefix, count): child_attrs = [] for i in range(count): f = prepare_attr_response(uuid, fuse_messages_pb2.REG).\ fuse_response.file_attr f.uuid = random_str() f.name = prefix+str(i) child_attrs.append(f) response = fuse_messages_pb2.FileChildrenAttrs() response.child_attrs.extend(child_attrs) return response def do_open(endpoint, fl, uuid, size=None, blocks=[], handle_id='handle_id'): attr_response = prepare_attr_response(uuid, fuse_messages_pb2.REG, size=size) location_response = prepare_location_response(uuid, blocks) open_response = prepare_open_response(handle_id) with reply(endpoint, [attr_response, location_response, open_response]): handle = fl.open(uuid, 0) assert handle >= 0 return handle def do_release(endpoint, fl, uuid, fh): fsync_response = messages_pb2.ServerMessage() fsync_response.fuse_response.status.code = common_messages_pb2.Status.ok release_response = messages_pb2.ServerMessage() release_response.fuse_response.status.code = common_messages_pb2.Status.ok result = None with reply(endpoint, [fsync_response, release_response]) as queue: fl.release(uuid, fh) result = queue return result def get_stream_id_from_location_subscription(subscription_message_data): location_subsc = messages_pb2.ClientMessage() 
location_subsc.ParseFromString(subscription_message_data) return location_subsc.message_stream.stream_id def test_getattrs_should_get_attrs(endpoint, fl, uuid): response = prepare_attr_response(uuid, fuse_messages_pb2.REG) with reply(endpoint, response) as queue: stat = fl.getattr(uuid) client_message = queue.get() assert client_message.HasField('fuse_request') fuse_request = client_message.fuse_request assert fuse_request.file_request.HasField('get_file_attr') assert fuse_request.file_request.context_guid == uuid repl = response.fuse_response.file_attr assert repl.uuid == uuid assert stat.atime == repl.atime assert stat.mtime == repl.mtime assert stat.ctime == repl.ctime assert stat.gid == repl.gid assert stat.uid == repl.uid assert stat.mode == repl.mode | fslogic.regularMode() assert stat.size == repl.size def test_getattrs_should_pass_errors(endpoint, fl, uuid): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.enoent with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.getattr(uuid) assert 'No such file or directory' in str(excinfo.value) def test_getattrs_should_cache_attrs(endpoint, fl, uuid): fuse_response = prepare_attr_response(uuid, fuse_messages_pb2.REG) with reply(endpoint, fuse_response): stat = fl.getattr(uuid) new_stat = fl.getattr(uuid) assert stat == new_stat assert 3 == endpoint.all_messages_count() def test_mkdir_should_mkdir(endpoint, fl): getattr_response = prepare_attr_response('parentUuid', fuse_messages_pb2.DIR) response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, [response, getattr_response]) as queue: fl.mkdir('parentUuid', 'name', 0123) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.context_guid == 'parentUuid' assert file_request.HasField('create_dir') create_dir = file_request.create_dir assert create_dir.name == 'name' assert create_dir.mode == 0123 assert file_request.context_guid == \ getattr_response.fuse_response.file_attr.uuid def test_mkdir_should_pass_mkdir_errors(endpoint, fl): getattr_response = prepare_attr_response('parentUuid', fuse_messages_pb2.DIR) response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, [getattr_response, response]): fl.mkdir('parentUuid', 'name', 0123) assert 'Operation not permitted' in str(excinfo.value) def test_rmdir_should_rmdir(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.DIR) response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, [getattr_response, response]) as queue: fl.rmdir('parentUuid', 'name') queue.get() client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('delete_file') assert file_request.context_guid == \ getattr_response.fuse_response.file_attr.uuid def test_rmdir_should_pass_rmdir_errors(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.DIR) response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with 
pytest.raises(RuntimeError) as excinfo: with reply(endpoint, [getattr_response, response]): fl.rmdir('parentUuid', 'name') assert 'Operation not permitted' in str(excinfo.value) def test_rename_should_rename(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.DIR) rename_response = prepare_rename_response('newUuid') with reply(endpoint, [getattr_response, rename_response]) as queue: fl.rename('parentUuid', 'name', 'newParentUuid', 'newName') queue.get() client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('rename') rename = file_request.rename assert rename.target_parent_uuid == 'newParentUuid' assert rename.target_name == 'newName' assert file_request.context_guid == \ getattr_response.fuse_response.file_attr.uuid def test_rename_should_change_caches(appmock_client, endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.DIR) rename_response = prepare_rename_response('newUuid') with reply(endpoint, [getattr_response, rename_response]): fl.rename('parentUuid', 'name', 'newParentUuid', 'newName') stat = fl.getattr('newUuid') assert stat.size == getattr_response.fuse_response.file_attr.size appmock_client.reset_tcp_history() response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.enoent with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.getattr(uuid) assert 'No such file or directory' in str(excinfo.value) def test_rename_should_pass_rename_errors(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.DIR) response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, [getattr_response, response]): fl.rename('parentUuid', 'name', 'newParentUuid', 'newName') assert 'Operation not permitted' in str(excinfo.value) def test_chmod_should_change_mode(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.DIR) response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, [response, response, getattr_response]) as queue: fl.chmod(uuid, 0123) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('change_mode') change_mode = file_request.change_mode assert change_mode.mode == 0123 assert file_request.context_guid == \ getattr_response.fuse_response.file_attr.uuid def test_chmod_should_change_cached_mode(appmock_client, endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.REG) with reply(endpoint, getattr_response): stat = fl.getattr(uuid) assert stat.mode == getattr_response.fuse_response.file_attr.mode | \ fslogic.regularMode() assert 3 == endpoint.all_messages_count() appmock_client.reset_tcp_history() response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, [response, response]): fl.chmod(uuid, 0356) stat = fl.getattr(uuid) assert stat.mode == 0356 | fslogic.regularMode() def test_chmod_should_pass_chmod_errors(endpoint, fl, uuid): response = messages_pb2.ServerMessage() 
response.fuse_response.status.code = common_messages_pb2.Status.enoent with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.chmod(uuid, 0312) assert 'No such file or directory' in str(excinfo.value) def test_utime_should_update_times(endpoint, fl, uuid, stat): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, response) as queue: fl.utime(uuid) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('update_times') update_times = file_request.update_times assert update_times.atime == update_times.mtime assert update_times.atime == update_times.ctime assert update_times.atime <= time.time() assert file_request.context_guid == uuid def test_utime_should_change_cached_times(appmock_client, endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.REG) with reply(endpoint, getattr_response): stat = fl.getattr(uuid) assert stat.atime == getattr_response.fuse_response.file_attr.atime assert stat.mtime == getattr_response.fuse_response.file_attr.mtime assert 3 == endpoint.all_messages_count() appmock_client.reset_tcp_history() response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, response): fl.utime(uuid) stat = fslogic.Stat() fl.getattr(uuid) assert stat.atime != getattr_response.fuse_response.file_attr.atime assert stat.mtime != getattr_response.fuse_response.file_attr.mtime def test_utime_should_update_times_with_buf(endpoint, fl, uuid, stat): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok ubuf = fslogic.Ubuf() ubuf.actime = 54321 ubuf.modtime = 12345 with reply(endpoint, response) as queue: fl.utime_buf(uuid, ubuf) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('update_times') update_times = file_request.update_times assert update_times.atime == ubuf.actime assert update_times.mtime == ubuf.modtime assert file_request.context_guid == uuid def test_utime_should_pass_utime_errors(endpoint, fl, uuid, stat): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.utime(uuid) assert 'Operation not permitted' in str(excinfo.value) ubuf = fslogic.Ubuf() with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.utime_buf(uuid, ubuf) assert 'Operation not permitted' in str(excinfo.value) def test_readdir_should_read_dir(endpoint, fl, uuid, stat): # # Prepare first response with 5 files # repl1 = prepare_file_children_attr_response(uuid, "afiles-", 5) repl1.is_last = False response1 = messages_pb2.ServerMessage() response1.fuse_response.file_children_attrs.CopyFrom(repl1) response1.fuse_response.status.code = common_messages_pb2.Status.ok # # Prepare second response with another 5 file # repl2 = prepare_file_children_attr_response(uuid, "bfiles-", 5) repl2.is_last = True response2 = messages_pb2.ServerMessage() response2.fuse_response.file_children_attrs.CopyFrom(repl2) response2.fuse_response.status.code = common_messages_pb2.Status.ok children = [] 
offset = 0 chunk_size = 50 with reply(endpoint, [response1, response2]) as queue: children_chunk = fl.readdir(uuid, chunk_size, offset) _ = queue.get() assert len(children_chunk) == 12 # # Immediately after the last request the value should be available # from readdir cache, without any communication with provider # for i in range(3): with reply(endpoint, []) as queue: children_chunk = fl.readdir(uuid, 5, 0) assert len(children_chunk) == 5 time.sleep(1) # # After time validity has passed, the cache should be empty again # time.sleep(3) repl4 = fuse_messages_pb2.FileChildrenAttrs() repl4.child_attrs.extend([]) response4 = messages_pb2.ServerMessage() response4.fuse_response.file_children_attrs.CopyFrom(repl4) response4.fuse_response.status.code = common_messages_pb2.Status.ok children = [] with reply(endpoint, [response4]) as queue: children_chunk = fl.readdir(uuid, 5, 0) _ = queue.get() assert len(children_chunk) == 2 children += children_chunk assert sorted(children) == sorted(['..', '.']) def test_readdir_should_return_unique_entries(endpoint, fl, uuid, stat): # # Prepare first response with 5 files # repl1 = prepare_file_children_attr_response(uuid, "afiles-", 5) repl1.is_last = False response1 = messages_pb2.ServerMessage() response1.fuse_response.file_children_attrs.CopyFrom(repl1) response1.fuse_response.status.code = common_messages_pb2.Status.ok # # Prepare second response with the same 5 files # repl2 = prepare_file_children_attr_response(uuid, "afiles-", 5) repl2.is_last = True response2 = messages_pb2.ServerMessage() response2.fuse_response.file_children_attrs.CopyFrom(repl2) response2.fuse_response.status.code = common_messages_pb2.Status.ok children = [] offset = 0 chunk_size = 50 with reply(endpoint, [response1, response2]) as queue: children_chunk = fl.readdir(uuid, chunk_size, offset) _ = queue.get() children.extend(children_chunk) assert len(children) == 5 + 2 def test_readdir_should_pass_readdir_errors(endpoint, fl, uuid, stat): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.readdir(uuid, 1024, 0) assert 'Operation not permitted' in str(excinfo.value) def test_readdir_should_not_get_stuck_on_errors(endpoint, fl, uuid, stat): response0 = messages_pb2.ServerMessage() response0.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response0): fl.readdir(uuid, 1024, 0) assert 'Operation not permitted' in str(excinfo.value) # # Prepare first response with 5 files # repl1 = prepare_file_children_attr_response(uuid, "afiles-", 5) repl1.is_last = False response1 = messages_pb2.ServerMessage() response1.fuse_response.file_children_attrs.CopyFrom(repl1) response1.fuse_response.status.code = common_messages_pb2.Status.ok # # Prepare second response with another 5 file # repl2 = prepare_file_children_attr_response(uuid, "bfiles-", 5) repl2.is_last = True response2 = messages_pb2.ServerMessage() response2.fuse_response.file_children_attrs.CopyFrom(repl2) response2.fuse_response.status.code = common_messages_pb2.Status.ok children = [] offset = 0 chunk_size = 50 with reply(endpoint, [response1, response2]) as queue: children_chunk = fl.readdir(uuid, chunk_size, offset) _ = queue.get() assert len(children_chunk) == 12 def test_mknod_should_make_new_location(endpoint, fl, uuid, parentUuid, parentStat): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.REG) with 
reply(endpoint, [getattr_response]) as queue: fl.mknod(parentUuid, 'childName', 0762 | S_IFREG) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('make_file') make_file = file_request.make_file assert make_file.name == 'childName' assert make_file.mode == 0762 assert file_request.context_guid == parentUuid def test_mknod_should_pass_location_errors(endpoint, fl, parentUuid, parentStat): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.mknod(parentUuid, 'childName', 0123) assert 'Operation not permitted' in str(excinfo.value) def test_mknod_should_throw_on_unsupported_file_type(endpoint, fl, parentUuid, parentStat): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: fl.mknod(parentUuid, 'childName', 0664 | S_IFSOCK) assert 'Operation not supported' in str(excinfo.value) with pytest.raises(RuntimeError) as excinfo: fl.mknod(parentUuid, 'childName', 0664 | S_IFBLK) assert 'Operation not supported' in str(excinfo.value) with pytest.raises(RuntimeError) as excinfo: fl.mknod(parentUuid, 'childName', 0664 | S_IFDIR) assert 'Operation not supported' in str(excinfo.value) with pytest.raises(RuntimeError) as excinfo: fl.mknod(parentUuid, 'childName', 0664 | S_IFCHR) assert 'Operation not supported' in str(excinfo.value) with pytest.raises(RuntimeError) as excinfo: fl.mknod(parentUuid, 'childName', 0664 | S_IFIFO) assert 'Operation not supported' in str(excinfo.value) def test_read_should_read(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, blocks=[(0, 10)]) assert 5 == len(fl.read(uuid, fh, 0, 5)) def test_read_should_read_zero_on_eof(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(0, 10)]) assert 10 == len(fl.read(uuid, fh, 0, 12)) assert 0 == len(fl.read(uuid, fh, 10, 2)) def test_read_should_pass_helper_errors(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(0, 10)]) with pytest.raises(RuntimeError) as excinfo: fl.failHelper() fl.read(uuid, fh, 0, 10) assert 'Owner died' in str(excinfo.value) def test_write_should_write(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(0, 10)]) assert 5 == fl.write(uuid, fh, 0, 5) def test_write_should_change_file_size(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=5, blocks=[(0, 5)]) assert 20 == fl.write(uuid, fh, 10, 20) stat = fl.getattr(uuid) assert 30 == stat.size def test_write_should_pass_helper_errors(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(0, 10)]) with pytest.raises(RuntimeError) as excinfo: fl.failHelper() fl.write(uuid, fh, 0, 10) assert 'Owner died' in str(excinfo.value) def test_truncate_should_truncate(endpoint, fl, uuid, stat): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok location_response = prepare_location_response(uuid) with reply(endpoint, [response, location_response]) as queue: fl.truncate(uuid, 4) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('truncate') truncate = 
file_request.truncate assert truncate.size == 4 assert file_request.context_guid == uuid def test_truncate_should_pass_truncate_errors(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.REG) response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.eperm with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, [getattr_response, response]): fl.truncate(uuid, 3) assert 'Operation not permitted' in str(excinfo.value) @pytest.mark.skip(reason="TODO VFS-3718") def test_readdir_big_directory(endpoint, fl, uuid, stat): chunk_size = 2500 children_num = 10*chunk_size # Prepare an array of responses of appropriate sizes to client # requests responses = [] for i in xrange(0, children_num/chunk_size): repl = fuse_messages_pb2.FileChildrenAttrs() for j in xrange(0, chunk_size): link = prepare_attr_response(uuid, fuse_messages_pb2.REG).\ fuse_response.file_attr link.uuid = "childUuid_"+str(i)+"_"+str(j) link.name = "file_"+str(i)+"+"+str(j) repl.child_attrs.extend([link]) response = messages_pb2.ServerMessage() response.fuse_response.file_children_attrs.CopyFrom(repl) response.fuse_response.status.code = common_messages_pb2.Status.ok responses.append(response) empty_repl = fuse_messages_pb2.FileChildrenAttrs() empty_repl.child_attrs.extend([]) empty_repl.is_last = True empty_response = messages_pb2.ServerMessage() empty_response.fuse_response.file_children_attrs.CopyFrom(empty_repl) empty_response.fuse_response.status.code = common_messages_pb2.Status.ok responses.append(empty_response) assert len(responses) == children_num/chunk_size + 1 children = [] offset = 0 with reply(endpoint, responses) as queue: while True: children_chunk = fl.readdir(uuid, chunk_size, offset) client_message = queue.get() children.extend(children_chunk) if len(children_chunk) < chunk_size: break offset += len(children_chunk) assert len(children) == children_num + 2 def test_write_should_save_blocks(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=0) assert 5 == fl.write(uuid, fh, 0, 5) assert 5 == len(fl.read(uuid, fh, 0, 10)) def test_read_should_read_partial_content(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(4, 6)]) data = fl.read(uuid, fh, 6, 4) assert len(data) == 4 def test_read_should_request_synchronization(appmock_client, endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(4, 6)]) sync_response = prepare_sync_response(uuid, '', [(0, 10)]) appmock_client.reset_tcp_history() with reply(endpoint, sync_response) as queue: fl.read(uuid, fh, 2, 5) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('synchronize_block') block = common_messages_pb2.FileBlock() block.offset = 2 block.size = 5 sync = file_request.synchronize_block assert sync.block == block assert sync.priority == SYNCHRONIZE_BLOCK_PRIORITY_IMMEDIATE assert file_request.context_guid == uuid def test_read_should_retry_request_synchronization(appmock_client, endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(4, 6)]) responses = [] responses.append(prepare_sync_eagain_response(uuid, '', [(0, 10)])) responses.append(prepare_sync_response(uuid, '', [(0, 10)])) appmock_client.reset_tcp_history() with reply(endpoint, responses) as queue: fl.read(uuid, fh, 2, 5) client_message = queue.get() assert client_message.HasField('fuse_request') 
assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('synchronize_block') block = common_messages_pb2.FileBlock() block.offset = 2 block.size = 5 sync = file_request.synchronize_block assert sync.block == block assert sync.priority == SYNCHRONIZE_BLOCK_PRIORITY_IMMEDIATE assert file_request.context_guid == uuid def test_read_should_not_retry_request_synchronization_too_many_times(appmock_client, endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(4, 6)]) responses = [] responses.append(prepare_sync_eagain_response(uuid, '', [(0, 10)])) responses.append(prepare_sync_eagain_response(uuid, '', [(0, 10)])) responses.append(prepare_sync_eagain_response(uuid, '', [(0, 10)])) appmock_client.reset_tcp_history() with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, responses) as queue: fl.read(uuid, fh, 2, 5) client_message = queue.get() assert 'Resource temporarily unavailable' in str(excinfo.value) def test_read_should_continue_reading_after_synchronization(appmock_client, endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(4, 6)]) sync_response = prepare_sync_response(uuid, '', [(0, 10)]) appmock_client.reset_tcp_history() with reply(endpoint, sync_response): assert 5 == len(fl.read(uuid, fh, 2, 5)) def test_read_should_continue_reading_after_synchronization_partial(appmock_client, endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[(4, 6)]) sync_response = prepare_partial_sync_response(uuid, '', [(0, 10)], 0, 10) appmock_client.reset_tcp_history() with reply(endpoint, sync_response): assert 5 == len(fl.read(uuid, fh, 2, 5)) def test_read_should_should_open_file_block_once(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[ (0, 5, 'storage1', 'file1'), (5, 5, 'storage2', 'file2')]) fl.expect_call_sh_open("file1", 1) fl.expect_call_sh_open("file2", 1) assert 5 == len(fl.read(uuid, fh, 0, 5)) assert 5 == len(fl.read(uuid, fh, 5, 5)) assert 5 == len(fl.read(uuid, fh, 0, 5)) assert 5 == len(fl.read(uuid, fh, 0, 5)) assert 5 == len(fl.read(uuid, fh, 5, 5)) assert 5 == len(fl.read(uuid, fh, 5, 5)) assert fl.verify_and_clear_expectations() def test_release_should_release_open_file_blocks(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[ (0, 5, 'storage1', 'file1'), (5, 5, 'storage2', 'file2')]) assert 5 == len(fl.read(uuid, fh, 0, 5)) assert 5 == len(fl.read(uuid, fh, 5, 5)) fl.expect_call_sh_release('file1', 1) fl.expect_call_sh_release('file2', 1) do_release(endpoint, fl, uuid, fh) assert fl.verify_and_clear_expectations() def test_release_should_pass_helper_errors(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=10, blocks=[ (0, 5, 'storage1', 'file1'), (5, 5, 'storage2', 'file2')]) assert 5 == len(fl.read(uuid, fh, 0, 5)) assert 5 == len(fl.read(uuid, fh, 5, 5)) fl.expect_call_sh_release('file1', 1) fl.expect_call_sh_release('file2', 1) with pytest.raises(RuntimeError) as excinfo: fl.failHelper() do_release(endpoint, fl, uuid, fh) assert 'Owner died' in str(excinfo.value) assert fl.verify_and_clear_expectations() def test_release_should_send_release_message(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=0) sent_messages = do_release(endpoint, fl, uuid, fh) sent_messages.get() # skip fsync message client_message = sent_messages.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') assert 
client_message.fuse_request.file_request.HasField('release') def test_release_should_send_fsync_message(endpoint, fl, uuid): fh = do_open(endpoint, fl, uuid, size=0) sent_messages = do_release(endpoint, fl, uuid, fh) client_message = sent_messages.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') assert client_message.fuse_request.file_request.HasField('fsync') def test_fslogic_should_handle_processing_status_message(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.DIR) rename_response = prepare_rename_response('newUuid') processing_status_responses = \ [prepare_processing_status_response(messages_pb2.IN_PROGRESS) for _ in range(5)] responses = [getattr_response] responses.extend(processing_status_responses) responses.append(rename_response) with reply(endpoint, responses) as queue: fl.rename('parentUuid', 'name', 'newParentUuid', 'newName') queue.get() client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('rename') rename = file_request.rename assert rename.target_parent_uuid == 'newParentUuid' assert rename.target_name == 'newName' assert file_request.context_guid == \ getattr_response.fuse_response.file_attr.uuid def prepare_listxattr_response(uuid): repl = fuse_messages_pb2.XattrList() repl.names.extend(["xattr1", "xattr2", "xattr3", "xattr4"]) server_response = messages_pb2.ServerMessage() server_response.fuse_response.xattr_list.CopyFrom(repl) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def test_listxattrs_should_return_listxattrs(endpoint, fl, uuid): getattr_response = prepare_attr_response(uuid, fuse_messages_pb2.REG) listxattr_response = prepare_listxattr_response(uuid) listxattrs = [] with reply(endpoint, [listxattr_response, getattr_response]) as queue: listxattrs = fl.listxattr(uuid) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('list_xattr') assert file_request.context_guid == uuid assert listxattr_response.status.code == common_messages_pb2.Status.ok assert "xattr1" in set(listxattrs) assert "xattr2" in set(listxattrs) assert "xattr3" in set(listxattrs) assert "xattr4" in set(listxattrs) def prepare_getxattr_response(uuid, name, value): repl = fuse_messages_pb2.Xattr() repl.name = name repl.value = value server_response = messages_pb2.ServerMessage() server_response.fuse_response.xattr.CopyFrom(repl) server_response.fuse_response.status.code = common_messages_pb2.Status.ok return server_response def test_getxattr_should_return_xattr(endpoint, fl, uuid): xattr_name = "org.onedata.acl" xattr_value = "READ | WRITE | DELETE" response = prepare_getxattr_response(uuid, xattr_name, xattr_value) xattr = None with reply(endpoint, response) as queue: xattr = fl.getxattr(uuid, xattr_name) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('get_xattr') assert xattr.name == xattr_name assert xattr.value == xattr_value def test_getxattr_should_return_enoattr_for_invalid_xattr(endpoint, fl, uuid): response = messages_pb2.ServerMessage() 
response.fuse_response.status.code = common_messages_pb2.Status.enodata with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.getxattr(uuid, "org.onedata.dontexist") assert 'No data available' in str(excinfo.value) def test_setxattr_should_set_xattr(endpoint, fl, uuid): xattr_name = "org.onedata.acl" xattr_value = "READ | WRITE | DELETE" response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, response) as queue: fl.setxattr(uuid, xattr_name, xattr_value, False, False) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('set_xattr') assert file_request.set_xattr.HasField('xattr') assert file_request.set_xattr.xattr.name == xattr_name assert file_request.set_xattr.xattr.value == xattr_value def test_setxattr_should_set_xattr_with_binary_data(endpoint, fl, uuid): xattr_name = "org.onedata.acl" xattr_value = b'BEGINSTRINGWITHNULLS\x00\x0F\x00\x0F\x00\x0F\x00\x0F\x00\x0F\x00\x0FENDSTRINGWITHNULLS' response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, response) as queue: fl.setxattr(uuid, xattr_name, xattr_value, False, False) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('set_xattr') assert file_request.set_xattr.HasField('xattr') assert file_request.set_xattr.xattr.name == xattr_name assert file_request.set_xattr.xattr.value == xattr_value def test_setxattr_should_set_xattr_with_long_value(endpoint, fl, uuid): xattr_name = "org.onedata.acl" xattr_value = 
"askljdhflajkshdfjklhasjkldfhajklshdfljkashdfjklhasljkdhfjklashdfjklhasljdfhljkashdfljkhasjkldfhkljasdfhaslkdhfljkashdfljkhasdjklfhajklsdhfljkashdflkjhasjkldfhlakjsdhflkjahsfjklhasdjklfghlajksdgjklashfjklashfljkahsdljkfhasjkldfhlkajshdflkjahsdfljkhasldjkfhlkashdflkjashdfljkhasldkjfhalksdhfljkashdfljkhasdlfjkhaljksdhfjklashdfjklhasjkldfhljkasdhfljkashdlfjkhasldjkfhaljskdhfljkashdfljkhaspeuwshfiuawhgelrfihjasdgffhjgsdfhjgaskhjdfgjkaszgdfjhasdkfgaksjdfgkjahsdgfkhjasgdfkjhagsdkhjfgakhsjdfgkjhasgdfkhjgasdkjhfgakjshdgfkjhasgdkjhfgaskjhdfgakjhsdgfkjhasdgfkjhagsdkfhjgaskjdhfgkajsgdfkhjagsdkfjhgasdkjhfgaksjhdgfkajshdgfkjhasdgfkjhagskjdhfgakjshdgfkhjasdgfkjhasgdkfhjgaskdhjfgaksjdfgkasjdhgfkajshdgfkjhasgdfkhjagskdhjfgaskhjdfgkjasdhgfkjasgdkhjasdgkfhjgaksjhdfgkajshdgfkjhasdgfkjhagsdhjkfgaskhjdfgahjksdgfkhjasdgfhasgdfjhgaskdhjfgadkshjgfakhjsdgfkjhadsgkfhjagshjkdfgadhjsaskljdhflajkshdfjklhasjkldfhajklshdfljkashdfjklhasljkdhfjklashdfjklhasljdfhljkashdfljkhasjkldfhkljasdfhaslkdhfljkashdfljkhasdjklfhajklsdhfljkashdflkjhasjkldfhlakjsdhflkjahsfjklhasdjklfghlajksdgjklashfjklashfljkahsdljkfhasjkldfhlkajshdflkjahsdfljkhasldjkfhlkashdflkjashdfljkhasldkjfhalksdhfljkashdfljkhasdlfjkhaljksdhfjklashdfjklhasjkldfhljkasdhfljkashdlfjkhasldjkfhaljskdhfljkashdfljkhaspeuwshfiuawhgelrfihjasdgffhjgsdfhjgaskhjdfgjkaszgdfjhasdkfgaksjdfgkjahsdgfkhjasgdfkjhagsdkhjfgakhsjdfgkjhasgdfkhjgasdkjhfgakjshdgfkjhasgdkjhfgaskjhdfgakjhsdgfkjhasdgfkjhagsdkfhjgaskjdhfgkajsgdfkhjagsdkfjhgasdkjhfgaksjhdgfkajshdgfkjhasdgfkjhagskjdhfgakjshdgfkhjasdgfkjhasgdkfhjgaskdhjfgaksjdfgkasjdhgfkajshdgfkjhasgdfkhjagskdhjfgaskhjdfgkjasdhgfkjasgdkhjasdgkfhjgaksjhdfgkajshdgfkjhasdgfkjhagsdhjkfgaskhjdfgahjksdgfkhjasdgfhasgdfjhgaskdhjfgadkshjgfakhjsdgfkjhadsgkfhjagshjkdfgadhjsaskljdhflajkshdfjklhasjkldfhajklshdfljkashdfjklhasljkdhfjklashdfjklhasljdfhljkashdfljkhasjkldfhkljasdfhaslkdhfljkashdfljkhasdjklfhajklsdhfljkashdflkjhasjkldfhlakjsdhflkjahsfjklhasdjklfghlajksdgjklashfjklashfljkahsdljkfhasjkldfhlkajshdflkjahsdfljkhasldjkfhlkashdflkjashdfljkhasldkjfhalksdhfljkashdfljkhasdlfjkhaljksdhfjklashdfjklhasjkldfhljkasdhfljkashdlfjkhasldjkfhaljskdhfljkashdfljkhaspeuwshfiuawhgelrfihjasdgffhjgsdfhjgaskhjdfgjkaszgdfjhasdkfgaksjdfgkjahsdgfkhjasgdfkjhagsdkhjfgakhsjdfgkjhasgdfkhjgasdkjhfgakjshdgfkjhasgdkjhfgaskjhdfgakjhsdgfkjhasdgfkjhagsdkfhjgaskjdhfgkajsgdfkhjagsdkfjhgasdkjhfgaksjhdgfkajshdgfkjhasdgfkjhagskjdhfgakjshdgfkhjasdgfkjhasgdkfhjgaskdhjfgaksjdfgkasjdhgfkajshdgfkjhasgdfkhjagskdhjfgaskhjdfgkjasdhgfkjasgdkhjasdgkfhjgaksjhdfgkajshdgfkjhasdgfkjhagsdhjkfgaskhjdfgahjksdgfkhjasdgfhasgdfjhgaskdhjfgadkshjgfakhjsdgfkjhadsgkfhjagshjkdfgadhjsaskljdhflajkshdfjklhasjkldfhajklshdfljkashdfjklhasljkdhfjklashdfjklhasljdfhljkashdfljkhasjkldfhkljasdfhaslkdhfljkashdfljkhasdjklfhajklsdhfljkashdflkjhasjkldfhlakjsdhflkjahsfjklhasdjklfghlajksdgjklashfjklashfljkahsdljkfhasjkldfhlkajshdflkjahsdfljkhasldjkfhlkashdflkjashdfljkhasldkjfhalksdhfljkashdfljkhasdlfjkhaljksdhfjklashdfjklhasjkldfhljkasdhfljkashdlfjkhasldjkfhaljskdhfljkashdfljkhaspeuwshfiuawhgelrfihjasdgffhjgsdfhjgaskhjdfgjkaszgdfjhasdkfgaksjdfgkjahsdgfkhjasgdfkjhagsdkhjfgakhsjdfgkjhasgdfkhjgasdkjhfgakjshdgfkjhasgdkjhfgaskjhdfgakjhsdgfkjhasdgfkjhagsdkfhjgaskjdhfgkajsgdfkhjagsdkfjhgasdkjhfgaksjhdgfkajshdgfkjhasdgfkjhagskjdhfgakjshdgfkhjasdgfkjhasgdkfhjgaskdhjfgaksjdfgkasjdhgfkajshdgfkjhasgdfkhjagskdhjfgaskhjdfgkjasdhgfkjasgdkhjasdgkfhjgaksjhdfgkajshdgfkjhasdgfkjhagsdhjkfgaskhjdfgahjksdgfkhjasdgfhasgdfjhgaskdhjfgadkshjgfakhjsdgfkjhadsgkfhjagshjkdfgadhjs" response = messages_pb2.ServerMessage() response.fuse_response.status.code = 
common_messages_pb2.Status.ok with reply(endpoint, response) as queue: fl.setxattr(uuid, xattr_name, xattr_value, False, False) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.HasField('set_xattr') assert file_request.set_xattr.HasField('xattr') assert file_request.set_xattr.xattr.name == xattr_name assert file_request.set_xattr.xattr.value == xattr_value def test_removexattr_should_remove_xattr(endpoint, fl, uuid): xattr_name = "org.onedata.acl" response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.ok with reply(endpoint, response) as queue: fl.removexattr(uuid, xattr_name) client_message = queue.get() assert client_message.HasField('fuse_request') assert client_message.fuse_request.HasField('file_request') file_request = client_message.fuse_request.file_request assert file_request.context_guid == uuid remove_xattr_request = file_request.remove_xattr assert remove_xattr_request.HasField('name') assert remove_xattr_request.name == xattr_name def test_removexattr_should_return_enoattr_for_invalid_xattr(endpoint, fl, uuid): response = messages_pb2.ServerMessage() response.fuse_response.status.code = common_messages_pb2.Status.enodata with pytest.raises(RuntimeError) as excinfo: with reply(endpoint, response): fl.removexattr(uuid, "org.onedata.dontexist") assert 'No data available' in str(excinfo.value)
py
1a4079f71608c999bf5402623ade98ffa797ee78
from math import acos, cos, hypot, pi

from models.event import *

HALT_ENGINE_INPUT = (0, EngineDirection.FORWARD)
HALT_ENGINE_EVENT = EngineInput(HALT_ENGINE_INPUT, HALT_ENGINE_INPUT)


def is_on_right_half(x, y):
    return x >= 0


def is_on_left_half(x, y):
    return not is_on_right_half(x, y)


def is_on_upper_half(x, y):
    return y >= 0


def is_on_bottom_half(x, y):
    return not is_on_upper_half(x, y)


def to_engine_inputs(left, right, intensity):
    left_direction = EngineDirection.BACKWARD if left < 0 else EngineDirection.FORWARD
    right_direction = EngineDirection.BACKWARD if right < 0 else EngineDirection.FORWARD
    left_input = (round(abs(left * intensity), 2), left_direction)
    right_input = (round(abs(right * intensity), 2), right_direction)
    return left_input, right_input


def relative_intensity_of_auxiliary_engine(x, y, intensity):
    '''
    Given a point (x, y) and their hypotenuse, return what strength
    to apply to the auxiliary motor.
    '''
    try:
        x_relative = abs(x / intensity)
    except ZeroDivisionError:
        return 0
    # aux = 1 - 2 * x_relative  # Naive solution, not symmetrical
    # aux = (4 * acos(x_relative) - pi) / pi  # Linear movement, abrupt change on edges
    aux = -cos(2 * acos(x_relative))  # Natural movement
    return aux


def direction_event_to_engine_input(event: DirectionEvent):
    '''
    Get a position in a circle and transform into left/right engine powers.
    '''
    (x, y) = event.get_coordinates()
    intensity = hypot(x, y)
    l = 1
    r = relative_intensity_of_auxiliary_engine(x, y, intensity)
    assert abs(r) <= 1
    if is_on_left_half(x, y) ^ is_on_bottom_half(x, y):
        l, r = r, l
    if is_on_bottom_half(x, y):
        l = -l
        r = -r
    return EngineInput(*to_engine_inputs(l, r, min(intensity, 1)))


def _is_halt_event(event):
    return type(event) is HaltEvent


def _is_direction_event(event):
    return type(event) is DirectionEvent


def handle_event(event: InputEvent):
    if _is_halt_event(event):
        return HALT_ENGINE_EVENT
    elif _is_direction_event(event):
        return direction_event_to_engine_input(event)
    else:
        raise ValueError("Could not understand event type " + type(event).__name__)
py
1a407b1b356e722637f4e2da45eb80d16ccc5ac6
""" Generate emojipasta from text. """ import random import io import json import emojipasta.util.emoji import emojipasta.util.files import emojipasta.util.text class EmojipastaGenerator: _WORD_DELIMITER = " " _MAX_EMOJIS_PER_BLOCK = 2 """Creates with default emoji mappings, loaded from a JSON file in the package. """ @classmethod def of_default_mappings(cls): return EmojipastaGenerator(_get_emoji_mappings()) """Create with custom emoji mappings. emoji_mappings: a dict that maps from a lowercase word to a list of emojis (the emojis being single-character strings). """ @classmethod def of_custom_mappings(cls, emoji_mappings): return EmojipastaGenerator(emoji_mappings) def __init__(self, emoji_mappings): self._emoji_mappings = emoji_mappings def generate_emojipasta(self, text): blocks = emojipasta.util.text.split_into_blocks(text) new_blocks = [] for i, block in enumerate(blocks): new_blocks.append(block) emojis = self._generate_emojis_from(block) if emojis: new_blocks.append(" " + emojis) return "".join(new_blocks) def _generate_emojis_from(self, block): trimmed_block = emojipasta.util.text.trim_nonalphabetical_characters(block) matching_emojis = self._get_matching_emojis(trimmed_block) emojis = [] if matching_emojis: num_emojis = random.randint(0, self._MAX_EMOJIS_PER_BLOCK) for _ in range(num_emojis): emojis.append(random.choice(matching_emojis)) return "".join(emojis) def _get_matching_emojis(self, trimmed_block): key = self._get_alphanumeric_prefix(trimmed_block.lower()) if key in self._emoji_mappings: return self._emoji_mappings[self._get_alphanumeric_prefix(key)] return [] def _get_alphanumeric_prefix(self, s): i = 0 while i < len(s) and s[i].isalnum(): i += 1 return s[:i] _EMOJI_MAPPINGS = None def _get_emoji_mappings(): global _EMOJI_MAPPINGS if _EMOJI_MAPPINGS is None: with io.open( emojipasta.util.files.PATH_TO_MAPPINGS_FILE, "r", encoding="utf-8" ) as mappings_file: _EMOJI_MAPPINGS = json.load(mappings_file) return _EMOJI_MAPPINGS def main(): generator = EmojipastaGenerator.of_default_mappings() print(generator.generate_emojipasta("fuck this i hate python")) if __name__ == "__main__": main()
py
1a407c5ca9726e5242f2d687936e64222370f797
# Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow_models.core.base_task."""
import functools
from absl.testing import parameterized
import tensorflow as tf

from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import strategy_combinations
from official.utils.testing import mock_task


def all_strategy_combinations():
  return combinations.combine(
      distribution=[
          strategy_combinations.default_strategy,
          strategy_combinations.tpu_strategy,
          strategy_combinations.one_device_strategy_gpu,
      ],
      mode='eager',
  )


class TaskKerasTest(tf.test.TestCase, parameterized.TestCase):

  @combinations.generate(all_strategy_combinations())
  def test_task_with_step_override(self, distribution):
    with distribution.scope():
      task = mock_task.MockTask()
      model = task.build_model()
      model = task.compile_model(
          model,
          optimizer=tf.keras.optimizers.SGD(learning_rate=1e-3),
          metrics=task.build_metrics(),
          train_step=task.train_step,
          validation_step=task.validation_step)
    dataset = task.build_inputs(params=None)
    logs = model.fit(dataset, epochs=1, steps_per_epoch=2)
    self.assertIn('loss', logs.history)
    self.assertIn('acc', logs.history)

    # Without specifying metrics through compile.
    with distribution.scope():
      train_metrics = task.build_metrics(training=True)
      val_metrics = task.build_metrics(training=False)
      model = task.build_model()
      model = task.compile_model(
          model,
          optimizer=tf.keras.optimizers.SGD(learning_rate=1e-3),
          train_step=functools.partial(task.train_step, metrics=train_metrics),
          validation_step=functools.partial(
              task.validation_step, metrics=val_metrics))
    logs = model.fit(dataset, epochs=1, steps_per_epoch=2)
    self.assertIn('loss', logs.history)
    self.assertIn('acc', logs.history)

  def test_task_with_fit(self):
    task = mock_task.MockTask()
    model = task.build_model()
    model = task.compile_model(
        model,
        optimizer=tf.keras.optimizers.SGD(learning_rate=1e-3),
        loss=tf.keras.losses.CategoricalCrossentropy(),
        metrics=task.build_metrics())
    dataset = task.build_inputs(params=None)
    logs = model.fit(dataset, epochs=1, steps_per_epoch=2)
    self.assertIn('loss', logs.history)
    self.assertIn('acc', logs.history)
    self.assertLen(model.evaluate(dataset, steps=1), 2)

  def test_task_invalid_compile(self):
    task = mock_task.MockTask()
    model = task.build_model()
    with self.assertRaises(ValueError):
      _ = task.compile_model(
          model,
          optimizer=tf.keras.optimizers.SGD(learning_rate=1e-3),
          loss=tf.keras.losses.CategoricalCrossentropy(),
          metrics=task.build_metrics(),
          train_step=task.train_step)


if __name__ == '__main__':
  tf.test.main()
py
1a407f1b81e924e76e723bbec91e1b65ee6884d2
""" @brief Packet Template class Instances of this class describe a telemetry packet. For example an instance may describe the packet with ID 5 and channels A, B, and C in that order. @date Created July 2, 2018 @date Modified July 12, 2018 @author R. Joseph Paetz @bug No known bugs """ from fprime.common.models.serialize.type_exceptions import TypeMismatchException from . import ch_template, data_template class PktTemplate(data_template.DataTemplate): """Class to create packet templates to describe specific packet types""" def __init__(self, pkt_id, pkt_name, ch_temp_list): """ Constructor Args: pkt_id (int): The ID of the telemetry packet being described pkt_name (str): Packet name ch_temp_list (ch_template list): List of ch_template objects describing the channels included in the packer. The order of the list is the order of the channels in the packet. """ super().__init__() if not isinstance(pkt_id, int): raise TypeMismatchException(int, type(pkt_id)) if not isinstance(pkt_name, str): raise TypeMismatchException(str, type(pkt_name)) if not isinstance(ch_temp_list, list): raise TypeMismatchException(list, type(ch_temp_list)) for ch in ch_temp_list: if not isinstance(ch, ch_template.ChTemplate): raise TypeMismatchException(type(ch_template.ChTemplate), type(ch)) self.id = pkt_id self.name = pkt_name self.ch_list = ch_temp_list def get_id(self): return self.id def get_name(self): return self.name def get_ch_list(self): return self.ch_list
gyp
1a407ff54e71512bd0b64cb41b1a753de94c4605
{ 'variables': { # The libraries brightray will be compiled to. 'linux_system_libraries': 'gtk+-3.0 dbus-1 x11 x11-xcb xcb xi xcursor xdamage xrandr xcomposite xext xfixes xrender xtst xscrnsaver gconf-2.0 gmodule-2.0 nss', 'conditions': [ ['target_arch=="mips64el"', { 'linux_system_libraries': '<(linux_system_libraries) libpulse', }], ], }, 'includes': [ 'filenames.gypi', ], 'targets': [ { 'target_name': 'brightray', 'type': 'static_library', 'include_dirs': [ '..', '<(libchromiumcontent_src_dir)', '<(libchromiumcontent_src_dir)/skia/config', '<(libchromiumcontent_src_dir)/third_party/boringssl/src/include', '<(libchromiumcontent_src_dir)/third_party/skia/include/core', '<(libchromiumcontent_src_dir)/third_party/skia/include/gpu', '<(libchromiumcontent_src_dir)/third_party/mojo/src', '<(libchromiumcontent_src_dir)/third_party/WebKit', '<(libchromiumcontent_src_dir)/third_party/khronos', '<(libchromiumcontent_dir)/gen', ], 'direct_dependent_settings': { 'include_dirs': [ '../vendor', '<(libchromiumcontent_src_dir)', '<(libchromiumcontent_src_dir)/gpu', '<(libchromiumcontent_src_dir)/skia/config', '<(libchromiumcontent_src_dir)/third_party/boringssl/src/include', '<(libchromiumcontent_src_dir)/third_party/skia/include/core', '<(libchromiumcontent_src_dir)/third_party/skia/include/gpu', '<(libchromiumcontent_src_dir)/third_party/skia/include/config', '<(libchromiumcontent_src_dir)/third_party/icu/source/common', '<(libchromiumcontent_src_dir)/third_party/mojo/src', '<(libchromiumcontent_src_dir)/third_party/khronos', '<(libchromiumcontent_src_dir)/third_party/WebKit', '<(libchromiumcontent_dir)/gen', '<(libchromiumcontent_dir)/gen/third_party/WebKit', ], }, 'sources': [ '<@(brightray_sources)' ], 'conditions': [ # Link with libraries of libchromiumcontent. ['OS=="linux" and libchromiumcontent_component==0', { # On Linux we have to use "--whole-archive" to force executable # to include all symbols, otherwise we will have plenty of # unresolved symbols errors. 'direct_dependent_settings': { 'ldflags': [ '-Wl,--whole-archive', '<@(libchromiumcontent_libraries)', '-Wl,--no-whole-archive', ], } }, { # (Release build on Linux) 'link_settings': { 'libraries': [ '<@(libchromiumcontent_libraries)' ] }, }], # (Normal builds) # Linux specific link settings. ['OS=="linux"', { 'link_settings': { 'ldflags': [ '<!@(<(pkg-config) --libs-only-L --libs-only-other <(linux_system_libraries))', ], 'libraries': [ '-lpthread', '-latomic', '<!@(<(pkg-config) --libs-only-l <(linux_system_libraries))', ], }, 'cflags': [ '<!@(<(pkg-config) --cflags <(linux_system_libraries))', ], 'direct_dependent_settings': { 'cflags': [ '<!@(<(pkg-config) --cflags <(linux_system_libraries))', ], }, 'conditions': [ ['clang==1', { 'cflags_cc': [ '-Wno-reserved-user-defined-literal', ], 'cflags': [ # Needed by using libgtkui: '-Wno-deprecated-register', '-Wno-sentinel', ], 'direct_dependent_settings': { 'cflags': [ '-Wno-deprecated-register', '-Wno-sentinel', ], }, }], ['libchromiumcontent_component', { 'link_settings': { 'libraries': [ # Following libraries are always linked statically. 
'<(libchromiumcontent_dir)/libgtkui.a', '<(libchromiumcontent_dir)/libhttp_server.a', '<(libchromiumcontent_dir)/libdesktop_capture.a', '<(libchromiumcontent_dir)/libdom_keycode_converter.a', '<(libchromiumcontent_dir)/libsystem_wrappers.a', '<(libchromiumcontent_dir)/librtc_base.a', '<(libchromiumcontent_dir)/librtc_base_generic.a', '<(libchromiumcontent_dir)/libwebrtc_common.a', '<(libchromiumcontent_dir)/libinit_webrtc.a', '<(libchromiumcontent_dir)/libyuv.a', '<(libchromiumcontent_dir)/librenderer.a', '<(libchromiumcontent_dir)/libsecurity_state.a', # components/network_session_configurator/common/ '<(libchromiumcontent_dir)/libcommon.a', # services/device/wake_lock/power_save_blocker/ '<(libchromiumcontent_dir)/libpower_save_blocker.a', # Friends of libpdf.a: # On Linux we have to use "--whole-archive" to include # all symbols, otherwise there will be plenty of # unresolved symbols errors. '-Wl,--whole-archive', '<(libchromiumcontent_dir)/libpdf.a', '<(libchromiumcontent_dir)/libppapi_cpp_objects.a', '<(libchromiumcontent_dir)/libppapi_internal_module.a', '<(libchromiumcontent_dir)/libpdfium.a', '<(libchromiumcontent_dir)/libfdrm.a', '<(libchromiumcontent_dir)/libformfiller.a', '<(libchromiumcontent_dir)/libfpdfapi.a', '<(libchromiumcontent_dir)/libfpdfdoc.a', '<(libchromiumcontent_dir)/libfpdftext.a', '<(libchromiumcontent_dir)/libfxcodec.a', '<(libchromiumcontent_dir)/libfxge.a', '<(libchromiumcontent_dir)/libfxjs.a', '<(libchromiumcontent_dir)/libjavascript.a', '<(libchromiumcontent_dir)/libpwl.a', '<(libchromiumcontent_dir)/libfx_agg.a', '<(libchromiumcontent_dir)/libfx_lcms2.a', '<(libchromiumcontent_dir)/libfx_libopenjpeg.a', '<(libchromiumcontent_dir)/libfx_zlib.a', '-Wl,--no-whole-archive', ], }, }, { 'link_settings': { 'libraries': [ # Link with ffmpeg. '<(libchromiumcontent_dir)/libffmpeg.so', # Following libraries are required by libchromiumcontent: '-lasound', '-lcap', '-lcups', '-lrt', '-ldl', '-lresolv', '-lfontconfig', '-lexpat', ], }, }], ['target_arch=="arm"', { 'link_settings': { 'libraries': [ '<(libchromiumcontent_dir)/libjpeg.a', ], 'libraries!': [ '<(libchromiumcontent_dir)/libdesktop_capture_differ_sse2.a', ], }, }], ], }], # OS=="linux" ['OS=="mac"', { 'link_settings': { 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/AppKit.framework', # Required by webrtc: '$(SDKROOT)/System/Library/Frameworks/OpenGL.framework', '$(SDKROOT)/System/Library/Frameworks/IOKit.framework', # Required by media: '$(SDKROOT)/System/Library/Frameworks/VideoToolbox.framework', ], }, 'conditions': [ ['libchromiumcontent_component', { 'link_settings': { 'libraries': [ # Following libraries are always linked statically. 
'<(libchromiumcontent_dir)/libhttp_server.a', '<(libchromiumcontent_dir)/libdesktop_capture.a', '<(libchromiumcontent_dir)/libdom_keycode_converter.a', '<(libchromiumcontent_dir)/librtc_base.a', '<(libchromiumcontent_dir)/librtc_base_generic.a', '<(libchromiumcontent_dir)/libsystem_wrappers.a', '<(libchromiumcontent_dir)/libwebrtc_common.a', '<(libchromiumcontent_dir)/libinit_webrtc.a', '<(libchromiumcontent_dir)/libyuv.a', '<(libchromiumcontent_dir)/librenderer.a', '<(libchromiumcontent_dir)/libsecurity_state.a', # components/network_session_configurator/common/ '<(libchromiumcontent_dir)/libcommon.a', # services/device/wake_lock/power_save_blocker/ '<(libchromiumcontent_dir)/libpower_save_blocker.a', # Friends of libpdf.a: '<(libchromiumcontent_dir)/libpdf.a', '<(libchromiumcontent_dir)/libppapi_cpp_objects.a', '<(libchromiumcontent_dir)/libppapi_internal_module.a', '<(libchromiumcontent_dir)/libjpeg.a', '<(libchromiumcontent_dir)/libpdfium.a', '<(libchromiumcontent_dir)/libfdrm.a', '<(libchromiumcontent_dir)/libformfiller.a', '<(libchromiumcontent_dir)/libfpdfapi.a', '<(libchromiumcontent_dir)/libfpdfdoc.a', '<(libchromiumcontent_dir)/libfpdftext.a', '<(libchromiumcontent_dir)/libfxcodec.a', '<(libchromiumcontent_dir)/libfxcrt.a', '<(libchromiumcontent_dir)/libfxge.a', '<(libchromiumcontent_dir)/libfxjs.a', '<(libchromiumcontent_dir)/libjavascript.a', '<(libchromiumcontent_dir)/libpwl.a', '<(libchromiumcontent_dir)/libfx_agg.a', '<(libchromiumcontent_dir)/libfx_lcms2.a', '<(libchromiumcontent_dir)/libfx_libopenjpeg.a', '<(libchromiumcontent_dir)/libfx_zlib.a', ], }, }, { 'link_settings': { 'libraries': [ # Link with ffmpeg. '<(libchromiumcontent_dir)/libffmpeg.dylib', # Link with system frameworks. # ui_base.gypi: '$(SDKROOT)/System/Library/Frameworks/Accelerate.framework', # net.gypi: '$(SDKROOT)/System/Library/Frameworks/Foundation.framework', '$(SDKROOT)/System/Library/Frameworks/Security.framework', '$(SDKROOT)/System/Library/Frameworks/SystemConfiguration.framework', '-lresolv', # media.gyp: '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework', '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework', '$(SDKROOT)/System/Library/Frameworks/AVFoundation.framework', '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework', '$(SDKROOT)/System/Library/Frameworks/CoreMedia.framework', '$(SDKROOT)/System/Library/Frameworks/CoreMIDI.framework', '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework', # surface.gyp: '$(SDKROOT)/System/Library/Frameworks/IOSurface.framework', # content_common.gypi: '$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework', # base.gyp: '$(SDKROOT)/System/Library/Frameworks/ApplicationServices.framework', '$(SDKROOT)/System/Library/Frameworks/Carbon.framework', '$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework', # device/gamepad/BUILD.gn: '$(SDKROOT)/System/Library/Frameworks/GameController.framework', # content_browser.gypi: '-lbsm', # content_common.gypi: '-lsandbox', # bluetooth.gyp: '$(SDKROOT)/System/Library/Frameworks/IOBluetooth.framework', # components/wifi/BUILD.gn: '$(SDKROOT)/System/Library/Frameworks/CoreWLAN.framework', # printing/BUILD.gn: '-lcups', ], }, }], ] }], # OS=="mac" ['OS=="win"', { 'link_settings': { 'msvs_settings': { 'VCLinkerTool': { 'AdditionalOptions': [ # warning /DELAYLOAD:dll ignored; no imports found from dll '/ignore:4199', ], 'AdditionalDependencies': [ 'delayimp.lib', ], 'DelayLoadDLLs': [ 'wtsapi32.dll', # content_common.gypi: 'd3d9.dll', 'd3d11.dll', 'dxva2.dll', # media.gyp: 'mf.dll', 
'mfplat.dll', 'mfreadwrite.dll', # bluetooth.gyp: 'BluetoothApis.dll', 'Bthprops.cpl', 'setupapi.dll', # base.gyp: 'cfgmgr32.dll', 'powrprof.dll', 'setupapi.dll', # net_common.gypi: 'crypt32.dll', 'dhcpcsvc.dll', 'rpcrt4.dll', 'secur32.dll', 'urlmon.dll', 'winhttp.dll', # windows runtime 'API-MS-WIN-CORE-WINRT-L1-1-0.DLL', 'API-MS-WIN-CORE-WINRT-STRING-L1-1-0.DLL', ], }, }, }, 'conditions': [ ['libchromiumcontent_component', { 'link_settings': { 'libraries': [ # Needed by desktop_capture.lib: '-ld3d11.lib', '-ldxgi.lib', # Following libs are always linked statically. '<(libchromiumcontent_dir)/base_static.lib', '<(libchromiumcontent_dir)/sandbox.lib', '<(libchromiumcontent_dir)/sandbox_helper_win.lib', '<(libchromiumcontent_dir)/http_server.lib', '<(libchromiumcontent_dir)/desktop_capture.lib', '<(libchromiumcontent_dir)/dom_keycode_converter.lib', '<(libchromiumcontent_dir)/rtc_base.lib', '<(libchromiumcontent_dir)/rtc_base_generic.lib', '<(libchromiumcontent_dir)/system_wrappers.lib', '<(libchromiumcontent_dir)/webrtc_common.lib', '<(libchromiumcontent_dir)/init_webrtc.lib', '<(libchromiumcontent_dir)/libyuv.lib', '<(libchromiumcontent_dir)/renderer.lib', '<(libchromiumcontent_dir)/security_state.lib', # components/network_session_configurator/common/ '<(libchromiumcontent_dir)/common.lib', # services/device/wake_lock/power_save_blocker/ '<(libchromiumcontent_dir)/power_save_blocker.lib', # Friends of pdf.lib: '<(libchromiumcontent_dir)/pdf.lib', '<(libchromiumcontent_dir)/ppapi_cpp_objects.lib', '<(libchromiumcontent_dir)/ppapi_internal_module.lib', '<(libchromiumcontent_dir)/libjpeg.lib', '<(libchromiumcontent_dir)/pdfium.lib', '<(libchromiumcontent_dir)/fdrm.lib', '<(libchromiumcontent_dir)/formfiller.lib', '<(libchromiumcontent_dir)/fpdfapi.lib', '<(libchromiumcontent_dir)/fpdfdoc.lib', '<(libchromiumcontent_dir)/fpdftext.lib', '<(libchromiumcontent_dir)/fpdftext.lib', '<(libchromiumcontent_dir)/fxcodec.lib', '<(libchromiumcontent_dir)/fxcrt.lib', '<(libchromiumcontent_dir)/fxge.lib', '<(libchromiumcontent_dir)/fxjs.lib', '<(libchromiumcontent_dir)/javascript.lib', '<(libchromiumcontent_dir)/pwl.lib', '<(libchromiumcontent_dir)/fx_agg.lib', '<(libchromiumcontent_dir)/fx_lcms2.lib', '<(libchromiumcontent_dir)/fx_libopenjpeg.lib', '<(libchromiumcontent_dir)/fx_zlib.lib', ], }, }, { # Link with system libraries. 'link_settings': { 'libraries': [ # Link with ffmpeg. '<(libchromiumcontent_dir)/ffmpeg.dll', # content_browser.gypi: '-lsensorsapi.lib', '-lportabledeviceguids.lib', # content_common.gypi: '-ld3d9.lib', '-ld3d11.lib', '-ldxgi.lib', '-ldxva2.lib', '-lstrmiids.lib', '-lmf.lib', '-lmfplat.lib', '-lmfuuid.lib', # media.gyp: '-ldxguid.lib', '-lmfreadwrite.lib', '-lmfuuid.lib', ], 'msvs_settings': { 'VCLinkerTool': { 'AdditionalDependencies': [ 'advapi32.lib', 'dbghelp.lib', 'dwmapi.lib', 'gdi32.lib', 'hid.lib', 'netapi32.lib', 'oleacc.lib', 'user32.lib', 'usp10.lib', 'version.lib', 'winspool.lib', 'wtsapi32.lib', # bluetooth.gyp: 'Bthprops.lib', 'BluetoothApis.lib', # base.gyp: 'cfgmgr32.lib', 'powrprof.lib', 'setupapi.lib', # net_common.gypi: 'crypt32.lib', 'dhcpcsvc.lib', 'ncrypt.lib', 'rpcrt4.lib', 'secur32.lib', 'urlmon.lib', 'winhttp.lib', # ui/gfx/BUILD.gn: 'dwrite.lib', # skia/BUILD.gn: 'fontsub.lib', ], }, }, }, }], # libchromiumcontent_component ], }], # OS=="win" ], }, ], }
py
1a40801edd478730525fa83f0ddbdc0dfc86bf40
''' Takes all pickle files from 1. pre-preprocessing, 2. post-preprocessing, 3. kernelizing, 4. decomposing and creates single csv file with all data. ''' import pandas as pd import networkx as nx import os, sys, pickle, pprint, shutil, datetime, time from os.path import dirname,realpath sys.path.insert(0,dirname(realpath(__file__))[:-10]) #print(sys.path) import algs.utils_misc as utils_misc def check_for_missing_data(final_output, kernel_output, kdistinct, wscale, witer, fname): ''' true_total input: TF: wecp, ipart, lp - witer=0, wscale=sml,med, kdistinct <= k_distinct_max LV: wecp, ipart, lp - wscale=sml,med, kdistinct <= k_distinct_max true_dist input: TF: ipart, lp - witer <= 4 [0-4], kdistinct <= k_distinct_max LV: ipart, lp - kdistinct <= k_distinct_max guess input: TF: ipart, lp - witer=0, 5 <= kdistinct <= k_distinct_max LV: ipart, lp - 5 <= kdistinct <= k_distinct_max ''' k_distinct_max = 11 ######### NOTE if kdistinct==0: return if witer is None: witer=0 # first check true_total input if wscale=='sml' or wscale=='med': if kdistinct <= k_distinct_max: if witer==0: wecp_dat = final_output['decomp_data']['bsd_dw']['true_total'] ip_dat = final_output['decomp_data']['bswd_dw_ip']['true_total'] lp_dat = final_output['decomp_data']['bswd_dw_lp']['true_total'] if wecp_dat is None: print('ERROR: {} (true_total input) missing wecp k={}'.format(fname, kdistinct)) # second check for true_dist input for weight permutations if witer <= 5 and kdistinct <= 7: ip_dat = final_output['decomp_data']['bswd_dw_ip']['true_distinct'] lp_dat = final_output['decomp_data']['bswd_dw_lp']['true_distinct'] if ip_dat is None: print('ERROR: {} (true_distinct input) missing ip_dat k={}'.format(fname, kdistinct)) if lp_dat is None: print('ERROR: {} (true_distinct input) missing lp_dat k={}'.format(fname, kdistinct)) # third check for true_dist input for weight permutations if witer==0 and kdistinct <= k_distinct_max: ip_dat = final_output['decomp_data']['bswd_dw_ip']['true_distinct'] lp_dat = final_output['decomp_data']['bswd_dw_lp']['true_distinct'] if ip_dat is None: print('ERROR: {} (true_distinct input) missing ip_dat k={}'.format(fname, kdistinct)) if lp_dat is None: print('ERROR: {} (true_distinct input) missing lp_dat k={}'.format(fname, kdistinct)) # fourth check for guess input if witer==0 and kdistinct >= 5 and kdistinct <= 7: vals = [-0.6, -0.4, -0.2, 0.2, 0.4, 0.6] for val in vals: ip_dat = final_output['decomp_data']['bswd_dw_ip']['guesses'] lp_dat = final_output['decomp_data']['bswd_dw_lp']['guesses'] kern_dat = kernel_output['guess_kernels'][val] if kern_dat['passed_kernel']: if ip_dat[val] is None: print('ERROR: {} (guess {} input) missing ip_dat k={}'.format(fname, val, kdistinct)) if lp_dat[val] is None: print('ERROR: {} (guess {} input) missing lp_dat k={}'.format(fname, val, kdistinct)) def print_keys(final_output): pre_preprocessing = final_output['pre_preprocessing'] post_preprocessing = final_output['post_preprocessing'] kernel_data = final_output['kernel_data'] true_total_kernel = kernel_data['true_total_kernel'] true_distinct_kernel = kernel_data['true_distinct_kernel'] guess_kernels = kernel_data['guess_kernels'] decomp_data = final_output['decomp_data'] bsd_dw = decomp_data['bsd_dw'] bswd_dw_lp = decomp_data['bswd_dw_lp'] bswd_dw_ip = decomp_data['bswd_dw_ip'] print('\nMain keys: ', list(final_output.keys())) print('pre_preprocessing keys: ', list(pre_preprocessing.keys())) print('\npost_preprocessing keys: ', list(post_preprocessing.keys())) print('\nkernel_data keys: ', 
list(kernel_data.keys())) if true_total_kernel is not None: print('\ntrue_total_kernel keys: ', list(true_total_kernel.keys())) else: print('\ntrue_total_kernel keys: ', true_total_kernel) if true_distinct_kernel is not None: print('\ntrue_distinct_kernel keys: ', list(true_distinct_kernel.keys())) else: print('\ntrue_distinct_kernel keys: ', true_distinct_kernel) if guess_kernels is not None: print('\nguess_kernels keys: ', list(guess_kernels.keys())) else: print('\nguess_kernels keys: ', guess_kernels.keys()) print('\ndecomp_data keys: ', list(decomp_data.keys())) print('bsd_dw keys: ', list(bsd_dw.keys())) print('bswd_dw_lp keys: ', list(bswd_dw_lp.keys())) print('bswd_dw_ip keys: ', list(bswd_dw_ip.keys())) def get_data(witer, wscale, fname, datatype, final_output, kernel_output, postproc_output, preproc_output, get_colnames=False): ''' filename pre_preprocessing: n-init, m-init, kdistinct-init, ktotal-init, witer, wscale(sml, med, lrg) post_preprocessing/pre_kernel: n-postproc, m-postproc, kdistinct-postproc, ktotal-postproc, time_preproc - true_total input: post_kernel/pre_bsd: kinput-1, n-1, m-1, passed_kernel-1, time_kernel-1 wecp: passed_bsd-1, reconstructs-1, found_cliq_frac-1, time_bsd-1 - true_dist input: post_kernel/pre_bsd: kinput-2, n-2, m-2, passed_kernel-2, time_kernel-2 lp: passed_bsd-2-1, reconstructs-2-1, found_cliq_frac-2-1, time_bsd-2-1 ipart: passed_bsd-2-2, reconstructs-2-2, found_cliq_frac-2-2, time_bsd-2-2 - guess0 input: post_kernel/pre_bsd: kinput-3, n-3, m-3, passed_kernel-3, time_kernel-3 lp: passed_bsd-3-1, reconstructs-3-1, found_cliq_frac-3-1, time_bsd-3-1 ipart: passed_bsd-3-2, reconstructs-3-2, found_cliq_frac-3-2, time_bsd-3-2 .... - guess5 input: post_kernel/pre_bsd: kinput-8, n-8, m-8, passed_kernel-8, time_kernel-8 lp: passed_bsd-8-1, reconstructs-8-1, found_cliq_frac-8-1, time_bsd-8-1 ipart: passed_bsd-8-2, reconstructs-8-2, found_cliq_frac-8-2, time_bsd-8-2 ''' colnames = {'filename' : None, 'datatype' : None, #pre-preprocessing 'n-init' : None, 'm-init' : None, 'kdistinct-init' : None, 'ktotal-init' : None, 'witer' : None, 'wscale' : None, # post-preprocessing 'n-postproc' : None, 'm-postproc' : None, 'kdistinct-postproc' : None, 'ktotal-postproc' : None, 'time_preproc' : None, #----- true_total input 'kinput-1' : None, 'n-1' : None, 'm-1' : None, 'max_edgeweight-1' : None, 'passed_kernel-1' : None, 'time_kernel-1' : None, # wecp 'passed_bsd-1' : None, 'reconstructs-1' : None, 'found_cliq_frac-1' : None, 'time_bsd-1' : None} # kdistinct + guess input info for i in range(2, 8+1): # post kernel info colnames['kinput-'+str(i)]=None colnames['n-'+str(i)]=None colnames['m-'+str(i)]=None colnames['max_edgeweight-'+str(i)]=None colnames['passed_kernel-'+str(i)]=None colnames['time_kernel-'+str(i)]=None # lp colnames['passed_bsd-'+str(i)+'-1']=None colnames['reconstructs-'+str(i)+'-1']=None colnames['found_cliq_frac-'+str(i)+'-1']=None colnames['time_bsd-'+str(i)+'-1']=None colnames['mem_usage-'+str(i)+'-1']=None # ipart colnames['passed_bsd-'+str(i)+'-2']=None colnames['reconstructs-'+str(i)+'-2']=None colnames['found_cliq_frac-'+str(i)+'-2']=None colnames['time_bsd-'+str(i)+'-2']=None colnames['mem_usage-'+str(i)+'-2']=None if get_colnames: return list(colnames.keys()) else: pre_preprocessing = preproc_output post_preprocessing = postproc_output['post_preprocessing'] kernel_data = kernel_output true_total_kernel = kernel_data['true_total_kernel'] true_distinct_kernel = kernel_data['true_distinct_kernel'] guess_kernels = kernel_data['guess_kernels'] 
decomp_data = final_output['decomp_data'] bsd_dw = decomp_data['bsd_dw'] bswd_dw_lp = decomp_data['bswd_dw_lp'] bswd_dw_ip = decomp_data['bswd_dw_ip'] upd_fname = fname.split('/')[-1] if datatype=='tf': # remove 'witer' from fname newfnm = '' comps = upd_fname.split('_') for u in comps: if 'witer' not in u: newfnm+='_'+u upd_fname=newfnm[1:-1] #### fill in the data colnames['filename'] = upd_fname colnames['datatype'] = datatype colnames['n-init'] = pre_preprocessing['n'] colnames['m-init'] = pre_preprocessing['m'] colnames['kdistinct-init'] = pre_preprocessing['kdistinct'] colnames['ktotal-init'] = pre_preprocessing['ktotal'] colnames['witer'] = witer colnames['wscale'] = wscale colnames['n-postproc'] = post_preprocessing['n'] colnames['m-postproc'] = post_preprocessing['m'] colnames['kdistinct-postproc'] = post_preprocessing['kdistinct'] colnames['ktotal-postproc'] = post_preprocessing['ktotal'] colnames['time_preproc'] = post_preprocessing['preprocess_time'] ##----- true_total input if true_total_kernel is not None: colnames['kinput-1'] = true_total_kernel['kinput'] colnames['n-1'] = true_total_kernel['n'] colnames['m-1'] = true_total_kernel['m'] colnames['max_edgeweight-1'] = true_total_kernel['max_eweight'] colnames['passed_kernel-1'] = true_total_kernel['passed_kernel'] colnames['time_kernel-1'] = true_total_kernel['kernel_time'] # wecp data truetot = bsd_dw['true_total'] colnames['passed_bsd-1'] = truetot['passed_bsd'] colnames['reconstructs-1'] = truetot['reconstructs'] colnames['found_cliq_frac-1'] = truetot['found_cliq_fraction'] colnames['time_bsd-1'] = truetot['time_bsd'] ##----- true distinct input if true_distinct_kernel is not None: # post kernel info colnames['kinput-2'] = true_distinct_kernel['kinput'] colnames['n-2'] = true_distinct_kernel['n'] colnames['m-2'] = true_distinct_kernel['m'] colnames['max_edgeweight-2'] = true_distinct_kernel['max_eweight'] colnames['passed_kernel-2'] = true_distinct_kernel['passed_kernel'] colnames['time_kernel-2'] = true_distinct_kernel['kernel_time'] # lp truedist_lp = bswd_dw_lp['true_distinct'] colnames['passed_bsd-2-1'] = truedist_lp['passed_bsd'] colnames['reconstructs-2-1'] = truedist_lp['reconstructs'] colnames['found_cliq_frac-2-1'] = truedist_lp['found_cliq_fraction'] colnames['time_bsd-2-1'] = truedist_lp['time_bsd'] if 'mem_usage' in truedist_lp.keys(): colnames['mem_usage-2-1']=truedist_lp['mem_usage'] # ipart truedist_ip = bswd_dw_ip['true_distinct'] colnames['passed_bsd-2-2'] = truedist_ip['passed_bsd'] colnames['reconstructs-2-2'] = truedist_ip['reconstructs'] colnames['found_cliq_frac-2-2'] = truedist_ip['found_cliq_fraction'] colnames['time_bsd-2-2'] = truedist_ip['time_bsd'] if 'mem_usage' in truedist_ip.keys(): colnames['mem_usage-2-2']=truedist_ip['mem_usage'] i=3 vals = [-0.6, -0.4, -0.2, 0.2, 0.4, 0.6] for val in vals: if guess_kernels is not None: guess_kern = guess_kernels[val] if guess_kern is not None: guess_ip = bswd_dw_ip['guesses'][val] guess_lp = bswd_dw_lp['guesses'][val] # post kernel info colnames['kinput-'+str(i)] = guess_kern['kinput'] colnames['n-'+str(i)] = guess_kern['n'] colnames['m-'+str(i)] = guess_kern['m'] colnames['max_edgeweight-'+str(i)] = guess_kern['max_eweight'] colnames['passed_kernel-'+str(i)] = guess_kern['passed_kernel'] colnames['time_kernel-'+str(i)] = guess_kern['kernel_time'] if guess_lp is not None: # lp colnames['passed_bsd-'+str(i)+'-1'] = guess_lp['passed_bsd'] colnames['reconstructs-'+str(i)+'-1'] = guess_lp['reconstructs'] colnames['found_cliq_frac-'+str(i)+'-1'] = 
guess_lp['found_cliq_fraction'] colnames['time_bsd-'+str(i)+'-1'] = guess_lp['time_bsd'] if guess_ip is not None: # ipart colnames['passed_bsd-'+str(i)+'-2'] = guess_ip['passed_bsd'] colnames['reconstructs-'+str(i)+'-2'] = guess_ip['reconstructs'] colnames['found_cliq_frac-'+str(i)+'-2'] = guess_ip['found_cliq_fraction'] colnames['time_bsd-'+str(i)+'-2'] = guess_ip['time_bsd'] i+=1 return colnames def get_overlapping_nodes(fname, dat, final_output, kernel_output, postproc_output, preproc_output): ''' For each clique compute/save: 1. # of nodes that are in other cliques 2. # of cliques that are overlapping w. current clique 3. clique size ''' #print('\n\n') #print(final_output['pre_preprocessing']) #clique_vertices = final_output['post_preprocessing']['clique_vertices'] #clique_vertices = final_output['pre_preprocessing']['clique_vertices'] #clique_vertices = postproc_output['post_preprocessing']['clique_vertices'] clique_vertices = preproc_output['clique_vertices'] total_cliques = len(clique_vertices) clique_i=0 for cliquei in clique_vertices: cliquen = len(cliquei) dat['clique_n-'+str(clique_i)]=cliquen # clique size num_node_ovl=set() num_cliq_ovl=0 clique_j=0 for cliquej in clique_vertices: if clique_i!=clique_j: #nno = len([x for x in cliquei if x in cliquej]) #num_node_ovl+=nno nno=0 for x in cliquei: if x in cliquej: nno+=1 num_node_ovl.add(x) if nno>0: # # of cliques overlapping num_cliq_ovl+=1 clique_j+=1 dat['clique_nodeovl-'+str(clique_i)]=len(num_node_ovl)/cliquen dat['clique_clqovl-'+str(clique_i)]=num_cliq_ovl/total_cliques clique_i+=1 def run(final_datadir, out_datadir, kernel_dir, postproc_dir, preproc_dir, typ): print('preproc dirname: ', preproc_dir) print('postproc dirname: ', postproc_dir) print('kern dirname: ', kernel_dir) print('final data dirname: ', final_datadir) print('out_dirname: ', out_datadir) print() colnames = get_data(None, None, None, None, None, None, None, None, get_colnames=True) if not os.path.exists(out_datadir): os.makedirs(out_datadir) final_files = utils_misc.get_files(final_datadir, '.pkl') data = [] for fname in final_files: preproc_file = preproc_dir+fname.split('/')[-1] postproc_file = postproc_dir+fname.split('/')[-1] kern_file = kernel_dir+fname.split('/')[-1] witer=None wscale=None # sml, med, lrg if typ=='tf': witer = int(utils_misc.get_fname_value(fname, 'witer')) ws = int(utils_misc.get_fname_value(fname, 'scalefac')) if ws==1: wscale='sml' elif ws==4: wscale='med' elif ws==16: wscale='lrg' elif typ=='lv': ws = int(utils_misc.get_fname_value(fname, 'scalefac')) if ws==1: wscale='sml' elif ws==2: wscale='med' elif ws==4: wscale='lrg' # get pkl file info with open(fname, 'rb') as infile: final_output = pickle.load(infile) with open(kern_file, 'rb') as infile: kernel_output = pickle.load(infile) with open(postproc_file, 'rb') as infile: postproc_output = pickle.load(infile) with open(preproc_file, 'rb') as infile: preproc_output = pickle.load(infile) if final_output is None: print('Warning: final_output is None') if kernel_output is None: print('Warning: kernel_output is None') if postproc_output is None: print('Warning: postproc_output is None') if preproc_output is None: print('Warning: preproc_output is None') k_total = postproc_output['post_preprocessing']['ktotal'] k_distinct = postproc_output['post_preprocessing']['kdistinct'] dat=None try: #NOTE dat = get_data(witer, wscale, fname, typ, final_output, kernel_output, postproc_output, preproc_output) except: print("ERROR: couldnt get complete data for ", fname) 
check_for_missing_data(final_output, kernel_output, k_distinct, wscale, witer, fname) if dat is not None: get_overlapping_nodes(fname, dat, final_output, kernel_output, postproc_output, preproc_output) data.append(dat) df = pd.DataFrame(data) return df def main(): preproc_dir_tf = 'data/pre_preprocessing/tf/' preproc_dir_lv = 'data/pre_preprocessing/lv/' postproc_dir_tf = 'data/post_preprocessing/tf/' postproc_dir_lv = 'data/post_preprocessing/lv/' kernel_dir_tf = 'data/kernels/tf/' kernel_dir_lv = 'data/kernels/lv/' in_datadir_tf = 'data/finaldata/tf/' in_datadir_lv = 'data/finaldata/lv/' out_datadir_tf = 'data/csvfiles/' out_datadir_lv = 'data/csvfiles/' start = time.time() print('Creating tf csv') tf_df = run(in_datadir_tf, out_datadir_tf, kernel_dir_tf, postproc_dir_tf, preproc_dir_tf, 'tf') #print(tf_df['num_overlap']) print('\nCreating lv csv') lv_df = run(in_datadir_lv, out_datadir_lv, kernel_dir_lv, postproc_dir_lv, preproc_dir_lv, 'lv') #print(lv_df) end = time.time() print(tf_df.shape, lv_df.shape) df = tf_df.append(lv_df) print(df) now = datetime.datetime.now() date_time = now.strftime("%m-%d-%Y_%H-%M-%S") name = 'fulldata_'+date_time+'.csv' print(name, 'time: ', end-start) source_dir = 'data/csvfiles/mostrecent/' target_dir = 'data/csvfiles/' if not os.path.exists(source_dir): os.makedirs(source_dir) file_names = os.listdir(source_dir) #move files from mostrecent dir for file_name in file_names: shutil.move(os.path.join(source_dir, file_name), target_dir) #save csv to mostrecent dir df.to_csv(source_dir+name) if __name__=="__main__": main()
py
1a40808c3f1845c481866f095b78e29ecdaf1b32
import torch.nn as nn
import torch
from typing import Optional
import torch.optim as optim
import os

from .params import MnistTrainingParams
from .dataset import create_mnist_loaders
from .models import load_mnist_classifier
from online_attacks.classifiers.trainer import Trainer
from online_attacks.attacks import create_attacker
from online_attacks.utils.optimizer import create_optimizer


def train_mnist(
    params: MnistTrainingParams = MnistTrainingParams(),
    device: Optional[torch.device] = None,
) -> nn.Module:
    train_loader, test_loader = create_mnist_loaders(params.dataset_params)
    model = load_mnist_classifier(params.model_type)
    optimizer = create_optimizer(model.parameters(), params.optimizer_params)

    if params.train_on_test:
        train_loader, test_loader = test_loader, train_loader

    # TODO: Implement Ensemble Adversarial Training, where a list of attacker is provided
    model_attacker = model
    if params.model_attacker is not None:
        model_attacker = load_mnist_classifier(
            params.model_type,
            name=params.model_attacker,
            model_dir=params.model_dir,
            device=device,
        )
    attacker = create_attacker(model_attacker, params.attacker, params.attacker_params)

    trainer = Trainer(
        model, train_loader, test_loader, optimizer, attacker=attacker, device=device
    )

    filename = None
    if params.save_model:
        filename = os.path.join(
            params.save_dir, "mnist", params.model_type.value, "%s.pth" % (params.name)
        )
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

    for epoch in range(1, params.num_epochs):
        trainer.train(epoch)
        trainer.test(epoch)
        if params.save_model:
            torch.save(model.state_dict(), filename)

    return model
py
1a4082104f49d211a8abefe813b082c6987e47ae
import os
import re
import sys
import json
import time
import signal
import logging
import traceback
import boto3
import subprocess
from moto import core as moto_core
from requests.models import Response
from localstack import constants, config
from localstack.constants import (
    ENV_DEV, LOCALSTACK_VENV_FOLDER, LOCALSTACK_INFRA_PROCESS, DEFAULT_SERVICE_PORTS)
from localstack.utils import common, persistence
from localstack.utils.common import (TMP_THREADS, run, get_free_tcp_port, is_linux,
    start_thread, ShellCommandThread, in_docker, is_port_open, sleep_forever,
    print_debug, edge_ports_info)
from localstack.utils.server import multiserver
from localstack.utils.testutil import is_local_test_mode
from localstack.utils.bootstrap import (
    setup_logging, is_debug, canonicalize_api_names, load_plugins, in_ci)
from localstack.utils.analytics import event_publisher
from localstack.services import generic_proxy, install
from localstack.services.es import es_api
from localstack.services.plugins import SERVICE_PLUGINS, record_service_health, check_infra
from localstack.services.firehose import firehose_api
from localstack.services.awslambda import lambda_api
from localstack.services.generic_proxy import GenericProxyHandler, ProxyListener, start_proxy_server
from localstack.services.dynamodbstreams import dynamodbstreams_api
from localstack.utils.analytics.profiler import log_duration

# flag to indicate whether signal handlers have been set up already
SIGNAL_HANDLERS_SETUP = False

# output string that indicates that the stack is ready
READY_MARKER_OUTPUT = 'Ready.'

# default backend host address
DEFAULT_BACKEND_HOST = '127.0.0.1'

# maps ports to proxy listener details
PROXY_LISTENERS = {}

# set up logger
LOG = logging.getLogger(__name__)

# fix moto account ID - note: keep this at the top level here
moto_core.ACCOUNT_ID = constants.TEST_AWS_ACCOUNT_ID


# -----------------------
# CONFIG UPDATE BACKDOOR
# -----------------------

def update_config_variable(variable, new_value):
    if new_value is not None:
        LOG.info('Updating value of config variable "%s": %s' % (variable, new_value))
        setattr(config, variable, new_value)


class ConfigUpdateProxyListener(ProxyListener):
    """ Default proxy listener that intercepts requests to retrieve or update config variables.
    """

    def forward_request(self, method, path, data, headers):
        if path != constants.CONFIG_UPDATE_PATH or method != 'POST':
            return True
        response = Response()
        data = json.loads(data)
        variable = data.get('variable', '')
        response._content = '{}'
        response.status_code = 200
        if not re.match(r'^[_a-zA-Z0-9]+$', variable):
            response.status_code = 400
            return response
        new_value = data.get('value')
        update_config_variable(variable, new_value)
        value = getattr(config, variable, None)
        result = {'variable': variable, 'value': value}
        response._content = json.dumps(result)
        return response


GenericProxyHandler.DEFAULT_LISTENERS.append(ConfigUpdateProxyListener())


# -----------------
# API ENTRY POINTS
# -----------------

def start_sns(port=None, asynchronous=False, update_listener=None):
    port = port or config.PORT_SNS
    return start_moto_server('sns', port, name='SNS', asynchronous=asynchronous,
        update_listener=update_listener)


def start_sts(port=None, asynchronous=False):
    port = port or config.PORT_STS
    return start_moto_server('sts', port, name='STS', asynchronous=asynchronous)


def start_redshift(port=None, asynchronous=False):
    port = port or config.PORT_REDSHIFT
    return start_moto_server('redshift', port, name='Redshift', asynchronous=asynchronous)


def start_acm(port=None, asynchronous=False):
    port = port or config.PORT_ACM
    return start_moto_server('acm', port, name='ACM', asynchronous=asynchronous)


# TODO still needed?
def start_ses(port=None, asynchronous=False):
    port = port or config.PORT_SES
    return start_moto_server('ses', port, name='SES', asynchronous=asynchronous)


# TODO move to es_starter.py?
def start_elasticsearch_service(port=None, asynchronous=False):
    port = port or config.PORT_ES
    return start_local_api('ES', port, api='es', method=es_api.serve, asynchronous=asynchronous)


def start_firehose(port=None, asynchronous=False):
    port = port or config.PORT_FIREHOSE
    return start_local_api('Firehose', port, api='firehose',
        method=firehose_api.serve, asynchronous=asynchronous)


def start_dynamodbstreams(port=None, asynchronous=False):
    port = port or config.PORT_DYNAMODBSTREAMS
    return start_local_api('DynamoDB Streams', port, api='dynamodbstreams',
        method=dynamodbstreams_api.serve, asynchronous=asynchronous)


def start_lambda(port=None, asynchronous=False):
    port = port or config.PORT_LAMBDA
    return start_local_api('Lambda', port, api='lambda',
        method=lambda_api.serve, asynchronous=asynchronous)


def start_ssm(port=None, asynchronous=False, update_listener=None):
    port = port or config.PORT_SSM
    return start_moto_server('ssm', port, name='SSM', asynchronous=asynchronous,
        update_listener=update_listener)


# ---------------
# HELPER METHODS
# ---------------

def patch_urllib3_connection_pool(**constructor_kwargs):
    """ Override the default parameters of HTTPConnectionPool, e.g., set the pool size via maxsize=16 """
    try:
        from urllib3 import connectionpool, poolmanager

        class MyHTTPSConnectionPool(connectionpool.HTTPSConnectionPool):
            def __init__(self, *args, **kwargs):
                kwargs.update(constructor_kwargs)
                super(MyHTTPSConnectionPool, self).__init__(*args, **kwargs)
        poolmanager.pool_classes_by_scheme['https'] = MyHTTPSConnectionPool

        class MyHTTPConnectionPool(connectionpool.HTTPConnectionPool):
            def __init__(self, *args, **kwargs):
                kwargs.update(constructor_kwargs)
                super(MyHTTPConnectionPool, self).__init__(*args, **kwargs)
        poolmanager.pool_classes_by_scheme['http'] = MyHTTPConnectionPool
    except Exception:
        pass


def patch_instance_tracker_meta():
    """ Avoid instance collection for moto dashboard """
    def new_intance(meta, name, bases,
dct): cls = super(moto_core.models.InstanceTrackerMeta, meta).__new__(meta, name, bases, dct) if name == 'BaseModel': return cls cls.instances = [] return cls moto_core.models.InstanceTrackerMeta.__new__ = new_intance def new_basemodel(cls, *args, **kwargs): instance = super(moto_core.models.BaseModel, cls).__new__(cls) return instance moto_core.models.BaseModel.__new__ = new_basemodel def set_service_status(data): command = data.get('command') service = data.get('service') service_ports = config.parse_service_ports() if command == 'start': existing = service_ports.get(service) port = DEFAULT_SERVICE_PORTS.get(service) if existing: status = get_service_status(service, port) if status == 'running': return key_upper = service.upper().replace('-', '_') port_variable = 'PORT_%s' % key_upper service_list = os.environ.get('SERVICES', '').strip() services = [e for e in re.split(r'[\s,]+', service_list) if e] contained = [s for s in services if s.startswith(service)] if not contained: services.append(service) update_config_variable(port_variable, port) new_service_list = ','.join(services) os.environ['SERVICES'] = new_service_list config.populate_configs() LOG.info('Starting service %s on port %s' % (service, port)) SERVICE_PLUGINS[service].start(asynchronous=True) return {} def get_services_status(): result = {} for service, port in config.parse_service_ports().items(): status = get_service_status(service, port) result[service] = { 'port': port, 'status': status } return result def get_service_status(service, port=None): port = port or config.parse_service_ports().get(service) status = 'disabled' if (port or 0) <= 0 else 'running' if is_port_open(port) else 'stopped' return status def get_multiserver_or_free_service_port(): if config.FORWARD_EDGE_INMEM: return multiserver.get_moto_server_port() return get_free_tcp_port() def register_signal_handlers(): global SIGNAL_HANDLERS_SETUP if SIGNAL_HANDLERS_SETUP: return # register signal handlers def signal_handler(signal, frame): stop_infra() os._exit(0) signal.signal(signal.SIGTERM, signal_handler) signal.signal(signal.SIGINT, signal_handler) SIGNAL_HANDLERS_SETUP = True def do_run(cmd, asynchronous, print_output=None, env_vars={}): sys.stdout.flush() if asynchronous: if is_debug() and print_output is None: print_output = True outfile = subprocess.PIPE if print_output else None t = ShellCommandThread(cmd, outfile=outfile, env_vars=env_vars) t.start() TMP_THREADS.append(t) return t return run(cmd, env_vars=env_vars) def start_proxy_for_service(service_name, port, backend_port, update_listener, quiet=False, params={}): # TODO: remove special switch for Elasticsearch (see also note in service_port(...) 
in config.py) if config.FORWARD_EDGE_INMEM and service_name != 'elasticsearch': if backend_port: PROXY_LISTENERS[service_name] = (service_name, backend_port, update_listener) return # check if we have a custom backend configured custom_backend_url = os.environ.get('%s_BACKEND' % service_name.upper()) backend_url = custom_backend_url or ('http://%s:%s' % (DEFAULT_BACKEND_HOST, backend_port)) return start_proxy(port, backend_url=backend_url, update_listener=update_listener, quiet=quiet, params=params) def start_proxy(port, backend_url, update_listener=None, quiet=False, params={}, use_ssl=None): use_ssl = config.USE_SSL if use_ssl is None else use_ssl proxy_thread = start_proxy_server(port=port, forward_url=backend_url, use_ssl=use_ssl, update_listener=update_listener, quiet=quiet, params=params) return proxy_thread def start_moto_server(key, port, name=None, backend_port=None, asynchronous=False, update_listener=None): if not name: name = key print('Starting mock %s service on %s ...' % (name, edge_ports_info())) if not backend_port: if config.FORWARD_EDGE_INMEM: backend_port = multiserver.get_moto_server_port() elif config.USE_SSL or update_listener: backend_port = get_free_tcp_port() if backend_port or config.FORWARD_EDGE_INMEM: start_proxy_for_service(key, port, backend_port, update_listener) if config.BUNDLE_API_PROCESSES: return multiserver.start_api_server(key, backend_port or port) return start_moto_server_separate(key, port, name=name, backend_port=backend_port, asynchronous=asynchronous) def start_moto_server_separate(key, port, name=None, backend_port=None, asynchronous=False): moto_server_cmd = '%s/bin/moto_server' % LOCALSTACK_VENV_FOLDER if not os.path.exists(moto_server_cmd): moto_server_cmd = run('which moto_server').strip() cmd = 'VALIDATE_LAMBDA_S3=0 %s %s -p %s -H %s' % (moto_server_cmd, key, backend_port or port, constants.BIND_HOST) return do_run(cmd, asynchronous) def start_local_api(name, port, api, method, asynchronous=False): print('Starting mock %s service on %s ...' 
% (name, edge_ports_info())) if config.FORWARD_EDGE_INMEM: port = get_free_tcp_port() PROXY_LISTENERS[api] = (api, port, None) if asynchronous: thread = start_thread(method, port, quiet=True) return thread else: method(port) def stop_infra(debug=False): if common.INFRA_STOPPED: return common.INFRA_STOPPED = True event_publisher.fire_event(event_publisher.EVENT_STOP_INFRA) generic_proxy.QUIET = True print_debug('[shutdown] Cleaning up files ...', debug) common.cleanup(files=True, quiet=True) print_debug('[shutdown] Cleaning up resources ...', debug) common.cleanup_resources(debug=debug) print_debug('[shutdown] Cleaning up Lambda resources ...', debug) lambda_api.cleanup() time.sleep(2) # TODO: optimize this (takes too long currently) # check_infra(retries=2, expect_shutdown=True) def check_aws_credentials(): session = boto3.Session() credentials = None # hardcode credentials here, to allow us to determine internal API calls made via boto3 os.environ['AWS_ACCESS_KEY_ID'] = constants.INTERNAL_AWS_ACCESS_KEY_ID os.environ['AWS_SECRET_ACCESS_KEY'] = constants.INTERNAL_AWS_ACCESS_KEY_ID try: credentials = session.get_credentials() except Exception: pass session = boto3.Session() credentials = session.get_credentials() assert credentials # ------------- # MAIN STARTUP # ------------- def start_infra(asynchronous=False, apis=None): try: os.environ[LOCALSTACK_INFRA_PROCESS] = '1' is_in_docker = in_docker() # print a warning if we're not running in Docker but using Docker based LAMBDA_EXECUTOR if not is_in_docker and 'docker' in config.LAMBDA_EXECUTOR and not is_linux(): print(('!WARNING! - Running outside of Docker with $LAMBDA_EXECUTOR=%s can lead to ' 'problems on your OS. The environment variable $LOCALSTACK_HOSTNAME may not ' 'be properly set in your Lambdas.') % config.LAMBDA_EXECUTOR) if is_in_docker and not config.LAMBDA_REMOTE_DOCKER and not os.environ.get('HOST_TMP_FOLDER'): print('!WARNING! 
- Looks like you have configured $LAMBDA_REMOTE_DOCKER=0 - ' "please make sure to configure $HOST_TMP_FOLDER to point to your host's $TMPDIR") # apply patches patch_urllib3_connection_pool(maxsize=128) patch_instance_tracker_meta() # load plugins load_plugins() # with plugins loaded, now start the infrastructure thread = do_start_infra(asynchronous, apis, is_in_docker) if not asynchronous and thread: # this is a bit of an ugly hack, but we need to make sure that we # stay in the execution context of the main thread, otherwise our # signal handlers don't work sleep_forever() return thread except KeyboardInterrupt: print('Shutdown') except Exception as e: print('Error starting infrastructure: %s %s' % (e, traceback.format_exc())) sys.stdout.flush() raise e finally: if not asynchronous: stop_infra() def do_start_infra(asynchronous, apis, is_in_docker): event_publisher.fire_event(event_publisher.EVENT_START_INFRA, {'d': is_in_docker and 1 or 0, 'c': in_ci() and 1 or 0}) # set up logging setup_logging() # prepare APIs apis = canonicalize_api_names(apis) @log_duration() def prepare_environment(): # set environment os.environ['AWS_REGION'] = config.DEFAULT_REGION os.environ['ENV'] = ENV_DEV # register signal handlers if not is_local_test_mode(): register_signal_handlers() # make sure AWS credentials are configured, otherwise boto3 bails on us check_aws_credentials() @log_duration() def prepare_installation(): # install libs if not present install.install_components(apis) @log_duration() def start_api_services(): # Some services take a bit to come up sleep_time = 5 # start services thread = None # loop through plugins and start each service for name, plugin in SERVICE_PLUGINS.items(): if plugin.is_enabled(api_names=apis): record_service_health(name, 'starting') t1 = plugin.start(asynchronous=True) thread = thread or t1 time.sleep(sleep_time) # ensure that all infra components are up and running check_infra(apis=apis) # restore persisted data persistence.restore_persisted_data(apis=apis) return thread prepare_environment() prepare_installation() thread = start_api_services() print(READY_MARKER_OUTPUT) sys.stdout.flush() return thread
py
1a40821ca042871a96558fda41a25e1d03366d37
from django.urls import path
from . import views

urlpatterns = [
    path('<int:id_number>/<str:auth_status>/', views.leaks, name='leaks'),
    path('login/', views.login_view, name='login'),
    path('logout/', views.logout_view, name='logout'),
    path('test/', views.test_login, name='test'),
    path('ver/', views.get_ver, name='ver'),
    path('save_dict/', views.save_dict, name='save_dict'),
    path('info_url/<int:url_id>/', views.get_info, name='info_url'),
    path('set_cookies/', views.set_cookies, name='set_cookies'),
    path('check_cookies/', views.check_cookies, name='check_cookies'),
]
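For context, a sketch of how an app-level urls.py such as this one is usually mounted in a project's root URLconf; the file name, URL prefix, and app label below are placeholders, not taken from this repository.

# project/urls.py -- illustrative only; 'leaks_app' and the prefix are invented names
from django.urls import include, path

urlpatterns = [
    path('leaks/', include('leaks_app.urls')),
]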
py
1a40830f2602cda32290ab8b7dbc0c32a7d53089
from django.core.exceptions import ObjectDoesNotExist
from qfieldcloud.core import permissions_utils, serializers
from qfieldcloud.core.models import Job, Project
from rest_framework import generics, permissions, viewsets
from rest_framework.response import Response
from rest_framework.status import HTTP_201_CREATED


class JobPermissions(permissions.BasePermission):
    def has_permission(self, request, view):
        project_id = permissions_utils.get_param_from_request(request, "project_id")

        try:
            project = Project.objects.get(id=project_id)
        except ObjectDoesNotExist:
            return False

        return permissions_utils.can_read_files(request.user, project)


class JobViewSet(viewsets.ReadOnlyModelViewSet):
    serializer_class = serializers.JobSerializer
    lookup_url_kwarg = "job_id"
    permission_classes = [permissions.IsAuthenticated]

    def get_serializer_by_job_type(self, job_type, *args, **kwargs):
        if job_type == Job.Type.DELTA_APPLY:
            return serializers.ApplyJobSerializer(*args, **kwargs)
        elif job_type == Job.Type.PACKAGE:
            return serializers.PackageJobSerializer(*args, **kwargs)
        elif job_type == Job.Type.PROCESS_PROJECTFILE:
            return serializers.ProcessProjectfileJobSerializer(*args, **kwargs)
        else:
            raise NotImplementedError(f'Unknown job type "{job_type}"')

    def get_serializer(self, *args, **kwargs):
        kwargs.setdefault("context", self.get_serializer_context())

        # Note: membership test must use a tuple, otherwise ("create") is just a string.
        if self.action in ("create",):
            if "data" in kwargs:
                job_type = kwargs["data"]["type"]
            else:
                job_type = args[0].type

            return self.get_serializer_by_job_type(job_type, *args, **kwargs)

        if self.action in ("retrieve",):
            job_type = args[0].type
            return self.get_serializer_by_job_type(job_type, *args, **kwargs)

        return serializers.JobSerializer(*args, **kwargs)

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)

        if bool(int(request.data.get("force", 0))):
            serializer.is_valid(raise_exception=True)
            serializer.save()
        else:
            serializer.is_valid(raise_exception=True)

            if not serializer.Meta.allow_parallel_jobs:
                job = serializer.get_lastest_not_finished_job()
                if job:
                    return Response(self.get_serializer(job).data)

            serializer.save()

        return Response(serializer.data, status=HTTP_201_CREATED)

    def get_queryset(self):
        qs = Job.objects.select_subclasses()

        if self.action == "list":
            project_id = self.request.data.get("project_id")
            project = generics.get_object_or_404(Project, pk=project_id)
            qs = qs.filter(project=project)

        return qs
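A sketch of how a viewset like JobViewSet is typically exposed through a DRF router; the module path, URL prefix, and basename here are illustrative assumptions, not taken from this repository.

# Illustrative router wiring; names below are assumptions, not from this repo.
from rest_framework import routers
from qfieldcloud.core.views.jobs_views import JobViewSet  # assumed module path

router = routers.DefaultRouter()
router.register(r"jobs", JobViewSet, basename="jobs")
urlpatterns = router.urls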
py
1a4083d73e44f6f19fdd386794591a8ae3682009
# -*- coding: utf-8 -*- # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import sys import inspect import shutil __location__ = os.path.join(os.getcwd(), os.path.dirname( inspect.getfile(inspect.currentframe()))) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.join(__location__, '../src')) # -- Run sphinx-apidoc ------------------------------------------------------ # This hack is necessary since RTD does not issue `sphinx-apidoc` before running # `sphinx-build -b html . _build/html`. See Issue: # https://github.com/rtfd/readthedocs.org/issues/1139 # DON'T FORGET: Check the box "Install your project inside a virtualenv using # setup.py install" in the RTD Advanced Settings. # Additionally it helps us to avoid running apidoc manually try: # for Sphinx >= 1.7 from sphinx.ext import apidoc except ImportError: from sphinx import apidoc output_dir = os.path.join(__location__, "api") module_dir = os.path.join(__location__, "../src/online_stats") try: shutil.rmtree(output_dir) except FileNotFoundError: pass try: import sphinx from pkg_resources import parse_version cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}" cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir) args = cmd_line.split(" ") if parse_version(sphinx.__version__) >= parse_version('1.7'): args = args[1:] apidoc.main(args) except Exception as e: print("Running `sphinx-apidoc` failed!\n{}".format(e)) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.autosummary', 'sphinx.ext.viewcode', 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.ifconfig', 'sphinx.ext.mathjax', 'sphinx.ext.napoleon'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'online-stats' copyright = u'2019, Christian Donnerer' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '' # Is set by calling `setup.py docs` # The full version, including alpha/beta/rc tags. release = '' # Is set by calling `setup.py docs` # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. 
# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { 'sidebar_width': '300px', 'page_width': '1200px' } # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". try: from online_stats import __version__ as version except ImportError: pass else: release = version # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = "" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'online_stats-doc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'user_guide.tex', u'online-stats Documentation', u'Christian Donnerer', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = "" # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- External mapping ------------------------------------------------------------ python_version = '.'.join(map(str, sys.version_info[0:2])) intersphinx_mapping = { 'sphinx': ('http://www.sphinx-doc.org/en/stable', None), 'python': ('https://docs.python.org/' + python_version, None), 'matplotlib': ('https://matplotlib.org', None), 'numpy': ('https://docs.scipy.org/doc/numpy', None), 'sklearn': ('http://scikit-learn.org/stable', None), 'pandas': ('http://pandas.pydata.org/pandas-docs/stable', None), 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), }
py
1a40856b61c93ce1bf0f128a1e5d014d7d6739aa
food={"vegetables":["carrots","kale","cucumber","tomato"],"desserts":["cake","ice cream", "donut"]}
strVeg = "My favorite vegetable is "
strDes = "My favorite dessert is "
print(strVeg + food["vegetables"][0])
print(strVeg + food["vegetables"][1])
print(strVeg + food["vegetables"][2])
print(strVeg + food["vegetables"][3])
print(strDes + food["desserts"][0])
print(strDes + food["desserts"][1])
print(strDes + food["desserts"][2])

cars={"sports":{"Volkswagon":"Porsche","Dodge":"Viper","Chevy":"Corvette"},"classic":{"Mercedes-Benz":"300SL","Toyota":"2000GT","Lincoln":"Continental"}}
strSport = "My favorite sports car is a"
strClassic = "My favorite classic car is a"
print(strSport + " Dodge " + cars["sports"]["Dodge"])
print(strSport + " Chevy " + cars["sports"]["Chevy"])
print(strSport + " Volkswagon " + cars["sports"]["Volkswagon"])
print(strClassic + " Mercedes-Benz " + cars["classic"]["Mercedes-Benz"])
print(strClassic + " Toyota " + cars["classic"]["Toyota"])
print(strClassic + " Lincoln " + cars["classic"]["Lincoln"])

dessert={"iceCream":["Rocky Road","strawberry","Pistachio Cashew","Pecan Praline"]}
strCream = "My favorite ice cream is "
print(strCream + dessert["iceCream"][0])
print(strCream + dessert["iceCream"][1])
print(strCream + dessert["iceCream"][2])
print(strCream + dessert["iceCream"][3])

soup={"soup":{"tomato":"healthy","onion":"bleh!","vegetable":"good for you"}}
print("This tomato soup is " + soup["soup"]["tomato"])
print("This onion soup is " + soup["soup"]["onion"])
print("This vegetable soup is " + soup["soup"]["vegetable"])
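The repeated print calls above can also be generated with loops over the dictionary values; a short sketch covering a few of the dictionaries, producing the same lines of output.

# Same idea with loops instead of one print per item.
for veg in food["vegetables"]:
    print(strVeg + veg)
for des in food["desserts"]:
    print(strDes + des)
for name, verdict in soup["soup"].items():
    print("This " + name + " soup is " + verdict)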
py
1a4085756ba86ef87f2c4e31e8317ded19a799b4
"""Plugin system for strax A 'plugin' is something that outputs an array and gets arrays from one or more other plugins. """ from concurrent.futures import wait from enum import IntEnum import inspect import itertools import logging import time import typing from immutabledict import immutabledict import numpy as np import strax export, __all__ = strax.exporter() @export class SaveWhen(IntEnum): """Plugin's preference for having it's data saved""" NEVER = 0 # Throw an error if the user lists it EXPLICIT = 1 # Save ONLY if the user lists it explicitly TARGET = 2 # Save if the user asks for it as a final target ALWAYS = 3 # Save even if the user does not list it @export class InputTimeoutExceeded(Exception): pass @export class PluginGaveWrongOutput(Exception): pass @export class Plugin: """Plugin containing strax computation You should NOT instantiate plugins directly. Do NOT add unpickleable things (e.g. loggers) as attributes. """ __version__ = '0.0.0' # For multi-output plugins these should be (immutable)dicts data_kind: typing.Union[str, immutabledict, dict] dtype: typing.Union[tuple, np.dtype, immutabledict, dict] depends_on: tuple provides: tuple input_buffer: typing.Dict[str, strax.Chunk] compressor = 'blosc' rechunk_on_save = True # Saver is allowed to rechunk # For a source with online input (e.g. DAQ readers), crash if no new input # has appeared for this many seconds # This should be smaller than the mailbox timeout (which is intended as # a deep fallback) input_timeout = 80 save_when = SaveWhen.ALWAYS # Instructions how to parallelize # False: never parallellize; # 'process': use processpool; # 'thread' (or just True): use threadpool. parallel = False # For the computation itself # Maximum number of output messages max_messages = None # use default # Do not specify attributes below # Set using the takes_config decorator takes_config = immutabledict() # These are set on plugin initialization, which is done in the core run_id: str run_i: int config: typing.Dict deps: typing.Dict # Dictionary of dependency plugin instances compute_takes_chunk_i = False # Autoinferred, no need to set yourself compute_takes_start_end = False def __init__(self): if not hasattr(self, 'depends_on'): raise ValueError('depends_on not provided for ' f'{self.__class__.__name__}') self.depends_on = strax.to_str_tuple(self.depends_on) # Store compute parameter names, see if we take chunk_i too compute_pars = list( inspect.signature(self.compute).parameters.keys()) if 'chunk_i' in compute_pars: self.compute_takes_chunk_i = True del compute_pars[compute_pars.index('chunk_i')] if 'start' in compute_pars: if 'end' not in compute_pars: raise ValueError(f"Compute of {self} takes start, " f"so it should also take end.") self.compute_takes_start_end = True del compute_pars[compute_pars.index('start')] del compute_pars[compute_pars.index('end')] self.compute_pars = compute_pars self.input_buffer = dict() def fix_dtype(self): if not hasattr(self, 'dtype'): self.dtype = self.infer_dtype() if self.multi_output: # Convert to a dict of numpy dtypes if (not hasattr(self, 'data_kind') or not isinstance(self.data_kind, (dict, immutabledict))): raise ValueError( f"{self.__class__.__name__} has multiple outputs and " "must declare its data kind as a dict: " "{dtypename: data kind}.") if not isinstance(self.dtype, dict): raise ValueError( f"{self.__class__.__name__} has multiple outputs, so its " "dtype must be specified as a dict: {output: dtype}.") self.dtype = {k: strax.to_numpy_dtype(dt) for k, dt in self.dtype.items()} else: 
# Convert to a numpy dtype self.dtype = strax.to_numpy_dtype(self.dtype) # Check required time information is present for d in self.provides: fieldnames = self.dtype_for(d).names ok = 'time' in fieldnames and ( ('dt' in fieldnames and 'length' in fieldnames) or 'endtime' in fieldnames) if not ok: raise ValueError( f"Missing time and endtime information for {d}") @property def multi_output(self): return len(self.provides) > 1 def setup(self): """Hook if plugin wants to do something on initialization """ pass def infer_dtype(self): """Return dtype of computed data; used only if no dtype attribute defined""" # Don't raise NotImplementedError, IDE will complain you're not # implementing all abstract methods... raise RuntimeError("No infer dtype method defined") def version(self, run_id=None): """Return version number applicable to the run_id. Most plugins just have a single version (in .__version__) but some may be at different versions for different runs (e.g. time-dependent corrections). """ return self.__version__ def __repr__(self): return self.__class__.__name__ def dtype_for(self, data_type): if self.multi_output: return self.dtype[data_type] return self.dtype def empty_result(self): if self.multi_output: return {d: np.empty(0, self.dtype_for(d)) for d in self.provides} return np.empty(0, self.dtype) def data_kind_for(self, data_type): if self.multi_output: return self.data_kind[data_type] return self.data_kind def metadata(self, run_id, data_type): """Metadata to save along with produced data""" if not data_type in self.provides: raise RuntimeError(f"{data_type} not in {self.provides}?") return dict( run_id=run_id, data_type=data_type, data_kind=self.data_kind_for(data_type), dtype=self.dtype_for(data_type), lineage_hash=strax.DataKey( run_id, data_type, self.lineage).lineage_hash, compressor=self.compressor, lineage=self.lineage) def dependencies_by_kind(self): """Return dependencies grouped by data kind i.e. {kind1: [dep0, dep1], kind2: [dep, dep]} :param require_time: If True, one dependency of each kind must provide time information. It will be put first in the list. If require_time is omitted, we will require time only if there is more than one data kind in the dependencies. """ return strax.group_by_kind( self.depends_on, plugins=self.deps) def is_ready(self, chunk_i): """Return whether the chunk chunk_i is ready for reading. Returns True by default; override if you make an online input plugin. """ return True def source_finished(self): """Return whether all chunks the plugin wants to read have been written. Only called for online input plugins. """ # Don't raise NotImplementedError, IDE complains raise RuntimeError("source_finished called on a regular plugin") def _fetch_chunk(self, d, iters, check_end_not_before=None): """Add a chunk of the datatype d to the input buffer. Return True if this succeeded, False if the source is exhausted. :param d: data type to fetch :param iters: iterators that produce data :param check_end_not_before: Raise a runtimeError if the source is exhausted, but the input buffer ends before this time. 
""" try: # print(f"Fetching {d} in {self}, hope to see {hope_to_see}") self.input_buffer[d] = strax.Chunk.concatenate( [self.input_buffer[d], next(iters[d])]) # print(f"Fetched {d} in {self}, " # f"now have {self.input_buffer[d]}") return True except StopIteration: # print(f"Got StopIteration while fetching for {d} in {self}") if (check_end_not_before is not None and self.input_buffer[d].end < check_end_not_before): raise RuntimeError( f"Tried to get data until {check_end_not_before}, but {d} " f"ended prematurely at {self.input_buffer[d].end}") return False def iter(self, iters, executor=None): """Iterate over dependencies and yield results :param iters: dict with iterators over dependencies :param executor: Executor to punt computation tasks to. If None, will compute inside the plugin's thread. """ pending_futures = [] last_input_received = time.time() self.input_buffer = {d: None for d in self.depends_on} # Fetch chunks from all inputs. Whoever is the slowest becomes the # pacemaker pacemaker = None _end = float('inf') for d in self.depends_on: self._fetch_chunk(d, iters) if self.input_buffer[d].end < _end: pacemaker = d _end = self.input_buffer[d].end for chunk_i in itertools.count(): # Online input support while not self.is_ready(chunk_i): if self.source_finished(): # Chunk_i does not exist. We are done. print("Source finished!") self.cleanup(iters, wait_for=pending_futures) return if time.time() > last_input_received + self.input_timeout: raise InputTimeoutExceeded( f"{self.__class__.__name__}:{id(self)} waited for " f"more than {self.input_timeout} sec for arrival of " f"input chunk {chunk_i}, and has given up.") print(f"{self.__class__.__name__}:{id(self)} " f"waiting for chunk {chunk_i}") time.sleep(2) last_input_received = time.time() if pacemaker is None: inputs_merged = dict() else: if chunk_i != 0: # Fetch the pacemaker, to figure out when this chunk ends # (don't do it for chunk 0, for which we already fetched) if not self._fetch_chunk(pacemaker, iters): # Source exhausted. Cleanup will do final checks. self.cleanup(iters, wait_for=pending_futures) return this_chunk_end = self.input_buffer[pacemaker].end inputs = dict() # Fetch other inputs (when needed) for d in self.depends_on: if d != pacemaker: while (self.input_buffer[d] is None or self.input_buffer[d].end < this_chunk_end): self._fetch_chunk( d, iters, check_end_not_before=this_chunk_end) inputs[d], self.input_buffer[d] = \ self.input_buffer[d].split( t=this_chunk_end, allow_early_split=True) # If any of the inputs were trimmed due to early splits, # trim the others too. # In very hairy cases this can take multiple passes. # TODO: can we optimize this, or code it more elegantly? max_passes_left = 10 while max_passes_left > 0: this_chunk_end = min([x.end for x in inputs.values()] + [this_chunk_end]) if len(set([x.end for x in inputs.values()])) <= 1: break for d in self.depends_on: inputs[d], back_to_buffer = \ inputs[d].split( t=this_chunk_end, allow_early_split=True) self.input_buffer[d] = strax.Chunk.concatenate( [back_to_buffer, self.input_buffer[d]]) max_passes_left -= 1 else: raise RuntimeError( f"{self} was unable to get time-consistent " f"inputs after ten passess. 
Inputs: \n{inputs}\n" f"Input buffer:\n{self.input_buffer}") # Merge inputs of the same kind inputs_merged = { kind: strax.Chunk.merge([inputs[d] for d in deps_of_kind]) for kind, deps_of_kind in self.dependencies_by_kind().items()} # Submit the computation # print(f"{self} calling with {inputs_merged}") if self.parallel and executor is not None: new_future = executor.submit( self.do_compute, chunk_i=chunk_i, **inputs_merged) pending_futures.append(new_future) pending_futures = [f for f in pending_futures if not f.done()] yield new_future else: yield self.do_compute(chunk_i=chunk_i, **inputs_merged) raise RuntimeError("This cannot happen.") def cleanup(self, iters: typing.Dict[str, typing.Iterable], wait_for): # The wait_for option is only used in child classes; # A standard plugin doesn't need to do anything with the computation # future results. # Check all sources are exhausted. # This is more than a check though -- it ensure the content of # all sources are requested all the way (including the final # Stopiteration), as required by lazy-mode processing requires for d in iters.keys(): if self._fetch_chunk(d, iters): raise RuntimeError( f"Plugin {d} terminated without fetching last {d}!") # Check the input buffer is empty for d, buffer in self.input_buffer.items(): if buffer is not None and len(buffer): raise RuntimeError( f"Plugin {d} terminated with leftover {d}: {buffer}") def _check_dtype(self, x, d=None): # There is an additional 'last resort' data type check # in the chunk initialization. # This one is broader and gives a more context-aware message. if d is None: assert not self.multi_output d = self.provides[0] pname = self.__class__.__name__ if not isinstance(x, np.ndarray): raise strax.PluginGaveWrongOutput( f"Plugin {pname} did not deliver " f"data type {d} as promised.\n" f"Delivered a {type(x)}") expect = strax.remove_titles_from_dtype(self.dtype_for(d)) if not isinstance(expect, np.dtype): raise ValueError(f"Plugin {pname} expects {expect} as dtype??") got = strax.remove_titles_from_dtype(x.dtype) if got != expect: raise strax.PluginGaveWrongOutput( f"Plugin {pname} did not deliver " f"data type {d} as promised.\n" f"Promised: {expect}\n" f"Delivered: {got}.") def do_compute(self, chunk_i=None, **kwargs): """Wrapper for the user-defined compute method This is the 'job' that gets executed in different processes/threads during multiprocessing """ for k, v in kwargs.items(): if not isinstance(v, strax.Chunk): raise RuntimeError( f"do_compute of {self.__class__.__name__} got a {type(v)} " f"instead of a strax Chunk for {k}") if len(kwargs): # Check inputs describe the same time range tranges = {k: (v.start, v.end) for k, v in kwargs.items()} if len(set(tranges.values())) != 1: raise ValueError(f"{self.__class__.__name__} got inconsistent " f"time ranges of inputs: {tranges}") start, end = list(tranges.values())[0] else: # This plugin starts from scratch start, end = None, None kwargs = {k: v.data for k, v in kwargs.items()} if self.compute_takes_chunk_i: kwargs['chunk_i'] = chunk_i if self.compute_takes_start_end: kwargs['start'] = start kwargs['end'] = end result = self.compute(**kwargs) return self._fix_output(result, start, end) def _fix_output(self, result, start, end, _dtype=None): if self.multi_output and _dtype is None: if not isinstance(result, dict): raise ValueError( f"{self.__class__.__name__} is multi-output and should " "provide a dict output {dtypename: result}") return {d: self._fix_output(result[d], start, end, _dtype=d) for d in self.provides} if _dtype is None: 
assert not self.multi_output _dtype = self.provides[0] if not isinstance(result, strax.Chunk): if start is None: assert len(self.depends_on) == 0 raise ValueError( "Plugins without dependencies must return full strax " f"Chunks, but {self.__class__.__name__} produced a " f"{type(result)}!") result = strax.dict_to_rec(result, dtype=self.dtype_for(_dtype)) self._check_dtype(result, _dtype) result = self.chunk( start=start, end=end, data_type=_dtype, data=result) return result def chunk(self, *, start, end, data, data_type=None, run_id=None): if data_type is None: if self.multi_output: raise ValueError("Must give data_type when making chunks from " "a multi-output plugin") data_type = self.provides[0] if run_id is None: run_id = self.run_id return strax.Chunk( start=start, end=end, run_id=run_id, data_kind=self.data_kind_for(data_type), data_type=data_type, dtype=self.dtype_for(data_type), data=data) def compute(self, **kwargs): raise NotImplementedError ## # Special plugins ## @export class OverlapWindowPlugin(Plugin): """Plugin whose computation depends on having its inputs extend a certain window on both sides. Current implementation assumes: - All inputs are sorted by *endtime*. Since everything in strax is sorted by time, this only works for disjoint intervals such as peaks or events, but NOT records! - You must read time info for your data kind, or create a new data kind. """ parallel = False def __init__(self): super().__init__() self.cached_input = {} self.cached_results = None self.sent_until = 0 # This guy can have a logger, it's not parallelized anyway self.log = logging.getLogger(self.__class__.__name__) def get_window_size(self): """Return the required window size in nanoseconds""" raise NotImplementedError def iter(self, iters, executor=None): yield from super().iter(iters, executor=executor) # Yield final results, kept at bay in fear of a new chunk if self.cached_results is not None: yield self.cached_results def do_compute(self, chunk_i=None, **kwargs): if not len(kwargs): raise RuntimeError("OverlapWindowPlugin must have a dependency") # Add cached inputs to compute arguments for k, v in kwargs.items(): if len(self.cached_input): kwargs[k] = strax.Chunk.concatenate( [self.cached_input[k], v]) # Compute new results result = super().do_compute(chunk_i=chunk_i, **kwargs) # Throw away results we already sent out _, result = result.split(t=self.sent_until, allow_early_split=False) # When does this batch of inputs end? ends = [v.end for v in kwargs.values()] if not len(set(ends)) == 1: raise RuntimeError( f"OverlapWindowPlugin got incongruent inputs: {kwargs}") end = ends[0] # When can we no longer trust our results? 
# Take slightly larger windows for safety: it is very easy for me # (or the user) to have made an off-by-one error invalid_beyond = int(end - self.get_window_size() - 1) # Prepare to send out valid results, cache the rest # Do not modify result anymore after this # Note result.end <= invalid_beyond, with equality if there are # no overlaps result, self.cached_results = result.split(t=invalid_beyond, allow_early_split=True) self.sent_until = result.end # Cache a necessary amount of input for next time # Again, take a bit of overkill for good measure cache_inputs_beyond = int(self.sent_until - 2 * self.get_window_size() - 1) for k, v in kwargs.items(): _, self.cached_input[k] = v.split(t=cache_inputs_beyond, allow_early_split=True) return result @export class LoopPlugin(Plugin): """Plugin that disguises multi-kind data-iteration by an event loop """ def compute(self, **kwargs): # If not otherwise specified, data kind to loop over # is that of the first dependency (e.g. events) # Can't be in __init__: deps not initialized then if hasattr(self, 'loop_over'): loop_over = self.loop_over else: loop_over = self.deps[self.depends_on[0]].data_kind # Group into lists of things (e.g. peaks) # contained in the base things (e.g. events) base = kwargs[loop_over] if len(base) > 1: assert np.all(base[1:]['time'] >= strax.endtime(base[:-1])), \ f'{base}s overlap' for k, things in kwargs.items(): # Check for sorting difs = np.diff(things['time']) if difs.min(initial=0) < 0: i_bad = np.argmin(difs) examples = things[i_bad-1:i_bad+3] t0 = examples['time'].min() raise ValueError( f'Expected {k} to be sorted, but found ' + str([(x['time'] - t0, strax.endtime(x) - t0) for x in examples])) if k != loop_over: r = strax.split_by_containment(things, base) if len(r) != len(base): raise RuntimeError(f"Split {k} into {len(r)}, " f"should be {len(base)}!") kwargs[k] = r results = np.zeros(len(base), dtype=self.dtype) deps_by_kind = self.dependencies_by_kind() for i in range(len(base)): r = self.compute_loop(base[i], **{k: kwargs[k][i] for k in deps_by_kind if k != loop_over}) # Convert from dict to array row: for k, v in r.items(): results[i][k] = v return results def compute_loop(self, *args, **kwargs): raise NotImplementedError ## # "Plugins" for internal use # These do not actually do computations, but do other tasks # for which posing as a plugin is helpful. # Do not subclass unless you know what you are doing.. ## @export class MergeOnlyPlugin(Plugin): """Plugin that merges data from its dependencies """ save_when = SaveWhen.NEVER def infer_dtype(self): deps_by_kind = self.dependencies_by_kind() if len(deps_by_kind) != 1: raise ValueError("MergeOnlyPlugins can only merge data " "of the same kind, but got multiple kinds: " + str(deps_by_kind)) return strax.merged_dtype([ self.deps[d].dtype_for(d) # Sorting is needed here to match what strax.Chunk does in merging for d in sorted(self.depends_on)]) def compute(self, **kwargs): return kwargs[list(kwargs.keys())[0]] @export class ParallelSourcePlugin(Plugin): """An plugin that inlines the computations of other plugins and the saving of their results. This evades data transfer (pickling and/or memory copy) penalties while multiprocessing. """ parallel = 'process' @classmethod def inline_plugins(cls, components, start_from, log): plugins = components.plugins.copy() sub_plugins = {start_from: plugins[start_from]} del plugins[start_from] # Gather all plugins that do not rechunk and which branch out as a # simple tree from the input plugin. 
# We'll run these all together in one process. while True: # Scan for plugins we can inline for p in plugins.values(): if (p.parallel and all([d in sub_plugins for d in p.depends_on])): for d in p.provides: sub_plugins[d] = p if d in plugins: del plugins[d] # Rescan break else: # No more plugins we can inline break if len(set(list(sub_plugins.values()))) == 1: # Just one plugin to inline: no use log.debug("Just one plugin to inline: skipping") return components # Which data types should we output? Three cases follow. outputs_to_send = set() # Case 1. Requested as a final target for p in sub_plugins.values(): outputs_to_send.update(set(components.targets) .intersection(set(p.provides))) # Case 2. Requested by a plugin we did not inline for d, p in plugins.items(): outputs_to_send.update(set(p.depends_on)) outputs_to_send &= sub_plugins.keys() # Inline savers that do not require rechunking savers = components.savers sub_savers = dict() for p in sub_plugins.values(): for d in p.provides: if d not in savers: continue if p.rechunk_on_save: # Case 3. has a saver we can't inline outputs_to_send.add(d) continue remaining_savers = [] for s_i, s in enumerate(savers[d]): if not s.allow_fork: # Case 3 again, cannot inline saver outputs_to_send.add(d) remaining_savers.append(s) continue if d not in sub_savers: sub_savers[d] = [] s.is_forked = True sub_savers[d].append(s) savers[d] = remaining_savers if not len(savers[d]): del savers[d] p = cls(depends_on=sub_plugins[start_from].depends_on) p.sub_plugins = sub_plugins assert len(outputs_to_send) p.provides = tuple(outputs_to_send) p.sub_savers = sub_savers p.start_from = start_from if p.multi_output: p.dtype = {d: p.sub_plugins[d].dtype_for(d) for d in outputs_to_send} else: to_send = list(outputs_to_send)[0] p.dtype = p.sub_plugins[to_send].dtype_for(to_send) for d in p.provides: plugins[d] = p p.deps = {d: plugins[d] for d in p.depends_on} log.debug(f"Inlined plugins: {p.sub_plugins}." f"Inlined savers: {p.sub_savers}") return strax.ProcessorComponents( plugins, components.loaders, savers, components.targets) def __init__(self, depends_on): self.depends_on = depends_on super().__init__() def source_finished(self): return self.sub_plugins[self.start_from].source_finished() def is_ready(self, chunk_i): return self.sub_plugins[self.start_from].is_ready(chunk_i) def do_compute(self, chunk_i=None, **kwargs): results = kwargs # Run the different plugin computations while True: for output_name, p in self.sub_plugins.items(): if output_name in results: continue if any([d not in results for d in p.depends_on]): continue compute_kwargs = dict(chunk_i=chunk_i) for kind, d_of_kind in p.dependencies_by_kind().items(): compute_kwargs[kind] = strax.Chunk.merge( [results[d] for d in d_of_kind]) # Store compute result(s) r = p.do_compute(**compute_kwargs) if p.multi_output: for d in r: results[d] = r[d] else: results[output_name] = r # Rescan plugins to see if we can compute anything more break else: # Nothing further to compute break for d in self.provides: assert d in results, f"Output {d} missing!" 
# Save anything we can through the inlined savers for d, savers in self.sub_savers.items(): for s in savers: s.save(chunk=results[d], chunk_i=chunk_i) # Remove results we do not need to send for d in list(results.keys()): if d not in self.provides: del results[d] if self.multi_output: for k in self.provides: assert k in results assert isinstance(results[k], strax.Chunk) r0 = results[k] else: results = r0 = results[self.provides[0]] assert isinstance(r0, strax.Chunk) return self._fix_output(results, start=r0.start, end=r0.end) def cleanup(self, iters, wait_for): print(f"{self.__class__.__name__} exhausted. " f"Waiting for {len(wait_for)} pending futures.") for savers in self.sub_savers.values(): for s in savers: s.close(wait_for=wait_for) super().cleanup(iters, wait_for)
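To make the Plugin interface above concrete, a minimal subclass sketch. The data-type names, the data kind assumed for the 'peaks' dependency, and the dtype fields are invented for illustration; registration through a strax Context is not shown. The time/endtime fields follow the requirement enforced in fix_dtype.

# Illustrative only: a toy plugin counting entries per chunk; names are made up.
import numpy as np
import strax

class PeakCounts(strax.Plugin):
    __version__ = '0.0.1'
    depends_on = ('peaks',)       # hypothetical input data type (its kind assumed to also be 'peaks')
    provides = ('peak_counts',)   # hypothetical output data type
    data_kind = 'peak_counts'
    dtype = [
        (('Start time since unix epoch [ns]', 'time'), np.int64),
        (('End time since unix epoch [ns]', 'endtime'), np.int64),
        (('Number of peaks in the chunk', 'n_peaks'), np.int32),
    ]

    def compute(self, peaks, start, end):
        # start/end are injected by do_compute because they appear in the signature
        result = np.zeros(1, dtype=self.dtype)
        result['time'] = start
        result['endtime'] = end
        result['n_peaks'] = len(peaks)
        return result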
py
1a4085a11c706dfbebfa140a5882700e513d2497
# Copyright 2019 TerraPower, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import io
import os

from ruamel.yaml import YAML

from armi.tests.test_plugins import TestPlugin
from armi.physics import neutronics
from armi.settings import caseSettings
from armi.physics.neutronics.const import CONF_CROSS_SECTION


XS_EXAMPLE = """AA:
    geometry: 0D
    criticalBuckling: true
    blockRepresentation: Median
BA:
    geometry: 1D slab
    criticalBuckling: false
    blockRepresentation: Median
"""


class Test_NeutronicsPlugin(TestPlugin):
    plugin = neutronics.NeutronicsPlugin

    def test_customSettingObjectIO(self):
        """Check specialized settings can build objects as values and write."""
        cs = caseSettings.Settings()
        yaml = YAML()
        inp = yaml.load(io.StringIO(XS_EXAMPLE))
        cs[CONF_CROSS_SECTION] = inp
        self.assertEqual(cs[CONF_CROSS_SECTION]["AA"].geometry, "0D")
        fname = "test_setting_obj_io_.yaml"
        cs.writeToYamlFile(fname)
        os.remove(fname)

    def test_customSettingRoundTrip(self):
        """Check specialized settings can go back and forth."""
        cs = caseSettings.Settings()
        yaml = YAML()
        inp = yaml.load(io.StringIO(XS_EXAMPLE))
        cs[CONF_CROSS_SECTION] = inp
        cs[CONF_CROSS_SECTION] = cs[CONF_CROSS_SECTION]
        fname = "test_setting_obj_io_round.yaml"
        cs.writeToYamlFile(fname)
        os.remove(fname)


if __name__ == "__main__":
    unittest.main()
py
1a4085bbe6aa1f076d8725d314db2de9655016c6
# -*- coding: utf-8 -*-
'''
This runner is designed to mirror the execution module config.py, but for
master settings
'''
from __future__ import absolute_import
from __future__ import print_function

import salt.utils
import salt.utils.sdb


def get(key, default='', delimiter=':'):
    '''
    Retrieve master config options, with optional nesting via the delimiter
    argument.

    **Arguments**

    default

        If the key is not found, the default will be returned instead

    delimiter

        Override the delimiter used to separate nested levels of a data
        structure.

    CLI Example:

    .. code-block:: bash

        salt-run config.get gitfs_remotes
        salt-run config.get file_roots:base
        salt-run config.get file_roots,base delimiter=','
    '''
    ret = salt.utils.traverse_dict_and_list(__opts__, key, default='_|-', delimiter=delimiter)
    if ret == '_|-':
        return default
    else:
        return salt.utils.sdb.sdb_get(ret, __opts__)
py
1a408685b936f1c89ed6bb668fec2cbbf57f82de
import os
import re
import numpy as np
import csv


def write2csv(path):
    # path='Planetoid_node_classification/results/result_GAT_pyg_Citeseer_GPU0_23h12m32s_on_Oct_28_2020.txt'
    csv_file = open('results.csv', 'w', encoding='gbk', newline='')
    csv_writer = csv.writer(csv_file)
    csv_writer.writerow(['data', 'model', 'L', 'params', 'train', 'val', 'test', 'epoch'])
    totals = []
    for path in findAllFile(path):
        print(path)
        file = open(path)
        iterf = iter(file)
        for line in iterf:
            a = line.find('Dataset:')
            b = line.find('net_params={\'L\':')
            c = line.find('Model:')
            d = line.find('Total Parameters:')
            e = line.find('TEST ACCURACY')
            h = line.find('val ACCURACY')
            f = line.find('TRAIN ACCURACY')
            g = line.find(' Convergence Time (Epochs):')
            # h=line.find('params={\'seed\':')
            # print(g)
            if a == 0:
                dataset = line[line.index(':') + 2:line.index(',')]
            if b == 0:
                net = line[line.index(':') + 2:line.index(',')]
            if c == 0:
                model = line[line.index(':') + 2:line.index('_')]
            if d == 0:
                Parameters = line[line.index(':') + 2:line.index('\n')]
            if e == 0:
                TEST = line[line.index(':') + 2:line.index('\n')]
            if h == 0:
                val = line[line.index(':') + 2:line.index('\n')]
            if f == 0:
                TRAIN = line[line.index(':') + 2:line.index('\n')]
            # if h == 0:
            #     seed = line[line.index(':') + 2:line.index(',')]
            if g == 0:
                Epochs = line[line.index(':') + 2:line.index('\n')]
                totals.append([dataset, model, net, Parameters, TRAIN, val, TEST, Epochs])
                # csv_writer.writerow([dataset, model, net, Parameters, TRAIN, TEST, Epochs])
                break

    totals.sort(key=lambda x: ((x[0]), (x[1]), int(x[2])), reverse=False)
    out = []
    calculate = []
    for i in range(totals.__len__()):
        out.append(totals[i])
        csv_writer.writerow(out[i])
        if (i + 1) % 4 == 0:
            avg_train_acc = np.array(totals[i - 3:i + 1])[:, 4]
            avg_val_acc = np.array(totals[i - 3:i + 1])[:, 5]
            avg_test_acc = np.array(totals[i - 3:i + 1])[:, 6]
            # avg_test_acc [totals[i-4:i][0][4], totals[:4][1][4], totals[:4][2][4], totals[:4][3][4]]
            avg_epoch = np.array(totals[i - 3:i + 1])[:, 7]
            train_acc = str(np.around(np.mean(np.array(avg_train_acc, dtype=np.float32)), decimals=4)) + '±' + str(np.around(np.std(np.array(avg_train_acc, dtype=np.float32), ddof=1), decimals=4))
            val_acc = str(np.around(np.mean(np.array(avg_val_acc, dtype=np.float32)), decimals=4)) + '±' + str(np.around(np.std(np.array(avg_val_acc, dtype=np.float32), ddof=1), decimals=4))
            test_acc = str(np.around(np.mean(np.array(avg_test_acc, dtype=np.float32)), decimals=4)) + '±' + str(np.around(np.std(np.array(avg_test_acc, dtype=np.float32), ddof=1), decimals=4))
            Epochs_acc = str(np.around(np.mean(np.array(avg_epoch, dtype=np.float32)), decimals=4)) + '±' + str(np.around(np.std(np.array(avg_epoch, dtype=np.float32), ddof=1), decimals=4))
            calculate.append([out[i - 1][0], out[i - 1][1], out[i - 1][2], out[i - 1][3], train_acc, val_acc, test_acc, Epochs_acc])
            csv_writer.writerow(calculate[int((i + 1) / 4 - 1)])
    csv_file.close()
    file.close()


def findAllFile(base):
    for root, ds, fs in os.walk(base):
        for f in fs:
            if f.endswith('.txt'):
                fullname = os.path.join(root, f)
                yield fullname


def main():
    # base = 'Planetoid_node_classification/results/'SBMs_node_classification
    base = 'SBMs_node_classification/results/'
    # for path in findAllFile(base):
    #     print(path)
    np.set_printoptions(precision=4)
    write2csv(base)


if __name__ == '__main__':
    main()
py
1a4087168fc5ea2c484776ee9a86ad09281b6f21
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # pylint: disable=invalid-name,unused-variable,invalid-name """Bitserial conv2d schedule on arm cpu""" from __future__ import absolute_import as _abs import tvm from tvm import autotvm from .. import tag from ..nn.pad import pad from ..nn.bitserial_conv2d import bitpack, bitserial_conv2d_nhwc from ..nn.util import get_pad_tuple from ..util import get_const_int, get_const_tuple from .. import generic def _kernel_vec_spatial_pack_nhwc(kernel, kernel_bits, VC, use_bitpack=True): if use_bitpack: kernel_q = bitpack(kernel, kernel_bits, pack_axis=2, bit_axis=2, pack_type='uint8') else: kernel_q = kernel KH, KW, KB, CI, CO = kernel_q.shape kvshape = (CO//VC, KH, KW, KB, VC, CI) return tvm.compute(kvshape, lambda co, dh, dw, b, vc, ci: \ kernel_q[dh][dw][b][ci][co*VC+vc], name='kernel_vec') @autotvm.register_topi_compute(bitserial_conv2d_nhwc, 'arm_cpu', 'direct') def spatial_pack_nhwc(cfg, data, kernel, stride, padding, activation_bits, weight_bits, pack_dtype, out_dtype, unipolar): """ Compute convolution with pack on spatial axes. 
""" assert data.shape[0].value == 1, "spatial pack convolution only support batch size=1" assert pack_dtype == 'uint8', "only support packing into uint8 bits" assert out_dtype == 'int16', "only support output type of int16" N, H, W, CI = get_const_tuple(data.shape) if len(kernel.shape) == 4: KH, KW, _, CO = get_const_tuple(kernel.shape) CI_packed = CI // 8 else: KH, KW, KB, CI_packed, CO = get_const_tuple(kernel.shape) if isinstance(padding, int) or (isinstance(padding, (tuple, list)) and len(padding) == 2): TPAD, LPAD, DPAD, RPAD = get_pad_tuple(padding, kernel) else: TPAD, LPAD, DPAD, RPAD = padding if isinstance(stride, (tuple, list)): HSTR, WSTR = stride else: HSTR, WSTR = stride, stride HCAT, WCAT = KH-1, KW-1 PAD_H = H + (TPAD + DPAD) PAD_W = W + (LPAD + RPAD) OH = (PAD_H - KH) // HSTR + 1 OW = (PAD_W - KW) // WSTR + 1 oshape = (1, OH, OW, CO) # Pad input channels of weights and data when it is not a multiple of 8 if CI_packed % 8 != 0: CI_PAD = CI_packed % 8 CI_packed += CI_PAD else: CI_PAD = 0 # ==================== define configuration space ==================== n, oh, ow, co = cfg.axis(N), cfg.axis(OH), cfg.axis(OW), cfg.axis(CO) ci, kh, kw = cfg.reduce_axis(CI_packed), cfg.reduce_axis(KH), cfg.reduce_axis(KW) ib, kb = cfg.reduce_axis(activation_bits), cfg.reduce_axis(weight_bits) co, vc = cfg.define_split('tile_co', co, policy='all', num_outputs=2, filter=lambda x: x.size[-1] == 8) oh, vh = cfg.define_split('tile_oh', oh, policy='all', num_outputs=2, filter=lambda x: x.size[-1] >= 2) ow, vw = cfg.define_split('tile_ow', ow, policy='all', num_outputs=2, filter=lambda x: x.size[-1] >= 2) ci_o, ci_i = cfg.define_split("tile_ci", ci, num_outputs=2, filter=lambda x: x.size[-1] == 8 or x.size[-1] == 16) re_axes = cfg.define_reorder("reorder_0", [n, oh, ow, co, vh, vw, kh, kw, ci_o, kb, ib, vc, ci_i], policy='candidate', candidate=[ [n, oh, ow, co, vh, vw, kh, kw, ci_o, kb, ib, vc, ci_i], [n, oh, ow, co, vh, vw, kw, kh, ci_o, kb, ib, vc, ci_i],]) cfg.add_flop(2 * N * OH * OW * CO * CI * 8 * KH * KW) # these are actually binary ops # ==================== VC = cfg["tile_co"].size[-1] VH = cfg["tile_oh"].size[-1] VW = cfg["tile_ow"].size[-1] data_q = bitpack(data, activation_bits, pack_axis=3, bit_axis=3, pack_type='uint8') kernel_vec = _kernel_vec_spatial_pack_nhwc(kernel, weight_bits, VC, len(kernel.shape) == 4) if kernel_vec.shape[-1] % 8 != 0 and CI_PAD != 0: kernel_vec = pad(kernel_vec, [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, CI_PAD]) N, H, W, IB, CI = data_q.shape OCO, KH, KW, KB, VC, CI = kernel_vec.shape dvshape = (N, PAD_H//(VH*HSTR), PAD_W//(VW*WSTR), VH*HSTR+HCAT, VW*WSTR+WCAT, IB, CI) ovshape = (1, OH // VH, OW // VW, CO // VC, VH, VW, VC) if (TPAD != 0 and RPAD != 0): data_pad = pad(data_q, (0, TPAD, LPAD, 0, 0), (0, DPAD, RPAD, 0, CI_PAD), name="data_pad") elif CI_PAD != 0: data_pad = pad(data_q, (0, 0, 0, 0, 0), (0, 0, 0, 0, CI_PAD), name="data_pad") else: data_pad = data_q data_vec = tvm.compute(dvshape, lambda n, h, w, vh, vw, b, ci: \ data_pad[n][h*VH*HSTR+vh][w*VW*WSTR+vw][b][ci], name='data_vec') ci = tvm.reduce_axis((0, CI), name='ci') dh = tvm.reduce_axis((0, KH), name='dh') dw = tvm.reduce_axis((0, KW), name='dw') ib = tvm.reduce_axis((0, IB), name='ib') kb = tvm.reduce_axis((0, KB), name='kb') def _bipolar_conv(n, h, w, co, vh, vw, vc): return tvm.sum((tvm.popcount( kernel_vec[co, dh, dw, kb, vc, ci].astype('uint16') & data_vec[n, h, w, vh*HSTR+dh, vw*WSTR+dw, ib, ci].astype('uint16')) << (kb + ib).astype('uint16')), axis=[dh, dw, kb, ib, ci]) def _unipolar_conv(n, h, 
w, co, vh, vw, vc): return tvm.sum( ((tvm.popcount(kernel_vec[co, dh, dw, kb, vc, ci].astype('int16') & data_vec[n, h, w, vh*HSTR+dh, vw*WSTR+dw, ib, ci].astype('int16')) - tvm.popcount(~kernel_vec[co, dh, dw, kb, vc, ci].astype('int16') & data_vec[n, h, w, vh*HSTR+dh, vw*WSTR+dw, ib, ci]).astype('int16')) << (kb + ib).astype('int16')), axis=[dh, dw, kb, ib, ci]) if unipolar: conv_vec = tvm.compute(ovshape, _unipolar_conv, name='conv_vec', tag='unipolar') else: conv_vec = tvm.compute(ovshape, _bipolar_conv, name='conv_vec', tag='bipolar') conv = tvm.compute(oshape, lambda n, h, w, co: conv_vec[n][h//VH][w//VW][co//VC][h%VH][w%VW][co%VC].astype(out_dtype), name='conv', tag='spatial_bitserial_conv_nhwc') return conv def _intrin_popcount(m, k_i, w_b, x_b, unipolar): pack_dtype = 'uint8' w = tvm.placeholder((w_b, m, k_i), dtype=pack_dtype, name='w') x = tvm.placeholder((x_b, k_i,), dtype=pack_dtype, name='x') k = tvm.reduce_axis((0, k_i), name='k') bw = tvm.reduce_axis((0, w_b), name='bw') bx = tvm.reduce_axis((0, x_b), name='bx') if unipolar: dtype = 'int16' z = tvm.compute((m,), lambda i: tvm.sum((tvm.popcount(w[bw, i, k].astype(dtype) & x[bx, k].astype(dtype)) - tvm.popcount(~w[bw, i, k].astype(dtype) & x[bx, k].astype(dtype))) << (bw+bx).astype(dtype), axis=[bw, bx, k]), name='z') else: dtype = 'uint16' z = tvm.compute((m,), lambda i: tvm.sum(tvm.popcount(w[bw, i, k].astype(dtype) & x[bx, k].astype(dtype)) << (bw+bx).astype(dtype), axis=[bw, bx, k]), name='z') Wb = tvm.decl_buffer(w.shape, w.dtype, name="W", offset_factor=k_i, strides=[tvm.var('ldw'), tvm.var('ldw'), 1]) # stride can be inferred Xb = tvm.decl_buffer(x.shape, x.dtype, name="X", offset_factor=k_i, strides=[tvm.var('ldw'), 1]) Zb = tvm.decl_buffer(z.shape, z.dtype, name="Z", offset_factor=1, strides=[1]) def _intrin_func(ins, outs): ww, xx = ins zz = outs[0] args_1 = tvm.const(1, 'uint32') args_2 = tvm.const(2, 'uint32') if unipolar: vpadd = "llvm.arm.neon.vpadd.v8i8" vpadalu = "llvm.arm.neon.vpadals.v16i8.v8i16" full_dtype = 'int8x16' half_dtype = 'int8x8' return_dtype = 'int16x8' else: vpadd = "llvm.arm.neon.vpadd.v8u8" vpadalu = "llvm.arm.neon.vpadalu.v16u8.v8u16" full_dtype = 'uint8x16' half_dtype = 'uint8x8' return_dtype = 'uint16x8' def _instr(index): irb = tvm.ir_builder.create() if index == 1: # reduce reset irb.emit(zz.vstore(0, tvm.const(0, return_dtype))) return irb.get() # body and reduce update cnts8 = [None] * 8 cnts4 = [None] * 4 cnts2 = [None] * 2 for bw in range(w_b): for bx in range(x_b): if k_i == 16: for i in range(m): w_ = ww.vload([bw, i, 0], 'uint8x16').astype(full_dtype) x_ = xx.vload([bx, 0], 'uint8x16').astype(full_dtype) if unipolar: cnts = tvm.popcount(w_ & x_) - tvm.popcount(~w_ & x_) else: cnts = tvm.popcount(w_ & x_) upper_half = tvm.call_pure_intrin(half_dtype, 'vectorhigh', cnts) lower_half = tvm.call_pure_intrin(half_dtype, 'vectorlow', cnts) cnts8[i] = upper_half + lower_half for i in range(m//2): cnts4[i] = tvm.call_llvm_intrin(half_dtype, vpadd, args_1, cnts8[i*2], cnts8[i*2+1]) for i in range(m//4): cnts2[i] = tvm.call_llvm_intrin(half_dtype, vpadd, args_1, cnts4[i*2], cnts4[i*2+1]) cnts = tvm.call_pure_intrin(full_dtype, 'vectorcombine', cnts2[0], cnts2[1]) shifted_cnts = cnts << tvm.const(bw+bx, pack_dtype) out = tvm.call_llvm_intrin(return_dtype, vpadalu, args_2, zz.vload(0, return_dtype), shifted_cnts) else: # ki == 8 for i in range(m): w_ = ww.vload([bw, i, 0], 'uint8x8').astype(half_dtype) x_ = xx.vload([bx, 0], 'uint8x8').astype(half_dtype) if unipolar: cnts8[i] = tvm.popcount(w_ & 
x_) - tvm.popcount(~w_ & x_) else: cnts8[i] = tvm.popcount(w_ & x_) for i in range(m//2): cnts4[i] = tvm.call_llvm_intrin(half_dtype, vpadd, args_1, cnts8[i*2], cnts8[i*2+1]) for i in range(m//4): cnts2[i] = tvm.call_llvm_intrin(half_dtype, vpadd, args_1, cnts4[i*2], cnts4[i*2+1]) cnts = tvm.call_pure_intrin(full_dtype, 'vectorcombine', cnts2[0], cnts2[1]) shifted_cnts = cnts << tvm.const(bw+bx, pack_dtype) out = tvm.call_llvm_intrin(return_dtype, vpadalu, args_2, zz.vload(0, return_dtype), shifted_cnts) irb.emit(zz.vstore(0, out)) return irb.get() # body, reset, update return _instr(0), _instr(1), _instr(2) with tvm.build_config(offset_factor=1, partition_const_loop=True): return tvm.decl_tensor_intrin(z.op, _intrin_func, binds={w: Wb, x:Xb, z:Zb}) # ARM specific schedule that using custom microkernel def _schedule_spatial_conv2d_nhwc(cfg, s, data_pad, data_vec, kernel_vec, conv_out, output, last, unipolar): _, _, _, _, _, IB, CI = data_vec.shape _, KH, KW, KB, _, _ = kernel_vec.shape KB = get_const_int(KB) IB = get_const_int(IB) VC = cfg["tile_co"].size[-1] VH = cfg["tile_oh"].size[-1] VW = cfg["tile_ow"].size[-1] ##### Schedule data padding and packing if data_pad is not None: s[data_pad].compute_inline() _, h, _, _, _, _, _ = s[data_vec].op.axis cfg.define_split("tile_ah", cfg.axis(h), policy="all", num_outputs=2, max_factor=32) oh, ih = cfg["tile_ah"].apply(s, data_vec, h) s[data_vec].parallel(oh) #### Schedule kernel packing co, _, _, _, _, _ = s[kernel_vec].op.axis cfg.define_split("tile_bco", cfg.axis(co), policy="all", num_outputs=2, max_factor=32) oco, ico = cfg["tile_bco"].apply(s, kernel_vec, co) s[kernel_vec].parallel(oco) ##### Schedule Convolution n, oh, ow, co, vh, vw, vc = s[conv_out].op.axis kh, kw, kb, ib, ci = s[conv_out].op.reduce_axis ci_o, ci_i = cfg['tile_ci'].apply(s, conv_out, ci) re_axes = cfg["reorder_0"].apply(s, conv_out, [n, oh, ow, co, vh, vw, kh, kw, ci_o, kb, ib, vc, ci_i]) # Use microkernel kfactor = cfg['tile_ci'].size[1] if kfactor % 8 == 0: pc = _intrin_popcount(VC, kfactor, KB, IB, unipolar) s[conv_out].tensorize(kb, pc) n, h, w, co = s[last].op.axis co, vc = cfg['tile_co'].apply(s, last, co) oh, vh = cfg['tile_oh'].apply(s, last, h) ow, vw = cfg['tile_ow'].apply(s, last, w) s[last].reorder(n, oh, ow, co, vh, vw, vc) s[last].vectorize(vc) if last != output: s[last].compute_inline() s[conv_out].compute_at(s[last], co) s[last].parallel(oh) s = s.normalize() return s @autotvm.register_topi_schedule(generic.nn.schedule_bitserial_conv2d_nhwc, 'arm_cpu', 'direct') def schedule_bitserial_conv2d_nhwc(cfg, outs): """Arm cpu schedule for bitserial conv2d""" s = tvm.create_schedule([x.op for x in outs]) scheduled_ops = [] def traverse(op): """Traverse operators from computation graph""" # inline all one-to-one-mapping operators except the last stage (output) if tag.is_broadcast(op.tag): if op not in s.outputs: s[op].compute_inline() for tensor in op.input_tensors: if tensor.op.input_tensors and tensor.op not in scheduled_ops: traverse(tensor.op) if 'spatial_bitserial_conv_nhwc' in op.tag: output = op.output(0) conv_out = op.input_tensors[0] kernel_vec = conv_out.op.input_tensors[0] kernel_q = kernel_vec.op.input_tensors[0] data_vec = conv_out.op.input_tensors[1] data_q = data_vec.op.input_tensors[0] data = data_q.op.input_tensors[0] data_pad = None if isinstance(data_q.op, tvm.tensor.ComputeOp) and "pad" in data_q.op.tag: data_pad = data_q data_q = data data = data.op.input_tensors[0] unipolar = "unipolar" in conv_out.op.tag _schedule_spatial_conv2d_nhwc(cfg, s, 
data_pad, data_vec, kernel_vec, conv_out, output, outs[0], unipolar) scheduled_ops.append(op) traverse(outs[0].op) return s
py
1a40884f8e3655b3dfc6d99906ed6b026d45047f
import copy
from math import floor

from Objects.Object import Object


class Repeater(Object):
    def __init__(self, isVisible, position, content, pixellength, numRepeats=-1, spacing=0):
        super().__init__(isVisible, position, content)
        self.numRepeats = numRepeats
        self.spacing = spacing
        self.pixellength = pixellength

    def getContent(self):
        max_reps = floor(self.pixellength / (len(self.content) + self.spacing))
        reps = max_reps if self.numRepeats == -1 else min(self.numRepeats, max_reps)
        full = copy.deepcopy(self.content)
        full.extend([[-1, -1, -1]] * self.spacing)
        return full * reps
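

# --- Editor's note: hedged usage sketch, not part of the original file. ---
# It assumes Objects.Object.Object simply stores `content` on the instance,
# which is what getContent() relies on; the argument values below (visibility
# flag, position, a 3-pixel RGB pattern, a 30-pixel strip) are illustrative,
# not values taken from the original project.
if __name__ == '__main__':
    strip = Repeater(
        True,                                       # isVisible
        0,                                          # position
        [[255, 0, 0], [0, 255, 0], [0, 0, 255]],    # content: 3-pixel RGB pattern
        pixellength=30,                             # total strip length in pixels
        numRepeats=-1,                              # -1 = repeat as often as fits
        spacing=1,                                  # one blank [-1, -1, -1] pixel between copies
    )
    frame = strip.getContent()
    print(len(frame))  # 7 copies of (3 pattern + 1 spacer) pixels = 28 entries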
py
1a40887c5468f67d85b98a8522027714ec578ef4
import adv_test
import adv
from slot.d import *


def module():
    return Curran


class Curran(adv.Adv):
    comment = "no fs"

    a1 = ('od', 0.13)
    a3 = ('lo', 0.5)


if __name__ == '__main__':
    conf = {}
    conf['acl'] = """
        `s1
        `s2, seq=2
        `s3
        """
    conf['slot.d'] = Shinobi()
    adv_test.test(module(), conf, verbose=-2)
py
1a408b0011d3012601c2899b295c1f98793c062e
def _apply_entities(text, entities, escape_map, format_map):
    def inside_entities(i):
        return any(map(lambda e: e['offset'] <= i < e['offset']+e['length'], entities))

    # Split string into char sequence and escape in-place to
    # preserve index positions.
    seq = list(map(lambda c, i: escape_map[c]        # escape special characters
                                if c in escape_map and not inside_entities(i)
                                else c,
                   list(text),                       # split string to char sequence
                   range(0, len(text))))             # along with each char's index

    # Ensure smaller offsets come first
    sorted_entities = sorted(entities, key=lambda e: e['offset'])

    offset = 0
    result = ''
    for e in sorted_entities:
        f, n, t = e['offset'], e['length'], e['type']

        result += ''.join(seq[offset:f])

        if t in format_map:  # apply format
            result += format_map[t](''.join(seq[f:f+n]), e)
        else:
            result += ''.join(seq[f:f+n])

        offset = f + n

    result += ''.join(seq[offset:])
    return result


def apply_entities_as_markdown(text, entities):
    """
    Format text as Markdown. Also take care of escaping special characters.
    Returned value can be passed to :meth:`.Bot.sendMessage` with appropriate ``parse_mode``.

    :param text: plain text
    :param entities:
        a list of `MessageEntity <https://core.telegram.org/bots/api#messageentity>`_ objects
    """
    escapes = {'*': '\\*',
               '_': '\\_',
               '[': '\\[',
               '`': '\\`'}

    formatters = {'bold': lambda s, e: '*'+s+'*',
                  'italic': lambda s, e: '_'+s+'_',
                  'text_link': lambda s, e: '['+s+']('+e['url']+')',
                  'text_mention': lambda s, e: '['+s+'](tg://user?id='+str(e['user']['id'])+')',
                  'code': lambda s, e: '`'+s+'`',
                  'pre': lambda s, e: '```text\n'+s+'```'}

    return _apply_entities(text, entities, escapes, formatters)


def apply_entities_as_html(text, entities):
    """
    Format text as HTML. Also take care of escaping special characters.
    Returned value can be passed to :meth:`.Bot.sendMessage` with appropriate ``parse_mode``.

    :param text: plain text
    :param entities:
        a list of `MessageEntity <https://core.telegram.org/bots/api#messageentity>`_ objects
    """
    escapes = {'<': '&lt;',
               '>': '&gt;',
               '&': '&amp;'}

    formatters = {'bold': lambda s, e: '<b>'+s+'</b>',
                  'italic': lambda s, e: '<i>'+s+'</i>',
                  'text_link': lambda s, e: '<a href="'+e['url']+'">'+s+'</a>',
                  'text_mention': lambda s, e: '<a href="tg://user?id='+str(e['user']['id'])+'">'+s+'</a>',
                  'code': lambda s, e: '<code>'+s+'</code>',
                  'pre': lambda s, e: '<pre>'+s+'</pre>'}

    return _apply_entities(text, entities, escapes, formatters)
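

# --- Editor's note: hedged usage sketch, not part of the original module. ---
# It shows the expected shape of the Telegram `entities` payload (a list of
# dicts with `offset`, `length`, `type`), using a hand-written message rather
# than a real Bot API response.
if __name__ == '__main__':
    sample_text = 'Hello world, see https://example.com'
    sample_entities = [
        {'offset': 0, 'length': 5, 'type': 'bold'},
        {'offset': 17, 'length': 19, 'type': 'url'},
    ]
    # 'url' has no formatter registered, so only 'Hello' gets wrapped.
    print(apply_entities_as_markdown(sample_text, sample_entities))  # *Hello* world, see https://example.com
    print(apply_entities_as_html(sample_text, sample_entities))      # <b>Hello</b> world, see https://example.com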
py
1a408c0a660ce638e9e1e26de29135c4193904e9
"""Support for (EMEA/EU-based) Honeywell TCC climate systems. Such systems include evohome, Round Thermostat, and others. """ from datetime import datetime as dt, timedelta import logging import re from typing import Any, Dict, Optional, Tuple import aiohttp.client_exceptions import evohomeasync import evohomeasync2 import voluptuous as vol from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME, HTTP_SERVICE_UNAVAILABLE, HTTP_TOO_MANY_REQUESTS, TEMP_CELSIUS, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.entity import Entity from homeassistant.helpers.service import verify_domain_control from homeassistant.helpers.typing import ConfigType, HomeAssistantType import homeassistant.util.dt as dt_util from .const import DOMAIN, EVO_FOLLOW, GWS, STORAGE_KEY, STORAGE_VERSION, TCS _LOGGER = logging.getLogger(__name__) ACCESS_TOKEN = "access_token" ACCESS_TOKEN_EXPIRES = "access_token_expires" REFRESH_TOKEN = "refresh_token" USER_DATA = "user_data" CONF_LOCATION_IDX = "location_idx" SCAN_INTERVAL_DEFAULT = timedelta(seconds=300) SCAN_INTERVAL_MINIMUM = timedelta(seconds=60) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_LOCATION_IDX, default=0): cv.positive_int, vol.Optional( CONF_SCAN_INTERVAL, default=SCAN_INTERVAL_DEFAULT ): vol.All(cv.time_period, vol.Range(min=SCAN_INTERVAL_MINIMUM)), } ) }, extra=vol.ALLOW_EXTRA, ) ATTR_SYSTEM_MODE = "mode" ATTR_DURATION_DAYS = "period" ATTR_DURATION_HOURS = "duration" ATTR_ZONE_TEMP = "setpoint" ATTR_DURATION_UNTIL = "duration" SVC_REFRESH_SYSTEM = "refresh_system" SVC_SET_SYSTEM_MODE = "set_system_mode" SVC_RESET_SYSTEM = "reset_system" SVC_SET_ZONE_OVERRIDE = "set_zone_override" SVC_RESET_ZONE_OVERRIDE = "clear_zone_override" RESET_ZONE_OVERRIDE_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id}) SET_ZONE_OVERRIDE_SCHEMA = vol.Schema( { vol.Required(ATTR_ENTITY_ID): cv.entity_id, vol.Required(ATTR_ZONE_TEMP): vol.All( vol.Coerce(float), vol.Range(min=4.0, max=35.0) ), vol.Optional(ATTR_DURATION_UNTIL): vol.All( cv.time_period, vol.Range(min=timedelta(days=0), max=timedelta(days=1)), ), } ) # system mode schemas are built dynamically, below def _local_dt_to_aware(dt_naive: dt) -> dt: dt_aware = dt_util.now() + (dt_naive - dt.now()) if dt_aware.microsecond >= 500000: dt_aware += timedelta(seconds=1) return dt_aware.replace(microsecond=0) def _dt_to_local_naive(dt_aware: dt) -> dt: dt_naive = dt.now() + (dt_aware - dt_util.now()) if dt_naive.microsecond >= 500000: dt_naive += timedelta(seconds=1) return dt_naive.replace(microsecond=0) def convert_until(status_dict, until_key) -> str: """Convert datetime string from "%Y-%m-%dT%H:%M:%SZ" to local/aware/isoformat.""" if until_key in status_dict: # only present for certain modes dt_utc_naive = dt_util.parse_datetime(status_dict[until_key]) status_dict[until_key] = dt_util.as_local(dt_utc_naive).isoformat() def convert_dict(dictionary: Dict[str, Any]) -> Dict[str, Any]: """Recursively convert a dict's keys to snake_case.""" def convert_key(key: str) -> str: """Convert a string to snake_case.""" string = re.sub(r"[\-\.\s]", "_", str(key)) return 
(string[0]).lower() + re.sub( r"[A-Z]", lambda matched: "_" + matched.group(0).lower(), string[1:] ) return { (convert_key(k) if isinstance(k, str) else k): ( convert_dict(v) if isinstance(v, dict) else v ) for k, v in dictionary.items() } def _handle_exception(err) -> bool: """Return False if the exception can't be ignored.""" try: raise err except evohomeasync2.AuthenticationError: _LOGGER.error( "Failed to authenticate with the vendor's server. " "Check your network and the vendor's service status page. " "Also check that your username and password are correct. " "Message is: %s", err, ) return False except aiohttp.ClientConnectionError: # this appears to be a common occurrence with the vendor's servers _LOGGER.warning( "Unable to connect with the vendor's server. " "Check your network and the vendor's service status page. " "Message is: %s", err, ) return False except aiohttp.ClientResponseError: if err.status == HTTP_SERVICE_UNAVAILABLE: _LOGGER.warning( "The vendor says their server is currently unavailable. " "Check the vendor's service status page." ) return False if err.status == HTTP_TOO_MANY_REQUESTS: _LOGGER.warning( "The vendor's API rate limit has been exceeded. " "If this message persists, consider increasing the %s.", CONF_SCAN_INTERVAL, ) return False raise # we don't expect/handle any other Exceptions async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool: """Create a (EMEA/EU-based) Honeywell TCC system.""" async def load_auth_tokens(store) -> Tuple[Dict, Optional[Dict]]: app_storage = await store.async_load() tokens = dict(app_storage if app_storage else {}) if tokens.pop(CONF_USERNAME, None) != config[DOMAIN][CONF_USERNAME]: # any tokens won't be valid, and store might be be corrupt await store.async_save({}) return ({}, None) # evohomeasync2 requires naive/local datetimes as strings if tokens.get(ACCESS_TOKEN_EXPIRES) is not None: tokens[ACCESS_TOKEN_EXPIRES] = _dt_to_local_naive( dt_util.parse_datetime(tokens[ACCESS_TOKEN_EXPIRES]) ) user_data = tokens.pop(USER_DATA, None) return (tokens, user_data) store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY) tokens, user_data = await load_auth_tokens(store) client_v2 = evohomeasync2.EvohomeClient( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD], **tokens, session=async_get_clientsession(hass), ) try: await client_v2.login() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) return False finally: config[DOMAIN][CONF_PASSWORD] = "REDACTED" loc_idx = config[DOMAIN][CONF_LOCATION_IDX] try: loc_config = client_v2.installation_info[loc_idx][GWS][0][TCS][0] except IndexError: _LOGGER.error( "Config error: '%s' = %s, but the valid range is 0-%s. " "Unable to continue. 
Fix any configuration errors and restart HA.", CONF_LOCATION_IDX, loc_idx, len(client_v2.installation_info) - 1, ) return False _LOGGER.debug("Config = %s", loc_config) client_v1 = evohomeasync.EvohomeClient( client_v2.username, client_v2.password, user_data=user_data, session=async_get_clientsession(hass), ) hass.data[DOMAIN] = {} hass.data[DOMAIN]["broker"] = broker = EvoBroker( hass, client_v2, client_v1, store, config[DOMAIN] ) await broker.save_auth_tokens() await broker.async_update() # get initial state hass.async_create_task(async_load_platform(hass, "climate", DOMAIN, {}, config)) if broker.tcs.hotwater: hass.async_create_task( async_load_platform(hass, "water_heater", DOMAIN, {}, config) ) hass.helpers.event.async_track_time_interval( broker.async_update, config[DOMAIN][CONF_SCAN_INTERVAL] ) setup_service_functions(hass, broker) return True @callback def setup_service_functions(hass: HomeAssistantType, broker): """Set up the service handlers for the system/zone operating modes. Not all Honeywell TCC-compatible systems support all operating modes. In addition, each mode will require any of four distinct service schemas. This has to be enumerated before registering the appropriate handlers. It appears that all TCC-compatible systems support the same three zones modes. """ @verify_domain_control(hass, DOMAIN) async def force_refresh(call) -> None: """Obtain the latest state data via the vendor's RESTful API.""" await broker.async_update() @verify_domain_control(hass, DOMAIN) async def set_system_mode(call) -> None: """Set the system mode.""" payload = { "unique_id": broker.tcs.systemId, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) @verify_domain_control(hass, DOMAIN) async def set_zone_override(call) -> None: """Set the zone override (setpoint).""" entity_id = call.data[ATTR_ENTITY_ID] registry = await hass.helpers.entity_registry.async_get_registry() registry_entry = registry.async_get(entity_id) if registry_entry is None or registry_entry.platform != DOMAIN: raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity") if registry_entry.domain != "climate": raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone") payload = { "unique_id": registry_entry.unique_id, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) hass.services.async_register(DOMAIN, SVC_REFRESH_SYSTEM, force_refresh) # Enumerate which operating modes are supported by this system modes = broker.config["allowedSystemModes"] # Not all systems support "AutoWithReset": register this handler only if required if [m["systemMode"] for m in modes if m["systemMode"] == "AutoWithReset"]: hass.services.async_register(DOMAIN, SVC_RESET_SYSTEM, set_system_mode) system_mode_schemas = [] modes = [m for m in modes if m["systemMode"] != "AutoWithReset"] # Permanent-only modes will use this schema perm_modes = [m["systemMode"] for m in modes if not m["canBeTemporary"]] if perm_modes: # any of: "Auto", "HeatingOff": permanent only schema = vol.Schema({vol.Required(ATTR_SYSTEM_MODE): vol.In(perm_modes)}) system_mode_schemas.append(schema) modes = [m for m in modes if m["canBeTemporary"]] # These modes are set for a number of hours (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if m["timingMode"] == "Duration"] if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_HOURS): vol.All( 
cv.time_period, vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)), ), } ) system_mode_schemas.append(schema) # These modes are set for a number of days (or indefinitely): use this schema temp_modes = [m["systemMode"] for m in modes if m["timingMode"] == "Period"] if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_DAYS): vol.All( cv.time_period, vol.Range(min=timedelta(days=1), max=timedelta(days=99)), ), } ) system_mode_schemas.append(schema) if system_mode_schemas: hass.services.async_register( DOMAIN, SVC_SET_SYSTEM_MODE, set_system_mode, schema=vol.Any(*system_mode_schemas), ) # The zone modes are consistent across all systems and use the same schema hass.services.async_register( DOMAIN, SVC_RESET_ZONE_OVERRIDE, set_zone_override, schema=RESET_ZONE_OVERRIDE_SCHEMA, ) hass.services.async_register( DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_override, schema=SET_ZONE_OVERRIDE_SCHEMA, ) class EvoBroker: """Container for evohome client and data.""" def __init__(self, hass, client, client_v1, store, params) -> None: """Initialize the evohome client and its data structure.""" self.hass = hass self.client = client self.client_v1 = client_v1 self._store = store self.params = params loc_idx = params[CONF_LOCATION_IDX] self.config = client.installation_info[loc_idx][GWS][0][TCS][0] self.tcs = client.locations[loc_idx]._gateways[0]._control_systems[0] self.temps = {} async def save_auth_tokens(self) -> None: """Save access tokens and session IDs to the store for later use.""" # evohomeasync2 uses naive/local datetimes access_token_expires = _local_dt_to_aware(self.client.access_token_expires) app_storage = {CONF_USERNAME: self.client.username} app_storage[REFRESH_TOKEN] = self.client.refresh_token app_storage[ACCESS_TOKEN] = self.client.access_token app_storage[ACCESS_TOKEN_EXPIRES] = access_token_expires.isoformat() if self.client_v1 and self.client_v1.user_data: app_storage[USER_DATA] = { "userInfo": {"userID": self.client_v1.user_data["userInfo"]["userID"]}, "sessionId": self.client_v1.user_data["sessionId"], } else: app_storage[USER_DATA] = None await self._store.async_save(app_storage) async def call_client_api(self, api_function, refresh=True) -> Any: """Call a client API.""" try: result = await api_function except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: if not _handle_exception(err): return if refresh: self.hass.helpers.event.async_call_later(1, self.async_update()) return result async def _update_v1(self, *args, **kwargs) -> None: """Get the latest high-precision temperatures of the default Location.""" def get_session_id(client_v1) -> Optional[str]: user_data = client_v1.user_data if client_v1 else None return user_data.get("sessionId") if user_data else None session_id = get_session_id(self.client_v1) try: temps = list(await self.client_v1.temperatures(force_refresh=True)) except aiohttp.ClientError as err: _LOGGER.warning( "Unable to obtain the latest high-precision temperatures. " "Check your network and the vendor's service status page. " "Proceeding with low-precision temperatures. 
" "Message is: %s", err, ) self.temps = None # these are now stale, will fall back to v2 temps else: if ( str(self.client_v1.location_id) != self.client.locations[self.params[CONF_LOCATION_IDX]].locationId ): _LOGGER.warning( "The v2 API's configured location doesn't match " "the v1 API's default location (there is more than one location), " "so the high-precision feature will be disabled" ) self.client_v1 = self.temps = None else: self.temps = {str(i["id"]): i["temp"] for i in temps} _LOGGER.debug("Temperatures = %s", self.temps) if session_id != get_session_id(self.client_v1): await self.save_auth_tokens() async def _update_v2(self, *args, **kwargs) -> None: """Get the latest modes, temperatures, setpoints of a Location.""" access_token = self.client.access_token loc_idx = self.params[CONF_LOCATION_IDX] try: status = await self.client.locations[loc_idx].status() except (aiohttp.ClientError, evohomeasync2.AuthenticationError) as err: _handle_exception(err) else: async_dispatcher_send(self.hass, DOMAIN) _LOGGER.debug("Status = %s", status[GWS][0][TCS][0]) if access_token != self.client.access_token: await self.save_auth_tokens() async def async_update(self, *args, **kwargs) -> None: """Get the latest state data of an entire Honeywell TCC Location. This includes state data for a Controller and all its child devices, such as the operating mode of the Controller and the current temp of its children (e.g. Zones, DHW controller). """ await self._update_v2() if self.client_v1: await self._update_v1() # inform the evohome devices that state data has been updated async_dispatcher_send(self.hass, DOMAIN) class EvoDevice(Entity): """Base for any evohome device. This includes the Controller, (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize the evohome entity.""" self._evo_device = evo_device self._evo_broker = evo_broker self._evo_tcs = evo_broker.tcs self._unique_id = self._name = self._icon = self._precision = None self._supported_features = None self._device_state_attrs = {} async def async_refresh(self, payload: Optional[dict] = None) -> None: """Process any signals.""" if payload is None: self.async_schedule_update_ha_state(force_refresh=True) return if payload["unique_id"] != self._unique_id: return if payload["service"] in [SVC_SET_ZONE_OVERRIDE, SVC_RESET_ZONE_OVERRIDE]: await self.async_zone_svc_request(payload["service"], payload["data"]) return await self.async_tcs_svc_request(payload["service"], payload["data"]) async def async_tcs_svc_request(self, service: dict, data: dict) -> None: """Process a service request (system mode) for a controller.""" raise NotImplementedError async def async_zone_svc_request(self, service: dict, data: dict) -> None: """Process a service request (setpoint override) for a zone.""" raise NotImplementedError @property def should_poll(self) -> bool: """Evohome entities should not be polled.""" return False @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self) -> str: """Return the name of the evohome entity.""" return self._name @property def device_state_attributes(self) -> Dict[str, Any]: """Return the evohome-specific state attributes.""" status = self._device_state_attrs if "systemModeStatus" in status: convert_until(status["systemModeStatus"], "timeUntil") if "setpointStatus" in status: convert_until(status["setpointStatus"], "until") if "stateStatus" in status: convert_until(status["stateStatus"], "until") 
return {"status": convert_dict(status)} @property def icon(self) -> str: """Return the icon to use in the frontend UI.""" return self._icon @property def supported_features(self) -> int: """Get the flag of supported features of the device.""" return self._supported_features async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" async_dispatcher_connect(self.hass, DOMAIN, self.async_refresh) @property def precision(self) -> float: """Return the temperature precision to use in the frontend UI.""" return self._precision @property def temperature_unit(self) -> str: """Return the temperature unit to use in the frontend UI.""" return TEMP_CELSIUS class EvoChild(EvoDevice): """Base for any evohome child. This includes (up to 12) Heating Zones and (optionally) a DHW controller. """ def __init__(self, evo_broker, evo_device) -> None: """Initialize a evohome Controller (hub).""" super().__init__(evo_broker, evo_device) self._schedule = {} self._setpoints = {} @property def current_temperature(self) -> Optional[float]: """Return the current temperature of a Zone.""" if not self._evo_device.temperatureStatus["isAvailable"]: return None if self._evo_broker.temps: return self._evo_broker.temps[self._evo_device.zoneId] return self._evo_device.temperatureStatus["temperature"] @property def setpoints(self) -> Dict[str, Any]: """Return the current/next setpoints from the schedule. Only Zones & DHW controllers (but not the TCS) can have schedules. """ if not self._schedule["DailySchedules"]: return {} # no schedule {'DailySchedules': []}, so no scheduled setpoints day_time = dt_util.now() day_of_week = int(day_time.strftime("%w")) # 0 is Sunday time_of_day = day_time.strftime("%H:%M:%S") try: # Iterate today's switchpoints until past the current time of day... day = self._schedule["DailySchedules"][day_of_week] sp_idx = -1 # last switchpoint of the day before for i, tmp in enumerate(day["Switchpoints"]): if time_of_day > tmp["TimeOfDay"]: sp_idx = i # current setpoint else: break # Did the current SP start yesterday? Does the next start SP tomorrow? 
this_sp_day = -1 if sp_idx == -1 else 0 next_sp_day = 1 if sp_idx + 1 == len(day["Switchpoints"]) else 0 for key, offset, idx in [ ("this", this_sp_day, sp_idx), ("next", next_sp_day, (sp_idx + 1) * (1 - next_sp_day)), ]: sp_date = (day_time + timedelta(days=offset)).strftime("%Y-%m-%d") day = self._schedule["DailySchedules"][(day_of_week + offset) % 7] switchpoint = day["Switchpoints"][idx] dt_local_aware = _local_dt_to_aware( dt_util.parse_datetime(f"{sp_date}T{switchpoint['TimeOfDay']}") ) self._setpoints[f"{key}_sp_from"] = dt_local_aware.isoformat() try: self._setpoints[f"{key}_sp_temp"] = switchpoint["heatSetpoint"] except KeyError: self._setpoints[f"{key}_sp_state"] = switchpoint["DhwState"] except IndexError: self._setpoints = {} _LOGGER.warning( "Failed to get setpoints, report as an issue if this error persists", exc_info=True, ) return self._setpoints async def _update_schedule(self) -> None: """Get the latest schedule, if any.""" if "DailySchedules" in self._schedule and not self._schedule["DailySchedules"]: if not self._evo_device.setpointStatus["setpointMode"] == EVO_FOLLOW: return # avoid unnecessary I/O - there's nothing to update self._schedule = await self._evo_broker.call_client_api( self._evo_device.schedule(), refresh=False ) _LOGGER.debug("Schedule['%s'] = %s", self.name, self._schedule) async def async_update(self) -> None: """Get the latest state data.""" next_sp_from = self._setpoints.get("next_sp_from", "2000-01-01T00:00:00+00:00") if dt_util.now() >= dt_util.parse_datetime(next_sp_from): await self._update_schedule() # no schedule, or it's out-of-date self._device_state_attrs = {"setpoints": self.setpoints}
py
1a408c8e141b93fda78e64d37317450e1216c5d1
import os import genapi import numpy_api from genapi import \ TypeApi, GlobalVarApi, FunctionApi, BoolValuesApi h_template = r""" #ifdef _UMATHMODULE #ifdef NPY_ENABLE_SEPARATE_COMPILATION extern NPY_NO_EXPORT PyTypeObject PyUFunc_Type; #else NPY_NO_EXPORT PyTypeObject PyUFunc_Type; #endif %s #else #if defined(PY_UFUNC_UNIQUE_SYMBOL) #define PyUFunc_API PY_UFUNC_UNIQUE_SYMBOL #endif #if defined(NO_IMPORT) || defined(NO_IMPORT_UFUNC) extern void **PyUFunc_API; #else #if defined(PY_UFUNC_UNIQUE_SYMBOL) void **PyUFunc_API; #else static void **PyUFunc_API=NULL; #endif #endif %s static int _import_umath(void) { PyObject *numpy = PyImport_ImportModule("numpy.core.umath"); PyObject *c_api = NULL; if (numpy == NULL) return -1; c_api = PyObject_GetAttrString(numpy, "_UFUNC_API"); if (c_api == NULL) {Py_DECREF(numpy); return -1;} if (PyCObject_Check(c_api)) { PyUFunc_API = (void **)PyCObject_AsVoidPtr(c_api); } Py_DECREF(c_api); Py_DECREF(numpy); if (PyUFunc_API == NULL) return -1; return 0; } #define import_umath() { UFUNC_NOFPE if (_import_umath() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.umath failed to import"); return; }} #define import_umath1(ret) { UFUNC_NOFPE if (_import_umath() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.umath failed to import"); return ret; }} #define import_umath2(msg, ret) { UFUNC_NOFPE if (_import_umath() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, msg); return ret; }} #define import_ufunc() { UFUNC_NOFPE if (_import_umath() < 0) {PyErr_Print(); PyErr_SetString(PyExc_ImportError, "numpy.core.umath failed to import"); }} #endif """ c_template = r""" /* These pointers will be stored in the C-object for use in other extension modules */ void *PyUFunc_API[] = { %s }; """ def generate_api(output_dir, force=False): basename = 'ufunc_api' h_file = os.path.join(output_dir, '__%s.h' % basename) c_file = os.path.join(output_dir, '__%s.c' % basename) d_file = os.path.join(output_dir, '%s.txt' % basename) targets = (h_file, c_file, d_file) sources = ['ufunc_api_order.txt'] if (not force and not genapi.should_rebuild(targets, sources + [__file__])): return targets else: do_generate_api(targets, sources) return targets def do_generate_api(targets, sources): header_file = targets[0] c_file = targets[1] doc_file = targets[2] ufunc_api_index = genapi.merge_api_dicts(( numpy_api.ufunc_funcs_api, numpy_api.ufunc_types_api)) genapi.check_api_dict(ufunc_api_index) ufunc_api_list = genapi.get_api_functions('UFUNC_API', numpy_api.ufunc_funcs_api) # Create dict name -> *Api instance ufunc_api_dict = {} api_name = 'PyUFunc_API' for f in ufunc_api_list: name = f.name index = ufunc_api_index[name] ufunc_api_dict[name] = FunctionApi(f.name, index, f.return_type, f.args, api_name) for name, index in numpy_api.ufunc_types_api.items(): ufunc_api_dict[name] = TypeApi(name, index, 'PyTypeObject', api_name) # set up object API module_list = [] extension_list = [] init_list = [] for name, index in genapi.order_dict(ufunc_api_index): api_item = ufunc_api_dict[name] extension_list.append(api_item.define_from_array_api_string()) init_list.append(api_item.array_api_define()) module_list.append(api_item.internal_define()) # Write to header fid = open(header_file, 'w') s = h_template % ('\n'.join(module_list), '\n'.join(extension_list)) fid.write(s) fid.close() # Write to c-code fid = open(c_file, 'w') s = c_template % ',\n'.join(init_list) fid.write(s) fid.close() # Write to documentation fid = open(doc_file, 'w') fid.write(''' ================= 
Numpy Ufunc C-API ================= ''') for func in ufunc_api_list: fid.write(func.to_ReST()) fid.write('\n\n') fid.close() return targets
py
1a408d10737edf27648279f4e05d7cc126574bcb
def bbknn(adata, batch_key='batch', save_knn=False, copy=False, **kwargs):
    """\
    Batch balanced kNN [Park18]_.

    Batch balanced kNN alters the kNN procedure to identify each cell's top neighbours in
    each batch separately instead of the entire cell pool with no accounting for batch.
    Aligns batches in a quick and lightweight manner.

    For use in the scanpy workflow as an alternative to :func:`scanpy.pp.neighbors`.

    .. note::

        This is just a wrapper of :func:`bbknn.bbknn`: more information
        and bug reports `here <https://github.com/Teichlab/bbknn>`__.

    Params
    ------
    adata : ``AnnData``
        Needs the PCA computed and stored in ``adata.obsm["X_pca"]``.
    batch_key : ``str``, optional (default: "batch")
        ``adata.obs`` column name discriminating between your batches.
    neighbors_within_batch : ``int``, optional (default: 3)
        How many top neighbours to report for each batch; total number of neighbours
        will be this number times the number of batches.
    n_pcs : ``int``, optional (default: 50)
        How many principal components to use in the analysis.
    trim : ``int`` or ``None``, optional (default: ``None``)
        If not ``None``, trim the neighbours of each cell to these many top connectivities.
        May help with population independence and improve the tidiness of clustering.
    approx : ``bool``, optional (default: ``True``)
        If ``True``, use annoy's approximate neighbour finding. This results in a quicker
        run time for large datasets while also potentially increasing the degree of
        batch correction.
    n_trees : ``int``, optional (default: 10)
        Only used when ``approx=True``. The number of trees to construct in the annoy forest.
        More trees give higher precision when querying, at the cost of increased run time
        and resource intensity.
    use_faiss : ``bool``, optional (default: ``True``)
        If ``approx=False`` and the metric is "euclidean", use the faiss package to compute
        nearest neighbours if installed. This improves performance at a minor cost to
        numerical precision as faiss operates on float32.
    metric : ``str`` or ``sklearn.neighbors.DistanceMetric``, optional (default: "angular")
        What distance metric to use. If using ``approx=True``, the options are "angular",
        "euclidean", "manhattan" and "hamming". Otherwise, the options are "euclidean",
        a member of the ``sklearn.neighbors.KDTree.valid_metrics`` list, or parameterised
        ``sklearn.neighbors.DistanceMetric`` `objects
        <https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.DistanceMetric.html>`_::

            >>> from sklearn import neighbors
            >>> neighbors.KDTree.valid_metrics
            ['p', 'chebyshev', 'cityblock', 'minkowski', 'infinity', 'l2', 'euclidean', 'manhattan', 'l1']
            >>> pass_as_metric = neighbors.DistanceMetric.get_metric('minkowski', p=3)

    bandwidth : ``float``, optional (default: 1)
        ``scanpy.neighbors.compute_connectivities_umap`` parameter, higher values result in a
        gentler slope of the connectivities exponentials (i.e. larger connectivity values
        being returned)
    local_connectivity : ``int``, optional (default: 1)
        ``scanpy.neighbors.compute_connectivities_umap`` parameter, how many nearest neighbors
        of each cell are assumed to be fully connected (and given a connectivity value of 1)
    save_knn : ``bool``, optional (default: ``False``)
        If ``True``, save the indices of the nearest neighbours for each cell in
        ``adata.uns['bbknn']``.
    copy : ``bool``, optional (default: ``False``)
        If ``True``, return a copy instead of writing to the supplied adata.

    Returns
    -------
    The `adata` with the batch-corrected graph.
    """
    try:
        from bbknn import bbknn
    except ImportError:
        raise ImportError('Please install bbknn: `pip install bbknn`.')
    params = locals()
    kwargs = params.pop('kwargs')
    return bbknn(**params, **kwargs)
py
1a408dbbbed32cb220435052ad13189f3833ff3f
# Copyright (C) 2014 Bill Marczak.
# See the file 'LICENSE' for copying permission.

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 80))
s.listen(1)

while True:
    conn, addr = s.accept()
    data = conn.recv(1024)
    hex = data.encode('hex')
    print hex[0:16]
    if hex[0:16] == "0c00000040017300":
        print "VALID FINSPY HELLO"
        conn.recv(1024); conn.close();
    else:
        print "INVALID FINSPY HELLO"
        conn.close()
        break

s.close()
py
1a408e827f9c83ecaa88fa41a4c2c64c1c78354e
#!/usr/bin/env python3 # coding: utf-8 # Copyright 2016 Abram Hindle, https://github.com/tywtyw2002, and https://github.com/treedust # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Do not use urllib's HTTP GET and POST mechanisms. # Write your own HTTP GET and POST # The point is to understand what you have to send and get experience with it # Wonbin Jeong import sys import socket import re # you may use urllib to encode data appropriately import urllib.parse as parseu def help(): print("httpclient.py [GET/POST] [URL]\n") class HTTPResponse(object): def __init__(self, code=200, body=""): self.code = code self.body = body class HTTPClient(object): #def get_host_port(self,url): def connect(self, host, port): self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.socket.connect((host, port)) return None def get_code(self, data): scheme = data.split("\r\n")[0] code = scheme.split()[1] return int(code) def get_headers(self,data): return None def get_body(self, data): body = data.split("\r\n\r\n")[1] return body def sendall(self, data): self.socket.sendall(data.encode('utf-8')) def close(self): self.socket.close() # read everything from the socket def recvall(self, sock): buffer = bytearray() done = False while not done: part = sock.recv(1024) if (part): buffer.extend(part) else: done = not part return buffer.decode('utf-8') def GET(self, url, args=None): code = 500 body = "" parse_result = parseu.urlparse(url) # print(parse_result) host = parse_result.netloc.split(":")[0] path = parse_result.path if path == "": path = "/" port = parse_result.port if port == None and parse_result.scheme == "https": port = 443 if port == None and parse_result.scheme == "http": port = 80 # print("Port {}\n Path {}\n".format(port, path)) self.connect(host, port) req_header = "GET {} HTTP/1.1\r\n".format(path) req_header += "Host: {}\r\n".format(host) req_header += "Accept: */*\r\n" req_header += "Connection: close\r\n\r\n" self.sendall(req_header) response = self.recvall(self.socket) # print(response) self.close() code = self.get_code(response) body = self.get_body(response) print(code) print(body) return HTTPResponse(code, body) def POST(self, url, args=None): code = 500 body = "" parse_result = parseu.urlparse(url) # print(parse_result) host = parse_result.netloc.split(":")[0] path = parse_result.path port = parse_result.port if port == None and parse_result.scheme == "https": port = 443 if port == None and parse_result.scheme == "http": port = 80 # print("Port {}\n Path {}\n".format(port, path)) self.connect(host, port) if args == None: args = parseu.urlencode("") else: args = parseu.urlencode(args) req_header = "POST {} HTTP/1.1\r\n".format(path) req_header += "Host: {}\r\n".format(host) req_header += "Content-Type: application/x-www-form-urlencoded\r\n" req_header += "Content-Length: {}\r\n".format(len(args)) req_header += "Connection: close\r\n\r\n" req_header += args self.sendall(req_header) response = self.recvall(self.socket) self.close() code = self.get_code(response) body = self.get_body(response) 
print(code) print(body) return HTTPResponse(code, body) def command(self, url, command="GET", args=None): if (command == "POST"): return self.POST( url, args ) else: return self.GET( url, args ) if __name__ == "__main__": client = HTTPClient() command = "GET" if (len(sys.argv) <= 1): help() sys.exit(1) elif (len(sys.argv) == 3): print(client.command( sys.argv[2], sys.argv[1] )) else: print(client.command( sys.argv[1] ))
py
1a408f448c16be9ad149928a2539c9d9ad6bc095
# 00.helloworld.py
#
# This example will create a window where you
# should only see a gaussian noise pattern

import nvisii

nvisii.initialize()

while (not nvisii.should_window_close()):
    pass

nvisii.deinitialize()
py
1a408f4de124877b528b72398ff58050bfc0d20c
from django.urls import path
from django.contrib.auth import views as views_auth

from . import views_profile

app_name = 'profiles'

urlpatterns = [
    # 'ورود' = login
    path('ورود/', views_auth.LoginView.as_view(template_name='profiles/login.html'), name='ورود'),
    # 'خروج' = logout
    path('خروج/', views_auth.LogoutView.as_view(), name='خروج'),
    # 'ثبت' = sign up
    path('ثبت/', views_profile.SignUp.as_view(), name='ثبت'),
]
py
1a408f6adb6316ba0dec838bd479664aa306ab36
#!/usr/local/sci/bin/python #***************************** # # Known Records Check (KRC) # # Check for exceedence of world records # # #************************************************************************ # SVN Info #$Rev:: 219 $: Revision of last commit #$Author:: rdunn $: Author of last commit #$Date:: 2019-05-20 16:56:47 +0100 (Mon, 20 May 2019) $: Date of last commit #************************************************************************ import numpy as np import scipy as sp import datetime as dt # RJHD routines import qc_utils as utils # updated max dewpoint after David's paper. T_X = {"Africa":55.0,"Asia":53.9,"South_America":48.9,"North_America":56.7,"Europe":48.0,"Pacific":50.7,"Antarctica":15.0,"ROW":56.7} T_N = {"Africa":-23.9,"Asia":-67.8,"South_America":-32.8,"North_America":-63.0,"Europe":-58.1,"Pacific":-23.0,"Antarctica":-89.2,"ROW":-89.2} D_X = {"Africa":55.0,"Asia":53.9,"South_America":48.9,"North_America":56.7,"Europe":48.0,"Pacific":50.7,"Antarctica":15.0,"ROW":56.7} D_N = {"Africa":-50.,"Asia":-100.,"South_America":-60.,"North_America":-100.,"Europe":-100.,"Pacific":-50.,"Antarctica":-100.,"ROW":-100.} W_X = {"Africa":113.2,"Asia":113.2,"South_America":113.2,"North_America":113.2,"Europe":113.2,"Pacific":113.2,"Antarctica":113.2,"ROW":113.2} W_N = {"Africa":0.,"Asia":0.,"South_America":0.,"North_America":0.,"Europe":0.,"Pacific":0.,"Antarctica":0.,"ROW":0.} S_X = {"Africa":1083.3,"Asia":1083.3,"South_America":1083.3,"North_America":1083.3,"Europe":1083.3,"Pacific":1083.3,"Antarctica":1083.3,"ROW":1083.3} S_N = {"Africa":870.,"Asia":870.,"South_America":870.,"North_America":870.,"Europe":870.,"Pacific":870.,"Antarctica":870.,"ROW":870.} maxes = {"temperatures": T_X, "dewpoints": D_X, "windspeeds": W_X, "slp": S_X} mins = {"temperatures": T_N, "dewpoints": D_N, "windspeeds": W_N, "slp": S_N} #************************************************************************ def krc_get_wmo_region(stnid): ''' Get the WMO region from the station id ''' region = "ROW" # rest of world if 600000 <= stnid <= 699999: region = "Africa" if 200000 <= stnid <= 200999: region = "Asia" if 202000 <= stnid <= 219999: region = "Asia" if 230000 <= stnid <= 259999: region = "Asia" if 280000 <= stnid <= 329999: region = "Asia" if 350000 <= stnid <= 369999: region = "Asia" if 380000 <= stnid <= 399999: region = "Asia" if 403500 <= stnid <= 485999: region = "Asia" if 488000 <= stnid <= 499999: region = "Asia" if 500000 <= stnid <= 599999: region = "Asia" if 800000 <= stnid <= 889999: region = "South_America" if 700000 <= stnid <= 799999: region = "North_America" if 486000 <= stnid <= 487999: region = "Pacific" if 900000 <= stnid <= 989999: region = "Pacific" if stnid <= 199999: region = "Europe" if 201000 <= stnid <= 201999: region = "Europe" if 220000 <= stnid <= 229999: region = "Europe" if 260000 <= stnid <= 279999: region = "Europe" if 330000 <= stnid <= 349999: region = "Europe" if 370000 <= stnid <= 379999: region = "Europe" if 400000 <= stnid <= 403499: region = "Europe" if 890000 <= stnid <= 899999: region = "Antarctica" return region # krc_get_wmo_region #************************************************************************ def krc_set_flags(locs, flags, col): ''' Set the flags to 1 for correct column if data exists :param array locs: locations for flags :param array flags: the flags :param int col: column to use ''' if len(locs[0]) > 0: flags[locs, col] = 1 return # krc_set_flags #************************************************************************ def krc(station, var_list, 
flag_col, logfile, diagnostics = False, plots = False): ''' Run the known records check for each variable in list :param object station: station to process :param list var_list: list of variables to process :param list flag_col: which columns to use for which variable :param file logfile: logfile to store output :param bool diagnostics: diagnostic output (unused) :param bool plots: do the plots (unused) ''' for v, variable in enumerate(var_list): st_var = getattr(station, variable) st_region = krc_get_wmo_region(station.id) all_filtered = utils.apply_filter_flags(st_var) too_high = np.where(all_filtered > maxes[variable][st_region]) krc_set_flags(too_high, station.qc_flags, flag_col[v]) # make sure that don't flag the missing values! too_low = np.where(np.logical_and(all_filtered < mins[variable][st_region], all_filtered.mask == False )) krc_set_flags(too_low, station.qc_flags, flag_col[v]) flag_locs = np.where(station.qc_flags[:, flag_col[v]] != 0) utils.print_flagged_obs_number(logfile, "World Record", variable, len(flag_locs[0]), noWrite = diagnostics) # copy flags into attribute st_var.flags[flag_locs] = 1 station = utils.append_history(station, "World Record Check") return # krc #************************************************************************ if __name__ == "__main__": print "checking for exceedence of world records"
py
1a4090127ea57ee6dcc337a0f9823df3e6e3da49
def distance(strand_a, strand_b):
    if len(strand_a) != len(strand_b):
        raise ValueError('The lengths of the two strands are different.')
    else:
        return len([1 for a, b in zip(strand_a, strand_b) if a != b])
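

# --- Editor's note: hedged usage sketch, not part of the original solution. ---
# The strands below are illustrative DNA sequences, not test data taken from
# the original exercise files.
if __name__ == '__main__':
    assert distance('AAA', 'AAT') == 1
    assert distance('GGACGGATTCTG', 'AGGACGGATTCT') == 9
    try:
        distance('AA', 'AAA')
    except ValueError as err:
        print(err)  # strands of unequal length raise ValueError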
py
1a409050873f7a5f90856ae314849268d0eca8c0
# This is basically a copy-paste from https://github.com/facebookresearch/detr/blob/master/models/matcher.py
"""
Modules to compute the matching cost and solve the corresponding LSAP.
"""
import sys

import torch
from scipy.optimize import linear_sum_assignment
from torch import nn

sys.path.append('./util')
from seg_ops import seg_bxw_to_cxw, seg_cxw_to_x0x1, generalized_seg_iou


class HungarianMatcher(nn.Module):
    """This class computes an assignment between the targets and the predictions of the network

    For efficiency reasons, the targets don't include the no_object. Because of this, in general,
    there are more predictions than targets. In this case, we do a 1-to-1 matching of the best
    predictions, while the others are un-matched (and thus treated as non-objects).
    """

    def __init__(self, cost_class: float = 1, cost_bsegment: float = 1, cost_giou: float = 1):
        """Creates the matcher

        Params:
            cost_class: This is the relative weight of the classification error in the matching cost
            cost_bsegment: This is the relative weight of the L1 error of the bounding segment coordinates in the matching cost
            cost_giou: This is the relative weight of the giou loss of the bounding segment in the matching cost
        """
        super().__init__()
        self.cost_class = cost_class
        self.cost_bsegment = cost_bsegment
        self.cost_giou = cost_giou
        assert cost_class != 0 or cost_bsegment != 0 or cost_giou != 0, "all costs can't be 0"

    @torch.no_grad()
    def forward(self, outputs, targets):
        """Performs the matching

        Params:
            outputs: This is a dict that contains at least these entries:
                "pred_logits": Tensor of dim [batch_size, num_queries, num_classes] with the classification logits
                "pred_segments": Tensor of dim [batch_size, num_queries, 2] with the predicted segment coordinates

            targets: This is a list of targets (len(targets) = batch_size), where each target is a dict containing:
                "labels": Tensor of dim [num_target_segments] (where num_target_segments is the number of
                          ground-truth pulses in the target) containing the class labels
                "segments": Tensor of dim [num_target_segments, 2] containing the target segments coordinates

        Returns:
            A list of size batch_size, containing tuples of (index_i, index_j) where:
                - index_i is the indices of the selected predictions (in order)
                - index_j is the indices of the corresponding selected targets (in order)
            For each batch element, it holds:
                len(index_i) = len(index_j) = min(num_queries, num_target_segments)
        """
        batch_size, num_queries = outputs["pred_logits"].shape[:2]

        # We flatten to compute the cost matrices in a batch
        out_prob = outputs["pred_logits"].flatten(0, 1).softmax(-1)  # [batch_size * num_queries, num_classes]
        out_bsegment = outputs["pred_segments"].flatten(0, 1)  # [batch_size * num_queries, 2]

        # Also concat the target labels and segments
        tgt_ids = torch.cat([v["labels"] for v in targets]).long()  # [num_target_segments_(1) + num_target_segments_(2) + ... + num_target_segments_(batch_size)]
        tgt_bsegment = torch.cat([v["segments"] for v in targets])  # [num_target_segments_(1) + num_target_segments_(2) + ... + num_target_segments_(batch_size), 2]

        # Compute the classification cost. Contrary to the loss, we don't use the Negative Log-Likelihood (NLL),
        # but approximate it in 1 - proba[target class].
        # The 1 is a constant that doesn't change the matching, it can be omitted.
        cost_class = -out_prob[:, tgt_ids]

        # Compute the L1 cost between segments
        cost_bsegment = torch.cdist(out_bsegment, tgt_bsegment, p=1)

        # Compute the giou cost between segments
        cost_giou = -generalized_seg_iou(seg_cxw_to_x0x1(seg_bxw_to_cxw(out_bsegment)),
                                         seg_cxw_to_x0x1(seg_bxw_to_cxw(tgt_bsegment)))

        # Final cost matrix
        C = self.cost_bsegment * cost_bsegment + self.cost_class * cost_class + self.cost_giou * cost_giou
        C = C.view(batch_size, num_queries, -1).cpu()

        sizes = [len(v['segments']) for v in targets]
        indices = [linear_sum_assignment(c[i]) for i, c in enumerate(C.split(sizes, -1))]
        return [(torch.as_tensor(i, dtype=torch.int64), torch.as_tensor(j, dtype=torch.int64)) for i, j in indices]


def build_matcher(args):
    return HungarianMatcher(cost_class=args.set_cost_class,
                            cost_bsegment=args.set_cost_bsegment,
                            cost_giou=args.set_cost_giou)
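

# --- Editor's note: hedged usage sketch, not part of the original module. ---
# It drives the matcher with random tensors just to illustrate the expected
# input/output shapes. It assumes the project's `seg_ops` helpers behave like
# DETR's box ops (non-negative widths give valid segments); the class count,
# query count and cost weights below are made-up illustrative values.
if __name__ == '__main__':
    matcher = HungarianMatcher(cost_class=1.0, cost_bsegment=5.0, cost_giou=2.0)
    outputs = {
        'pred_logits': torch.randn(2, 10, 4),   # batch of 2, 10 queries, 4 classes
        'pred_segments': torch.rand(2, 10, 2),  # (begin, width) in [0, 1)
    }
    targets = [
        {'labels': torch.tensor([1, 2]), 'segments': torch.rand(2, 2)},
        {'labels': torch.tensor([3]), 'segments': torch.rand(1, 2)},
    ]
    for batch_idx, (pred_idx, tgt_idx) in enumerate(matcher(outputs, targets)):
        print(batch_idx, pred_idx.tolist(), tgt_idx.tolist())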
py
1a4092177a74e0388e77ef1a3cfc87ba8911d557
import os import sys import click from zipfile import ZipFile, ZIP_DEFLATED import pathlib import hashlib import re from loguetools import og, xd, common from loguetools import version XD_PATCH_LENGTH = 1024 def explode(filename, match_name, match_ident, prepend_id, append_md5_4, append_version, unskip_init): """Explode a minilogue og or xd or prologue program bank or extract a program. \b Examples -------- explode xd_program_bank.mnlgxdlib explode -n XDProgName xd_program_bank.mnlgxdlib """ zipobj = ZipFile(filename, "r", compression=ZIP_DEFLATED, compresslevel=9) proglist = common.zipread_progbins(zipobj) proginfo_dict = common.zipread_all_prog_info(zipobj) if match_name is not None: match_ident = common.id_from_name(zipobj, match_name) if match_ident is not None: proglist = [proglist[match_ident - 1]] # Create directory based on the filename stem input_file = pathlib.Path(filename) dir_path = input_file.with_suffix("") dir_path.mkdir(exist_ok=True) if input_file.suffix in {".mnlgxdpreset", ".mnlgxdlib"}: suffix = ".mnlgxdprog" flavour = "xd" elif input_file.suffix in {".mnlgpreset", ".mnlglib"}: suffix = ".mnlgprog" flavour = "og" elif input_file.suffix in {".prlgpreset", ".prlglib"}: suffix = ".prlgprog" flavour = "prologue" elif input_file.suffix in {".molgpreset", ".molglib"}: suffix = ".molgprog" flavour = "monologue" elif input_file.suffix in {".kklib"}: suffix = ".kkprog" flavour = "kk" fileinfo_xml = common.fileinfo_xml(flavour, [0], False) # Read any copyright and author information if available copyright = None author = None comment = None if input_file.suffix in {".mnlgxdpreset", ".mnlgpreset", ".prlgpreset", ".molgpreset"}: author, copyright = common.author_copyright_from_presetinformation_xml(zipobj) sanitise = common.sanitise_patchname() for i, p in enumerate(proglist): patchdata = zipobj.read(p) hash = hashlib.md5(patchdata).hexdigest() flavour = common.patch_type(patchdata) if common.is_init_patch(flavour, hash): # Init Program identified based on hash; i.e. 
a "True" Init Program continue prgname = common.program_name(patchdata, flavour) if common.is_init_program_name(prgname) and not unskip_init: # Init Program found and option not to skip is unchecked continue if prepend_id: prgname = f"{i+1:03d}_{prgname}" if append_md5_4: hash = hashlib.md5(patchdata).hexdigest() prgname = f"{prgname}-{hash[:4]}" if append_version: ver = version.__version__.replace(".", "") prgname = f"{prgname}-v{ver}" output_path = (dir_path / (sanitise(prgname) + suffix)) with ZipFile(output_path, "w") as zip: binary = zipobj.read(p) # .prog_bin record/file zip.writestr(f"Prog_000.prog_bin", binary) # .prog_info record/file # Use any available presetinformation_xml author and copyright fields if author is not None: comment = f"Author: {author}" proginfo_comment = (proginfo_dict[p])['Comment'] if proginfo_comment is not None: comment = f"{comment}, " + proginfo_comment prog_info_template = common.prog_info_template_xml(flavour, comment=comment, copyright=copyright) zip.writestr(f"Prog_000.prog_info", prog_info_template) # FileInformation.xml record/file zip.writestr(f"FileInformation.xml", fileinfo_xml, False) print(f"{int(p[5:8])+1:03d}: {prgname:<12s} -> {output_path}") @click.command() @click.argument("filename", type=click.Path(exists=True)) @click.option("--match_name", "-n", help="Dump the patch with name NAME") @click.option("--match_ident", "-i", type=int, help="Dump the patch with ident ID") @click.option("--prepend_id", "-p", is_flag=True, help="Prepend patch ID to the filename") @click.option("--append_md5_4", "-m", is_flag=True, help="Append 4 digits of an md5 checksum to the filename") @click.option("--append_version", "-v", is_flag=True, help="Append loguetools version to the filename") @click.option("--unskip_init", "-u", is_flag=True, help="Don't skip patches named Init Program") def click_explode(filename, match_name, match_ident, prepend_id, append_md5_4, append_version, unskip_init): explode(filename, match_name, match_ident, prepend_id, append_md5_4, append_version, unskip_init) if __name__ == "__main__": click_explode()
py
1a4092e4644b62603ccfd0d1a49fcbe8800ad787
from flask import request, render_template, session class Version_HTML(): endpoints = ["/version", "/version.html"] endpoint_name = "page_version_html" endpoint_access_level = 1 endpoint_category = "tool_pages" pretty_name = "Version" def __init__(self, fhdhr): self.fhdhr = fhdhr def __call__(self, *args): return self.get(*args) def get(self, *args): version_dict = {} for key in list(self.fhdhr.config.internal["versions"].keys()): version_dict[key] = self.fhdhr.config.internal["versions"][key] # Sort the Version Info sorted_version_list = sorted(version_dict, key=lambda i: (version_dict[i]['type'], version_dict[i]['name'])) sorted_version_dict = { "fHDHR": version_dict["fHDHR"], "fHDHR_web": version_dict["fHDHR_web"] } for version_item in sorted_version_list: if version_item not in ["fHDHR", "fHDHR_web"]: sorted_version_dict[version_item] = version_dict[version_item] return render_template('version.html', request=request, session=session, fhdhr=self.fhdhr, version_dict=sorted_version_dict, list=list)
py
1a4094ed2ce90a89a15b9d775e3a84cb49e1357a
# Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from transformers import is_torch_available from transformers.file_utils import cached_property from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device from .test_modeling_common import ModelTesterMixin if is_torch_available(): import torch from transformers import ( AutoModelForSeq2SeqLM, AutoTokenizer, BatchEncoding, MBartConfig, MBartForConditionalGeneration, ) EN_CODE = 250004 RO_CODE = 250020 @require_torch class ModelTester: def __init__(self, parent): self.config = MBartConfig( vocab_size=99, d_model=24, encoder_layers=2, decoder_layers=2, encoder_attention_heads=2, decoder_attention_heads=2, encoder_ffn_dim=32, decoder_ffn_dim=32, max_position_embeddings=48, add_final_layer_norm=True, ) def prepare_config_and_inputs_for_common(self): return self.config, {} @require_torch class SelectiveCommonTest(unittest.TestCase): all_model_classes = (MBartForConditionalGeneration,) if is_torch_available() else () test_save_load__keys_to_ignore_on_save = ModelTesterMixin.test_save_load__keys_to_ignore_on_save def setUp(self): self.model_tester = ModelTester(self) @require_torch @require_sentencepiece @require_tokenizers class AbstractSeq2SeqIntegrationTest(unittest.TestCase): maxDiff = 1000 # longer string compare tracebacks checkpoint_name = None @classmethod def setUpClass(cls): cls.tokenizer = AutoTokenizer.from_pretrained(cls.checkpoint_name, use_fast=False) return cls @cached_property def model(self): """Only load the model if needed.""" model = AutoModelForSeq2SeqLM.from_pretrained(self.checkpoint_name).to(torch_device) if "cuda" in torch_device: model = model.half() return model @require_torch @require_sentencepiece @require_tokenizers class MBartEnroIntegrationTest(AbstractSeq2SeqIntegrationTest): checkpoint_name = "facebook/mbart-large-en-ro" src_text = [ " UN Chief Says There Is No Military Solution in Syria", """ Secretary-General Ban Ki-moon says his response to Russia's stepped up military support for Syria is that "there is no military solution" to the nearly five-year conflict and more weapons will only worsen the violence and misery for millions of people.""", ] tgt_text = [ "Şeful ONU declară că nu există o soluţie militară în Siria", 'Secretarul General Ban Ki-moon declară că răspunsul său la intensificarea sprijinului militar al Rusiei pentru Siria este că "nu există o soluţie militară" la conflictul de aproape cinci ani şi că noi arme nu vor face decât să înrăutăţească violenţa şi mizeria pentru milioane de oameni.', ] expected_src_tokens = [8274, 127873, 25916, 7, 8622, 2071, 438, 67485, 53, 187895, 23, 51712, 2, EN_CODE] @slow def test_enro_generate_one(self): batch: BatchEncoding = self.tokenizer.prepare_seq2seq_batch( ["UN Chief Says There Is No Military Solution in Syria"], return_tensors="pt" ).to(torch_device) translated_tokens = self.model.generate(**batch) decoded = 
self.tokenizer.batch_decode(translated_tokens, skip_special_tokens=True) self.assertEqual(self.tgt_text[0], decoded[0]) # self.assertEqual(self.tgt_text[1], decoded[1]) @slow def test_enro_generate_batch(self): batch: BatchEncoding = self.tokenizer.prepare_seq2seq_batch(self.src_text, return_tensors="pt").to( torch_device ) translated_tokens = self.model.generate(**batch) decoded = self.tokenizer.batch_decode(translated_tokens, skip_special_tokens=True) assert self.tgt_text == decoded def test_mbart_enro_config(self): mbart_models = ["facebook/mbart-large-en-ro"] expected = {"scale_embedding": True, "output_past": True} for name in mbart_models: config = MBartConfig.from_pretrained(name) self.assertTrue(config.is_valid_mbart()) for k, v in expected.items(): try: self.assertEqual(v, getattr(config, k)) except AssertionError as e: e.args += (name, k) raise def test_mbart_fast_forward(self): config = MBartConfig( vocab_size=99, d_model=24, encoder_layers=2, decoder_layers=2, encoder_attention_heads=2, decoder_attention_heads=2, encoder_ffn_dim=32, decoder_ffn_dim=32, max_position_embeddings=48, add_final_layer_norm=True, ) lm_model = MBartForConditionalGeneration(config).to(torch_device) context = torch.Tensor([[71, 82, 18, 33, 46, 91, 2], [68, 34, 26, 58, 30, 2, 1]]).long().to(torch_device) summary = torch.Tensor([[82, 71, 82, 18, 2], [58, 68, 2, 1, 1]]).long().to(torch_device) result = lm_model(input_ids=context, decoder_input_ids=summary, labels=summary) expected_shape = (*summary.shape, config.vocab_size) self.assertEqual(result.logits.shape, expected_shape) @require_torch @require_sentencepiece @require_tokenizers class MBartCC25IntegrationTest(AbstractSeq2SeqIntegrationTest): checkpoint_name = "facebook/mbart-large-cc25" src_text = [ " UN Chief Says There Is No Military Solution in Syria", " I ate lunch twice yesterday", ] tgt_text = ["Şeful ONU declară că nu există o soluţie militară în Siria", "to be padded"] @unittest.skip("This test is broken, still generates english") def test_cc25_generate(self): inputs = self.tokenizer.prepare_seq2seq_batch([self.src_text[0]], return_tensors="pt").to(torch_device) translated_tokens = self.model.generate( input_ids=inputs["input_ids"].to(torch_device), decoder_start_token_id=self.tokenizer.lang_code_to_id["ro_RO"], ) decoded = self.tokenizer.batch_decode(translated_tokens, skip_special_tokens=True) self.assertEqual(self.tgt_text[0], decoded[0]) @slow def test_fill_mask(self): inputs = self.tokenizer.prepare_seq2seq_batch(["One of the best <mask> I ever read!"], return_tensors="pt").to( torch_device ) outputs = self.model.generate( inputs["input_ids"], decoder_start_token_id=self.tokenizer.lang_code_to_id["en_XX"], num_beams=1 ) prediction: str = self.tokenizer.batch_decode( outputs, clean_up_tokenization_spaces=True, skip_special_tokens=True )[0] self.assertEqual(prediction, "of the best books I ever read!")
py
1a40958b6ba4475ce2c8329d78e3daadbf9d4fce
print("") print('ERROR: stitck_wrapper not yet compiled. Please run:') print('cd KittiBox/submodules/utils && make')
py
1a4095a3fde0e39cd487913187f40570a66599df
# coding=utf-8 from common import errcode from dao.ask.ask_answer_reply_dao import AskAnswerReplyDao from dao.ask.ask_answer_reply_like_dao import AskAnswerReplyLikeDao from handlers.base.base_handler import BaseHandler class LikeAnswerReplyHandler(BaseHandler): methods = ['POST'] def __init__(self): expect_request_para = { "ask_id": None, "answer_id": None, "reply_id": None, "common_param": {}, } need_para = ( "ask_id", "answer_id", "reply_id", "common_param", ) super(LikeAnswerReplyHandler, self).__init__(expect_request_para, need_para) def _process_imp(self): # user xxx likes reply xxx under answer xxx ret = AskAnswerReplyLikeDao.insert(self.para_map["ask_id"], self.para_map["answer_id"], self.para_map["reply_id"], self.uid) # reply like count + 1 if ret: AskAnswerReplyDao.update({ "id": self.para_map["reply_id"], "like_num": (1, True), }) self.ret_code = errcode.NO_ERROR self.ret_msg = 'ok' return
py
1a4096880701e7088f61b7c5a8871f7bbc66611e
"""Date based tools""" import calendar import datetime from dateutil import parser as date_parser import datetime as dt import calendar MONTHS = { "jan": 1, "jan.": 1, "january": 1, "feb": 2, "feb.": 2, "february": 2, "mar": 3, "mar.": 3, "march": 3, "apr": 4, "apr.": 4, "april": 4, "may": 5, "may.": 5, "jun": 6, "jun.": 6, "june": 6, "jul": 7, "jul.": 7, "july": 7, "aug": 8, "aug.": 8, "august": 8, "sep": 9, "sep.": 9, "sept": 9, "sept.": 9, "september": 9, "oct": 10, "oct.": 10, "october": 10, "nov": 11, "nov.": 11, "november": 11, "dec": 12, "dec.": 12, "december": 12, "": 1, "tbd": 1 } def month_to_int(m): """Converts a month to an integer.""" try: m = int(m) except ValueError: m = MONTHS[m.lower()] return m def month_to_str_int(m): """Converts a month to an int form, str type, with a leading zero""" mi = month_to_int(m) if mi < 10: ms = "0{}".format(mi) else: ms = str(mi) return ms def day_to_str_int(d): """Converts a day to an int form, str type, with a leading zero""" if d < 10: ds = "0{}".format(d) else: ds = str(d) return ds def date_to_float(y, m, d=0): """Converts years / months / days to a float, eg 2015.0818 is August 18th 2015. """ y = int(y) m = month_to_int(m) d = int(d) return y + (m / 100.0) + (d / 10000.0) def find_gaps_overlaps(dateslist, overlaps_ok=False): ''' Find whether there is a gap or an overlap in a list of date-ranges Parameters ---------- dateslist: list of tuples of datetime.date objects The list of date-ranges. overlaps_ok: bool Returns false if there are gaps but true if there are overlaps but no gaps Returns ------- True if there are no gaps or overlaps else False ''' status = True dateslist.sort(key=lambda x: x[0]) for i in range(len(dateslist) - 1): if dateslist[i + 1][0] <= dateslist[i][1] and not overlaps_ok: status = False elif (dateslist[i + 1][0] - dateslist[i][1]).days > 1: status = False return status def last_day(year, month): """ Returns the last day of the month for the month given Parameters ---------- year: integer the year that the month is in month: integer or string the month. if a string should be resolvable using regolith month_to_int Returns ------- The last day of that month """ return calendar.monthrange(year, month_to_int(month))[1] def get_dates(thing): ''' given a dict like thing, return the items Parameters ---------- thing: dict the dict that contains the dates Returns ------- dict containing datetime.date objects for begin_date end_date and date Description ----------- If "begin_date", "end_date" or "date" values are found, if these are are in an ISO format string they will be converted to datetime.date objects and returned in the dictionary under keys of the same name. A specified date will override any date built from year/month/day data. If they are not found the function will look for begin_year, end_year and year. If "year", "month" and "day" are found the function will return these in the "date" field and begin_date and end_date will be None If year is found but no month or day are found the function will return begin_date and end_date with the beginning and the end of the given year/month. The returned date will be None. 
If end_year is found, the end month and end day are missing they are set to 12 and 31, respectively If begin_year is found, the begin month and begin day are missing they are set to 1 and 1, respectively ''' if thing.get("end_year") and not thing.get("begin_year"): print('WARNING: end_year specified without begin_year') begin_date, end_date, date = None, None, None if thing.get('begin_year'): if not thing.get('begin_month'): thing['begin_month'] = 1 if not thing.get('begin_day'): thing['begin_day'] = 1 begin_date = datetime.date(thing['begin_year'],month_to_int(thing['begin_month']), thing['begin_day']) if thing.get('end_year'): if not thing.get('end_month'): thing['end_month'] = 12 if not thing.get('end_day'): thing['end_day'] = last_day(thing['end_year'], thing['end_month']) end_date = datetime.date(thing['end_year'],month_to_int(thing['end_month']), thing['end_day']) if thing.get('year'): if not thing.get('month'): if thing.get('begin_year'): print("WARNING: both year and begin_year specified. Year info will be used") begin_date = datetime.date(thing['year'],1,1) end_date = datetime.date(thing['year'],12,31) elif not thing.get('day'): if thing.get('begin_year'): print("WARNING: both year and begin_year specified. Year info will be used") begin_date = datetime.date(thing['year'],month_to_int(thing['month']), 1) end_date = datetime.date(thing['year'], month_to_int(thing['month']), last_day(thing['year'], thing['month'])) else: date = datetime.date(thing['year'], month_to_int(thing['month']), thing['day']) begin_date = datetime.date(thing['year'], month_to_int(thing['month']), thing['day']) end_date = datetime.date(thing['year'], month_to_int(thing['month']), thing['day']) if thing.get('begin_date'): if isinstance(thing.get('begin_date'), str): begin_date = date_parser.parse(thing.get('begin_date')).date() else: begin_date = thing.get('begin_date') if thing.get('end_date'): if isinstance(thing.get('end_date'), str): end_date = date_parser.parse(thing.get('end_date')).date() else: end_date = thing.get('end_date') if thing.get('date'): if isinstance(thing.get('date'), str): date = date_parser.parse(thing.get('date')).date() else: date = thing.get('date') dates = {'begin_date': begin_date, 'end_date': end_date, 'date': date} return dates def get_due_date(thing): """ Parameters ---------- thing: dict gets the field named 'due_date' from doc and ensurese it is a datetime.date object Returns ------- The due date as a datetime.date object """ due_date = thing.get('due_date') if isinstance(due_date, str): due_date = date_parser.parse(due_date).date() elif isinstance(due_date, datetime.date): pass else: raise RuntimeError(f'due date not a known type') return due_date def is_current(thing, now=None): """ given a thing with dates, returns true if the thing is current looks for begin_ and end_ daty things (date, year, month, day), or just the daty things themselves. e.g., begin_date, end_month, month, and so on. Parameters ---------- thing: dict the thing that we want to know whether or not it is current now: datetime.date object a date for now. If it is None it uses the current date. 
Default is None Returns ------- True if the thing is current and false otherwise """ if not now: now = datetime.date.today() dates = get_dates(thing) current = False if not dates.get("end_date"): dates["end_date"] = datetime.date(5000, 12, 31) try: if dates.get("begin_date") <= now <= dates.get("end_date"): current = True except: raise RuntimeError(f"Cannot find begin_date in document:\n {thing}") return current def has_started(thing, now=None): """ given a thing with dates, returns true if the thing has started Parameters ---------- thing: dict the thing that we want to know whether or not it is has started now: datetime.date object a date for now. If it is None it uses the current date. Default is None Returns ------- True if the thing has started and false otherwise """ if not now: now = datetime.date.today() dates = get_dates(thing) started = False try: if dates.get("begin_date") <= now: started = True except: raise RuntimeError(f"Cannot find begin_date in document:\n {thing}") return started def has_finished(thing, now=None): """ given a thing with dates, returns true if the thing has finished Parameters ---------- thing: dict the thing that we want to know whether or not it has finished now: datetime.date object a date for now. If it is None it uses the current date. Default is None Returns ------- True if the thing has finished and false otherwise """ if not now: now = datetime.date.today() dates = get_dates(thing) finished = False if not dates.get("end_date"): dates["end_date"] = datetime.date(5000, 12, 31) if dates.get("end_date") < now: finished = True return finished def is_before(thing, now=None): """ given a thing with a date, returns true if the thing is before the input date Parameters ---------- thing: dict the thing that we want to know whether or not is before a date now: datetime.date object a date for now. If it is None it uses the current date. Default is None Returns ------- True if the thing is before the date """ if not now: now = datetime.date.today() dates = get_dates(thing) before = False try: if dates.get("date") < now: before = True except: raise RuntimeError(f"Cannot find date in document:\n {thing}") return before def is_after(thing, now=None): """ given a thing with a date, returns true if the thing is after the input date Parameters ---------- thing: dict the thing that we want to know whether or not is after a date now: datetime.date object a date for now. If it is None it uses the current date. Default is None Returns ------- True if the thing is after the date """ if not now: now = datetime.date.today() dates = get_dates(thing) after = False try: if now < dates.get('date'): after = True except: raise RuntimeError(f"Cannot find date in document:\n {thing}") return after def is_between(thing, start=None, end=None): """ given a thing with a date, returns true if the thing is between the start and end date Parameters ---------- thing: dict the thing that we want to know whether or not is after a date start: datetime.date object a date for the start. If it is None it uses the current date. Default is None end: datetime.date object a date for the end. If it is None it uses the current date. Default is None Returns ------- True if the thing is between the start and end """ if not start: start = datetime.date.today() if not end: end = datetime.date.today() between = False if is_after(thing, start) and is_before(thing, end): between = True return between
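A short usage sketch of the date helpers above, assuming the file is importable (here as dates.py, an assumed module name). The dictionaries are made-up examples chosen to show how missing begin/end months and days are defaulted.

from datetime import date
from dates import get_dates, is_current, last_day   # assumes the module above is saved as dates.py

grant = {"begin_year": 2019, "begin_month": "Jun", "end_year": 2021}
d = get_dates(grant)
print(d["begin_date"])   # 2019-06-01  (missing begin_day defaults to 1)
print(d["end_date"])     # 2021-12-31  (missing end month/day default to 12 and the last day)

print(last_day(2020, "Feb"))                       # 29 (leap year)
print(is_current(grant, now=date(2020, 7, 4)))     # True
print(is_current(grant, now=date(2022, 1, 1)))     # False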
py
1a4098247092159ec9d7307b17c89d3ee773f0bd
# Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Define the PINNs network for the Schrodinger equation.""" import numpy as np from mindspore import Parameter, Tensor, nn, ops from mindspore.common.initializer import TruncatedNormal, Zero, initializer from mindspore.ops import constexpr import mindspore.common.dtype as mstype @constexpr def _generate_ones(batch_size): arr = np.ones((batch_size, 1), np.float32) return Tensor(arr, mstype.float32) @constexpr def _generate_zeros(batch_size): arr = np.zeros((batch_size, 1), np.float32) return Tensor(arr, mstype.float32) class neural_net(nn.Cell): """ Neural net to fit the wave function Args: layers (int): num of neurons for each layer lb (np.array): lower bound (x, t) of domain ub (np.array): upper bound (x, t) of domain """ def __init__(self, layers, lb, ub): super(neural_net, self).__init__() self.layers = layers self.concat = ops.Concat(axis=1) self.lb = Tensor(lb, mstype.float32) self.ub = Tensor(ub, mstype.float32) self.tanh = ops.Tanh() self.add = ops.Add() self.matmul = ops.MatMul() self.w0 = self._init_weight_xavier(0) self.b0 = self._init_biase(0) self.w1 = self._init_weight_xavier(1) self.b1 = self._init_biase(1) self.w2 = self._init_weight_xavier(2) self.b2 = self._init_biase(2) self.w3 = self._init_weight_xavier(3) self.b3 = self._init_biase(3) self.w4 = self._init_weight_xavier(4) self.b4 = self._init_biase(4) def construct(self, x, t): """forward propagation""" X = self.concat((x, t)) X = 2.0*(X - self.lb)/(self.ub - self.lb) - 1.0 X = self.tanh(self.add(self.matmul(X, self.w0), self.b0)) X = self.tanh(self.add(self.matmul(X, self.w1), self.b1)) X = self.tanh(self.add(self.matmul(X, self.w2), self.b2)) X = self.tanh(self.add(self.matmul(X, self.w3), self.b3)) X = self.add(self.matmul(X, self.w4), self.b4) return X[:, 0:1], X[:, 1:2] def _init_weight_xavier(self, layer): """ Initialize weight for the ith layer """ in_dim = self.layers[layer] out_dim = self.layers[layer+1] std = np.sqrt(2/(in_dim + out_dim)) name = 'w' + str(layer) return Parameter(default_input=initializer(TruncatedNormal(std), [in_dim, out_dim], mstype.float32), name=name, requires_grad=True) def _init_biase(self, layer): """ Initialize biase for the ith layer """ name = 'b' + str(layer) return Parameter(default_input=initializer(Zero(), self.layers[layer+1], mstype.float32), name=name, requires_grad=True) class Grad_1(nn.Cell): """ Using the first output to compute gradient. """ def __init__(self, net): super(Grad_1, self).__init__() self.net = net self.grad = ops.GradOperation(get_all=True, sens_param=True) def construct(self, x, t): sens_1 = _generate_ones(x.shape[0]) sens_2 = _generate_zeros(x.shape[0]) return self.grad(self.net)(x, t, (sens_1, sens_2)) class Grad_2(nn.Cell): """ Using the second output to compute gradient. 
""" def __init__(self, net): super(Grad_2, self).__init__() self.net = net self.grad = ops.GradOperation(get_all=True, sens_param=True) def construct(self, x, t): sens_1 = _generate_zeros(x.shape[0]) sens_2 = _generate_ones(x.shape[0]) return self.grad(self.net)(x, t, (sens_1, sens_2)) class PINNs(nn.Cell): """ PINNs for the Schrodinger equation. """ def __init__(self, layers, lb, ub): super(PINNs, self).__init__() self.nn = neural_net(layers, lb, ub) self.du = Grad_1(self.nn) self.dv = Grad_2(self.nn) self.dux = Grad_1(self.du) self.dvx = Grad_1(self.dv) self.add = ops.Add() self.pow = ops.Pow() self.mul = ops.Mul() def construct(self, X): """forward propagation""" x = X[:, 0:1] t = X[:, 1:2] u, v = self.nn(x, t) ux, ut = self.du(x, t) vx, vt = self.dv(x, t) uxx, _ = self.dux(x, t) vxx, _ = self.dvx(x, t) square_sum = self.add(self.pow(u, 2), self.pow(v, 2)) fu1 = self.mul(vxx, 0.5) fu2 = self.mul(square_sum, v) fu = self.add(self.add(ut, fu1), fu2) fv1 = self.mul(uxx, -0.5) fv2 = self.mul(square_sum, u) fv2 = self.mul(fv2, -1.0) fv = self.add(self.add(vt, fv1), fv2) return u, v, ux, vx, fu, fv
py
1a40989282806f4b158838e07a7fcf366762a458
from django.urls import path, include from rest_framework.routers import DefaultRouter from recipe import views router = DefaultRouter() router.register('tags', views.TagViewSet) router.register('ingredients', views.IngredientViewSet) router.register('recipe', views.RecipeViewSet) app_name='recipe' urlpatterns = [ path('', include(router.urls)) ]
py
1a40993e05612895a50adcf15b240802bada0769
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): """Run administrative tasks.""" os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'btc_exchange_rate.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
py
1a409aef610a2837ab54e9b922a7e247d2559d9e
#!/usr/bin/env python # # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify expected behavior when an implicit dependency is modified asynchronously (that is, mid-build and without our knowledge). Test case courtesy Greg Noel. """ import TestSCons _python_ = TestSCons._python_ test = TestSCons.TestSCons() test.write(['SConstruct'], """\ import SCons.Defaults DefaultEnvironment(tools=[]) env = Environment(tools=[]) env['BUILDERS']['C'] = Builder(action = Copy('$TARGET', '$SOURCE'), source_scanner = SCons.Defaults.CScan) env['BUILDERS']['Mod'] = Builder(action = r'%(_python_)s mod.py') Alias('seq', env.C('one.c')) Alias('seq', env.Mod('mod', 'mod.py')) Alias('seq', env.C('two.c')) Default('seq') """ % locals()) test.write(['hdr.h'], """\ /* empty header */ """) test.write(['mod.py'], """\ with open('mod', 'w') as f, open('mod.py', 'r') as ifp: f.write(ifp.read()) with open('hdr.h', 'w') as f: f.write("/* modified */\\n") """) test.write(['one.c'], """\ #include "hdr.h" """) test.write(['two.c'], """\ #include "hdr.h" """) # The first run builds the file 'one', then runs the 'mod' script # (which update modifies the 'hdr.h' file) then builds the file 'two'. test.run(arguments = 'seq') # The 'hdr.h' file had its original contents when 'one' was built, # and modified contents when 'two' was built. Because we took a # look at 'hdr.h' once, up front, we think both files are out of # date and will rebuild both (even though 'two' is really up to date). # # A future enhancement might add some sort of verification mode that # would examine 'hdr.h' again when 'two' was built, thereby avoiding # the unnecessary rebuild. In that case, the second line below # will need to change to "test.up_to_date(...)". test.not_up_to_date(arguments = 'one') test.not_up_to_date(arguments = 'two') # Regardless of what happened on the middle run(s), both files should # be up to date now. test.up_to_date(arguments = 'seq') test.pass_test() # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
py
1a409d0da25fa3c3cf12eee64ac5bdcadf5896b6
#!/usr/bin/env python from setuptools import setup setup(name='tap-quaderno', version='0.1.0', description='Singer.io tap for extracting data from the Quaderno API', author='[email protected]', classifiers=[ 'Programming Language :: Python :: 3 :: Only' ], py_modules=['tap_quaderno'], install_requires=[ 'backoff==1.8.0', 'requests==2.22.0', 'singer-python==5.8.0' ], entry_points=''' [console_scripts] tap-quaderno=tap_quaderno:main ''', packages=['tap_quaderno'], package_data={ 'tap_quaderno': ['schemas/*.json'], } )
py
1a409dbdf0a711d46a3f10e7e122467efd08896e
# Copyright 2019 Contributors to Hyperledger Sawtooth # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ----------------------------------------------------------------------------- """Imports Role test""" # pylint: disable=no-member import pytest from rbac.common.role import Role from rbac.common.user import User from rbac.common import protobuf from rbac.common.logs import get_default_logger from tests.rbac.common import helper LOGGER = get_default_logger(__name__) @pytest.mark.library @pytest.mark.role @pytest.mark.imports_role def test_make(): """Test making a message""" name = helper.role.name() role_id = helper.role.id() next_id = helper.user.id() message = Role().imports.make( role_id=role_id, name=name, owners=[next_id], admins=[next_id] ) assert isinstance(message, protobuf.role_transaction_pb2.ImportsRole) assert isinstance(message.role_id, str) assert isinstance(message.name, str) assert message.role_id == role_id assert message.name == name assert message.owners == [next_id] assert message.admins == [next_id] @pytest.mark.library @pytest.mark.role @pytest.mark.imports_role def test_make_addresses(): """Test the make addresses method for the message""" name = helper.role.name() role_id = helper.role.id() role_address = Role().address(role_id) next_id = helper.user.id() user_address = User().address(next_id) signer_user_id = helper.user.id() owner_address = Role().owner.address(role_id, next_id) admin_address = Role().admin.address(role_id, next_id) message = Role().imports.make( role_id=role_id, name=name, owners=[next_id], admins=[next_id] ) inputs, outputs = Role().imports.make_addresses( message=message, signer_user_id=signer_user_id ) assert role_address in inputs assert user_address in inputs assert owner_address in inputs assert admin_address in inputs assert role_address in outputs assert user_address in outputs assert owner_address in outputs assert admin_address in outputs @pytest.mark.role @pytest.mark.imports_role def test_create(): """Test importing a role""" user, keypair = helper.user.create() name = helper.role.name() role_id = helper.role.id() status = Role().imports.new( signer_keypair=keypair, signer_user_id=user.next_id, role_id=role_id, name=name, owners=[user.next_id], admins=[user.next_id], members=[user.next_id], ) assert len(status) == 1 assert status[0]["status"] == "COMMITTED" role = Role().get(object_id=role_id) assert role.role_id == role_id assert role.name == name assert Role().owner.exists(object_id=role.role_id, related_id=user.next_id) assert Role().admin.exists(object_id=role.role_id, related_id=user.next_id) assert Role().member.exists(object_id=role.role_id, related_id=user.next_id)
py
1a409e438286f5829c61c7d81be3ef5e763ddcdb
# Copyright 2020 Energinet DataHub A/S # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
py
1a409e9f5b904e214afcebc16adaed692b1a74d8
#!/usr/bin/env python ############################################################################# ## ## Copyright (C) 2013 Riverbank Computing Limited. ## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). ## All rights reserved. ## ## This file is part of the examples of PyQt. ## ## $QT_BEGIN_LICENSE:BSD$ ## You may use this file under the terms of the BSD license as follows: ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are ## met: ## * Redistributions of source code must retain the above copyright ## notice, this list of conditions and the following disclaimer. ## * Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in ## the documentation and/or other materials provided with the ## distribution. ## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor ## the names of its contributors may be used to endorse or promote ## products derived from this software without specific prior written ## permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT ## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, ## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT ## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, ## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY ## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT ## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE ## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ## $QT_END_LICENSE$ ## ############################################################################# from PySide2.QtCore import QDataStream, QSettings, QTimer from PySide2.QtGui import QIntValidator from PySide2.QtWidgets import (QApplication, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QLabel, QLineEdit, QMessageBox, QPushButton) from PySide2.QtNetwork import (QAbstractSocket, QHostInfo, QNetworkConfiguration, QNetworkConfigurationManager, QNetworkInterface, QNetworkSession, QTcpSocket) class Client(QDialog): def __init__(self, parent=None): super(Client, self).__init__(parent) self.networkSession = None self.blockSize = 0 self.currentFortune = '' hostLabel = QLabel("&Server name:") portLabel = QLabel("S&erver port:") self.hostCombo = QComboBox() self.hostCombo.setEditable(True) name = QHostInfo.localHostName() if name != '': self.hostCombo.addItem(name) domain = QHostInfo.localDomainName() if domain != '': self.hostCombo.addItem(name + '.' 
+ domain) if name != 'localhost': self.hostCombo.addItem('localhost') ipAddressesList = QNetworkInterface.allAddresses() for ipAddress in ipAddressesList: if not ipAddress.isLoopback(): self.hostCombo.addItem(ipAddress.toString()) for ipAddress in ipAddressesList: if ipAddress.isLoopback(): self.hostCombo.addItem(ipAddress.toString()) self.portLineEdit = QLineEdit() self.portLineEdit.setValidator(QIntValidator(1, 65535, self)) hostLabel.setBuddy(self.hostCombo) portLabel.setBuddy(self.portLineEdit) self.statusLabel = QLabel("This examples requires that you run " "the Fortune Server example as well.") self.getFortuneButton = QPushButton("Get Fortune") self.getFortuneButton.setDefault(True) self.getFortuneButton.setEnabled(False) quitButton = QPushButton("Quit") buttonBox = QDialogButtonBox() buttonBox.addButton(self.getFortuneButton, QDialogButtonBox.ActionRole) buttonBox.addButton(quitButton, QDialogButtonBox.RejectRole) self.tcpSocket = QTcpSocket(self) self.hostCombo.editTextChanged.connect(self.enableGetFortuneButton) self.portLineEdit.textChanged.connect(self.enableGetFortuneButton) self.getFortuneButton.clicked.connect(self.requestNewFortune) quitButton.clicked.connect(self.close) self.tcpSocket.readyRead.connect(self.readFortune) self.tcpSocket.error.connect(self.displayError) mainLayout = QGridLayout() mainLayout.addWidget(hostLabel, 0, 0) mainLayout.addWidget(self.hostCombo, 0, 1) mainLayout.addWidget(portLabel, 1, 0) mainLayout.addWidget(self.portLineEdit, 1, 1) mainLayout.addWidget(self.statusLabel, 2, 0, 1, 2) mainLayout.addWidget(buttonBox, 3, 0, 1, 2) self.setLayout(mainLayout) self.setWindowTitle("Fortune Client") self.portLineEdit.setFocus() manager = QNetworkConfigurationManager() if manager.capabilities() & QNetworkConfigurationManager.NetworkSessionRequired: settings = QSettings(QSettings.UserScope, 'QtProject') settings.beginGroup('QtNetwork') id = settings.value('DefaultNetworkConfiguration') settings.endGroup() config = manager.configurationFromIdentifier(id) if config.state() & QNetworkConfiguration.Discovered == 0: config = manager.defaultConfiguration() self.networkSession = QNetworkSession(config, self) self.networkSession.opened.connect(self.sessionOpened) self.getFortuneButton.setEnabled(False) self.statusLabel.setText("Opening network session.") self.networkSession.open() def requestNewFortune(self): self.getFortuneButton.setEnabled(False) self.blockSize = 0 self.tcpSocket.abort() self.tcpSocket.connectToHost(self.hostCombo.currentText(), int(self.portLineEdit.text())) def readFortune(self): instr = QDataStream(self.tcpSocket) instr.setVersion(QDataStream.Qt_4_0) if self.blockSize == 0: if self.tcpSocket.bytesAvailable() < 2: return self.blockSize = instr.readUInt16() if self.tcpSocket.bytesAvailable() < self.blockSize: return nextFortune = instr.readQString() if nextFortune == self.currentFortune: QTimer.singleShot(0, self.requestNewFortune) return self.currentFortune = nextFortune self.statusLabel.setText(self.currentFortune) self.getFortuneButton.setEnabled(True) def displayError(self, socketError): if socketError == QAbstractSocket.RemoteHostClosedError: pass elif socketError == QAbstractSocket.HostNotFoundError: QMessageBox.information(self, "Fortune Client", "The host was not found. Please check the host name and " "port settings.") elif socketError == QAbstractSocket.ConnectionRefusedError: QMessageBox.information(self, "Fortune Client", "The connection was refused by the peer. 
Make sure the " "fortune server is running, and check that the host name " "and port settings are correct.") else: QMessageBox.information(self, "Fortune Client", "The following error occurred: %s." % self.tcpSocket.errorString()) self.getFortuneButton.setEnabled(True) def enableGetFortuneButton(self): self.getFortuneButton.setEnabled( (self.networkSession is None or self.networkSession.isOpen()) and self.hostCombo.currentText() != '' and self.portLineEdit.text() != '') def sessionOpened(self): config = self.networkSession.configuration() if config.type() == QNetworkConfiguration.UserChoice: id = self.networkSession.sessionProperty('UserChoiceConfiguration') else: id = config.identifier() settings = QSettings(QSettings.UserScope, 'QtProject') settings.beginGroup('QtNetwork') settings.setValue('DefaultNetworkConfiguration', id) settings.endGroup() self.statusLabel.setText("This examples requires that you run the " "Fortune Server example as well.") self.enableGetFortuneButton() if __name__ == '__main__': import sys app = QApplication(sys.argv) client = Client() client.show() sys.exit(client.exec_())
py
1a409e9f6d6e299efb1e9b38728bb06f1a7f66a4
#!/usr/bin/python # # Python library for reading and writing Windows shortcut files (.lnk) # Copyright 2011 Tim-Christian Mundt # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 3 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, see # <http://www.gnu.org/licenses/>. # # hardly cannibalized from https://sourceforge.net/p/pylnk/home/Home/ # not as clean as i wished # cannibal: @theguly from __future__ import print_function from future import standard_library standard_library.install_aliases() from builtins import chr from builtins import str from builtins import range from builtins import object import sys, os, time, re from struct import pack, unpack from pprint import pformat,PrettyPrinter from datetime import datetime from io import StringIO pp = PrettyPrinter(indent=4) #---- constants _SIGNATURE = 'L\x00\x00\x00' _GUID = '\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00F' _LINK_INFO_HEADER_DEFAULT = 0x1C _LINK_INFO_HEADER_OPTIONAL = 0x24 _LINK_FLAGS = ('has_shell_item_id_list', 'has_link_info', 'has_description', 'has_relative_path', 'has_work_directory', 'has_arguments', 'has_icon', 'is_unicode', 'force_no_link_info') _FILE_ATTRIBUTES_FLAGS = ('read_only', 'hidden', 'system_file', 'reserved1', 'directory', 'archive', 'reserved2', 'normal', 'temporary', 'sparse_file', 'reparse_point', 'compressed', 'offline', 'not_content_indexed', 'encrypted') _MODIFIER_KEYS = ('SHIFT', 'CONTROL', 'ALT') WINDOW_NORMAL = "Normal" WINDOW_MAXIMIZED = "Maximized" WINDOW_MINIMIZED = "Minimized" _SHOW_COMMANDS = {1:WINDOW_NORMAL, 3:WINDOW_MAXIMIZED, 7:WINDOW_MINIMIZED} _SHOW_COMMAND_IDS = dict((v, k) for k, v in _SHOW_COMMANDS.items()) DRIVE_UNKNOWN = "Unknown" DRIVE_NO_ROOT_DIR = "No root directory" DRIVE_REMOVABLE = "Removable" DRIVE_FIXED = "Fixed (Hard disk)" DRIVE_REMOTE = "Remote (Network drive)" DRIVE_CDROM = "CD-ROM" DRIVE_RAMDISK = "Ram disk" _DRIVE_TYPES = {0: DRIVE_UNKNOWN, 1: DRIVE_NO_ROOT_DIR, 2: DRIVE_REMOVABLE, 3: DRIVE_FIXED, 4: DRIVE_REMOTE, 5: DRIVE_CDROM, 6: DRIVE_RAMDISK} _DRIVE_TYPE_IDS = dict((v, k) for k, v in _DRIVE_TYPES.items()) _KEYS = {0x30: '0', 0x31: '1', 0x32: '2', 0x33: '3', 0x34: '4', 0x35: '5', 0x36: '6', 0x37: '7', 0x38: '8', 0x39: '9', 0x41: 'A', 0x42: 'B', 0x43: 'C', 0x44: 'D', 0x45: 'E', 0x46: 'F', 0x47: 'G', 0x48: 'H', 0x49: 'I', 0x4A: 'J', 0x4B: 'K', 0x4C: 'L', 0x4D: 'M', 0x4E: 'N', 0x4F: 'O', 0x50: 'P', 0x51: 'Q', 0x52: 'R', 0x53: 'S', 0x54: 'T', 0x55: 'U', 0x56: 'V', 0x57: 'W', 0x58: 'X', 0x59: 'Y', 0x5A: 'Z', 0x70: 'F1', 0x71: 'F2', 0x72: 'F3', 0x73: 'F4', 0x74: 'F5', 0x75: 'F6', 0x76: 'F7', 0x77: 'F8', 0x78: 'F9', 0x79: 'F10', 0x7A: 'F11', 0x7B: 'F12', 0x7C: 'F13', 0x7D: 'F14', 0x7E: 'F15', 0x7F: 'F16', 0x80: 'F17', 0x81: 'F18', 0x82: 'F19', 0x83: 'F20', 0x84: 'F21', 0x85: 'F22', 0x86: 'F23', 0x87: 'F24', 0x90: 'NUM LOCK', 0x91: 'SCROLL LOCK'} _KEY_CODES = dict((v, k) for k, v in _KEYS.items()) ROOT_MY_COMPUTER = 'MY_COMPUTER' ROOT_MY_DOCUMENTS = 'MY_DOCUMENTS' ROOT_NETWORK_SHARE = 'NETWORK_SHARE' ROOT_NETWORK_SERVER = 'NETWORK_SERVER' 
ROOT_NETWORK_PLACES = 'NETWORK_PLACES' ROOT_NETWORK_DOMAIN = 'NETWORK_DOMAIN' ROOT_INTERNET = 'INTERNET' ROOT_RECYLCE_BIN = 'RECYLCE_BIN' ROOT_CONTROL_PANEL = 'CONTROL_PANEL' _ROOT_LOCATIONS = {'{20D04FE0-3AEA-1069-A2D8-08002B30309D}': ROOT_MY_COMPUTER, '{450D8FBA-AD25-11D0-98A8-0800361B1103}': ROOT_MY_DOCUMENTS, '{54a754c0-4bf1-11d1-83ee-00a0c90dc849}': ROOT_NETWORK_SHARE, '{c0542a90-4bf0-11d1-83ee-00a0c90dc849}': ROOT_NETWORK_SERVER, '{208D2C60-3AEA-1069-A2D7-08002B30309D}': ROOT_NETWORK_PLACES, '{46e06680-4bf0-11d1-83ee-00a0c90dc849}': ROOT_NETWORK_DOMAIN, '{871C5380-42A0-1069-A2EA-08002B30309D}': ROOT_INTERNET, '{645FF040-5081-101B-9F08-00AA002F954E}': ROOT_RECYLCE_BIN, '{21EC2020-3AEA-1069-A2DD-08002B30309D}': ROOT_CONTROL_PANEL} _ROOT_LOCATION_GUIDS = dict((v, k) for k, v in _ROOT_LOCATIONS.items()) TYPE_FOLDER = 'FOLDER' TYPE_FILE = 'FILE' _ENTRY_TYPES = {0x31: 'FOLDER', 0x32: 'FILE', 0x35: 'FOLDER (UNICODE)', 0x36: 'FILE (UNICODE)'} _ENTRY_TYPE_IDS = dict((v, k) for k, v in _ENTRY_TYPES.items()) _DRIVE_PATTERN = re.compile("(\w)[:/\\\\]*$") #---- read and write binary data def read_byte(buf): return unpack('<B', buf.read(1))[0] def read_short(buf): return unpack('<H', buf.read(2))[0] def read_int(buf): return unpack('<I', buf.read(4))[0] def read_double(buf): return unpack('<Q', buf.read(8))[0] def read_cunicode(buf): s = "" b = buf.read(2) while b!= '\x00\x00': s += b b = buf.read(2) return s.decode('utf-16-le') def read_cstring(buf, padding=False): s = "" b = buf.read(1) while b != '\x00': s += b b = buf.read(1) if padding and not len(s) % 2: buf.read(1) # make length + terminator even #TODO: encoding is not clear, unicode-escape has been necessary sometimes return s.decode('cp1252') def read_sized_string(buf, str=True): size = read_short(buf) if str: return buf.read(size*2).decode('utf-16-le') else: return buf.read(size) def get_bits(value, start, count, length=16): mask = 0 for i in range(count): mask = mask | 1 << i shift = length - start - count return value >> shift & mask def read_dos_datetime(buf): date = read_short(buf) time = read_short(buf) year = get_bits(date, 0, 7) + 1980 month = get_bits(date, 7, 4) day = get_bits(date, 11, 5) hour = get_bits(time, 0, 5) minute = get_bits(time, 5, 6) second = get_bits(time, 11, 5) return datetime(year, month, day, hour, minute, second) def write_byte(val, buf): buf.write(pack('<B', val)) def write_short(val, buf): buf.write(pack('<H', val)) def write_int(val, buf): buf.write(pack('<I', val)) def write_double(val, buf): buf.write(pack('<Q', val)) def write_cstring(val, buf, padding=False): #val = val.encode('unicode-escape').replace('\\\\', '\\') val = val.encode('cp1252') buf.write(val + '\x00') if padding and not len(val) % 2: buf.write('\x00') def write_cunicode(val, buf): uni = val.encode('utf-16-le') buf.write(uni + '\x00\x00') def write_sized_string(val, buf, str=True): size = len(val) write_short(size, buf) if str: buf.write(val.encode('utf-16-le')) else: buf.write(val) def ret_sized_string(val, str=True): size = len(val) ret = pack('<H', size) if str: ret += val.encode('utf-16-le') else: ret += val return ret def put_bits(bits, target, start, count, length=16): return target | bits << (length - start - count) def write_dos_datetime(val, buf): date = time = 0 date = put_bits(val.year-1980, date, 0, 7) date = put_bits(val.month, date, 7, 4) date = put_bits(val.day, date, 11, 5) time = put_bits(val.hour, time, 0, 5) time = put_bits(val.minute, time, 5, 6) time = put_bits(val.second, time, 11, 5) write_short(date, buf) 
write_short(time, buf) #---- helpers def convert_time_to_unix(windows_time): # Windows time is specified as the number of 0.1 nanoseconds since January 1, 1601. # UNIX time is specified as the number of seconds since January 1, 1970. # There are 134774 days (or 11644473600 seconds) between these dates. unix_time = windows_time / 10000000.0 - 11644473600 return datetime.fromtimestamp(unix_time) def convert_time_to_windows(unix_time): if isinstance(unix_time, datetime): unix_time = time.mktime(unix_time.timetuple()) return int((unix_time + 11644473600) * 10000000) class FormatException(Exception): pass class MissingInformationException(Exception): pass class InvalidKeyException(Exception): pass #---- data structures class Flags(object): def __init__(self, flag_names, flags_bytes=0): self._flag_names = flag_names self._flags = dict([(name, None) for name in flag_names]) self.set_flags(flags_bytes) def set_flags(self, flags_bytes): for pos in range(len(self._flag_names)): self._flags[self._flag_names[pos]] = flags_bytes >> pos & 0x1 and True or False def bytes(self): bytes = 0 for pos in range(len(self._flag_names)): bytes = (self._flags[self._flag_names[pos]] and 1 or 0) << pos | bytes return bytes bytes = property(bytes) def __getitem__(self, key): return object.__getattribute__(self, '_flags')[key] def __setitem__(self, key, value): if key not in self._flags: raise KeyError("The key '%s' is not defined for those flags." % key) self._flags[key] = value def __getattr__(self, key): return object.__getattribute__(self, '_flags')[key] def __setattr__(self, key, value): if '_flags' not in self.__dict__: object.__setattr__(self, key, value) elif key in self.__dict__: object.__setattr__(self, key, value) else: self.__setitem__(key, value) def __str__(self): return pformat(self._flags, indent=2) class ModifierKeys(Flags): def __init__(self, flags_bytes=0): Flags.__init__(self, _MODIFIER_KEYS, flags_bytes) def __str__(self): s = "" s += self.CONTROL and "CONTROL+" or "" s += self.SHIFT and "SHIFT+" or "" s += self.ALT and "ALT+" or "" return s class RootEntry(object): def __init__(self, root): if root is not None: if root in list(_ROOT_LOCATION_GUIDS.keys()): self.root = root self.guid = _ROOT_LOCATION_GUIDS[root] else: bytes = root if len(bytes) == 18: # and bytes[:2] == '\x1F\x50': # '\x1F\x50' for MY_COMPUTER # '\x1FX' for NETWORK bytes = bytes[2:] if len(bytes) != 16: raise FormatException("This is no valid _GUID: %s" % bytes) ordered = [bytes[3], bytes[2], bytes[1], bytes[0], bytes[5], bytes[4], bytes[7], bytes[6], bytes[8], bytes[9], bytes[10], bytes[11], bytes[12], bytes[13], bytes[14], bytes[15]] self.guid = "{%02X%02X%02X%02X-%02X%02X-%02X%02X-%02X%02X-%02X%02X%02X%02X%02X%02X}" % tuple( [ord(x) for x in ordered]) self.root = _ROOT_LOCATIONS.get(self.guid, "UNKNOWN") def bytes(self): guid = self.guid[1:-1].replace('-', '') chars = [chr(int(x, 16)) for x in [guid[i:i+2] for i in range(0, 32, 2)]] return '\x1F\x50' + chars[3] + chars[2] + chars[1] + chars[0] + chars[5] + chars[4] \ + chars[7] + chars[6] + ''.join(chars[8:]) bytes = property(bytes) def __str__(self): return "<RootEntry: %s>" % self.root class DriveEntry(object): def __init__(self, drive): if len(drive) == 23: self.drive = drive[1:3] else: m = _DRIVE_PATTERN.match(drive.strip()) if m: self.drive = m.groups()[0].upper() + ':' else: raise FormatException("This is not a valid drive: " + drive) def bytes(self): return '/' + self.drive + '\\' + '\x00' * 19 bytes = property(bytes) def __str__(self): return "<DriveEntry: %s>" % 
self.drive class PathSegmentEntry(object): def __init__(self, bytes=None): self.type = None self.file_size = None self.modified = None self.short_name = None self.created = None self.accessed = None self.full_name = None if bytes is not None: buf = StringIO(bytes) self.type = _ENTRY_TYPES.get(read_short(buf), 'UNKNOWN') short_name_is_unicode = self.type.endswith('(UNICODE)') self.file_size = read_int(buf) self.modified = read_dos_datetime(buf) unknown = read_short(buf) # should be 0x10 if short_name_is_unicode: self.short_name = read_cunicode(buf) else: self.short_name = read_cstring(buf, padding=True) indicator_1 = read_short(buf) # see below only_83 = read_short(buf) < 0x03 unknown = read_short(buf) # 0x04 self.is_unicode = read_short(buf) == 0xBeef self.created = read_dos_datetime(buf) self.accessed = read_dos_datetime(buf) offset_unicode = read_short(buf) only_83_2 = offset_unicode >= indicator_1 or offset_unicode < 0x14 offset_ansi = read_short(buf) self.full_name = read_cunicode(buf) offset_part2 = read_short(buf) # offset to byte after short name def create_for_path(cls, path): entry = cls() entry.type = 'FILE' entry.file_size = 473600 entry.short_name = path entry.modified = datetime.fromtimestamp(1444297518) entry.created = datetime.fromtimestamp(1444297518) entry.accessed = datetime.fromtimestamp(1503493813) entry.full_name = entry.short_name return entry create_for_path = classmethod(create_for_path) def _validate(self): if self.type is None: raise MissingInformationException("Type is missing, choose either TYPE_FOLDER or TYPE_FILE.") if self.file_size is None: if self.type.startswith('FOLDER'): self.file_size = 0 else: raise MissingInformationException("File size missing") if self.modified is None or self.accessed is None or self.created is None: raise MissingInformationException("Date information missing") if self.full_name is None: raise MissingInformationException("A full name is missing") if self.short_name is None: self.short_name = self.full_name def bytes(self): self._validate() out = StringIO() entry_type = self.type short_name_len = len(self.short_name) + 1 try: self.short_name.decode("ascii") short_name_is_unicode = False short_name_len += short_name_len % 2 # padding except (UnicodeEncodeError, UnicodeDecodeError): short_name_is_unicode = True short_name_len = short_name_len * 2 self.type += " (UNICODE)" write_short(_ENTRY_TYPE_IDS[entry_type], out) write_int(self.file_size, out) write_dos_datetime(self.modified, out) write_short(0x10, out) if short_name_is_unicode: write_cunicode(self.short_name, out) else: write_cstring(self.short_name, out, padding=True) indicator = 24 + 2 * len(self.short_name) write_short(indicator, out) write_short(0x03, out) write_short(0x04, out) write_short(0xBeef, out) write_dos_datetime(self.created, out) write_dos_datetime(self.accessed, out) offset_unicode = 0x14 # fixed data structure, always the same write_short(offset_unicode, out) offset_ansi = 0 # we always write unicode write_short(offset_ansi, out) write_cunicode(self.full_name, out) offset_part2 = 0x0E + short_name_len write_short(offset_part2, out) return out.getvalue() bytes = property(bytes) def __str__(self): return "<PathSegmentEntry: %s>" % self.full_name class LinkTargetIDList(object): def __init__(self, bytes=None): self.items = [] if bytes is not None: buf = StringIO(bytes) raw = [] entry_len = read_short(buf) while entry_len > 0: raw.append(buf.read(entry_len - 2)) # the length includes the size entry_len = read_short(buf) self._interpret(raw) def _interpret(self, raw): 
if len(raw[0]) == 0x12: self.items.append(RootEntry(raw[0])) if self.items[0].root == ROOT_MY_COMPUTER: if not len(raw[1]) == 0x17: raise ValueError("This seems to be an absolute link which requires a drive as second element.") self.items.append(DriveEntry(raw[1])) items = raw[2:] elif self.items[0].root == ROOT_NETWORK_PLACES: raise NotImplementedError("""Parsing network lnks has not yet been implemented. If you need it just contact me and we'll see...""") else: items = raw[1:] else: items = raw for item in items: self.items.append(PathSegmentEntry(item)) def _validate(self): if type(self.items[0]) == RootEntry: if self.items[0].root == ROOT_MY_COMPUTER \ and type(self.items[1]) != DriveEntry: raise ValueError("A drive is required for absolute lnks") def bytes(self): self._validate() out = StringIO() for item in self.items: bytes = item.bytes write_short(len(bytes) + 2, out) # len + terminator out.write(bytes) out.write('\x00\x00') return out.getvalue() bytes = property(bytes) def __str__(self): return "<LinkTargetIDList:\n%s>" % pformat([str(item) for item in self.items]) class LinkInfo(object): def __init__(self, lnk=None): if lnk is not None: self.start = lnk.tell() self.size = read_int(lnk) self.header_size = read_int(lnk) link_info_flags = read_int(lnk) self.local = link_info_flags & 1 self.remote = link_info_flags & 2 self.offs_local_volume_table = read_int(lnk) self.offs_local_base_path = read_int(lnk) self.offs_network_volume_table = read_int(lnk) self.offs_base_name = read_int(lnk) if self.header_size >= _LINK_INFO_HEADER_OPTIONAL: print("TODO: read the unicode stuff") # TODO: read the unicode stuff self._parse_path_elements(lnk) else: self.size = None self.header_size = _LINK_INFO_HEADER_DEFAULT self.remote = None self.offs_local_volume_table = 0 self.offs_local_base_path = 0 self.offs_network_volume_table = 0 self.offs_base_name = 0 self.drive_type = None self.drive_serial = None self.volume_label = None self.local_base_path = None self.network_share_name = None self.base_name = None self._path = None class Lnk(object): def __init__(self, f=None): self.file = None if type(f) == str or type(f) == str: self.file = f try: f = open(self.file, 'rb') except IOError: self.file += ".lnk" f = open(self.file, 'rb') # defaults self.link_flags = Flags(_LINK_FLAGS) self.file_flags = Flags(_FILE_ATTRIBUTES_FLAGS) self.creation_time = datetime.now() self.access_time = datetime.now() self.modification_time = datetime.now() self.file_size = 0 self.icon_index = 0 self._show_command = WINDOW_NORMAL self.hot_key = None self._link_info = LinkInfo() self.description = None self.relative_path = None self.work_dir = None self.arguments = None self.icon = None def _write_hot_key(self, hot_key, lnk): if hot_key is None: low = high = 0 else: hot_key = hot_key.split('+') try: low = _KEY_CODES[hot_key[-1]] except KeyError: raise InvalidKeyException("Cannot find key code for %s" % hot_key[1]) modifiers = ModifierKeys() for modifier in hot_key[:-1]: modifiers[modifier.upper()] = True high = modifiers.bytes write_byte(low, lnk) write_byte(high, lnk) def save(self, f=None, force_ext=False): if f is None: f = self.file if f is None: raise ValueError("File (name) missing for saveing the lnk") is_file = hasattr(f, 'write') if not is_file: if not type(f) == str and not type(f) == str: raise ValueError("Need a writeable object or a file name to save to, got %s" % f) if force_ext: if not f.lower().endswith('.lnk'): f += '.lnk' f = open(f, 'wb') self.write(f) # only close the stream if it's our own if not is_file: 
f.close() def ret(self): ret = _SIGNATURE ret += _GUID ret += pack('<I',self.link_flags.bytes) ret += pack('<I',self.file_flags.bytes) ret += pack('<Q',convert_time_to_windows(self.creation_time)) ret += pack('<Q',convert_time_to_windows(self.access_time)) ret += pack('<Q',convert_time_to_windows(self.modification_time)) ret += pack('<I',self.file_size) ret += pack('<I',self.icon_index) ret += pack('<I',_SHOW_COMMAND_IDS[self._show_command]) ret += pack('<B',0) #hotkey ret += pack('<B',0) #hotkey ret += ('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00') # reserved if self.link_flags.has_shell_item_id_list: siil = self.shell_item_id_list.bytes ret += pack('<H',len(siil)) ret += siil # TOFIX / TOINVESTIGATE #if self.link_flags.has_link_info: #self._link_info.write(lnk) if self.link_flags.has_description: ret += ret_sized_string(self.description, self.link_flags.is_unicode) if self.link_flags.has_relative_path: ret += ret_sized_string(self.relative_path, self.link_flags.is_unicode) if self.link_flags.has_work_directory: ret += ret_sized_string(self.work_dir, self.link_flags.is_unicode) if self.link_flags.has_arguments: ret += ret_sized_string(self.arguments, self.link_flags.is_unicode) if self.link_flags.has_icon: ret += ret_sized_string(self.icon, self.link_flags.is_unicode) ret += ('\x00\x00\x00\x00') # header_size return ret def write(self, lnk): lnk.write(_SIGNATURE) lnk.write(_GUID) write_int(self.link_flags.bytes, lnk) write_int(self.file_flags.bytes, lnk) write_double(convert_time_to_windows(self.creation_time), lnk) write_double(convert_time_to_windows(self.access_time), lnk) write_double(convert_time_to_windows(self.modification_time), lnk) write_int(self.file_size, lnk) write_int(self.icon_index, lnk) write_int(_SHOW_COMMAND_IDS[self._show_command], lnk) self._write_hot_key(self.hot_key, lnk) lnk.write('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00') # reserved if self.link_flags.has_shell_item_id_list: siil = self.shell_item_id_list.bytes write_short(len(siil), lnk) lnk.write(siil) if self.link_flags.has_link_info: self._link_info.write(lnk) if self.link_flags.has_description: write_sized_string(self.description, lnk, self.link_flags.is_unicode) if self.link_flags.has_relative_path: write_sized_string(self.relative_path, lnk, self.link_flags.is_unicode) if self.link_flags.has_work_directory: write_sized_string(self.work_dir, lnk, self.link_flags.is_unicode) if self.link_flags.has_arguments: write_sized_string(self.arguments, lnk, self.link_flags.is_unicode) if self.link_flags.has_icon: write_sized_string(self.icon, lnk, self.link_flags.is_unicode) lnk.write('\x00\x00\x00\x00') # header_size def _get_shell_item_id_list(self): return self._shell_item_id_list def _set_shell_item_id_list(self, shell_item_id_list): self._shell_item_id_list = shell_item_id_list self.link_flags.has_shell_item_id_list = shell_item_id_list != None shell_item_id_list = property(_get_shell_item_id_list, _set_shell_item_id_list) def _get_link_info(self): return self._link_info def _set_link_info(self, link_info): self._link_info = link_info self.link_flags.force_no_link_info = link_info == None self.link_flags.has_link_info = link_info != None link_info = property(_get_link_info, _set_link_info) def _get_description(self): return self._description def _set_description(self, description): self._description = description self.link_flags.has_description = description != None description = property(_get_description, _set_description) def _get_relative_path(self): return self._relative_path def _set_relative_path(self, 
relative_path): self._relative_path = relative_path self.link_flags.has_relative_path = relative_path != None relative_path = property(_get_relative_path, _set_relative_path) def _get_work_dir(self): return self._work_dir def _set_work_dir(self, work_dir): self._work_dir = work_dir self.link_flags.has_work_directory = work_dir != None work_dir = working_dir = property(_get_work_dir, _set_work_dir) def _get_arguments(self): return self._arguments def _set_arguments(self, arguments): self._arguments = arguments self.link_flags.has_arguments = arguments != None arguments = property(_get_arguments, _set_arguments) def _get_icon(self): return self._icon def _set_icon(self, icon): self._icon = icon self.link_flags.has_icon = icon != None icon = property(_get_icon, _set_icon) def _get_window_mode(self): return self._show_command def _set_window_mode(self, value): if not value in list(_SHOW_COMMANDS.values()): raise ValueError("Not a valid window mode: %s. Choose any of pylnk.WINDOW_*" % value) self._show_command = value window_mode = show_command = property(_get_window_mode, _set_window_mode) def _get_path(self): return self._shell_item_id_list.get_path() path = property(_get_path) def __str__(self): s = "Target file:\n" s += str(self.file_flags) s += "\nCreation Time: %s" % self.creation_time s += "\nModification Time: %s" % self.modification_time s += "\nAccess Time: %s" % self.access_time s += "\nFile size: %s" % self.file_size s += "\nWindow mode: %s" % self._show_command s += "\nHotkey: %s\n" % self.hot_key s += str(self._link_info) if self.link_flags.has_shell_item_id_list: s += "\n%s" % self.shell_item_id_list if self.link_flags.has_description: s += "\nDescription: %s" % self.description if self.link_flags.has_relative_path: s += "\nRelative Path: %s" % self.relative_path if self.link_flags.has_work_directory: s += "\nWorking Directory: %s" % self.work_dir if self.link_flags.has_arguments: s += "\nCommandline Arguments: %s" % self.arguments if self.link_flags.has_icon: s += "\nIcon: %s" % self.icon s += "\nUsed Path: %s" % self.shell_item_id_list.get_path() return s #---- convenience functions def create(f=None): lnk = Lnk() lnk.file = f return lnk def for_file(target_file, arguments, lnkname, lnkicon=None, description=None): drive, full_path = target_file.split(':',1) full_path = full_path.lstrip('\\') lnk = create() lnk.link_info = None level = full_path elements = [RootEntry(ROOT_MY_COMPUTER), DriveEntry(drive)] segment = PathSegmentEntry.create_for_path(level) elements.append(segment) lnk.shell_item_id_list = LinkTargetIDList() lnk.shell_item_id_list.items = elements lnk.description = description lnk.arguments = arguments lnk.icon = lnkicon #if lnkname: # lnk.save() return lnk
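
# Illustrative usage sketch (not part of the original module): build a shortcut
# with the for_file() helper defined above and write it out with Lnk.save().
# The target path, arguments and output name are made-up examples; only
# functions already defined in this file are relied on.
if __name__ == '__main__':
    example = for_file(r'C:\Windows\notepad.exe', r'"C:\notes.txt"',
                       'notepad.lnk', description='Open my notes in Notepad')
    example.save('notepad.lnk', force_ext=True)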
py
1a409f5097c8150f2313bde345bf7bae3a145e8f
# 
# Generated with IndexSelectionFilterBlueprint
from dmt.blueprint import Blueprint
from dmt.dimension import Dimension
from dmt.attribute import Attribute
from dmt.enum_attribute import EnumAttribute
from dmt.blueprint_attribute import BlueprintAttribute
from .operationnode import OperationNodeBlueprint

class IndexSelectionFilterBlueprint(OperationNodeBlueprint):
    """Blueprint for the sima/post IndexSelectionFilter operation node, which
    selects a single zero-based index (or the last index) from an input signal
    along the chosen axis."""

    def __init__(self, name="IndexSelectionFilter", package_path="sima/post", description=""):
        super().__init__(name,package_path,description)
        self.attributes.append(Attribute("name","string","",default=""))
        self.attributes.append(Attribute("description","string","",default=""))
        self.attributes.append(Attribute("_id","string","",default=""))
        self.attributes.append(BlueprintAttribute("scriptableValues","sima/sima/ScriptableValue","",True,Dimension("*")))
        self.attributes.append(Attribute("x","integer","",default=0))
        self.attributes.append(Attribute("y","integer","",default=0))
        self.attributes.append(Attribute("h","integer","",default=0))
        self.attributes.append(Attribute("w","integer","",default=0))
        self.attributes.append(BlueprintAttribute("controlSignalInputSlots","sima/post/ControlSignalInputSlot","",True,Dimension("*")))
        self.attributes.append(BlueprintAttribute("filterInputSlots","sima/post/InputSlot","",True,Dimension("*")))
        self.attributes.append(BlueprintAttribute("filterOutputSlots","sima/post/OutputSlot","",True,Dimension("*")))
        self.attributes.append(Attribute("renameOutput","boolean","",default=True))
        self.attributes.append(EnumAttribute("axis","sima/post/SignalAxis","Select the value from the x-axis or the y-axis"))
        self.attributes.append(Attribute("lastIndex","boolean","Select the last index",default=False))
        self.attributes.append(Attribute("index","integer","Select the given zero-based index of the input signal",default=0))
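
# Usage note (assumption-laden sketch, not part of the generated file). From a
# context where the package is importable, the attributes registered above can
# be inspected; the exact dmt Attribute API beyond what this file shows is not
# confirmed here:
#
#     bp = IndexSelectionFilterBlueprint()
#     print(len(bp.attributes))  # the 15 attributes appended above, plus any inherited ones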
py
1a409f638edb15ccfbd0dd6f67624a5bf70d1e76
# Copyright 2020 DeepMind Technologies Limited. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Configuration for Capture the Flag. Example video: https://youtu.be/VRNt55-0IqE This substrate a team based zero sum game. There are four players on each team. There is a red team and blue team. Players can paint the ground anywhere by using their zapping beam. If they stand on their own color then they gain health up to a maximum of 3 (so they are more likely to win shootouts). They lose health down to 1 from their default of 2 when standing on the opposing team's color (so they are more likely to lose shootouts in that case). Health recovers stochastically, at a fixed rate of 0.05 per frame. It cannot exceed its maximum, determined by the current color of the ground the agent is standing on. Players also cannot move over their opposing team's color. If the opposing team paints the square underneath their feet then they get stuck in place until they use their own zapping beam to re-paint the square underneath and in front of themselves to break free. In practice this slows them down by one frame (which may be critical if they are being chased). Friendly fire is impossible; agents cannot zap their teammates. In the _Capture the Flag_ substrate the final goal is capturing the opposing team's flag. Payoffs are common to the entire winning team. Indicator tiles around the edge of the map and in its very center display which teams have their own flag on their base, allowing them the possibility of capturing their opponent's flag by bringing it to their own base/flag. When indicator tiles are red then only the red team can score. When indicator tiles are blue then only the blue team can score. When the indicator tiles are purple then both teams have the possibility of scoring (though neither is close to doing so) since both flags are in their respective home bases. """ from typing import Any, Dict, Optional from ml_collections import config_dict import numpy as np from meltingpot.python.utils.substrates import shapes _COMPASS = ["N", "E", "S", "W"] DEFAULT_ASCII_MAP = """ IIIIIIIIIIIIIIIIIIIIIII IWWWWWWWWWWWWWWWWWWWWWI IWPPP,PPPP,F,PPPP,PPPWI IWPPP,,PP,,,,,PP,,PPPWI IWPPP,,,,,,,,,,,,,PPPWI IWP,,WW,,,,,,,,,WW,,PWI IWHHWWW,WWWWWWW,WWWHHWI IWHHW,D,,,,,,,,,D,WHHWI IWHH,,W,,,WWW,,,W,,HHWI IW,,,,W,,,,,,,,,W,,,,WI IW,,,,WWW,,,,,WWW,,,,WI IW,,,,,,,,,I,,,,,,,,,WI IW,,,,WWW,,,,,WWW,,,,WI IW,,,,W,,,,,,,,,W,,,,WI IWHH,,W,,,WWW,,,W,,HHWI IWHHW,D,,,,,,,,,D,WHHWI IWHHWWW,WWWWWWW,WWWHHWI IWQ,,WW,,,,,,,,,WW,,QWI IWQQQ,,,,,,,,,,,,,QQQWI IWQQQ,,QQ,,,,,QQ,,QQQWI IWQQQ,QQQQ,G,QQQQ,QQQWI IWWWWWWWWWWWWWWWWWWWWWI IIIIIIIIIIIIIIIIIIIIIII """ # `prefab` determines which prefab game object to use for each `char` in the # ascii map. 
CHAR_PREFAB_MAP = { "P": {"type": "all", "list": ["spawn_point_red", "ground"]}, "Q": {"type": "all", "list": ["spawn_point_blue", "ground"]}, "W": "wall", "D": {"type": "choice", "list": ["destroyable_wall"] * 9 + ["destroyed_wall"]}, "H": {"type": "choice", "list": ["destroyable_wall"] * 3 + ["destroyed_wall"]}, ",": "ground", "I": {"type": "all", "list": ["indicator", "indicator_frame"]}, "F": {"type": "all", "list": ["ground", "home_tile_red", "flag_red"]}, "G": {"type": "all", "list": ["ground", "home_tile_blue", "flag_blue"]}, } RED_COLOR = (225, 55, 85, 255) DARKER_RED_COLOR = (200, 35, 55, 255) DARKEST_RED_COLOR = (160, 5, 25, 255) BLUE_COLOR = (85, 55, 225, 255) DARKER_BLUE_COLOR = (55, 35, 200, 255) DARKEST_BLUE_COLOR = (25, 5, 160, 255) PURPLE_COLOR = (107, 63, 160, 255) def multiply_tuple(color_tuple, factor): alpha = color_tuple[3] return tuple([int(np.min([x * factor, alpha])) for x in color_tuple[0: 3]]) TEAMS_DATA = { "red": {"color": RED_COLOR, "spawn_group": "{}SpawnPoints".format("red")}, "blue": {"color": BLUE_COLOR, "spawn_group": "{}SpawnPoints".format("blue")}, } WALL = { "name": "wall", "components": [ { "component": "StateManager", "kwargs": { "initialState": "wall", "stateConfigs": [{ "state": "wall", "layer": "upperPhysical", "sprite": "Wall", }], } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "renderMode": "ascii_shape", "spriteNames": ["Wall",], "spriteShapes": [shapes.WALL], "palettes": [{"*": (95, 95, 95, 255), "&": (100, 100, 100, 255), "@": (109, 109, 109, 255), "#": (152, 152, 152, 255)}], "noRotates": [True] } }, { "component": "AllBeamBlocker", "kwargs": {} }, ] } INDICATOR_FRAME = { "name": "indicator_frame", "components": [ { "component": "StateManager", "kwargs": { "initialState": "inert", "stateConfigs": [ {"state": "inert", "layer": "superOverlay", "sprite": "InertFrame"} ] } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "renderMode": "ascii_shape", "spriteNames": ["InertFrame"], "spriteShapes": [shapes.BUTTON], "palettes": [{"*": (0, 0, 0, 0), "x": (55, 55, 55, 255), "#": (0, 0, 0, 0)}], "noRotates": [True] } }, ] } INDICATOR = { "name": "control_indicator", "components": [ { "component": "StateManager", "kwargs": { "initialState": "both", "stateConfigs": [ { "state": "neither", "layer": "background", "sprite": "NeitherIndicator", }, { "state": "red", "layer": "background", "sprite": "RedIndicator", }, { "state": "blue", "layer": "background", "sprite": "BlueIndicator", }, { "state": "both", "layer": "background", "sprite": "BothIndicator", }, ] } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "spriteNames": ["NeitherIndicator", "RedIndicator", "BlueIndicator", "BothIndicator"], "spriteRGBColors": [(0, 0, 0, 0), DARKER_RED_COLOR, DARKER_BLUE_COLOR, PURPLE_COLOR] } }, {"component": "ControlIndicator",}, ] } def create_home_tile_prefab(team: str): """Return a home tile prefab, where the flag starts and must be brought.""" sprite_name = "HomeTileFrame{}".format(team) prefab = { "name": "home_tile", "components": [ { "component": "StateManager", "kwargs": { "initialState": "inert", "stateConfigs": [ {"state": "inert", "layer": "background", "sprite": sprite_name} ] } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "renderMode": "ascii_shape", "spriteNames": [sprite_name], "spriteShapes": [shapes.BUTTON], "palettes": [{"*": (0, 0, 0, 0), "x": (0, 0, 0, 0), "#": (218, 165, 32, 255)}], "noRotates": [True] } }, { "component": "HomeTile", "kwargs": { 
"team": team, } }, ] } return prefab def create_ground_prefab(): """Return a prefab for a colorable ground prefab.""" sprite_names = ["RedGround", "BlueGround"] sprite_colors = [DARKEST_RED_COLOR, DARKEST_BLUE_COLOR] prefab = { "name": "ground", "components": [ { "component": "StateManager", "kwargs": { "initialState": "clean", "stateConfigs": [ { "state": "clean", "layer": "alternateLogic", }, { "state": "red", "layer": "alternateLogic", "sprite": sprite_names[0], }, { "state": "blue", "layer": "alternateLogic", "sprite": sprite_names[1], }, ] } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "spriteNames": sprite_names, "spriteRGBColors": sprite_colors } }, { "component": "Ground", "kwargs": { "teamNames": ["red", "blue"], } }, ] } return prefab def create_destroyable_wall_prefab(initial_state): """Return destroyable wall prefab, potentially starting in destroyed state.""" if initial_state == "destroyed": initial_health = 0 else: initial_health = 5 prefab = { "name": "destroyableWall", "components": [ { "component": "StateManager", "kwargs": { "initialState": initial_state, "stateConfigs": [ { "state": "destroyable", "layer": "upperPhysical", "sprite": "DestroyableWall", }, { "state": "damaged", "layer": "upperPhysical", "sprite": "DamagedWall", }, { "state": "destroyed", "layer": "alternateLogic", "sprite": "Rubble", }, ], } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "renderMode": "ascii_shape", "spriteNames": ["DestroyableWall", "DamagedWall", "Rubble"], "spriteShapes": [shapes.WALL, shapes.WALL, shapes.WALL], "palettes": [{"*": (55, 55, 55, 255), "&": (100, 100, 100, 255), "@": (109, 109, 109, 255), "#": (152, 152, 152, 255)}, {"*": (55, 55, 55, 255), "&": (100, 100, 100, 255), "@": (79, 79, 79, 255), "#": (152, 152, 152, 255)}, {"*": (0, 0, 0, 255), "&": (0, 0, 0, 255), "@": (29, 29, 29, 255), "#": (0, 0, 0, 255)}], "noRotates": [True] * 3 } }, { "component": "Destroyable", "kwargs": {"hitNames": ["red", "blue"], "initialHealth": initial_health, "damagedHealthLevel": 2} } ] } return prefab def create_spawn_point_prefab(team): """Return a team-specific spawn-point prefab.""" prefab = { "name": "spawn_point", "components": [ { "component": "StateManager", "kwargs": { "initialState": "playerSpawnPoint", "stateConfigs": [{ "state": "playerSpawnPoint", "layer": "logic", "groups": [TEAMS_DATA[team]["spawn_group"]], }], } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "renderMode": "invisible", "spriteNames": [], "spriteRGBColors": [] } }, ] } return prefab def create_flag_prefab(team: str): """Return a team-specific flag prefab.""" dropped_sprite_name = "DroppedFlag_{}".format(team) carried_sprite_name = "CarriedFlag_{}".format(team) if team == "red": flag_color = RED_COLOR elif team == "blue": flag_color = BLUE_COLOR prefab = { "name": "{}_flag".format(team), "components": [ { "component": "StateManager", "kwargs": { "initialState": "dropped", "stateConfigs": [ { "state": "dropped", "layer": "lowerPhysical", "sprite": dropped_sprite_name, }, { "state": "carried", "layer": "overlay", "sprite": carried_sprite_name, }, { "state": "wait", } ] } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "renderMode": "ascii_shape", "spriteNames": [dropped_sprite_name, carried_sprite_name], "spriteShapes": [shapes.FLAG, shapes.FLAG_HELD], "palettes": [shapes.get_palette(flag_color)] * 2, "noRotates": [True, True] } }, { "component": "Flag", "kwargs": { "team": team, } } ] } return prefab # PREFABS is 
a dictionary mapping names to template game objects that can # be cloned and placed in multiple locations accoring to an ascii map. PREFABS = { "wall": WALL, "spawn_point_red": create_spawn_point_prefab("red"), "spawn_point_blue": create_spawn_point_prefab("blue"), "destroyable_wall": create_destroyable_wall_prefab("destroyable"), "destroyed_wall": create_destroyable_wall_prefab("destroyed"), "ground": create_ground_prefab(), "indicator": INDICATOR, "indicator_frame": INDICATOR_FRAME, "flag_red": create_flag_prefab("red"), "flag_blue": create_flag_prefab("blue"), "home_tile_red": create_home_tile_prefab("red"), "home_tile_blue": create_home_tile_prefab("blue"), } # Primitive action components. # pylint: disable=bad-whitespace # pyformat: disable NOOP = {"move": 0, "turn": 0, "fireZap": 0} FORWARD = {"move": 1, "turn": 0, "fireZap": 0} STEP_RIGHT = {"move": 2, "turn": 0, "fireZap": 0} BACKWARD = {"move": 3, "turn": 0, "fireZap": 0} STEP_LEFT = {"move": 4, "turn": 0, "fireZap": 0} TURN_LEFT = {"move": 0, "turn": -1, "fireZap": 0} TURN_RIGHT = {"move": 0, "turn": 1, "fireZap": 0} FIRE_ZAP = {"move": 0, "turn": 0, "fireZap": 1} # pyformat: enable # pylint: enable=bad-whitespace ACTION_SET = ( NOOP, FORWARD, BACKWARD, STEP_LEFT, STEP_RIGHT, TURN_LEFT, TURN_RIGHT, FIRE_ZAP, ) # The Scene is a non-physical object, its components implement global logic. def create_scene(): """Creates the global scene.""" scene = { "name": "scene", "components": [ { "component": "StateManager", "kwargs": { "initialState": "scene", "stateConfigs": [{ "state": "scene", }], } }, {"component": "Transform",}, { "component": "FlagManager", "kwargs": {} }, ] } return scene def create_avatar_object( player_idx: int, team: str, override_taste_kwargs: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: """Create an avatar object.""" # Lua is 1-indexed. lua_index = player_idx + 1 team_color = TEAMS_DATA[team]["color"] health1_avatar_sprite_name = "avatarSprite{}Health1".format(lua_index) health2_avatar_sprite_name = "avatarSprite{}Health2".format(lua_index) health3_avatar_sprite_name = "avatarSprite{}Health3".format(lua_index) health1_color_palette = shapes.get_palette(multiply_tuple(team_color, 0.35)) health2_color_palette = shapes.get_palette(team_color) health3_color_palette = shapes.get_palette(multiply_tuple(team_color, 1.75)) taste_kwargs = { "defaultTeamReward": 1.0, "rewardForZapping": 0.0, "extraRewardForZappingFlagCarrier": 0.0, "rewardForReturningFlag": 0.0, "rewardForPickingUpOpposingFlag": 0.0, } if override_taste_kwargs: taste_kwargs.update(override_taste_kwargs) avatar_object = { "name": "avatar", "components": [ { "component": "StateManager", "kwargs": { "initialState": "health2", "stateConfigs": [ {"state": "health1", "layer": "upperPhysical", "sprite": health1_avatar_sprite_name, "contact": "avatar", "groups": ["players"]}, {"state": "health2", "layer": "upperPhysical", "sprite": health2_avatar_sprite_name, "contact": "avatar", "groups": ["players"]}, {"state": "health3", "layer": "upperPhysical", "sprite": health3_avatar_sprite_name, "contact": "avatar", "groups": ["players"]}, # Player wait state used when they have been zapped out. 
{"state": "playerWait", "groups": ["playerWaits"]}, ] } }, {"component": "Transform",}, { "component": "Appearance", "kwargs": { "renderMode": "ascii_shape", "spriteNames": [health1_avatar_sprite_name, health2_avatar_sprite_name, health3_avatar_sprite_name], "spriteShapes": [shapes.CUTE_AVATAR, shapes.CUTE_AVATAR, shapes.CUTE_AVATAR], "palettes": [health1_color_palette, health2_color_palette, health3_color_palette], "noRotates": [True] * 3 } }, { "component": "Avatar", "kwargs": { "index": lua_index, "aliveState": "health2", "additionalLiveStates": ["health1", "health3"], "waitState": "playerWait", "spawnGroup": TEAMS_DATA[team]["spawn_group"], "actionOrder": ["move", "turn", "fireZap"], "actionSpec": { "move": {"default": 0, "min": 0, "max": len(_COMPASS)}, "turn": {"default": 0, "min": -1, "max": 1}, "fireZap": {"default": 0, "min": 0, "max": 1}, }, "view": { "left": 5, "right": 5, "forward": 9, "backward": 1, "centered": False }, # The following kwarg makes it possible to get rewarded for # team rewards even when an avatar is "dead". "skipWaitStateRewards": False, } }, { "component": "ColorZapper", "kwargs": { "team": team, # The color zapper beam is somewhat transparent. "color": (team_color[0], team_color[1], team_color[2], 150), "cooldownTime": 2, "beamLength": 3, "beamRadius": 1, "aliveStates": ["health1", "health2", "health3"], } }, { "component": "ReadyToShootObservation", "kwargs": { "zapperComponent": "ColorZapper", } }, { "component": "ZappedByColor", "kwargs": { "team": team, "allTeamNames": ["red", "blue"], "framesTillRespawn": 80, "penaltyForBeingZapped": 0, "rewardForZapping": 0, "healthRegenerationRate": 0.05, "maxHealthOnGround": 2, "maxHealthOnOwnColor": 3, "maxHealthOnEnemyColor": 1, } }, { "component": "TeamMember", "kwargs": {"team": team} }, { "component": "Taste", "kwargs": taste_kwargs }, { "component": "LocationObserver", "kwargs": { "objectIsAvatar": True, "alsoReportOrientation": True } }, ] } return avatar_object def _even_vs_odd_team_assignment(num_players, taste_kwargs: Optional[Any] = None): """Assign players with even ids to red team and odd ids to blue team.""" avatar_objects = [] for player_idx in range(0, num_players): if player_idx % 2 == 0: team = "red" elif player_idx % 2 == 1: team = "blue" game_object = create_avatar_object(player_idx, team, override_taste_kwargs=taste_kwargs) avatar_objects.append(game_object) return avatar_objects def _low_vs_high_team_assignment(num_players, taste_kwargs: Optional[Any] = None): """Assign players with id below the median id to blue and above it to red.""" median = np.median(range(num_players)) avatar_objects = [] for player_idx in range(0, num_players): if player_idx < median: team = "blue" elif player_idx > median: team = "red" game_object = create_avatar_object(player_idx, team, override_taste_kwargs=taste_kwargs) avatar_objects.append(game_object) return avatar_objects def create_avatar_objects(num_players, taste_kwargs: Optional[Any] = None, fixed_teams: Optional[bool] = False): """Returns list of avatar objects of length 'num_players'.""" assert num_players % 2 == 0, "num players must be divisible by 2" if fixed_teams: avatar_objects = _low_vs_high_team_assignment(num_players, taste_kwargs=taste_kwargs) else: avatar_objects = _even_vs_odd_team_assignment(num_players, taste_kwargs=taste_kwargs) return avatar_objects def create_lab2d_settings( num_players: int, avatar_taste_kwargs: Optional[Any] = None, fixed_teams: Optional[bool] = False) -> Dict[str, Any]: """Returns the lab2d settings.""" lab2d_settings = { 
"levelName": "paintball_capture_the_flag", "levelDirectory": "meltingpot/lua/levels", "numPlayers": num_players, "maxEpisodeLengthFrames": 1000, "spriteSize": 8, "topology": "BOUNDED", # Choose from ["BOUNDED", "TORUS"], "simulation": { "map": DEFAULT_ASCII_MAP, "gameObjects": create_avatar_objects(num_players, taste_kwargs=avatar_taste_kwargs, fixed_teams=fixed_teams), "scene": create_scene(), "prefabs": PREFABS, "charPrefabMap": CHAR_PREFAB_MAP, }, } return lab2d_settings def get_config(factory=create_lab2d_settings): """Default configuration for training on the capture_the_flag level.""" config = config_dict.ConfigDict() config.num_players = 8 config.lab2d_settings = factory(config.num_players) # Action set configuration. config.action_set = ACTION_SET # Observation format configuration. config.individual_observation_names = [ "RGB", "READY_TO_SHOOT", "POSITION", "ORIENTATION", ] config.global_observation_names = [ "WORLD.RGB", ] return config