repo_name
stringlengths
6
61
path
stringlengths
4
230
copies
stringlengths
1
3
size
stringlengths
4
6
text
stringlengths
1.01k
850k
license
stringclasses
15 values
hash
int64
-9,220,477,234,079,998,000
9,219,060,020B
line_mean
float64
11.6
96.6
line_max
int64
32
939
alpha_frac
float64
0.26
0.9
autogenerated
bool
1 class
ratio
float64
1.62
6.1
config_test
bool
2 classes
has_no_keywords
bool
2 classes
few_assignments
bool
1 class
mahak/neutron
neutron/objects/floatingip.py
2
1314
# Copyright (c) 2016 Intel Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron_lib.objects import common_types from neutron.db.models import dns as models from neutron.objects import base @base.NeutronObjectRegistry.register class FloatingIPDNS(base.NeutronDbObject): # Version 1.0: Initial version VERSION = '1.0' db_model = models.FloatingIPDNS primary_keys = ['floatingip_id'] foreign_keys = {'FloatingIP': {'floatingip_id': 'id'}} fields = { 'floatingip_id': common_types.UUIDField(), 'dns_name': common_types.DomainNameField(), 'dns_domain': common_types.DomainNameField(), 'published_dns_name': common_types.DomainNameField(), 'published_dns_domain': common_types.DomainNameField(), }
apache-2.0
6,196,161,757,057,274,000
34.513514
78
0.704718
false
3.922388
false
false
false
KmolYuan/pyslvs
pyslvs/expression_parser.py
1
9940
# -*- coding: utf-8 -*- """Lark parser to parse the expression.""" __author__ = "Yuan Chang" __copyright__ = "Copyright (C) 2016-2021" __license__ = "AGPL" __email__ = "[email protected]" from abc import abstractmethod from typing import ( cast, Tuple, List, Dict, Iterator, Optional, Union, TypeVar, Generic, ) from dataclasses import dataclass from lark import Lark, Transformer, LexError from .expression import get_vlinks, VJoint, VPoint, VLink from .graph import Graph _T1 = TypeVar('_T1') _T2 = TypeVar('_T2') _Coord = Tuple[float, float] _JointArgs = List[Union[str, VJoint, float, _Coord, Tuple[str, ...]]] # Color dictionary _color_list: Dict[str, Tuple[int, int, int]] = { 'red': (172, 68, 68), 'green': (110, 190, 30), 'blue': (68, 120, 172), 'cyan': (0, 255, 255), 'magenta': (255, 0, 255), 'brick-red': (255, 130, 130), 'yellow': (255, 255, 0), 'gray': (160, 160, 160), 'orange': (225, 165, 0), 'pink': (225, 192, 230), 'black': (0, 0, 0), 'white': (255, 255, 255), 'dark-red': (128, 0, 0), 'dark-green': (0, 128, 0), 'dark-blue': (0, 0, 128), 'dark-cyan': (128, 0, 128), 'dark-magenta': (255, 0, 255), 'dark-yellow': (128, 128, 0), 'dark-gray': (128, 128, 128), 'dark-orange': (225, 140, 0), 'dark-pink': (225, 20, 147), } color_names = tuple(sorted(_color_list.keys())) def color_rgb(name: str) -> Tuple[int, int, int]: """Get color by name. Get RGB color data by name, return `(0, 0, 0)` if it is invalid. Also support `"(R, G, B)"` string format. 
""" name = name.lower() if name in _color_list: return _color_list[name] else: try: # Input RGB as a "(255, 255, 255)" string rgb = ( name.replace('(', '') .replace(')', '') .replace(" ", '') .split(',', maxsplit=3) ) color_text = (int(rgb[0]), int(rgb[1]), int(rgb[2])) except ValueError: return 0, 0, 0 else: return color_text @dataclass(repr=False, eq=False) class PointArgs: """Point table argument.""" links: str type: str color: str x: float y: float @dataclass(repr=False, eq=False) class LinkArgs: """Link table argument.""" name: str color: str points: str _GRAMMAR = Lark(r""" // Number DIGIT: "0".."9" INT: DIGIT+ SIGNED_INT: ["+" | "-"] INT DECIMAL: INT "." INT? | "." INT _EXP: ("e" | "E") SIGNED_INT FLOAT: INT _EXP | DECIMAL _EXP? NUMBER: FLOAT | INT SIGNED_NUMBER: ["+" | "-"] NUMBER // Letters LCASE_LETTER: "a".."z" UCASE_LETTER: "A".."Z" LETTER: UCASE_LETTER | LCASE_LETTER | "_" CNAME: LETTER (LETTER | DIGIT)* // White space and new line WS: /\s+/ CR: /\r/ LF: /\n/ NEWLINE: (CR? LF)+ %ignore WS %ignore NEWLINE // Comment LINE_COMMENT: /#[^\n]*/ MULTILINE_COMMENT: /#\[[\s\S]*#\][^\n]*/ %ignore LINE_COMMENT %ignore MULTILINE_COMMENT // Custom data type JOINT_TYPE: "RP" | "R" | "P" COLOR: """ + "|".join(f'"{color}"i' for color in color_names) + r""" type: JOINT_TYPE name: CNAME number: SIGNED_NUMBER color_value: INT // Main grammar joint: "J[" type ["," angle] ["," color] "," point "," link "]" link: "L[" [name ("," name)* ","?] "]" point: "P[" number "," number "]" angle: "A[" number "]" color: "color[" (("(" color_value ("," color_value) ~ 2 ")") | COLOR) "]" mechanism: "M[" [joint ("," joint)* ","?] 
"]" ?start: mechanism """, parser='lalr') class _Transformer(Transformer, Generic[_T1, _T2]): """Base transformer implementation.""" @staticmethod @abstractmethod def type(n: List[str]) -> _T1: raise NotImplementedError @staticmethod def name(n: List[str]) -> str: return str(n[0]) @staticmethod def color(n: List[str]) -> str: return str(n[0]) if len(n) == 1 else str(tuple(n)) @staticmethod def color_value(n: List[str]) -> int: return int(n[0]) @staticmethod def number(n: List[str]) -> float: return float(n[0]) angle = number @staticmethod def point(c: List[float]) -> _Coord: return c[0], c[1] @staticmethod def link(a: List[str]) -> Tuple[str, ...]: return tuple(a) @staticmethod @abstractmethod def joint(args: _JointArgs) -> _T2: raise NotImplementedError @staticmethod def mechanism(joints: List[_T2]) -> List[_T2]: return joints class _ParamsTrans(_Transformer[str, PointArgs]): """Transformer will parse into a list of VPoint data.""" @staticmethod def type(n: List[str]) -> str: return str(n[0]) @staticmethod def joint(args: _JointArgs) -> PointArgs: """Sort the argument list. 
[0]: type ([1]: angle) ([2]: color) [-2]: point (coordinate) [-1]: link """ type_str = cast(str, args[0]) x, y = cast(_Coord, args[-2]) links = ','.join(cast(Tuple[str, ...], args[-1])) if type_str == 'R': if len(args) == 3: return PointArgs(links, 'R', 'Green', x, y) elif len(args) == 4: color = cast(str, args[-3]) return PointArgs(links, 'R', color, x, y) else: angle = cast(float, args[1]) type_angle = f'{type_str}:{angle}' if len(args) == 4: return PointArgs(links, type_angle, 'Green', x, y) elif len(args) == 5: color = cast(str, args[-3]) return PointArgs(links, type_angle, color, x, y) raise LexError(f"invalid options: {args}") class _PositionTrans(_Transformer[str, _Coord]): """Transformer will parse into a list of position data.""" @staticmethod def type(n: List[str]) -> str: return str(n[0]) @staticmethod def joint(args: _JointArgs) -> _Coord: x, y = cast(_Coord, args[-2]) return x, y class _VPointsTrans(_Transformer[VJoint, VPoint]): """Using same grammar return as VPoints.""" @staticmethod def type(n: List[str]) -> VJoint: """Return as int type.""" type_str = str(n[0]) if type_str == 'R': return VJoint.R elif type_str == 'P': return VJoint.P elif type_str == 'RP': return VJoint.RP else: raise ValueError(f"invalid joint type: {type_str}") @staticmethod def joint(args: _JointArgs) -> VPoint: """Sort the argument list. 
[0]: type ([1]: angle) ([2]: color) [-2]: point (coordinate) [-1]: link """ type_int = cast(VJoint, args[0]) x, y = cast(_Coord, args[-2]) links = cast(Tuple[str, ...], args[-1]) if type_int == VJoint.R: if len(args) == 3: return VPoint.r_joint(links, x, y) elif len(args) == 4: color = cast(str, args[-3]) return VPoint(links, VJoint.R, 0., color, x, y, color_rgb) else: angle = cast(float, args[1]) if len(args) == 4: return VPoint.slider_joint(links, type_int, angle, x, y) elif len(args) == 5: color = cast(str, args[-3]) return VPoint(links, type_int, angle, color, x, y, color_rgb) raise LexError(f"invalid options: {args}") _params_translator = _ParamsTrans() _pos_translator = _PositionTrans() _vpoint_translator = _VPointsTrans() def parse_params(expr: str) -> List[PointArgs]: """Parse mechanism expression into VPoint constructor arguments.""" return _params_translator.transform(_GRAMMAR.parse(expr)) def parse_pos(expr: str) -> List[_Coord]: """Parse mechanism expression into coordinates.""" return _pos_translator.transform(_GRAMMAR.parse(expr)) def parse_vpoints(expr: str) -> List[VPoint]: """Parse mechanism expression into VPoint objects.""" return _vpoint_translator.transform(_GRAMMAR.parse(expr)) def parse_vlinks(expr: str) -> List[VLink]: """Parse mechanism expression into VLink objects.""" return get_vlinks(parse_vpoints(expr)) def _sorted_pair(a: int, b: int) -> Tuple[int, int]: return (a, b) if a < b else (b, a) def edges_view(graph: Graph) -> Iterator[Tuple[int, Tuple[int, int]]]: """The iterator will yield the sorted edges from `graph`.""" yield from enumerate(sorted(_sorted_pair(n1, n2) for n1, n2 in graph.edges)) def graph2vpoints( graph: Graph, pos: Dict[int, _Coord], cus: Optional[Dict[int, int]] = None, same: Optional[Dict[int, int]] = None, grounded: Optional[int] = None ) -> List[VPoint]: """Transform `graph` into [VPoint] objects. The vertices are mapped to links. + `pos`: Position for each vertices. + `cus`: Extra points on the specific links. 
+ `same`: Multiple joint setting. The joints are according to [`edges_view`](#edges_view). + `grounded`: The ground link of vertices. """ if cus is None: cus = {} if same is None: same = {} same_r: Dict[int, List[int]] = {} for k, v in same.items(): if v in same_r: same_r[v].append(k) else: same_r[v] = [k] tmp_list = [] ev = dict(edges_view(graph)) for i, edge in ev.items(): if i in same: # Do not connect to anyone! continue edges = set(edge) if i in same_r: for j in same_r[i]: edges.update(set(ev[j])) x, y = pos[i] links = [ f"L{link}" if link != grounded else VLink.FRAME for link in edges ] tmp_list.append(VPoint.r_joint(links, x, y)) for name in sorted(cus): link = f"L{cus[name]}" if cus[name] != grounded else VLink.FRAME x, y = pos[name] tmp_list.append(VPoint.r_joint((link,), x, y)) return tmp_list
agpl-3.0
-1,177,047,438,729,620,200
26.458564
94
0.548491
false
3.260085
false
false
false
wa3l/BookSearch
main.py
1
2709
#!/usr/bin/python """ BookSearch Module: main Author: Wael Al-Sallami Date: 2/10/2013 """ import sys, re, cmd, gen, engn, timer class Prompt(cmd.Cmd): """Search query interface""" engine = None store = None line = None prompt = "\nquery> " welcome = "\n### Welcome to BookSearch!\n### Enter your query to perform a search.\n### Enter '?' for help and 'exit' to terminate." def preloop(self): """Print intro message and write or load indices""" print self.welcome with timer.Timer() as t: self.store = gen.Store("books") print '> Request took %.03f sec.' % t.interval def default(self, line): """Handle search query""" query = self.parse_query(line) with timer.Timer() as t: if not self.engine: self.engine = engn.Engine(self.store) answers = self.engine.search(query) self.print_answers(answers) print '\n> Search took %.06f sec.' % t.interval def parse_query(self, line): """Parse all three kinds of query terms into a dict""" query = {'bool': [], 'phrase': [], 'wild': []} self.line = re.sub(r'[_]|[^\w\s"*]', ' ', line.strip().lower()) query = self.parse_wildcard(query) query = self.parse_phrase(query) query = self.parse_boolean(query) return query def parse_wildcard(self, query): """Extract wildcard queries into query{}""" regex = r"([\w]+)?([\*])([\w]+)?" 
query['wild'] = re.findall(regex, self.line) if query['wild']: self.line = re.sub(regex, '', self.line) for i in range(len(query['wild'])): query['wild'][i] = filter(len, query['wild'][i]) return query def parse_phrase(self, query): """extract phrase query terms into query{}""" regex = r'\w*"([^"]*)"' query['phrase'] = re.findall(regex, self.line) if query['phrase']: self.line = re.sub(regex, '', self.line) return query def parse_boolean(self, query): """Consider whatever is left as boolean query terms""" query['bool'] = self.line.split() return query def print_answers(self, answers): """Print search results""" if answers: print "\n> Found %d search results:" % len(answers), for doc in answers: print doc, else: print "\n> Sorry, your search for: (%s) did not yield any results :(" % line def emptyline(self): """Called when user doesn't enter anything""" print "\n> Enter your search query or type '?' for help." def do_exit(slef, line): """Type 'exit' to terminate the program""" return True def do_EOF(self, line): print '' # print new line for prettier exits return True def main(): Prompt().cmdloop() if __name__ == '__main__': main()
unlicense
5,852,404,163,806,000,000
24.8
134
0.601698
false
3.490979
false
false
false
huangziwei/MorphoPy
morphopy/_utils/representation.py
1
2273
import copy import random import numpy as np import pandas as pd from .graph_utils import get_tips, get_root def get_persistence_barcode(G, dist='radDist'): if dist == 'radDist': f = _radial_dist_to_soma else: raise NotImplementedError return _get_persistence_barcode(G, f) def _get_persistence_barcode(G, f): """ Creates the persistence barcode for the graph G. The algorithm is taken from _Quantifying topological invariants of neuronal morphologies_ from Lida Kanari et al (https://arxiv.org/abs/1603.08432). :param G: networkx.DiGraph :param f: A real-valued function defined over the set of nodes in G. :return: pandas.DataFrame with entries node_id | type | birth | death . Where birth and death are defined in distance from soma according to the distance function f. """ # Initialization L = get_tips(G) R = get_root(G) D = dict(node_id=[], type=[], birth=[], death=[]) # holds persistence barcode v = dict() # holds 'aging' function of visited nodes defined by f # active nodes A = list(copy.copy(L)) # set the initial value for leaf nodes for l in L: v[l] = f(G, l) while R not in A: for l in A: p = G.predecessors(l)[0] C = G.successors(p) # if all children are active if all(c in A for c in C): # choose randomly from the oldest children age = np.array([v[c] for c in C]) indices = np.where(age == age[np.argmax(age)])[0] c_m = C[random.choice(indices)] A.append(p) for c_i in C: A.remove(c_i) if c_i != c_m: D['node_id'].append(c_i) D['type'].append(G.node[c_i]['type']) D['birth'].append(v[c_i]) D['death'].append(f(G, p)) v[p] = v[c_m] D['node_id'].append(R) D['type'].append(1) D['birth'].append(v[R]) D['death'].append(f(G, R)) return pd.DataFrame(D) def _radial_dist_to_soma(G, n): root_pos = G.node[1]['pos'] return np.sqrt(np.dot(G.node[n]['pos'] - root_pos, G.node[n]['pos'] - root_pos))
mit
1,829,698,795,874,876,000
28.907895
112
0.549054
false
3.412913
false
false
false
wikimedia/operations-debs-contenttranslation-hfst
test/tools/fsmbook-tests/python-scripts/EinsteinsPuzzle.hfst.py
2
5097
exec(compile(open('CompileOptions.py', 'rb').read(), 'CompileOptions.py', 'exec')) # matches all symbols pi = hfst.read_att_string("0 0 @_IDENTITY_SYMBOL_@ @_IDENTITY_SYMBOL_@\n\ 0") # matches all symbols except "|" pi_house = hfst.read_att_string("0 0 @_IDENTITY_SYMBOL_@ @_IDENTITY_SYMBOL_@\n\ 0 1 | |\n\ 0") # The possible values of a house color (spaces are added for better readability) Color = hfst.fst(["blue ", "green ", "red ", "white ", "yellow "]) # The possible values of nationality Nationality = hfst.fst(["Dane ", "Englishman ", "German ", "Swede ", "Norwegian "]) # The possible values of a drink Drink = hfst.fst(["bier ", "coffee ", "milk ", "tea ", "water "]) # The possible values of cigarettes Cigarette = hfst.fst(["Blend ", "BlueMaster ", "Dunhill ", "PallMall ", "Prince "]) # The possible values of animals Pet = hfst.fst(["birds ", "cats ", "dogs ", "fish ", "horses "]) Color.write_to_file('Color.py.hfst') Nationality.write_to_file('Nationality.py.hfst') Drink.write_to_file('Drink.py.hfst') Cigarette.write_to_file('Cigarette.py.hfst') Pet.write_to_file('Pet.py.hfst') # Convert all strings into transducers vars={} for i in ("blue ", "green ", "red ", "white ", "yellow ", "Dane ", "Englishman ", "German ", "Swede ", "Norwegian ", "bier ", "coffee ", "milk ", "tea ", "water ", "Blend ", "BlueMaster ", "Dunhill ", "PallMall ", "Prince ", "birds ", "cats ", "dogs ", "fish ", "horses "): tr = hfst.fst(i) vars[i] = tr # Separator character (spaces are included for better readability) HouseSeparator = hfst.fst("| ") # House contains the consecutive values "ColorNationalityDrinkCigarettePet" House = hfst.concatenate((Color, Nationality, Drink, Cigarette, Pet)) # Houses is "House| House| House| House| House" tmp = hfst.concatenate((House, HouseSeparator)) tmp.repeat_n(4) Houses = hfst.concatenate((tmp, House)) # 1. The Englishman lives in the red house. 
# Since color and nationality are adjacent, it is enough to accept all strings that contain "red Englishman" tmp = hfst.fst("red Englishman") C1 = hfst.concatenate((pi, tmp, pi)) # .* "red Englishman" .* # 2. The Swede keeps dogs. # Now we must match the string between Swede and dog inside the same house. tmp1 = hfst.fst('Swede') tmp2 = hfst.fst('dogs') C2 = hfst.concatenate((pi, tmp1, pi_house, tmp2, pi)) # .* "Swede" .* "dogs" .* # 3. The Dane drinks tea C3 = hfst.concatenate((pi, vars['Dane '], vars['tea '], pi)) # 4. The green house is just to the left of the white one C4 = hfst.concatenate((pi, vars['green '], pi_house, HouseSeparator, vars['white '], pi)) # 5. The owner of the green house drinks coffee C5 = hfst.concatenate((pi, vars['green '], pi_house, vars['coffee '], pi)) # 6. The Pall Mall smoker keeps birds C6 = hfst.concatenate((pi, vars['PallMall '], vars['birds '], pi)) # 7. The owner of the yellow house smokes Dunhills C7 = hfst.concatenate((pi, vars['yellow '], pi_house, vars['Dunhill '], pi)) # 8. The man in the center house drinks milk C8 = hfst.concatenate((pi_house, HouseSeparator, pi_house, HouseSeparator, pi_house, vars['milk '], pi_house, HouseSeparator, pi_house, HouseSeparator, pi_house)) # 9. The Norwegian lives in the first house C9 = hfst.concatenate((pi_house, vars['Norwegian '], pi)) # 10. The Blend smoker has a neighbor who keeps cats C101 = hfst.concatenate((pi, vars['Blend '], Pet, HouseSeparator, pi_house, vars['cats '], pi)) C102 = hfst.concatenate((pi, vars['cats '], pi_house, HouseSeparator, pi_house, vars['Blend '], pi)) C10 = hfst.disjunct((C101, C102)) C10.minimize() # 11. The man who keeps horses lives next to the Dunhill smoker C111 = hfst.concatenate((pi, vars['horses '], HouseSeparator, pi_house, vars['Dunhill '], pi)) C112 = hfst.concatenate((pi, vars['Dunhill '], pi_house, HouseSeparator, pi_house, vars['horses '], pi)) C11 = hfst.disjunct((C111, C112)) C11.minimize() # 12. The man who smokes Blue Masters drinks bier. 
C12 = hfst.concatenate((pi, vars['bier '], vars['BlueMaster '], pi)) # 13. The German smokes Prince C13 = hfst.concatenate((pi, vars['German '], Drink, vars['Prince '], pi)) # 14. The Norwegian lives next to the blue house C141 = hfst.concatenate((pi, vars['Norwegian '], pi_house, HouseSeparator, vars['blue '], pi)) C142 = hfst.concatenate((pi, vars['blue '], pi_house, HouseSeparator, Color, vars['Norwegian '], pi)) C14 = hfst.disjunct((C141, C142)) C14.minimize() # 15. The Blend smoker has a neighbor who drinks water C151 = hfst.concatenate((pi, vars['Blend '], Pet, HouseSeparator, pi_house, vars['water '], pi)) C152 = hfst.concatenate((pi, vars['water '], pi_house, HouseSeparator, pi_house, vars['Blend '], pi)) C15 = hfst.disjunct((C151, C152)) C15.minimize() # Let's minimize the constraint transducers to carry out conjunction more efficiently: Result = Houses for i in (C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, C11, C12, C13, C14, C15): i.minimize() # Let's conjunct Houses with the constraints one by one: Result.conjunct(i) Result.minimize() Result.write_to_file('Result')
gpl-3.0
5,359,581,569,216,738,000
39.133858
108
0.671179
false
2.753647
false
false
false
tripatheea/Simple-IMDB-Scraper
scraper.py
1
2978
import urllib2 import mechanize from bs4 import BeautifulSoup import os def get_html(url): header = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', 'Accept-Encoding': 'none', 'Accept-Language': 'en-US,en;q=0.8', 'Connection': 'keep-alive'} request = urllib2.Request(url, headers=header) try: br = mechanize.Browser() response = br.open(request) return response.get_data() except urllib2.HTTPError, e: print e.fp.read() def get_info(title): movie = {} url = "http://www.imdb.com/title/{}".format(title) site = BeautifulSoup(get_html(url)) top_bar = site.find(id="overview-top") header = top_bar.find("h1", {"class": "header" }) movie['name'] = header.find("span", {"itemprop": "name"}).string movie['year'] = header.find("a").string info_bar = top_bar.find("div", {"class": "infobar"}) try: movie['content_rating'] = info_bar.find("span", {"itemprop": "contentRating"})['content'] except TypeError as e: movie['content_rating'] = "N/A" movie['duration'] = info_bar.find("time").string index = movie['duration'].find(" min") movie['duration'] = movie['duration'][:index] movie['release_date'] = info_bar.find("meta", {"itemprop": "datePublished"})['content'] movie['rating'] = top_bar.find("div", {"class": "star-box"}).find("div", {"class": "star-box-giga-star"}).string movie['director'] = top_bar.find("div", {"itemprop": "director"}).find("span", {"itemprop": "name"}).string movie['actors'] = [tag.string for tag in top_bar.find("div", {"itemprop": "actors"}).find_all("span", {"itemprop": "name"})] movie['genre'] = [tag.string for tag in site.find("div", {"id": "titleStoryLine"}).find("div", {"itemprop": "genre"}).find_all("a")] details = site.find("div", {"id": "titleDetails"}) movie['country'] = details.find("h4", text="Country:").parent.find("a").text # Find award wins and 
nominations. awards = site.find("div", {"id": "titleAwardsRanks"}).find_all("span", {"itemprop": "awards"}) awards = [award.find("b").text for award in awards if award.find("b") is not None] oscar_nominations = 0 oscar_wins = 0 for award in awards: if "Nominated" in award: start = "for " end = "Oscar" oscar_nominations = int(award[award.find(start) + len(start) : award.rfind(end)]) elif "Won" in award: start = "Won" end ="Oscar" oscar_wins = int(award[award.find(start) + len(start) : award.rfind(end)]) movie['oscar_nominations'] = oscar_nominations movie['oscar_wins'] = oscar_wins for prop in movie: if not str(type(movie[prop])) == "<type 'list'>": # :'( movie[prop] = str(movie[prop]).strip(" \t\n\r") else: movie[prop] = str([ele.encode("utf-8").strip(" \t\n\r") for ele in movie[prop]]) return movie # print get_info(imdb_movie_id)
mit
8,651,847,595,360,548,000
32.088889
133
0.636333
false
2.712204
false
false
false
Jumpscale/jumpscale6_core
lib/JumpScale/baselib/codetools/TemplateEngine.py
1
2642
# from JumpScale.core.System import System from JumpScale import j import urllib class TemplateEngine(object): def __init__(self): self.replaceDict = {}##dict(string,string) #System ##System def add(self, search, replace,variants=False): if not j.basetype.string.check(search): raise RuntimeError("only strings can be searched for when using template engine, param search is not a string") if not j.basetype.string.check(replace): raise RuntimeError("can only replace with strings when using template engine, param replace is not a string") self.replaceDict[search] = replace if variants: self.replaceDict[search+"s"] =self.makePlural(replace) self.replaceDict[self.capitalize(search)] =self.capitalize(replace) self.replaceDict[self.capitalize(search+"s")] =self.makePlural(self.capitalize(replace)) def capitalize(self,txt): return txt[0].upper()+txt[1:] def makePlural(self,txt): if txt[-1]=="y": txt=txt[:-1]+"ies" else: txt=txt+"s" return txt def __replace(self,body): for search in self.replaceDict.keys(): replace = self.replaceDict[search] body = body.replace("{" + search + "}", replace) body = body.replace("{:urlencode:" + search + "}", urllib.quote(replace, '')) return body def replace(self, body, replaceCount = 3): for i in range(replaceCount): body = self.__replace(body) return body def replaceInsideFile(self, filePath, replaceCount = 3): self.__createFileFromTemplate(filePath, filePath, replaceCount) def writeFileFromTemplate(self,templatePath,targetPath): self.__createFileFromTemplate(templatePath, targetPath) def getOutputFromTemplate(self,templatePath): originalFile = j.system.fs.fileGetContents(templatePath) modifiedString = self.replace(originalFile, replaceCount=3) return modifiedString def __createFileFromTemplate(self, templatePath, targetPath, replaceCount = 3): originalFile = j.system.fs.fileGetContents(templatePath) modifiedString = self.replace(originalFile, replaceCount) j.system.fs.writeFile(targetPath, modifiedString) def reset(self): 
self.replaceDict={} if __name__ == '__main__': te=TemplateEngine() te.add("login", "kristof") te.add("passwd","root") text="This is a test file for {login} with a passwd:{passwd}" print te.replace(text)
bsd-2-clause
-7,447,887,505,872,092,000
37.289855
129
0.635125
false
4.121685
false
false
false
ljean/djaloha
djaloha/widgets.py
1
3739
# -*- coding: utf-8 -*- """widgets to be used in a form""" from bs4 import BeautifulSoup from django.forms import Media from floppyforms.widgets import TextInput from djaloha import settings class AlohaInput(TextInput): """ Text widget with aloha html editor requires floppyforms to be installed """ template_name = 'djaloha/alohainput.html' def __init__(self, *args, **kwargs): # for compatibility with previous versions kwargs.pop('text_color_plugin', None) self.aloha_plugins = kwargs.pop('aloha_plugins', None) self.extra_aloha_plugins = kwargs.pop('extra_aloha_plugins', None) self.aloha_init_url = kwargs.pop('aloha_init_url', None) super(AlohaInput, self).__init__(*args, **kwargs) def _get_media(self): """ return code for inserting required js and css files need aloha , plugins and initialization """ try: aloha_init_url = self.aloha_init_url or settings.init_url() aloha_version = settings.aloha_version() aloha_plugins = self.aloha_plugins if not aloha_plugins: aloha_plugins = settings.plugins() if self.extra_aloha_plugins: aloha_plugins = tuple(aloha_plugins) + tuple(self.extra_aloha_plugins) css = { 'all': ( "{0}/css/aloha.css".format(aloha_version), ) } javascripts = [] if not settings.skip_jquery(): javascripts.append(settings.jquery_version()) #if aloha_version.startswith('aloha.0.22.') or aloha_version.startswith('aloha.0.23.'): javascripts.append("{0}/lib/require.js".format(aloha_version)) javascripts.append(aloha_init_url) javascripts.append( u'{0}/lib/aloha.js" data-aloha-plugins="{1}'.format(aloha_version, u",".join(aloha_plugins)) ) javascripts.append('djaloha/js/djaloha-init.js') return Media(css=css, js=javascripts) except Exception, msg: print '## AlohaInput._get_media Error ', msg media = property(_get_media) def value_from_datadict(self, data, files, name): """return value""" value = super(AlohaInput, self).value_from_datadict(data, files, name) return self.clean_value(value) def clean_value(self, origin_value): """This apply several fixes on the 
html""" return_value = origin_value if return_value: # don't manage None values callbacks = (self._fix_br, self._fix_img, ) for callback in callbacks: return_value = callback(return_value) return return_value def _fix_br(self, content): """ This change the <br> tag into <br /> in order to avoid empty lines at the end in HTML4 for example for newsletters """ return content.replace('<br>', '<br />') def _fix_img(self, content): """Remove the handlers generated on the image for resizing. It may be not removed by editor in some cases""" soup = BeautifulSoup(content, 'html.parser') wrapped_img = soup.select(".ui-wrapper img") if len(wrapped_img) > 0: img = wrapped_img[0] # Remove the ui-resizable class img_classes = img.get('class', None) or [] img_classes.remove('ui-resizable') img['class'] = img_classes # Replace the ui-wrapper by the img wrapper = soup.select(".ui-wrapper")[0] wrapper.replace_with(img) content = unicode(soup) return content
bsd-3-clause
-2,580,920,205,617,877,500
32.088496
116
0.581439
false
3.890739
false
false
false
bswartz/cinder
cinder/tests/unit/test_rpc.py
1
4298
# Copyright 2015 Intel Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from cinder.objects import base from cinder import rpc from cinder import test class FakeAPI(rpc.RPCAPI): RPC_API_VERSION = '1.5' TOPIC = 'cinder-scheduler-topic' BINARY = 'cinder-scheduler' class RPCAPITestCase(test.TestCase): """Tests RPCAPI mixin aggregating stuff related to RPC compatibility.""" def setUp(self): super(RPCAPITestCase, self).setUp() # Reset cached version pins rpc.LAST_RPC_VERSIONS = {} rpc.LAST_OBJ_VERSIONS = {} @mock.patch('cinder.objects.Service.get_minimum_rpc_version', return_value='1.2') @mock.patch('cinder.objects.Service.get_minimum_obj_version', return_value='1.4') @mock.patch('cinder.rpc.get_client') def test_init(self, get_client, get_min_obj, get_min_rpc): def fake_get_client(target, version_cap, serializer): self.assertEqual(FakeAPI.TOPIC, target.topic) self.assertEqual(FakeAPI.RPC_API_VERSION, target.version) self.assertEqual('1.2', version_cap) self.assertEqual('1.4', serializer.version_cap) get_client.side_effect = fake_get_client FakeAPI() @mock.patch('cinder.objects.Service.get_minimum_rpc_version', return_value='liberty') @mock.patch('cinder.objects.Service.get_minimum_obj_version', return_value='liberty') @mock.patch('cinder.rpc.get_client') def test_init_liberty_caps(self, get_client, get_min_obj, get_min_rpc): def fake_get_client(target, version_cap, serializer): self.assertEqual(FakeAPI.TOPIC, target.topic) self.assertEqual(FakeAPI.RPC_API_VERSION, 
target.version) self.assertEqual(rpc.LIBERTY_RPC_VERSIONS[FakeAPI.BINARY], version_cap) self.assertEqual('liberty', serializer.version_cap) get_client.side_effect = fake_get_client FakeAPI() @mock.patch('cinder.objects.Service.get_minimum_rpc_version', return_value=None) @mock.patch('cinder.objects.Service.get_minimum_obj_version', return_value=None) @mock.patch('cinder.objects.base.CinderObjectSerializer') @mock.patch('cinder.rpc.get_client') def test_init_none_caps(self, get_client, serializer, get_min_obj, get_min_rpc): """Test that with no service latest versions are selected.""" FakeAPI() serializer.assert_called_once_with(base.OBJ_VERSIONS.get_current()) get_client.assert_called_once_with(mock.ANY, version_cap=FakeAPI.RPC_API_VERSION, serializer=serializer.return_value) self.assertTrue(get_min_obj.called) self.assertTrue(get_min_rpc.called) @mock.patch('cinder.objects.Service.get_minimum_rpc_version') @mock.patch('cinder.objects.Service.get_minimum_obj_version') @mock.patch('cinder.rpc.get_client') @mock.patch('cinder.rpc.LAST_RPC_VERSIONS', {'cinder-scheduler': '1.4'}) @mock.patch('cinder.rpc.LAST_OBJ_VERSIONS', {'cinder-scheduler': '1.3'}) def test_init_cached_caps(self, get_client, get_min_obj, get_min_rpc): def fake_get_client(target, version_cap, serializer): self.assertEqual(FakeAPI.TOPIC, target.topic) self.assertEqual(FakeAPI.RPC_API_VERSION, target.version) self.assertEqual('1.4', version_cap) self.assertEqual('1.3', serializer.version_cap) get_client.side_effect = fake_get_client FakeAPI() self.assertFalse(get_min_obj.called) self.assertFalse(get_min_rpc.called)
apache-2.0
-1,170,877,304,757,852,000
41.554455
79
0.647278
false
3.737391
true
false
false
hkaushalya/IntroDataScienceCoursera
Ass1_TwitterSentimentAnalysis/rawcode/happiest_state.py
1
4500
import sys import json #try: import simplejson as json #except ImportError: import json DEBUG = True STATES = { 'AK': 'Alaska', 'AL': 'Alabama', 'AR': 'Arkansas', 'AS': 'American Samoa', 'AZ': 'Arizona', 'CA': 'California', 'CO': 'Colorado', 'CT': 'Connecticut', 'DC': 'District of Columbia', 'DE': 'Delaware', 'FL': 'Florida', 'GA': 'Georgia', 'GU': 'Guam', 'HI': 'Hawaii', 'IA': 'Iowa', 'ID': 'Idaho', 'IL': 'Illinois', 'IN': 'Indiana', 'KS': 'Kansas', 'KY': 'Kentucky', 'LA': 'Louisiana', 'MA': 'Massachusetts', 'MD': 'Maryland', 'ME': 'Maine', 'MI': 'Michigan', 'MN': 'Minnesota', 'MO': 'Missouri', 'MP': 'Northern Mariana Islands', 'MS': 'Mississippi', 'MT': 'Montana', 'NA': 'National', 'NC': 'North Carolina', 'ND': 'North Dakota', 'NE': 'Nebraska', 'NH': 'New Hampshire', 'NJ': 'New Jersey', 'NM': 'New Mexico', 'NV': 'Nevada', 'NY': 'New York', 'OH': 'Ohio', 'OK': 'Oklahoma', 'OR': 'Oregon', 'PA': 'Pennsylvania', 'PR': 'Puerto Rico', 'RI': 'Rhode Island', 'SC': 'South Carolina', 'SD': 'South Dakota', 'TN': 'Tennessee', 'TX': 'Texas', 'UT': 'Utah', 'VA': 'Virginia', 'VI': 'Virgin Islands', 'VT': 'Vermont', 'WA': 'Washington', 'WI': 'Wisconsin', 'WV': 'West Virginia', 'WY': 'Wyoming' } def loadscores(fp): scores = {} for line in fp: term, score = line.split("\t") # The file is tab-delimited. "\t" means "tab character" scores[term] = int(score) # Convert the score to an integer. 
#if (DEBUG) #print scores.items() # Print every (term, score) pair in the dictionary return scores def calcscore(s_str, scores_dic): twt_score_int = 0 #print s_str for wd in s_str.split(' '): word = wd.lower() #print 'looking score for ', word.lower() #print scores.keys() if word in scores_dic.keys(): sc = scores_dic[word] twt_score_int += sc #print word , ' found in dic with score of ', sc #print 'total score = ', twt_score return twt_score_int def findlocation_using_place(place_dic): ''' if 'location' is not null, try to find location ''' pass def find_location_using_cdt(cdt_lst): ''' find location based on 'coordinates' if 'location' is null ''' pass def tweetscore(tw_file, sent_file): scores_dic = loadscores(sent_file) json_dic = {} scores_lst = [] sc = 0 #score for each tweet state_scores = {} for line in tw_file: sc = 0 #reinit try: json_dic = json.loads(line) except ValueError: continue if 'lang' in json_dic: lang = json_dic[u'lang'] if (lang.encode('utf-8').find('en') != -1): unicode_string = json_dic[u'text'] encoded_string = unicode_string.encode('utf-8') #print encoded_string sc = calcscore(encoded_string, scores_dic) # now find location place_dic = json_dic[u'place'] if (not place_dic): continue #print place_dic country = place_dic[u'country'] if (country): if ( (country.encode('utf-8').find('US') == -1) and (country.encode('utf-8').find('United States') == -1) ): continue state = place_dic[u'name'] if (not state): continue #print (country, ',', state) if (state in state_scores.keys()): state_scores[state] += sc else: state_scores[state] = sc hap_state = '' for key, value in sorted(state_scores.iteritems(), key=lambda (k,v): (v,k), reverse=True): #print "%s %s" % (key, value) hap_state = key break #return scores_lst def main(): sent_file = open(sys.argv[1]) tweet_file = open(sys.argv[2]) tweetscore(tweet_file, sent_file) if __name__ == '__main__': main()
apache-2.0
-7,029,859,394,640,567,000
25.946108
96
0.473556
false
3.345725
false
false
false
philanthropy-u/edx-platform
common/djangoapps/philu_commands/management/commands/fetch_course_structures.py
1
1681
""" Django management command to fetch course structures for given course ids """ import json from logging import getLogger from django.core.management.base import BaseCommand from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey from philu_commands.helpers import generate_course_structure log = getLogger(__name__) class Command(BaseCommand): """ A command to fetch course structures for provided course ids """ help = """ This command prints the course structure for all the course ids given in arguments example: manage.py ... fetch_course_structures course_id_1 course_id_2 """ def add_arguments(self, parser): parser.add_argument('course_ids', nargs='+', help='Course ids for which we require the course structures.') def handle(self, *args, **options): course_structures = [] course_ids = options['course_ids'] for course_id in course_ids: try: course_key = CourseKey.from_string(course_id) except InvalidKeyError: log.error('Invalid course id provided: {}'.format(course_id)) continue course_structure = generate_course_structure(course_key) course_structure['course_id'] = course_id course_structures.append(course_structure) if course_structures: # pylint: disable=superfluous-parens print ('-' * 80) print('Course structures for given course ids: ') print(json.dumps(course_structures)) print('-' * 80) else: log.error('All course ids provided are invalid')
agpl-3.0
5,931,398,817,853,859,000
31.326923
115
0.64069
false
4.555556
false
false
false
invitecomm/asterisk-ivr
pigeonhole/agi_record.py
1
1378
#!/usr/bin/python """ Example to get and set variables via AGI. You can call directly this script with AGI() in Asterisk dialplan. """ from asterisk.agi import * agi = AGI() agi.answer() agi.verbose("python agi started") # record file <filename> <format> <escape_digits> <timeout> [<offset samples>] [<BEEP>] [<s=silence>] # record_file(self, filename, format='gsm', escape_digits='#', timeout=20000, offset=0, beep='beep') # agi.record_file('/tmp/test2.ulaw','ulaw') #RECORD FILE $tmpname $format \"$intkey\" \"$abs_timeout\" $beep \"$silence\"\n"; filename = '/tmp/test5' format = 'ulaw' intkey = '#' timeout = 20000 beep = 'beep' offset = '0' silence = 's=5' agi.execute('RECORD FILE', (filename), (format), (intkey), (timeout), (offset), (beep), (silence)) # agi.record_file((filename), (format)) """ while True: agi.stream_file('/var/lib/asterisk/sounds/en/tt-codezone') result = agi.wait_for_digit(-1) agi.verbose("got digit %s" % result) if result.isdigit(): agi.say_number(result) else: agi.verbose("bye!") agi.hangup() sys.exit() """ # result = agi.wait_for_digit() # agi.verbose("got digit %s" % result) # Get variable environment extension = agi.env['agi_extension'] # Get variable in dialplan phone_exten = agi.get_variable('PHONE_EXTEN') # Set variable, it will be available in dialplan agi.set_variable('EXT_CALLERID', (digit))
gpl-3.0
6,374,965,588,502,300,000
24.054545
101
0.669086
false
2.750499
false
false
false
jschaf/ibolcdb
ibolc/person/models.py
2
1593
from ibolc.database import ( Column, db, Email, Model, PhoneNumber, ReferenceCol, relationship, SSN, SurrogatePK, Zipcode ) # pylint: disable=too-few-public-methods class State(Model, SurrogatePK): __tablename__ = 'state' code = Column(db.String(2), nullable=False) name = Column(db.String, nullable=False) def __repr__(self): return "<State({})>".format(self.name) class Address(Model, SurrogatePK): __tablename__ = 'address' address1 = Column(db.String, nullable=False) address2 = Column(db.String) address3 = Column(db.String) city = Column(db.String, nullable=False) state_id = ReferenceCol('state') zipcode = Column(Zipcode, nullable=False) state = relationship('State') def __repr__(self): return "<Address({}...)>".format(self.address1[:10]) class Person(Model, SurrogatePK): __tablename__ = 'person' first_name = Column(db.String, nullable=False) middle_name = Column(db.String) last_name = Column(db.String, nullable=False) ssn = Column(SSN, nullable=False) dob = Column(db.Date, nullable=False) country_id = ReferenceCol('country') country = relationship('Country') address_id = ReferenceCol('address') address = relationship('Address') cell_phone = Column(PhoneNumber) email = Column(Email, nullable=False) type = Column(db.String) __mapper_args__ = { 'polymorphic_identity': 'person', 'polymorphic_on': type } def __repr__(self): return "<Person({})>".format(self.last_name)
bsd-3-clause
8,387,545,558,255,241,000
25.55
60
0.633396
false
3.604072
false
false
false
nrgaway/qubes-tools
builder-tools/libs/dialog.py
1
150374
# dialog.py --- A Python interface to the ncurses-based "dialog" utility # -*- coding: utf-8 -*- # # Copyright (C) 2002, 2003, 2004, 2009, 2010, 2013 Florent Rougon # Copyright (C) 2004 Peter Åstrand # Copyright (C) 2000 Robb Shecter, Sultanbek Tezadov # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, # MA 02110-1301 USA. """Python interface to dialog-like programs. This module provides a Python interface to dialog-like programs such as 'dialog' and 'Xdialog'. It provides a Dialog class that retains some parameters such as the program name and path as well as the values to pass as DIALOG* environment variables to the chosen program. For a quick start, you should look at the simple_example.py file that comes with pythondialog. It is a very simple and straightforward example using a few basic widgets. Then, you could study the demo.py file that illustrates most features of pythondialog, or more directly dialog.py. See the Dialog class documentation for general usage information, list of available widgets and ways to pass options to dialog. 
Notable exceptions ------------------ Here is the hierarchy of notable exceptions raised by this module: error ExecutableNotFound BadPythonDialogUsage PythonDialogSystemError PythonDialogOSError PythonDialogIOError (should not be raised starting from Python 3.3, as IOError becomes an alias of OSError) PythonDialogErrorBeforeExecInChildProcess PythonDialogReModuleError UnexpectedDialogOutput DialogTerminatedBySignal DialogError UnableToCreateTemporaryDirectory UnableToRetrieveBackendVersion UnableToParseBackendVersion UnableToParseDialogBackendVersion InadequateBackendVersion PythonDialogBug ProbablyPythonBug As you can see, every exception 'exc' among them verifies: issubclass(exc, error) so if you don't need fine-grained error handling, simply catch 'error' (which will probably be accessible as dialog.error from your program) and you should be safe. Changed in version 2.12: PythonDialogIOError is now a subclass of PythonDialogOSError in order to help with the transition from IOError to OSError in the Python language. With this change, you can safely replace "except PythonDialogIOError" clauses with "except PythonDialogOSError" even if running under Python < 3.3. """ from __future__ import with_statement, unicode_literals, print_function import collections from itertools import imap from itertools import izip from io import open import locale _VersionInfo = collections.namedtuple( "VersionInfo", ("major", "minor", "micro", "releasesuffix")) class VersionInfo(_VersionInfo): def __unicode__(self): res = ".".join( ( unicode(elt) for elt in self[:3] ) ) if self.releasesuffix: res += self.releasesuffix return res def __repr__(self): # Unicode strings are not supported as the result of __repr__() # in Python 2.x (cf. <http://bugs.python.org/issue5876>). 
return b"{0}.{1}".format(__name__, _VersionInfo.__repr__(self)) version_info = VersionInfo(3, 0, 1, None) __version__ = unicode(version_info) import sys, os, tempfile, random, re, warnings, traceback from contextlib import contextmanager from textwrap import dedent # This is not for calling programs, only to prepare the shell commands that are # written to the debug log when debugging is enabled. try: from shlex import quote as _shell_quote except ImportError: def _shell_quote(s): return "'%s'" % s.replace("'", "'\"'\"'") # Exceptions raised by this module # # When adding, suppressing, renaming exceptions or changing their # hierarchy, don't forget to update the module's docstring. class error(Exception): """Base class for exceptions in pythondialog.""" def __init__(self, message=None): self.message = message def __unicode__(self): return self.complete_message() def __repr__(self): # Unicode strings are not supported as the result of __repr__() # in Python 2.x (cf. <http://bugs.python.org/issue5876>). return b"{0}.{1}({2!r})".format(__name__, self.__class__.__name__, self.message) def complete_message(self): if self.message: return "{0}: {1}".format(self.ExceptionShortDescription, self.message) else: return self.ExceptionShortDescription ExceptionShortDescription = "{0} generic exception".format("pythondialog") # For backward-compatibility # # Note: this exception was not documented (only the specific ones were), so # the backward-compatibility binding could be removed relatively easily. 
PythonDialogException = error class ExecutableNotFound(error): """Exception raised when the dialog executable can't be found.""" ExceptionShortDescription = "Executable not found" class PythonDialogBug(error): """Exception raised when pythondialog finds a bug in his own code.""" ExceptionShortDescription = "Bug in pythondialog" # Yeah, the "Probably" makes it look a bit ugly, but: # - this is more accurate # - this avoids a potential clash with an eventual PythonBug built-in # exception in the Python interpreter... class ProbablyPythonBug(error): """Exception raised when pythondialog behaves in a way that seems to \ indicate a Python bug.""" ExceptionShortDescription = "Bug in python, probably" class BadPythonDialogUsage(error): """Exception raised when pythondialog is used in an incorrect way.""" ExceptionShortDescription = "Invalid use of pythondialog" class PythonDialogSystemError(error): """Exception raised when pythondialog cannot perform a "system \ operation" (e.g., a system call) that should work in "normal" situations. This is a convenience exception: PythonDialogIOError, PythonDialogOSError and PythonDialogErrorBeforeExecInChildProcess all derive from this exception. As a consequence, watching for PythonDialogSystemError instead of the aformentioned exceptions is enough if you don't need precise details about these kinds of errors. Don't confuse this exception with Python's builtin SystemError exception. """ ExceptionShortDescription = "System error" class PythonDialogOSError(PythonDialogSystemError): """Exception raised when pythondialog catches an OSError exception that \ should be passed to the calling program.""" ExceptionShortDescription = "OS error" class PythonDialogIOError(PythonDialogOSError): """Exception raised when pythondialog catches an IOError exception that \ should be passed to the calling program. This exception should not be raised starting from Python 3.3, as the built-in exception IOError becomes an alias of OSError. 
""" ExceptionShortDescription = "IO error" class PythonDialogErrorBeforeExecInChildProcess(PythonDialogSystemError): """Exception raised when an exception is caught in a child process \ before the exec sytem call (included). This can happen in uncomfortable situations such as: - the system being out of memory; - the maximum number of open file descriptors being reached; - the dialog-like program being removed (or made non-executable) between the time we found it with _find_in_path and the time the exec system call attempted to execute it; - the Python program trying to call the dialog-like program with arguments that cannot be represented in the user's locale (LC_CTYPE).""" ExceptionShortDescription = "Error in a child process before the exec " \ "system call" class PythonDialogReModuleError(PythonDialogSystemError): """Exception raised when pythondialog catches a re.error exception.""" ExceptionShortDescription = "'re' module error" class UnexpectedDialogOutput(error): """Exception raised when the dialog-like program returns something not \ expected by pythondialog.""" ExceptionShortDescription = "Unexpected dialog output" class DialogTerminatedBySignal(error): """Exception raised when the dialog-like program is terminated by a \ signal.""" ExceptionShortDescription = "dialog-like terminated by a signal" class DialogError(error): """Exception raised when the dialog-like program exits with the \ code indicating an error.""" ExceptionShortDescription = "dialog-like terminated due to an error" class UnableToCreateTemporaryDirectory(error): """Exception raised when we cannot create a temporary directory.""" ExceptionShortDescription = "Unable to create a temporary directory" class UnableToRetrieveBackendVersion(error): """Exception raised when we cannot retrieve the version string of the \ dialog-like backend.""" ExceptionShortDescription = "Unable to retrieve the version of the \ dialog-like backend" class UnableToParseBackendVersion(error): """Exception raised when 
we cannot parse the version string of the \ dialog-like backend.""" ExceptionShortDescription = "Unable to parse as a dialog-like backend \ version string" class UnableToParseDialogBackendVersion(UnableToParseBackendVersion): """Exception raised when we cannot parse the version string of the dialog \ backend.""" ExceptionShortDescription = "Unable to parse as a dialog version string" class InadequateBackendVersion(error): """Exception raised when the backend version in use is inadequate \ in a given situation.""" ExceptionShortDescription = "Inadequate backend version" @contextmanager def _OSErrorHandling(): try: yield except OSError, e: raise PythonDialogOSError(unicode(e)) except IOError, e: raise PythonDialogIOError(unicode(e)) try: # Values accepted for checklists _on_cre = re.compile(r"on$", re.IGNORECASE) _off_cre = re.compile(r"off$", re.IGNORECASE) _calendar_date_cre = re.compile( r"(?P<day>\d\d)/(?P<month>\d\d)/(?P<year>\d\d\d\d)$") _timebox_time_cre = re.compile( r"(?P<hour>\d\d):(?P<minute>\d\d):(?P<second>\d\d)$") except re.error, e: raise PythonDialogReModuleError(unicode(e)) # From dialog(1): # # All options begin with "--" (two ASCII hyphens, for the benefit of those # using systems with deranged locale support). # # A "--" by itself is used as an escape, i.e., the next token on the # command-line is not treated as an option, as in: # dialog --title -- --Not an option def _dash_escape(args): """Escape all elements of 'args' that need escaping. 'args' may be any sequence and is not modified by this function. Return a new list where every element that needs escaping has been escaped. An element needs escaping when it starts with two ASCII hyphens ('--'). Escaping consists in prepending an element composed of two ASCII hyphens, i.e., the string '--'. 
""" res = [] for arg in args: if arg.startswith("--"): res.extend(("--", arg)) else: res.append(arg) return res # We need this function in the global namespace for the lambda # expressions in _common_args_syntax to see it when they are called. def _dash_escape_nf(args): # nf: non-first """Escape all elements of 'args' that need escaping, except the first one. See _dash_escape() for details. Return a new list. """ if not args: raise PythonDialogBug("not a non-empty sequence: {0!r}".format(args)) l = _dash_escape(args[1:]) l.insert(0, args[0]) return l def _simple_option(option, enable): """Turn on or off the simplest dialog Common Options.""" if enable: return (option,) else: # This will not add any argument to the command line return () # This dictionary allows us to write the dialog common options in a Pythonic # way (e.g. dialog_instance.checklist(args, ..., title="Foo", no_shadow=True)). # # Options such as --separate-output should obviously not be set by the user # since they affect the parsing of dialog's output: _common_args_syntax = { "ascii_lines": lambda enable: _simple_option("--ascii-lines", enable), "aspect": lambda ratio: _dash_escape_nf(("--aspect", unicode(ratio))), "backtitle": lambda backtitle: _dash_escape_nf(("--backtitle", backtitle)), # Obsolete according to dialog(1) "beep": lambda enable: _simple_option("--beep", enable), # Obsolete according to dialog(1) "beep_after": lambda enable: _simple_option("--beep-after", enable), # Warning: order = y, x! 
"begin": lambda coords: _dash_escape_nf( ("--begin", unicode(coords[0]), unicode(coords[1]))), "cancel_label": lambda s: _dash_escape_nf(("--cancel-label", s)), # Old, unfortunate choice of key, kept for backward compatibility "cancel": lambda s: _dash_escape_nf(("--cancel-label", s)), "clear": lambda enable: _simple_option("--clear", enable), "colors": lambda enable: _simple_option("--colors", enable), "column_separator": lambda s: _dash_escape_nf(("--column-separator", s)), "cr_wrap": lambda enable: _simple_option("--cr-wrap", enable), "create_rc": lambda filename: _dash_escape_nf(("--create-rc", filename)), "date_format": lambda s: _dash_escape_nf(("--date-format", s)), "defaultno": lambda enable: _simple_option("--defaultno", enable), "default_button": lambda s: _dash_escape_nf(("--default-button", s)), "default_item": lambda s: _dash_escape_nf(("--default-item", s)), "exit_label": lambda s: _dash_escape_nf(("--exit-label", s)), "extra_button": lambda enable: _simple_option("--extra-button", enable), "extra_label": lambda s: _dash_escape_nf(("--extra-label", s)), "help": lambda enable: _simple_option("--help", enable), "help_button": lambda enable: _simple_option("--help-button", enable), "help_label": lambda s: _dash_escape_nf(("--help-label", s)), "help_status": lambda enable: _simple_option("--help-status", enable), "help_tags": lambda enable: _simple_option("--help-tags", enable), "hfile": lambda filename: _dash_escape_nf(("--hfile", filename)), "hline": lambda s: _dash_escape_nf(("--hline", s)), "ignore": lambda enable: _simple_option("--ignore", enable), "insecure": lambda enable: _simple_option("--insecure", enable), "item_help": lambda enable: _simple_option("--item-help", enable), "keep_tite": lambda enable: _simple_option("--keep-tite", enable), "keep_window": lambda enable: _simple_option("--keep-window", enable), "max_input": lambda size: _dash_escape_nf(("--max-input", unicode(size))), "no_cancel": lambda enable: _simple_option("--no-cancel", 
enable), "nocancel": lambda enable: _simple_option("--nocancel", enable), "no_collapse": lambda enable: _simple_option("--no-collapse", enable), "no_kill": lambda enable: _simple_option("--no-kill", enable), "no_label": lambda s: _dash_escape_nf(("--no-label", s)), "no_lines": lambda enable: _simple_option("--no-lines", enable), "no_mouse": lambda enable: _simple_option("--no-mouse", enable), "no_nl_expand": lambda enable: _simple_option("--no-nl-expand", enable), "no_ok": lambda enable: _simple_option("--no-ok", enable), "no_shadow": lambda enable: _simple_option("--no-shadow", enable), "no_tags": lambda enable: _simple_option("--no-tags", enable), "ok_label": lambda s: _dash_escape_nf(("--ok-label", s)), # cf. Dialog.maxsize() "print_maxsize": lambda enable: _simple_option("--print-maxsize", enable), "print_size": lambda enable: _simple_option("--print-size", enable), # cf. Dialog.backend_version() "print_version": lambda enable: _simple_option("--print-version", enable), "scrollbar": lambda enable: _simple_option("--scrollbar", enable), "separate_output": lambda enable: _simple_option("--separate-output", enable), "separate_widget": lambda s: _dash_escape_nf(("--separate-widget", s)), "shadow": lambda enable: _simple_option("--shadow", enable), # Obsolete according to dialog(1) "size_err": lambda enable: _simple_option("--size-err", enable), "sleep": lambda secs: _dash_escape_nf(("--sleep", unicode(secs))), "stderr": lambda enable: _simple_option("--stderr", enable), "stdout": lambda enable: _simple_option("--stdout", enable), "tab_correct": lambda enable: _simple_option("--tab-correct", enable), "tab_len": lambda n: _dash_escape_nf(("--tab-len", unicode(n))), "time_format": lambda s: _dash_escape_nf(("--time-format", s)), "timeout": lambda secs: _dash_escape_nf(("--timeout", unicode(secs))), "title": lambda title: _dash_escape_nf(("--title", title)), "trace": lambda filename: _dash_escape_nf(("--trace", filename)), "trim": lambda enable: 
_simple_option("--trim", enable), "version": lambda enable: _simple_option("--version", enable), "visit_items": lambda enable: _simple_option("--visit-items", enable), "yes_label": lambda s: _dash_escape_nf(("--yes-label", s)) } def _find_in_path(prog_name): """Search an executable in the PATH. If PATH is not defined, the default path ":/bin:/usr/bin" is used. Return a path to the file or None if no readable and executable file is found. Notable exception: PythonDialogOSError """ with _OSErrorHandling(): # Note that the leading empty component in the default value for PATH # could lead to the returned path not being absolute. PATH = os.getenv("PATH", ":/bin:/usr/bin") # see the execvp(3) man page for d in PATH.split(":"): file_path = os.path.join(d, prog_name) if os.path.isfile(file_path) \ and os.access(file_path, os.R_OK | os.X_OK): return file_path return None def _path_to_executable(f): """Find a path to an executable. Find a path to an executable, using the same rules as the POSIX exec*p functions (see execvp(3) for instance). If 'f' contains a '/', it is assumed to be a path and is simply checked for read and write permissions; otherwise, it is looked for according to the contents of the PATH environment variable, which defaults to ":/bin:/usr/bin" if unset. The returned path is not necessarily absolute. Notable exceptions: ExecutableNotFound PythonDialogOSError """ with _OSErrorHandling(): if '/' in f: if os.path.isfile(f) and \ os.access(f, os.R_OK | os.X_OK): res = f else: raise ExecutableNotFound("%s cannot be read and executed" % f) else: res = _find_in_path(f) if res is None: raise ExecutableNotFound( "can't find the executable for the dialog-like " "program") return res def _to_onoff(val): """Convert boolean expressions to "on" or "off". Return: - "on" if 'val' is True, a non-zero integer, "on" or any case variation thereof; - "off" if 'val' is False, 0, "off" or any case variation thereof. 
Notable exceptions: PythonDialogReModuleError BadPythonDialogUsage """ if isinstance(val, (bool, int)): return "on" if val else "off" elif isinstance(val, basestring): try: if _on_cre.match(val): return "on" elif _off_cre.match(val): return "off" except re.error, e: raise PythonDialogReModuleError(unicode(e)) raise BadPythonDialogUsage("invalid boolean value: {0!r}".format(val)) def _compute_common_args(mapping): """Compute the list of arguments for dialog common options. Compute a list of the command-line arguments to pass to dialog from a keyword arguments dictionary for options listed as "common options" in the manual page for dialog. These are the options that are not tied to a particular widget. This allows to specify these options in a pythonic way, such as: d.checklist(<usual arguments for a checklist>, title="...", backtitle="...") instead of having to pass them with strings like "--title foo" or "--backtitle bar". Notable exceptions: None """ args = [] for option, value in mapping.items(): args.extend(_common_args_syntax[option](value)) return args def _create_temporary_directory(): """Create a temporary directory (securely). Return the directory path. 
Notable exceptions: - UnableToCreateTemporaryDirectory - PythonDialogOSError - exceptions raised by the tempfile module """ find_temporary_nb_attempts = 5 for i in xrange(find_temporary_nb_attempts): with _OSErrorHandling(): tmp_dir = os.path.join(tempfile.gettempdir(), "%s-%d" \ % ("pythondialog", random.randint(0, sys.maxsize))) try: os.mkdir(tmp_dir, 0700) except os.error: continue else: break else: raise UnableToCreateTemporaryDirectory( "somebody may be trying to attack us") return tmp_dir # Classes for dealing with the version of dialog-like backend programs if sys.hexversion >= 0x030200F0: import abc # Abstract base class class BackendVersion(): __metaclass__ = abc.ABCMeta @abc.abstractmethod def __unicode__(self): raise NotImplementedError() if sys.hexversion >= 0x030300F0: @classmethod @abc.abstractmethod def fromstring(cls, s): raise NotImplementedError() else: # for Python 3.2 @abc.abstractclassmethod def fromstring(cls, s): raise NotImplementedError() @abc.abstractmethod def __lt__(self, other): raise NotImplementedError() @abc.abstractmethod def __le__(self, other): raise NotImplementedError() @abc.abstractmethod def __eq__(self, other): raise NotImplementedError() @abc.abstractmethod def __ne__(self, other): raise NotImplementedError() @abc.abstractmethod def __gt__(self, other): raise NotImplementedError() @abc.abstractmethod def __ge__(self, other): raise NotImplementedError() else: class BackendVersion(object): pass class DialogBackendVersion(BackendVersion): """Class representing possible versions of the dialog backend. The purpose of this class is to make it easy to reliably compare between versions of the dialog backend. It encapsulates the specific details of the backend versioning scheme to allow eventual adaptations to changes in this scheme without affecting external code. The version is represented by two components in this class: the "dotted part" and the "rest". 
For instance, in the '1.2' version string, the dotted part is [1, 2] and the rest is the empty string. However, in version '1.2-20130902', the dotted part is still [1, 2], but the rest is the string '-20130902'. Instances of this class can be created with the constructor by specifying the dotted part and the rest. Alternatively, an instance can be created from the corresponding version string (e.g., '1.2-20130902') using the fromstring() class method. This is particularly useful with the result of d.backend_version(), where 'd' is a Dialog instance. Actually, the main constructor detects if its first argument is a string and calls fromstring() in this case as a convenience. Therefore, all of the following expressions are valid to create a DialogBackendVersion instance: DialogBackendVersion([1, 2]) DialogBackendVersion([1, 2], "-20130902") DialogBackendVersion("1.2-20130902") DialogBackendVersion.fromstring("1.2-20130902") If 'bv' is a DialogBackendVersion instance, unicode(bv) is a string representing the same version (for instance, "1.2-20130902"). Two DialogBackendVersion instances can be compared with the usual comparison operators (<, <=, ==, !=, >=, >). The algorithm is designed so that the following order is respected (after instanciation with fromstring()): 1.2 < 1.2-20130902 < 1.2-20130903 < 1.2.0 < 1.2.0-20130902 among other cases. Actually, the "dotted parts" are the primary keys when comparing and "rest" strings act as secondary keys. Dotted parts are compared with the standard Python list comparison and "rest" strings using the standard Python string comparison. """ try: _backend_version_cre = re.compile(r"""(?P<dotted> (\d+) (\.\d+)* ) (?P<rest>.*)$""", re.VERBOSE) except re.error, e: raise PythonDialogReModuleError(unicode(e)) def __init__(self, dotted_part_or_str, rest=""): """Create a DialogBackendVersion instance. Please see the class docstring for details. 
""" if isinstance(dotted_part_or_str, basestring): if rest: raise BadPythonDialogUsage( "non-empty 'rest' with 'dotted_part_or_str' as string: " "{0!r}".format(rest)) else: tmp = self.__class__.fromstring(dotted_part_or_str) dotted_part_or_str, rest = tmp.dotted_part, tmp.rest for elt in dotted_part_or_str: if not isinstance(elt, int): raise BadPythonDialogUsage( "when 'dotted_part_or_str' is not a string, it must " "be a sequence (or iterable) of integers; however, " "{0!r} is not an integer.".format(elt)) self.dotted_part = list(dotted_part_or_str) self.rest = rest def __repr__(self): # Unicode strings are not supported as the result of __repr__() # in Python 2.x (cf. <http://bugs.python.org/issue5876>). return b"{0}.{1}({2!r}, rest={3!r})".format( __name__, self.__class__.__name__, self.dotted_part, self.rest) def __unicode__(self): return '.'.join(imap(unicode, self.dotted_part)) + self.rest @classmethod def fromstring(cls, s): try: mo = cls._backend_version_cre.match(s) if not mo: raise UnableToParseDialogBackendVersion(s) dotted_part = [ int(x) for x in mo.group("dotted").split(".") ] rest = mo.group("rest") except re.error, e: raise PythonDialogReModuleError(unicode(e)) return cls(dotted_part, rest) def __lt__(self, other): return (self.dotted_part, self.rest) < (other.dotted_part, other.rest) def __le__(self, other): return (self.dotted_part, self.rest) <= (other.dotted_part, other.rest) def __eq__(self, other): return (self.dotted_part, self.rest) == (other.dotted_part, other.rest) # Python 3.2 has a decorator (functools.total_ordering) to automate this. def __ne__(self, other): return not (self == other) def __gt__(self, other): return not (self <= other) def __ge__(self, other): return not (self < other) def widget(func): """Decorator to mark Dialog methods that provide widgets. This allows code to perform automatic operations on these specific methods. 
    For instance, one can define a class that behaves similarly to
    Dialog, except that after every widget-producing call, it spawns a
    "confirm quit" dialog if the widget returned Dialog.ESC, and loops
    in case the user doesn't actually want to quit.

    When it is unclear whether a method should have the decorator or
    not, the return value is used to draw the line. For instance, among
    'gauge_start', 'gauge_update' and 'gauge_stop', only the last one
    has the decorator because it returns a Dialog exit code, whereas
    the first two don't return anything meaningful.

    Note:

      Some widget-producing methods return the Dialog exit code, but
      other methods return a *sequence*, the first element of which is
      the Dialog exit code; the 'retval_is_code' attribute, which is
      set by the decorator of the same name, allows to programmatically
      discover the interface a given method conforms to.

    """
    # Mark the method; consumers can test getattr(f, "is_widget", False).
    func.is_widget = True
    return func


def retval_is_code(func):
    """Decorator for Dialog widget-producing methods whose return value is \
the Dialog exit code.

    This decorator is intended for widget-producing methods whose
    return value consists solely of the Dialog exit code. When this
    decorator is *not* used on a widget-producing method, the Dialog
    exit code must be the first element of the return value.

    """
    func.retval_is_code = True
    return func


def _obsolete_property(name, replacement=None):
    # Factory returning a property getter that forwards the obsolete
    # DIALOG_<name> attribute to 'replacement' (default: 'name' itself)
    # while emitting a DeprecationWarning on every access.
    if replacement is None:
        replacement = name

    def getter(self):
        warnings.warn("the DIALOG_{name} attribute of Dialog instances is "
                      "obsolete; use the Dialog.{repl} class attribute "
                      "instead.".format(name=name, repl=replacement),
                      DeprecationWarning)
        return getattr(self, replacement)

    return getter


# Main class of the module
class Dialog(object):
    """Class providing bindings for dialog-compatible programs.

    This class allows you to invoke dialog or a compatible program in
    a pythonic way to build quickly and easily simple but nice text
    interfaces.
An application typically creates one instance of the Dialog class and uses it for all its widgets, but it is possible to concurrently use several instances of this class with different parameters (such as the background title) if you have a need for this. Public methods of the Dialog class (mainly widgets) =================================================== The Dialog class has the following methods that produce or update widgets: buildlist calendar checklist dselect editbox form fselect gauge_start gauge_update gauge_stop infobox inputbox inputmenu menu mixedform mixedgauge msgbox passwordbox passwordform pause programbox progressbox radiolist rangebox scrollbox tailbox textbox timebox treeview yesno All these widgets are described in the docstrings of the corresponding Dialog methods. Many of these descriptions are adapted from the dialog(1) manual page, with the kind permission of Thomas Dickey. The Dialog class also has a few other methods, that are not related to a particular widget: add_persistent_args backend_version (see "Checking the backend version" below) maxsize set_background_title clear (has been OBSOLETE for many years!) setBackgroundTitle (has been OBSOLETE for many years!) Passing dialog "Common Options" =============================== Every widget method has a **kwargs argument allowing you to pass dialog so-called Common Options (see the dialog(1) manual page) to dialog for this widget call. For instance, if 'd' is a Dialog instance, you can write: d.checklist(args, ..., title="A Great Title", no_shadow=True) The no_shadow option is worth looking at: 1. It is an option that takes no argument as far as dialog is concerned (unlike the "--title" option, for instance). When you list it as a keyword argument, the option is really passed to dialog only if the value you gave it evaluates to True in a boolean context. 
For instance, "no_shadow=True" will cause "--no-shadow" to be passed to dialog whereas "no_shadow=False" will cause this option not to be passed to dialog at all. 2. It is an option that has a hyphen (-) in its name, which you must change into an underscore (_) to pass it as a Python keyword argument. Therefore, "--no-shadow" is passed by giving a "no_shadow=True" keyword argument to a Dialog method (the leading two dashes are also consistently removed). Return value of widget-producing methods ======================================== Most Dialog methods that create a widget (actually: all methods that supervise the exit of a widget) return a value which fits into one of these categories: 1. The return value is a Dialog exit code (see below). 2. The return value is a sequence whose first element is a Dialog exit code (the rest of the sequence being related to what the user entered in the widget). "Dialog exit code" (high-level) ------------------------------- A Dialog exit code is a string such as "ok", "cancel", "esc", "help" and "extra", respectively available as Dialog.OK, Dialog.CANCEL, Dialog.ESC, Dialog.HELP and Dialog.EXTRA, i.e. attributes of the Dialog class. These are the standard Dialog exit codes, also known as "high-level exit codes", that user code should deal with. They indicate how/why the widget ended. Some widgets may return additional, non-standard exit codes; for instance, the inputmenu widget may return "accepted" or "renamed" in addition to the standard Dialog exit codes. When getting a Dialog exit code from a widget-producing method, user code should compare it with Dialog.OK and friends (or equivalently, with "ok" and friends) using the == operator. This allows to easily replace Dialog.OK and friends with objects that compare the same with "ok" and u"ok" in Python 2, for instance. 
"dialog exit status" (low-level) -------------------------------- The standard Dialog exit codes are derived from the dialog exit status, also known as "low-level exit code". This low-level exit code is an integer returned by the dialog backend whose different possible values are referred to as DIALOG_OK, DIALOG_CANCEL, DIALOG_ESC, DIALOG_ERROR, DIALOG_EXTRA, DIALOG_HELP and DIALOG_ITEM_HELP in the dialog(1) manual page. Note that: - DIALOG_HELP and DIALOG_ITEM_HELP both map to Dialog.HELP in pythondialog, because they both correspond to the same user action and the difference brings no information that the caller does not already have; - DIALOG_ERROR has no counterpart as a Dialog attribute, because it is automatically translated into a DialogError exception when received. In pythondialog 2.x, the low-level exit codes were available as the DIALOG_OK, DIALOG_CANCEL, etc. attributes of Dialog instances. For compatibility, the Dialog class has attributes of the same names mapped to Dialog.OK, Dialog.CANCEL, etc., but their use is deprecated as of pythondialog 3.0. Adding a Extra button ===================== With most widgets, it is possible to add a supplementary button called "Extra button". To do that, you simply have to use 'extra_button=True' (keyword argument) in the widget call. By default, the button text is "Extra", but you can specify another string with the 'extra_label' keyword argument. When the widget exits, you know if the Extra button was pressed if the Dialog exit code is Dialog.EXTRA ("extra"). Normally, the rest of the return value is the same as if the widget had been closed with OK. Therefore, if the widget normally returns a list of three integers, for instance, you can expect to get the same information if Extra is pressed instead of OK. Providing on-line help facilities ================================= With most dialog widgets, it is possible to provide online help to the final user. 
At the time of this writing (October 2013), there are three main options governing these help facilities in the dialog backend: --help-button, --item-help and --help-status. Since dialog 1.2-20130902, there is also --help-tags that modifies the way --item-help works. As explained previously, to use these options in pythondialog, you can pass the 'help_button', 'item_help', 'help_status' and 'help_tags' keyword arguments to Dialog widget-producing methods. Adding a Help button -------------------- In order to provide a Help button in addition to the normal buttons of a widget, you can pass help_button=True (keyword argument) to the corresponding Dialog method. For instance, if 'd' is a Dialog instance, you can write: code = d.yesno("<text>", height=10, width=40, help_button=True) or code, answer = d.inputbox("<text>", init="<init>", help_button=True) When the method returns, the exit code is Dialog.HELP (i.e., the string "help") if the user pressed the Help button. Apart from that, it works exactly as if 'help_button=True' had not been used. In the last example, if the user presses the Help button, 'answer' will contain the user input, just as if OK had been pressed. Similarly, if you write: code, t = d.checklist( "<text>", height=0, width=0, list_height=0, choices=[ ("Tag 1", "Item 1", False), ("Tag 2", "Item 2", True), ("Tag 3", "Item 3", True) ], help_button=True) and find that code == Dialog.HELP, then 't' contains the tag string for the highlighted item when the Help button was pressed. Finally, note that it is possible to choose the text written on the Help button by supplying a string as the 'help_label' keyword argument. Providing inline per-item help ------------------------------ In addition to, or instead of the Help button, you can provide item-specific help that is normally displayed at the bottom of the widget. 
This can be done by passing the 'item_help=True' keyword argument to the widget-producing method and by including the item-specific help strings in the appropriate argument. For widgets where item-specific help makes sense (i.e., there are several elements that can be highlighted), there is usually a parameter, often called 'elements', 'choices', 'nodes'..., that must be provided as a sequence describing the various lines/items/nodes/... that can be highlighted in the widget. When 'item_help=True' is passed, every element of this sequence must be completed with a string which is the item-help string of the element (dialog(1) terminology). For instance, the following call with no inline per-item help support: code, t = d.checklist( "<text>", height=0, width=0, list_height=0, choices=[ ("Tag 1", "Item 1", False), ("Tag 2", "Item 2", True), ("Tag 3", "Item 3", True) ], help_button=True) can be altered this way to provide inline item-specific help: code, t = d.checklist( "<text>", height=0, width=0, list_height=0, choices=[ ("Tag 1", "Item 1", False, "Help 1"), ("Tag 2", "Item 2", True, "Help 2"), ("Tag 3", "Item 3", True, "Help 3") ], help_button=True, item_help=True, help_tags=True) With this modification, the item-help string for the highlighted item is displayed in the bottom line of the screen and updated as the user highlights other items. If you don't want a Help button, just use 'item_help=True' without 'help_button=True' ('help_tags' doesn't matter). Then, you have the inline help at the bottom of the screen, and the following discussion about the return value can be ignored. If the user chooses the Help button, 'code' will be equal to Dialog.HELP ("help") and 't' will contain the tag string corresponding to the highlighted item when the Help button was pressed ("Tag 1/2/3" in the example). 
This is because of the 'help_tags' option; without it (or with 'help_tags=False'), 't' would have contained the item-help string of the highlighted choice ("Help 1/2/3" in the example). If you remember what was said earlier, if 'item_help=True' had not been used in the previous example, 't' would still contain the tag of the highlighted choice if the user closed the widget with the Help button. This is the same as when using 'item_help=True' in combination with 'help_tags=True'; however, you would get the item-help string instead if 'help_tags' were False (which is the default, as in the dialog backend, and in order to preserve compatibility with the 'menu' implementation that is several years old). Therefore, I recommend for consistency to use 'help_tags=True' whenever possible when specifying 'item_help=True'. This makes "--help-tags" a good candidate for use with Dialog.add_persistent_args() to avoid repeating it over and over. However, there are two cases where 'help_tags=True' cannot be used: - when the version of the dialog backend is lower than 1.2-20130902 (the --help-tags option was added in this version); - when using empty or otherwise identical tags for presentation purposes (unless you don't need to tell which element was highlighted when the Help button was pressed, in which case it doesn't matter to be unable to discriminate between the tags). Getting the widget status before the Help button was pressed ------------------------------------------------------------ Typically, when the user chooses Help in a widget, the application will display a dialog box such as 'textbox', 'msgbox' or 'scrollbox' and redisplay the original widget afterwards. For simple widgets such as 'inputbox', when the Dialog exit code is equal to Dialog.HELP, the return value contains enough information to redisplay the widget in the same state it had when Help was chosen. 
However, for more complex widgets such as 'radiolist', 'checklist', 'form' and its derivatives, knowing the highlighted item is not enough to restore the widget state after processing the help request: one needs to know the checked item / list of checked items / form contents. This is where the 'help_status' keyword argument becomes useful. Example: code, t = d.checklist( "<text>", height=0, width=0, list_height=0, choices=[ ("Tag 1", "Item 1", False), ("Tag 2", "Item 2", True), ("Tag 3", "Item 3", True) ], help_button=True, help_status=True) When Help is chosen, code == Dialog.HELP and 't' is a tuple of the form (tag, selected_tags, choices) where: - 'tag' gives the tag string of the highlighted item (which would be the value of 't' if 'help_status' were set to False); - 'selected_tags' is the... list of selected tags (note that highlighting and selecting an item are different things!); - 'choices' is a list built from the original 'choices' argument of the 'checklist' call and from the list of selected tags, that can be used as is to create a widget with the same items and selection state as the original widget had when Help was chosen. Normally, pythondialog should always provide something similar to the last item in the previous example in order to make it as easy as possible to redisplay the widget in the appropriate state. To know precisely what is returned with 'help_status=True', the best ways are usually to experiment or read the code (by the way, there are many examples of widgets with various combinations of 'help_button', 'item_help' and 'help_status' in the demo). As can be inferred from the last sentence, the various options related to help support are not mutually exclusive and may be used together to provide good help support. It is also worth noting that the docstrings of the various widgets are written, in most cases, under the assumption that the widget was closed "normally" (typically, with the OK or Extra button). 
For instance, a docstring may state that the method returns a tuple of the form (code, tag) where 'tag' is ..., but actually, if using 'item_help=True' with 'help_tags=False', the 'tag' may very well be an item-help string, and if using 'help_status=True', it is likely to be a structured object such as a tuple or list. Of course, handling all these possible variations for all widgets would be a tedious task and would probably significantly degrade the readability of said docstrings. Checking the backend version ============================ The Dialog constructor retrieves the version string of the dialog backend and stores it as an instance of a BackendVersion subclass into the 'cached_backend_version' attribute. This allows doing things such as ('d' being a Dialog instance): if d.compat == "dialog" and \\ d.cached_backend_version >= DialogBackendVersion("1.2-20130902"): ... in a reliable way, allowing to fix the parsing and comparison algorithms right in the appropriate BackendVersion subclass, should the dialog-like backend versioning scheme change in unforeseen ways. As Xdialog seems to be dead and not to support --print-version, the 'cached_backend_version' attribute is set to None in Xdialog-compatibility mode (2013-09-12). Should this ever change, one should define an XDialogBackendVersion class to handle the particularities of the Xdialog versioning scheme. Exceptions ========== Please refer to the specific methods' docstrings or simply to the module's docstring for a list of all exceptions that might be raised by this class' methods. """ try: _print_maxsize_cre = re.compile(r"""^MaxSize:[ \t]+ (?P<rows>\d+),[ \t]* (?P<columns>\d+)[ \t]*$""", re.VERBOSE) _print_version_cre = re.compile( r"^Version:[ \t]+(?P<version>.+?)[ \t]*$", re.MULTILINE) except re.error, e: raise PythonDialogReModuleError(unicode(e)) # DIALOG_OK, DIALOG_CANCEL, etc. 
are environment variables controlling # the dialog backend exit status in the corresponding situation ("low-level # exit status/code"). # # Note: # - 127 must not be used for any of the DIALOG_* values. It is used # when a failure occurs in the child process before it exec()s # dialog (where "before" includes a potential exec() failure). # - 126 is also used (although in presumably rare situations). _DIALOG_OK = 0 _DIALOG_CANCEL = 1 _DIALOG_ESC = 2 _DIALOG_ERROR = 3 _DIALOG_EXTRA = 4 _DIALOG_HELP = 5 _DIALOG_ITEM_HELP = 6 # cf. also _lowlevel_exit_codes and _dialog_exit_code_ll_to_hl which are # created by __init__(). It is not practical to define everything here, # because there is no equivalent of 'self' for the class outside method # definitions. _lowlevel_exit_code_varnames = frozenset(("OK", "CANCEL", "ESC", "ERROR", "EXTRA", "HELP", "ITEM_HELP")) # High-level exit codes, AKA "Dialog exit codes". These are the codes that # pythondialog-based applications should use. OK = "ok" CANCEL = "cancel" ESC = "esc" EXTRA = "extra" HELP = "help" # Define properties to maintain backward-compatibility while warning about # the obsolete attributes (which used to refer to the low-level exit codes # in pythondialog 2.x). DIALOG_OK = property(_obsolete_property("OK"), doc="Obsolete property superseded by Dialog.OK") DIALOG_CANCEL = property(_obsolete_property("CANCEL"), doc="Obsolete property superseded by Dialog.CANCEL") DIALOG_ESC = property(_obsolete_property("ESC"), doc="Obsolete property superseded by Dialog.ESC") DIALOG_EXTRA = property(_obsolete_property("EXTRA"), doc="Obsolete property superseded by Dialog.EXTRA") DIALOG_HELP = property(_obsolete_property("HELP"), doc="Obsolete property superseded by Dialog.HELP") # We treat DIALOG_ITEM_HELP and DIALOG_HELP the same way in pythondialog, # since both indicate the same user action ("Help" button pressed). 
DIALOG_ITEM_HELP = property(_obsolete_property("ITEM_HELP", replacement="HELP"), doc="Obsolete property superseded by Dialog.HELP") @property def DIALOG_ERROR(self): warnings.warn("the DIALOG_ERROR attribute of Dialog instances is " "obsolete. Since the corresponding exit status is " "automatically translated into a DialogError exception, " "users should not see nor need this attribute. If you " "think you have a good reason to use it, please expose " "your situation on the pythondialog mailing-list.", DeprecationWarning) # There is no corresponding high-level code; and if the user *really* # wants to know the (integer) error exit status, here it is... return self._DIALOG_ERROR def __init__(self, dialog="dialog", DIALOGRC=None, compat="dialog", use_stdout=None): """Constructor for Dialog instances. dialog -- name of (or path to) the dialog-like program to use; if it contains a '/', it is assumed to be a path and is used as is; otherwise, it is looked for according to the contents of the PATH environment variable, which defaults to ":/bin:/usr/bin" if unset. DIALOGRC -- string to pass to the dialog-like program as the DIALOGRC environment variable, or None if no modification to the environment regarding this variable should be done in the call to the dialog-like program compat -- compatibility mode (see below) use_stdout -- read dialog's standard output stream instead of its standard error stream in order to get most 'results' (user-supplied strings, etc.; basically everything apart from the exit status). This is for compatibility with Xdialog and should only be used if you have a good reason to do so. The officially supported dialog-like program in pythondialog is the well-known dialog program written in C, based on the ncurses library. 
It is also known as cdialog and its home page is currently (2013-08-12) located at: http://invisible-island.net/dialog/dialog.html If you want to use a different program such as Xdialog, you should indicate the executable file name with the 'dialog' argument *and* the compatibility type that you think it conforms to with the 'compat' argument. Currently, 'compat' can be either "dialog" (for dialog; this is the default) or "Xdialog" (for, well, Xdialog). The 'compat' argument allows me to cope with minor differences in behaviour between the various programs implementing the dialog interface (not the text or graphical interface, I mean the "API"). However, having to support various APIs simultaneously is ugly and I would really prefer you to report bugs to the relevant maintainers when you find incompatibilities with dialog. This is for the benefit of pretty much everyone that relies on the dialog interface. Notable exceptions: ExecutableNotFound PythonDialogOSError UnableToRetrieveBackendVersion UnableToParseBackendVersion """ # DIALOGRC differs from the Dialog._DIALOG_* attributes in that: # 1. It is an instance attribute instead of a class attribute. # 2. It should be a string if not None. # 3. We may very well want it to be unset. if DIALOGRC is not None: self.DIALOGRC = DIALOGRC # Mapping from "OK", "CANCEL", ... to the corresponding dialog exit # statuses (integers). self._lowlevel_exit_codes = dict(( name, getattr(self, "_DIALOG_" + name)) for name in self._lowlevel_exit_code_varnames) # Mapping from dialog exit status (integer) to Dialog exit code ("ok", # "cancel", ... strings referred to by Dialog.OK, Dialog.CANCEL, ...); # in other words, from low-level to high-level exit code. 
self._dialog_exit_code_ll_to_hl = {} for name in self._lowlevel_exit_code_varnames: intcode = self._lowlevel_exit_codes[name] if name == "ITEM_HELP": self._dialog_exit_code_ll_to_hl[intcode] = self.HELP elif name == "ERROR": continue else: self._dialog_exit_code_ll_to_hl[intcode] = getattr(self, name) self._dialog_prg = _path_to_executable(dialog) self.compat = compat self.dialog_persistent_arglist = [] # Use stderr or stdout for reading dialog's output? if self.compat == "Xdialog": # Default to using stdout for Xdialog self.use_stdout = True else: self.use_stdout = False if use_stdout is not None: # Allow explicit setting self.use_stdout = use_stdout if self.use_stdout: self.add_persistent_args(["--stdout"]) self.setup_debug(False) if compat == "dialog": self.cached_backend_version = DialogBackendVersion.fromstring( self.backend_version()) else: # Xdialog doesn't seem to offer --print-version (2013-09-12) self.cached_backend_version = None @classmethod def dash_escape(cls, args): """Escape all elements of 'args' that need escaping. 'args' may be any sequence and is not modified by this method. Return a new list where every element that needs escaping has been escaped. An element needs escaping when it starts with two ASCII hyphens ('--'). Escaping consists in prepending an element composed of two ASCII hyphens, i.e., the string '--'. All high-level Dialog methods automatically perform dash escaping where appropriate. In particular, this is the case for every method that provides a widget: yesno(), msgbox(), etc. You only need to do it yourself when calling a low-level method such as add_persistent_args(). """ return _dash_escape(args) @classmethod def dash_escape_nf(cls, args): """Escape all elements of 'args' that need escaping, except the first one. See dash_escape() for details. Return a new list. All high-level Dialog methods automatically perform dash escaping where appropriate. 
In particular, this is the case for every method that provides a widget: yesno(), msgbox(), etc. You only need to do it yourself when calling a low-level method such as add_persistent_args(). """ return _dash_escape_nf(args) def add_persistent_args(self, args): """Add arguments to use for every subsequent dialog call. This method cannot guess which elements of 'args' are dialog options (such as '--title') and which are not (for instance, you might want to use '--title' or even '--' as an argument to a dialog option). Therefore, this method does not perform any kind of dash escaping; you have to do it yourself. dash_escape() and dash_escape_nf() may be useful for this purpose. """ self.dialog_persistent_arglist.extend(args) def set_background_title(self, text): """Set the background title for dialog. text -- string to use as the background title """ self.add_persistent_args(self.dash_escape_nf(("--backtitle", text))) # For compatibility with the old dialog def setBackgroundTitle(self, text): """Set the background title for dialog. text -- string to use as the background title This method is obsolete. Please remove calls to it from your programs. """ warnings.warn("Dialog.setBackgroundTitle() has been obsolete for " "many years; use Dialog.set_background_title() instead", DeprecationWarning) self.set_background_title(text) def setup_debug(self, enable, file=None, always_flush=False): """Setup the debugging parameters. When enabled, all dialog commands are written to 'file' using Bourne shell syntax. enable -- boolean indicating whether to enable or disable debugging file -- file object where to write debugging information always_flush -- boolean indicating whether to call file.flush() after each command written """ self._debug_enabled = enable if not hasattr(self, "_debug_logfile"): self._debug_logfile = None # Allows to switch debugging on and off without having to pass the file # object again and again. 
if file is not None: self._debug_logfile = file if enable and self._debug_logfile is None: raise BadPythonDialogUsage( "you must specify a file object when turning debugging on") self._debug_always_flush = always_flush self._debug_first_output = True def _write_command_to_file(self, env, arglist): envvar_settings_list = [] if "DIALOGRC" in env: envvar_settings_list.append( "DIALOGRC={0}".format(_shell_quote(env["DIALOGRC"]))) for var in self._lowlevel_exit_code_varnames: varname = "DIALOG_" + var envvar_settings_list.append( "{0}={1}".format(varname, _shell_quote(env[varname]))) command_str = ' '.join(envvar_settings_list + list(imap(_shell_quote, arglist))) s = "{separator}{cmd}\n\nArgs: {args!r}\n".format( separator="" if self._debug_first_output else ("-" * 79) + "\n", cmd=command_str, args=arglist) self._debug_logfile.write(s) if self._debug_always_flush: self._debug_logfile.flush() self._debug_first_output = False def _call_program(self, cmdargs, **kwargs): """Do the actual work of invoking the dialog-like program. Communication with the dialog-like program is performed through one pipe(2) and optionally a user-specified file descriptor, depending on 'redir_child_stdin_from_fd'. The pipe allows the parent process to read what dialog writes on its standard error[*] stream. If 'use_persistent_args' is True (the default), the elements of self.dialog_persistent_arglist are passed as the first arguments to self._dialog_prg; otherwise, self.dialog_persistent_arglist is not used at all. The remaining arguments are those computed from kwargs followed by the elements of 'cmdargs'. If 'dash_escape' is the string "non-first", then every element of 'cmdargs' that starts with '--' is escaped by prepending an element consisting of '--', except the first one (which is usually a dialog option such as '--yesno'). In order to disable this escaping mechanism, pass the string "none" as 'dash_escape'. 
        If 'redir_child_stdin_from_fd' is not None, it should be an
        open file descriptor (i.e., an integer). That file descriptor
        will be connected to dialog's standard input. This is used by
        the gauge widget to feed data to dialog, as well as for
        progressbox() to allow dialog to read data from a
        possibly-growing file.

        If 'redir_child_stdin_from_fd' is None, the standard input in
        the child process (which runs dialog) is not redirected in any
        way.

        If 'close_fds' is passed, it should be a sequence of file
        descriptors that will be closed by the child process before it
        exec()s the dialog-like program.

          [*] standard output stream with 'use_stdout'

        Notable exception: PythonDialogOSError (if any of the pipe(2)
        or close(2) system calls fails...)

        """
        # Pop our own keyword arguments out of 'kwargs' so that only
        # dialog "common options" remain for _compute_common_args().
        if 'close_fds' in kwargs:
            close_fds = kwargs['close_fds']; del kwargs['close_fds']
        else:
            close_fds = ()
        if 'redir_child_stdin_from_fd' in kwargs:
            redir_child_stdin_from_fd = kwargs['redir_child_stdin_from_fd']; del kwargs['redir_child_stdin_from_fd']
        else:
            redir_child_stdin_from_fd = None
        if 'use_persistent_args' in kwargs:
            use_persistent_args = kwargs['use_persistent_args']; del kwargs['use_persistent_args']
        else:
            use_persistent_args = True
        if 'dash_escape' in kwargs:
            dash_escape = kwargs['dash_escape']; del kwargs['dash_escape']
        else:
            dash_escape = "non-first"

        # We want to define DIALOG_OK, DIALOG_CANCEL, etc. in the
        # environment of the child process so that we know (and
        # even control) the possible dialog exit statuses.
        new_environ = {}
        new_environ.update(os.environ)
        for var, value in self._lowlevel_exit_codes.items():
            varname = "DIALOG_" + var
            new_environ[varname] = unicode(value)
        if hasattr(self, "DIALOGRC"):
            new_environ["DIALOGRC"] = self.DIALOGRC

        if dash_escape == "non-first":
            # Escape all elements of 'cmdargs' that start with '--', except the
            # first one.
cmdargs = self.dash_escape_nf(cmdargs) elif dash_escape != "none": raise PythonDialogBug("invalid value for 'dash_escape' parameter: " "{0!r}".format(dash_escape)) arglist = [ self._dialog_prg ] if use_persistent_args: arglist.extend(self.dialog_persistent_arglist) arglist.extend(_compute_common_args(kwargs) + cmdargs) if self._debug_enabled: # Write the complete command line with environment variables # setting to the debug log file (Bourne shell syntax for easy # copy-pasting into a terminal, followed by repr(arglist)). self._write_command_to_file(new_environ, arglist) # Create a pipe so that the parent process can read dialog's # output on stderr (stdout with 'use_stdout') with _OSErrorHandling(): # rfd = File Descriptor for Reading # wfd = File Descriptor for Writing (child_output_rfd, child_output_wfd) = os.pipe() child_pid = os.fork() if child_pid == 0: # We are in the child process. We MUST NOT raise any exception. try: # 1) If the write end of a pipe isn't closed, the read end # will never see EOF, which can indefinitely block the # child waiting for input. To avoid this, the write end # must be closed in the father *and* child processes. # 2) The child process doesn't need child_output_rfd. for fd in close_fds + (child_output_rfd,): os.close(fd) # We want: # - to keep a reference to the father's stderr for error # reporting (and use line-buffering for this stream); # - dialog's output on stderr[*] to go to child_output_wfd; # - data written to fd 'redir_child_stdin_from_fd' # (if not None) to go to dialog's stdin. # # [*] stdout with 'use_stdout' # # We'll just print the result of traceback.format_exc() to # father_stderr, which is a byte string in Python 2, hence the # binary mode. 
father_stderr = open(os.dup(2), mode="wb") os.dup2(child_output_wfd, 1 if self.use_stdout else 2) if redir_child_stdin_from_fd is not None: os.dup2(redir_child_stdin_from_fd, 0) os.execve(self._dialog_prg, arglist, new_environ) except: print(traceback.format_exc(), file=father_stderr) father_stderr.close() os._exit(127) # Should not happen unless there is a bug in Python os._exit(126) # We are in the father process. # # It is essential to close child_output_wfd, otherwise we will never # see EOF while reading on child_output_rfd and the parent process # will block forever on the read() call. # [ after the fork(), the "reference count" of child_output_wfd from # the operating system's point of view is 2; after the child exits, # it is 1 until the father closes it itself; then it is 0 and a read # on child_output_rfd encounters EOF once all the remaining data in # the pipe has been read. ] with _OSErrorHandling(): os.close(child_output_wfd) return (child_pid, child_output_rfd) def _wait_for_program_termination(self, child_pid, child_output_rfd): """Wait for a dialog-like process to terminate. This function waits for the specified process to terminate, raises the appropriate exceptions in case of abnormal termination and returns the Dialog exit code (high-level) and stderr[*] output of the process as a tuple: (hl_exit_code, output_string). 'child_output_rfd' must be the file descriptor for the reading end of the pipe created by self._call_program(), the writing end of which was connected by self._call_program() to the child process's standard error[*]. This function reads the process' output on standard error[*] from 'child_output_rfd' and closes this file descriptor once this is done. 
[*] actually, standard output if self.use_stdout is True Notable exceptions: DialogTerminatedBySignal DialogError PythonDialogErrorBeforeExecInChildProcess PythonDialogIOError if the Python version is < 3.3 PythonDialogOSError PythonDialogBug ProbablyPythonBug """ # Read dialog's output on its stderr (stdout with 'use_stdout') with _OSErrorHandling(): with open(child_output_rfd, "r") as f: child_output = f.read() # The closing of the file object causes the end of the pipe we used # to read dialog's output on its stderr to be closed too. This is # important, otherwise invoking dialog enough times would # eventually exhaust the maximum number of open file descriptors. exit_info = os.waitpid(child_pid, 0)[1] if os.WIFEXITED(exit_info): ll_exit_code = os.WEXITSTATUS(exit_info) # As we wait()ed for the child process to terminate, there is no # need to call os.WIFSTOPPED() elif os.WIFSIGNALED(exit_info): raise DialogTerminatedBySignal("the dialog-like program was " "terminated by signal %d" % os.WTERMSIG(exit_info)) else: raise PythonDialogBug("please report this bug to the " "pythondialog maintainer(s)") if ll_exit_code == self._DIALOG_ERROR: raise DialogError( "the dialog-like program exited with status {0} (which was " "passed to it as the DIALOG_ERROR environment variable). " "Sometimes, the reason is simply that dialog was given a " "height or width parameter that is too big for the terminal " "in use. 
Its output, with leading and trailing whitespace " "stripped, was:\n\n{1}".format(ll_exit_code, child_output.strip())) elif ll_exit_code == 127: raise PythonDialogErrorBeforeExecInChildProcess(dedent("""\ possible reasons include: - the dialog-like program could not be executed (this can happen for instance if the Python program is trying to call the dialog-like program with arguments that cannot be represented in the user's locale [LC_CTYPE]); - the system is out of memory; - the maximum number of open file descriptors has been reached; - a cosmic ray hit the system memory and flipped nasty bits. There ought to be a traceback above this message that describes more precisely what happened.""")) elif ll_exit_code == 126: raise ProbablyPythonBug( "a child process returned with exit status 126; this might " "be the exit status of the dialog-like program, for some " "unknown reason (-> probably a bug in the dialog-like " "program); otherwise, we have probably found a python bug") try: hl_exit_code = self._dialog_exit_code_ll_to_hl[ll_exit_code] except KeyError: raise PythonDialogBug( "unexpected low-level exit status (new code?): {0!r}".format( ll_exit_code)) return (hl_exit_code, child_output) def _perform(self, cmdargs, **kwargs): """Perform a complete dialog-like program invocation. This function invokes the dialog-like program, waits for its termination and returns the appropriate Dialog exit code (high-level) along with whatever output it produced. See _call_program() for a description of the parameters. 
Notable exceptions: any exception raised by self._call_program() or self._wait_for_program_termination() """ if 'use_persistent_args' in kwargs: use_persistent_args = kwargs['use_persistent_args']; del kwargs['use_persistent_args'] else: use_persistent_args = True if 'dash_escape' in kwargs: dash_escape = kwargs['dash_escape']; del kwargs['dash_escape'] else: dash_escape = "non-first" (child_pid, child_output_rfd) = \ self._call_program(cmdargs, dash_escape=dash_escape, use_persistent_args=use_persistent_args, **kwargs) (exit_code, output) = \ self._wait_for_program_termination(child_pid, child_output_rfd) return (exit_code, output) def _strip_xdialog_newline(self, output): """Remove trailing newline (if any) in Xdialog compatibility mode""" if self.compat == "Xdialog" and output.endswith("\n"): output = output[:-1] return output # This is for compatibility with the old dialog.py def _perform_no_options(self, cmd): """Call dialog without passing any more options.""" warnings.warn("Dialog._perform_no_options() has been obsolete for " "many years", DeprecationWarning) return os.system(self._dialog_prg + ' ' + cmd) # For compatibility with the old dialog.py def clear(self): """Clear the screen. Equivalent to the dialog --clear option. This method is obsolete. Please remove calls to it from your programs. You may use the clear(1) program to clear the screen. cf. clear_screen() in demo.py for an example. """ warnings.warn("Dialog.clear() has been obsolete for many years.\n" "You may use the clear(1) program to clear the screen.\n" "cf. 
clear_screen() in demo.py for an example", DeprecationWarning) self._perform_no_options('--clear') def _help_status_on(self, kwargs): return ("--help-status" in self.dialog_persistent_arglist or kwargs.get("help_status", False)) def _parse_quoted_string(self, s, start=0): """Parse a quoted string from a dialog help output.""" if start >= len(s) or s[start] != '"': raise PythonDialogBug("quoted string does not start with a double " "quote: {0!r}".format(s)) l = [] i = start + 1 while i < len(s) and s[i] != '"': if s[i] == "\\": i += 1 if i >= len(s): raise PythonDialogBug( "quoted string ends with a backslash: {0!r}".format(s)) l.append(s[i]) i += 1 if s[i] != '"': raise PythonDialogBug("quoted string does not and with a double " "quote: {0!r}".format(s)) return (''.join(l), i+1) def _split_shellstyle_arglist(self, s): """Split an argument list with shell-style quoting performed by dialog. Any argument in 's' may or may not be quoted. Quoted arguments are always expected to be enclosed in double quotes (more restrictive than what the POSIX shell allows). This function could maybe be replaced with shlex.split(), however: - shlex only handles Unicode strings in Python 2.7.3 and above; - the bulk of the work is done by _parse_quoted_string(), which is probably still needed in _parse_help(), where one needs to parse things such as 'HELP <id> <status>' in which <id> may be quoted but <status> is never quoted, even if it contains spaces or quotes. 
""" s = s.rstrip() l = [] i = 0 while i < len(s): if s[i] == '"': arg, i = self._parse_quoted_string(s, start=i) if i < len(s) and s[i] != ' ': raise PythonDialogBug( "expected a space or end-of-string after quoted " "string in {0!r}, but found {1!r}".format(s, s[i])) # Start of the next argument, or after the end of the string i += 1 l.append(arg) else: try: end = s.index(' ', i) except ValueError: end = len(s) l.append(s[i:end]) # Start of the next argument, or after the end of the string i = end + 1 return l def _parse_help(self, output, kwargs, **_3to2kwargs): """Parse the dialog help output from a widget. 'kwargs' should contain the keyword arguments used in the widget call that produced the help output. 'multival' is for widgets that return a list of values as opposed to a single value. 'raw_format' is for widgets that don't start their help output with the string "HELP ". """ if 'raw_format' in _3to2kwargs: raw_format = _3to2kwargs['raw_format']; del _3to2kwargs['raw_format'] else: raw_format = False if 'multival_on_single_line' in _3to2kwargs: multival_on_single_line = _3to2kwargs['multival_on_single_line']; del _3to2kwargs['multival_on_single_line'] else: multival_on_single_line = False if 'multival' in _3to2kwargs: multival = _3to2kwargs['multival']; del _3to2kwargs['multival'] else: multival = False l = output.splitlines() if raw_format: # This format of the help output is either empty or consists of # only one line (possibly terminated with \n). It is # encountered with --calendar and --inputbox, among others. if len(l) > 1: raise PythonDialogBug("raw help feedback unexpected as " "multiline: {0!r}".format(output)) elif len(l) == 0: return "" else: return l[0] # Simple widgets such as 'yesno' will fall in this case if they use # this method. 
if not l: return None # The widgets that actually use --help-status always have the first # help line indicating the active item; there is no risk of # confusing this line with the first line produced by --help-status. if not l[0].startswith("HELP "): raise PythonDialogBug( "unexpected help output that does not start with 'HELP ': " "{0!r}".format(output)) # Everything that follows "HELP "; what it contains depends on whether # --item-help and/or --help-tags were passed to dialog. s = l[0][5:] if not self._help_status_on(kwargs): return s if multival: if multival_on_single_line: args = self._split_shellstyle_arglist(s) if not args: raise PythonDialogBug( "expected a non-empty space-separated list of " "possibly-quoted strings in this help output: {0!r}" .format(output)) return (args[0], args[1:]) else: return (s, l[1:]) else: if not s: raise PythonDialogBug( "unexpected help output whose first line is 'HELP '") elif s[0] != '"': l2 = s.split(' ', 1) if len(l2) == 1: raise PythonDialogBug( "expected 'HELP <id> <status>' in the help output, " "but couldn't find any space after 'HELP '") else: return tuple(l2) else: help_id, after_index = self._parse_quoted_string(s) if not s[after_index:].startswith(" "): raise PythonDialogBug( "expected 'HELP <quoted_id> <status>' in the help " "output, but couldn't find any space after " "'HELP <quoted_id>'") return (help_id, s[after_index+1:]) def _widget_with_string_output(self, args, kwargs, strip_xdialog_newline=False, raw_help=False): """Generic implementation for a widget that produces a single string. The help output must be present regardless of whether --help-status was passed or not. 
""" code, output = self._perform(args, **kwargs) if strip_xdialog_newline: output = self._strip_xdialog_newline(output) if code == self.HELP: # No check for --help-status help_data = self._parse_help(output, kwargs, raw_format=raw_help) return (code, help_data) else: return (code, output) def _widget_with_no_output(self, widget_name, args, kwargs): """Generic implementation for a widget that produces no output.""" code, output = self._perform(args, **kwargs) if output: raise PythonDialogBug( "expected an empty output from {0!r}, but got: {1!r}".format( widget_name, output)) return code def _dialog_version_check(self, version_string, feature): if self.compat == "dialog": minimum_version = DialogBackendVersion.fromstring(version_string) if self.cached_backend_version < minimum_version: raise InadequateBackendVersion( "the programbox widget requires dialog {0} or later, " "but you seem to be using version {1}".format( minimum_version, self.cached_backend_version)) def backend_version(self): """Get the version of the dialog-like program (backend). If the version of the dialog-like program can be retrieved, return it as a string; otherwise, raise UnableToRetrieveBackendVersion. This version is not to be confused with the pythondialog version. In most cases, you should rather use the 'cached_backend_version' attribute of Dialog instances, because: - it avoids calling the backend every time one needs the version; - it is a BackendVersion instance (or instance of a subclass) that allows easy and reliable comparisons between versions; - the version string corresponding to a BackendVersion instance (or instance of a subclass) can be obtained with unicode(). 
Notable exceptions: UnableToRetrieveBackendVersion PythonDialogReModuleError any exception raised by self._perform() """ code, output = self._perform(["--print-version"], use_persistent_args=False) if code == self.OK: try: mo = self._print_version_cre.match(output) if mo: return mo.group("version") else: raise UnableToRetrieveBackendVersion( "unable to parse the output of '{0} --print-version': " "{1!r}".format(self._dialog_prg, output)) except re.error, e: raise PythonDialogReModuleError(unicode(e)) else: raise UnableToRetrieveBackendVersion( "exit code {0!r} from the backend".format(code)) def maxsize(self, **kwargs): """Get the maximum size of dialog boxes. If the exit code from the backend is self.OK, return a (lines, cols) tuple of integers; otherwise, return None. If you want to obtain the number of lines and columns of the terminal, you should call this method with use_persistent_args=False, because arguments such as --backtitle modify the values returned. Notable exceptions: PythonDialogReModuleError any exception raised by self._perform() """ code, output = self._perform(["--print-maxsize"], **kwargs) if code == self.OK: try: mo = self._print_maxsize_cre.match(output) if mo: return tuple(imap(int, mo.group("rows", "columns"))) else: raise PythonDialogBug( "Unable to parse the output of '{0} --print-maxsize': " "{1!r}".format(self._dialog_prg, output)) except re.error, e: raise PythonDialogReModuleError(unicode(e)) else: return None @widget def buildlist(self, text, height=0, width=0, list_height=0, items=[], **kwargs): """Display a buildlist box. text -- text to display in the box height -- height of the box width -- width of the box list_height -- height of the selected and unselected list boxes items -- a list of (tag, item, status) tuples where 'status' specifies the initial selected/unselected state of each entry; can be True or False, 1 or 0, "on" or "off" (True, 1 and "on" meaning selected), or any case variation of these two strings. 
A buildlist dialog is similar in logic to the checklist but differs in presentation. In this widget, two lists are displayed, side by side. The list on the left shows unselected items. The list on the right shows selected items. As items are selected or unselected, they move between the two lists. The 'status' component of 'items' specifies which items are initially selected. Return a tuple of the form (code, tags) where: - 'code' is the Dialog exit code; - 'tags' is a list of the tags corresponding to the selected items, in the order they have in the list on the right. Keys: SPACE select or deselect the highlighted item, i.e., move it between the left and right lists ^ move the focus to the left list $ move the focus to the right list TAB move focus (see 'visit_items' below) ENTER press the focused button If called with 'visit_items=True', the TAB key can move the focus to the left and right lists, which is probably more intuitive for users than the default behavior that requires using ^ and $ for this purpose. This widget requires dialog >= 1.2 (2012-12-30). 
        Notable exceptions:

            any exception raised by self._perform() or _to_onoff()

        """
        self._dialog_version_check("1.2", "the buildlist widget")

        cmd = ["--buildlist", text, unicode(height), unicode(width),
               unicode(list_height)]
        for t in items:
            cmd.extend([ t[0], t[1], _to_onoff(t[2]) ] + list(t[3:]))

        code, output = self._perform(cmd, **kwargs)

        if code == self.HELP:
            help_data = self._parse_help(output, kwargs, multival=True,
                                         multival_on_single_line=True)

            if self._help_status_on(kwargs):
                help_id, selected_tags = help_data
                # Rebuild 'items' with the up-to-date selected state
                # reported by --help-status
                updated_items = []
                for elt in items:
                    tag, item, status = elt[:3]
                    rest = elt[3:]
                    updated_items.append([ tag, item, tag in selected_tags ]
                                         + list(rest))

                return (code, (help_id, selected_tags, updated_items))
            else:
                return (code, help_data)
        elif code in (self.OK, self.EXTRA):
            return (code, self._split_shellstyle_arglist(output))
        else:
            return (code, None)

    def _calendar_parse_date(self, date_str):
        """Parse the date string printed by the calendar widget.

        Return a [day, month, year] list of integers extracted from
        'date_str'.

        Notable exceptions:

            UnexpectedDialogOutput
            PythonDialogReModuleError

        """
        try:
            mo = _calendar_date_cre.match(date_str)
        except re.error, e:
            raise PythonDialogReModuleError(unicode(e))

        if not mo:
            raise UnexpectedDialogOutput(
                "the dialog-like program returned the following "
                "unexpected output (a date string was expected) from the "
                "calendar box: {0!r}".format(date_str))

        return [ int(s) for s in mo.group("day", "month", "year") ]

    @widget
    def calendar(self, text, height=6, width=0, day=0, month=0, year=0,
                 **kwargs):
        """Display a calendar dialog box.

        text   -- text to display in the box
        height -- height of the box (minus the calendar height)
        width  -- width of the box
        day    -- initial day highlighted
        month  -- initial month displayed
        year   -- initial year selected (0 causes the current date to
                  be used as the initial date)

        A calendar box displays month, day and year in separately
        adjustable windows. If the values for day, month or year are
        missing or negative, the current date's corresponding values
        are used. You can increment or decrement any of those using
        the left, up, right and down arrows. Use tab or backtab to
        move between windows.
If the year is given as zero, the current date is used as an initial value. Return a tuple of the form (code, date) where: - 'code' is the Dialog exit code; - 'date' is a list of the form [day, month, year], where 'day', 'month' and 'year' are integers corresponding to the date chosen by the user. Notable exceptions: - any exception raised by self._perform() - UnexpectedDialogOutput - PythonDialogReModuleError """ (code, output) = self._perform( ["--calendar", text, unicode(height), unicode(width), unicode(day), unicode(month), unicode(year)], **kwargs) if code == self.HELP: # The output does not depend on whether --help-status was passed # (dialog 1.2-20130902). help_data = self._parse_help(output, kwargs, raw_format=True) return (code, self._calendar_parse_date(help_data)) elif code in (self.OK, self.EXTRA): return (code, self._calendar_parse_date(output)) else: return (code, None) @widget def checklist(self, text, height=15, width=54, list_height=7, choices=[], **kwargs): """Display a checklist box. text -- text to display in the box height -- height of the box width -- width of the box list_height -- number of entries displayed in the box (which can be scrolled) at a given time choices -- a list of tuples (tag, item, status) where 'status' specifies the initial on/off state of each entry; can be True or False, 1 or 0, "on" or "off" (True, 1 and "on" meaning checked), or any case variation of these two strings. Return a tuple of the form (code, [tag, ...]) with the tags for the entries that were selected by the user. 'code' is the Dialog exit code. If the user exits with ESC or CANCEL, the returned tag list is empty. 
Notable exceptions: any exception raised by self._perform() or _to_onoff() """ cmd = ["--checklist", text, unicode(height), unicode(width), unicode(list_height)] for t in choices: t = [ t[0], t[1], _to_onoff(t[2]) ] + list(t[3:]) cmd.extend(t) # The dialog output cannot be parsed reliably (at least in dialog # 0.9b-20040301) without --separate-output (because double quotes in # tags are escaped with backslashes, but backslashes are not # themselves escaped and you have a problem when a tag ends with a # backslash--the output makes you think you've encountered an embedded # double-quote). kwargs["separate_output"] = True (code, output) = self._perform(cmd, **kwargs) # Since we used --separate-output, the tags are separated by a newline # in the output. There is also a final newline after the last tag. if code == self.HELP: help_data = self._parse_help(output, kwargs, multival=True) if self._help_status_on(kwargs): help_id, selected_tags = help_data updated_choices = [] for elt in choices: tag, item, status = elt[:3] rest = elt[3:] updated_choices.append([ tag, item, tag in selected_tags ] + list(rest)) return (code, (help_id, selected_tags, updated_choices)) else: return (code, help_data) else: return (code, output.split('\n')[:-1]) def _form_updated_items(self, status, elements): """Return a complete list with up-to-date items from 'status'. Return a new list of same length as 'elements'. Items are taken from 'status', except when data inside 'elements' indicates a read-only field: such items are not output by dialog ... --help-status ..., and therefore have to be extracted from 'elements' instead of 'status'. Actually, for 'mixedform', the elements that are defined as read-only using the attribute instead of a non-positive field_length are not concerned by this function, since they are included in the --help-status output. 
""" res = [] for i, elt in enumerate(elements): label, yl, xl, item, yi, xi, field_length = elt[:7] res.append(status[i] if field_length > 0 else item) return res def _generic_form(self, widget_name, method_name, text, elements, height=0, width=0, form_height=0, **kwargs): cmd = ["--%s" % widget_name, text, unicode(height), unicode(width), unicode(form_height)] if not elements: raise BadPythonDialogUsage( "{0}.{1}.{2}: empty ELEMENTS sequence: {3!r}".format( __name__, type(self).__name__, method_name, elements)) elt_len = len(elements[0]) # for consistency checking for i, elt in enumerate(elements): if len(elt) != elt_len: raise BadPythonDialogUsage( "{0}.{1}.{2}: ELEMENTS[0] has length {3}, whereas " "ELEMENTS[{4}] has length {5}".format( __name__, type(self).__name__, method_name, elt_len, i, len(elt))) # Give names to make the code more readable if widget_name in ("form", "passwordform"): label, yl, xl, item, yi, xi, field_length, input_length = \ elt[:8] rest = elt[8:] # optional "item_help" string elif widget_name == "mixedform": label, yl, xl, item, yi, xi, field_length, input_length, \ attributes = elt[:9] rest = elt[9:] # optional "item_help" string else: raise PythonDialogBug( "unexpected widget name in {0}.{1}._generic_form(): " "{2!r}".format(__name__, type(self).__name__, widget_name)) for name, value in (("LABEL", label), ("ITEM", item)): if not isinstance(value, basestring): raise BadPythonDialogUsage( "{0}.{1}.{2}: {3} element not a string: {4!r}".format( __name__, type(self).__name__, method_name, name, value)) cmd.extend((label, unicode(yl), unicode(xl), item, unicode(yi), unicode(xi), unicode(field_length), unicode(input_length))) if widget_name == "mixedform": cmd.append(unicode(attributes)) # "item help" string when using --item-help, nothing otherwise cmd.extend(rest) (code, output) = self._perform(cmd, **kwargs) if code == self.HELP: help_data = self._parse_help(output, kwargs, multival=True) if self._help_status_on(kwargs): help_id, status = 
help_data # 'status' does not contain the fields marked as read-only in # 'elements'. Build a list containing all up-to-date items. updated_items = self._form_updated_items(status, elements) # Reconstruct 'elements' with the updated items taken from # 'status'. updated_elements = [] for elt, updated_item in izip(elements, updated_items): label, yl, xl, item = elt[:4] rest = elt[4:] updated_elements.append([ label, yl, xl, updated_item ] + list(rest)) return (code, (help_id, status, updated_elements)) else: return (code, help_data) else: return (code, output.split('\n')[:-1]) @widget def form(self, text, elements, height=0, width=0, form_height=0, **kwargs): """Display a form consisting of labels and fields. text -- text to display in the box elements -- sequence describing the labels and fields (see below) height -- height of the box width -- width of the box form_height -- number of form lines displayed at the same time A form box consists in a series of fields and associated labels. This type of dialog is suitable for adjusting configuration parameters and similar tasks. Each element of 'elements' must itself be a sequence (LABEL, YL, XL, ITEM, YI, XI, FIELD_LENGTH, INPUT_LENGTH) containing the various parameters concerning a given field and the associated label. LABEL is a string that will be displayed at row YL, column XL. ITEM is a string giving the initial value for the field, which will be displayed at row YI, column XI (row and column numbers starting from 1). FIELD_LENGTH and INPUT_LENGTH are integers that respectively specify the number of characters used for displaying the field and the maximum number of characters that can be entered for this field. 
These two integers also determine whether the contents of the field can be modified, as follows: - if FIELD_LENGTH is zero, the field cannot be altered and its contents determines the displayed length; - if FIELD_LENGTH is negative, the field cannot be altered and the opposite of FIELD_LENGTH gives the displayed length; - if INPUT_LENGTH is zero, it is set to FIELD_LENGTH. Return a tuple of the form (code, list) where 'code' is the Dialog exit code and 'list' gives the contents of every editable field on exit, with the same order as in 'elements'. Notable exceptions: BadPythonDialogUsage any exception raised by self._perform() """ return self._generic_form("form", "form", text, elements, height, width, form_height, **kwargs) @widget def passwordform(self, text, elements, height=0, width=0, form_height=0, **kwargs): """Display a form consisting of labels and invisible fields. This widget is identical to the form box, except that all text fields are treated as passwordbox widgets rather than inputbox widgets. By default (as in dialog), nothing is echoed to the terminal as the user types in the invisible fields. This can be confusing to users. Use the 'insecure' keyword argument if you want an asterisk to be echoed for each character entered by the user. Notable exceptions: BadPythonDialogUsage any exception raised by self._perform() """ return self._generic_form("passwordform", "passwordform", text, elements, height, width, form_height, **kwargs) @widget def mixedform(self, text, elements, height=0, width=0, form_height=0, **kwargs): """Display a form consisting of labels and fields. text -- text to display in the box elements -- sequence describing the labels and fields (see below) height -- height of the box width -- width of the box form_height -- number of form lines displayed at the same time A mixedform box is very similar to a form box, and differs from the latter by allowing field attributes to be specified. 
Each element of 'elements' must itself be a sequence (LABEL, YL, XL, ITEM, YI, XI, FIELD_LENGTH, INPUT_LENGTH, ATTRIBUTES) containing the various parameters concerning a given field and the associated label. ATTRIBUTES is a bit mask with the following meaning: bit 0 -- the field should be hidden (e.g., a password) bit 1 -- the field should be read-only (e.g., a label) For all other parameters, please refer to the documentation of the form box. The return value is the same as would be with the form box, except that field marked as read-only with bit 1 of ATTRIBUTES are also included in the output list. Notable exceptions: BadPythonDialogUsage any exception raised by self._perform() """ return self._generic_form("mixedform", "mixedform", text, elements, height, width, form_height, **kwargs) @widget def dselect(self, filepath, height=0, width=0, **kwargs): """Display a directory selection dialog box. filepath -- initial path height -- height of the box width -- width of the box The directory-selection dialog displays a text-entry window in which you can type a directory, and above that a window with directory names. Here, filepath can be a filepath in which case the directory window will display the contents of the path and the text-entry window will contain the preselected directory. Use tab or arrow keys to move between the windows. Within the directory window, use the up/down arrow keys to scroll the current selection. Use the space-bar to copy the current selection into the text-entry window. Typing any printable characters switches focus to the text-entry window, entering that character as well as scrolling the directory window to the closest match. Use a carriage return or the "OK" button to accept the current value in the text-entry window and exit. Return a tuple of the form (code, path) where 'code' is the Dialog exit code and 'path' is the directory chosen by the user. 
Notable exceptions: any exception raised by self._perform() """ # The help output does not depend on whether --help-status was passed # (dialog 1.2-20130902). return self._widget_with_string_output( ["--dselect", filepath, unicode(height), unicode(width)], kwargs, raw_help=True) @widget def editbox(self, filepath, height=0, width=0, **kwargs): """Display a basic text editor dialog box. filepath -- file which determines the initial contents of the dialog box height -- height of the box width -- width of the box The editbox dialog displays a copy of the file contents. You may edit it using the Backspace, Delete and cursor keys to correct typing errors. It also recognizes Page Up and Page Down. Unlike the inputbox, you must tab to the "OK" or "Cancel" buttons to close the dialog. Pressing the "Enter" key within the box will split the corresponding line. Return a tuple of the form (code, text) where 'code' is the Dialog exit code and 'text' is the contents of the text entry window on exit. Notable exceptions: any exception raised by self._perform() """ return self._widget_with_string_output( ["--editbox", filepath, unicode(height), unicode(width)], kwargs) @widget def fselect(self, filepath, height=0, width=0, **kwargs): """Display a file selection dialog box. filepath -- initial file path height -- height of the box width -- width of the box The file-selection dialog displays a text-entry window in which you can type a filename (or directory), and above that two windows with directory names and filenames. Here, filepath can be a file path in which case the file and directory windows will display the contents of the path and the text-entry window will contain the preselected filename. Use tab or arrow keys to move between the windows. Within the directory or filename windows, use the up/down arrow keys to scroll the current selection. Use the space-bar to copy the current selection into the text-entry window. 
Typing any printable character switches focus to the text-entry window, entering that character as well as scrolling the directory and filename windows to the closest match. Use a carriage return or the "OK" button to accept the current value in the text-entry window, or the "Cancel" button to cancel. Return a tuple of the form (code, path) where 'code' is the Dialog exit code and 'path' is the path chosen by the user (the last element of which may be a directory or a file). Notable exceptions: any exception raised by self._perform() """ # The help output does not depend on whether --help-status was passed # (dialog 1.2-20130902). return self._widget_with_string_output( ["--fselect", filepath, unicode(height), unicode(width)], kwargs, strip_xdialog_newline=True, raw_help=True) def gauge_start(self, text="", height=8, width=54, percent=0, **kwargs): """Display gauge box. text -- text to display in the box height -- height of the box width -- width of the box percent -- initial percentage shown in the meter A gauge box displays a meter along the bottom of the box. The meter indicates a percentage. This function starts the dialog-like program telling it to display a gauge box with a text in it and an initial percentage in the meter. Return value: undefined. Gauge typical usage ------------------- Gauge typical usage (assuming that 'd' is an instance of the Dialog class) looks like this: d.gauge_start() # do something d.gauge_update(10) # 10% of the whole task is done # ... d.gauge_update(100, "any text here") # work is done exit_code = d.gauge_stop() # cleanup actions Notable exceptions: - any exception raised by self._call_program() - PythonDialogOSError """ with _OSErrorHandling(): # We need a pipe to send data to the child (dialog) process's # stdin while it is running. 
# rfd = File Descriptor for Reading # wfd = File Descriptor for Writing (child_stdin_rfd, child_stdin_wfd) = os.pipe() (child_pid, child_output_rfd) = self._call_program( ["--gauge", text, unicode(height), unicode(width), unicode(percent)], redir_child_stdin_from_fd=child_stdin_rfd, close_fds=(child_stdin_wfd,), **kwargs) # fork() is done. We don't need child_stdin_rfd in the father # process anymore. os.close(child_stdin_rfd) self._gauge_process = { "pid": child_pid, "stdin": open(child_stdin_wfd, "w"), "child_output_rfd": child_output_rfd } def gauge_update(self, percent, text="", update_text=False): """Update a running gauge box. percent -- new percentage (integer) to show in the gauge meter text -- new text to optionally display in the box update_text -- boolean indicating whether to update the text in the box This function updates the percentage shown by the meter of a running gauge box (meaning 'gauge_start' must have been called previously). If update_text is True, the text displayed in the box is also updated. See the 'gauge_start' function's documentation for information about how to use a gauge. Return value: undefined. Notable exception: PythonDialogIOError (PythonDialogOSError from Python 3.3 onwards) can be raised if there is an I/O error while writing to the pipe used to talk to the dialog-like program. """ if not isinstance(percent, int): raise BadPythonDialogUsage( "the 'percent' argument of gauge_update() must be an integer, " "but {0!r} is not".format(percent)) if update_text: gauge_data = "XXX\n{0}\n{1}\nXXX\n".format(percent, text) else: gauge_data = "{0}\n".format(percent) with _OSErrorHandling(): self._gauge_process["stdin"].write(gauge_data) self._gauge_process["stdin"].flush() # For "compatibility" with the old dialog.py... 
def gauge_iterate(*args, **kwargs): warnings.warn("Dialog.gauge_iterate() has been obsolete for " "many years", DeprecationWarning) gauge_update(*args, **kwargs) @widget @retval_is_code def gauge_stop(self): """Terminate a running gauge widget. This function performs the appropriate cleanup actions to terminate a running gauge (started with 'gauge_start'). See the 'gauge_start' function's documentation for information about how to use a gauge. Return value: the Dialog exit code from the backend. Notable exceptions: - any exception raised by self._wait_for_program_termination() - PythonDialogIOError (PythonDialogOSError from Python 3.3 onwards) can be raised if closing the pipe used to talk to the dialog-like program fails. """ p = self._gauge_process # Close the pipe that we are using to feed dialog's stdin with _OSErrorHandling(): p["stdin"].close() # According to dialog(1), the output should always be empty. exit_code = \ self._wait_for_program_termination(p["pid"], p["child_output_rfd"])[0] return exit_code @widget @retval_is_code def infobox(self, text, height=10, width=30, **kwargs): """Display an information dialog box. text -- text to display in the box height -- height of the box width -- width of the box An info box is basically a message box. However, in this case, dialog will exit immediately after displaying the message to the user. The screen is not cleared when dialog exits, so that the message will remain on the screen after the method returns. This is useful when you want to inform the user that some operations are carrying on that may require some time to finish. Return the Dialog exit code from the backend. Notable exceptions: any exception raised by self._perform() """ return self._widget_with_no_output( "infobox", ["--infobox", text, unicode(height), unicode(width)], kwargs) @widget def inputbox(self, text, height=10, width=30, init='', **kwargs): """Display an input dialog box. 
text -- text to display in the box height -- height of the box width -- width of the box init -- default input string An input box is useful when you want to ask questions that require the user to input a string as the answer. If init is supplied it is used to initialize the input string. When entering the string, the BACKSPACE key can be used to correct typing errors. If the input string is longer than can fit in the dialog box, the input field will be scrolled. Return a tuple of the form (code, string) where 'code' is the Dialog exit code and 'string' is the string entered by the user. Notable exceptions: any exception raised by self._perform() """ # The help output does not depend on whether --help-status was passed # (dialog 1.2-20130902). return self._widget_with_string_output( ["--inputbox", text, unicode(height), unicode(width), init], kwargs, strip_xdialog_newline=True, raw_help=True) @widget def inputmenu(self, text, height=0, width=60, menu_height=7, choices=[], **kwargs): """Display an inputmenu dialog box. text -- text to display in the box height -- height of the box width -- width of the box menu_height -- height of the menu (scrollable part) choices -- a sequence of (tag, item) tuples, the meaning of which is explained below Overview -------- An inputmenu box is a dialog box that can be used to present a list of choices in the form of a menu for the user to choose. Choices are displayed in the given order. The main differences with the menu dialog box are: * entries are not automatically centered, but left-adjusted; * the current entry can be renamed by pressing the Rename button, which allows editing the 'item' part of the current entry. Each menu entry consists of a 'tag' string and an 'item' string. The tag gives the entry a name to distinguish it from the other entries in the menu and to provide quick keyboard access. The item is a short description of the option that the entry represents. 
The user can move between the menu entries by pressing the UP/DOWN keys or the first letter of the tag as a hot key. There are 'menu_height' lines (not entries!) displayed in the scrollable part of the menu at one time. BEWARE! It is strongly advised not to put any space in tags, otherwise the dialog output can be ambiguous if the corresponding entry is renamed, causing pythondialog to return a wrong tag string and new item text. The reason is that in this case, the dialog output is "RENAMED <tag> <item>" (without angle brackets) and pythondialog cannot guess whether spaces after the "RENAMED " prefix belong to the <tag> or the new <item> text. Note: there is no point in calling this method with 'help_status=True', because it is not possible to rename several items nor is it possible to choose the Help button (or any button other than Rename) once one has started to rename an item. Return value ------------ Return a tuple of the form (exit_info, tag, new_item_text) where: 'exit_info' is either: - the string "accepted", meaning that an entry was accepted without renaming; - the string "renamed", meaning that an entry was accepted after being renamed; - one of the standard Dialog exit codes Dialog.CANCEL, Dialog.ESC, Dialog.HELP. 'tag' indicates which entry was accepted (with or without renaming), if any. If no entry was accepted (e.g., if the dialog was exited with the Cancel button), then 'tag' is None. 'new_item_text' gives the new 'item' part of the renamed entry if 'exit_info' is "renamed", otherwise it is None. 
Notable exceptions: any exception raised by self._perform() """ cmd = ["--inputmenu", text, unicode(height), unicode(width), unicode(menu_height)] for t in choices: cmd.extend(t) (code, output) = self._perform(cmd, **kwargs) if code == self.HELP: help_id = self._parse_help(output, kwargs) return (code, help_id, None) elif code == self.OK: return ("accepted", output, None) elif code == self.EXTRA: if not output.startswith("RENAMED "): raise PythonDialogBug( "'output' does not start with 'RENAMED ': {0!r}".format( output)) t = output.split(' ', 2) return ("renamed", t[1], t[2]) else: return (code, None, None) @widget def menu(self, text, height=15, width=54, menu_height=7, choices=[], **kwargs): """Display a menu dialog box. text -- text to display in the box height -- height of the box width -- width of the box menu_height -- number of entries displayed in the box (which can be scrolled) at a given time choices -- a sequence of (tag, item) tuples (see below) Overview -------- As its name suggests, a menu box is a dialog box that can be used to present a list of choices in the form of a menu for the user to choose. Choices are displayed in the given order. Each menu entry consists of a 'tag' string and an 'item' string. The tag gives the entry a name to distinguish it from the other entries in the menu and to provide quick keyboard access. The item is a short description of the option that the entry represents. The user can move between the menu entries by pressing the UP/DOWN keys, the first letter of the tag as a hot key, or the number keys 1-9. There are 'menu_height' entries displayed in the menu at one time, but the menu will be scrolled if there are more entries than that. Return value ------------ Return a tuple of the form (code, tag) where 'code' is the Dialog exit code and 'tag' the tag string of the item that the user chose. 
Notable exceptions: any exception raised by self._perform() """ cmd = ["--menu", text, unicode(height), unicode(width), unicode(menu_height)] for t in choices: cmd.extend(t) return self._widget_with_string_output( cmd, kwargs, strip_xdialog_newline=True) @widget @retval_is_code def mixedgauge(self, text, height=0, width=0, percent=0, elements=[], **kwargs): """Display a mixed gauge dialog box. text -- text to display in the middle of the box, between the elements list and the progress bar height -- height of the box width -- width of the box percent -- integer giving the percentage for the global progress bar elements -- a sequence of (tag, item) tuples, the meaning of which is explained below A mixedgauge box displays a list of "elements" with status indication for each of them, followed by a text and finally a (global) progress bar along the bottom of the box. The top part ('elements') is suitable for displaying a task list. One element is displayed per line, with its 'tag' part on the left and its 'item' part on the right. The 'item' part is a string that is displayed on the right of the same line. The 'item' of an element can be an arbitrary string, but special values listed in the dialog(3) manual page translate into a status indication for the corresponding task ('tag'), such as: "Succeeded", "Failed", "Passed", "Completed", "Done", "Skipped", "In Progress", "Checked", "N/A" or a progress bar. A progress bar for an element is obtained by supplying a negative number for the 'item'. For instance, "-75" will cause a progress bar indicating 75 % to be displayed on the corresponding line. For your convenience, if an 'item' appears to be an integer or a float, it will be converted to a string before being passed to the dialog-like program. 'text' is shown as a sort of caption between the list and the global progress bar. The latter displays 'percent' as the percentage of completion. Contrary to the gauge widget, mixedgauge is completely static. 
You have to call mixedgauge() several times in order to display different percentages in the global progress bar, or status indicators for a given task. Return the Dialog exit code from the backend. Notable exceptions: any exception raised by self._perform() """ cmd = ["--mixedgauge", text, unicode(height), unicode(width), unicode(percent)] for t in elements: cmd.extend( (t[0], unicode(t[1])) ) return self._widget_with_no_output("mixedgauge", cmd, kwargs) @widget @retval_is_code def msgbox(self, text, height=10, width=30, **kwargs): """Display a message dialog box, with scrolling and line wrapping. text -- text to display in the box height -- height of the box width -- width of the box Display a text in a message box, with a scrollbar and percentage indication if the text is too long to fit in a single "screen". A message box is very similar to a yes/no box. The only difference between a message box and a yes/no box is that a message box has only a single OK button. You can use this dialog box to display any message you like. After reading the message, the user can press the Enter key so that dialog will exit and the calling program can continue its operation. msgbox() performs automatic line wrapping. If you want to force a newline at some point, simply insert it in 'text'. In other words (with the default settings), newline characters in 'text' *are* respected; the line wrapping process performed by dialog only inserts *additional* newlines when needed. If you want no automatic line wrapping, consider using scrollbox(). Return the Dialog exit code from the backend. Notable exceptions: any exception raised by self._perform() """ return self._widget_with_no_output( "msgbox", ["--msgbox", text, unicode(height), unicode(width)], kwargs) @widget @retval_is_code def pause(self, text, height=15, width=60, seconds=5, **kwargs): """Display a pause dialog box. 
text -- text to display in the box height -- height of the box width -- width of the box seconds -- number of seconds to pause for (integer) A pause box displays a text and a meter along the bottom of the box, during a specified amount of time ('seconds'). The meter indicates how many seconds remain until the end of the pause. The widget exits when the specified number of seconds is elapsed, or immediately if the user presses the OK button, the Cancel button or the Esc key. Return the Dialog exit code, which is Dialog.OK if the pause ended automatically after 'seconds' seconds or if the user pressed the OK button. Notable exceptions: any exception raised by self._perform() """ return self._widget_with_no_output( "pause", ["--pause", text, unicode(height), unicode(width), unicode(seconds)], kwargs) @widget def passwordbox(self, text, height=10, width=60, init='', **kwargs): """Display a password input dialog box. text -- text to display in the box height -- height of the box width -- width of the box init -- default input password A password box is similar to an input box, except that the text the user enters is not displayed. This is useful when prompting for passwords or other sensitive information. Be aware that if anything is passed in "init", it will be visible in the system's process table to casual snoopers. Also, it is very confusing to the user to provide them with a default password they cannot see. For these reasons, using "init" is highly discouraged. By default (as in dialog), nothing is echoed to the terminal as the user enters the sensitive text. This can be confusing to users. Use the 'insecure' keyword argument if you want an asterisk to be echoed for each character entered by the user. Return a tuple of the form (code, password) where 'code' is the Dialog exit code and 'password' is the password entered by the user. 
Notable exceptions: any exception raised by self._perform() """ # The help output does not depend on whether --help-status was passed # (dialog 1.2-20130902). return self._widget_with_string_output( ["--passwordbox", text, unicode(height), unicode(width), init], kwargs, strip_xdialog_newline=True, raw_help=True) def _progressboxoid(self, widget, file_path=None, file_flags=os.O_RDONLY, fd=None, text=None, height=20, width=78, **kwargs): if (file_path is None and fd is None) or \ (file_path is not None and fd is not None): raise BadPythonDialogUsage( "{0}.{1}.{2}: either 'file_path' or 'fd' must be provided, and " "not both at the same time".format( __name__, self.__class__.__name__, widget)) with _OSErrorHandling(): if file_path is not None: if fd is not None: raise PythonDialogBug( "unexpected non-None value for 'fd': {0!r}".format(fd)) # No need to pass 'mode', as the file is not going to be # created here. fd = os.open(file_path, file_flags) try: args = [ "--{0}".format(widget) ] if text is not None: args.append(text) args.extend([unicode(height), unicode(width)]) kwargs["redir_child_stdin_from_fd"] = fd code = self._widget_with_no_output(widget, args, kwargs) finally: with _OSErrorHandling(): if file_path is not None: # We open()ed file_path ourselves, let's close it now. os.close(fd) return code @widget @retval_is_code def progressbox(self, file_path=None, file_flags=os.O_RDONLY, fd=None, text=None, height=20, width=78, **kwargs): """Display a possibly growing stream in a dialog box, as with "tail -f". file_path -- path to the file that is going to be displayed file_flags -- flags used when opening 'file_path'; those are passed to os.open() function (not the built-in open function!). By default, only one flag is used: os.O_RDONLY. 
OR, ALTERNATIVELY: fd -- file descriptor for the stream to be displayed text -- caption continuously displayed at the top, above the stream text, or None to disable the caption height -- height of the box width -- width of the box Display the contents of the specified file, updating the dialog box whenever the file grows, as with the "tail -f" command. The file can be specified in two ways: - either by giving its path (and optionally os.open() flags) with parameters 'file_path' and 'file_flags'; - or by passing its file descriptor with parameter 'fd' (in which case it may not even be a file; for instance, it could be an anonymous pipe created with os.pipe()). Return the Dialog exit code from the backend. Notable exceptions: PythonDialogIOError if the Python version is < 3.3 PythonDialogOSError any exception raised by self._perform() """ return self._progressboxoid( "progressbox", file_path=file_path, file_flags=file_flags, fd=fd, text=text, height=height, width=width, **kwargs) @widget @retval_is_code def programbox(self, file_path=None, file_flags=os.O_RDONLY, fd=None, text=None, height=20, width=78, **kwargs): """Display a possibly growing stream in a dialog box, as with "tail -f". A programbox is very similar to a progressbox. The only difference between a program box and a progress box is that a program box displays an OK button, but only after the input stream has been exhausted (i.e., End Of File has been reached). This dialog box can be used to display the piped output of an external program. After the program completes, the user can press the Enter key to close the dialog and resume execution of the calling program. The parameters and exceptions are the same as for 'progressbox'. Please refer to the corresponding documentation. This widget requires dialog >= 1.1 (2011-03-02). 
""" self._dialog_version_check("1.1", "the programbox widget") return self._progressboxoid( "programbox", file_path=file_path, file_flags=file_flags, fd=fd, text=text, height=height, width=width, **kwargs) @widget def radiolist(self, text, height=15, width=54, list_height=7, choices=[], **kwargs): """Display a radiolist box. text -- text to display in the box height -- height of the box width -- width of the box list_height -- number of entries displayed in the box (which can be scrolled) at a given time choices -- a list of tuples (tag, item, status) where 'status' specifies the initial on/off state of each entry; can be True or False, 1 or 0, "on" or "off" (True and 1 meaning "on"), or any case variation of these two strings. No more than one entry should be set to True. A radiolist box is similar to a menu box. The main difference is that you can indicate which entry is initially selected, by setting its status to True. Return a tuple of the form (code, tag) with the tag for the entry that was chosen by the user. 'code' is the Dialog exit code from the backend. If the user exits with ESC or CANCEL, or if all entries were initially set to False and not altered before the user chose OK, the returned tag is the empty string. Notable exceptions: any exception raised by self._perform() or _to_onoff() """ cmd = ["--radiolist", text, unicode(height), unicode(width), unicode(list_height)] for t in choices: cmd.extend([ t[0], t[1], _to_onoff(t[2]) ] + list(t[3:])) (code, output) = self._perform(cmd, **kwargs) output = self._strip_xdialog_newline(output) if code == self.HELP: help_data = self._parse_help(output, kwargs) if self._help_status_on(kwargs): help_id, selected_tag = help_data # Reconstruct 'choices' with the selected item inferred from # 'selected_tag'. 
updated_choices = [] for elt in choices: tag, item, status = elt[:3] rest = elt[3:] updated_choices.append([ tag, item, tag == selected_tag ] + list(rest)) return (code, (help_id, selected_tag, updated_choices)) else: return (code, help_data) else: return (code, output) @widget def rangebox(self, text, height=0, width=0, min=None, max=None, init=None, **kwargs): """Display an range dialog box. text -- text to display above the actual range control height -- height of the box width -- width of the box min -- minimum value for the range control max -- maximum value for the range control init -- initial value for the range control The rangebox dialog allows the user to select from a range of values using a kind of slider. The range control shows the current value as a bar (like the gauge dialog). The return value is a tuple of the form (code, val) where 'code' is the Dialog exit code and 'val' is an integer: the value chosen by the user. The Tab and arrow keys move the cursor between the buttons and the range control. When the cursor is on the latter, you can change the value with the following keys: Left/Right arrows select a digit to modify +/- increment/decrement the selected digit by one unit 0-9 set the selected digit to the given value Some keys are also recognized in all cursor positions: Home/End set the value to its minimum or maximum PageUp/PageDown decrement/increment the value so that the slider moves by one column This widget requires dialog >= 1.2 (2012-12-30). 
Notable exceptions: any exception raised by self._perform() """ self._dialog_version_check("1.2", "the rangebox widget") for name in ("min", "max", "init"): if not isinstance(locals()[name], int): raise BadPythonDialogUsage( "'{0}' argument not an int: {1!r}".format(name, locals()[name])) (code, output) = self._perform( ["--rangebox", text] + [ unicode(i) for i in (height, width, min, max, init) ], **kwargs) if code == self.HELP: help_data = self._parse_help(output, kwargs, raw_format=True) # The help output does not depend on whether --help-status was # passed (dialog 1.2-20130902). return (code, int(help_data)) elif code in (self.OK, self.EXTRA): return (code, int(output)) else: return (code, None) @widget @retval_is_code def scrollbox(self, text, height=20, width=78, **kwargs): """Display a string in a scrollable box, with no line wrapping. text -- string to display in the box height -- height of the box width -- width of the box This method is a layer on top of textbox. The textbox widget in dialog allows to display file contents only. This method allows you to display any text in a scrollable box. This is simply done by creating a temporary file, calling textbox() and deleting the temporary file afterwards. The text is not automatically wrapped. New lines in the scrollable box will be placed exactly as in 'text'. If you want automatic line wrapping, you should use the msgbox widget instead (the 'textwrap' module from the Python standard library is also worth knowing about). Return the Dialog exit code from the backend. Notable exceptions: - UnableToCreateTemporaryDirectory - PythonDialogIOError if the Python version is < 3.3 - PythonDialogOSError - exceptions raised by the tempfile module (which are unfortunately not mentioned in its documentation, at least in Python 2.3.3...) """ # In Python < 2.3, the standard library does not have # tempfile.mkstemp(), and unfortunately, tempfile.mktemp() is # insecure. 
So, I create a non-world-writable temporary directory and # store the temporary file in this directory. with _OSErrorHandling(): tmp_dir = _create_temporary_directory() fName = os.path.join(tmp_dir, "text") # If we are here, tmp_dir *is* created (no exception was raised), # so chances are great that os.rmdir(tmp_dir) will succeed (as # long as tmp_dir is empty). # # Don't move the _create_temporary_directory() call inside the # following try statement, otherwise the user will always see a # PythonDialogOSError instead of an # UnableToCreateTemporaryDirectory because whenever # UnableToCreateTemporaryDirectory is raised, the subsequent # os.rmdir(tmp_dir) is bound to fail. try: # No race condition as with the deprecated tempfile.mktemp() # since tmp_dir is not world-writable. with open(fName, mode="w") as f: f.write(text) # Ask for an empty title unless otherwise specified if kwargs.get("title", None) is None: kwargs["title"] = "" return self._widget_with_no_output( "textbox", ["--textbox", fName, unicode(height), unicode(width)], kwargs) finally: if os.path.exists(fName): os.unlink(fName) os.rmdir(tmp_dir) @widget @retval_is_code def tailbox(self, filename, height=20, width=60, **kwargs): """Display the contents of a file in a dialog box, as with "tail -f". filename -- name of the file, the contents of which is to be displayed in the box height -- height of the box width -- width of the box Display the contents of the specified file, updating the dialog box whenever the file grows, as with the "tail -f" command. Return the Dialog exit code from the backend. Notable exceptions: any exception raised by self._perform() """ return self._widget_with_no_output( "tailbox", ["--tailbox", filename, unicode(height), unicode(width)], kwargs) # No tailboxbg widget, at least for now. @widget @retval_is_code def textbox(self, filename, height=20, width=60, **kwargs): """Display the contents of a file in a dialog box. 
filename -- name of the file whose contents is to be displayed in the box height -- height of the box width -- width of the box A text box lets you display the contents of a text file in a dialog box. It is like a simple text file viewer. The user can move through the file by using the UP/DOWN, PGUP/PGDN and HOME/END keys available on most keyboards. If the lines are too long to be displayed in the box, the LEFT/RIGHT keys can be used to scroll the text region horizontally. For more convenience, forward and backward searching functions are also provided. Return the Dialog exit code from the backend. Notable exceptions: any exception raised by self._perform() """ # This is for backward compatibility... not that it is # stupid, but I prefer explicit programming. if kwargs.get("title", None) is None: kwargs["title"] = filename return self._widget_with_no_output( "textbox", ["--textbox", filename, unicode(height), unicode(width)], kwargs) def _timebox_parse_time(self, time_str): try: mo = _timebox_time_cre.match(time_str) except re.error, e: raise PythonDialogReModuleError(unicode(e)) if not mo: raise UnexpectedDialogOutput( "the dialog-like program returned the following " "unexpected output (a time string was expected) with the " "--timebox option: {0!r}".format(time_str)) return [ int(s) for s in mo.group("hour", "minute", "second") ] @widget def timebox(self, text, height=3, width=30, hour=-1, minute=-1, second=-1, **kwargs): """Display a time dialog box. text -- text to display in the box height -- height of the box width -- width of the box hour -- inititial hour selected minute -- inititial minute selected second -- inititial second selected A dialog is displayed which allows you to select hour, minute and second. If the values for hour, minute or second are negative (or not explicitely provided, as they default to -1), the current time's corresponding values are used. You can increment or decrement any of those using the left-, up-, right- and down-arrows. 
Use tab or backtab to move between windows. Return a tuple of the form (code, time) where: - 'code' is the Dialog exit code; - 'time' is a list of the form [hour, minute, second], where 'hour', 'minute' and 'second' are integers corresponding to the time chosen by the user. Notable exceptions: - any exception raised by self._perform() - PythonDialogReModuleError - UnexpectedDialogOutput """ (code, output) = self._perform( ["--timebox", text, unicode(height), unicode(width), unicode(hour), unicode(minute), unicode(second)], **kwargs) if code == self.HELP: help_data = self._parse_help(output, kwargs, raw_format=True) # The help output does not depend on whether --help-status was # passed (dialog 1.2-20130902). return (code, self._timebox_parse_time(help_data)) elif code in (self.OK, self.EXTRA): return (code, self._timebox_parse_time(output)) else: return (code, None) @widget def treeview(self, text, height=0, width=0, list_height=0, nodes=[], **kwargs): """Display a treeview box. text -- text to display at the top of the box height -- height of the box width -- width of the box list_height -- number of lines reserved for the main part of the box, where the tree is displayed nodes -- a list of (tag, item, status, depth) tuples describing nodes, where: - 'tag' is used to indicate which node was selected by the user on exit; - 'item' is the text displayed for the node; - 'status' specifies the initial on/off state of each node; can be True or False, 1 or 0, "on" or "off" (True, 1 and "on" meaning selected), or any case variation of these two strings; - 'depth' is a non-negative integer indicating the depth of the node in the tree (0 for the root node). Display nodes organized in a tree structure. Each node has a tag, an 'item' text, a selected status, and a depth in the tree. Only the 'item' texts are displayed in the widget; tags are only used for the return value. Only one node can be selected at a given time, as for the radiolist widget. 
Return a tuple of the form (code, tag) where: - 'code' is the Dialog exit code from the backend; - 'tag' is the tag of the selected node. This widget requires dialog >= 1.2 (2012-12-30). Notable exceptions: any exception raised by self._perform() or _to_onoff() """ self._dialog_version_check("1.2", "the treeview widget") cmd = ["--treeview", text, unicode(height), unicode(width), unicode(list_height)] nselected = 0 for i, t in enumerate(nodes): if not isinstance(t[3], int): raise BadPythonDialogUsage( "fourth element of node {0} not an int: {1!r}".format( i, t[3])) status = _to_onoff(t[2]) if status == "on": nselected += 1 cmd.extend([ t[0], t[1], status, unicode(t[3]) ] + list(t[4:])) if nselected != 1: raise BadPythonDialogUsage( "exactly one node must be selected, not {0}".format(nselected)) (code, output) = self._perform(cmd, **kwargs) if code == self.HELP: help_data = self._parse_help(output, kwargs) if self._help_status_on(kwargs): help_id, selected_tag = help_data # Reconstruct 'nodes' with the selected item inferred from # 'selected_tag'. updated_nodes = [] for elt in nodes: tag, item, status = elt[:3] rest = elt[3:] updated_nodes.append([ tag, item, tag == selected_tag ] + list(rest)) return (code, (help_id, selected_tag, updated_nodes)) else: return (code, help_data) elif code in (self.OK, self.EXTRA): return (code, output) else: return (code, None) @widget @retval_is_code def yesno(self, text, height=10, width=30, **kwargs): """Display a yes/no dialog box. text -- text to display in the box height -- height of the box width -- width of the box A yes/no dialog box of size 'height' rows by 'width' columns will be displayed. The string specified by 'text' is displayed inside the dialog box. If this string is too long to fit in one line, it will be automatically divided into multiple lines at appropriate places. The text string can also contain the sub-string "\\n" or newline characters to control line breaking explicitly. 
This dialog box is useful for asking questions that require the user to answer either yes or no. The dialog box has a Yes button and a No button, in which the user can switch between by pressing the TAB key. Return the Dialog exit code from the backend. Notable exceptions: any exception raised by self._perform() """ return self._widget_with_no_output( "yesno", ["--yesno", text, unicode(height), unicode(width)], kwargs)
gpl-2.0
3,278,259,654,904,537,000
40.017458
161
0.603412
false
4.448553
false
false
false
ShashaQin/erpnext
erpnext/projects/doctype/time_log/time_log.py
1
11405
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe from frappe import _ from datetime import datetime,timedelta from frappe.utils import cstr, flt, get_datetime, get_time, getdate, cint ,get_defaults from dateutil.relativedelta import relativedelta from erpnext.manufacturing.doctype.manufacturing_settings.manufacturing_settings import get_mins_between_operations class OverlapError(frappe.ValidationError): pass class OverProductionLoggedError(frappe.ValidationError): pass class NotSubmittedError(frappe.ValidationError): pass class NegativeHoursError(frappe.ValidationError): pass from frappe.model.document import Document class TimeLog(Document): def validate(self): self.set_status() self.set_title() if not(cint(get_defaults("fs_simplified_time_log"))): self.validate_overlap() self.validate_timings() self.calculate_total_hours() self.validate_time_log_for() self.check_workstation_timings() self.validate_production_order() self.validate_manufacturing() self.set_project_if_missing() self.update_cost() def on_submit(self): self.update_production_order() self.update_task_and_project() def on_cancel(self): self.update_production_order() self.update_task_and_project() def before_update_after_submit(self): self.set_status() def before_cancel(self): self.set_status() def set_status(self): self.status = { 0: "Draft", 1: "Submitted", 2: "Cancelled" }[self.docstatus or 0] if self.time_log_batch: self.status="Batched for Billing" if self.sales_invoice: self.status="Billed" def set_title(self): """Set default title for the Time Log""" if self.production_order: self.title = _("{0} for {1}").format(self.operation, self.production_order) elif self.activity_type : self.title = _("{0}").format(self.activity_type) if self.quotation_: self.title += " for " + self.quotation_ if self.task: self.title += " for " + self.task if self.project: self.title += " for " 
+ self.project if self.support_ticket: self.title += " for " + self.support_ticket def validate_overlap(self): """Checks if 'Time Log' entries overlap for a user, workstation. """ self.validate_overlap_for("user") self.validate_overlap_for("employee") self.validate_overlap_for("workstation") def validate_overlap_for(self, fieldname): existing = self.get_overlap_for(fieldname) if existing: frappe.throw(_("This Time Log conflicts with {0} for {1} {2}").format(existing.name, self.meta.get_label(fieldname), self.get(fieldname)), OverlapError) def get_overlap_for(self, fieldname): if not self.get(fieldname): return existing = frappe.db.sql("""select name, from_time, to_time from `tabTime Log` where `{0}`=%(val)s and ( (%(from_time)s > from_time and %(from_time)s < to_time) or (%(to_time)s > from_time and %(to_time)s < to_time) or (%(from_time)s <= from_time and %(to_time)s >= to_time)) and name!=%(name)s and docstatus < 2""".format(fieldname), { "val": self.get(fieldname), "from_time": self.from_time, "to_time": self.to_time, "name": self.name or "No Name" }, as_dict=True) return existing[0] if existing else None def validate_timings(self): if self.to_time and self.from_time and get_datetime(self.to_time) <= get_datetime(self.from_time): frappe.throw(_("To Time must be greater than From Time"), NegativeHoursError) def calculate_total_hours(self): if self.to_time and self.from_time: from frappe.utils import time_diff_in_seconds self.hours = flt(time_diff_in_seconds(self.to_time, self.from_time)) / 3600 def set_project_if_missing(self): """Set project if task is set""" if self.task and not self.project: self.project = frappe.db.get_value("Task", self.task, "project") def validate_time_log_for(self): if not self.for_manufacturing: for fld in ["production_order", "operation", "workstation", "completed_qty"]: self.set(fld, None) else: self.activity_type=None def check_workstation_timings(self): """Checks if **Time Log** is between operating hours of the **Workstation**.""" 
if self.workstation and self.from_time and self.to_time: from erpnext.manufacturing.doctype.workstation.workstation import check_if_within_operating_hours check_if_within_operating_hours(self.workstation, self.operation, self.from_time, self.to_time) def validate_production_order(self): """Throws 'NotSubmittedError' if **production order** is not submitted. """ if self.production_order: if frappe.db.get_value("Production Order", self.production_order, "docstatus") != 1 : frappe.throw(_("You can make a time log only against a submitted production order"), NotSubmittedError) def update_production_order(self): """Updates `start_date`, `end_date`, `status` for operation in Production Order.""" if self.production_order and self.for_manufacturing: if not self.operation_id: frappe.throw(_("Operation ID not set")) dates = self.get_operation_start_end_time() summary = self.get_time_log_summary() pro = frappe.get_doc("Production Order", self.production_order) for o in pro.operations: if o.name == self.operation_id: o.actual_start_time = dates.start_date o.actual_end_time = dates.end_date o.completed_qty = summary.completed_qty o.actual_operation_time = summary.mins break pro.flags.ignore_validate_update_after_submit = True pro.update_operation_status() pro.calculate_operating_cost() pro.set_actual_dates() pro.save() def get_operation_start_end_time(self): """Returns Min From and Max To Dates of Time Logs against a specific Operation. 
""" return frappe.db.sql("""select min(from_time) as start_date, max(to_time) as end_date from `tabTime Log` where production_order = %s and operation = %s and docstatus=1""", (self.production_order, self.operation), as_dict=1)[0] def move_to_next_day(self): """Move start and end time one day forward""" self.from_time = get_datetime(self.from_time) + relativedelta(day=1) def move_to_next_working_slot(self): """Move to next working slot from workstation""" workstation = frappe.get_doc("Workstation", self.workstation) slot_found = False for working_hour in workstation.working_hours: if get_datetime(self.from_time).time() < get_time(working_hour.start_time): self.from_time = getdate(self.from_time).strftime("%Y-%m-%d") + " " + working_hour.start_time slot_found = True break if not slot_found: # later than last time self.from_time = getdate(self.from_time).strftime("%Y-%m-%d") + " " + workstation.working_hours[0].start_time self.move_to_next_day() def move_to_next_non_overlapping_slot(self): """If in overlap, set start as the end point of the overlapping time log""" overlapping = self.get_overlap_for("workstation") \ or self.get_overlap_for("employee") \ or self.get_overlap_for("user") if not overlapping: frappe.throw("Logical error: Must find overlapping") self.from_time = get_datetime(overlapping.to_time) + get_mins_between_operations() def get_time_log_summary(self): """Returns 'Actual Operating Time'. 
""" return frappe.db.sql("""select sum(hours*60) as mins, sum(completed_qty) as completed_qty from `tabTime Log` where production_order = %s and operation_id = %s and docstatus=1""", (self.production_order, self.operation_id), as_dict=1)[0] def validate_manufacturing(self): if self.for_manufacturing: if not self.production_order: frappe.throw(_("Production Order is Mandatory")) if not self.completed_qty: self.completed_qty = 0 production_order = frappe.get_doc("Production Order", self.production_order) pending_qty = flt(production_order.qty) - flt(production_order.produced_qty) if flt(self.completed_qty) > pending_qty: frappe.throw(_("Completed Qty cannot be more than {0} for operation {1}").format(pending_qty, self.operation), OverProductionLoggedError) else: self.production_order = None self.operation = None self.quantity = None def update_cost(self): rate = get_activity_cost(self.employee, self.activity_type) if rate: self.costing_rate = flt(rate.get('costing_rate')) self.billing_rate = flt(rate.get('billing_rate')) self.costing_amount = self.costing_rate * self.hours if self.billable: self.billing_amount = self.billing_rate * self.hours else: self.billing_amount = 0 if self.additional_cost and self.billable: self.billing_amount += self.additional_cost def update_task_and_project(self): """Update costing rate in Task or Project if either is set""" if self.task: task = frappe.get_doc("Task", self.task) task.update_time_and_costing() task.save(ignore_permissions=True) elif self.project: frappe.get_doc("Project", self.project).update_project() @frappe.whitelist() def get_events(start, end, filters=None): """Returns events for Gantt / Calendar view rendering. :param start: Start date-time. :param end: End date-time. :param filters: Filters like workstation, project etc. 
""" from frappe.desk.calendar import get_event_conditions conditions = get_event_conditions("Time Log", filters) if (cint(get_defaults("fs_simplified_time_log"))): date_cond = "date_worked between %(start)s and %(end)s" else: date_cond = "( from_time between %(start)s and %(end)s or to_time between %(start)s and %(end)s )" data = frappe.db.sql("""select name, from_time, to_time, activity_type, task, project, production_order, workstation, date_worked, employee, hours from `tabTime Log` where docstatus < 2 and {date_cond} {conditions}""".format(conditions=conditions,date_cond=date_cond), { "start": start, "end": end }, as_dict=True, update={"allDay": 0}) #aligns the assorted time logs so they are layed out sequentially if(cint(get_defaults("fs_simplified_time_log"))): slist = {} for idx,da in enumerate(data): if (da.employee not in slist): slist[da.employee]={} if (da.date_worked not in slist[da.employee]): slist[da.employee][da.date_worked]=[] slist[da.employee][da.date_worked].append([idx,da.from_time,da.to_time,da.hours]) for e in slist: for d in slist[e]: temp = slist[e][d][0] temp[1]= datetime.combine(d,get_time("8:00:00")) temp[2]= temp[1] + timedelta(hours=temp[3]) for idx,l in enumerate(slist[e][d][1:]): data[l[0]]["from_time"]= l[1] = slist[e][d][idx][2] data[l[0]]["to_time"] = l[2] = l[1]+ timedelta(hours=l[3]) l= slist[e][d][0] data[temp[0]]["from_time"]= slist[e][d][0][1] data[temp[0]]["to_time"] = slist[e][d][0][2] for d in data: d.title = d.name + ": " + (d.activity_type or d.production_order or "") if d.task: d.title += " for Task: " + d.task if d.project: d.title += " for Project: " + d.project return data @frappe.whitelist() def get_activity_cost(employee=None, activity_type=None): rate = frappe.db.get_values("Activity Cost", {"employee": employee, "activity_type": activity_type}, ["costing_rate", "billing_rate"], as_dict=True) if not rate: rate = frappe.db.get_values("Activity Type", {"activity_type": activity_type}, ["costing_rate", 
"billing_rate"], as_dict=True) return rate[0] if rate else {}
agpl-3.0
5,370,010,735,895,954,000
34.752351
115
0.694257
false
3.05273
false
false
false
ekcs/congress
congress/datalog/builtin/congressbuiltin.py
1
13313
#! /usr/bin/python # # Copyright (c) 2014 IBM, Corp. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import datetime import six from six.moves import range from thirdparty_dateutil import parser as datetime_parser class DatetimeBuiltins(object): # casting operators (used internally) @classmethod def to_timedelta(cls, x): if isinstance(x, six.string_types): fields = x.split(":") num_fields = len(fields) args = {} keys = ['seconds', 'minutes', 'hours', 'days', 'weeks'] for i in range(0, len(fields)): args[keys[i]] = int(fields[num_fields - 1 - i]) return datetime.timedelta(**args) else: return datetime.timedelta(seconds=x) @classmethod def to_datetime(cls, x): return datetime_parser.parse(x, ignoretz=True) # current time @classmethod def now(cls): return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") # extraction and creation of datetimes @classmethod def unpack_time(cls, x): x = cls.to_datetime(x) return (x.hour, x.minute, x.second) @classmethod def unpack_date(cls, x): x = cls.to_datetime(x) return (x.year, x.month, x.day) @classmethod def unpack_datetime(cls, x): x = cls.to_datetime(x) return (x.year, x.month, x.day, x.hour, x.minute, x.second) @classmethod def pack_time(cls, hour, minute, second): return "{}:{}:{}".format(hour, minute, second) @classmethod def pack_date(cls, year, month, day): return "{}-{}-{}".format(year, month, day) @classmethod def pack_datetime(cls, year, month, day, hour, minute, second): return "{}-{}-{} {}:{}:{}".format( 
year, month, day, hour, minute, second) # extraction/creation convenience function @classmethod def extract_date(cls, x): return str(cls.to_datetime(x).date()) @classmethod def extract_time(cls, x): return str(cls.to_datetime(x).time()) # conversion to seconds @classmethod def datetime_to_seconds(cls, x): since1900 = cls.to_datetime(x) - datetime.datetime(year=1900, month=1, day=1) return int(since1900.total_seconds()) # native operations on datetime @classmethod def datetime_plus(cls, x, y): return str(cls.to_datetime(x) + cls.to_timedelta(y)) @classmethod def datetime_minus(cls, x, y): return str(cls.to_datetime(x) - cls.to_timedelta(y)) @classmethod def datetime_lessthan(cls, x, y): return cls.to_datetime(x) < cls.to_datetime(y) @classmethod def datetime_lessthanequal(cls, x, y): return cls.to_datetime(x) <= cls.to_datetime(y) @classmethod def datetime_greaterthan(cls, x, y): return cls.to_datetime(x) > cls.to_datetime(y) @classmethod def datetime_greaterthanequal(cls, x, y): return cls.to_datetime(x) >= cls.to_datetime(y) @classmethod def datetime_equal(cls, x, y): return cls.to_datetime(x) == cls.to_datetime(y) # the registry for builtins _builtin_map = { 'comparison': [ {'func': 'lt(x,y)', 'num_inputs': 2, 'code': lambda x, y: x < y}, {'func': 'lteq(x,y)', 'num_inputs': 2, 'code': lambda x, y: x <= y}, {'func': 'equal(x,y)', 'num_inputs': 2, 'code': lambda x, y: x == y}, {'func': 'gt(x,y)', 'num_inputs': 2, 'code': lambda x, y: x > y}, {'func': 'gteq(x,y)', 'num_inputs': 2, 'code': lambda x, y: x >= y}, {'func': 'max(x,y,z)', 'num_inputs': 2, 'code': lambda x, y: max(x, y)}], 'arithmetic': [ {'func': 'plus(x,y,z)', 'num_inputs': 2, 'code': lambda x, y: x + y}, {'func': 'minus(x,y,z)', 'num_inputs': 2, 'code': lambda x, y: x - y}, {'func': 'mul(x,y,z)', 'num_inputs': 2, 'code': lambda x, y: x * y}, {'func': 'div(x,y,z)', 'num_inputs': 2, 'code': lambda x, y: x / y}, {'func': 'float(x,y)', 'num_inputs': 1, 'code': lambda x: float(x)}, {'func': 'int(x,y)', 
'num_inputs': 1, 'code': lambda x: int(x)}], 'string': [ {'func': 'concat(x,y,z)', 'num_inputs': 2, 'code': lambda x, y: x + y}, {'func': 'len(x, y)', 'num_inputs': 1, 'code': lambda x: len(x)}], 'datetime': [ {'func': 'now(x)', 'num_inputs': 0, 'code': DatetimeBuiltins.now}, {'func': 'unpack_date(x, year, month, day)', 'num_inputs': 1, 'code': DatetimeBuiltins.unpack_date}, {'func': 'unpack_time(x, hours, minutes, seconds)', 'num_inputs': 1, 'code': DatetimeBuiltins.unpack_time}, {'func': 'unpack_datetime(x, y, m, d, h, i, s)', 'num_inputs': 1, 'code': DatetimeBuiltins.unpack_datetime}, {'func': 'pack_time(hours, minutes, seconds, result)', 'num_inputs': 3, 'code': DatetimeBuiltins.pack_time}, {'func': 'pack_date(year, month, day, result)', 'num_inputs': 3, 'code': DatetimeBuiltins.pack_date}, {'func': 'pack_datetime(y, m, d, h, i, s, result)', 'num_inputs': 6, 'code': DatetimeBuiltins.pack_datetime}, {'func': 'extract_date(x, y)', 'num_inputs': 1, 'code': DatetimeBuiltins.extract_date}, {'func': 'extract_time(x, y)', 'num_inputs': 1, 'code': DatetimeBuiltins.extract_time}, {'func': 'datetime_to_seconds(x, y)', 'num_inputs': 1, 'code': DatetimeBuiltins.datetime_to_seconds}, {'func': 'datetime_plus(x,y,z)', 'num_inputs': 2, 'code': DatetimeBuiltins.datetime_plus}, {'func': 'datetime_minus(x,y,z)', 'num_inputs': 2, 'code': DatetimeBuiltins.datetime_minus}, {'func': 'datetime_lt(x,y)', 'num_inputs': 2, 'code': DatetimeBuiltins.datetime_lessthan}, {'func': 'datetime_lteq(x,y)', 'num_inputs': 2, 'code': DatetimeBuiltins.datetime_lessthanequal}, {'func': 'datetime_gt(x,y)', 'num_inputs': 2, 'code': DatetimeBuiltins.datetime_greaterthan}, {'func': 'datetime_gteq(x,y)', 'num_inputs': 2, 'code': DatetimeBuiltins.datetime_greaterthanequal}, {'func': 'datetime_equal(x,y)', 'num_inputs': 2, 'code': DatetimeBuiltins.datetime_equal}]} class CongressBuiltinPred(object): def __init__(self, name, arglist, num_inputs, code): self.predname = name self.predargs = arglist 
self.num_inputs = num_inputs self.code = code self.num_outputs = len(arglist) - num_inputs def string_to_pred(self, predstring): try: self.predname = predstring.split('(')[0] self.predargs = predstring.split('(')[1].split(')')[0].split(',') except Exception: print("Unexpected error in parsing predicate string") def __str__(self): return self.predname + '(' + ",".join(self.predargs) + ')' class CongressBuiltinCategoryMap(object): def __init__(self, start_builtin_map): self.categorydict = dict() self.preddict = dict() for key, value in start_builtin_map.items(): self.categorydict[key] = [] for predtriple in value: pred = self.dict_predtriple_to_pred(predtriple) self.categorydict[key].append(pred) self.sync_with_predlist(pred.predname, pred, key, 'add') def mapequal(self, othercbc): if self.categorydict == othercbc.categorydict: return True else: return False def dict_predtriple_to_pred(self, predtriple): ncode = predtriple['code'] ninputs = predtriple['num_inputs'] nfunc = predtriple['func'] nfunc_pred = nfunc.split("(")[0] nfunc_arglist = nfunc.split("(")[1].split(")")[0].split(",") pred = CongressBuiltinPred(nfunc_pred, nfunc_arglist, ninputs, ncode) return pred def add_map(self, newmap): for key, value in newmap.items(): if key not in self.categorydict: self.categorydict[key] = [] for predtriple in value: pred = self.dict_predtriple_to_pred(predtriple) if not self.builtin_is_registered(pred): self.categorydict[key].append(pred) self.sync_with_predlist(pred.predname, pred, key, 'add') def delete_map(self, newmap): for key, value in newmap.items(): for predtriple in value: predtotest = self.dict_predtriple_to_pred(predtriple) for pred in self.categorydict[key]: if pred.predname == predtotest.predname: if pred.num_inputs == predtotest.num_inputs: self.categorydict[key].remove(pred) self.sync_with_predlist(pred.predname, pred, key, 'del') if self.categorydict[key] == []: del self.categorydict[key] def sync_with_predlist(self, predname, pred, category, operation): if 
operation == 'add': self.preddict[predname] = [pred, category] if operation == 'del': if predname in self.preddict: del self.preddict[predname] def delete_builtin(self, category, name, inputs): if category not in self.categorydict: self.categorydict[category] = [] for pred in self.categorydict[category]: if pred.num_inputs == inputs and pred.predname == name: self.categorydict[category].remove(pred) self.sync_with_predlist(name, pred, category, 'del') def get_category_name(self, predname, predinputs): if predname in self.preddict: if self.preddict[predname][0].num_inputs == predinputs: return self.preddict[predname][1] return None def exists_category(self, category): return category in self.categorydict def insert_category(self, category): self.categorydict[category] = [] def delete_category(self, category): if category in self.categorydict: categorypreds = self.categorydict[category] for pred in categorypreds: self.sync_with_predlist(pred.predname, pred, category, 'del') del self.categorydict[category] def insert_to_category(self, category, pred): if category in self.categorydict: self.categorydict[category].append(pred) self.sync_with_predlist(pred.predname, pred, category, 'add') else: assert("Category does not exist") def delete_from_category(self, category, pred): if category in self.categorydict: self.categorydict[category].remove(pred) self.sync_with_predlist(pred.predname, pred, category, 'del') else: assert("Category does not exist") def delete_all_in_category(self, category): if category in self.categorydict: categorypreds = self.categorydict[category] for pred in categorypreds: self.sync_with_predlist(pred.predname, pred, category, 'del') self.categorydict[category] = [] else: assert("Category does not exist") def builtin_is_registered(self, predtotest): """Given a CongressBuiltinPred, check if it has been registered.""" pname = predtotest.predname if pname in self.preddict: if self.preddict[pname][0].num_inputs == predtotest.num_inputs: return True 
return False def is_builtin(self, table, arity=None): """Given a Tablename and arity, check if it is a builtin.""" if table.table in self.preddict: if not arity: return True if len(self.preddict[table.table][0].predargs) == arity: return True return False def builtin(self, table): """Return a CongressBuiltinPred for given Tablename or None.""" if not isinstance(table, six.string_types): table = table.table if table in self.preddict: return self.preddict[table][0] return None def list_available_builtins(self): """Print out the list of builtins, by category.""" for key, value in self.categorydict.items(): predlist = self.categorydict[key] for pred in predlist: print(str(pred)) # a Singleton that serves as the entry point for builtin functionality builtin_registry = CongressBuiltinCategoryMap(_builtin_map)
apache-2.0
-8,263,622,788,772,332,000
37.700581
79
0.577631
false
3.663456
true
false
false
openwebcc/ba
www/lib/Laser/Util/las.py
1
20558
#!/usr/bin/python # # utility library # # lasinfo: parser library that parses LASFILE.info.txt files # created by: lasinfo -i LASFILE -o ../meta/LASFILE.info.txt -compute_density -repair # rawdata: helpers to clean up ASCII rawdata # import os import re import simplejson from gps import gps_week_from_doy class lasinfo: def __init__(self): """ setup new LASInfo parser """ self.meta = {} # define methods to use when parsing metadata self.attr_methods = { # search pattern attribute key func to call 'file signature' : ['File Signature','set_signature'], 'file source ID' : ['File Source ID','set_int'], 'global_encoding' : ['Global Encoding','set_int'], 'project ID GUID data 1-4' : ['Project ID - GUID data','set_str'], 'version major.minor' : ['Version','set_version'], 'system identifier' : ['System Identifier','set_system_identifier'], 'generating software' : ['Generating Software','set_str'], 'file creation day/year' : ['File Creation','set_creation'], 'header size' : ['Header Size','set_int'], 'offset to point data' : ['Offset to point data','set_int'], 'number var. 
length records' : ['Number of Variable Length Records','set_int'], 'point data format' : ['Point Data Record Format','set_int'], 'point data record length' : ['Point Data Record Length','set_int'], 'number of point records' : ['Legacy Number of point records','set_int'], 'number of points by return' : ['Legacy Number of points by return','set_returns'], 'scale factor x y z' : ['Scale factor','set_xyz'], 'offset x y z' : ['Offset','set_xyz'], 'min x y z' : ['Min','set_xyz'], 'max x y z' : ['Max','set_xyz'], 'start of waveform data packet record' : ['Start of Waveform Data Packet Record','set_int'], 'start of first extended variable length record' : ['Start of first Extended Variable Length Record','set_int'], 'number of extended_variable length records' : ['Number of Extended Variable Length Records','set_int'], 'extended number of point records' : ['Number of point records','set_int'], 'extended number of points by return' : ['Number of points by return','set_returns'], 'overview over number of returns of given pulse' : ['returns_of_given_pulse','ignore'], 'covered area in square meters/kilometers' : ['area','set_area'], 'covered area in square units/kilounits' : ['area','set_area'], 'point density' : ['density','set_density'], 'spacing' : ['spacing','set_spacing'], 'number of first returns' : ['first_returns','ignore'], 'number of intermediate returns' : ['intermediate_returns','ignore'], 'number of last returns' : ['last_returns','ignore'], 'number of single returns' : ['single_returns','ignore'], 'overview over extended number of returns of given pulse' : ['extended_number_of_returns','ignore'], 'minimum and maximum for all LAS point record entries' : ['min_max','set_min_max'], 'histogram of classification of points' : ['class_histo','set_class_histo'], 'WARNING' : ['warning','ignore'], 'moretocomemaybe' : ['xxx','ignore'], } def read(self, fpath): """ read file containing output of lasinfo and collect metadata """ with open(fpath) as f: # set filename and size 
of corresponding .las file lasname = re.sub(r'/meta/(.*).info.txt',r'/las/\1',fpath) if os.path.exists(lasname): self.meta['file_name'] = lasname self.meta['file_size'] = os.path.getsize(lasname) else: raise NameError('%s does not exist' % lasname) # set filenpaths to corresponding metafiles .info.txt, .hull.wkt, .traj.wkt if any metafiles = { 'info' : fpath, 'hull' : re.sub('.info.txt','.hull.wkt',fpath), 'traj' : re.sub('.info.txt','.traj.wkt',fpath) } for ftype in metafiles: if os.path.exists(metafiles[ftype]): if not 'metafiles' in self.meta: self.meta['metafiles'] = {} self.meta['metafiles'][ftype] = metafiles[ftype] # extract metadata from .info file section = None for line in f.readlines(): # set section if needed and skip lines if needed if re.search('reporting all LAS header entries',line): section = 'HEADER' continue elif re.search(r'^variable length header', line): section = 'HEADER_VAR' continue elif re.search(r'^reporting minimum and maximum for all LAS point record entries', line): section = 'MINMAX' continue elif re.search(r'^histogram of classification of points', line): section = 'HISTO' continue elif re.search(r'^histogram of extended classification of points', line): section = 'HISTO_EXT' continue elif re.search(r'^LASzip compression', line) or re.search(r'^LAStiling', line): section = None continue elif re.search(r'flagged as synthetic', line) or re.search(r'flagged as keypoints', line) or re.search(r'flagged as withheld', line): section = None continue else: # what else? 
pass # reset section unless leading blanks are present in current line if section and not re.search(r'^ +',line): section = None if section == 'HEADER': # split up trimmed line on colon+blank [key,val] = self.strip_whitespace(line).split(': ') # set header attribute with corresponding key and method getattr(self, self.attr_methods[key][1])( self.attr_methods[key][0], val ) elif section == 'HEADER_VAR': # extract SRID and projection name if available self.set_srid_proj(line) elif section == 'MINMAX': # set min/max for point record entries self.set_min_max(line) elif section in ('HISTO','HISTO_EXT'): # set classification histogram value, name and point count self.set_class_histo(line) else: parts = self.strip_whitespace(line).split(': ') if parts[0] in self.attr_methods: # set attribute with corresponding key and method getattr(self, self.attr_methods[parts[0]][1])( self.attr_methods[parts[0]][0], parts[1] ) elif parts[0] in [ 'bounding box is correct.', 'number of point records in header is correct.', 'number of points by return in header is correct.', 'extended number of point records in header is correct.', 'extended number of points by return in header is correct.' 
]: # ignore positive info from -repair continue elif parts[0] == 'bounding box was repaired.': # tell user to re-run lasinfo as header has been updated and content in .info might not be correct anymore print "RE-RUN sh /home/institut/rawdata/maintenance/scripts/als/get_lasinfo.sh %s rebuild" % self.meta['file']['las'] elif parts[0].startswith("lasinfo ("): pass else: pass print "TODO", parts, '(%s)' % f.name def has_wkt_geometry(self,ftype=None): """ return True if WKT geometry is present, false otherwise """ if 'metafiles' in self.meta and ftype in self.meta['metafiles']: return True else: return False def get_wkt_geometry(self,ftype=None): """ read WKT geometry for hull or trajectory if any """ wkt = '' if self.has_wkt_geometry(ftype): with open(self.meta['metafiles'][ftype]) as f: wkt = f.read() return wkt.rstrip() def as_json(self,obj=None,pretty=False): """ return object as JSON """ if pretty: return simplejson.dumps(obj,sort_keys=True, indent=4 * ' ') else: return simplejson.dumps(obj) def strip_whitespace(self, val=None): """ remove leading, trailing whitespace and replace successive blanks with one blank """ if type(val) == str: return re.sub(r' +',' ',val.lstrip().rstrip()) else: return val def ignore(self,key,val): """ ignore this attribute """ pass def warning(self,key,val): """ display warnings """ print "WARNING: %s=%s" % (key,val) def set_str(self,key,val): """ set value as string """ self.meta[key] = str(val) def set_int(self,key,val): """ set value as integer """ self.meta[key] = int(val) def set_signature(self,key,val): """ set file signature as string """ self.meta[key] = val.lstrip("'").rstrip("'") def set_system_identifier(self,key,val): self.meta[key] = val.lstrip("'").rstrip("'") def set_version(self,key,val): """ set major and minor version """ major,minor = [str(v) for v in val.split('.')] self.meta['Version Major'] = major self.meta['Version Minor'] = minor def set_creation(self,key,val): """ set file creation day/year """ doy,year = 
[int(v) for v in val.split('/')] self.meta['File Creation Day of Year'] = doy self.meta['File Creation Year'] = year # compute GPS-week as well self.meta['creation_gpsweek'] = gps_week_from_doy(doy,year) def set_returns(self,key,val): """ set number of points by return as list with five entries exactly """ pts = [int(v) for v in val.split(' ')] if key == 'Legacy Number of points by return': if len(pts) < 5: # fill with zeros for n in range(0,5-len(pts)): pts.append(0) self.meta['Legacy Number of points by return'] = pts[:5] elif key == 'Number of points by return': if len(pts) < 15: # fill with zeros for n in range(0,15-len(pts)): pts.append(0) self.meta['Number of points by return'] = pts[:15] else: pass def set_xyz(self,key,val): """ set x y z values as floats """ arr = [float(v) for v in val.split(' ')] if key == 'Scale factor': self.meta['X scale factor'] = arr[0] self.meta['Y scale factor'] = arr[1] self.meta['Z scale factor'] = arr[2] elif key == 'Offset': self.meta['X offset'] = arr[0] self.meta['Y offset'] = arr[1] self.meta['Z offset'] = arr[2] elif key == 'Min': self.meta['Min X'] = arr[0] self.meta['Min Y'] = arr[1] self.meta['Min Z'] = arr[2] elif key == 'Max': self.meta['Max X'] = arr[0] self.meta['Max Y'] = arr[1] self.meta['Max Z'] = arr[2] else: pass def set_srid_proj(self,line): """ set SRID and projection name if available """ if re.search('ProjectedCSTypeGeoKey',line): srid,info = (re.sub(r'^key.*value_offset (\d+) - ProjectedCSTypeGeoKey: (.*)$',r'\1;\2',self.strip_whitespace(line))).split(';') self.meta['projection_srid'] = int(srid) self.meta['projection_info'] = info def set_min_max(self,line): """ set min, max values for attribute """ for k in ('minimum','maximum'): if not k in self.meta: self.meta[k] = {} # isolate attribute name, min and max from line parts = self.strip_whitespace(line).split(' ') attr = ' '.join(parts[:-2]) if attr in ('X','Y','Z'): # skip unscaled X,Y,Z values and assign regular min / max values instead that have been 
extracted before self.meta['minimum'][attr.lower()] = self.meta['Min %s' % attr] self.meta['maximum'][attr.lower()] = self.meta['Max %s' % attr] return self.meta['minimum'][attr] = float(parts[-2]) self.meta['maximum'][attr] = float(parts[-1]) def set_class_histo(self,line): """ return classification histogram value, name and point count """ if not 'class_histo' in self.meta: self.meta['class_histo'] = {} parts = self.strip_whitespace(line).split(' ') class_value = int(re.sub(r'[\(\)]','',parts[-1])) class_name = ' '.join(parts[1:-1]) num_points = int(parts[0]) self.meta['class_histo'][class_value] = { 'name' : class_name, 'points' : num_points } def set_area(self,key,val): """ return covered area in square meters/kilometers """ m2,km2 = [float(v) for v in val.split('/')] self.meta['area_m2'] = float(m2) self.meta['area_km2'] = float(km2) def set_density(self,key,val): """ return estimated point density for all returns and last returns per square meter """ all_r,last_r = (re.sub(r'all returns ([^ ]+) last only ([^ ]+) \(per square .*\)$',r'\1;\2',self.strip_whitespace(val))).split(';') self.meta['density_per_m2_all'] = float(all_r) self.meta['density_per_m2_last'] = float(last_r) def set_spacing(self,key,val): """ get spacing for all returns and last returns in meters """ all_r,last_r = (re.sub(r'all returns ([^ ]+) last only ([^ ]+) \(in .*\)$',r'\1;\2',self.strip_whitespace(val))).split(';') self.meta['spacing_in_m_all'] = float(all_r) self.meta['spacing_in_m_last'] = float(last_r) # check if metadata has been collected def has_metadata(self): """ return True if file signature has been set to 'LASF' as required by specification, False otherwise """ return ('File Signature' in self.meta and self.meta['File Signature'] == 'LASF') def get_points(self): """ get number of points from regular or legacy number of points """ if 'Number of point records' in self.meta and self.meta['Number of point records'] != 0: return self.meta['Number of point records'] elif 'Legacy 
Number of point records' in self.meta and self.meta['Legacy Number of point records'] != 0: return self.meta['Legacy Number of point records'] else: return 0 def get_points_by_return(self): """ get number of points by return from regular or legacy number of points by return """ if 'Number of points by return' in self.meta: return self.meta['Number of points by return'] elif 'Legacy Number of points by return' in self.meta: return self.meta['Legacy Number of points by return'] else: return [] def get_attr(self,attrname,attrtype): """ safely return meatdata attribute """ if not attrname in self.meta: if attrtype == list: return [] elif attrtype == dict: return {} else: return None else: return self.meta[attrname] def get_metadata(self,json=False,pretty=False): """ return metadata collect during parsing """ if json: return self.as_json(self.meta,pretty) else: return self.meta def get_db_metadata(self,pretty=False): """ return subset of metadata for database """ return { 'file_name' : self.get_attr('file_name',str).split('/')[-1], 'file_size' : self.get_attr('file_size',str), 'file_year' : self.get_attr('File Creation Year',int), 'file_doy' : self.get_attr('File Creation Day of Year',int), 'file_gpsweek' : self.get_attr('creation_gpsweek',int), 'srid' : self.get_attr('projection_srid',int), 'projection' : self.get_attr('projection_info',str), 'points' : self.get_points(), 'points_by_return' : self.get_points_by_return(), 'minimum' : self.get_attr('minimum',list), 'maximum' : self.get_attr('maximum',list), 'histogram' : self.get_attr('class_histo',dict), 'point_area' : self.get_attr('area_m2',float), 'point_density' : self.get_attr('density_per_m2_all',float), 'point_spacing' : self.get_attr('spacing_in_m_all',float), 'point_format' : self.get_attr('Point Data Record Format',int), 'system_identifier' : self.get_attr('System Identifier',str), 'global_encoding' : self.get_attr('Global Encoding',int), } class rawdata: def __init__(self, req=None): """ helpers to clean up 
ASCII rawdata """ self.known_attrs = { 't' : 'gpstime', 'x' : 'x coordinate', 'y' : 'y coordinate', 'z' : 'z coordinate', 'i' : 'intensity', 'n' : 'number of returns of given pulse', 'r' : 'number of return', 'c' : 'classification', 'u' : 'user data', 'p' : 'point source ID', 'a' : 'scan angle', 'e' : 'edge of flight line flag', 'd' : 'direction of scan flag', 'R' : 'red channel of RGB color', 'G' : 'green channel of RGB color', 'B' : 'blue channel of RGB color', 's' : 'skip number' } self.req = req def strip_whitespace(self, val=None): """ remove leading, trailing whitespace and replace successive blanks with one blank """ if type(val) == str: return re.sub(r' +',' ',val.lstrip().rstrip()) else: return val def strip_utm32(self, val=None): """ strip trailing 32 from UTM str, int or float x-coordinates """ if type(val) == str: return val[2:] elif type(val) in (float, int): return val - 32000000 else: return val def parse_line(self, line=None, pattern=None): """ split up line on blank and create list or dictionary with params by name """ # split up cleaned line on blank row = self.strip_whitespace(line).split(' ') # safely assign attributes when requested if pattern: # init return dictionary rec = {} # split up pattern attrs = list(pattern) # bail out if number of attributes does not match number of columns if not len(row) == len(attrs): raise ValueError('Number of columns and attributes in pattern do not match. Got %s, expected %s.\nline=%s\npattern=%s' % ( len(attrs), len(row), self.strip_whitespace(line), pattern )) # assign attributes for i in range(0,len(row)): if not attrs[i] in self.known_attrs: raise ValueError('%s is not a valid attribute abreviation.' % attrs[i]) else: # handle skip flag if attrs[i] == 's': continue else: rec[attrs[i]] = row[i] return rec else: return row
gpl-3.0
7,783,275,557,073,702,000
42.371308
149
0.527532
false
4.163224
false
false
false
blakfeld/Bash-To-Python
bash_to_python/cat.py
1
2231
""" cat.py -- Emulate UNIX cat. Author: Corwin Brown E-Mail: [email protected] Date: 5/25/2015 """ import os import sys class Cat(object): def __init__(self, fname=None, stdin=None): """ Constructor Args: fname (str): File to print to screen stdin (str): Input from sys.stdin to output. Raises: ValueError: If provided file doesn't exist or is a directory. """ self.fname = fname self.stdin = stdin def run(self): """ Emulate 'cat'. Echo User input if a file is not provided, if a file is provided, print it to the screen. """ if self.stdin: self._cat_stdin(self.stdin) return if not self.fname: self._cat_input() return if isinstance(self.fname, list): for f in self.fname: self._validate_file(f) self._cat_file(f) else: self._validate_file(self.fname) self._cat_file(self.fname) def _cat_stdin(self, stdin): """ Print data provided in stdin. Args: stdin (str): The output of sys.stdin.read() """ print stdin def _cat_file(self, fname): """ Print contents of a file. Args: fname: Name of file to print. """ with open(fname, 'r') as f: sys.stdout.write((f.read())) def _cat_input(self): """ Echo back user input. """ while True: user_input = raw_input() sys.stdout.write(user_input) def _validate_file(self, fname): """ Ensure fname exists, and is not a directory. Args: fname (str): The file path to validate. Raises: ValueError: If file does not exist or is a directory. """ if not os.path.exists(fname): raise ValueError('cat: {}: No such file or directory.' .format(fname)) if os.path.isdir(fname): raise ValueError('cat: {}: Is a directory.' .format(fname))
mit
6,770,552,773,676,666,000
21.31
79
0.494845
false
4.233397
false
false
false
tochikuji/chainer-libDNN
libdnn/visualizer.py
1
4553
# coding: utf-8 import chainer import numpy import matplotlib import matplotlib.pyplot as plt import cv2 class Visualizer(object): def __init__(self, network): self.nnbase = network self.model = network.model plt.subplots_adjust(hspace=0.5) def __convert_filters(self, layer, shape=(), T=False): layer = self.model[layer] self.bitmap = [] weight = [] if not T: weight = chainer.cuda.to_cpu(layer.W) else: weight = chainer.cuda.to_cpu(layer.W.T) if shape: for bitmap in weight: self.bitmap.append(bitmap.reshape(shape)) else: for bitmap in weight: self.bitmap.append(bitmap[0]) def plot_filters(self, layer, shape=(), T=False, title=True, interpolation=False): int_mode = 'none' if interpolation: int_mode = 'hermite' self.__convert_filters(layer, shape, T) N = len(self.bitmap) nrow = int(numpy.sqrt(N)) + 1 for i in range(N): ax = plt.subplot(nrow, nrow, i + 1) if title: ax.set_title('filter %d' % (i + 1), fontsize=10) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) plt.imshow(self.bitmap[i], interpolation=int_mode, cmap=matplotlib.cm.gray) plt.show() def write_filters(self, layer, path='./', identifier='img', type='bmp', shape=(), T=False): self.__convert_filters(layer, shape, T) N = len(self.bitmap) # length of file indexes maxlen = int(numpy.log10(N)) + 1 form = '{0:0>' + str(maxlen) + '}' fmax = numpy.max(self.bitmap) fmin = numpy.min(self.bitmap) self.bitmap = ((self.bitmap - fmin) * 0xff / (fmax - fmin)).astype(numpy.uint8) for i in range(N): filename = path + '/' + identifier + form.format(i) + '.' 
+ type cv2.imwrite(filename, self.bitmap[i]) def save_raw_filter(self, dst): for i in range(len(self.bitmap)): numpy.savetxt(dst + '/%d' % (i + 1) + '.csv', self.bitmap[i], delimiter=',') def __apply_filter(self, x, layer): output = self.nnbase.output(x, layer) # chainer.Variable -> numpy.ndarray (of GPUArray) return chainer.cuda.to_cpu(output).data def plot_output(self, x, layer): output = self.__apply_filter(x, layer) N = output.shape[0] * output.shape[1] nrow = int(numpy.sqrt(N)) + 1 j = 0 for batch in output: j += 1 i = 0 for img in batch: i += 1 ax = plt.subplot(nrow, nrow, (j - 1) * output.shape[1] + i) ax.set_title('img%d-filt%d' % (j + 1, i + 1), fontsize=10) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) plt.imshow(chainer.cuda.to_cpu(img), interpolation='none', cmap=matplotlib.cm.gray) plt.show() def write_output(self, x, layer, path='./', identifier='img_', type='bmp'): output = self.__apply_filter(x, layer) maxlen_t = int(numpy.log10(output.shape[0])) + 1 tform = '{0:0>' + str(maxlen_t) + '}' maxlen_f = int(numpy.log10(output.shape[1])) + 1 fform = '{0:0>' + str(maxlen_f) + '}' j = 0 for batch in output: j += 1 i = 0 for img in batch: i += 1 bitmap = chainer.cuda.to_cpu(img) fmax = numpy.max(bitmap) fmin = numpy.min(bitmap) bitmap = ((bitmap - fmin) * 0xff / (fmax - fmin)).astype(numpy.uint8) filename = path + '/' + identifier + tform.format(j) + '_f' + fform.format(i) + '.' + type cv2.imwrite(filename, bitmap) def write_activation(self, x, layer, path='./', identifier='img_', type='bmp'): output = self.__apply_filter(numpy.array([x]).astype(numpy.float32), layer) fform = '{0:0>' + str(int(numpy.log10(output.shape[1])) + 1) + '}' # filter num i = 0 for img in output[0]: i += 1 bitmap = chainer.cuda.to_cpu(img) fmax = numpy.max(bitmap) fmin = numpy.min(bitmap) bitmap = ((bitmap - fmin) * 0xff / (fmax - fmin)).astype(numpy.uint8) filename = path + '/' + identifier + 'f' + fform.format(i) + '.' + type cv2.imwrite(filename, bitmap)
mit
-5,844,995,503,812,205,000
32.477941
106
0.520316
false
3.491564
false
false
false
harankumar/MultiDimensionalHeuristics
src/pathfinder.py
1
5368
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from Queue import PriorityQueue from scipy.spatial.distance import * from grid import Grid def find_path(grid, start_cell, goal_cell, heuristic): def distance(p1, p2, name=heuristic, dimensions=grid.num_dimensions): p1 = p1.coordinates p2 = p2.coordinates if name == 'null': return 0.0 elif name == 'minkowski-n': return pdist([p1, p2], 'minkowski', p=dimensions) elif name == 'minkowski-0.5n': return pdist([p1, p2], 'minkowski', p=dimensions / 2) else: return pdist([p1, p2], name) # print "started pathfinding" start_frontier = PriorityQueue() start_cell.cost_to = start_cell.cost start_frontier.put(start_cell) goal_frontier = PriorityQueue() goal_cell.cost_from = goal_cell.cost goal_frontier.put(goal_cell) num_iterations = 0 while not start_frontier.empty() and not goal_frontier.empty(): num_iterations += 1 current_start_cell = start_frontier.get() current_goal_cell = goal_frontier.get() # print(str(current_start_cell) + " " + str(current_goal_cell)) if current_start_cell == current_goal_cell: # print "0" path = [] while current_start_cell.previous is not None: path.append(current_start_cell) current_start_cell = current_start_cell.previous path.append(current_start_cell) path.reverse() path.append(current_goal_cell) while not current_goal_cell.successor == goal_cell: path.append(current_goal_cell) current_goal_cell = current_goal_cell.successor return path, num_iterations if current_start_cell.visited_from_goal: # print "1" path = [] current = current_start_cell while not current == goal_cell: current.successor.previous = current current = current.successor current = goal_cell while current.previous is not None: path.append(current) current = current.previous path.append(current) path.reverse() return path, num_iterations if current_goal_cell.visited_from_start: # print 
"2" path = [] current = current_goal_cell while current.previous is not None: path.append(current) current = current.previous path.append(current) path.reverse() current = current_goal_cell while not current == goal_cell: path.append(current.successor) current = current.successor return path, num_iterations current_start_cell.closed = True # add to start frontier neighbors = current_start_cell.get_neighbors() for neighbor in neighbors: if neighbor.closed: continue cost_to = current_start_cell.cost_to + neighbor.cost visited = neighbor.visited_from_start if (not visited) or cost_to < neighbor.cost_to: neighbor.visited_from_start = True neighbor.previous = current_start_cell if neighbor.predicted_cost_from == 100000000.0: neighbor.predicted_cost_from = distance(neighbor, goal_cell) neighbor.cost_to = cost_to neighbor.predicted_cost_to = cost_to neighbor.total_cost = neighbor.cost_to + neighbor.predicted_cost_from if visited: start_frontier.queue.remove(neighbor) start_frontier.put(neighbor) # add to goal frontier neighbors = current_goal_cell.get_neighbors() for neighbor in neighbors: if neighbor.closed: continue cost_from = current_goal_cell.cost_from + neighbor.cost visited = neighbor.visited_from_goal if (not visited) or cost_from < neighbor.cost_from: neighbor.visited_from_goal = True neighbor.successor = current_goal_cell if neighbor.predicted_cost_to == 100000000.0: neighbor.predicted_cost_to = distance(neighbor, start_cell) neighbor.cost_from = cost_from neighbor.predicted_cost_from = cost_from neighbor.total_cost = neighbor.cost_from + neighbor.predicted_cost_to if visited: try: goal_frontier.queue.remove(neighbor) except ValueError: pass goal_frontier.put(neighbor) # np.set_printoptions(linewidth=500) # g = Grid([5, 5], fill=True) # print g.grid # print g.get_cell([0,0]) # print g.get_cell([4,4]) # path, iter = find_path(g, g.get_cell([0, 0]), g.get_cell([4, 4]), heuristic='null') # print [str(i) for i in path] # print iter
mpl-2.0
5,993,779,184,779,561,000
36.02069
85
0.560358
false
4.148377
false
false
false
pitunti/alfaPitunti
plugin.video.alfa/core/channeltools.py
1
12463
# -*- coding: utf-8 -*- # ------------------------------------------------------------ # channeltools - Herramientas para trabajar con canales # ------------------------------------------------------------ import os import jsontools from platformcode import config, logger DEFAULT_UPDATE_URL = "/channels/" dict_channels_parameters = dict() def is_adult(channel_name): logger.info("channel_name=" + channel_name) channel_parameters = get_channel_parameters(channel_name) return channel_parameters["adult"] def is_enabled(channel_name): logger.info("channel_name=" + channel_name) return get_channel_parameters(channel_name)["active"] and get_channel_setting("enabled", channel=channel_name, default=True) def get_channel_parameters(channel_name): global dict_channels_parameters if channel_name not in dict_channels_parameters: try: channel_parameters = get_channel_json(channel_name) # logger.debug(channel_parameters) if channel_parameters: # cambios de nombres y valores por defecto channel_parameters["title"] = channel_parameters.pop("name") channel_parameters["channel"] = channel_parameters.pop("id") # si no existe el key se declaran valor por defecto para que no de fallos en las funciones que lo llaman channel_parameters["update_url"] = channel_parameters.get("update_url", DEFAULT_UPDATE_URL) channel_parameters["language"] = channel_parameters.get("language", ["all"]) channel_parameters["adult"] = channel_parameters.get("adult", False) channel_parameters["active"] = channel_parameters.get("active", False) channel_parameters["include_in_global_search"] = channel_parameters.get("include_in_global_search", False) channel_parameters["categories"] = channel_parameters.get("categories", list()) channel_parameters["thumbnail"] = channel_parameters.get("thumbnail", "") channel_parameters["banner"] = channel_parameters.get("banner", "") channel_parameters["fanart"] = channel_parameters.get("fanart", "") # Imagenes: se admiten url y archivos locales dentro de "resources/images" if 
channel_parameters.get("thumbnail") and "://" not in channel_parameters["thumbnail"]: channel_parameters["thumbnail"] = os.path.join(config.get_runtime_path(), "resources", "media", "channels", "thumb", channel_parameters["thumbnail"]) if channel_parameters.get("banner") and "://" not in channel_parameters["banner"]: channel_parameters["banner"] = os.path.join(config.get_runtime_path(), "resources", "media", "channels", "banner", channel_parameters["banner"]) if channel_parameters.get("fanart") and "://" not in channel_parameters["fanart"]: channel_parameters["fanart"] = os.path.join(config.get_runtime_path(), "resources", "media", "channels", "fanart", channel_parameters["fanart"]) # Obtenemos si el canal tiene opciones de configuración channel_parameters["has_settings"] = False if 'settings' in channel_parameters: # if not isinstance(channel_parameters['settings'], list): # channel_parameters['settings'] = [channel_parameters['settings']] # if "include_in_global_search" in channel_parameters['settings']: # channel_parameters["include_in_global_search"] = channel_parameters['settings'] # ["include_in_global_search"].get('default', False) # # found = False # for el in channel_parameters['settings']: # for key in el.items(): # if 'include_in' not in key: # channel_parameters["has_settings"] = True # found = True # break # if found: # break for s in channel_parameters['settings']: if 'id' in s: if s['id'] == "include_in_global_search": channel_parameters["include_in_global_search"] = True elif not s['id'].startswith("include_in_") and \ (s.get('enabled', False) or s.get('visible', False)): channel_parameters["has_settings"] = True del channel_parameters['settings'] # Compatibilidad if 'compatible' in channel_parameters: # compatible python python_compatible = True if 'python' in channel_parameters["compatible"]: import sys python_condition = channel_parameters["compatible"]['python'] if sys.version_info < tuple(map(int, (python_condition.split(".")))): 
python_compatible = False channel_parameters["compatible"] = python_compatible else: channel_parameters["compatible"] = True dict_channels_parameters[channel_name] = channel_parameters else: # para evitar casos donde canales no están definidos como configuración # lanzamos la excepcion y asi tenemos los valores básicos raise Exception except Exception, ex: logger.error(channel_name + ".json error \n%s" % ex) channel_parameters = dict() channel_parameters["channel"] = "" channel_parameters["adult"] = False channel_parameters['active'] = False channel_parameters["compatible"] = True channel_parameters["language"] = "" channel_parameters["update_url"] = DEFAULT_UPDATE_URL return channel_parameters return dict_channels_parameters[channel_name] def get_channel_json(channel_name): # logger.info("channel_name=" + channel_name) import filetools channel_json = None try: channel_path = filetools.join(config.get_runtime_path(), "channels", channel_name + ".json") if filetools.isfile(channel_path): # logger.info("channel_data=" + channel_path) channel_json = jsontools.load(filetools.read(channel_path)) # logger.info("channel_json= %s" % channel_json) except Exception, ex: template = "An exception of type %s occured. Arguments:\n%r" message = template % (type(ex).__name__, ex.args) logger.error(" %s" % message) return channel_json def get_channel_controls_settings(channel_name): # logger.info("channel_name=" + channel_name) dict_settings = {} list_controls = get_channel_json(channel_name).get('settings', list()) for c in list_controls: if 'id' not in c or 'type' not in c or 'default' not in c: # Si algun control de la lista no tiene id, type o default lo ignoramos continue # new dict with key(id) and value(default) from settings dict_settings[c['id']] = c['default'] return list_controls, dict_settings def get_channel_setting(name, channel, default=None): """ Retorna el valor de configuracion del parametro solicitado. 
Devuelve el valor del parametro 'name' en la configuracion propia del canal 'channel'. Busca en la ruta \addon_data\plugin.video.alfa\settings_channels el archivo channel_data.json y lee el valor del parametro 'name'. Si el archivo channel_data.json no existe busca en la carpeta channels el archivo channel.json y crea un archivo channel_data.json antes de retornar el valor solicitado. Si el parametro 'name' tampoco existe en el el archivo channel.json se devuelve el parametro default. @param name: nombre del parametro @type name: str @param channel: nombre del canal @type channel: str @param default: valor devuelto en caso de que no exista el parametro name @type default: any @return: El valor del parametro 'name' @rtype: any """ file_settings = os.path.join(config.get_data_path(), "settings_channels", channel + "_data.json") dict_settings = {} dict_file = {} if os.path.exists(file_settings): # Obtenemos configuracion guardada de ../settings/channel_data.json try: dict_file = jsontools.load(open(file_settings, "rb").read()) if isinstance(dict_file, dict) and 'settings' in dict_file: dict_settings = dict_file['settings'] except EnvironmentError: logger.error("ERROR al leer el archivo: %s" % file_settings) if not dict_settings or name not in dict_settings: # Obtenemos controles del archivo ../channels/channel.json try: list_controls, default_settings = get_channel_controls_settings(channel) except: default_settings = {} if name in default_settings: # Si el parametro existe en el channel.json creamos el channel_data.json default_settings.update(dict_settings) dict_settings = default_settings dict_file['settings'] = dict_settings # Creamos el archivo ../settings/channel_data.json json_data = jsontools.dump(dict_file) try: open(file_settings, "wb").write(json_data) except EnvironmentError: logger.error("ERROR al salvar el archivo: %s" % file_settings) # Devolvemos el valor del parametro local 'name' si existe, si no se devuelve default return dict_settings.get(name, 
default) def set_channel_setting(name, value, channel): """ Fija el valor de configuracion del parametro indicado. Establece 'value' como el valor del parametro 'name' en la configuracion propia del canal 'channel'. Devuelve el valor cambiado o None si la asignacion no se ha podido completar. Si se especifica el nombre del canal busca en la ruta \addon_data\plugin.video.alfa\settings_channels el archivo channel_data.json y establece el parametro 'name' al valor indicado por 'value'. Si el parametro 'name' no existe lo añade, con su valor, al archivo correspondiente. @param name: nombre del parametro @type name: str @param value: valor del parametro @type value: str @param channel: nombre del canal @type channel: str @return: 'value' en caso de que se haya podido fijar el valor y None en caso contrario @rtype: str, None """ # Creamos la carpeta si no existe if not os.path.exists(os.path.join(config.get_data_path(), "settings_channels")): os.mkdir(os.path.join(config.get_data_path(), "settings_channels")) file_settings = os.path.join(config.get_data_path(), "settings_channels", channel + "_data.json") dict_settings = {} dict_file = None if os.path.exists(file_settings): # Obtenemos configuracion guardada de ../settings/channel_data.json try: dict_file = jsontools.load(open(file_settings, "r").read()) dict_settings = dict_file.get('settings', {}) except EnvironmentError: logger.error("ERROR al leer el archivo: %s" % file_settings) dict_settings[name] = value # comprobamos si existe dict_file y es un diccionario, sino lo creamos if dict_file is None or not dict_file: dict_file = {} dict_file['settings'] = dict_settings # Creamos el archivo ../settings/channel_data.json try: json_data = jsontools.dump(dict_file) open(file_settings, "w").write(json_data) except EnvironmentError: logger.error("ERROR al salvar el archivo: %s" % file_settings) return None return value
gpl-3.0
-5,649,340,184,223,636,000
44.137681
120
0.586531
false
4.213054
true
false
false
CERT-BDF/Cortex-Analyzers
analyzers/OTXQuery/otxquery.py
1
9023
#!/usr/bin/env python # encoding: utf-8 import json import requests import urllib import hashlib import io from cortexutils.analyzer import Analyzer class OTXQueryAnalyzer(Analyzer): def __init__(self): Analyzer.__init__(self) self.otx_key = self.get_param('config.key', None, 'Missing OTX API key') def _get_headers(self): return { 'X-OTX-API-KEY': self.otx_key, 'Accept': 'application/json' } def otx_query_ip(self, data): baseurl = "https://otx.alienvault.com:443/api/v1/indicators/IPv4/%s/" % data headers = self._get_headers() sections = [ 'general', 'reputation', 'geo', 'malware', 'url_list', 'passive_dns' ] ip_ = {} try: for section in sections: queryurl = baseurl + section ip_[section] = json.loads(requests.get(queryurl, headers=headers).content) ip_general = ip_['general'] ip_geo = ip_['geo'] self.report({ 'pulse_count': ip_general.get('pulse_info', {}).get('count', "0"), 'pulses': ip_general.get('pulse_info', {}).get('pulses', "-"), 'whois': ip_general.get('whois', "-"), 'continent_code': ip_geo.get('continent_code', "-"), 'country_code': ip_geo.get('country_code', "-"), 'country_name': ip_geo.get('country_name', "-"), 'city': ip_geo.get('city', "-"), 'longitude': ip_general.get('longitude', "-"), 'latitude': ip_general.get('latitude', "-"), 'asn': ip_geo.get('asn', "-"), 'malware_samples': ip_.get('malware', {}).get('result', "-"), 'url_list': ip_.get('url_list', {}).get('url_list', "-"), 'passive_dns': ip_.get('passive_dns', {}).get('passive_dns', "-") }) except Exception: self.error('API Error! 
Please verify data type is correct.') def otx_query_domain(self, data): baseurl = "https://otx.alienvault.com:443/api/v1/indicators/domain/%s/" % data headers = self._get_headers() sections = ['general', 'geo', 'malware', 'url_list', 'passive_dns'] ip_ = {} try: for section in sections: queryurl = baseurl + section ip_[section] = json.loads(requests.get(queryurl, headers=headers).content) result = { 'pulse_count': ip_.get('general', {}).get('pulse_info', {}).get('count', "0"), 'pulses': ip_.get('general', {}).get('pulse_info', {}).get('pulses', "-"), 'whois': ip_.get('general', {}).get('whois', "-"), 'malware_samples': ip_.get('malware', {}).get('result', "-"), 'url_list': ip_.get('url_list', {}).get('url_list', "-"), 'passive_dns': ip_.get('passive_dns', {}).get('passive_dns', "-") } try: result.update({ 'continent_code': ip_.get('geo', {}).get('continent_code', "-"), 'country_code': ip_.get('geo', {}).get('country_code', "-"), 'country_name': ip_.get('geo', {}).get('country_name', "-"), 'city': ip_.get('geo', {}).get('city', "-"), 'asn': ip_.get('geo', {}).get('asn', "-") }) except Exception: pass self.report(result) except Exception: self.error('API Error! 
Please verify data type is correct.') def otx_query_file(self, data): baseurl = "https://otx.alienvault.com:443/api/v1/indicators/file/%s/" % data headers = self._get_headers() sections = ['general', 'analysis'] ip_ = {} try: for section in sections: queryurl = baseurl + section ip_[section] = json.loads(requests.get(queryurl, headers=headers).content) if ip_['analysis']['analysis']: # file has been analyzed before self.report({ 'pulse_count': ip_.get('general', {}).get('pulse_info', {}).get('count', "0"), 'pulses': ip_.get('general', {}).get('pulse_info', {}).get('pulses', "-"), 'malware': ip_.get('analysis', {}).get('malware', "-"), 'page_type': ip_.get('analysis', {}).get('page_type', "-"), 'sha1': ip_.get('analysis', {}).get('analysis', {}).get('info', {}).get('results', {}).get('sha1', "-"), 'sha256': ip_.get('analysis', {}).get('analysis', {}).get('info', {}).get('results', {}).get( 'sha256', "-"), 'md5': ip_.get('analysis', {}).get('analysis', {}).get('info', {}).get('results', {}).get('md5', "-"), 'file_class': ip_.get('analysis', {}).get('analysis', {}).get('info', {}).get('results', {}).get( 'file_class', "-"), 'file_type': ip_.get('analysis', {}).get('analysis', {}).get('info', {}).get('results', {}).get( 'file_type', "-"), 'filesize': ip_.get('analysis', {}).get('analysis', {}).get('info', {}).get('results', {}).get( 'filesize', "-"), 'ssdeep': ip_.get('analysis', {}).get('analysis', {}).get('info', {}).get('results', {}).get( 'ssdeep') }) else: # file has not been analyzed before self.report({ 'errortext': 'File has not previously been analyzed by OTX!', 'pulse_count': ip_['general']['pulse_info']['count'], 'pulses': ip_['general']['pulse_info']['pulses'] }) except Exception: self.error('API Error! 
Please verify data type is correct.') def otx_query_url(self, data): # urlencode the URL that we are searching for data = urllib.quote_plus(data) baseurl = "https://otx.alienvault.com:443/api/v1/indicators/url/%s/" % data headers = self._get_headers() sections = ['general', 'url_list'] IP_ = {} try: for section in sections: queryurl = baseurl + section IP_[section] = json.loads(requests.get(queryurl, headers=headers).content) self.report({ 'pulse_count': IP_.get('general', {}).get('pulse_info', {}).get('count', "0"), 'pulses': IP_.get('general', {}).get('pulse_info', {}).get('pulses', "-"), 'alexa': IP_.get('general', {}).get('alexa', "-"), 'whois': IP_.get('general', {}).get('whois', "-"), 'url_list': IP_.get('url_list', {}).get('url_list', "-") }) except: self.error('API Error! Please verify data type is correct.') def summary(self, raw): taxonomies = [] level = "info" namespace = "OTX" predicate = "Pulses" value = "{}".format(raw["pulse_count"]) taxonomies.append(self.build_taxonomy(level, namespace, predicate, value)) return {"taxonomies": taxonomies} def run(self): Analyzer.run(self) if self.data_type == 'file': hashes = self.get_param('attachment.hashes', None) if hashes is None: filepath = self.get_param('file', None, 'File is missing') sha256 = hashlib.sha256() with io.open(filepath, 'rb') as fh: while True: data = fh.read(4096) if not data: break sha256.update(data) hash = sha256.hexdigest() else: # find SHA256 hash hash = next(h for h in hashes if len(h) == 64) self.otx_query_file(hash) elif self.data_type == 'url': data = self.get_param('data', None, 'Data is missing') self.otx_query_url(data) elif self.data_type == 'domain': data = self.get_param('data', None, 'Data is missing') self.otx_query_domain(data) elif self.data_type == 'ip': data = self.get_param('data', None, 'Data is missing') self.otx_query_ip(data) elif self.data_type == 'hash': data = self.get_param('data', None, 'Data is missing') self.otx_query_file(data) else: self.error('Invalid data 
type') if __name__ == '__main__': OTXQueryAnalyzer().run()
agpl-3.0
4,062,654,437,126,525,400
43.668317
118
0.458495
false
4.022737
false
false
false
Katello/katello-cli
src/katello/client/api/custom_info.py
1
1815
# -*- coding: utf-8 -*- # # Copyright 2013 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. from katello.client.api.base import KatelloAPI class CustomInfoAPI(KatelloAPI): """ Connection class to access custom info calls """ def add_custom_info(self, informable_type, informable_id, keyname, value): data = { 'keyname': keyname, 'value': value } path = "/api/custom_info/%s/%s" % (informable_type, informable_id) return self.server.POST(path, data)[1] def get_custom_info(self, informable_type, informable_id, keyname = None): if keyname: path = "/api/custom_info/%s/%s/%s" % (informable_type, informable_id, keyname) else: path = "/api/custom_info/%s/%s" % (informable_type, informable_id) return self.server.GET(path)[1] def update_custom_info(self, informable_type, informable_id, keyname, new_value): data = { 'value': new_value } path = "/api/custom_info/%s/%s/%s" % (informable_type, informable_id, keyname) return self.server.PUT(path, data)[1] def remove_custom_info(self, informable_type, informable_id, keyname): path = "/api/custom_info/%s/%s/%s" % (informable_type, informable_id, keyname) return self.server.DELETE(path)[1]
gpl-2.0
2,375,057,709,698,116,000
43.268293
90
0.677135
false
3.282098
false
false
false
SDSG-Invenio/invenio
invenio/ext/logging/backends/legacy.py
17
1757
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2014 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Invenio 1.x style error handling. Logs exceptions to database and sends emails. Works only in connection with register_exception(). **Configuration** ======================== ====================================================== `LOGGING_LEGACY_LEVEL` Log level threshold for handler. **Default:** ``ERROR``. ======================== ====================================================== """ from __future__ import absolute_import import logging from ..handlers import InvenioLegacyHandler from ..formatters import InvenioExceptionFormatter def setup_app(app): """Invenio 1.x log handler.""" if not app.debug: app.config.setdefault('LOGGING_LEGACY_LEVEL', 'ERROR') handler = InvenioLegacyHandler() handler.setFormatter(InvenioExceptionFormatter()) handler.setLevel(getattr(logging, app.config['LOGGING_LEGACY_LEVEL'])) # Add handler to application logger app.logger.addHandler(handler)
gpl-2.0
458,113,086,534,045,800
33.45098
79
0.654525
false
4.505128
false
false
false
ctb/pygr
tests/sqlsequence_test.py
1
3025
# test will be skipped if MySqlDB is unavailable import unittest, string from testlib import testutil, SkipTest, PygrTestProgram from pygr import sqlgraph, seqdb, classutil, logger class SQLSequence_Test(unittest.TestCase): '''Basic SQL sequence class tests This test setup uses the common (?) method of having the SQLSequence objects created by a SQLTable object rather than instantiating the SQLSequence objects directly. ''' def setUp(self, serverInfo=None, dbname='test.sqlsequence_test'): if not testutil.mysql_enabled(): raise SkipTest, "no MySQL installed" createTable = """\ CREATE TABLE %s (primary_id INTEGER PRIMARY KEY %%(AUTO_INCREMENT)s, sequence TEXT) """ % dbname self.db = sqlgraph.SQLTable(dbname, serverInfo=serverInfo, dropIfExists=True, createTable=createTable, attrAlias=dict(seq='sequence')) self.db.cursor.execute("""\ INSERT INTO %s (sequence) VALUES ('CACCCTGCCCCATCTCCCCAGCCTGGCCCCTCGTGTCTCAGAACCCTCGGGGGGAGGCACAGAAGCCTTCGGGG') """ % dbname) self.db.cursor.execute("""\ INSERT INTO %s (sequence) VALUES ('GAAAGAAAGAAAGAAAGAAAGAAAGAGAGAGAGAGAGACAGAAG') """ % dbname) class DNASeqRow(seqdb.DNASQLSequence): def __len__(self): # just speed optimization return self._select('length(sequence)') # SQL SELECT expression # force the table object to return DNASeqRow objects self.db.objclass(DNASeqRow) self.row1 = self.db[1] self.row2 = self.db[2] self.EQ = self.assertEqual def tearDown(self): self.db.cursor.execute('drop table if exists test.sqlsequence_test') def test_print(self): "Testing identities" self.EQ(str(self.row2), 'GAAAGAAAGAAAGAAAGAAAGAAAGAGAGAGAGAGAGACAGAAG') self.EQ(repr(self.row2), '2[0:44]') def test_len(self): "Testing lengths" self.EQ(len(self.row2), 44) def test_strslice(self): "Testing slices" self.EQ(self.row2.strslice(3,10), 'AGAAAGA') def init_subclass_test(self): "Testing subclassing" self.row2._init_subclass(self.db) class SQLiteSequence_Test(testutil.SQLite_Mixin, SQLSequence_Test): def sqlite_load(self): SQLSequence_Test.setUp(self, 
self.serverInfo, 'sqlsequence_test') def get_suite(): "Returns the testsuite" tests = [] # detect mysql if testutil.mysql_enabled(): tests.append(SQLSequence_Test) else: testutil.info('*** skipping SQLSequence_Test') if testutil.sqlite_enabled(): tests.append(SQLiteSequence_Test) else: testutil.info('*** skipping SQLSequence_Test') return testutil.make_suite(tests) if __name__ == '__main__': PygrTestProgram(verbosity=2)
bsd-3-clause
-8,949,302,890,382,394,000
32.611111
99
0.618182
false
3.938802
true
false
false
Anderson-Lab/anderson-lab.github.io
csc_466_2021_spring/MLCode/Ch6/kernelpca.py
3
2005
# Code from Chapter 6 of Machine Learning: An Algorithmic Perspective (2nd Edition) # by Stephen Marsland (http://stephenmonika.net) # You are free to use, change, or redistribute the code in any way you wish for # non-commercial purposes, but please maintain the name of the original author. # This code comes with no warranty of any kind. # Stephen Marsland, 2008, 2014 # The Kernel PCA algorithm import numpy as np import pylab as pl def kernelmatrix(data,kernel,param=np.array([3,2])): if kernel=='linear': return np.dot(data,transpose(data)) elif kernel=='gaussian': K = np.zeros((np.shape(data)[0],np.shape(data)[0])) for i in range(np.shape(data)[0]): for j in range(i+1,np.shape(data)[0]): K[i,j] = np.sum((data[i,:]-data[j,:])**2) K[j,i] = K[i,j] return np.exp(-K**2/(2*param[0]**2)) elif kernel=='polynomial': return (np.dot(data,np.transpose(data))+param[0])**param[1] def kernelpca(data,kernel,redDim): nData = np.shape(data)[0] nDim = np.shape(data)[1] K = kernelmatrix(data,kernel) # Compute the transformed data D = np.sum(K,axis=0)/nData E = np.sum(D)/nData J = np.ones((nData,1))*D K = K - J - np.transpose(J) + E*np.ones((nData,nData)) # Perform the dimensionality reduction evals,evecs = np.linalg.eig(K) indices = np.argsort(evals) indices = indices[::-1] evecs = evecs[:,indices[:redDim]] evals = evals[indices[:redDim]] sqrtE = np.zeros((len(evals),len(evals))) for i in range(len(evals)): sqrtE[i,i] = np.sqrt(evals[i]) #print shape(sqrtE), shape(data) newData = np.transpose(np.dot(sqrtE,np.transpose(evecs))) return newData #data = array([[0.1,0.1],[0.2,0.2],[0.3,0.3],[0.35,0.3],[0.4,0.4],[0.6,0.4],[0.7,0.45],[0.75,0.4],[0.8,0.35]]) #newData = kernelpca(data,'gaussian',2) #plot(data[:,0],data[:,1],'o',newData[:,0],newData[:,0],'.') #show()
mit
-4,231,561,555,081,488,000
31.33871
110
0.601496
false
2.848011
false
false
false
procamora/Wiki-Personal
make_entrymd.py
1
1396
#!/usr/bin/env python3 import sys from datetime import datetime TEMPLATE = """ --- Title: {title} Date: {year}-{month}-{day} {hour}:{minute:02d} Modified: {year}-{month}-{day} {hour}:{minute:02d} Category: Tags: Authors: procamora Slug: {slug} Summary: Status: draft --- """ def make_entrymdAntiguo(title): today = datetime.today() slug = title.lower().strip().replace(' ', '-') f_create = "content/draft/{}_{:0>2}_{:0>2}_{}.md".format( today.year, today.month, today.day, slug) t = TEMPLATE.strip().format(title=title, year=today.year, month=today.month, day=today.day, hour=today.hour, minute=today.minute, slug="{}_{:0>2}_{:0>2}_{}".format(today.year, today.month, today.day, slug)) with open(f_create, 'w') as w: w.write(t) print("File created -> " + f_create) def make_entrymd(title): today = datetime.today() slug = title.lower().strip().replace(' ', '_').replace(':', '').replace('.','') f_create = "content/draft/{}.md".format(slug) t = TEMPLATE.strip().format(title=title, year=today.year, month=today.month, day=today.day, hour=today.hour, minute=today.minute, slug=slug) with open(f_create, 'w') as w: w.write(t) print("File created -> " + f_create) if __name__ == '__main__': if len(sys.argv) > 1: make_entrymd(sys.argv[1]) else: print("No title given")
gpl-3.0
-4,138,845,697,370,725,400
23.491228
84
0.59957
false
2.797595
false
false
false
icelinker/pylsge
pylsge/rot3z.py
1
2515
#tested beta #function [U] = rot3z(a) #% -------------------------------------------------------------------------- #% ROT3Z.M Form rotation matrix U to rotate the vector a to a point along #% the positive z-axis. #% #% Version 1.0 #% Last amended I M Smith 2 May 2002. #% Created I M Smith 2 May 2002. #% -------------------------------------------------------------------------- #% Input #% a Vector. #% Dimension: 3 x 1. #% #% Output #% U Rotation matrix with U * a = [0 0 z]', z > 0. #% Dimension: 3 x 3. #% #% Modular structure: GR.M. #% #% [U] = rot3z(a) #% -------------------------------------------------------------------------- # #% form first Givens rotation # [W, c1, s1] = gr(a(2), a(3)); # z = c1*a(2) + s1*a(3); # V = [1 0 0; 0 s1 -c1; 0 c1 s1]; #% #% form second Givens rotation # [W, c2, s2] = gr(a(1), z); #% #% check positivity # if c2 * a(1) + s2 * z < 0 # c2 = -c2; # s2 = -s2; # end % if #% # W = [s2 0 -c2; 0 1 0; c2 0 s2]; # U = W * V; #% -------------------------------------------------------------------------- #% End of ROT3Z.M. import math import numpy as np from gr import * import types def rot3z(a=None): """function [U] = rot3z(a) rot3z.py Form rotation matrix U to rotate the vector a to a point along the positive z-axis. Input a Vector. Dimension: 3 x 1. Output U Rotation matrix with U * a = [0 0 z]', r > 0. Dimension: 3 x 3. 
Modular structure: gr.py """ #form first Givens rotation if a!=None: a=np.mat(a); m,n=a.shape; if n!=1: a=a.T else: raise ArithmeticError, "A is None" W, c1, s1= gr(a[1,0], a[2,0]); z = c1*a[1,0] + s1*a[2,0]; V = np.mat([[1.0 ,0.0 ,0.0],[0.0 ,s1,-c1],[ 0.0 ,c1 ,s1]]); #form second Givens rotation W, c2, s2 = gr(a[0,0], z); #check positivity if c2 * a[0,0] + s2 * z < 0: c2 = -c2; s2 = -s2; # end % if #% W = np.mat([[s2,0.0,-c2],[0.0,1.0,0.0], [c2,0.0,s2]]) U = W * V; return U if __name__=="__main__": print rot3z.__doc__ a=np.mat([[1.0], [1/np.sqrt(2)], [1/np.sqrt(2)]]) r=rot3z(a) print a print r # r: 0.707106781186548 -0.5 -0.5 # 0 0.707106781186548 -0.707106781186548 # 0.707106781186548 0.5 0.5
lgpl-3.0
-6,474,368,271,176,664,000
25.765957
78
0.405964
false
2.695606
false
false
false
OCA/reporting-engine
report_label/wizards/report_label_wizard.py
1
3190
from odoo import api, models, fields class ReportLabelWizard(models.TransientModel): _name = "report.label.wizard" _description = "Report Label Wizard" @api.model def _default_line_ids(self): """ Compute line_ids based on context """ active_model = self.env.context.get("active_model") active_ids = self.env.context.get("active_ids", []) if not active_model or not active_ids: return False return [ (0, 0, { "res_id": res_id, "quantity": 1, }) for res_id in active_ids ] model_id = fields.Many2one( "ir.model", "Model", required=True, default=lambda self: self.env.context.get("res_model_id"), ) label_paperformat_id = fields.Many2one( "report.paperformat.label", "Label Paper Format", readonly=True, required=True, default=lambda self: self.env.context.get("label_paperformat_id"), ) label_template = fields.Char( "Label QWeb Template", readonly=True, required=True, default=lambda self: self.env.context.get("label_template"), ) offset = fields.Integer( help="Number of labels to skip when printing", ) line_ids = fields.One2many( "report.label.wizard.line", "wizard_id", "Lines", default=_default_line_ids, required=True, ) def _prepare_report_data(self): self.ensure_one() return { "label_format": self.label_paperformat_id.read()[0], "label_template": self.label_template, "offset": self.offset, "res_model": self.model_id.model, "lines": [ { "res_id": line.res_id, "quantity": line.quantity, } for line in self.line_ids ], } def print_report(self): self.ensure_one() report = self.env.ref("report_label.report_label") action = report.report_action(self, data=self._prepare_report_data()) action["context"] = { "paperformat_id": self.label_paperformat_id.paperformat_id.id, } return action class ReportLabelWizardLine(models.TransientModel): _name = "report.label.wizard.line" _description = "Report Label Wizard Line" _order = "sequence" wizard_id = fields.Many2one( "report.label.wizard", "Wizard", required=True, ondelete="cascade", ) sequence = fields.Integer(default=10) res_id = 
fields.Integer("Resource ID", required=True) res_name = fields.Char(compute="_compute_res_name") quantity = fields.Integer(default=1, required=True) @api.depends("wizard_id.model_id", "res_id") def _compute_res_name(self): wizard = self.mapped("wizard_id") wizard.ensure_one() res_model = wizard.model_id.model res_ids = self.mapped("res_id") names_map = dict(self.env[res_model].browse(res_ids).name_get()) for rec in self: rec.res_name = names_map.get(rec.res_id)
agpl-3.0
6,794,300,847,196,247,000
29.970874
77
0.562696
false
3.761792
false
false
false
galtys/galtys-addons
simple_barcoding/simple_barcoding.py
1
2337
from datetime import datetime, timedelta import time from openerp import pooler, tools from openerp.osv import fields, osv from openerp.tools.translate import _ import math import base64 from openerp import netsvc import openerp.addons.decimal_precision as dp def print_barcode(code): import barcodes from barcodes import code128 from barcodes.write import CairoRender def get_geometry(s): spl = s.split("x", 1) if len(spl) == 2: try: return int(spl[0]), int(spl[1]) except ValueError: pass raise ValueError("invalid geometry") barcode = code128.Code128.from_unicode(code) #width, height = get_geometry("2000x442") width, height = get_geometry("708x342") data = CairoRender(barcode, margin=8).get_png(width, height) from subprocess import call fn='barcode_image_data.png' fp=open(fn,'wb') fp.write(data) fp.close() call(['lpr', fn]) import os os.unlink(fn) class product_product(osv.osv): _inherit="product.product" def print_barcode_label(self, cr, uid, ids, context=None): for p in self.browse(cr, uid, ids): print_barcode(p.default_code) product_product() class sale_order(osv.osv): _inherit="sale.order" def print_barcode_label(self, cr, uid, ids, context=None): for p in self.browse(cr, uid, ids): print_barcode(p.name) sale_order() class purchase_order(osv.osv): _inherit="purchase.order" def print_barcode_label(self, cr, uid, ids, context=None): for p in self.browse(cr, uid, ids): print_barcode(p.name) purchase_order() class stock_picking(osv.osv): _inherit="stock.picking" def print_barcode_label(self, cr, uid, ids, context=None): for p in self.browse(cr, uid, ids): print_barcode(p.name) stock_picking() class stock_picking_out(osv.osv): _inherit="stock.picking.out" def print_barcode_label(self, cr, uid, ids, context=None): for p in self.browse(cr, uid, ids): print_barcode(p.name) stock_picking_out() class stock_picking_in(osv.osv): _inherit="stock.picking.in" def print_barcode_label(self, cr, uid, ids, context=None): for p in self.browse(cr, uid, ids): print_barcode(p.name) 
stock_picking_in()
agpl-3.0
-8,593,698,705,259,154,000
28.2125
64
0.645272
false
3.406706
false
false
false
idrogeno/enigma2
lib/python/Screens/Satconfig.py
1
44050
from enigma import eDVBDB, eDVBResourceManager from Screens.Screen import Screen from Components.SystemInfo import SystemInfo from Components.ActionMap import ActionMap from Components.ConfigList import ConfigListScreen from Components.NimManager import nimmanager from Components.Button import Button from Components.Label import Label from Components.SelectionList import SelectionList, SelectionEntryComponent from Components.config import getConfigListEntry, config, configfile, ConfigNothing, ConfigSatlist, ConfigYesNo from Components.Sources.StaticText import StaticText from Components.Sources.List import List from Screens.MessageBox import MessageBox from Screens.ChoiceBox import ChoiceBox from Screens.ServiceStopScreen import ServiceStopScreen from Screens.AutoDiseqc import AutoDiseqc from Tools.BoundFunction import boundFunction from boxbranding import getBoxType from time import mktime, localtime from datetime import datetime from os import path def isFBCTuner(nim): if nim.description.find("FBC") == -1: return False return True def isFBCRoot(nim): if nim.slot %8 < 2: return True return False def isFBCLink(nim): if isFBCTuner(nim) and not isFBCRoot(nim): return True return False class NimSetup(Screen, ConfigListScreen, ServiceStopScreen): def createSimpleSetup(self, list, mode): nim = self.nimConfig if mode == "single": self.singleSatEntry = getConfigListEntry(_("Satellite"), nim.diseqcA) list.append(self.singleSatEntry) if nim.diseqcA.value in ("360", "560"): list.append(getConfigListEntry(_("Use circular LNB"), nim.simpleDiSEqCSetCircularLNB)) list.append(getConfigListEntry(_("Send DiSEqC"), nim.simpleSingleSendDiSEqC)) else: list.append(getConfigListEntry(_("Port A"), nim.diseqcA)) if mode in ("toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): list.append(getConfigListEntry(_("Port B"), nim.diseqcB)) if mode == "diseqc_a_b_c_d": list.append(getConfigListEntry(_("Port C"), nim.diseqcC)) list.append(getConfigListEntry(_("Port D"), nim.diseqcD)) if mode != 
"toneburst_a_b": list.append(getConfigListEntry(_("Set voltage and 22KHz"), nim.simpleDiSEqCSetVoltageTone)) list.append(getConfigListEntry(_("Send DiSEqC only on satellite change"), nim.simpleDiSEqCOnlyOnSatChange)) def createPositionerSetup(self, list): nim = self.nimConfig if nim.diseqcMode.value == "positioner_select": self.selectSatsEntry = getConfigListEntry(_("Press OK to select satellites"), self.nimConfig.pressOKtoList) list.append(self.selectSatsEntry) list.append(getConfigListEntry(_("Longitude"), nim.longitude)) list.append(getConfigListEntry(" ", nim.longitudeOrientation)) list.append(getConfigListEntry(_("Latitude"), nim.latitude)) list.append(getConfigListEntry(" ", nim.latitudeOrientation)) if SystemInfo["CanMeasureFrontendInputPower"]: self.advancedPowerMeasurement = getConfigListEntry(_("Use power measurement"), nim.powerMeasurement) list.append(self.advancedPowerMeasurement) if nim.powerMeasurement.value: list.append(getConfigListEntry(_("Power threshold in mA"), nim.powerThreshold)) self.turningSpeed = getConfigListEntry(_("Rotor turning speed"), nim.turningSpeed) list.append(self.turningSpeed) if nim.turningSpeed.value == "fast epoch": self.turnFastEpochBegin = getConfigListEntry(_("Begin time"), nim.fastTurningBegin) self.turnFastEpochEnd = getConfigListEntry(_("End time"), nim.fastTurningEnd) list.append(self.turnFastEpochBegin) list.append(self.turnFastEpochEnd) else: if nim.powerMeasurement.value: nim.powerMeasurement.value = False nim.powerMeasurement.save() if not hasattr(self, 'additionalMotorOptions'): self.additionalMotorOptions = ConfigYesNo(False) self.showAdditionalMotorOptions = getConfigListEntry(_("Extra motor options"), self.additionalMotorOptions) self.list.append(self.showAdditionalMotorOptions) if self.additionalMotorOptions.value: self.list.append(getConfigListEntry(" " + _("Horizontal turning speed") + " [" + chr(176) + "/sec]", nim.turningspeedH)) self.list.append(getConfigListEntry(" " + _("Vertical turning speed") + " [" 
+ chr(176) + "/sec]", nim.turningspeedV)) self.list.append(getConfigListEntry(" " + _("Turning step size") + " [" + chr(176) + "]", nim.tuningstepsize)) self.list.append(getConfigListEntry(" " + _("Max memory positions"), nim.rotorPositions)) def createConfigMode(self): if self.nim.isCompatible("DVB-S"): choices = {"nothing": _("not configured"), "simple": _("Simple"), "advanced": _("Advanced")} if len(nimmanager.canEqualTo(self.slotid)) > 0: choices["equal"] = _("Equal to") if len(nimmanager.canDependOn(self.slotid)) > 0: choices["satposdepends"] = _("Second cable of motorized LNB") if len(nimmanager.canConnectTo(self.slotid)) > 0: choices["loopthrough"] = _("Loop through to") if isFBCLink(self.nim): choices = { "nothing": _("not configured"), "advanced": _("advanced")} self.nimConfig.configMode.setChoices(choices, default = "simple") def createSetup(self): print "Creating setup" self.list = [ ] self.multiType = None self.configMode = None self.diseqcModeEntry = None self.advancedSatsEntry = None self.advancedLnbsEntry = None self.advancedDiseqcMode = None self.advancedUsalsEntry = None self.advancedLof = None self.advancedPowerMeasurement = None self.turningSpeed = None self.turnFastEpochBegin = None self.turnFastEpochEnd = None self.toneburst = None self.committedDiseqcCommand = None self.uncommittedDiseqcCommand = None self.commandOrder = None self.cableScanType = None self.have_advanced = False self.advancedUnicable = None self.advancedType = None self.advancedManufacturer = None self.advancedSCR = None self.advancedDiction = None self.advancedConnected = None self.advancedUnicableTuningAlgo = None self.showAdditionalMotorOptions = None self.selectSatsEntry = None self.advancedSelectSatsEntry = None self.singleSatEntry = None if self.nim.isMultiType(): try: multiType = self.nimConfig.multiType self.multiType = getConfigListEntry(_("Tuner type"), multiType) self.list.append(self.multiType) except: self.multiType = None if self.nim.isCompatible("DVB-S"): 
self.configMode = getConfigListEntry(_("Configuration mode"), self.nimConfig.configMode) self.list.append(self.configMode) if self.nimConfig.configMode.value == "simple": #simple setup self.diseqcModeEntry = getConfigListEntry(pgettext("Satellite configuration mode", "Mode"), self.nimConfig.diseqcMode) self.list.append(self.diseqcModeEntry) if self.nimConfig.diseqcMode.value in ("single", "toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): self.createSimpleSetup(self.list, self.nimConfig.diseqcMode.value) if self.nimConfig.diseqcMode.value in ("positioner", "positioner_select"): self.createPositionerSetup(self.list) elif self.nimConfig.configMode.value == "equal": choices = [] nimlist = nimmanager.canEqualTo(self.nim.slot) for id in nimlist: choices.append((str(id), nimmanager.getNimDescription(id))) self.nimConfig.connectedTo.setChoices(choices) self.list.append(getConfigListEntry(_("Tuner"), self.nimConfig.connectedTo)) elif self.nimConfig.configMode.value == "satposdepends": choices = [] nimlist = nimmanager.canDependOn(self.nim.slot) for id in nimlist: choices.append((str(id), nimmanager.getNimDescription(id))) self.nimConfig.connectedTo.setChoices(choices) self.list.append(getConfigListEntry(_("Tuner"), self.nimConfig.connectedTo)) elif self.nimConfig.configMode.value == "loopthrough": choices = [] print "connectable to:", nimmanager.canConnectTo(self.slotid) connectable = nimmanager.canConnectTo(self.slotid) for id in connectable: choices.append((str(id), nimmanager.getNimDescription(id))) self.nimConfig.connectedTo.setChoices(choices) self.list.append(getConfigListEntry(_("Connected to"), self.nimConfig.connectedTo)) elif self.nimConfig.configMode.value == "nothing": pass elif self.nimConfig.configMode.value == "advanced": # advanced # SATs self.advancedSatsEntry = getConfigListEntry(_("Satellite"), self.nimConfig.advanced.sats) self.list.append(self.advancedSatsEntry) current_config_sats = self.nimConfig.advanced.sats.value if current_config_sats in ("3605", 
"3606"): self.advancedSelectSatsEntry = getConfigListEntry(_("Press OK to select satellites"), self.nimConfig.pressOKtoList) self.list.append(self.advancedSelectSatsEntry) self.fillListWithAdvancedSatEntrys(self.nimConfig.advanced.sat[int(current_config_sats)]) else: cur_orb_pos = self.nimConfig.advanced.sats.orbital_position satlist = self.nimConfig.advanced.sat.keys() if cur_orb_pos is not None: if cur_orb_pos not in satlist: cur_orb_pos = satlist[0] self.fillListWithAdvancedSatEntrys(self.nimConfig.advanced.sat[cur_orb_pos]) self.have_advanced = True if path.exists("/proc/stb/frontend/%d/tone_amplitude" % self.nim.slot) and config.usage.setup_level.index >= 2: # expert self.list.append(getConfigListEntry(_("Tone amplitude"), self.nimConfig.toneAmplitude)) if path.exists("/proc/stb/frontend/%d/use_scpc_optimized_search_range" % self.nim.slot) and config.usage.setup_level.index >= 2: # expert self.list.append(getConfigListEntry(_("SCPC optimized search range"), self.nimConfig.scpcSearchRange)) if path.exists("/proc/stb/frontend/fbc/force_lnbon") and config.usage.setup_level.index >= 2: # expert self.list.append(getConfigListEntry(_("Force LNB Power"), self.nimConfig.forceLnbPower)) if path.exists("/proc/stb/frontend/fbc/force_toneburst") and config.usage.setup_level.index >= 2: # expert self.list.append(getConfigListEntry(_("Force ToneBurst"), self.nimConfig.forceToneBurst)) elif self.nim.isCompatible("DVB-C"): self.configMode = getConfigListEntry(_("Configuration mode"), self.nimConfig.configMode) self.list.append(self.configMode) if self.nimConfig.configMode.value == "enabled": self.list.append(getConfigListEntry(_("Network ID"), self.nimConfig.cable.scan_networkid)) self.cableScanType=getConfigListEntry(_("Used service scan type"), self.nimConfig.cable.scan_type) self.list.append(self.cableScanType) if self.nimConfig.cable.scan_type.value == "provider": self.list.append(getConfigListEntry(_("Provider to scan"), self.nimConfig.cable.scan_provider)) else: if 
self.nimConfig.cable.scan_type.value == "bands": # TRANSLATORS: option name, indicating which type of (DVB-C) band should be scanned. The name of the band is printed in '%s'. E.g.: 'Scan EU MID band' self.list.append(getConfigListEntry(_("Scan %s band") % "EU VHF I", self.nimConfig.cable.scan_band_EU_VHF_I)) self.list.append(getConfigListEntry(_("Scan %s band") % "EU MID", self.nimConfig.cable.scan_band_EU_MID)) self.list.append(getConfigListEntry(_("Scan %s band") % "EU VHF III", self.nimConfig.cable.scan_band_EU_VHF_III)) self.list.append(getConfigListEntry(_("Scan %s band") % "EU UHF IV", self.nimConfig.cable.scan_band_EU_UHF_IV)) self.list.append(getConfigListEntry(_("Scan %s band") % "EU UHF V", self.nimConfig.cable.scan_band_EU_UHF_V)) self.list.append(getConfigListEntry(_("Scan %s band") % "EU SUPER", self.nimConfig.cable.scan_band_EU_SUPER)) self.list.append(getConfigListEntry(_("Scan %s band") % "EU HYPER", self.nimConfig.cable.scan_band_EU_HYPER)) self.list.append(getConfigListEntry(_("Scan %s band") % "US LOW", self.nimConfig.cable.scan_band_US_LOW)) self.list.append(getConfigListEntry(_("Scan %s band") % "US MID", self.nimConfig.cable.scan_band_US_MID)) self.list.append(getConfigListEntry(_("Scan %s band") % "US HIGH", self.nimConfig.cable.scan_band_US_HIGH)) self.list.append(getConfigListEntry(_("Scan %s band") % "US SUPER", self.nimConfig.cable.scan_band_US_SUPER)) self.list.append(getConfigListEntry(_("Scan %s band") % "US HYPER", self.nimConfig.cable.scan_band_US_HYPER)) elif self.nimConfig.cable.scan_type.value == "steps": self.list.append(getConfigListEntry(_("Frequency scan step size(khz)"), self.nimConfig.cable.scan_frequency_steps)) # TRANSLATORS: option name, indicating which type of (DVB-C) modulation should be scanned. The modulation type is printed in '%s'. 
E.g.: 'Scan QAM16' self.list.append(getConfigListEntry(_("Scan %s") % "QAM16", self.nimConfig.cable.scan_mod_qam16)) self.list.append(getConfigListEntry(_("Scan %s") % "QAM32", self.nimConfig.cable.scan_mod_qam32)) self.list.append(getConfigListEntry(_("Scan %s") % "QAM64", self.nimConfig.cable.scan_mod_qam64)) self.list.append(getConfigListEntry(_("Scan %s") % "QAM128", self.nimConfig.cable.scan_mod_qam128)) self.list.append(getConfigListEntry(_("Scan %s") % "QAM256", self.nimConfig.cable.scan_mod_qam256)) self.list.append(getConfigListEntry(_("Scan %s") % "SR6900", self.nimConfig.cable.scan_sr_6900)) self.list.append(getConfigListEntry(_("Scan %s") % "SR6875", self.nimConfig.cable.scan_sr_6875)) self.list.append(getConfigListEntry(_("Scan additional SR"), self.nimConfig.cable.scan_sr_ext1)) self.list.append(getConfigListEntry(_("Scan additional SR"), self.nimConfig.cable.scan_sr_ext2)) self.have_advanced = False elif self.nim.isCompatible("DVB-T"): self.configMode = getConfigListEntry(_("Configuration mode"), self.nimConfig.configMode) self.list.append(self.configMode) self.have_advanced = False if self.nimConfig.configMode.value == "enabled": self.list.append(getConfigListEntry(_("Terrestrial provider"), self.nimConfig.terrestrial)) if not getBoxType() in ('spycat'): self.list.append(getConfigListEntry(_("Enable 5V for active antenna"), self.nimConfig.terrestrial_5V)) else: self.have_advanced = False self["config"].list = self.list self["config"].l.setList(self.list) def newConfig(self): self.setTextKeyBlue() checkList = (self.configMode, self.diseqcModeEntry, self.advancedSatsEntry, self.advancedLnbsEntry, self.advancedDiseqcMode, self.advancedUsalsEntry, self.advancedLof, self.advancedPowerMeasurement, self.turningSpeed, self.advancedType, self.advancedSCR, self.advancedDiction, self.advancedManufacturer, self.advancedUnicable, self.advancedConnected, self.advancedUnicableTuningAlgo, self.toneburst, self.committedDiseqcCommand, self.uncommittedDiseqcCommand, 
self.singleSatEntry, self.commandOrder, self.showAdditionalMotorOptions, self.cableScanType, self.multiType) if self["config"].getCurrent() == self.multiType: update_slots = [self.slotid] from Components.NimManager import InitNimManager InitNimManager(nimmanager, update_slots) self.nim = nimmanager.nim_slots[self.slotid] self.nimConfig = self.nim.config for x in checkList: if self["config"].getCurrent() == x: self.createSetup() break def run(self): if self.nimConfig.configMode.value == "simple": autodiseqc_ports = 0 if self.nimConfig.diseqcMode.value == "single": if self.nimConfig.diseqcA.orbital_position == 3600: autodiseqc_ports = 1 elif self.nimConfig.diseqcMode.value == "diseqc_a_b": if self.nimConfig.diseqcA.orbital_position == 3600 or self.nimConfig.diseqcB.orbital_position == 3600: autodiseqc_ports = 2 elif self.nimConfig.diseqcMode.value == "diseqc_a_b_c_d": if self.nimConfig.diseqcA.orbital_position == 3600 or self.nimConfig.diseqcB.orbital_position == 3600 or self.nimConfig.diseqcC.orbital_position == 3600 or self.nimConfig.diseqcD.orbital_position == 3600: autodiseqc_ports = 4 if autodiseqc_ports: self.autoDiseqcRun(autodiseqc_ports) return False if self.have_advanced and self.nim.config_mode == "advanced": self.fillAdvancedList() for x in self.list: if x in (self.turnFastEpochBegin, self.turnFastEpochEnd): # workaround for storing only hour*3600+min*60 value in configfile # not really needed.. just for cosmetics.. 
tm = localtime(x[1].value) dt = datetime(1970, 1, 1, tm.tm_hour, tm.tm_min) x[1].value = int(mktime(dt.timetuple())) x[1].save() nimmanager.sec.update() self.saveAll() return True def autoDiseqcRun(self, ports): self.session.openWithCallback(self.autoDiseqcCallback, AutoDiseqc, self.slotid, ports, self.nimConfig.simpleDiSEqCSetVoltageTone, self.nimConfig.simpleDiSEqCOnlyOnSatChange) def autoDiseqcCallback(self, result): from Screens.Wizard import Wizard if Wizard.instance is not None: Wizard.instance.back() else: self.createSetup() def fillListWithAdvancedSatEntrys(self, Sat): lnbnum = int(Sat.lnb.value) currLnb = self.nimConfig.advanced.lnb[lnbnum] diction = None if isinstance(currLnb, ConfigNothing): currLnb = None # LNBs self.advancedLnbsEntry = getConfigListEntry(_("LNB"), Sat.lnb) self.list.append(self.advancedLnbsEntry) if currLnb: if isFBCLink(self.nim): if currLnb.lof.value != "unicable": currLnb.lof.value = "unicable" self.list.append(getConfigListEntry(_("Priority"), currLnb.prio)) self.advancedLof = getConfigListEntry("LOF", currLnb.lof) self.list.append(self.advancedLof) if currLnb.lof.value == "user_defined": self.list.append(getConfigListEntry("LOF/L", currLnb.lofl)) self.list.append(getConfigListEntry("LOF/H", currLnb.lofh)) self.list.append(getConfigListEntry(_("Threshold"), currLnb.threshold)) if currLnb.lof.value == "unicable": self.advancedUnicable = getConfigListEntry("Unicable "+_("Configuration mode"), currLnb.unicable) self.list.append(self.advancedUnicable) if currLnb.unicable.value == "unicable_user": self.advancedDiction = getConfigListEntry(_("Diction"), currLnb.dictionuser) self.list.append(self.advancedDiction) if currLnb.dictionuser.value == "EN50494": satcr = currLnb.satcruserEN50494 stcrvco = currLnb.satcrvcouserEN50494[currLnb.satcruserEN50494.index] elif currLnb.dictionuser.value == "EN50607": satcr = currLnb.satcruserEN50607 stcrvco = currLnb.satcrvcouserEN50607[currLnb.satcruserEN50607.index] self.advancedSCR = 
getConfigListEntry(_("Channel"), satcr) self.list.append(self.advancedSCR) self.list.append(getConfigListEntry(_("Frequency"), stcrvco)) self.list.append(getConfigListEntry("LOF/L", currLnb.lofl)) self.list.append(getConfigListEntry("LOF/H", currLnb.lofh)) self.list.append(getConfigListEntry(_("Threshold"), currLnb.threshold)) elif currLnb.unicable.value == "unicable_matrix": nimmanager.sec.reconstructUnicableDate(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix, currLnb) manufacturer_name = currLnb.unicableMatrixManufacturer.value manufacturer = currLnb.unicableMatrix[manufacturer_name] product_name = manufacturer.product.value self.advancedManufacturer = getConfigListEntry(_("Manufacturer"), currLnb.unicableMatrixManufacturer) self.list.append(self.advancedManufacturer) if product_name in manufacturer.scr: diction = manufacturer.diction[product_name].value self.advancedType = getConfigListEntry(_("Type"), manufacturer.product) self.advancedSCR = getConfigListEntry(_("Channel"), manufacturer.scr[product_name]) self.list.append(self.advancedType) self.list.append(self.advancedSCR) self.list.append(getConfigListEntry(_("Frequency"), manufacturer.vco[product_name][manufacturer.scr[product_name].index])) elif currLnb.unicable.value == "unicable_lnb": nimmanager.sec.reconstructUnicableDate(currLnb.unicableLnbManufacturer, currLnb.unicableLnb, currLnb) manufacturer_name = currLnb.unicableLnbManufacturer.value manufacturer = currLnb.unicableLnb[manufacturer_name] product_name = manufacturer.product.value self.advancedManufacturer = getConfigListEntry(_("Manufacturer"), currLnb.unicableLnbManufacturer) self.list.append(self.advancedManufacturer) if product_name in manufacturer.scr: diction = manufacturer.diction[product_name].value self.advancedType = getConfigListEntry(_("Type"), manufacturer.product) self.advancedSCR = getConfigListEntry(_("Channel"), manufacturer.scr[product_name]) self.list.append(self.advancedType) self.list.append(self.advancedSCR) 
self.list.append(getConfigListEntry(_("Frequency"), manufacturer.vco[product_name][manufacturer.scr[product_name].index])) self.advancedUnicableTuningAlgo = getConfigListEntry(_("Tuning algorithm"), currLnb.unicableTuningAlgo) self.list.append(self.advancedUnicableTuningAlgo) choices = [] connectable = nimmanager.canConnectTo(self.slotid) for id in connectable: choices.append((str(id), nimmanager.getNimDescription(id))) if len(choices): if isFBCLink(self.nim): if self.nimConfig.advanced.unicableconnected.value != True: self.nimConfig.advanced.unicableconnected.value = True self.advancedConnected = getConfigListEntry(_("connected"), self.nimConfig.advanced.unicableconnected) self.list.append(self.advancedConnected) if self.nimConfig.advanced.unicableconnected.value: self.nimConfig.advanced.unicableconnectedTo.setChoices(choices) self.list.append(getConfigListEntry(_("Connected to"),self.nimConfig.advanced.unicableconnectedTo)) else: #no Unicable self.list.append(getConfigListEntry(_("Voltage mode"), Sat.voltage)) self.list.append(getConfigListEntry(_("Increased voltage"), currLnb.increased_voltage)) self.list.append(getConfigListEntry(_("Tone mode"), Sat.tonemode)) if lnbnum < 65 and diction !="EN50607": self.advancedDiseqcMode = getConfigListEntry(_("DiSEqC mode"), currLnb.diseqcMode) self.list.append(self.advancedDiseqcMode) if currLnb.diseqcMode.value != "none": self.list.append(getConfigListEntry(_("Fast DiSEqC"), currLnb.fastDiseqc)) self.toneburst = getConfigListEntry(_("Toneburst"), currLnb.toneburst) self.list.append(self.toneburst) self.committedDiseqcCommand = getConfigListEntry(_("DiSEqC 1.0 command"), currLnb.commitedDiseqcCommand) self.list.append(self.committedDiseqcCommand) if currLnb.diseqcMode.value == "1_0": if currLnb.toneburst.index and currLnb.commitedDiseqcCommand.index: self.list.append(getConfigListEntry(_("Command order"), currLnb.commandOrder1_0)) else: self.uncommittedDiseqcCommand = getConfigListEntry(_("DiSEqC 1.1 command"), 
currLnb.uncommittedDiseqcCommand) self.list.append(self.uncommittedDiseqcCommand) if currLnb.uncommittedDiseqcCommand.index: if currLnb.commandOrder.value == "ct": currLnb.commandOrder.value = "cut" elif currLnb.commandOrder.value == "tc": currLnb.commandOrder.value = "tcu" else: if currLnb.commandOrder.index & 1: currLnb.commandOrder.value = "tc" else: currLnb.commandOrder.value = "ct" self.commandOrder = getConfigListEntry(_("Command order"), currLnb.commandOrder) if 1 < ((1 if currLnb.uncommittedDiseqcCommand.index else 0) + (1 if currLnb.commitedDiseqcCommand.index else 0) + (1 if currLnb.toneburst.index else 0)): self.list.append(self.commandOrder) if currLnb.uncommittedDiseqcCommand.index: self.list.append(getConfigListEntry(_("DiSEqC 1.1 repeats"), currLnb.diseqcRepeats)) self.list.append(getConfigListEntry(_("Sequence repeat"), currLnb.sequenceRepeat)) if currLnb.diseqcMode.value == "1_2": if SystemInfo["CanMeasureFrontendInputPower"]: self.advancedPowerMeasurement = getConfigListEntry(_("Use power measurement"), currLnb.powerMeasurement) self.list.append(self.advancedPowerMeasurement) if currLnb.powerMeasurement.value: self.list.append(getConfigListEntry(_("Power threshold in mA"), currLnb.powerThreshold)) self.turningSpeed = getConfigListEntry(_("Rotor turning speed"), currLnb.turningSpeed) self.list.append(self.turningSpeed) if currLnb.turningSpeed.value == "fast epoch": self.turnFastEpochBegin = getConfigListEntry(_("Begin time"), currLnb.fastTurningBegin) self.turnFastEpochEnd = getConfigListEntry(_("End time"), currLnb.fastTurningEnd) self.list.append(self.turnFastEpochBegin) self.list.append(self.turnFastEpochEnd) else: if currLnb.powerMeasurement.value: currLnb.powerMeasurement.value = False currLnb.powerMeasurement.save() self.advancedUsalsEntry = getConfigListEntry(_("Use USALS for this sat"), Sat.usals) if lnbnum < 65: self.list.append(self.advancedUsalsEntry) if Sat.usals.value: self.list.append(getConfigListEntry(_("Longitude"), 
currLnb.longitude)) self.list.append(getConfigListEntry(" ", currLnb.longitudeOrientation)) self.list.append(getConfigListEntry(_("Latitude"), currLnb.latitude)) self.list.append(getConfigListEntry(" ", currLnb.latitudeOrientation)) else: self.list.append(getConfigListEntry(_("Stored position"), Sat.rotorposition)) if not hasattr(self, 'additionalMotorOptions'): self.additionalMotorOptions = ConfigYesNo(False) self.showAdditionalMotorOptions = getConfigListEntry(_("Extra motor options"), self.additionalMotorOptions) self.list.append(self.showAdditionalMotorOptions) if self.additionalMotorOptions.value: self.list.append(getConfigListEntry(" " + _("Horizontal turning speed") + " [" + chr(176) + "/sec]", currLnb.turningspeedH)) self.list.append(getConfigListEntry(" " + _("Vertical turning speed") + " [" + chr(176) + "/sec]", currLnb.turningspeedV)) self.list.append(getConfigListEntry(" " + _("Turning step size") + " [" + chr(176) + "]", currLnb.tuningstepsize)) self.list.append(getConfigListEntry(" " + _("Max memory positions"), currLnb.rotorPositions)) def fillAdvancedList(self): self.list = [ ] self.configMode = getConfigListEntry(_("Configuration mode"), self.nimConfig.configMode) self.list.append(self.configMode) self.advancedSatsEntry = getConfigListEntry(_("Satellite"), self.nimConfig.advanced.sats) self.list.append(self.advancedSatsEntry) for x in self.nimConfig.advanced.sat.keys(): Sat = self.nimConfig.advanced.sat[x] self.fillListWithAdvancedSatEntrys(Sat) self["config"].list = self.list def unicableconnection(self): if self.nimConfig.configMode.value == "advanced": connect_count = 0 dvbs_slots = nimmanager.getNimListOfType('DVB-S') dvbs_slots_len = len(dvbs_slots) for x in dvbs_slots: try: nim_slot = nimmanager.nim_slots[x] if nim_slot == self.nimConfig: self_idx = x if nim_slot.config.configMode.value == "advanced": if nim_slot.config.advanced.unicableconnected.value == True: connect_count += 1 except: pass print "adenin conections %d %d" %(connect_count, 
dvbs_slots_len) if connect_count >= dvbs_slots_len: return False self.slot_dest_list = [] def checkRecursiveConnect(slot_id): if slot_id in self.slot_dest_list: print slot_id return False self.slot_dest_list.append(slot_id) slot_config = nimmanager.nim_slots[slot_id].config if slot_config.configMode.value == "advanced": try: connected = slot_config.advanced.unicableconnected.value except: connected = False if connected == True: return checkRecursiveConnect(int(slot_config.advanced.unicableconnectedTo.value)) return True return checkRecursiveConnect(self.slotid) def checkLoopthrough(self): if self.nimConfig.configMode.value == "loopthrough": loopthrough_count = 0 dvbs_slots = nimmanager.getNimListOfType('DVB-S') dvbs_slots_len = len(dvbs_slots) for x in dvbs_slots: try: nim_slot = nimmanager.nim_slots[x] if nim_slot == self.nimConfig: self_idx = x if nim_slot.config.configMode.value == "loopthrough": loopthrough_count += 1 except: pass if loopthrough_count >= dvbs_slots_len: return False self.slot_dest_list = [] def checkRecursiveConnect(slot_id): if slot_id in self.slot_dest_list: return False self.slot_dest_list.append(slot_id) slot_config = nimmanager.nim_slots[slot_id].config if slot_config.configMode.value == "loopthrough": return checkRecursiveConnect(int(slot_config.connectedTo.value)) return True return checkRecursiveConnect(self.slotid) def keyOk(self): if self["config"].getCurrent() == self.advancedSelectSatsEntry: conf = self.nimConfig.advanced.sat[int(self.nimConfig.advanced.sats.value)].userSatellitesList self.session.openWithCallback(boundFunction(self.updateConfUserSatellitesList, conf), SelectSatsEntryScreen, userSatlist=conf.value) elif self["config"].getCurrent() == self.selectSatsEntry: conf = self.nimConfig.userSatellitesList self.session.openWithCallback(boundFunction(self.updateConfUserSatellitesList, conf), SelectSatsEntryScreen, userSatlist=conf.value) else: self.keySave() def updateConfUserSatellitesList(self, conf, val=None): if val is not 
None: conf.value = val conf.save() def keySave(self): if not self.unicableconnection(): self.session.open(MessageBox, _("The unicable connection setting is wrong.\n Maybe recursive connection of tuners."),MessageBox.TYPE_ERROR,timeout=10) return if not self.checkLoopthrough(): self.session.open(MessageBox, _("The loopthrough setting is wrong."),MessageBox.TYPE_ERROR,timeout=10) return old_configured_sats = nimmanager.getConfiguredSats() if not self.run(): return new_configured_sats = nimmanager.getConfiguredSats() self.unconfed_sats = old_configured_sats - new_configured_sats self.satpos_to_remove = None self.deleteConfirmed((None, "no")) def deleteConfirmed(self, confirmed): if confirmed is None: confirmed = (None, "no") if confirmed[1] == "yes" or confirmed[1] == "yestoall": eDVBDB.getInstance().removeServices(-1, -1, -1, self.satpos_to_remove) if self.satpos_to_remove is not None: self.unconfed_sats.remove(self.satpos_to_remove) self.satpos_to_remove = None for orbpos in self.unconfed_sats: self.satpos_to_remove = orbpos orbpos = self.satpos_to_remove try: # why we need this cast? sat_name = str(nimmanager.getSatDescription(orbpos)) except: if orbpos > 1800: # west orbpos = 3600 - orbpos h = _("W") else: h = _("E") sat_name = ("%d.%d" + h) % (orbpos / 10, orbpos % 10) if confirmed[1] == "yes" or confirmed[1] == "no": # TRANSLATORS: The satellite with name '%s' is no longer used after a configuration change. The user is asked whether or not the satellite should be deleted. self.session.openWithCallback(self.deleteConfirmed, ChoiceBox, _("%s is no longer used. 
Should it be deleted?") % sat_name, [(_("Yes"), "yes"), (_("No"), "no"), (_("Yes to all"), "yestoall"), (_("No to all"), "notoall")], None, 1) if confirmed[1] == "yestoall" or confirmed[1] == "notoall": self.deleteConfirmed(confirmed) break else: self.restoreService(_("Zap back to service before tuner setup?")) def __init__(self, session, slotid): Screen.__init__(self, session) Screen.setTitle(self, _("Tuner settings")) self.list = [ ] ServiceStopScreen.__init__(self) self.stopService() ConfigListScreen.__init__(self, self.list) self["key_red"] = Label(_("Close")) self["key_green"] = Label(_("Save")) self["key_yellow"] = Label(_("Configuration mode")) self["key_blue"] = Label() self["actions"] = ActionMap(["SetupActions", "SatlistShortcutAction", "ColorActions"], { "ok": self.keyOk, "save": self.keySave, "cancel": self.keyCancel, "changetype": self.changeConfigurationMode, "nothingconnected": self.nothingConnectedShortcut, "red": self.keyCancel, "green": self.keySave, }, -2) self.slotid = slotid self.nim = nimmanager.nim_slots[slotid] self.nimConfig = self.nim.config self.createConfigMode() self.createSetup() self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.setTitle(_("Reception Settings")) def keyLeft(self): if isFBCLink(self.nim): checkList = (self.advancedLof, self.advancedConnected) curEntry = self["config"].getCurrent() if curEntry in checkList: return ConfigListScreen.keyLeft(self) if self["config"].getCurrent() in (self.advancedSelectSatsEntry, self.selectSatsEntry): self.keyOk() else: self.newConfig() def setTextKeyBlue(self): self["key_blue"].setText("") if self["config"].isChanged(): self["key_blue"].setText(_("Set default")) def keyRight(self): if isFBCLink(self.nim): checkList = (self.advancedLof, self.advancedConnected) curEntry = self["config"].getCurrent() if curEntry in checkList: return ConfigListScreen.keyRight(self) if self["config"].getCurrent() in (self.advancedSelectSatsEntry, self.selectSatsEntry): self.keyOk() 
else: self.newConfig() def handleKeyFileCallback(self, answer): ConfigListScreen.handleKeyFileCallback(self, answer) self.newConfig() def keyCancel(self): if self["config"].isChanged(): self.session.openWithCallback(self.cancelConfirm, MessageBox, _("Really close without saving settings?"), default = False) else: self.restoreService(_("Zap back to service before tuner setup?")) def saveAll(self): if self.nim.isCompatible("DVB-S"): # reset connectedTo to all choices to properly store the default value choices = [] nimlist = nimmanager.getNimListOfType("DVB-S", self.slotid) for id in nimlist: choices.append((str(id), nimmanager.getNimDescription(id))) self.nimConfig.connectedTo.setChoices(choices) # sanity check for empty sat list if self.nimConfig.configMode.value != "satposdepends" and len(nimmanager.getSatListForNim(self.slotid)) < 1: self.nimConfig.configMode.value = "nothing" for x in self["config"].list: x[1].save() configfile.save() def cancelConfirm(self, result): if not result: return for x in self["config"].list: x[1].cancel() # we need to call saveAll to reset the connectedTo choices self.saveAll() self.restoreService(_("Zap back to service before tuner setup?")) def changeConfigurationMode(self): if self.configMode: self.nimConfig.configMode.selectNext() self["config"].invalidate(self.configMode) self.setTextKeyBlue() self.createSetup() def nothingConnectedShortcut(self): if self["config"].isChanged(): for x in self["config"].list: x[1].cancel() self.setTextKeyBlue() self.createSetup() class NimSelection(Screen): def __init__(self, session): Screen.__init__(self, session) Screen.setTitle(self, _("Tuner configuration")) self.list = [None] * nimmanager.getSlotCount() self["nimlist"] = List(self.list) self.loadFBCLinks() self.updateList() self.setResultClass() self["key_red"] = StaticText(_("Close")) self["key_green"] = StaticText(_("Select")) self["actions"] = ActionMap(["SetupActions", "ColorActions", "MenuActions", "ChannelSelectEPGActions"], { "ok": 
self.okbuttonClick, "info": self.extraInfo, "epg": self.extraInfo, "cancel": self.close, "red": self.close, "green": self.okbuttonClick, "menu": self.exit, }, -2) self.setTitle(_("Choose Tuner")) def loadFBCLinks(self): for x in nimmanager.nim_slots: slotid = x.slot nimConfig = nimmanager.getNimConfig(x.slot) configMode = nimConfig.configMode.value if self.showNim(x): if x.isCompatible("DVB-S"): if isFBCLink(x) and configMode != "advanced": from enigma import getLinkedSlotID link = getLinkedSlotID(x.slot) if link == -1: nimConfig.configMode.value = "nothing" else: nimConfig.configMode.value = "loopthrough" nimConfig.connectedTo.value = str(link) def exit(self): self.close(True) def setResultClass(self): self.resultclass = NimSetup def extraInfo(self): nim = self["nimlist"].getCurrent() nim = nim and nim[3] if config.usage.setup_level.index >= 2 and nim is not None: text = _("Capabilities: ") + ",".join(eDVBResourceManager.getInstance().getFrontendCapabilities(nim.slot).splitlines()) self.session.open(MessageBox, text, MessageBox.TYPE_INFO, simple=True) def okbuttonClick(self): nim = self["nimlist"].getCurrent() nim = nim and nim[3] nimConfig = nimmanager.getNimConfig(nim.slot) if isFBCLink(nim) and nimConfig.configMode.value == "loopthrough": return if nim is not None and not nim.empty and nim.isSupported(): self.session.openWithCallback(boundFunction(self.NimSetupCB, self["nimlist"].getIndex()), self.resultclass, nim.slot) def NimSetupCB(self, index=None): self.loadFBCLinks() self.updateList() def showNim(self, nim): return True def updateList(self, index=None): self.list = [ ] for x in nimmanager.nim_slots: slotid = x.slot nimConfig = nimmanager.getNimConfig(x.slot) text = nimConfig.configMode.value if self.showNim(x): if x.isCompatible("DVB-S"): if nimConfig.configMode.value in ("loopthrough", "equal", "satposdepends"): text = { "loopthrough": _("Loop through to"), "equal": _("Equal to"), "satposdepends": _("Second cable of motorized LNB") } 
[nimConfig.configMode.value] if len(x.input_name) > 1: text += " " + _("Tuner") + " " + ["A1", "A2", "B", "C"][int(nimConfig.connectedTo.value)] else: text += " " + _("Tuner") + " " + chr(ord('A')+int(nimConfig.connectedTo.value)) elif nimConfig.configMode.value == "nothing": text = _("not configured") elif nimConfig.configMode.value == "simple": if nimConfig.diseqcMode.value in ("single", "toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): text = {"single": _("Single"), "toneburst_a_b": _("Toneburst A/B"), "diseqc_a_b": _("DiSEqC A/B"), "diseqc_a_b_c_d": _("DiSEqC A/B/C/D")}[nimConfig.diseqcMode.value] + "\n" text += _("Sats") + ": " satnames = [] if nimConfig.diseqcA.orbital_position < 3600: satnames.append(nimmanager.getSatName(int(nimConfig.diseqcA.value))) if nimConfig.diseqcMode.value in ("toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): if nimConfig.diseqcB.orbital_position < 3600: satnames.append(nimmanager.getSatName(int(nimConfig.diseqcB.value))) if nimConfig.diseqcMode.value == "diseqc_a_b_c_d": if nimConfig.diseqcC.orbital_position < 3600: satnames.append(nimmanager.getSatName(int(nimConfig.diseqcC.value))) if nimConfig.diseqcD.orbital_position < 3600: satnames.append(nimmanager.getSatName(int(nimConfig.diseqcD.value))) if len(satnames) <= 2: text += ", ".join(satnames) elif len(satnames) > 2: # we need a newline here, since multi content lists don't support automtic line wrapping text += ", ".join(satnames[:2]) + ",\n" text += " " + ", ".join(satnames[2:]) elif nimConfig.diseqcMode.value in ("positioner", "positioner_select"): text = {"positioner": _("Positioner"), "positioner_select": _("Positioner (selecting satellites)")}[nimConfig.diseqcMode.value] text += ":" if nimConfig.positionerMode.value == "usals": text += "USALS" elif nimConfig.positionerMode.value == "manual": text += _("Manual") else: text = _("Simple") elif nimConfig.configMode.value == "advanced": text = _("Advanced") if isFBCLink(x) and nimConfig.configMode.value != "advanced": text += 
_("\n<This tuner is configured automatically>") elif x.isCompatible("DVB-T") or x.isCompatible("DVB-C"): if nimConfig.configMode.value == "nothing": text = _("nothing connected") elif nimConfig.configMode.value == "enabled": text = _("Enabled") if x.isMultiType(): text = _("Switchable tuner types:") + "(" + ','.join(x.getMultiTypeList().values()) + ")" + "\n" + text if not x.isSupported(): text = _("Tuner is not supported") self.list.append((slotid, x.friendly_full_description, text, x)) self["nimlist"].setList(self.list) self["nimlist"].updateList(self.list) if index is not None: self["nimlist"].setIndex(index) class SelectSatsEntryScreen(Screen): skin = """ <screen name="SelectSatsEntryScreen" position="center,center" size="560,410" title="Select Sats Entry" > <ePixmap name="red" position="0,0" zPosition="2" size="140,40" pixmap="skin_default/buttons/red.png" transparent="1" alphatest="on" /> <ePixmap name="green" position="140,0" zPosition="2" size="140,40" pixmap="skin_default/buttons/green.png" transparent="1" alphatest="on" /> <ePixmap name="yellow" position="280,0" zPosition="2" size="140,40" pixmap="skin_default/buttons/yellow.png" transparent="1" alphatest="on" /> <ePixmap name="blue" position="420,0" zPosition="2" size="140,40" pixmap="skin_default/buttons/blue.png" transparent="1" alphatest="on" /> <widget name="key_red" position="0,0" size="140,40" valign="center" halign="center" zPosition="4" foregroundColor="white" font="Regular;17" transparent="1" shadowColor="background" shadowOffset="-2,-2" /> <widget name="key_green" position="140,0" size="140,40" valign="center" halign="center" zPosition="4" foregroundColor="white" font="Regular;17" transparent="1" shadowColor="background" shadowOffset="-2,-2" /> <widget name="key_yellow" position="280,0" size="140,40" valign="center" halign="center" zPosition="4" foregroundColor="white" font="Regular;17" transparent="1" shadowColor="background" shadowOffset="-2,-2" /> <widget name="key_blue" position="420,0" 
size="140,40" valign="center" halign="center" zPosition="4" foregroundColor="white" font="Regular;17" transparent="1" shadowColor="background" shadowOffset="-2,-2" /> <widget name="list" position="10,40" size="540,330" scrollbarMode="showNever" /> <ePixmap pixmap="skin_default/div-h.png" position="0,375" zPosition="1" size="540,2" transparent="1" alphatest="on" /> <widget name="hint" position="10,380" size="540,25" font="Regular;19" halign="center" transparent="1" /> </screen>""" def __init__(self, session, userSatlist=[]): Screen.__init__(self, session) self["key_red"] = Button(_("Cancel")) self["key_green"] = Button(_("Save")) self["key_yellow"] = Button(_("Sort by")) self["key_blue"] = Button(_("Select all")) self["hint"] = Label(_("Press OK to toggle the selection")) SatList = [] for sat in nimmanager.getSatList(): selected = False if isinstance(userSatlist, str) and str(sat[0]) in userSatlist: selected = True SatList.append((sat[0], sat[1], sat[2], selected)) sat_list = [SelectionEntryComponent(x[1], x[0], x[2], x[3]) for x in SatList] self["list"] = SelectionList(sat_list, enableWrapAround=True) self["setupActions"] = ActionMap(["SetupActions", "ColorActions"], { "red": self.cancel, "green": self.save, "yellow": self.sortBy, "blue": self["list"].toggleAllSelection, "save": self.save, "cancel": self.cancel, "ok": self["list"].toggleSelection, }, -2) self.setTitle(_("Select satellites")) def save(self): val = [x[0][1] for x in self["list"].list if x[0][3]] self.close(str(val)) def cancel(self): self.close(None) def sortBy(self): lst = self["list"].list if len(lst) > 1: menu = [(_("Reverse list"), "2"), (_("Standard list"), "1")] connected_sat = [x[0][1] for x in lst if x[0][3]] if len(connected_sat) > 0: menu.insert(0,(_("Connected satellites"), "3")) def sortAction(choice): if choice: reverse_flag = False sort_type = int(choice[1]) if choice[1] == "2": sort_type = reverse_flag = 1 elif choice[1] == "3": reverse_flag = not reverse_flag 
self["list"].sort(sortType=sort_type, flag=reverse_flag) self["list"].moveToIndex(0) self.session.openWithCallback(sortAction, ChoiceBox, title= _("Select sort method:"), list=menu)
gpl-2.0
3,012,844,069,604,993,500
45.466245
234
0.711283
false
3.113514
true
false
false
sti-lyneos/shop
softwarecenter/ui/gtk3/panes/historypane.py
2
14758
# -*- coding: utf-8 -*- # # Copyright (C) 2010 Canonical # # Authors: # Olivier Tilloy # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; version 3. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program. If not, see <http://www.gnu.org/licenses/>. from gi.repository import GObject from gi.repository import Gtk, Gdk import logging import datetime from gettext import gettext as _ from softwarecenter.ui.gtk3.em import get_em from softwarecenter.ui.gtk3.widgets.spinner import SpinnerNotebook from basepane import BasePane from softwarecenter.enums import Icons from softwarecenter.ui.gtk3.session.viewmanager import get_viewmanager from softwarecenter.ui.gtk3.session.displaystate import DisplayState class HistoryPane(Gtk.VBox, BasePane): __gsignals__ = { "app-list-changed": (GObject.SignalFlags.RUN_LAST, None, (int, ), ), "history-pane-created": (GObject.SignalFlags.RUN_FIRST, None, ()), } (COL_WHEN, COL_ACTION, COL_PKG) = range(3) COL_TYPES = (object, int, object) (ALL, INSTALLED, REMOVED, UPGRADED) = range(4) ICON_SIZE = 1.2 * get_em() PADDING = 4 # pages for the spinner notebook (PAGE_HISTORY_VIEW, PAGE_SPINNER) = range(2) def __init__(self, cache, db, distro, icons): Gtk.VBox.__init__(self) self.cache = cache self.db = db self.distro = distro self.icons = icons self.apps_filter = None self.state = DisplayState() self.pane_name = _("History") # Icon cache, invalidated upon icon theme changes self._app_icon_cache = {} self._reset_icon_cache() self.icons.connect('changed', self._reset_icon_cache) self._emblems = {} self._get_emblems(self.icons) vm = get_viewmanager() 
self.searchentry = vm.get_global_searchentry() self.toolbar = Gtk.Toolbar() self.toolbar.show() self.toolbar.set_style(Gtk.ToolbarStyle.TEXT) self.pack_start(self.toolbar, False, True, 0) all_action = Gtk.RadioAction('filter_all', _('All Changes'), None, None, self.ALL) all_action.connect('changed', self.change_filter) all_button = all_action.create_tool_item() self.toolbar.insert(all_button, 0) installs_action = Gtk.RadioAction('filter_installs', _('Installations'), None, None, self.INSTALLED) installs_action.join_group(all_action) installs_button = installs_action.create_tool_item() self.toolbar.insert(installs_button, 1) upgrades_action = Gtk.RadioAction( 'filter_upgrads', _('Updates'), None, None, self.UPGRADED) upgrades_action.join_group(all_action) upgrades_button = upgrades_action.create_tool_item() self.toolbar.insert(upgrades_button, 2) removals_action = Gtk.RadioAction( 'filter_removals', _('Removals'), None, None, self.REMOVED) removals_action.join_group(all_action) removals_button = removals_action.create_tool_item() self.toolbar.insert(removals_button, 3) self.toolbar.connect('draw', self.on_toolbar_draw) self._actions_list = all_action.get_group() self._set_actions_sensitive(False) self.view = Gtk.TreeView() self.view.set_headers_visible(False) self.view.show() self.history_view = Gtk.ScrolledWindow() self.history_view.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) self.history_view.show() self.history_view.add(self.view) # make a spinner to display while history is loading self.spinner_notebook = SpinnerNotebook( self.history_view, _('Loading history')) self.pack_start(self.spinner_notebook, True, True, 0) self.store = Gtk.TreeStore(*self.COL_TYPES) self.visible_changes = 0 self.store_filter = self.store.filter_new(None) self.store_filter.set_visible_func(self.filter_row, None) self.view.set_model(self.store_filter) all_action.set_active(True) self.last = None # to save (a lot of) time at startup we load history later, only when # it 
is selected to be viewed self.history = None self.column = Gtk.TreeViewColumn(_('Date')) self.view.append_column(self.column) self.cell_icon = Gtk.CellRendererPixbuf() self.cell_icon.set_padding(self.PADDING, self.PADDING / 2) self.column.pack_start(self.cell_icon, False) self.column.set_cell_data_func(self.cell_icon, self.render_cell_icon) self.cell_text = Gtk.CellRendererText() self.column.pack_start(self.cell_text, True) self.column.set_cell_data_func(self.cell_text, self.render_cell_text) self.cell_time = Gtk.CellRendererText() self.cell_time.set_padding(6, 0) self.cell_time.set_alignment(1.0, 0.5) self.column.pack_end(self.cell_time, False) self.column.set_cell_data_func(self.cell_time, self.render_cell_time) # busy cursor self.busy_cursor = Gdk.Cursor.new(Gdk.CursorType.WATCH) def init_view(self): if self.history is None: # if the history is not yet initialized we have to load and parse # it show a spinner while we do that self.realize() window = self.get_window() window.set_cursor(self.busy_cursor) self.spinner_notebook.show_spinner() self.load_and_parse_history() self.spinner_notebook.hide_spinner() self._set_actions_sensitive(True) window.set_cursor(None) self.emit("history-pane-created") def on_toolbar_draw(self, widget, cr): a = widget.get_allocation() context = widget.get_style_context() color = context.get_border_color(widget.get_state_flags()) cr.set_source_rgba(color.red, color.green, color.blue, 0.5) cr.set_line_width(1) cr.move_to(0.5, a.height - 0.5) cr.rel_line_to(a.width - 1, 0) cr.stroke() def _get_emblems(self, icons): from softwarecenter.enums import USE_PACKAGEKIT_BACKEND if USE_PACKAGEKIT_BACKEND: emblem_names = ("pk-package-add", "pk-package-delete", "pk-package-update") else: emblem_names = ("package-install", "package-remove", "package-upgrade") for i, emblem in enumerate(emblem_names): pb = icons.load_icon(emblem, self.ICON_SIZE, 0) self._emblems[i + 1] = pb def _set_actions_sensitive(self, sensitive): for action in self._actions_list: 
action.set_sensitive(sensitive) def _reset_icon_cache(self, theme=None): self._app_icon_cache.clear() try: missing = self.icons.load_icon(Icons.MISSING_APP, self.ICON_SIZE, 0) except GObject.GError: missing = None self._app_icon_cache[Icons.MISSING_APP] = missing def load_and_parse_history(self): from softwarecenter.db.history import get_pkg_history self.history = get_pkg_history() # FIXME: a signal from AptHistory is nicer while not self.history.history_ready: while Gtk.events_pending(): Gtk.main_iteration() self.parse_history() self.history.set_on_update(self.parse_history) def parse_history(self): date = None when = None last_row = None day = self.store.get_iter_first() if day is not None: date = self.store.get_value(day, self.COL_WHEN) if len(self.history.transactions) == 0: logging.debug("AptHistory is currently empty") return new_last = self.history.transactions[0].start_date for trans in self.history.transactions: while Gtk.events_pending(): Gtk.main_iteration() when = trans.start_date if self.last is not None and when <= self.last: break if when.date() != date: date = when.date() day = self.store.append(None, (date, self.ALL, None)) last_row = None actions = {self.INSTALLED: trans.install, self.REMOVED: trans.remove, self.UPGRADED: trans.upgrade, } for action, pkgs in actions.items(): for pkgname in pkgs: row = (when, action, pkgname) last_row = self.store.insert_after(day, last_row, row) self.last = new_last self.update_view() def on_search_terms_changed(self, entry, terms): self.update_view() def change_filter(self, action, current): self.filter = action.get_current_value() self.update_view() def update_view(self): self.store_filter.refilter() self.view.collapse_all() # Expand all the matching rows if self.searchentry.get_text(): self.view.expand_all() # Compute the number of visible changes # don't do this atm - the spec doesn't mention that the history pane # should have a status text and it gives us a noticeable performance # gain if we don't calculate 
this #self.visible_changes = 0 #day = self.store_filter.get_iter_first() #while day is not None: # self.visible_changes += self.store_filter.iter_n_children(day) # day = self.store_filter.iter_next(day) # Expand the most recent day day = self.store.get_iter_first() if day is not None: path = self.store.get_path(day) self.view.expand_row(path, False) self.view.scroll_to_cell(path) #self.emit('app-list-changed', self.visible_changes) def _row_matches(self, store, iter): # Whether a child row matches the current filter and the search entry pkg = store.get_value(iter, self.COL_PKG) or '' filter_values = (self.ALL, store.get_value(iter, self.COL_ACTION)) filter_matches = self.filter in filter_values search_matches = self.searchentry.get_text().lower() in pkg.lower() return filter_matches and search_matches def filter_row(self, store, iter, user_data): pkg = store.get_value(iter, self.COL_PKG) if pkg is not None: return self._row_matches(store, iter) else: i = store.iter_children(iter) while i is not None: if self._row_matches(store, i): return True i = store.iter_next(i) return False def render_cell_icon(self, column, cell, store, iter, user_data): pkg = store.get_value(iter, self.COL_PKG) if pkg is None: cell.set_visible(False) return cell.set_visible(True) when = store.get_value(iter, self.COL_WHEN) if isinstance(when, datetime.datetime): action = store.get_value(iter, self.COL_ACTION) cell.set_property('pixbuf', self._emblems[action]) #~ icon_name = Icons.MISSING_APP #~ for m in self.db.xapiandb.postlist("AP" + pkg): #~ doc = self.db.xapiandb.get_document(m.docid) #~ icon_value = doc.get_value(XapianValues.ICON) #~ if icon_value: #~ icon_name = os.path.splitext(icon_value)[0] #~ break #~ if icon_name in self._app_icon_cache: #~ icon = self._app_icon_cache[icon_name] #~ else: #~ try: #~ icon = self.icons.load_icon(icon_name, self.ICON_SIZE, #~ 0) #~ except GObject.GError: #~ icon = self._app_icon_cache[Icons.MISSING_APP] #~ self._app_icon_cache[icon_name] = icon def 
render_cell_text(self, column, cell, store, iter, user_data): when = store.get_value(iter, self.COL_WHEN) if isinstance(when, datetime.datetime): pkg = store.get_value(iter, self.COL_PKG) text = pkg elif isinstance(when, datetime.date): today = datetime.date.today() monday = today - datetime.timedelta(days=today.weekday()) if when == today: text = _("Today") elif when >= monday: # Current week, display the name of the day text = when.strftime(_('%A')) else: if when.year == today.year: # Current year, display the day and month text = when.strftime(_('%d %B')) else: # Display the full date: day, month, year text = when.strftime(_('%d %B %Y')) cell.set_property('markup', text) def render_cell_time(self, column, cell, store, iter, user_data): when = store.get_value(iter, self.COL_WHEN) text = '' if isinstance(when, datetime.datetime): action = store.get_value(iter, self.COL_ACTION) # Translators : time displayed in history, display hours # (0-12), minutes and AM/PM. %H should be used instead # of %I to display hours 0-24 time_text = when.time().strftime(_('%I:%M %p')) if self.filter is not self.ALL: action_text = time_text else: if action == self.INSTALLED: action_text = _('installed %s') % time_text elif action == self.REMOVED: action_text = _('removed %s') % time_text elif action == self.UPGRADED: action_text = _('updated %s') % time_text color = {'color': '#8A8A8A', 'action': action_text} text = '<span color="%(color)s">%(action)s</span>' % color cell.set_property('markup', text)
lgpl-3.0
67,786,787,943,351,736
37.633508
79
0.578127
false
3.820347
false
false
false
f2nd/yandex-tank
yandextank/stepper/config.py
4
5340
import logging from netort.resource import manager as resource from . import info from . import instance_plan as ip from . import load_plan as lp from . import missile from .mark import get_marker from .module_exceptions import StepperConfigurationError, AmmoFileError class ComponentFactory(): def __init__( self, rps_schedule=None, http_ver='1.1', ammo_file=None, instances_schedule=None, instances=1000, loop_limit=-1, ammo_limit=-1, uris=None, headers=None, autocases=None, enum_ammo=False, ammo_type='phantom', chosen_cases=None, use_cache=True): self.log = logging.getLogger(__name__) self.ammo_file = ammo_file self.ammo_type = ammo_type self.rps_schedule = rps_schedule self.http_ver = http_ver self.instances_schedule = instances_schedule loop_limit = int(loop_limit) if loop_limit == -1: # -1 means infinite loop_limit = None ammo_limit = int(ammo_limit) if ammo_limit == -1: # -1 means infinite ammo_limit = None if loop_limit is None and ammo_limit is None and not rps_schedule: # we should have only one loop if we have instance_schedule loop_limit = 1 info.status.loop_limit = loop_limit info.status.ammo_limit = ammo_limit info.status.publish("instances", instances) self.uris = uris if self.uris and loop_limit: info.status.ammo_limit = len(self.uris) * loop_limit self.headers = headers self.marker = get_marker(autocases, enum_ammo) self.chosen_cases = chosen_cases or [] self.use_cache = use_cache def get_load_plan(self): """ return load plan (timestamps generator) """ if self.rps_schedule and self.instances_schedule: raise StepperConfigurationError( 'Both rps and instances schedules specified. 
You must specify only one of them' ) elif self.rps_schedule: info.status.publish('loadscheme', self.rps_schedule) return lp.create(self.rps_schedule) elif self.instances_schedule: info.status.publish('loadscheme', self.instances_schedule) return ip.create(self.instances_schedule) else: self.instances_schedule = [] info.status.publish('loadscheme', self.instances_schedule) return ip.create(self.instances_schedule) def get_ammo_generator(self): """ return ammo generator """ af_readers = { 'phantom': missile.AmmoFileReader, 'slowlog': missile.SlowLogReader, 'line': missile.LineReader, 'uri': missile.UriReader, 'uripost': missile.UriPostReader, 'access': missile.AccessLogReader, 'caseline': missile.CaseLineReader, } if self.uris and self.ammo_file: raise StepperConfigurationError( 'Both uris and ammo file specified. You must specify only one of them' ) elif self.uris: ammo_gen = missile.UriStyleGenerator( self.uris, self.headers, http_ver=self.http_ver) elif self.ammo_file: if self.ammo_type in af_readers: if self.ammo_type == 'phantom': opener = resource.get_opener(self.ammo_file) with opener(self.use_cache) as ammo: try: if not ammo.next()[0].isdigit(): self.ammo_type = 'uri' self.log.info( "Setting ammo_type 'uri' because ammo is not started with digit and you did not specify ammo format" ) else: self.log.info( "Default ammo type ('phantom') used, use 'phantom.ammo_type' option to override it" ) except StopIteration: self.log.exception( "Couldn't read first line of ammo file") raise AmmoFileError( "Couldn't read first line of ammo file") else: raise NotImplementedError( 'No such ammo type implemented: "%s"' % self.ammo_type) ammo_gen = af_readers[self.ammo_type]( self.ammo_file, headers=self.headers, http_ver=self.http_ver, use_cache=self.use_cache) else: raise StepperConfigurationError( 'Ammo not found. 
Specify uris or ammo file') self.log.info("Using %s ammo reader" % type(ammo_gen).__name__) return ammo_gen def get_marker(self): return self.marker def get_filter(self): if len(self.chosen_cases): def is_chosen_case(ammo_tuple): return ammo_tuple[1] in self.chosen_cases return is_chosen_case else: return lambda ammo_tuple: True
lgpl-2.1
9,019,727,846,882,107,000
37.978102
136
0.540262
false
4.191523
false
false
false
ddaeschler/pyrax
samples/queueing/list_messages.py
13
2283
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c)2013 Rackspace US, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import print_function import os import six import pyrax import pyrax.exceptions as exc pyrax.set_setting("identity_type", "rackspace") creds_file = os.path.expanduser("~/.rackspace_cloud_credentials") pyrax.set_credential_file(creds_file) pq = pyrax.queues queues = pq.list() if not queues: print("There are no queues to post to. Please create one before proceeding.") exit() if len(queues) == 1: queue = queues[0] print("Only one queue available; using '%s'." % queue.name) else: print("Queues:") for pos, queue in enumerate(queues): print("%s - %s" % (pos, queue.name)) snum = six.moves.input("Enter the number of the queue you wish to list " "messages from: ") if not snum: exit() try: num = int(snum) except ValueError: print("'%s' is not a valid number." % snum) exit() if not 0 <= num < len(queues): print("'%s' is not a valid queue number." % snum) exit() queue = queues[num] echo = claimed = False secho = six.moves.input("Do you want to include your own messages? [y/N]") if secho: echo = secho in ("Yy") sclaimed = six.moves.input("Do you want to include claimed messages? 
[y/N]") if sclaimed: claimed = sclaimed in ("Yy") msgs = pq.list_messages(queue, echo=echo, include_claimed=claimed) if not msgs: print("There are no messages available in this queue.") exit() for msg in msgs: print("ID:", msg.id) print("Age:", msg.age) print("TTL:", msg.ttl) print("Claim ID:", msg.claim_id) print("Body:", msg.body) print()
apache-2.0
669,154,186,075,380,400
29.851351
81
0.655716
false
3.501534
false
false
false
kenshay/ImageScripter
ProgramData/Android/ADB/platform-tools/systrace/catapult/telemetry/telemetry/internal/backends/chrome_inspector/inspector_console.py
6
2034
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import StringIO from telemetry.internal.backends.chrome_inspector import websocket class InspectorConsole(object): def __init__(self, inspector_websocket): self._inspector_websocket = inspector_websocket self._inspector_websocket.RegisterDomain('Console', self._OnNotification) self._message_output_stream = None self._last_message = None self._console_enabled = False def _OnNotification(self, msg): if msg['method'] == 'Console.messageAdded': assert self._message_output_stream if msg['params']['message']['url'] == 'chrome://newtab/': return self._last_message = '(%s) %s:%i: %s' % ( msg['params']['message']['level'], msg['params']['message']['url'], msg['params']['message']['line'], msg['params']['message']['text']) self._message_output_stream.write( '%s\n' % self._last_message) elif msg['method'] == 'Console.messageRepeatCountUpdated': if self._message_output_stream: self._message_output_stream.write( '%s\n' % self._last_message) def GetCurrentConsoleOutputBuffer(self, timeout=10): self._message_output_stream = StringIO.StringIO() self._EnableConsoleOutputStream(timeout) try: self._inspector_websocket.DispatchNotifications(timeout) return self._message_output_stream.getvalue() except websocket.WebSocketTimeoutException: return self._message_output_stream.getvalue() finally: self._DisableConsoleOutputStream(timeout) self._message_output_stream.close() self._message_output_stream = None def _EnableConsoleOutputStream(self, timeout): self._inspector_websocket.SyncRequest({'method': 'Console.enable'}, timeout) def _DisableConsoleOutputStream(self, timeout): self._inspector_websocket.SyncRequest( {'method': 'Console.disable'}, timeout)
gpl-3.0
-497,377,246,031,039,600
36.666667
80
0.682891
false
4.168033
false
false
false
s20121035/rk3288_android5.1_repo
external/lldb/test/python_api/function_symbol/TestDisasmAPI.py
2
5025
""" Test retrieval of SBAddress from function/symbol, disassembly, and SBAddress APIs. """ import os, time import re import unittest2 import lldb, lldbutil from lldbtest import * class DisasmAPITestCase(TestBase): mydir = os.path.join("python_api", "function_symbol") @unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin") @python_api_test @dsym_test def test_with_dsym(self): """Exercise getting SBAddress objects, disassembly, and SBAddress APIs.""" self.buildDsym() self.disasm_and_address_api() @python_api_test @dwarf_test def test_with_dwarf(self): """Exercise getting SBAddress objects, disassembly, and SBAddress APIs.""" self.buildDwarf() self.disasm_and_address_api() def setUp(self): # Call super's setUp(). TestBase.setUp(self) # Find the line number to of function 'c'. self.line1 = line_number('main.c', '// Find the line number for breakpoint 1 here.') self.line2 = line_number('main.c', '// Find the line number for breakpoint 2 here.') def disasm_and_address_api(self): """Exercise getting SBAddress objects, disassembly, and SBAddress APIs.""" exe = os.path.join(os.getcwd(), "a.out") # Create a target by the debugger. target = self.dbg.CreateTarget(exe) self.assertTrue(target, VALID_TARGET) # Now create the two breakpoints inside function 'a'. breakpoint1 = target.BreakpointCreateByLocation('main.c', self.line1) breakpoint2 = target.BreakpointCreateByLocation('main.c', self.line2) #print "breakpoint1:", breakpoint1 #print "breakpoint2:", breakpoint2 self.assertTrue(breakpoint1 and breakpoint1.GetNumLocations() == 1, VALID_BREAKPOINT) self.assertTrue(breakpoint2 and breakpoint2.GetNumLocations() == 1, VALID_BREAKPOINT) # Now launch the process, and do not stop at entry point. process = target.LaunchSimple(None, None, os.getcwd()) self.assertTrue(process, PROCESS_IS_VALID) # Frame #0 should be on self.line1. 
self.assertTrue(process.GetState() == lldb.eStateStopped) thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonBreakpoint) self.assertTrue(thread.IsValid(), "There should be a thread stopped due to breakpoint condition") frame0 = thread.GetFrameAtIndex(0) lineEntry = frame0.GetLineEntry() self.assertTrue(lineEntry.GetLine() == self.line1) address1 = lineEntry.GetStartAddress() #print "address1:", address1 # Now call SBTarget.ResolveSymbolContextForAddress() with address1. context1 = target.ResolveSymbolContextForAddress(address1, lldb.eSymbolContextEverything) self.assertTrue(context1) if self.TraceOn(): print "context1:", context1 # Continue the inferior, the breakpoint 2 should be hit. process.Continue() self.assertTrue(process.GetState() == lldb.eStateStopped) thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonBreakpoint) self.assertTrue(thread.IsValid(), "There should be a thread stopped due to breakpoint condition") frame0 = thread.GetFrameAtIndex(0) lineEntry = frame0.GetLineEntry() self.assertTrue(lineEntry.GetLine() == self.line2) # Verify that the symbol and the function has the same address range per function 'a'. 
symbol = context1.GetSymbol() function = frame0.GetFunction() self.assertTrue(symbol and function) disasm_output = lldbutil.disassemble(target, symbol) if self.TraceOn(): print "symbol:", symbol print "disassembly=>\n", disasm_output disasm_output = lldbutil.disassemble(target, function) if self.TraceOn(): print "function:", function print "disassembly=>\n", disasm_output sa1 = symbol.GetStartAddress() #print "sa1:", sa1 #print "sa1.GetFileAddress():", hex(sa1.GetFileAddress()) #ea1 = symbol.GetEndAddress() #print "ea1:", ea1 sa2 = function.GetStartAddress() #print "sa2:", sa2 #print "sa2.GetFileAddress():", hex(sa2.GetFileAddress()) #ea2 = function.GetEndAddress() #print "ea2:", ea2 self.assertTrue(sa1 and sa2 and sa1 == sa2, "The two starting addresses should be the same") from lldbutil import get_description desc1 = get_description(sa1) desc2 = get_description(sa2) self.assertTrue(desc1 and desc2 and desc1 == desc2, "SBAddress.GetDescription() API of sa1 and sa2 should return the same string") if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
gpl-3.0
-6,948,422,901,882,527,000
38.566929
105
0.639403
false
3.956693
true
false
false
ericholscher/fabric
tests/test_network.py
1
12649
from __future__ import with_statement from datetime import datetime import copy import getpass import sys import paramiko from nose.tools import with_setup from fudge import (Fake, clear_calls, clear_expectations, patch_object, verify, with_patched_object, patched_context, with_fakes) from fabric.context_managers import settings, hide, show from fabric.network import (HostConnectionCache, join_host_strings, normalize, denormalize) from fabric.io import output_loop import fabric.network # So I can call patch_object correctly. Sigh. from fabric.state import env, output, _get_system_username from fabric.operations import run, sudo from utils import * from server import (server, PORT, RESPONSES, PASSWORDS, CLIENT_PRIVKEY, USER, CLIENT_PRIVKEY_PASSPHRASE) # # Subroutines, e.g. host string normalization # class TestNetwork(FabricTest): def test_host_string_normalization(self): username = _get_system_username() for description, input, output_ in ( ("Sanity check: equal strings remain equal", 'localhost', 'localhost'), ("Empty username is same as get_system_username", 'localhost', username + '@localhost'), ("Empty port is same as port 22", 'localhost', 'localhost:22'), ("Both username and port tested at once, for kicks", 'localhost', username + '@localhost:22'), ): eq_.description = "Host-string normalization: %s" % description yield eq_, normalize(input), normalize(output_) del eq_.description def test_normalization_without_port(self): """ normalize() and join_host_strings() omit port if omit_port given """ eq_( join_host_strings(*normalize('user@localhost', omit_port=True)), 'user@localhost' ) def test_nonword_character_in_username(self): """ normalize() will accept non-word characters in the username part """ eq_( normalize('[email protected]')[0], 'user-with-hyphens' ) def test_normalization_of_empty_input(self): empties = ('', '', '') for description, input in ( ("empty string", ''), ("None", None) ): template = "normalize() returns empty strings for %s input" 
eq_.description = template % description yield eq_, normalize(input), empties del eq_.description def test_host_string_denormalization(self): username = _get_system_username() for description, string1, string2 in ( ("Sanity check: equal strings remain equal", 'localhost', 'localhost'), ("Empty username is same as get_system_username", 'localhost:22', username + '@localhost:22'), ("Empty port is same as port 22", 'user@localhost', 'user@localhost:22'), ("Both username and port", 'localhost', username + '@localhost:22'), ): eq_.description = "Host-string denormalization: %s" % description yield eq_, denormalize(string1), denormalize(string2) del eq_.description # # Connection caching # @staticmethod @with_fakes def check_connection_calls(host_strings, num_calls): # Clear Fudge call stack # Patch connect() with Fake obj set to expect num_calls calls patched_connect = patch_object('fabric.network', 'connect', Fake('connect', expect_call=True).times_called(num_calls) ) try: # Make new cache object cache = HostConnectionCache() # Connect to all connection strings for host_string in host_strings: # Obtain connection from cache, potentially calling connect() cache[host_string] finally: # Restore connect() patched_connect.restore() def test_connection_caching(self): for description, host_strings, num_calls in ( ("Two different host names, two connections", ('localhost', 'other-system'), 2), ("Same host twice, one connection", ('localhost', 'localhost'), 1), ("Same host twice, different ports, two connections", ('localhost:22', 'localhost:222'), 2), ("Same host twice, different users, two connections", ('user1@localhost', 'user2@localhost'), 2), ): TestNetwork.check_connection_calls.description = description yield TestNetwork.check_connection_calls, host_strings, num_calls # # Connection loop flow # @server() def test_saved_authentication_returns_client_object(self): cache = HostConnectionCache() assert isinstance(cache[env.host_string], paramiko.SSHClient) @server() 
@with_fakes def test_prompts_for_password_without_good_authentication(self): env.password = None with password_response(PASSWORDS[env.user], times_called=1): cache = HostConnectionCache() cache[env.host_string] @mock_streams('stdout') @server() def test_trailing_newline_line_drop(self): """ Trailing newlines shouldn't cause last line to be dropped. """ # Multiline output with trailing newline cmd = "ls /" output_string = RESPONSES[cmd] # TODO: fix below lines, duplicates inner workings of tested code prefix = "[%s] out: " % env.host_string expected = prefix + ('\n' + prefix).join(output_string.split('\n')) # Create, tie off thread with settings(show('everything'), hide('running')): result = run(cmd) # Test equivalence of expected, received output eq_(expected, sys.stdout.getvalue()) # Also test that the captured value matches, too. eq_(output_string, result) @server() def test_sudo_prompt_kills_capturing(self): """ Sudo prompts shouldn't screw up output capturing """ cmd = "ls /simple" with hide('everything'): eq_(sudo(cmd), RESPONSES[cmd]) @server() def test_password_memory_on_user_switch(self): """ Switching users mid-session should not screw up password memory """ def _to_user(user): return join_host_strings(user, env.host, env.port) user1 = 'root' user2 = USER with settings(hide('everything'), password=None): # Connect as user1 (thus populating both the fallback and # user-specific caches) with settings( password_response(PASSWORDS[user1]), host_string=_to_user(user1) ): run("ls /simple") # Connect as user2: * First cxn attempt will use fallback cache, # which contains user1's password, and thus fail * Second cxn # attempt will prompt user, and succeed due to mocked p4p * but # will NOT overwrite fallback cache with settings( password_response(PASSWORDS[user2]), host_string=_to_user(user2) ): # Just to trigger connection run("ls /simple") # * Sudo call should use cached user2 password, NOT fallback cache, # and thus succeed. (I.e. 
p_f_p should NOT be called here.) with settings( password_response('whatever', times_called=0), host_string=_to_user(user2) ): sudo("ls /simple") @mock_streams('stderr') @server() def test_password_prompt_displays_host_string(self): """ Password prompt lines should include the user/host in question """ env.password = None env.no_agent = env.no_keys = True output.everything = False with password_response(PASSWORDS[env.user], silent=False): run("ls /simple") regex = r'^\[%s\] Login password: ' % env.host_string assert_contains(regex, sys.stderr.getvalue()) @mock_streams('stderr') @server(pubkeys=True) def test_passphrase_prompt_displays_host_string(self): """ Passphrase prompt lines should include the user/host in question """ env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY output.everything = False with password_response(CLIENT_PRIVKEY_PASSPHRASE, silent=False): run("ls /simple") regex = r'^\[%s\] Login password: ' % env.host_string assert_contains(regex, sys.stderr.getvalue()) def test_sudo_prompt_display_passthrough(self): """ Sudo prompt should display (via passthrough) when stdout/stderr shown """ TestNetwork._prompt_display(True) def test_sudo_prompt_display_directly(self): """ Sudo prompt should display (manually) when stdout/stderr hidden """ TestNetwork._prompt_display(False) @staticmethod @mock_streams('both') @server(pubkeys=True, responses={'oneliner': 'result'}) def _prompt_display(display_output): env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY output.output = display_output with password_response( (CLIENT_PRIVKEY_PASSPHRASE, PASSWORDS[env.user]), silent=False ): sudo('oneliner') if display_output: expected = """ [%(prefix)s] sudo: oneliner [%(prefix)s] Login password: [%(prefix)s] out: sudo password: [%(prefix)s] out: Sorry, try again. 
[%(prefix)s] out: sudo password: [%(prefix)s] out: result """ % {'prefix': env.host_string} else: # Note lack of first sudo prompt (as it's autoresponded to) and of # course the actual result output. expected = """ [%(prefix)s] sudo: oneliner [%(prefix)s] Login password: [%(prefix)s] out: Sorry, try again. [%(prefix)s] out: sudo password: """ % {'prefix': env.host_string} eq_(expected[1:], sys.stdall.getvalue()) @mock_streams('both') @server( pubkeys=True, responses={'oneliner': 'result', 'twoliner': 'result1\nresult2'} ) def test_consecutive_sudos_should_not_have_blank_line(self): """ Consecutive sudo() calls should not incur a blank line in-between """ env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY with password_response( (CLIENT_PRIVKEY_PASSPHRASE, PASSWORDS[USER]), silent=False ): sudo('oneliner') sudo('twoliner') expected = """ [%(prefix)s] sudo: oneliner [%(prefix)s] Login password: [%(prefix)s] out: sudo password: [%(prefix)s] out: Sorry, try again. [%(prefix)s] out: sudo password: [%(prefix)s] out: result [%(prefix)s] sudo: twoliner [%(prefix)s] out: sudo password: [%(prefix)s] out: result1 [%(prefix)s] out: result2 """ % {'prefix': env.host_string} eq_(expected[1:], sys.stdall.getvalue()) @mock_streams('both') @server(pubkeys=True, responses={'silent': '', 'normal': 'foo'}) def test_silent_commands_should_not_have_blank_line(self): """ Silent commands should not generate an extra trailing blank line After the move to interactive I/O, it was noticed that while run/sudo commands which had non-empty stdout worked normally (consecutive such commands were totally adjacent), those with no stdout (i.e. silent commands like ``test`` or ``mkdir``) resulted in spurious blank lines after the "run:" line. This looks quite ugly in real world scripts. 
""" env.password = None env.no_agent = env.no_keys = True env.key_filename = CLIENT_PRIVKEY with password_response(CLIENT_PRIVKEY_PASSPHRASE, silent=False): run('normal') run('silent') run('normal') with hide('everything'): run('normal') run('silent') expected = """ [%(prefix)s] run: normal [%(prefix)s] Login password: [%(prefix)s] out: foo [%(prefix)s] run: silent [%(prefix)s] run: normal [%(prefix)s] out: foo """ % {'prefix': env.host_string} eq_(expected[1:], sys.stdall.getvalue())
bsd-2-clause
-4,260,805,784,873,929,700
34.136111
79
0.59064
false
4.110822
true
false
false
laetrid/learning
First_course/ex3_1.py
1
1676
#!/usr/bin/env python ''' Learning Python Class#3 I. Create an IP address converter (dotted decimal to binary). This will be similar to what we did in class2 except: A. Make the IP address a command-line argument instead of prompting the user for it. ./binary_converter.py 10.88.17.23 B. Simplify the script logic by using the flow-control statements that we learned in this class. C. Zero-pad the digits such that the binary output is always 8-binary digits long. Strip off the leading '0b' characters. For example, OLD: 0b1010 NEW: 00001010 D. Print to standard output using a dotted binary format. For example, IP address Binary 10.88.17.23 00001010.01011000.00010001.00010111 Note, you will probably need to use a 'while' loop and a 'break' statement for part C. while True: ... break # on some condition (exit the while loop) Python will execute this loop again and again until the 'break' is encountered. ''' from sys import argv if len(argv) != 2: exit("\tYou should pass one argument for this script.\n\tExample: ./test3_1.py <IP address>") ip_addr = argv[1] formatter = "%-20s%-60s" column1 = "IP address" column2 = "Binary" octets = ip_addr.split('.') ip_addr_bin = [] if len(octets) != 4: exit("Invalid IP address entered") for octet in octets: octet = bin(int(octet)) octet = octet[2:] octet = "0" * (8 - len(octet)) + octet ip_addr_bin.append(octet) ip_addr_bin = '.'.join(ip_addr_bin) print "=" * 80 print formatter % (column1, column2) print formatter % (ip_addr, ip_addr_bin) print "=" * 80 # The END
apache-2.0
-7,259,612,408,708,792,000
28.403509
95
0.656921
false
3.365462
false
false
false
davzhang/helix-python-binding
org/apache/helix/model/Message.py
1
16701
# package org.apache.helix.model #from org.apache.helix.model import * #from java.util import ArrayList #from java.util import Collections #from java.util import Comparator #from java.util import Date #from java.util import List #from java.util import Map #from java.util import UUID from org.apache.helix.HelixException import HelixException from org.apache.helix.HelixProperty import HelixProperty from org.apache.helix.InstanceType import InstanceType #from org.apache.helix.PropertyKey import PropertyKey #from org.apache.helix.PropertyKey import Builder from org.apache.helix.ZNRecord import ZNRecord from org.apache.helix.util.UserExceptions import IllegalArgumentException from org.apache.helix.util.misc import enum import time, uuid MessageType = enum('STATE_TRANSITION', 'SCHEDULER_MSG', 'USER_DEFINE_MSG', 'CONTROLLER_MSG', 'TASK_REPLY', 'NO_OP', 'PARTICIPANT_ERROR_REPORT') Attributes = enum('MSG_ID, SRC_SESSION_ID', 'TGT_SESSION_ID', 'SRC_NAME', 'TGT_NAME', 'SRC_INSTANCE_TYPE', 'MSG_STATE', 'PARTITION_NAME', 'RESOURCE_NAME', 'FROM_STATE', 'TO_STATE', 'STATE_MODEL_DEF', 'CREATE_TIMESTAMP', 'READ_TIMESTAMP', 'EXECUTE_START_TIMESTAMP', 'MSG_TYPE', 'MSG_SUBTYPE', 'CORRELATION_ID', 'MESSAGE_RESULT', 'EXE_SESSION_ID', 'TIMEOUT', 'RETRY_COUNT', 'STATE_MODEL_FACTORY_NAME', 'BUCKET_SIZE', 'PARENT_MSG_ID') MessageState =enum('NEW', 'READ', 'UNPROCESSABLE') class Message(HelixProperty): # Attributes = enum('MSG_ID, SRC_SESSION_ID', 'TGT_SESSION_ID', 'SRC_NAME', 'TGT_NAME', 'SRC_INSTANCE_TYPE', 'MSG_STATE', # 'PARTITION_NAME', 'RESOURCE_NAME', 'FROM_STATE', 'TO_STATE', 'STATE_MODEL_DEF', # 'CREATE_TIMESTAMP', 'READ_TIMESTAMP', 'EXECUTE_START_TIMESTAMP', # 'MSG_TYPE', 'MSG_SUBTYPE', 'CORRELATION_ID', 'MESSAGE_RESULT', # 'EXE_SESSION_ID', 'TIMEOUT', 'RETRY_COUNT', 'STATE_MODEL_FACTORY_NAME', 'BUCKET_SIZE', 'PARENT_MSG_ID') """ Java modifiers: final static Type: Comparator<Message> """ @staticmethod def compare(m1, m2): """ Returns int Parameters: m1: Messagem2: Message @Override 
""" return int(long(m1.getCreateTimeStamp()) - long(m2.getCreateTimeStamp())) CREATE_TIME_COMPARATOR = compare def __init__(self, *args): if len(args) == 2 and (isinstance(args[1], str) or isinstance(args[1], unicode)): self.__init__type_msgId(*args) elif len(args) == 1 and isinstance(args[0], ZNRecord): self.__init__record(*args) elif len(args) == 2 and isinstance(args[0], ZNRecord): self.__init__record_id(*args) else: raise IllegalArgumentException("Input arguments not supported. args = %s" % list(args)) """ Parameters: MessageType type String msgId """ def __init__type_msgId(self, type, msgId): super(Message,self).__init__(msgId) self._record.setSimpleField("MSG_TYPE", MessageType.toString(type)) self.setMsgId(msgId) self.setMsgState(MessageState.NEW) self._record.setSimpleField('CREATE_TIMESTAMP', time.time()) """ Parameters: ZNRecord record """ def __init__record(self, record): super(Message,self).__init__(record) if self.getMsgState() == None: self.setMsgState(MessageState.NEW) if self.getCreateTimeStamp() == 0: self._record.setSimpleField("CREATE_TIMESTAMP", "" + time.time()) def setCreateTimeStamp(self, timestamp): """ Returns void Parameters: timestamp: long """ self._record.setSimpleField("CREATE_TIMESTAMP", "" + timestamp) """ Parameters: ZNRecord record String id """ def __init__record_id(self, record, id): super(Message,self).__init__(ZNRecord(record,id)) # super(ZNRecord(record, id)) self.setMsgId(id) def setMsgSubType(self, subType): """ Returns void Parameters: subType: String """ self._record.setSimpleField("MSG_SUBTYPE", subType) def getMsgSubType(self): """ Returns String """ return self._record.getSimpleField("MSG_SUBTYPE") def setMsgType(self, type): """ Returns void Parameters: type: MessageType """ self._record.setSimpleField("MSG_TYPE", type.toString()) def getMsgType(self): """ Returns String """ return self._record.getSimpleField("MSG_TYPE") def getTgtSessionId(self): """ Returns String """ return 
self._record.getSimpleField("TGT_SESSION_ID") def setTgtSessionId(self, tgtSessionId): """ Returns void Parameters: tgtSessionId: String """ self._record.setSimpleField("TGT_SESSION_ID", tgtSessionId) def getSrcSessionId(self): """ Returns String """ return self._record.getSimpleField("SRC_SESSION_ID") def setSrcSessionId(self, srcSessionId): """ Returns void Parameters: srcSessionId: String """ self._record.setSimpleField("SRC_SESSION_ID", srcSessionId) def getExecutionSessionId(self): """ Returns String """ return self._record.getSimpleField("EXE_SESSION_ID") def setExecuteSessionId(self, exeSessionId): """ Returns void Parameters: exeSessionId: String """ self._record.setSimpleField("EXE_SESSION_ID", exeSessionId) def getMsgSrc(self): """ Returns String """ return self._record.getSimpleField("SRC_NAME") def setSrcInstanceType(self, type): """ Returns void Parameters: type: InstanceType """ self._record.setSimpleField("SRC_INSTANCE_TYPE", type.toString()) def getSrcInstanceType(self): """ Returns InstanceType """ if self._record.getSimpleFields().containsKey("SRC_INSTANCE_TYPE"): return InstanceType.valueOf(self._record.getSimpleField("SRC_INSTANCE_TYPE")) return InstanceType.PARTICIPANT def setSrcName(self, msgSrc): """ Returns void Parameters: msgSrc: String """ self._record.setSimpleField("SRC_NAME", msgSrc) def getTgtName(self): """ Returns String """ return self._record.getSimpleField("TGT_NAME") def setMsgState(self, msgState): """ Returns void Parameters: msgState: MessageState """ self._record.setSimpleField("MSG_STATE", MessageState.toString(msgState).lower()) def getMsgState(self): """ Returns MessageState """ return getattr(MessageState, self._record.getSimpleField("MSG_STATE").upper()) def setPartitionName(self, partitionName): """ Returns void Parameters: partitionName: String """ self._record.setSimpleField("PARTITION_NAME", partitionName) def getMsgId(self): """ Returns String """ return self._record.getSimpleField("MSG_ID") def setMsgId(self, 
msgId): """ Returns void Parameters: msgId: String """ self._record.setSimpleField("MSG_ID", msgId) def setFromState(self, state): """ Returns void Parameters: state: String """ self._record.setSimpleField("FROM_STATE", state) def getFromState(self): """ Returns String """ return self._record.getSimpleField("FROM_STATE") def setToState(self, state): """ Returns void Parameters: state: String """ self._record.setSimpleField("TO_STATE", state) def getToState(self): """ Returns String """ return self._record.getSimpleField("TO_STATE") def setTgtName(self, msgTgt): """ Returns void Parameters: msgTgt: String """ self._record.setSimpleField("TGT_NAME", msgTgt) def getDebug(self): """ Returns Boolean """ return False def getGeneration(self): """ Returns Integer """ return 1 def setResourceName(self, resourceName): """ Returns void Parameters: resourceName: String """ self._record.setSimpleField("RESOURCE_NAME", resourceName) def getResourceName(self): """ Returns String """ return self._record.getSimpleField("RESOURCE_NAME") def getPartitionName(self): """ Returns String """ return self._record.getSimpleField("PARTITION_NAME") def getStateModelDef(self): """ Returns String """ return self._record.getSimpleField("STATE_MODEL_DEF") def setStateModelDef(self, stateModelDefName): """ Returns void Parameters: stateModelDefName: String """ self._record.setSimpleField("STATE_MODEL_DEF", stateModelDefName) def setReadTimeStamp(self, time): """ Returns void Parameters: time: long """ self._record.setSimpleField("READ_TIMESTAMP", "" + str(time)) def setExecuteStartTimeStamp(self, time): """ Returns void Parameters: time: long """ self._record.setSimpleField("EXECUTE_START_TIMESTAMP", "" + str(time)) def getReadTimeStamp(self): """ Returns long """ # String timestamp = self._record.getSimpleField("READ_TIMESTAMP") if timestamp == None: return 0 else: return timestamp def getExecuteStartTimeStamp(self): """ Returns long """ # String timestamp = 
self._record.getSimpleField("EXECUTE_START_TIMESTAMP") if timestamp == None: return 0 else: return timestamp def getCreateTimeStamp(self): """ Returns long """ timestamp = self._record.getSimpleField("CREATE_TIMESTAMP") if timestamp == None: return 0 else: return timestamp def setCorrelationId(self, correlationId): """ Returns void Parameters: correlationId: String """ self._record.setSimpleField("CORRELATION_ID", correlationId) def getCorrelationId(self): """ Returns String """ return self._record.getSimpleField("CORRELATION_ID") def getExecutionTimeout(self): """ Returns int """ if not "TIMEOUT" in self._record.getSimpleFields(): return -1 return self._record.getSimpleField("TIMEOUT") def setExecutionTimeout(self, timeout): """ Returns void Parameters: timeout: int """ self._record.setSimpleField("TIMEOUT", "" + str(timeout)) def setRetryCount(self, retryCount): """ Returns void Parameters: retryCount: int """ self._record.setSimpleField("RETRY_COUNT", "" + str(retryCount)) def getRetryCount(self): """ Returns int """ return self._record.getSimpleField("RETRY_COUNT") def getResultMap(self): """ Returns Map<String, String> """ return self._record.getMapField("MESSAGE_RESULT") def setResultMap(self, resultMap): """ Returns void Parameters: resultMap: Map<String, String> """ self._record.setMapField("MESSAGE_RESULT", resultMap) def getStateModelFactoryName(self): """ Returns String """ return self._record.getSimpleField("STATE_MODEL_FACTORY_NAME") def setStateModelFactoryName(self, factoryName): """ Returns void Parameters: factoryName: String """ self._record.setSimpleField("STATE_MODEL_FACTORY_NAME", factoryName) def getBucketSize(self): """ Returns int @Override """ # String bucketSizeStr = self._record.getSimpleField("BUCKET_SIZE") # int bucketSize = 0 if bucketSizeStr != None: try: bucketSize = int(bucketSizeStr) except ValueError, e: pass return bucketSize def setBucketSize(self, bucketSize): """ Returns void Parameters: bucketSize: int @Override """ if 
bucketSize > 0: self._record.setSimpleField("BUCKET_SIZE", "" + str(bucketSize)) def setAttribute(self, attr, val): """ Returns void Parameters: attr: Attributesval: String """ self._record.setSimpleField(attr.toString(), val) def getAttribute(self, attr): """ Returns String Parameters: attr: Attributes """ return self._record.getSimpleField(attr.toString()) def createReplyMessage(srcMessage, instanceName, taskResultMap): """ Returns Message Parameters: srcMessage: MessageinstanceName: StringtaskResultMap: Map<String, String> Java modifiers: static """ if srcMessage.getCorrelationId() == None: raise HelixException("Message " + srcMessage.getMsgId() + " does not contain correlation id") # Message replyMessage = Message(MessageType.TASK_REPLY, str(uuid.uuid4())) replyMessage.setCorrelationId(srcMessage.getCorrelationId()) replyMessage.setResultMap(taskResultMap) replyMessage.setTgtSessionId("*") replyMessage.setMsgState(MessageState.NEW) replyMessage.setSrcName(instanceName) if srcMessage.getSrcInstanceType() == InstanceType.CONTROLLER: replyMessage.setTgtName("Controller") else: replyMessage.setTgtName(srcMessage.getMsgSrc()) return replyMessage def addPartitionName(self, partitionName): """ Returns void Parameters: partitionName: String """ if self._record.getListField("PARTITION_NAME") == None: self._record.setListField("PARTITION_NAME", []) # List<String> partitionNames = self._record.getListField("PARTITION_NAME") if not partitionNames.contains(partitionName): partitionNames.add(partitionName) def getPartitionNames(self): """ Returns List<String> """ # List<String> partitionNames = self._record.getListField("PARTITION_NAME") if partitionNames == None: return [] return partitionNames def isControlerMsg(self): """ Returns boolean """ return self.getTgtName().lower() == "controller" def getKey(self, keyBuilder, instanceName): """ Returns PropertyKey Parameters: keyBuilder: BuilderinstanceName: String """ if self.isControlerMsg(): return 
keyBuilder.controllerMessage(self.getId()) else: return keyBuilder.message(instanceName, self.getId()) def isNullOrEmpty(self, data): """ Returns boolean Parameters: data: String Java modifiers: private """ return data == None or len(data) == 0 or len(data.strip()) == 0 def isValid(self): """ Returns boolean @Override """ if (self.getMsgType() == MessageType.toString(MessageType.STATE_TRANSITION)): # boolean isNotValid = self.isNullOrEmpty(self.getTgtName()) or self.isNullOrEmpty(self.getPartitionName()) or self.isNullOrEmpty(self.getResourceName()) or self.isNullOrEmpty(self.getStateModelDef()) or self.isNullOrEmpty(self.getToState()) or self.isNullOrEmpty(self.getStateModelFactoryName()) or self.isNullOrEmpty(self.getFromState()) return not isNotValid return True
apache-2.0
1,717,234,319,124,727,300
20.549677
341
0.573618
false
4.282308
false
false
false
clayshieh/cal_hacks_2015
maps/views.py
1
2868
from django.shortcuts import render from django.core.urlresolvers import reverse from django.http import HttpResponse, HttpResponseRedirect # from userauth.forms import UserForm, UserProfileForm, ForgotForm from maps.models import Report, Route from django.contrib.auth import authenticate, login, logout from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User # Create your views here. def test(request): return HttpResponse('test') def index(request): if request.POST: info = request.POST.get('scoreArray') alat = info[0] alng = info[1] blat = info[2] blng = info[3] qual = request.POST.get('quality') try: rpt = Report() rpt.quality = qual rpt.desc = "" rpt.lat = float(lat) rpt.lng = float(lng) rpt.save() except: return HttpResponse('something went wrong') else: try: route = Route.objects.filter(a_lat=alat).filter(a_lng=alng).filter(b_lat=blat).filter(blng) except: pass return render(request, 'index.html', {}) def slow(request): return render(request, 'index_slow.html', {}) def get_slow(request): if request.POST: alat = request.POST.get("lat1") alng = request.POST.get("lng1") blat = request.POST.get("lat2") blng = request.POST.get("lng2") print alat, alng, blat, blng try: print Route.objects.all() route = Route.objects.filter(a_lat=float(alat)).filter(a_lng=float(alng)).filter(b_lat=float(blat)).filter(b_lng=float(blng)) return HttpResponse(route[0].avg) except Exception as e: return HttpResponse(-1) else: print "2" return HttpResponse(-1) def report(request): if request.POST: a = request.POST.getlist("a[]") b = request.POST.getlist("b[]") alat = a[0] alng = a[1] blat = b[0] blng = b[1] qual = request.POST.get('rating') try: route = Route.objects.filter(a_lat=float(alat)).filter(a_lng=float(alng)).filter(b_lat=float(blat)).filter(b_lng=float(blng)) if len(route) == 0: route = Route() route.a_lat = alat route.a_lng = alng route.b_lat = blat route.b_lng = blng route.avg = qual route.save() return HttpResponse("created") else: 
route = route[0] if route != None or route != "null": route.avg -= route.avg / 10 route.avg += float(qual) / 10 route.save() return HttpResponse("updated route") except: return HttpResponse(-1) def get(request): if request.POST: result = [] array = request.POST.getlist("dict[]") x = 0 while x < len(array): alat = array[x] alng = array[x+1] blat = array[x+2] blng = array[x+3] x+=4 route = Route.objects.filter(a_lat=float(alat)).filter(a_lng=float(alng)).filter(b_lat=float(blat)).filter(b_lng=float(blng)) if len(route)==0: result.append(-1) else: result.append(route[0].avg) return HttpResponse(str(result)) else: return HttpResponse(-1)
gpl-2.0
2,449,341,097,109,716,000
26.056604
128
0.668759
false
2.806262
false
false
false
MindPass/Code
Interface_graphique/mindmap/svgwrite-1.1.6/examples/linearGradient.py
2
2959
#!/usr/bin/env python #coding:utf-8 # Author: mozman # Purpose: svg examples # Created: 08.09.2010 # Copyright (C) 2010, Manfred Moitzi # License: MIT License try: import svgwrite except ImportError: # if svgwrite is not 'installed' append parent dir of __file__ to sys.path import sys, os sys.path.insert(0, os.path.abspath(os.path.split(os.path.abspath(__file__))[0]+'/..')) import svgwrite def linearGradient(name): dwg = svgwrite.Drawing(name, size=('20cm', '15cm'), profile='full', debug=True) # set user coordinate space dwg.viewbox(width=200, height=150) # create a new linearGradient element horizontal_gradient = dwg.linearGradient((0, 0), (1, 0)) vertical_gradient = dwg.linearGradient((0, 0), (0, 1)) diagonal_gradient = dwg.linearGradient((0, 0), (1, 1)) tricolor_gradient = dwg.linearGradient((0, 0), (1, 1)) # add gradient to the defs section of the drawing dwg.defs.add(horizontal_gradient) dwg.defs.add(vertical_gradient) dwg.defs.add(diagonal_gradient) dwg.defs.add(tricolor_gradient) # define the gradient from white to red horizontal_gradient.add_stop_color(0, 'white') horizontal_gradient.add_stop_color(1, 'red') # define the gradient from white to green vertical_gradient.add_stop_color(0, 'white') vertical_gradient.add_stop_color(1, 'green') # define the gradient from white to blue diagonal_gradient.add_stop_color(0, 'white') diagonal_gradient.add_stop_color(1, 'blue') # define the gradient from white to red to green to blue tricolor_gradient.add_stop_color(0, 'white') tricolor_gradient.add_stop_color(.33, 'red') tricolor_gradient.add_stop_color(.66, 'green') tricolor_gradient.add_stop_color(1, 'blue') # use gradient for filling the rect dwg.add(dwg.rect((10,10), (50,50), fill=horizontal_gradient.get_paint_server(default='currentColor'))) dwg.add(dwg.rect((70,10), (50,50), fill=vertical_gradient.get_paint_server(default='currentColor'))) dwg.add(dwg.rect((130,10), (50,50), fill=diagonal_gradient.get_paint_server(default='currentColor'))) 
dwg.add(dwg.rect((10,70), (50,50), fill=tricolor_gradient.get_paint_server(default='currentColor'))) # rotate gradient about 90 degree # first copy gradient tricolor2_gradient = tricolor_gradient.copy() # rotate the gradient tricolor2_gradient.rotate(90, (.5, .5)) # add gradient to the defs section of the drawing dwg.defs.add(tricolor2_gradient) # use the gradient dwg.add(dwg.rect((70,70), (50,50), fill=tricolor2_gradient.get_paint_server(default='currentColor'))) updown = dwg.linearGradient() dwg.defs.add(updown) updown.add_colors(['red', 'white', 'red', 'white', 'red'], sweep=(.2, .8)) dwg.add(dwg.rect((130,70), (50,50), fill=updown.get_paint_server(default='currentColor'))) dwg.save() if __name__ == '__main__': linearGradient("linearGradient.svg")
gpl-3.0
8,424,221,764,480,674,000
35.9875
106
0.680297
false
3.168094
false
false
false
nirzari18/Query-Analysis-Application-on-Google-App-Engine
main.py
1
5291
#NAME: NIRZARI IYER #Assignment-2 #ID NUMBER: 1001117633 #BATCH TIME- 6:00 to 8:00 p.m. import MySQLdb import io import os import cloudstorage as gcs import csv import timeit from bottle import Bottle from google.appengine.api import app_identity from StringIO import StringIO from bottle import route, request, response, template bottle = Bottle() #location of file into default bucket on google cloud storage bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name()) bucket = '/' + bucket_name filename = bucket + '/earthquake.csv' #Get filename from user @bottle.route('/uploadform') def uploadform(): return template('upload_form') #Upload file into bucket on google cloud storage @bottle.route('/uploadfile', method='POST') def uploadfile(): start = timeit.default_timer() filecontent = request.files.get('filecontent') rawfilecontent = filecontent.file.read() write_retry_params = gcs.RetryParams(backoff_factor=1.1) gcs_file = gcs.open(filename,'w',content_type='text/plain',retry_params=write_retry_params) gcs_file.write(rawfilecontent) gcs_file.close() stop = timeit.default_timer() time_taken = stop - start return template('upload_file',time_taken=time_taken) #Read data from bucket and Insert data into google MySQLdb def parse(filename, delimiter,c): with gcs.open(filename, 'r') as gcs_file: csv_reader = csv.reader(StringIO(gcs_file.read()), delimiter=',', quotechar='"') # Skip the header line csv_reader.next() try: start = timeit.default_timer() for row in csv_reader: time = timestamp(row[0]) updated = timestamp(row[12]) for i in range (0,14): if row[i] == '': row[i] = "''" place = str(row[13]) place = place.replace("'","") insert = "INSERT INTO earthquake (time, latitude, longitude, depth, mag, magType, nst, gap, dmin, rms, net, id, updated,\ place, type) values('"+time+"',"+row[1]+","+row[2]+","+row[3]+","+row[4]+",'"+row[5]+"',"+row[6]+","+row[7]+",\ 
"+row[8]+","+row[9]+",'"+row[10]+"','"+row[11]+"','"+updated+"','"+place+"','"+row[14]+"')" c.execute(insert) stop = timeit.default_timer() insert_time = stop - start return insert_time except Exception as e: print ("Data can't be inserted" + str(e)) def timestamp(string): ans = string[:10] + ' ' + string[11:19] return ans def query(mag,c): query = 'SELECT week(time) as week, count(*) as count, mag as mag FROM earthquake WHERE mag = '+str(mag)+' GROUP BY week(time), mag' c.execute(query) ans_query = c.fetchall() return ans_query def bigquery(mag,c): query = 'SELECT week(time) as week, count(*) as count, mag as mag FROM earthquake WHERE mag > '+str(mag)+' GROUP BY week(time), mag' c.execute(query) ans_query = c.fetchall() return ans_query def ans_format(mag): table = "<table border='2'><tr><th>Week</th><th>Number of quakes</th><th>Magnitude</th></tr>" ans = "" for x in mag: ans = ans +"<tr><td>" + str(x[0]) + "</td><td>" + str(x[1]) + "</td><td>" + str(x[2]) +"</td></tr>" table += ans + "</table>" return table @bottle.route('/') def main(): try: connobj = MySQLdb.connect(unix_socket='/cloudsql/cloudcomp2-979:simple' ,user='root') c = connobj.cursor() createdb = 'CREATE DATABASE IF NOT EXISTS db' c.execute(createdb) connectdb = 'USE db' c.execute(connectdb) table = 'CREATE TABLE IF NOT EXISTS earthquake '\ '(time TIMESTAMP,'\ 'latitude DOUBLE,'\ 'longitude DOUBLE,'\ 'depth DOUBLE,'\ 'mag DOUBLE,'\ 'magType varchar(500),'\ 'nst DOUBLE,'\ 'gap DOUBLE,'\ 'dmin DOUBLE,'\ 'rms DOUBLE,'\ 'net varchar(500),'\ 'id varchar(500),'\ 'updated TIMESTAMP,'\ 'place VARCHAR(500),'\ 'type VARCHAR(500))' c.execute(table) c.execute("truncate table earthquake") insert_time = parse(filename,',',c) mag2 = query(2,c) mag3 = query(3,c) mag4 = query(4,c) mag5 = query(5,c) maggt5 = bigquery(5,c) ans_mag2 = ans_format(mag2) ans_mag3 = ans_format(mag3) ans_mag4 = ans_format(mag4) ans_mag5 = ans_format(mag5) ans_maggt5 = ans_format(maggt5) ans = "Final Result: <br><br> Time taken to Insert data 
into MySQL database is: <br>" +str(insert_time)+"<br><br>" \ "Earthquake of magnitude 2: <br> "+str(ans_mag2)+"<br><br> Earthquake of magnitude 3: <br>" \ +str(ans_mag3)+ "<br><br> Earthquake of magnitude 4: <br>" +str(ans_mag4)+ "<br><br> Earthquake" \ "of magnitude 5: <br>" +str(ans_mag5)+ "<br><br> Earthquake of magnitude greater than 5: <br>" +str(ans_maggt5) return ans except Exception as e: print str(e) return e # Define an handler for 404 errors. @bottle.error(404) def error_404(error): """Return a custom error 404.""" return 'Sorry, nothing at this URL.' # [END all]
apache-2.0
6,828,932,704,761,010,000
34.993197
137
0.585334
false
3.272109
false
false
false
rsepassi/tensor2tensor
tensor2tensor/bin/t2t_decoder.py
1
3766
# coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. r"""Decode from trained T2T models. This binary performs inference using the Estimator API. Example usage to decode from dataset: t2t-decoder \ --data_dir ~/data \ --problems=algorithmic_identity_binary40 \ --model=transformer --hparams_set=transformer_base Set FLAGS.decode_interactive or FLAGS.decode_from_file for alternative decode sources. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import os # Dependency imports from tensor2tensor.bin import t2t_trainer from tensor2tensor.utils import decoding from tensor2tensor.utils import trainer_lib from tensor2tensor.utils import usr_dir import tensorflow as tf flags = tf.flags FLAGS = flags.FLAGS # Additional flags in bin/t2t_trainer.py and utils/flags.py flags.DEFINE_string("checkpoint_path", None, "Path to the model checkpoint. 
Overrides output_dir.") flags.DEFINE_string("decode_from_file", None, "Path to the source file for decoding") flags.DEFINE_string("decode_to_file", None, "Path to the decoded (output) file") flags.DEFINE_bool("keep_timestamp", False, "Set the mtime of the decoded file to the " "checkpoint_path+'.index' mtime.") flags.DEFINE_bool("decode_interactive", False, "Interactive local inference mode.") flags.DEFINE_integer("decode_shards", 1, "Number of decoding replicas.") def create_hparams(): return trainer_lib.create_hparams( FLAGS.hparams_set, FLAGS.hparams, data_dir=os.path.expanduser(FLAGS.data_dir), problem_name=FLAGS.problems) def create_decode_hparams(): decode_hp = decoding.decode_hparams(FLAGS.decode_hparams) decode_hp.add_hparam("shards", FLAGS.decode_shards) decode_hp.add_hparam("shard_id", FLAGS.worker_id) return decode_hp def decode(estimator, hparams, decode_hp): if FLAGS.decode_interactive: decoding.decode_interactively(estimator, hparams, decode_hp) elif FLAGS.decode_from_file: decoding.decode_from_file(estimator, FLAGS.decode_from_file, hparams, decode_hp, FLAGS.decode_to_file, checkpoint_path=FLAGS.checkpoint_path) if FLAGS.checkpoint_path and FLAGS.keep_timestamp: ckpt_time = os.path.getmtime(FLAGS.checkpoint_path + ".index") os.utime(FLAGS.decode_to_file, (ckpt_time, ckpt_time)) else: decoding.decode_from_dataset( estimator, FLAGS.problems.split("-"), hparams, decode_hp, decode_to_file=FLAGS.decode_to_file, dataset_split="test" if FLAGS.eval_use_test_set else None) def main(_): tf.logging.set_verbosity(tf.logging.INFO) usr_dir.import_usr_dir(FLAGS.t2t_usr_dir) FLAGS.use_tpu = False # decoding not supported on TPU hp = create_hparams() decode_hp = create_decode_hparams() estimator = trainer_lib.create_estimator( FLAGS.model, hp, t2t_trainer.create_run_config(hp), decode_hparams=decode_hp, use_tpu=False) decode(estimator, hp, decode_hp) if __name__ == "__main__": tf.app.run()
apache-2.0
1,882,406,023,209,351,400
30.915254
77
0.692512
false
3.645692
false
false
false
gborri/SickRage
sickrage/providers/torrent/filelist.py
1
6225
# coding=utf-8 # # URL: https://sickrage.ca # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals import re from requests.compat import urljoin from requests.utils import dict_from_cookiejar import sickrage from sickrage.core.caches.tv_cache import TVCache from sickrage.core.helpers import bs4_parser, try_int, convert_size from sickrage.providers import TorrentProvider class FileListProvider(TorrentProvider): def __init__(self): super(FileListProvider, self).__init__('FileList', 'http://filelist.ro', True) # Credentials self.username = None self.password = None # Torrent Stats self.minseed = None self.minleech = None # URLs self.urls.update({ "login": "{base_url}/takelogin.php".format(**self.urls), "search": "{base_url}/browse.php".format(**self.urls), }) # Proper Strings self.proper_strings = ["PROPER", "REPACK"] # Cache self.cache = TVCache(self) def login(self): if any(dict_from_cookiejar(sickrage.app.wsession.cookies).values()): return True login_params = { "username": self.username, "password": self.password } try: response = sickrage.app.wsession.post(self.urls["login"], data=login_params).text except Exception: sickrage.app.log.warning("Unable to connect to provider") return False if re.search("Invalid Username/password", response) \ or re.search("<title>Login :: FileList.ro</title>", response) \ or re.search("Login esuat!", 
response): sickrage.app.log.warning("Invalid username or password. Check your settings") return False return True def search(self, search_strings, age=0, ep_obj=None): results = [] if not self.login(): return results # Search Params search_params = { "search": "", "cat": 0 } for mode in search_strings: sickrage.app.log.debug("Search Mode: {0}".format(mode)) for search_string in search_strings[mode]: if mode != "RSS": sickrage.app.log.debug("Search string: {}".format(search_string)) search_params["search"] = search_string search_url = self.urls["search"] try: data = sickrage.app.wsession.get(search_url, params=search_params).text results += self.parse(data, mode) except Exception: sickrage.app.log.debug("No data returned from provider") return results def parse(self, data, mode): """ Parse search results from data :param data: response data :param mode: search mode :return: search results """ results = [] with bs4_parser(data, "html5lib") as html: torrent_rows = html.find_all("div", class_="torrentrow") # Continue only if at least one Release is found if not torrent_rows: sickrage.app.log.debug("Data returned from provider does not contain any torrents") return results # "Type", "Name", "Download", "Files", "Comments", "Added", "Size", "Snatched", "Seeders", "Leechers", "Upped by" labels = [] columns = html.find_all("div", class_="colhead") for index, column in enumerate(columns): lbl = column.get_text(strip=True) if lbl: labels.append(str(lbl)) else: lbl = column.find("img") if lbl: if lbl.has_attr("alt"): lbl = lbl['alt'] labels.append(str(lbl)) else: if index == 3: lbl = "Download" else: lbl = str(index) labels.append(lbl) # Skip column headers for result in torrent_rows: try: cells = result.find_all("div", class_="torrenttable") if len(cells) < len(labels): continue title = cells[labels.index("Name")].find("a").find("b").get_text(strip=True) download_url = urljoin(self.urls['base_url'], cells[labels.index("Download")].find("a")["href"]) if not all([title, 
download_url]): continue seeders = try_int(cells[labels.index("Seeders")].find("span").get_text(strip=True)) leechers = try_int(cells[labels.index("Leechers")].find("span").get_text(strip=True)) torrent_size = cells[labels.index("Size")].find("span").get_text(strip=True) size = convert_size(torrent_size, -1) item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': None} if mode != "RSS": sickrage.app.log.debug("Found result: {}".format(title)) results.append(item) except Exception: sickrage.app.log.error("Failed parsing provider") return results
gpl-3.0
-1,395,110,156,816,859,100
34.375
125
0.540723
false
4.386892
false
false
false
mesuutt/snipper
setup.py
1
1413
import os from setuptools import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='snipper', version=__import__('snipper').__version__, url='https://github.com/mesuutt/snipper', author='Mesut Tasci', author_email='[email protected]', description=('A command-line tool to manage Bitbucket snippets.'), license='MIT', test_suite='tests', keywords="bitbucket snippet gist command-line cli", long_description=read('README.rst'), entry_points={ 'console_scripts': [ 'snipper = snipper.snipper:cli', ] }, packages=['snipper'], install_requires=[ 'requests>=2.12', 'click>=6.7', 'prompt_toolkit>=1.0', 'pyperclip>=1.5', ], classifiers=[ "Environment :: Console", 'License :: OSI Approved :: MIT License', 'Development Status :: 4 - Beta', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Intended Audience :: Developers', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', ], )
mit
3,697,633,336,880,219,000
29.717391
70
0.576079
false
3.892562
false
false
false
kbrebanov/ansible
lib/ansible/plugins/filter/cast_type.py
3
2207
# Author Ken Celenza <[email protected]> # Author Jason Edelman <[email protected]> # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.errors import AnsibleError, AnsibleFilterError def cast_list_to_dict(data, key): new_obj = {} if not isinstance(data, list): raise AnsibleFilterError("Type is not a valid list") for item in data: if not isinstance(item, dict): raise AnsibleFilterError("List item is not a valid dict") try: key_elem = item.get(key) except Exception as e: raise AnsibleFilterError(str(e)) if new_obj.get(key_elem): raise AnsibleFilterError("Key {0} is not unique, cannot correctly turn into dict".format(key_elem)) elif not key_elem: raise AnsibleFilterError("Key {0} was not found".format(key)) else: new_obj[key_elem] = item return new_obj def cast_dict_to_list(data, key_name): new_obj = [] if not isinstance(data, dict): raise AnsibleFilterError("Type is not a valid dict") for key, value in data.items(): if not isinstance(value, dict): raise AnsibleFilterError("Type of key {0} value {1} is not a valid dict".format(key, value)) if value.get(key_name): raise AnsibleFilterError("Key name {0} is already in use, cannot correctly turn into dict".format(key_name)) value[key_name] = key new_obj.append(value) return new_obj class FilterModule(object): '''Convert a list to a dictionary provided a key that exists in all dicts. If it does not, that dict is omitted ''' def filters(self): return { 'cast_list_to_dict': cast_list_to_dict, 'cast_dict_to_list': cast_dict_to_list, } if __name__ == "__main__": list_data = [{"proto": "eigrp", "state": "enabled"}, {"proto": "ospf", "state": "enabled"}] print(cast_list_to_dict(list_data, 'proto')) dict_data = {'eigrp': {'state': 'enabled', 'as': '1'}, 'ospf': {'state': 'enabled', 'as': '2'}} print(cast_dict_to_list(dict_data, 'proto'))
gpl-3.0
6,155,710,773,830,494,000
34.031746
120
0.617127
false
3.588618
false
false
false
MahjongRepository/mahjong
mahjong/hand_calculating/yaku_config.py
1
4030
# -*- coding: utf-8 -*- from itertools import count from mahjong.hand_calculating.yaku_list import ( AkaDora, Chankan, Chantai, Chiitoitsu, Chinitsu, Chun, DaburuOpenRiichi, DaburuRiichi, Dora, Haitei, Haku, Hatsu, Honitsu, Honroto, Houtei, Iipeiko, Ippatsu, Ittsu, Junchan, NagashiMangan, OpenRiichi, Pinfu, Renhou, Riichi, Rinshan, Ryanpeikou, Sanankou, SanKantsu, Sanshoku, SanshokuDoukou, Shosangen, Tanyao, Toitoi, Tsumo, YakuhaiEast, YakuhaiNorth, YakuhaiOfPlace, YakuhaiOfRound, YakuhaiSouth, YakuhaiWest, ) from mahjong.hand_calculating.yaku_list.yakuman import ( Chiihou, Chinroutou, ChuurenPoutou, DaburuChuurenPoutou, DaburuKokushiMusou, Daichisei, Daisangen, Daisharin, DaiSuushii, KokushiMusou, Paarenchan, RenhouYakuman, Ryuuiisou, Sashikomi, Shousuushii, Suuankou, SuuankouTanki, Suukantsu, Tenhou, Tsuuiisou, ) class YakuConfig(object): def __init__(self): id = count(0) # Yaku situations self.tsumo = Tsumo(next(id)) self.riichi = Riichi(next(id)) self.open_riichi = OpenRiichi(next(id)) self.ippatsu = Ippatsu(next(id)) self.chankan = Chankan(next(id)) self.rinshan = Rinshan(next(id)) self.haitei = Haitei(next(id)) self.houtei = Houtei(next(id)) self.daburu_riichi = DaburuRiichi(next(id)) self.daburu_open_riichi = DaburuOpenRiichi(next(id)) self.nagashi_mangan = NagashiMangan(next(id)) self.renhou = Renhou(next(id)) # Yaku 1 Han self.pinfu = Pinfu(next(id)) self.tanyao = Tanyao(next(id)) self.iipeiko = Iipeiko(next(id)) self.haku = Haku(next(id)) self.hatsu = Hatsu(next(id)) self.chun = Chun(next(id)) self.east = YakuhaiEast(next(id)) self.south = YakuhaiSouth(next(id)) self.west = YakuhaiWest(next(id)) self.north = YakuhaiNorth(next(id)) self.yakuhai_place = YakuhaiOfPlace(next(id)) self.yakuhai_round = YakuhaiOfRound(next(id)) # Yaku 2 Hans self.sanshoku = Sanshoku(next(id)) self.ittsu = Ittsu(next(id)) self.chantai = Chantai(next(id)) self.honroto = Honroto(next(id)) self.toitoi = Toitoi(next(id)) self.sanankou = Sanankou(next(id)) self.sankantsu = 
SanKantsu(next(id)) self.sanshoku_douko = SanshokuDoukou(next(id)) self.chiitoitsu = Chiitoitsu(next(id)) self.shosangen = Shosangen(next(id)) # Yaku 3 Hans self.honitsu = Honitsu(next(id)) self.junchan = Junchan(next(id)) self.ryanpeiko = Ryanpeikou(next(id)) # Yaku 6 Hans self.chinitsu = Chinitsu(next(id)) # Yakuman list self.kokushi = KokushiMusou(next(id)) self.chuuren_poutou = ChuurenPoutou(next(id)) self.suuankou = Suuankou(next(id)) self.daisangen = Daisangen(next(id)) self.shosuushi = Shousuushii(next(id)) self.ryuisou = Ryuuiisou(next(id)) self.suukantsu = Suukantsu(next(id)) self.tsuisou = Tsuuiisou(next(id)) self.chinroto = Chinroutou(next(id)) self.daisharin = Daisharin(next(id)) self.daichisei = Daichisei(next(id)) # Double yakuman self.daisuushi = DaiSuushii(next(id)) self.daburu_kokushi = DaburuKokushiMusou(next(id)) self.suuankou_tanki = SuuankouTanki(next(id)) self.daburu_chuuren_poutou = DaburuChuurenPoutou(next(id)) # Yakuman situations self.tenhou = Tenhou(next(id)) self.chiihou = Chiihou(next(id)) self.renhou_yakuman = RenhouYakuman(next(id)) self.sashikomi = Sashikomi(next(id)) self.paarenchan = Paarenchan(next(id)) # Other self.dora = Dora(next(id)) self.aka_dora = AkaDora(next(id))
mit
7,555,421,368,102,794,000
25.688742
66
0.604963
false
2.5
false
false
false
alphagov/notifications-api
tests/app/service/test_service_data_retention_rest.py
1
6703
import json import uuid from app.models import ServiceDataRetention from tests import create_authorization_header from tests.app.db import create_service_data_retention def test_get_service_data_retention(client, sample_service): sms_data_retention = create_service_data_retention(service=sample_service) email_data_retention = create_service_data_retention(service=sample_service, notification_type='email', days_of_retention=10) letter_data_retention = create_service_data_retention(service=sample_service, notification_type='letter', days_of_retention=30) response = client.get( '/service/{}/data-retention'.format(str(sample_service.id)), headers=[('Content-Type', 'application/json'), create_authorization_header()], ) assert response.status_code == 200 json_response = json.loads(response.get_data(as_text=True)) assert len(json_response) == 3 assert json_response[0] == email_data_retention.serialize() assert json_response[1] == sms_data_retention.serialize() assert json_response[2] == letter_data_retention.serialize() def test_get_service_data_retention_returns_empty_list(client, sample_service): response = client.get( '/service/{}/data-retention'.format(str(sample_service.id)), headers=[('Content-Type', 'application/json'), create_authorization_header()], ) assert response.status_code == 200 assert len(json.loads(response.get_data(as_text=True))) == 0 def test_get_data_retention_for_service_notification_type(client, sample_service): data_retention = create_service_data_retention(service=sample_service) response = client.get('/service/{}/data-retention/notification-type/{}'.format(sample_service.id, 'sms'), headers=[('Content-Type', 'application/json'), create_authorization_header()], ) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == data_retention.serialize() def test_get_service_data_retention_by_id(client, sample_service): sms_data_retention = create_service_data_retention(service=sample_service) 
create_service_data_retention(service=sample_service, notification_type='email', days_of_retention=10) create_service_data_retention(service=sample_service, notification_type='letter', days_of_retention=30) response = client.get( '/service/{}/data-retention/{}'.format(str(sample_service.id), sms_data_retention.id), headers=[('Content-Type', 'application/json'), create_authorization_header()], ) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == sms_data_retention.serialize() def test_get_service_data_retention_by_id_returns_none_when_no_data_retention_exists(client, sample_service): response = client.get( '/service/{}/data-retention/{}'.format(str(sample_service.id), uuid.uuid4()), headers=[('Content-Type', 'application/json'), create_authorization_header()], ) assert response.status_code == 200 assert json.loads(response.get_data(as_text=True)) == {} def test_create_service_data_retention(client, sample_service): data = { "notification_type": 'sms', "days_of_retention": 3 } response = client.post( '/service/{}/data-retention'.format(str(sample_service.id)), headers=[('Content-Type', 'application/json'), create_authorization_header()], data=json.dumps(data) ) assert response.status_code == 201 json_resp = json.loads(response.get_data(as_text=True))['result'] results = ServiceDataRetention.query.all() assert len(results) == 1 data_retention = results[0] assert json_resp == data_retention.serialize() def test_create_service_data_retention_returns_400_when_notification_type_is_invalid(client): data = { "notification_type": 'unknown', "days_of_retention": 3 } response = client.post( '/service/{}/data-retention'.format(str(uuid.uuid4())), headers=[('Content-Type', 'application/json'), create_authorization_header()], data=json.dumps(data) ) json_resp = json.loads(response.get_data(as_text=True)) assert response.status_code == 400 assert json_resp['errors'][0]['error'] == 'ValidationError' assert json_resp['errors'][0]['message'] == 
'notification_type unknown is not one of [sms, letter, email]' def test_create_service_data_retention_returns_400_when_data_retention_for_notification_type_already_exists( client, sample_service ): create_service_data_retention(service=sample_service) data = { "notification_type": "sms", "days_of_retention": 3 } response = client.post( '/service/{}/data-retention'.format(str(uuid.uuid4())), headers=[('Content-Type', 'application/json'), create_authorization_header()], data=json.dumps(data) ) assert response.status_code == 400 json_resp = json.loads(response.get_data(as_text=True)) assert json_resp['result'] == 'error' assert json_resp['message'] == 'Service already has data retention for sms notification type' def test_modify_service_data_retention(client, sample_service): data_retention = create_service_data_retention(service=sample_service) data = { "days_of_retention": 3 } response = client.post( '/service/{}/data-retention/{}'.format(sample_service.id, data_retention.id), headers=[('Content-Type', 'application/json'), create_authorization_header()], data=json.dumps(data) ) assert response.status_code == 204 assert response.get_data(as_text=True) == '' def test_modify_service_data_retention_returns_400_when_data_retention_does_not_exist(client, sample_service): data = { "days_of_retention": 3 } response = client.post( '/service/{}/data-retention/{}'.format(sample_service.id, uuid.uuid4()), headers=[('Content-Type', 'application/json'), create_authorization_header()], data=json.dumps(data) ) assert response.status_code == 404 def test_modify_service_data_retention_returns_400_when_data_is_invalid(client): data = { "bad_key": 3 } response = client.post( '/service/{}/data-retention/{}'.format(uuid.uuid4(), uuid.uuid4()), headers=[('Content-Type', 'application/json'), create_authorization_header()], data=json.dumps(data) ) assert response.status_code == 400
mit
7,344,810,703,973,433,000
40.63354
110
0.659257
false
3.861175
true
false
false
AndyDeany/pygame-template
pygametemplate/_game.py
1
6185
import os from contextlib import suppress import time from importlib import import_module import pygame with suppress(ImportError): import pygame._view # sometimes necessary. If it isn't this will cause an error #! UPDATE: this might only be necessary for py2exe to work, so if you can # compile without it, then there's no need to import pygame_view whatsoever import psutil from pygametemplate import load_image from pygametemplate.system import System from pygametemplate.console import Console from pygametemplate.userinput import Input from pygametemplate.hotkey import Hotkey from pygametemplate.text_input import TextInput pygame.init() class Game: VIEW_MODULE = "lib.views" def __init__(self, StartingView, resolution=(1280, 720), mode="windowed", *, caption="Insert name here v0.1.0", icon=None, max_allowed_ram=1 * 2**30): """Create a new Game object. `icon` should be the name of an image file. """ self.pygame = pygame self.system = System(self) self.width, self.height = resolution self.mode = mode self.initialise_screen() pygame.display.set_caption(caption) if icon is not None: pygame.display.set_icon(load_image(icon)) self.max_allowed_ram = max_allowed_ram self.previous_views = [] self.current_view = StartingView(self) self.fps = 60 self.frame = 0 # The current frame the game is on (since the game was opened) self.input = Input(self) self.console = Console(self) self.quit_condition = Hotkey(self, "f4", alt=True).pressed def set_view(self, view_name: str): """Set the current view to the View class with the given name.""" self.previous_views.append(self.current_view) View = self.get_view_class(view_name) # pylint: disable=invalid-name for view in reversed(self.previous_views): if isinstance(view, View): self.current_view = view self.previous_views.remove(view) break else: self.current_view = View(self) while self.previous_views and self.get_memory_use() > self.max_allowed_ram: oldest_view = self.previous_views.pop(0) oldest_view.unload() def get_view_class(self, 
view_name: str): """Return the View class with the given view_name.""" return getattr(import_module(self.VIEW_MODULE), view_name) def logic(self): raise NotImplementedError def draw(self): raise NotImplementedError def on_quit(self): pass def quit(self): """Signal the game to quit.""" self.running = False def _logic(self): self._check_quit() self.console.logic() self.current_view.logic() self.logic() def _draw(self): self.screen.fill((0, 0, 0)) self.current_view.draw() self.draw() self.console.draw() def _quit(self): self.on_quit() pygame.quit() @staticmethod def get_memory_use(): """Return the current memory usage of the game (RSS) in bytes.""" return psutil.Process(os.getpid()).memory_info()[0] def initialise_screen(self, resolution=None, mode=None): """(Re)initialise the screen using the given resolution and mode.""" if resolution is None: resolution = (self.width, self.height) if mode is None: mode = self.mode flags = pygame.HWSURFACE | pygame.DOUBLEBUF if mode == "fullscreen": flags |= pygame.FULLSCREEN elif mode == "windowed": os.environ["SDL_VIDEO_CENTERED"] = "1" elif mode == "borderless": os.environ["SDL_VIDEO_WINDOW_POS"] = "0,0" flags |= pygame.NOFRAME else: raise ValueError("Unknown mode for reinitialise_screen(): '{}'".format(mode)) self.screen = pygame.display.set_mode(resolution, flags) self.width, self.height = resolution self.mode = mode def display(self, image, coordinates, area=None, special_flags=0): """Display the given image at the given coordinates. Coordinates and area should be givenas if they were for a 1920x1080 window. 
""" x_scale = self.width/1920.0 y_scale = self.height/1080.0 coordinates = (coordinates[0]*x_scale, coordinates[1]*y_scale) if area is not None: area = (area[0]*x_scale, area[1]*y_scale, area[2]*x_scale, area[3]*y_scale) self.screen.blit(image, coordinates, area, special_flags) def _inputs(self): self.input.reset() for event in pygame.event.get(): if event.type == pygame.QUIT: self.quit() elif event.type == pygame.MOUSEMOTION: self.input.mouse_pos = event.pos elif event.type == pygame.MOUSEBUTTONDOWN: self.input.buttondown(event) elif event.type == pygame.MOUSEBUTTONUP: self.input.buttonup(event.button) elif event.type == pygame.KEYDOWN: self.input.buttondown(event) TextInput.receive_single_characters(event) elif event.type == pygame.KEYUP: self.input.buttonup(event.key) TextInput.receive_multiple_characters() def _update(self): self.frame += 1 pygame.display.flip() # Updating the screen self.clock.tick(self.fps) # [fps] times per second def runtime(self) -> float: """Return the amount of time the game has been running for in seconds.""" return time.time() - self.start_time def _check_quit(self): if self.quit_condition(): self.quit() def run(self): """Run the game.""" self.running = True self.clock = pygame.time.Clock() self.start_time = time.time() while self.running: self._inputs() self._logic() self._draw() self._update() self._quit()
mit
-5,954,415,052,953,735,000
32.074866
89
0.596766
false
4.029316
false
false
false
tedunderwood/20cgenres
organizetags.py
1
5190
#!/usr/bin/env python3 # Based on gathertags, this goes further by identifying owners and # sorting files by genre. import csv, os, sys from collections import Counter import numpy as np import pandas as pd # import utils currentdir = os.path.dirname(__file__) libpath = os.path.join(currentdir, '../lib') sys.path.append(libpath) import SonicScrewdriver as utils readers = ['donofrio', 'erickson', 'alvarez', 'flynn', 'rubio', 'barajas', 'koh', 'trondson', 'lin', 'buck', 'fleming'] sourcedir = '/Volumes/TARDIS/work/readers/' subobjects = os.listdir(sourcedir) subdirs = [x for x in subobjects if os.path.isdir(os.path.join(sourcedir, x))] tagset = set() taglist = [] paths = dict() readerowners = dict() for subdir in subdirs: thisreader = 'none' for reader in readers: if reader in subdir.lower(): thisreader = reader break if thisreader == 'none': print(subdir + ' lacks a recognized reader.') sys.exit(0) wholepath = os.path.join(sourcedir, subdir, 'tags') if os.path.isdir(wholepath): tagfiles = [x for x in os.listdir(wholepath) if x.endswith('.csv')] for f in tagfiles: thispath = os.path.join(wholepath, f) okaytoadd = True with open(thispath, encoding = 'utf-8') as file: reader = csv.DictReader(file) for row in reader: if 'tag' not in row or len(row['tag']) < 3: okaytoadd = False break if okaytoadd: tagset.add(f) if f not in readerowners: readerowners[f] = [] paths[f] = [] if thisreader not in readerowners[f]: readerowners[f].append(thisreader) paths[f].append(thispath) print(len(tagset)) multipleowners = [] for filename, owners in readerowners.items(): if len(owners) > 1: multipleowners.append(filename) print(len(multipleowners)) sumpages = 0 sumdiffs = 0 disagreements = Counter() for filename in multipleowners: print() print(filename) print(len(paths[filename])) existing = dict() allcounts = 0 differences = 0 for p in paths[filename]: with open(p, encoding = 'utf-8') as f: reader = csv.DictReader(f) for row in reader: if 'tag' not in row: print(p) break allcounts += 1 
page = row['page'] tag = row['tag'] if page not in existing: existing[page] = tag else: if tag != existing[page]: differences += 1 thistuple = (tag, existing[page]) if thistuple not in disagreements: thistuple = (existing[page], tag) disagreements[thistuple] += 1 print(differences/allcounts) sumpages += allcounts sumdiffs += differences if (differences / allcounts) > 0.1: print(paths[filename]) print() print(sumdiffs / sumpages) for key, value in disagreements.items(): print(key, value) allfiles = tagset train1 = pd.read_csv('bzipmeta.csv', dtype = 'object', index_col = 'docid') tidx = set(train1.index.values) for filename in allfiles: docid = filename.replace('.csv', '') if utils.dirty_pairtree(docid) not in tidx: print(docid) ficlist = [] nonficlist = [] errorlist = [] for filename, owners in readerowners.items(): path = paths[filename][0] if 'metadat' in filename: print(filename) continue docid = utils.dirty_pairtree(filename.replace('.csv', '')) genre = train1.loc[docid, 'sampledas'] with open(path, encoding = 'utf-8') as f: reader = csv.DictReader(f) ficct = 0 allct = 0 for row in reader: allct += 1 if row['tag'].lower() == 'fic': ficct += 1 ficpct = ficct / allct if genre == 'fic' and ficpct < 0.7: print('ERROR', genre, docid) errorlist.append((docid, ficpct)) if ficpct < 0.4: nonficlist.append(docid) elif genre == 'fic': ficlist.append(docid) elif genre != 'fic' and ficpct > 0.3: print('ERROR', genre, docid) errorlist.append((docid, ficpct)) if ficpct > 0.8: ficlist.append(docid) else: nonficlist.append(docid) fiction = train1.loc[ficlist, :] nonfiction = train1.loc[nonficlist, :] fiction.loc[ :, 'class'] = pd.Series([1] * len(ficlist), index = fiction.index) nonfiction.loc[ :, 'class'] = pd.Series([0] * len(nonficlist), index = nonfiction.index) forclassification = pd.concat([fiction, nonfiction]) forclassification.to_csv('firsttrainingset.csv') errorids = [x[0] for x in errorlist] errors = [x[1] for x in errorlist] errors = pd.Series(errors, index = 
errorids) errordf = train1.loc[errorids, :] errordf.loc[ : , 'ficpct'] = errors errordf.to_csv('errorlist.csv')
mit
-4,027,922,656,061,283,000
27.833333
119
0.566281
false
3.497305
false
false
false
hovel/django-s3direct
s3direct/widgets.py
1
2392
# coding=utf-8 from __future__ import unicode_literals import os from django.forms import widgets from django.utils.safestring import mark_safe from django.core.urlresolvers import reverse from django.conf import settings HTML = ( '<div class="s3direct" data-url="{policy_url}">' ' <div class="link-controls">' ' <a class="link" target="_blank" href="{file_url}">{file_name}</a>' ' <a class="remove" href="#remove">Очистить</a>' ' </div>' ' <div class="progress-controls">' ' <div class="progress progress-striped">' ' <div class="progress-bar progress-bar-success" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100">' ' </div>' ' <div class="info"></div>' ' </div>' ' <span class="abort btn btn-danger btn-sm">Отмена</span>' ' </div>' ' <div class="form-controls">' ' <input type="hidden" value="{file_url}" id="{element_id}" name="{name}" />' ' <input type="file" class="fileinput" />' ' </div>' '</div>' ) class S3DirectEditor(widgets.TextInput): class Media: js = ( 's3direct/js/jquery-1.10.2.min.js', 's3direct/js/jquery.iframe-transport.js', 's3direct/js/jquery.ui.widget.js', 's3direct/js/jquery.fileupload.js', 's3direct/js/s3direct.js', ) css = { 'all': ( 's3direct/css/bootstrap-progress.min.css', 's3direct/css/styles.css', ) } def __init__(self, *args, **kwargs): self.upload_to = kwargs.pop('upload_to', '') super(S3DirectEditor, self).__init__(*args, **kwargs) def render(self, name, value, attrs=None): final_attrs = self.build_attrs(attrs) element_id = final_attrs.get('id') kwargs = {'upload_to': self.upload_to} policy_url = reverse('s3direct', kwargs=kwargs) file_url = value if value else '' if hasattr(file_url, 'name'): file_url = file_url.name file_name = os.path.basename(file_url) output = HTML.format(policy_url=policy_url, file_url=file_url, file_name=file_name, element_id=element_id, name=name) return mark_safe(output)
mit
-8,850,921,872,194,016,000
33.463768
136
0.545837
false
3.431457
false
false
false
StevenTouzard/pyHFSS
test_hfss.py
2
1131
from hfss import get_active_project project = get_active_project() design = project.new_em_design("TestDesign") modeler = design.modeler # Cavity bx = design.set_variable("bx", "10mm") by = design.set_variable("by", "25mm") bz = design.set_variable("bz", "15mm") # Tunnel tx = design.set_variable("tx", "10mm") ty = design.set_variable("ty", "1mm") tz = design.set_variable("tz", "1mm") # Chip cz = design.set_variable('cz', ".45mm") def create_cavity(name): box = modeler.draw_box_center([0, 0, 0], [bx, by, bz], name=name) cyl1 = modeler.draw_cylinder_center([0, by/2, 0], bx/2, bz, axis='Z') cyl2 = modeler.draw_cylinder_center([0, -by/2, 0], bx/2, bz, axis='Z') modeler.unite([box, cyl1, cyl2]) return box cav1 = create_cavity("Cavity1") cav2 = create_cavity("Cavity2") modeler.translate(cav1, [(tx+bx)/2, 0, 0]) modeler.translate(cav2, [-(tx+bx)/2, 0, 0]) tunnel = modeler.draw_box_center([0, 0, 0], [tx, ty, tz], name='Tunnel') cav = modeler.unite([cav1, cav2, tunnel]) chip = modeler.draw_box_corner([-tx/2, -ty/2, -tz/2], [tx, ty, cz], name='Chip', material='sapphire') cav1.transparency = 1.0
mit
-258,912,402,121,123,200
29.567568
101
0.645447
false
2.381053
false
false
false
zrafa/mipsx
mipsx.py
1
12389
#!/usr/bin/python # -*- coding: utf-8 -*- """ Autor original del ejemplo de una aplicacion Tk: Jan Bodnar last modified: December 2010 website: www.zetcode.com Modificado y ampliado para ser una GUI de GDB para MIPS. (C) 2014 - Rafael Ignacio Zurita <[email protected]> Lea el archivo README.md para conocer la licencia de este programa. """ import time import sys import random from subprocess import Popen, PIPE, STDOUT from Tkinter import * from ttk import Frame, Button, Label, Style # Para extrar el nombre de archivo sin ruta import ntpath from ScrolledText import * import tkFileDialog import tkMessageBox class Mipsx(Frame): def __init__(self, parent): Frame.__init__(self, parent) self.parent = parent self.ejecucion = False def prox_instruccion(): p.stdin.write('step 1\n') mostrar_en(area4, "proximo") estado() if self.ejecucion: memoria() registros() listado() def ejecutar(): while self.ejecucion: prox_instruccion() def salida(w, findelinea): w.delete("1.0", END) a = p.stdout.readline() while not findelinea in a: # Esto es para saber si la ejecucion termino'. # TODO: Hay que quitarlo de este metodo. Donde ponerlo? 
if "No stack" in a: self.ejecucion = False w.insert(END,'\n\nEjecucion FINALIZADA\n\n') a = a.replace('(gdb) ', '') w.insert(END,a) a = p.stdout.readline() def mostrar_en(w, findelinea): p.stdin.write(findelinea) p.stdin.write('\r\n') salida(w, findelinea) def mostrar_en_depuracion(): file = open("/tmp/archivotemp"+PUERTOyPS+".txt") contents = file.read() #area4.delete('1.0',END) area4.insert(END,contents) file.close() def memoria(): # Para mostrar el segmento de datos, la etiqueta memoria debe estar al principio p.stdin.write('info address memoria\n') p.stdin.write('infomemoria\n') a = p.stdout.readline() solicitar_seg_de_datos = "" while not "infomemoria" in a: print "a : "+a if "Symbol " in a: a = a.replace('(gdb) Symbol "memoria" is at ', '') a = a.replace(' in a file compiled without debugging.','') solicitar_seg_de_datos = "x/40xw "+a+"\n" a = p.stdout.readline() if solicitar_seg_de_datos == "": p.stdin.write('x/40xw $pc\n') else: p.stdin.write(solicitar_seg_de_datos) p.stdin.write('x/40xw main\n') p.stdin.write('x/128 $sp - 128\n') mostrar_en(area3, "memoria") def estado(): p.stdin.write('info frame\n') mostrar_en(area4, "estado") file = open("/tmp/archivotemp"+PUERTOyPS+".txt") contents = file.readline() while not "Remote" in contents: print contents area4.insert(END,contents) contents = file.readline() area4.insert(END,"----------------------------------------\nSalida Estandar : \n\n") contents = file.read() file.close() area4.insert(END,contents) def registros(): p.stdin.write('info register\n') mostrar_en(area1, "registros") def listado(): p.stdin.write('list 1,100\n') # p.stdin.write('disas main \n') p.stdin.write('disas \n') mostrar_en(area2, "listado") def compilarparasie(): area4.delete('1.0',END) area4.insert('1.0',"Compilando para la SIE ...\r\n") root.update_idletasks() p.stdin.write('detach \n') guardar_archivo_a_compilar() tub = Popen(['mipsx_compilarparasie.sh', self.archivoacompilar, PUERTOyPS], stdout=PIPE, stdin=PIPE, stderr=STDOUT) 
streamdata = tub.communicate()[0] mostrar_en_depuracion() if tub.returncode == 0: area4.insert(END, "Compilacion para la SIE OK\n") else: area4.insert(END, "ERROR al compilar y cargar") mostrar_en_depuracion() def compilarycargar(): area4.delete('1.0',END) area4.insert('1.0',"Compilando y Cargando ...\r\n") root.update_idletasks() p.stdin.write('detach \n') guardar_archivo_a_compilar() tub = Popen(['mipsx_compilarycargar.sh', self.archivoacompilar, PUERTOyPS], stdout=PIPE, stdin=PIPE, stderr=STDOUT) streamdata = tub.communicate()[0] mostrar_en_depuracion() if tub.returncode == 0: area4.insert(END, "Compilacion y carga : OK\n") # ejecutable = self.archivoactual+".elf" # ejecutable = ntpath.basename(ejecutable) ejecutable = self.archivoacompilar+".elf" ejecutable = ntpath.basename(ejecutable) # Nos conectamos al gdbserver # ip_mips="10.0.15.232" # ip_mips="192.168.0.71" ip_mips="10.0.15.50" #comando='target extended-remote '+ip_mips+':'+PUERTOyPS+'\n' comando='target remote '+ip_mips+':'+PUERTOyPS+'\n' p.stdin.write(comando) # gdbfile = 'set remote exec-file /tmp/'+ejecutable+'\n' # p.stdin.write(gdbfile) # Respondemos "y"es a recargar p.stdin.write('y \n') # Abrimos con gdb el archivo ejecutable gdbfile = 'file /tmp/'+ejecutable+'\n' p.stdin.write(gdbfile) # Respondemos "y"es a recargar p.stdin.write('y \n') p.stdin.write('delete \n') p.stdin.write('y \n') p.stdin.write('break main\n') # p.stdin.write('run\n') p.stdin.write('continue\n') self.ejecucion = True mostrar_en(area4,"estado") memoria() registros() listado() else: area4.insert(END, "ERROR al compilar y cargar") mostrar_en_depuracion() PUERTOyPS=str( random.randrange(4000,8000+1) ) # PUERTOyPS="4567" self.parent.title("Mipsx - GUI for gdb multiarch") self.style = Style() self.style.theme_use("default") self.pack(fill=BOTH, expand=1) # Para expandir cuando las ventanas cambian de tamao for i in range(3): self.columnconfigure(i, weight=1) for i in range(20): self.rowconfigure(i, weight=1) lbl = Label(self, 
text="Registros GDB en MIPS - MR3020") lbl.grid(row=1,column=2, sticky=W, pady=4, padx=5) area1 = Text(self,height=12,width=80) area1.grid(row=2, column=2, columnspan=1, rowspan=5, sticky=E+W+S+N) lbl = Label(self, text="Programa en Assembler y Programa Binario Decodificado (disassemble)") lbl.grid(row=7, column=2, pady=1, padx=1, sticky=W+N+E+S) area2 = Text(self, height=6,width=80) area2.grid(row=8, column=2, columnspan=1, rowspan=5, padx=1, sticky=E+W+S+N) lbl = Label(self, text='Memoria - Segmento de datos (debe existir la etiqueta "memoria") - Segmento de texto - Pila') lbl.grid(row=13, column=2, pady=1, padx=1, sticky=W+N+E+S) area3 = Text(self,height=15,width=80) area3.grid(row=14, column=2, columnspan=1, rowspan=5, padx=1, sticky=E+W+S+N) lbl4 = Label(self, text="Mensajes de Depuracion") lbl4.grid(row=13, column=0, pady=1, padx=1, sticky=W+N+E+S) area4 = Text(self,height=8,width=60) area4.grid(row=14, column=0, columnspan=1, rowspan=5, padx=1, sticky=E+W+S+N) lbl = Label(self, text="Editor del Programa") lbl.grid(row=1,column=0, sticky=W, pady=4, padx=5) area5 = ScrolledText(self,height=20,width=60) area5.grid(row=2, column=0, columnspan=1, rowspan=10, padx=1, sticky=E+W+S+N) # Variables globales archivoactual = "hello.s" archivoacompilar = "hello.s" archivotemp = "/tmp/archivotemp"+PUERTOyPS+".txt" # ip_mips = "10.0.15.232" ip_mips = "10.0.15.50" # ip_mips = "192.168.0.71" # Al abrir un archivo deseamos tener un area de trabajo cero def limpiar_areas(): area4.delete('1.0',END) area3.delete('1.0',END) area2.delete('1.0',END) area1.delete('1.0',END) def abrir_en_editor(archivo): fd = open(archivo) contents = fd.read() area5.delete('1.0',END) area5.insert('1.0',contents) fd.close() self.archivoactual = archivo print self.archivoactual def open_command(): FILEOPENOPTIONS = dict(defaultextension='*.s', filetypes=[('Archivo assembler','*.s'), ('Todos los archivos','*.*')]) file = tkFileDialog.askopenfile(parent=root,mode='rb',title='Select a file', 
**FILEOPENOPTIONS) if file != None: limpiar_areas() abrir_en_editor(file.name) def guardar_archivo_a_compilar(): self.archivoacompilar = "/tmp/archivo"+PUERTOyPS+".s" tub = Popen(['rm', self.archivoacompilar], stdout=PIPE, stdin=PIPE, stderr=STDOUT) streamdata = tub.communicate()[0] tub = Popen(['touch', self.archivoacompilar], stdout=PIPE, stdin=PIPE, stderr=STDOUT) streamdata = tub.communicate()[0] tmp = open(self.archivoacompilar, "w") if tmp != None: data = area5.get('1.0', END+'-1c') tmp.write(data) tmp.close() archivotmppwd = "archivo"+PUERTOyPS+".s" tub = Popen(['cp', self.archivoacompilar, archivotmppwd], stdout=PIPE, stdin=PIPE, stderr=STDOUT) streamdata = tub.communicate()[0] def save_command(): file = tkFileDialog.asksaveasfile(mode='w') if file != None: # slice off the last character from get, as an extra return is added data = area5.get('1.0', END+'-1c') file.write(data) file.close() self.archivoactual = file.name print self.archivoactual def exit_command(): if tkMessageBox.askokcancel("Quit", "Do you really want to quit?"): root.destroy() def about_command(): label = tkMessageBox.showinfo("Acerca de", "MIPSX - GUI for gdb multiarch\n\nEntorno de desarrollo en lenguaje assembler arquitectura MIPS\nEste programa ensabla, genera el programa ejecutable, y lo ejecuta en modo debug en una maquina MIPS real\n\nCopyright 2014 Rafael Ignacio Zurita\n\nFacultad de Informatica\nUniversidad Nacional del Comahue\n\nThis program is free software; you can redistribute it and/or modify it under the terms of the GPL v2") def dummy(): print "I am a Dummy Command, I will be removed in the next step" def no_hacer_nada(): print "nada por hacer" def archivo_sin_guardar(): data = area5.get('1.0', END+'-1c') fd = open(self.archivoactual) contents = fd.read() fd.close() if data == contents: return False res = tkMessageBox.askquestion("Confirmar", "Archivo sin guardar\nEsta seguro de finalizar el programa?", icon='warning') if res == 'yes': return False return True def 
salir(): if archivo_sin_guardar(): return tmp = "/tmp/archivo"+PUERTOyPS+".s" tmp2 = "archivo"+PUERTOyPS+".s" tmp3 = "/tmp/archivo"+PUERTOyPS+".s.elf" tmp4 = "/tmp/archivotemp"+PUERTOyPS+".txt" tub = Popen(['rm', tmp, tmp2, tmp3, tmp4], stdout=PIPE, stdin=PIPE, stderr=STDOUT) streamdata = tub.communicate()[0] tmp2 = "/tmp/archivo"+PUERTOyPS+".s.o" ip_mips = "10.0.15.50" tub = Popen(['mipsx_finalizar_gdbserver.sh', ip_mips, PUERTOyPS, tmp, tmp2, tmp3, tmp4], stdout=PIPE, stdin=PIPE, stderr=STDOUT) streamdata = tub.communicate()[0] # ip_mips = "10.0.15.232" # ip_mips = "192.168.0.71" # killgdbserver = Popen(['sshpass', '-p', clave, 'ssh', '-o', 'StrictHostKeyChecking=no', '-l', 'root', ip_mips, comando], stdout=PIPE, stdin=PIPE, stderr=STDOUT) quit() menu = Menu(root) root.config(menu=menu) filemenu = Menu(menu) menu.add_cascade(label="Archivo", menu=filemenu) filemenu.add_command(label="Nuevo", command=dummy) filemenu.add_command(label="Abrir...", command=open_command) filemenu.add_command(label="Guardar...", command=save_command) filemenu.add_separator() filemenu.add_command(label="Salir", command=salir) menu.add_command(label="Run", command=ejecutar) menu.add_command(label="Next", command=prox_instruccion) menu.add_command(label="Breakpoint", command=no_hacer_nada) menu.add_command(label="Compilar y Cargar", command=compilarycargar) menu.add_command(label="Compilar para SIE", command=compilarparasie) helpmenu = Menu(menu) menu.add_cascade(label="Ayuda", menu=helpmenu) helpmenu.add_command(label="Acerca de...", command=about_command) menu.add_command(label="Salir", command=salir) abrir_en_editor("hello.s") # para que al cerrar la ventana cierre los temporales y los borre root.protocol("WM_DELETE_WINDOW", salir) def main(): root.mainloop() if __name__ == '__main__': p = Popen(['gdb-multiarch'], stdout=PIPE, stdin=PIPE, stderr=STDOUT) root = Tk() # Para expandir cuando las ventanas cambian de tamao root.columnconfigure(0,weight=1) root.rowconfigure(0, weight=1) app = 
Mipsx(root) main()
gpl-2.0
2,601,857,939,086,007,000
27.946262
458
0.648398
false
2.621456
false
false
false
jirenz/CS229_Project
learning/model.py
1
7449
import numpy as np from sklearn import linear_model from sknn.mlp import Regressor, Layer import learning.mdp from projectfiles.random_deck_generator import RandomDeckGenerator from hearthbreaker.engine import Deck, card_lookup, Game from hearthbreaker.agents import * import projectfiles.util from projectfiles.feature_extract import * class HearthstoneMDP(learning.mdp.MDP): def __init__(self, strategy): self.strategy = strategy def start_state(self): generator = RandomDeckGenerator() deck1 = generator.generate() deck2 = deck1.copy() game = Game([deck1, deck2], [RandomAgent(), RandomAgent()]) game.pre_game() return game def is_end_state(self, state): return state.game_ended def getActions(self, state): # An "action" is actually parametrized directly by the state corresponding # to the current player's actions. The strategy object enumerates a list of # possible actions return self.strategy.getActions(state.copy()) def getRandomAction(self, state): return self.strategy.getRandomAction(state.copy()) def getBestAction(self, state, heuristic): return self.strategy.getBestAction(state.copy(), heuristic) def getSuccAndReward(self, state, next_action): next_state = next_action.copy() reward = 0.0 if next_state.game_ended: if next_state.winner is None: reward = self.getReward("tie") elif state.current_player.name == next_state.winner.name: reward = self.getReward("win") else: reward = self.getReward("lose") return (next_state, reward) def getReward(self, event): return {"win" : 10, "lose" : -8, "tie" : 3}[event] def getDiscount(self): return 0.8 class Model: def __init__(self): pass def __call__(self, state, action): # the action is a state! 
next_state = action return self.eval(state, next_state) def eval(self, state, next_state): raise NotImplementedError("") def update(self, state, next_state, delta): assert(state.current_player.name == next_state.current_player.name) print("curplay", state.current_player.name, \ "health", state.current_player.hero.health, \ "my_next_health", next_state.current_player.hero.health, \ "enemy_health", state.current_player.opponent.hero.health, \ "enemy_next_heatlh", next_state.current_player.opponent.hero.health, \ "delta", delta) class LinearModel(Model): def __init__(self, feature_extractor, initial_weights = None): self.feature_extractor = feature_extractor self.weights = initial_weights if initial_weights is not None else feature_extractor.get_initial() class StatePairLinearModel(LinearModel): # Takes a feature extractor that expects TWO state arguments def __init__(self, feature_extractor, initial_weights = None): super().__init__(feature_extractor, initial_weights) assert(isinstance(self.feature_extractor, StatePairFeatureExtractor)) def eval(self, state, next_state): assert(state.current_player.name == next_state.current_player.name) if isinstance(self.feature_extractor, StateFeatureExtractor): return np.dot(self.weights, self.feature_extractor(next_state) - self.feature_extractor(state)) else: assert(isinstance(self.feature_extractor, StatePairFeatureExtractor)) return np.dot(self.weights, self.feature_extractor(state, next_state)) def update(self, state, next_state, delta): super().update(state, next_state, delta) phi = self.feature_extractor(state, next_state) self.weights += delta * phi # self.feature_extractor.debug(self.weights) class FinalStateLinearModel(LinearModel): # Takes a feature extractor that expects ONE state argument def __init__(self, feature_extractor, initial_weights = None): super().__init__(feature_extractor, initial_weights) assert(isinstance(self.feature_extractor, StateFeatureExtractor)) def eval(self, state, next_state): # if 
next_state.current_player_win(): return 1e9 # if next_state.current_player_lose(): return -1e9 return np.dot(self.weights, self.feature_extractor(next_state)) def train(self, dataset): clf = linear_model.LinearRegression() X, y = dataset # X = [self.feature_extractor(state) for state, value in dataset] # y = [value for state, value in dataset] clf.fit(X, y) self.weights = clf.coef_ # print(self.weights) def update(self, state, next_state, delta): super().update(state, next_state, delta) phi = self.feature_extractor(next_state) self.weights += delta * phi # self.feature_extractor.debug(weights) class StateDifferenceLinearModel(LinearModel): def __init__(self, feature_extractor, initial_weights = None): super().__init__(feature_extractor, initial_weights) assert(isinstance(self.feature_extractor, StateFeatureExtractor)) def eval(self, state, next_state): return np.dot(self.weights, self.feature_extractor(next_state) - self.feature_extractor(state)) def update(self, state, next_state, delta): super().update(state, next_state, delta) phi = self.feature_extractor(state) next_phi = self.feature_extractor(next_state) # self.feature_extractor.debug(next_phi - phi) self.weights += delta * (next_phi - phi) self.feature_extractor.debug(self.weights) class BasicHeuristicModel(Model): def __init__(self): super().__init__() def eval(self, state_1, state_2): def score(player): score = 0 for i in player.minions: score += i.calculate_attack() score += i.health score += len(player.hand) * 2 score += player.hero.health + player.hero.armor return score return score(state_2.current_player) - score(state_2.other_player) class FinalStateNeuralModel(Model): def __init__(self, feature_extractor, nn = None): self.feature_extractor = feature_extractor self.nn = nn if nn is not None else self.get_initial() # self.train() def get_initial(self): return Regressor( layers=[ Layer("Rectifier", units=100), # Layer("Sigmoid", units = 200), # Layer("Tanh", units = 100) Layer("Linear")], 
learning_rate=0.001, n_iter=10, f_stable = 0.1) def eval(self, state, next_state): if next_state.current_player_win(): return 1e9 if next_state.current_player_lose(): return -1e9 vec = np.array(self.feature_extractor(next_state)) return self.nn.predict(np.ndarray(shape = (1, len(vec)), buffer = vec)) # return np.dot(self.weights, self.feature_extractor(next_state)) def train(self, dataset): X, y = dataset # X = np.array([self.feature_extractor(state) for state, value in dataset]) # y = [value for state, value in dataset] self.nn.fit(X, y) # def train(self): # Data = open("data.txt", "r") # Tmp = Data.read().splitlines() # training_set = [] # for i in Tmp: # c = i.split(" ") # for j in range(0, len(c)): # c[j] = float(c[j]) # training_set.append(c) # X = [] # y = [] # for data_point in training_set: # X.append(data_point[0:-1]) # y.append(data_point[-1]) # for i in X: # if (len(i) != 38): # print(i) # X = np.ndarray(shape = (len(y), len(X[0])), buffer = np.array(X)) # y = np.ndarray(shape = (len(y), 1), buffer = np.array(y)) # self.nn.fit(X, y) # print("Learning from data size: " + str(len(y))) # Data.close() class DeepNeuralModel(FinalStateNeuralModel): def get_initial(self): return Regressor( layers=[ Layer("Rectifier", units=100), Layer("Sigmoid", units = 200), Layer("Tanh", units = 100), Layer("Linear")], learning_rate=0.001, n_iter=10, f_stable = 0.1)
mit
-6,037,777,014,030,121,000
31.528384
100
0.696872
false
3.088308
false
false
false
muniri92/portfolio-remastered
portfolio/portfolio_app/migrations/0001_initial.py
1
1917
# -*- coding: utf-8 -*- # Generated by Django 1.9.6 on 2016-06-12 02:59 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='About', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('description', models.CharField(max_length=1000)), ], ), migrations.CreateModel( name='Education', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('institution', models.CharField(max_length=100)), ('dates', models.CharField(max_length=100)), ('degree', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Expericence', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('dates', models.CharField(max_length=100)), ('position', models.CharField(max_length=100)), ], ), migrations.CreateModel( name='Portfolio', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('description', models.CharField(max_length=1000)), ('site', models.CharField(max_length=500)), ('repo', models.CharField(max_length=500)), ], ), ]
mit
-9,137,160,564,085,212,000
35.865385
114
0.53782
false
4.406897
false
false
false
ShareDVI/adventofcode
day10.py
1
1039
#!/usr/bin/python3 # -*- coding: utf-8 -*- """Day 10 of AdventOfCode.com: What's common in between digits and nuclear decay?""" import os # ToDo: rewrite this one with groupby, and\or ConwayMendeleevTable, and\or multithreading def look_and_say(numbers): """ Performs a look'n'say iteration. Repeated digits are collapsed into one and preceeded by their amount. Add 1 before each single digit. '111' -> '31' :param numbers: string of digits :return: look'n'say op over digits """ digit = "" result = "" count = 0 for c in numbers: if c == digit: count += 1 else: if count: result += str(count) + digit digit = c count = 1 result += str(count) + digit return result with open(os.path.dirname(os.path.realpath('__file__')) + "/input/day10.txt", "r") as datafile: data = datafile.read().replace('\n', '') print(0, len(data)) for i in range(0, 79): data = look_and_say(data) print(i + 1, len(data))
mit
-1,406,064,836,263,220,500
28.685714
106
0.592878
false
3.486577
false
false
false
zhangfangyan/devide
module_kits/matplotlib_kit/__init__.py
7
2895
# $Id: __init__.py 1945 2006-03-05 01:06:37Z cpbotha $ # importing this module shouldn't directly cause other large imports # do large imports in the init() hook so that you can call back to the # ModuleManager progress handler methods. """matplotlib_kit package driver file. Inserts the following modules in sys.modules: matplotlib, pylab. @author: Charl P. Botha <http://cpbotha.net/> """ import os import re import sys import types # you have to define this VERSION = '' def init(theModuleManager, pre_import=True): if hasattr(sys, 'frozen') and sys.frozen: # matplotlib supports py2exe by checking for matplotlibdata in the appdir # but this is only done on windows (and therefore works for our windows # installer builds). On non-windows, we have to stick it in the env # to make sure that MPL finds its datadir (only if we're frozen) mpldir = os.path.join(theModuleManager.get_appdir(), 'matplotlibdata') os.environ['MATPLOTLIBDATA'] = mpldir # import the main module itself # this doesn't import numerix yet... global matplotlib import matplotlib # use WX + Agg backend (slower, but nicer that WX) matplotlib.use('WXAgg') # interactive mode: user can use pylab commands from any introspection # interface, changes will be made immediately and matplotlib cooperates # nicely with main WX event loop matplotlib.interactive(True) # with matplotlib 1.0.1 we can't do this anymore. # makes sure we use the numpy backend #from matplotlib import rcParams #rcParams['numerix'] = 'numpy' theModuleManager.setProgress(25, 'Initialising matplotlib_kit: config') # @PATCH: # this is for the combination numpy 1.0.4 and matplotlib 0.91.2 # matplotlib/numerix/ma/__init__.py: # . normal installation fails on "from numpy.ma import *", so "from # numpy.core.ma import *" is done, thus bringing in e.g. getmask # . 
pyinstaller binaries for some or other reason succeed on # "from numpy.ma import *" (no exception raised), therefore do # not do "from numpy.core.ma import *", and therefore things like # getmask are not imported. # solution: # we make sure that "from numpy.ma import *" actually brings in # numpy.core.ma by importing that and associating the module # binding to the global numpy.ma. #if hasattr(sys, 'frozen') and sys.frozen: # import numpy.core.ma # sys.modules['numpy.ma'] = sys.modules['numpy.core.ma'] # import the pylab interface, make sure it's available from this namespace global pylab import pylab theModuleManager.setProgress(90, 'Initialising matplotlib_kit: pylab') # build up VERSION global VERSION VERSION = '%s' % (matplotlib.__version__,) theModuleManager.setProgress(100, 'Initialising matplotlib_kit: complete')
bsd-3-clause
-4,838,247,926,058,217,000
34.304878
81
0.691192
false
3.912162
false
false
false
mlvander/montasola
imageManager/migrations/0001_initial.py
1
1758
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-06-17 02:26 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Gallery', fields=[ ('galleryID', models.AutoField(primary_key=True, serialize=False)), ('gallery', models.CharField(max_length=255)), ('galleryDescription', models.CharField(max_length=255)), ('create_dt', models.DateField()), ('edit_dt', models.DateField()), ], ), migrations.CreateModel( name='GalleryImage', fields=[ ('galleryImageID', models.AutoField(primary_key=True, serialize=False)), ('isCover', models.BooleanField(default=False)), ('galleryID_fk', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='imageManager.Gallery')), ], ), migrations.CreateModel( name='Image', fields=[ ('imageID', models.AutoField(primary_key=True, serialize=False)), ('image', models.CharField(max_length=255)), ('imageDescription', models.CharField(max_length=255)), ('create_dt', models.DateField()), ('edit_dt', models.DateField()), ], ), migrations.AddField( model_name='galleryimage', name='imageID_fk', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='imageManager.Image'), ), ]
mit
7,176,416,791,405,718,000
34.16
124
0.552901
false
4.530928
false
false
false
mc2014/anvil
anvil/origins.py
1
1234
# -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import jsonpatch from anvil import utils class Origin(dict): def __init__(self, filename, patched=False): super(Origin, self).__init__() self.filename = filename self.patched = patched def load(filename, patch_file=None): base = utils.load_yaml(filename) patched = False if patch_file: patch = jsonpatch.JsonPatch(patch_file) patch.apply(base, in_place=True) patched = True origin = Origin(filename, patched=patched) origin.update(base) return origin
apache-2.0
-1,253,710,296,261,885,700
29.85
78
0.683144
false
3.868339
false
false
false
jeffmahoney/crash-python
crash/infra/lookup.py
1
12098
# -*- coding: utf-8 -*- # vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: from typing import Tuple, Any, Union, Optional from crash.infra.callback import ObjfileEventCallback from crash.infra.callback import Callback from crash.exceptions import DelayedAttributeError import gdb class NamedCallback(ObjfileEventCallback): """ A base class for Callbacks with names This cannot be used directly since it does not provide a method for :meth:`.ObjfileEventCallback.callback`. Args: name: The name of the symbol or type to be resolved. callback: A function to call with the result of the derived class's :meth:`.ObjfileEventCallback.check_ready` method. attrname (optional): A name safe for use as an attribute name. If unspecified, defaults to the same string as name. Attributes: name (:obj:`str`): The name of the symbol or type being resolved. attrname (:obj:`str`): The name of symbol or type being resolved translated for use as an attribute name. """ def __init__(self, name: str, callback: Callback, attrname: str = None) -> None: super().__init__() self.name = name self.attrname = self.name if attrname is not None: self.attrname = attrname self._callback = callback # This is silly but it avoids pylint abstract-method warnings def check_ready(self) -> Any: """ The method that derived classes implement for detecting when the conditions required to call the callback have been met. Returns: :obj:`object`: This method can return an arbitrary object. It will be passed untouched to :meth:`callback` if the result is anything other than :obj:`None` or :obj:`False`. """ raise NotImplementedError("check_ready must be implemented by derived class.") def callback(self, result: Any) -> Union[None, bool]: """ The callback for handling the sucessful result of :meth:`check_ready`. It indirectly calls the callback specified in the constructor. 
Args: result: The result returned from :meth:`check_ready` Returns: :obj:`None` or :obj:`bool`: If :obj:`None` or :obj:`True`, the callback succeeded and will be completed and removed. Otherwise, the callback will stay connected for future completion. """ return self._callback(result) class MinimalSymbolCallback(NamedCallback): """ A callback that executes when the named minimal symbol is discovered in the objfile and returns the :obj:`gdb.MinSymbol`. The callback must accept a :obj:`gdb.MinSymbol` and return :obj:`bool` or :obj:`None`. Args: name: The name of the minimal symbol to discover callback: The callback to execute when the minimal symbol is discovered symbol_file (optional): Name of the symbol file to use """ def __init__(self, name: str, callback: Callback, symbol_file: str = None) -> None: super().__init__(name, callback) self.symbol_file = symbol_file self.connect_callback() def check_ready(self) -> Optional[gdb.MinSymbol]: """ Returns the result of looking up the minimal symbol when a new object file is loaded. Returns: :obj:`gdb.MinSymbol`: The requested minimal symbol """ return gdb.lookup_minimal_symbol(self.name, self.symbol_file, None) def __str__(self) -> str: return ("<{}({}, {}, {})>" .format(self.__class__.__name__, self.name, self.symbol_file, self.callback)) class SymbolCallback(NamedCallback): """ A callback that executes when the named symbol is discovered in the objfile and returns the :obj:`gdb.Symbol`. The callback must accept a :obj:`gdb.Symbol` and return :obj:`bool` or :obj:`None`. Args: name: The name of the symbol to discover callback: The callback to execute when the symbol is discovered domain (optional): The domain to search for the symbol. The value is assumed to be one of the value associated with :obj:`gdb.Symbol` constant, i.e. SYMBOL_*_DOMAIN. 
""" def __init__(self, name: str, callback: Callback, domain: int = gdb.SYMBOL_VAR_DOMAIN) -> None: super().__init__(name, callback) self.domain = domain self.connect_callback() def check_ready(self) -> Optional[gdb.Symbol]: """ Returns the result of looking up the symbol when a new object file is loaded. Returns: :obj:`gdb.Symbol`: The requested symbol """ return gdb.lookup_symbol(self.name, None, self.domain)[0] def __str__(self) -> str: return ("<{}({}, {})>" .format(self.__class__.__name__, self.name, self.domain)) class SymvalCallback(SymbolCallback): """ A callback that executes when the named symbol is discovered in the objfile and returns the :obj:`gdb.Value` associated with the :obj:`gdb.Symbol`. The callback must accept a :obj:`gdb.Value` and return :obj:`bool` or :obj:`None`. See :obj:`SymbolCallback` for arguments. """ def check_ready(self) -> Optional[gdb.Value]: # type: ignore """ After successfully looking up the :obj:`gdb.Symbol`, returns the :obj:`gdb.Value` associated with it. Returns: :obj:`gdb.Value`: The value associated with the requested symbol """ sym = super().check_ready() if sym is not None: try: return sym.value() except gdb.MemoryError: pass return None class TypeCallback(NamedCallback): """ A callback that executes when the named type is discovered in the objfile and returns the :obj:`gdb.Type` associated with it. The callback must accept a :obj:`gdb.Type` and return :obj:`bool` or :obj:`None`. 
Args: name: The name of the type to discover callback: The callback to execute when the type is discovered block (optional): The :obj:`gdb.Block` to search for the symbol """ def __init__(self, name: str, callback: Callback, block: gdb.Block = None) -> None: (name, attrname, self.pointer) = self.resolve_type(name) super().__init__(name, callback, attrname) self.block = block self.connect_callback() @staticmethod def resolve_type(name: str) -> Tuple[str, str, bool]: """ This function takes a C type name and translates it into a 3-tuple that contains the basic type name, the type name translated to a form suitable for an attribute name, and whether the type corresponds to a pointer. The basic type name has all leading and trailing whitespace stripped, and any ``*`` removed. The attribute type name takes that base, removes the leading ``struct`` for structure types, removes any leading or trailing whitespace, replaces internal spaces with underscores, and appends a ``_type`` or ``_p_type`` suffix, depending on whether the type is a pointer type. Some examples: - ``struct foo`` → ``foo_type`` - ``struct foo *`` → ``foo_p_type`` - ``unsigned long`` → ``unsigned_long_type`` *Notes*: - Multiple levels of pointers are not handled properly. In practice this means that ``struct foo *`` and ``struct foo **`` can't be used simultaneously. This is typically not a problem. - Unions are not handled as a special case as structs are. A union type would use an attribute name of ``union_foo_type``. Returns: (:obj:`str`, :obj:`str`, :obj:`bool`): A 3-tuple consisting of the basic type name, the name formatted for use as an attribute name, and whether the type is a pointer type. 
""" pointer = False name = name.strip() if name[-1] == '*': pointer = True name = name[:-1].strip() attrname = name if name.startswith('struct '): attrname = name[7:].strip() if pointer: attrname += '_p_type' else: attrname += '_type' name = name attrname = attrname.replace(' ', '_') return (name, attrname, pointer) def check_ready(self) -> Optional[gdb.Type]: try: return gdb.lookup_type(self.name, self.block) except gdb.error: return None def __str__(self) -> str: return ("<{}({}, {})>" .format(self.__class__.__name__, self.name, self.block)) class DelayedValue: """ A generic class for making class attributes available that describe to-be-loaded symbols, minimal symbols, and types. """ def __init__(self, name: str, attrname: str = None) -> None: if name is None or not isinstance(name, str): raise ValueError("Name must be a valid string") self.name = name if attrname is None: self.attrname = name else: self.attrname = attrname assert self.attrname is not None self.value: Any = None def get(self) -> Any: if self.value is None: raise DelayedAttributeError(self.name) return self.value def callback(self, value: Any) -> None: if self.value is not None: return self.value = value class DelayedMinimalSymbol(DelayedValue): """ A DelayedValue that handles minimal symbols. Args: name: The name of the minimal symbol """ def __init__(self, name: str) -> None: super().__init__(name) self.cb = MinimalSymbolCallback(name, self.callback) def __str__(self) -> str: return "{} attached with {}".format(self.__class__, str(self.cb)) class DelayedSymbol(DelayedValue): """ A DelayedValue that handles symbols. Args: name: The name of the symbol """ def __init__(self, name: str) -> None: super().__init__(name) self.cb = SymbolCallback(name, self.callback) def __str__(self) -> str: return "{} attached with {}".format(self.__class__, str(self.cb)) class DelayedType(DelayedValue): """ A DelayedValue for types. Args: name: The name of the type. 
""" def __init__(self, name: str) -> None: (name, attrname, self.pointer) = TypeCallback.resolve_type(name) super().__init__(name, attrname) self.cb = TypeCallback(name, self.callback) def __str__(self) -> str: return "{} attached with {}".format(self.__class__, str(self.callback)) def callback(self, value: gdb.Type) -> None: if self.pointer: value = value.pointer() self.value = value class DelayedSymval(DelayedSymbol): """ A :obj:`DelayedSymbol` that returns the :obj:`gdb.Value` associated with the symbol. Args: name: The name of the symbol. """ def callback(self, value: gdb.Symbol) -> None: symval = value.value() if symval.type.code == gdb.TYPE_CODE_FUNC: symval = symval.address self.value = symval def __str__(self) -> str: return "{} attached with {}".format(self.__class__, str(self.cb)) class DelayedMinimalSymval(DelayedMinimalSymbol): """ A DelayedMinimalSymbol that returns the address of the minimal symbol as an :obj:`int`. Args: name: The name of the minimal symbol. """ def callback(self, value: gdb.MinSymbol) -> None: self.value = int(value.value().address) def __str__(self) -> str: return "{} attached with {}".format(self.__class__, str(self.cb))
gpl-2.0
6,585,040,455,276,762,000
31.769648
86
0.598495
false
4.271282
false
false
false
nickjstevens/eCIP
eCIP/settings.py
1
3869
""" Django settings for eCIP project. Generated by 'django-admin startproject' using Django 1.10.6. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '7@w69y(17k10@!*_bddkq+^cw11f7r7#h_%x5ug1czi#$b%p27' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [u'nickjstevens.pythonanywhere.com'] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'app', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'eCIP.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'eCIP.wsgi.application' # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 
'ENGINE': 'django.db.backends.mysql', 'NAME': 'nickjstevens$django_eCIP', 'USER': 'nickjstevens', 'PASSWORD':'django_eCIP_password', 'HOST': 'nickjstevens.mysql.pythonanywhere-services.com', 'PORT': '', } } # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' # default static files settings for PythonAnywhere. # see https://help.pythonanywhere.com/pages/DjangoStaticFiles for more info MEDIA_ROOT = u'/home/nickjstevens/django_apps/eCIP/app/media' MEDIA_URL = '/media/' STATIC_ROOT = u'/home/nickjstevens/django_apps/eCIP/app/static' STATIC_URL = '/static/' REST_FRAMEWORK = { # Use Django's standard `django.contrib.auth` permissions, # or allow read-only access for unauthenticated users. 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly' ] }
mit
2,021,033,068,940,957,000
25.875
91
0.689842
false
3.439111
false
false
false
rupumped/Gen2-UHF-RFID-Reader
gr-rfid/apps/reader.py
1
5065
#Developed by: Nikos Kargas from gnuradio import gr from gnuradio import uhd from gnuradio import blocks from gnuradio import filter from gnuradio import analog from gnuradio import digital from gnuradio import qtgui import rfid DEBUG = False class reader_top_block(gr.top_block): # Configure usrp source def u_source(self): self.source = uhd.usrp_source( device_addr=self.usrp_address_source, stream_args=uhd.stream_args( cpu_format="fc32", channels=range(1), ), ) self.source.set_samp_rate(self.adc_rate) self.source.set_center_freq(self.freq, 0) self.source.set_gain(self.rx_gain, 0) self.source.set_antenna("RX2", 0) self.source.set_auto_dc_offset(False,0) # Uncomment this line for SBX daughterboard # Configure usrp sink def u_sink(self): self.sink = uhd.usrp_sink( device_addr=self.usrp_address_sink, stream_args=uhd.stream_args( cpu_format="fc32", channels=range(1), ), ) self.sink.set_samp_rate(self.dac_rate) self.sink.set_center_freq(self.freq, 0) self.sink.set_gain(self.tx_gain, 0) self.sink.set_antenna("TX/RX", 0) def __init__(self): gr.top_block.__init__(self) #rt = gr.enable_realtime_scheduling() ######## Variables ######### self.dac_rate = 1e6 # DAC rate self.adc_rate = 10e6 # ADC rate self.decim = 5 # Decimation (downsampling factor) self.ampl = 0.5 # Output signal amplitude (signal power vary for different RFX900 cards) self.freq = 910e6 # Modulation frequency (can be set between 902-920) self.rx_gain = 0 # RX Gain (gain at receiver) self.tx_gain = 25 # RFX900 no Tx gain option BLF = 100e3 # Backscatter link frequency self.usrp_address_source = "addr=192.168.10.2,recv_frame_size=256" self.usrp_address_sink = "addr=192.168.10.2,recv_frame_size=256" # Each FM0 symbol consists of ADC_RATE/BLF samples (2e6/40e3 = 50 samples) # 10 samples per symbol after matched filtering and decimation self.num_taps = [1] * (int)(self.adc_rate/BLF/2) # matched to half symbol period ######## File sinks for debugging (1 for each block) ######### self.file_sink_source = 
blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/source", False) self.file_sink_matched_filter = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/matched_filter", False) self.file_sink_gate = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/gate", False) self.file_sink_decoder = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/decoder", False) self.file_sink_reader = blocks.file_sink(gr.sizeof_float*1, "../misc/data/reader", False) ######## Blocks ######### self.matched_filter = filter.fir_filter_ccc(self.decim, self.num_taps); self.gate = rfid.gate(int(self.adc_rate/self.decim)) self.tag_decoder = rfid.tag_decoder(int(self.adc_rate/self.decim)) self.reader = rfid.reader(int(self.adc_rate/self.decim),int(self.dac_rate)) self.amp = blocks.multiply_const_ff(self.ampl) self.to_complex = blocks.float_to_complex() if (DEBUG == False) : # Real Time Execution # USRP blocks self.u_source() self.u_sink() ######## Connections ######### self.connect(self.source, self.matched_filter) self.connect(self.matched_filter, self.gate) self.connect(self.gate, self.tag_decoder) self.connect((self.tag_decoder,0), self.reader) self.connect(self.reader, self.amp) self.connect(self.amp, self.to_complex) self.connect(self.to_complex, self.sink) #File sinks for logging (Remove comments to log data) self.connect(self.source, self.file_sink_source) else : # Offline Data self.file_source = blocks.file_source(gr.sizeof_gr_complex*1, "../misc/data/file_source_test",False) ## instead of uhd.usrp_source self.file_sink = blocks.file_sink(gr.sizeof_gr_complex*1, "../misc/data/file_sink", False) ## instead of uhd.usrp_sink ######## Connections ######### self.connect(self.file_source, self.matched_filter) self.connect(self.matched_filter, self.gate) self.connect(self.gate, self.tag_decoder) self.connect((self.tag_decoder,0), self.reader) self.connect(self.reader, self.amp) self.connect(self.amp, self.to_complex) self.connect(self.to_complex, self.file_sink) #File sinks for 
logging self.connect(self.gate, self.file_sink_gate) self.connect((self.tag_decoder,1), self.file_sink_decoder) # (Do not comment this line) #self.connect(self.file_sink_reader, self.file_sink_reader) #self.connect(self.matched_filter, self.file_sink_matched_filter) if __name__ == '__main__': main_block = reader_top_block() main_block.start() while(1): inp = raw_input("'Q' to quit \n") if (inp == "q" or inp == "Q"): break main_block.reader.print_results() main_block.stop()
gpl-3.0
5,839,815,719,848,992,000
37.371212
152
0.643633
false
3.138166
false
false
false
eneldoserrata/marcos_openerp
oemedical/hms_lite/medical_inpatient/medical_inpatient.py
1
4138
import time # from mx import datetime import datetime from openerp.osv import fields, osv from openerp.tools.translate import _ import sys class inpatient_registration (osv.osv): # Method to check for availability and make the hospital bed reservation def registration_confirm(self, cr, uid, ids, context={}): for reservation in self.browse(cr,uid,ids): bed_id= str(reservation.bed.id) cr.execute("select count (*) from medical_inpatient_registration where (hospitalization_date::timestamp,discharge_date::timestamp) overlaps ( timestamp %s , timestamp %s ) and state= %s and bed = cast(%s as integer)", (reservation.hospitalization_date,reservation.discharge_date,'confirmed',bed_id)) res = cr.fetchone() if res[0] > 0: raise osv.except_osv('Warning', 'Bed has been already reserved in this period' ) else: self.write(cr, uid, ids, {'state':'confirmed'}) return True def patient_discharge(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'free'}) return True def registration_cancel(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'cancelled'}) return True def registration_admission(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'hospitalized'}) return True _name = "medical.inpatient.registration" _description = "Patient admission History" _columns = { 'name' : fields.char ('Registration Code',size=128), 'patient' : fields.many2one ('medical.patient','Patient'), 'admission_type' : fields.selection([('routine','Routine'),('maternity','Maternity'),('elective','Elective'),('urgent','Urgent'),('emergency','Emergency')],'Admission type'), 'hospitalization_date' : fields.datetime ('Hospitalization date'), 'discharge_date' : fields.datetime ('Discharge date'), 'attending_physician' : fields.many2one ('medical.physician','Attending Physician'), 'operating_physician' : fields.many2one ('medical.physician','Operating Physician'), 'admission_reason' : fields.many2one ('medical.pathology','Reason for Admission', help="Reason for 
Admission"), 'bed' : fields.many2one ('medical.hospital.bed','Hospital Bed'), 'nursing_plan' : fields.text ('Nursing Plan'), 'discharge_plan' : fields.text ('Discharge Plan'), 'info' : fields.text ('Extra Info'), 'state': fields.selection((('free','Free'),('cancelled','Cancelled'),('confirmed','Confirmed'),('hospitalized','Hospitalized')),'Status'), } _defaults = { 'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'medical.inpatient.registration'), 'state': lambda *a : 'free' } _sql_constraints = [ ('name_uniq', 'unique (name)', 'The Registration code already exists')] inpatient_registration () class appointment (osv.osv): _name = "medical.appointment" _inherit = "medical.appointment" _columns = { 'inpatient_registration_code' : fields.many2one ('medical.inpatient.registration','Inpatient Registration',help="Enter the patient hospitalization code"), } appointment () # Add the patient status to the partner class patient_data (osv.osv): _name = "medical.patient" _inherit = "medical.patient" _description = "Patient related information" def _get_patient_status (self, cr, uid, ids,name, arg, context={}): def get_hospitalization_status (patient_dbid): cr.execute ( 'select state from medical_inpatient_registration where patient=%s and state=\'hospitalized\'', (patient_dbid,)) try: patient_status = str(cr.fetchone()[0]) except: patient_status = "outpatient" return patient_status result={} # Get the patient (DB) id to be used in the search on the medical inpatient registration table lookup for patient_data in self.browse(cr, uid, ids, context=context): patient_dbid = patient_data.id if patient_dbid: result[patient_data.id] = get_hospitalization_status (patient_dbid) return result _columns = { 'patient_status': fields.function(_get_patient_status, method=True, type='char', string='Hospitalization Status', help="Shows whether the patient is hospitalized"), } patient_data ()
agpl-3.0
4,914,711,355,959,508,000
33.198347
302
0.697438
false
3.278922
false
false
false
rbwinslow/hq
hq/hquery/functions/core_node_set.py
1
1395
from hq.hquery.evaluation_error import HqueryEvaluationError from hq.hquery.expression_context import peek_context, get_context_node from hq.hquery.functions.core_number import number from hq.hquery.object_type import string_value, is_sequence, object_type_name from hq.hquery.sequences import make_node_set from hq.soup_util import root_tag_from_any_tag, is_tag_node exports = ('count', 'id', 'last', 'name', 'position') def count(sequence): HqueryEvaluationError.must_be_node_set_or_sequence(sequence) return number(len(sequence)) def id(ids): if is_sequence(ids): ids = set(string_value(item) for item in ids) else: ids = set(string_value(ids).split()) result = [] for node in root_tag_from_any_tag(get_context_node()).descendants: if is_tag_node(node) and 'id' in node.attrs and node['id'] in ids: result.append(node) return make_node_set(result) def last(): return number(peek_context().size) def name(*args): if len(args) > 0: value = args[0] if is_sequence(value): value = value[0] if is_tag_node(value): return value.name else: return '' else: node = get_context_node() if is_tag_node(node): return node.name else: return '' def position(): return number(peek_context().position)
mit
-1,951,695,412,253,512,700
26.9
77
0.636559
false
3.419118
false
false
false
sernst/cauldron
cauldron/runner/__init__.py
1
7585
import glob import importlib import os import sys import typing import cauldron from cauldron import environ from cauldron.environ import Response from cauldron.runner import source from cauldron.session.projects import Project from cauldron.session.projects import ProjectStep def add_library_path(path: str) -> bool: """ Adds the path to the Python system path if not already added and the path exists. :param path: The path to add to the system paths :return: Whether or not the path was added. Only returns False if the path was not added because it doesn't exist """ if not path or not os.path.exists(path): return False if path not in sys.path: sys.path.append(path) return True def remove_library_path(path: str) -> bool: """ Removes the path from the Python system path if it is found in the system paths. :param path: The path to remove from the system paths :return: Whether or not the path was removed. """ if path in sys.path: sys.path.remove(path) return True return False def initialize(project: typing.Union[str, Project]): """ :param project: :return: """ if isinstance(project, str): project = Project(source_directory=project) # When opening a project, if there are any steps in the project, the # first step should be selected by default. has_selected_step = any([s.is_selected for s in project.steps]) if not has_selected_step and project.steps: project.steps[0].is_selected = True cauldron.project.load(project) return project def close(): """...""" os.chdir(environ.configs.fetch('directory', os.path.expanduser('~'))) project = cauldron.project.internal_project if not project: return False [remove_library_path(path) for path in project.library_directories] remove_library_path(project.source_directory) cauldron.project.unload() return True def _reload_module(path: str, library_directory: str): """ Reloads the module at the specified path within the package rooted at the given library_directory. 
""" path = os.path.dirname(path) if path.endswith('__init__.py') else path start_index = len(library_directory) + 1 end_index = -3 if path.endswith('.py') else None package_path = path[start_index:end_index] module = sys.modules.get(package_path.replace(os.sep, '.')) return importlib.reload(module) if module is not None else None def _reload_library(directory: str) -> list: """ Carries out a reload action on the specified root library directory that is assumed to contain a python local package with potential module changes. :param directory: Root directory of the library package to reload. """ if not add_library_path(directory): # If the library wasn't added because it doesn't exist, remove it # in case the directory has recently been deleted and then return # an empty result remove_library_path(directory) return [] glob_path = os.path.join(os.path.realpath(directory), '**', '*.py') # Force file paths to be sorted by hierarchy from deepest to shallowest, # which ensures that changes are reloaded by children before any dependencies # are encountered in parents. found_file_paths = sorted( glob.glob(glob_path, recursive=True), key=lambda p: "{}--{}".format(str(p.count(os.sep)).zfill(4), p), reverse=True, ) # Iterate over imports multiple times in case there's a failed import as the # result of dependency changes between multiple files. However, after 20 # iterations give up and fail. outputs = [] last_error = None for i in range(20): for path in [*found_file_paths]: try: outputs.append(_reload_module(path, directory)) # Remove the path if the reload operation succeeded. found_file_paths.remove(path) except Exception as error: # Ignore failures and hope they can be resolved in another pass. last_error = error if not found_file_paths: # If there's nothing left to reload, return the reloaded modules. return outputs # If 20 attempts to reload modules fail, it's time to error out. raise RuntimeError( "Failed to reload modified modules. 
This could be due to a circular import." ) from last_error def reload_libraries(library_directories: list = None) -> list: """ Reload the libraries stored in the project's local and shared library directories to ensure that any modifications since the previous load/reload have been refreshed. """ directories = library_directories or [] project = cauldron.project.get_internal_project() if project: directories += project.library_directories if not directories: return [] return [ reloaded_module for directory in directories for reloaded_module in _reload_library(directory) if reloaded_module is not None ] def section( response: Response, project: typing.Union[Project, None], starting: ProjectStep = None, limit: int = 1, force: bool = False, skips: typing.List[ProjectStep] = None ) -> list: """ :param response: :param project: :param starting: :param limit: :param force: :param skips: Steps that should be skipped while running this section :return: """ limit = max(1, limit) if project is None: project = cauldron.project.get_internal_project() starting_index = 0 if starting: starting_index = project.steps.index(starting) count = 0 steps_run = [] for ps in project.steps: if count >= limit: break if ps.index < starting_index: continue if skips and ps in skips: continue if not force and count == 0 and not ps.is_dirty(): continue steps_run.append(ps) if not source.run_step(response, project, ps, force=force): return steps_run count += 1 return steps_run def complete( response: Response, project: typing.Union[Project, None], starting: ProjectStep = None, force: bool = False, limit: int = -1 ) -> list: """ Runs the entire project, writes the results files, and returns the URL to the report file :param response: :param project: :param starting: :param force: :param limit: :return: Local URL to the report path """ if project is None: project = cauldron.project.get_internal_project() starting_index = 0 if starting: starting_index = project.steps.index(starting) count = 0 
steps_run = [] for ps in project.steps: if 0 < limit <= count: break if ps.index < starting_index: continue if not force and not ps.is_dirty(): if limit < 1: environ.log( '[{}]: Nothing to update'.format(ps.definition.name) ) continue count += 1 steps_run.append(ps) success = source.run_step(response, project, ps, force=True) if not success or project.stop_condition.halt: return steps_run return steps_run
mit
-1,016,467,438,664,873,500
26.18638
84
0.628345
false
4.251682
false
false
false
elimence/edx-platform
lms/djangoapps/courseware/features/video.py
1
1147
#pylint: disable=C0111 from lettuce import world, step from lettuce.django import django_url from common import TEST_COURSE_NAME, TEST_SECTION_NAME, i_am_registered_for_the_course, section_location ############### ACTIONS #################### @step('when I view the video it has autoplay enabled') def does_autoplay(step): assert(world.css_find('.video')[0]['data-autoplay'] == 'True') @step('the course has a Video component') def view_video(step): coursename = TEST_COURSE_NAME.replace(' ', '_') i_am_registered_for_the_course(step, coursename) # Make sure we have a video add_video_to_course(coursename) chapter_name = TEST_SECTION_NAME.replace(" ", "_") section_name = chapter_name url = django_url('/courses/edx/Test_Course/Test_Course/courseware/%s/%s' % (chapter_name, section_name)) world.browser.visit(url) def add_video_to_course(course): template_name = 'i4x://edx/templates/video/default' world.ItemFactory.create(parent_location=section_location(course), template=template_name, display_name='Video')
agpl-3.0
-8,000,588,326,078,077,000
32.735294
104
0.644289
false
3.584375
true
false
false
atealxt/web-crawler
src/com/zhyfoundry/spider/impl/s2/Spider2.py
1
3477
from com.zhyfoundry.spider import Configuration from com.zhyfoundry.spider.impl import BaseSpider from com.zhyfoundry.spider.impl.CRM import CRM from com.zhyfoundry.spider.impl.s2 import Fetcher2, Parser2, Tracker2 import time import traceback class Spider2(BaseSpider.BaseSpider): def __init__(self): super(Spider2, self).__init__() def crawl(self, trackingTimestamp, keyword = None): config = Configuration.Configuration.readFromFile(); countLimit = 65535 if config.maxFetchCount == -1 else config.maxFetchCount urlsToFetch = self.fetchURL(trackingTimestamp, countLimit) if len(urlsToFetch) == 0: print 'No URL to fetch.' return fetcher = Fetcher2.Fetcher2() parser = Parser2.Parser2() count = 0 tracker = Tracker2.Tracker2() for url in urlsToFetch: if count >= countLimit: print 'Fetch count limitation reached: ' + str(countLimit) break; count += 1; print 'URL to fetch: ' + str(url) html = fetcher.fetch(url.url, config) if parser.needLogin(html): print 'Need to Login' html = fetcher.login(self.username, self.password) if parser.needLogin(html): raise Exception("Login fail!") print 'Login success!' 
html = fetcher.fetch(url.url, config) if parser.isDetailPage(html): parseResult = parser.parse(html, url.url, config) if parseResult.content != None: try: CRM.saveEnterprise(parseResult.content); except: print traceback.format_exc() tracker.updateTrackTime(url.id) tracker.track(parseResult.newSeeds, url.id, self.id, None) elif keyword != None: print 'Search term: ' + keyword html = fetcher.search(keyword) tracker.updateTrackTime(url.id) page = 1 while (True): parseSearchResult = parser.parseSearchResult(html) tracker.track(parseSearchResult.newSeeds, url.id, self.id, None) if parseSearchResult.newSeedRightNow == None or count >= countLimit: print 'parseSearchResult.newSeedRightNow == None: ' + str(parseSearchResult.newSeedRightNow == None) print 'count >= countLimit: ' + str(count >= countLimit) break page += 1 print 'Will crawl page ' + str(page) + ': ' + parseSearchResult.newSeedRightNow['href'] print 'Sleep ' + str(config.interval) + ' second.' time.sleep(config.interval) html = fetcher.fetch(parseSearchResult.newSeedRightNow['href'], config) if html == None: retryTimes = 0 while (retryTimes < config.maxRetryTimes and html == None): retryTimes += 1 print 'Retry ' + str(retryTimes) html = fetcher.fetch(parseSearchResult.newSeedRightNow['href'], config) count += 1 print 'Sleep ' + str(config.interval) + ' second.' time.sleep(config.interval)
gpl-2.0
7,434,002,309,375,221,000
44.155844
124
0.542134
false
4.515584
true
false
false
kobe25/gasistafelice
gasistafelice/gf/base/models.py
3
48753
""" This is the base model for Gasista Felice. It includes common data on which all (or almost all) other applications rely on. """ from django.db import models from django.utils.translation import ugettext, ugettext_lazy as _ from django.contrib.auth.models import User from django.core.exceptions import ImproperlyConfigured, ValidationError from django.db.models import permalink from django_comments.models import Comment from django.contrib.contenttypes.models import ContentType from django.conf import settings from django.dispatch import receiver from django.db.models.signals import post_save, pre_save from workflows.models import Workflow, Transition, State #from history.models import HistoricalRecords from consts import GAS_REFERRER_SUPPLIER from flexi_auth.models import PermissionBase # mix-in class for permissions management from flexi_auth.models import ParamRole, Param from flexi_auth.exceptions import WrongPermissionCheck from flexi_auth.utils import get_parametric_roles from flexi_auth.models import PrincipalParamRoleRelation from simple_accounting.models import economic_subject, AccountingDescriptor, LedgerEntry, account_type from lib import ClassProperty, unordered_uniq from gf.base import const from gf.base.utils import get_resource_icon_path from gf.base.accounting import PersonAccountingProxy from workflows.utils import do_transition import os import logging import geocoder log = logging.getLogger(__name__) class Resource(object): """Base class for project fundamental objects. This is a basic mix-in class used to factor out data/behaviours common to the majority of model classes in the project's applications. 
Resource API is composed of: * Basic methods and properties: * basic type and resource string representation * caching operations * Relational properties: * how the resource relates to other resources """ # Attribute used to make a list of confidential lists confidential_fields = () # Attribute used to cache data volatile_fields = [] #----------------------------------------- # Basic properites #----------------------------------------- @ClassProperty @classmethod def resource_type(cls): """String representation of resource type""" return cls.__name__.lower() @property def urn(self): """Unique resource name""" return '%s/%s' % (self.resource_type, self.pk) @property def ancestors(self): """List of ancestors of a resource. This is te list of parents from root to the resource itself. It is used p.e. to display navigation breadcrumbs. You SHOULD NOT implement it in subclasses """ if self.parent: return self.parent.ancestors + [self.parent] else: return [] @property def parent(self): """Identifies resource which includes this resource. Stated that there can be only one parent for a resource, (no multiple parents allowed), setting this attribute makes the resource confident of who includes itself. This attribute is then used to make the list of `:ref:ancestors`. You MUST implement it in subclasses if they have parent. 
""" return None def do_transition(self, transition, user): return do_transition(self, transition, user) @property def allnotes(self): ctype = ContentType.objects.get_for_model(self.__class__) notes = Comment.objects.filter(object_pk=self.pk, content_type=ctype).order_by('-submit_date') return notes @permalink def get_absolute_url(self): return ('rest.views.resource_page', (), { 'resource_type' : self.resource_type, 'resource_id' : self.pk }) def get_absolute_url_page(self): return self.get_absolute_url().replace('/rest', '/rest/#rest') def as_dict(self): return { 'name': unicode(self), 'urn' : self.urn, } #-- Referrers API --# @property def referrers(self): """Returns User QuerySet bound to resource""" raise NotImplementedError("class: %s method: referrers" % self.__class__.__name__) @property def referrer(self): """Return User bound to resource""" raise NotImplementedError("class: %s method: referrer" % self.__class__.__name__) @property def referrers_people(self): """Returns Person related to referrers QuerySet""" return Person.objects.filter(user__in=self.referrers) @property def info_people(self): """Returns Person to contact for info QuerySet""" raise NotImplementedError("class: %s method: info_people" % self.__class__.__name__) #-- History API --# # Requires that an history manager exists for the resource # TODO: encapsulate it in HistoryResource class @property def created_on(self): """Returns datetime instance of when the instance has been created.""" # There could be the case that a deleted id is reused, so, do not use .get method self_as_of_creation = \ self._default_history.filter(id=self.pk, history_type="+")[0] return self_as_of_creation.history_date @property def created_by(self): """Returns user that created the resource.""" #COMMENT fero: disabled user in history! 
return User.objects.none() # There could be the case that a deleted id is reused, so, do not use .get method self_as_of_creation = \ self._default_history.filter(id=self.pk, history_type="+")[0] return self_as_of_creation.history_user @property def created_by_person(self): """Returns person bound to the user that created the resource.""" u = self.created_by if u is not None: return u.person return None @property def last_update_by(self): """Returns user that has made the last update to the resource.""" #COMMENT fero: disabled user in history! return User.objects.none() # There could be the case that a deleted id is reused, so, do not use .get method try: self_as_of_last_update = \ self._default_history.filter(id=self.pk, history_type="~")[0] except IndexError: # This object has never been update return None else: return self_as_of_last_update.history_user @property def last_update_by_person(self): """Returns person bound to the user that made the last update the resource.""" u = self.last_update_by if u is not None: return u.person return None @property def updaters(self): """Returns User QuerySet of who has updated the resource.""" self_updaters = unordered_uniq( self._default_history.filter(id=self.pk, history_type="~").values_list('history_user') ) return User.objects.filter(pk__in=map(lambda x: x[0].pk, self_updaters)) #------------------------------------ # Basic properties: cache management #------------------------------------ def save_checkdata_in_cache(self): key = Resource.cache_key(self.pk) data_to_cache = {} for n in self.volatile_fields: data_to_cache[n] = getattr(self, n) if not data_to_cache: return False try: pstore.savedata(key, data_to_cache) except Exception, e: raise return True def load_checkdata_from_cache(self): if not self.volatile_fields: return False key = Resource.cache_key(self.pk) data = pstore.getalldata(key, self.volatile_fields) for n in self.volatile_fields: if data.has_key(n): setattr(self, n, data[n]) return True @classmethod def 
cache_key(cls, resource_id): #TODO fero CHECK #Pay attention because it is connected to class return "%s/%s" % (cls.resource_type, resource_id) #--------------------------------------------- # Relational properties: # not all must be implemented by Resource subclasses # but just only that makes sense #--------------------------------------------- @property def des_list(self): """Return DES instances bound to the resource""" raise NotImplementedError("class: %s method: des_list" % self.__class__.__name__) @property def des(self): """Return the DES instance bound to the resource""" from des.models import Siteattr return Siteattr.get_site() raise NotImplementedError("class: %s method: des" % self.__class__.__name__) @property def gas_list(self): """Return GAS list bound to resource""" raise NotImplementedError("class: %s method: gas_list" % self.__class__.__name__) @property def gas(self): """Return GAS bound to resource""" raise NotImplementedError("class: %s method: gas" % self.__class__.__name__) def categories(self): """Return ProductCategory bound to resource""" raise NotImplementedError("class: %s method: categories" % self.__class__.__name__) def category(self): """Return ProductCategory bound to resource""" raise NotImplementedError("class: %s method: category" % self.__class__.__name__) @property def persons(self): """Return persons bound to resource""" raise NotImplementedError("class: %s method: persons" % self.__class__.__name__) @property def person(self): """Return person bound to resource""" raise NotImplementedError("class: %s method: person" % self.__class__.__name__) @property def gasmembers(self): """Return GAS members bound to resource""" raise NotImplementedError("class: %s method: gasmembers" % self.__class__.__name__) @property def gasmember(self): """Return GAS member bound to resource""" raise NotImplementedError("class: %s method: gasmember" % self.__class__.__name__) @property def pacts(self): """Return pacts bound to resource""" raise 
NotImplementedError("class: %s method: pacts" % self.__class__.__name__) @property def pact(self): """Return pact bound to resource""" raise NotImplementedError("class: %s method: pact" % self.__class__.__name__) @property def suppliers(self): """Return suppliers bound to resource""" raise NotImplementedError("class: %s method: suppliers" % self.__class__.__name__) @property def supplier(self): """Return supplier bound to resource""" raise NotImplementedError("class: %s method: supplier" % self.__class__.__name__) @property def orders(self): """Return orders bound to resource""" raise NotImplementedError("class: %s method: orders" % self.__class__.__name__) @property def order(self): """Return order bound to resource""" raise NotImplementedError("class: %s method: order" % self.__class__.__name__) @property def deliveries(self): """Return deliveries bound to resource""" raise NotImplementedError("class: %s method: deliveries" % self.__class__.__name__) @property def delivery(self): """Return delivery bound to resource""" raise NotImplementedError("class: %s method: delivery" % self.__class__.__name__) @property def withdrawals(self): """Return withdrawals bound to resource""" raise NotImplementedError("class: %s method: withdrawals" % self.__class__.__name__) @property def withdrawal(self): """Return withdrawal bound to resource""" raise NotImplementedError("class: %s method: withdrawal" % self.__class__.__name__) @property def products(self): """Return products bound to resource""" raise NotImplementedError("class: %s method: products" % self.__class__.__name__) @property def product(self): """Return product bound to resource""" raise NotImplementedError("class: %s method: product" % self.__class__.__name__) @property def stocks(self): """Return SupplierStock list bound to resource""" raise NotImplementedError("class: %s method: stocks" % self.__class__.__name__) @property def stock(self): """Return SupplierStock bound to resource""" raise 
NotImplementedError("class: %s method: stock" % self.__class__.__name__) @property def orderable_products(self): """Return GASSupplierOrderProduct querySet for orders bound to resource""" raise NotImplementedError("class: %s method: orderable_products" % self.__class__.__name__) @property def ordered_products(self): """Return GASMemberOrder querySet for orders bound to resource""" raise NotImplementedError("class: %s method: ordered_products" % self.__class__.__name__) @property def basket(self): """Return GASMemberOrder querySet for open orders bound to resource""" raise NotImplementedError("class: %s method: basket" % self.__class__.__name__) #-- Contacts --# @property def contacts(self): """Contact QuerySet bound to the resource. You SHOULD override it when needed """ return self.contact_set.all() @property def email_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.contacts.filter(flavour=const.EMAIL).values_list('value')))) @property def phone_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.contacts.filter(flavour=const.PHONE).values_list('value')))) @property def preferred_email_address(self): """The email address, where we should write if we would know more info on the resource. It is not necessarily bound to a person. NOTE that it could be even a list of addresses following syntax in RFC 5322 and RFC 5321, or simply http://en.wikipedia.org/wiki/Email_address#Syntax :) Usually you SHOULD NOT NEED TO OVERRIDE IT in subclasses """ if settings.EMAIL_DEBUG: return settings.EMAIL_DEBUG_ADDR else: return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_email_contacts.values_list('value')))) @property def preferred_email_contacts(self): """Email Contacts, where we should write if we would know more info on the resource. It is not necessarily bound to a person. 
Usually you SHOULD NOT NEED TO OVERRIDE IT in subclasses """ return self.contacts.filter(flavour=const.EMAIL, is_preferred=True) or \ self.contacts.filter(flavour=const.EMAIL) @property def preferred_phone_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_phone_contacts.values_list('value')))) @property def preferred_phone_contacts(self): return self.contacts.filter(flavour=const.PHONE, is_preferred=True) or \ self.contacts.filter(flavour=const.PHONE) # @property # def preferred_www_address(self): # return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_www_contacts.values_list('value')))) # @property # def preferred_www_contacts(self): # return self.contacts.filter(flavour=const.WWW, is_preferred=True) or \ # self.contacts.filter(flavour=const.WWW) @property def preferred_fax_address(self): return ", ".join(unordered_uniq(map(lambda x: x[0], self.preferred_fax_contacts.values_list('value')))) @property def preferred_fax_contacts(self): return self.contacts.filter(flavour=const.FAX, is_preferred=True) or \ self.contacts.filter(flavour=const.FAX) @property def icon(self): "Returns default icon for resource""" icon = models.ImageField(upload_to="fake") basedir = os.path.join(settings.STATIC_URL, "nui", "img", settings.THEME) icon.url = os.path.join(basedir, "%s%s.%s" % (self.resource_type, "128x128", "png")) return icon #TODO CHECK if these methods SHOULD be removed from Resource API # because they are tied only to a specific resource. Leave commented now. 
# If you need them in a specific resource, implement in it # @property # def gasstocks(self): # """Return GASSupplierStock list bound to resource""" # raise NotImplementedError # # @property # def gasstock(self): # """Return GASSupplierStock bound to resource""" # raise NotImplementedError # # @property # def units(self): # """Return unit measure list bound to resource""" # raise NotImplementedError # # @property # def unit(self): # """Return unit measure bound to resource""" # raise NotImplementedError #--------------------------# @property def economic_movements(self): """Return accounting LedgerEntry instances.""" raise NotImplementedError @property def balance(self): """Return an economic state bound to resource (DES, GASMember, GAS or Supplier through ) Accounting sold for this ressource """ acc_tot = self.person.accounting.system['/wallet'].balance return acc_tot #------------------------------------------------------------------------------ class PermissionResource(Resource, PermissionBase): """ Just a convenience for classes inheriting both from `Resource` and `PermissionBase` """ def _get_roles(self): """ Return a QuerySet containing all the parametric roles which have been assigned to this Resource. """ # Roles MUST BE a property because roles are bound to a User # with `add_principal()` and not directly to a GAS member # costruct the result set by joining partial QuerySets roles = [] ctype = ContentType.objects.get_for_model(self) params = Param.objects.filter(content_type=ctype, object_id=self.pk) # get all parametric roles assigned to the Resource; return ParamRole.objects.filter(param_set__in=params) roles = property(_get_roles) @economic_subject class Person(models.Model, PermissionResource): """ A Person is an anagraphic record of a human being. It can be a User or not. 
""" name = models.CharField(max_length=128,verbose_name=_('name')) surname = models.CharField(max_length=128,verbose_name=_('surname')) display_name = models.CharField(max_length=128, blank=True, verbose_name=_('display name')) # Leave here ssn, but do not display it ssn = models.CharField(max_length=128, unique=True, editable=False, blank=True, null=True, help_text=_('Write your social security number here'),verbose_name=_('Social Security Number')) contact_set = models.ManyToManyField('Contact', null=True, blank=True,verbose_name=_('contacts')) user = models.OneToOneField(User, null=True, blank=True, verbose_name=_('User'), help_text=_("bind to a user if you want to give this person an access to the platform") ) address = models.ForeignKey('Place', null=True, blank=True,verbose_name=_('main address')) avatar = models.ImageField(upload_to=get_resource_icon_path, null=True, blank=True, verbose_name=_('avatar')) website = models.URLField(blank=True, verbose_name=_("web site")) accounting = AccountingDescriptor(PersonAccountingProxy) # #history = HistoricalRecords() class Meta: verbose_name = _("person") verbose_name_plural = _("people") ordering = ('display_name',) db_table = 'base_person' def __unicode__(self): rv = self.display_name if not rv: # If display name is not provided --> save display name rv = u'%(name)s %(surname)s' % {'name' : self.name, 'surname': self.surname} self.display_name = rv self.save() # Removed city visualization following Orlando's and Dominique's agreements # WAS: if self.city: # WAS: rv += u" (%s)" % self.city return rv @property def report_name(self): return u"%(name)s %(surname)s" % {'name' : self.name, 'surname': self.surname} def clean(self): if not self.user and self.gasmembers.count(): raise ValidationError(_("A person without user cannot be a GAS member")) self.name = self.name.strip().lower().capitalize() self.surname = self.surname.strip().lower().capitalize() self.display_name = self.display_name.strip() if not self.ssn: 
self.ssn = None else: self.ssn = self.ssn.strip().upper() return super(Person, self).clean() @property def uid(self): """ A unique ID (an ASCII string) for ``Person`` model instances. """ return self.urn.replace('/','-') @property def parent(self): return self.des @property def icon(self): return self.avatar or super(Person, self).icon ## START Resource API # Note that all the following methods return a QuerySet @property def persons(self): return Person.objects.filter(pk=self.pk) @property def person(self): return self @property def gasmembers(self): #TODO UNITTEST """ GAS members associated to this person; to each of them corresponds a membership of this person in a GAS. """ return self.gasmember_set.all() @property def gas_list(self): #TODO UNITTEST """ All GAS this person belongs to (remember that a person may be a member of more than one GAS). """ from gf.gas.models import GAS gas_pks = set(member.gas.pk for member in self.gasmembers) return GAS.objects.filter(pk__in=gas_pks) @property def des_list(self): #TODO UNITTEST """ All DESs this person belongs to (either as a member of one or more GAS or as a referrer for one or more suppliers in the DES). 
""" from des.models import DES des_set = set([gas.des for gas in self.gas_list]) return DES.objects.filter(pk__in=[obj.pk for obj in des_set]) @property def des(self): from des.models import Siteattr return Siteattr.get_site() @property def pacts(self): """ A person is related to: pacts signed with a GAS he/she belongs to """ from gf.gas.models import GASSupplierSolidalPact # initialize the return QuerySet qs = GASSupplierSolidalPact.objects.none() #add the suppliers who have signed a pact with a GAS this person belongs to for gas in self.gas_list: qs = qs | gas.pacts return qs @property def suppliers(self): #TODO UNITTEST """ A person is related to: 1) suppliers for which he/she is a referrer 2) suppliers who have signed a pact with a GAS he/she belongs to """ from gf.supplier.models import Supplier # initialize the return QuerySet qs = Supplier.objects.none() #add the suppliers who have signed a pact with a GAS this person belongs to for gas in self.gas_list: qs = qs | gas.suppliers # add the suppliers for which this person is an agent referred_set = set([sr.supplier for sr in self.supplieragent_set.all()]) qs = qs | Supplier.objects.filter(pk__in=[obj.pk for obj in referred_set]) return qs @property def orders(self): #TODO UNITTEST """ A person is related to: 1) supplier orders opened by a GAS he/she belongs to 2) supplier orders for which he/she is a referrer 3) order to suppliers for which he/she is a referrer """ from gf.gas.models import GASSupplierOrder # initialize the return QuerySet qs = GASSupplierOrder.objects.none() #add the supplier orders opened by a GAS he/she belongs to for gas in self.gas_list: qs = qs | gas.orders return qs @property def deliveries(self): #TODO UNITTEST """ A person is related to: 1) delivery appointments for which this person is a referrer 2) delivery appointments associated with a GAS he/she belongs to """ from gf.gas.models import Delivery # initialize the return QuerySet qs = Delivery.objects.none() # add delivery 
appointments for which this person is a referrer for member in self.gasmembers: qs = qs | member.delivery_set.all() # add delivery appointments associated with a GAS he/she belongs to for gas in self.gas_list: qs = qs | gas.deliveries return qs @property def withdrawals(self): #TODO UNITTEST """ A person is related to: 1) withdrawal appointments for which this person is a referrer 2) withdrawal appointments associated with a GAS he/she belongs to """ from gf.gas.models import Withdrawal # initialize the return QuerySet qs = Withdrawal.objects.none() # add withdrawal appointments for which this person is a referrer for member in self.gasmembers: qs = qs | member.withdrawal_set.all() # add withdrawal appointments associated with a GAS he/she belongs to for gas in self.gas_list: qs = qs | gas.withdrawals return qs ## END Resource API @property def city(self): if self.address: return self.address.city else: return None def setup_accounting(self): """ Accounting hierarchy for Person. . ROOT (/) |----------- wallet [A] +----------- incomes [P,I] + | +--- other (private order, correction, deposit) +----------- expenses [P,E] + +--- other (correction, donation, ) """ self.subject.init_accounting_system() # create a generic asset-type account (a sort of "virtual wallet") system = self.accounting.system system.get_or_create_account( parent_path='/', name='wallet', kind=account_type.asset ) # Expenses and incomes of other kind... system.get_or_create_account( parent_path='/expenses', name='other', kind=account_type.expense ) system.get_or_create_account( parent_path='/incomes', name='other', kind=account_type.income ) #----------------- Authorization API ------------------------# # Table-level CREATE permission @classmethod def can_create(cls, user, context): # Who can create a new Person in a DES ? 
# * DES administrators allowed_users = User.objects.none() try: des = context['site'] except KeyError: return User.objects.none() #raise WrongPermissionCheck('CREATE', cls, context) else: allowed_users = des.gas_tech_referrers return user in allowed_users # Row-level EDIT permission def can_edit(self, user, context): # Who can edit a Person in a DES ? # * the person itself # * administrators of one of the DESs this person belongs to des_admins = [] for des in self.des_list: des_admins += des.admins allowed_users = list(des_admins) + [self.user] return user in allowed_users # Row-level DELETE permission def can_delete(self, user, context): # Who can delete a Person from the system ? allowed_users = [self.user] return user in allowed_users #-----------------------------------------------------# @property def username(self): if self.user: return self.user.username else: return ugettext("has not an account in the system") display_fields = ( name, surname, models.CharField(name="city", verbose_name=_("City")), models.CharField(name="username", verbose_name=_("Username")), #DO NOT SHOW now models.CharField(name="email_address", verbose_name=_("Email")), #DO NOT SHOW now models.CharField(name="phone_address", verbose_name=_("Phone")), address, ) def has_been_member(self, gas): """ Return ``True`` if this person is bound to the GAS ``gas`` (GASMember exist whether it is suspended or not), ``False`` otherwise. If ``gas`` is not a ``GAS`` model instance, raise ``TypeError``. """ from gf.gas.models import GAS, GASMember if not isinstance(gas, GAS): raise TypeError(_(u"GAS membership can only be tested against a GAS model instance")) return bool(GASMember.all_objects.filter(gas=gas, person=self).count()) def is_member(self, gas): """ Return ``True`` if this person is an active (not suspended) member of GAS ``gas``, ``False`` otherwise. If ``gas`` is not a ``GAS`` model instance, raise ``TypeError``. 
""" from gf.gas.models import GAS if not isinstance(gas, GAS): raise TypeError(_(u"GAS membership can only be tested against a GAS model instance")) return gas in [member.gas for member in self.gasmembers] @property def full_name(self): return self.name + self.surname def save(self, *args, **kw): if not self.display_name: self.display_name = u"%(name)s %(surname)s" % {'name' : self.name, 'surname': self.surname} super(Person, self).save(*args, **kw) class Contact(models.Model): """If is a contact, just a contact email or phone""" flavour = models.CharField(max_length=32, choices=const.CONTACT_CHOICES, default=const.EMAIL,verbose_name=_('flavour')) value = models.CharField(max_length=256,verbose_name=_('value')) is_preferred = models.BooleanField(default=False,verbose_name=_('preferred')) description = models.CharField(max_length=128, blank=True, default='',verbose_name=_('description')) ##history = HistoricalRecords() class Meta: verbose_name = _("contact") verbose_name_plural = _("contacts") db_table = 'base_contact' def __unicode__(self): return u"%(t)s: %(v)s" % {'t': self.flavour, 'v': self.value} def clean(self): self.flavour = self.flavour.strip() if self.flavour not in map(lambda x: x[0], const.CONTACT_CHOICES): raise ValidationError(_("Contact flavour MUST be one of %s" % map(lambda x: x[0], const.CONTACT_CHOICES))) self.value = self.value.strip() self.description = self.description.strip() return super(Contact, self).clean() class Place(models.Model, PermissionResource): """Places should be managed as separate entities for various reasons: * among the entities arising in the description of GAS' activities, there are several being places or involving places, so abstracting this information away seems a good thing; * in the context of multi-GAS (retina) orders, multiple delivery and/or withdrawal locations can be present. 
""" name = models.CharField(max_length=128, blank=True, help_text=_("You can avoid to specify a name if you specify an address"),verbose_name=_('name')) description = models.TextField(blank=True,verbose_name=_('description')) # QUESTION: add place type from CHOICE (HOME, WORK, HEADQUARTER, WITHDRAWAL...) # ANSWER: no place type here. It is just a point in the map address = models.CharField(max_length=128, blank=True,verbose_name=_('address')) #zipcode as a string: see http://stackoverflow.com/questions/747802/integer-vs-string-in-database zipcode = models.CharField(verbose_name=_("Zip code"), max_length=128, blank=True) city = models.CharField(max_length=128,verbose_name=_('city')) province = models.CharField(max_length=2, help_text=_("Insert the province code here (max 2 char)"),verbose_name=_('province')) #Geolocation: do not use GeoDjango PointField here. #We can make a separate geo application maybe in future lon = models.FloatField(null=True, blank=True,verbose_name=_('lon')) lat = models.FloatField(null=True, blank=True,verbose_name=_('lat')) ##history = HistoricalRecords() class Meta: verbose_name = _("place") verbose_name_plural = _("places") ordering = ('name', 'address', 'city') db_table = 'base_place' def __unicode__(self): rv = u"" if self.name: rv += self.name + u" - " if self.address: rv += self.address + u", " if self.zipcode: rv += u"%s " % self.zipcode rv += self.city.lower().capitalize() if self.province: rv += u" (%s)" % self.province.upper() return rv # fetch coords from open street map def update_coords(self): addressString = self.zipcode + ' ' + self.city + ' ' + self.province + ' ' + self.address location = geocoder.osm(addressString) if location.status == 'OK': self.lon = location.lng self.lat = location.lat def clean(self): self.name = self.name.strip().lower().capitalize() self.address = self.address.strip().lower().capitalize() #TODO: we should compute city and province starting from zipcode using local_flavor in forms self.city = 
self.city.lower().capitalize() self.province = self.province.upper() self.zipcode = self.zipcode.strip() if self.zipcode: if settings.VALIDATE_NUMERICAL_ZIPCODES: try: int(self.zipcode) except ValueError: raise ValidationError(_("Wrong ZIP CODE provided")) self.description = self.description.strip() return super(Place, self).clean() def save(self, *args, **kw): #TODO: Copy-on-write model # a) check if an already existent place with the same full address exist and in that case force update # b) if we are updating a Place --> detach it from other stuff pointing to it and clone super(Place, self).save(*args, **kw) #----------------- Authorization API ------------------------# # Table-level CREATE permission @classmethod def can_create(cls, user, context): # Who can create a new Place in a DES ? # Everyone belongs to the DES try: des = context['site'] except KeyError: raise WrongPermissionCheck('CREATE', cls, context) else: # It's ok because only one DES is supported return not user.is_anonymous() # otherwhise it should be # return user in User.objects.filter(person__in=des.persons) # Row-level EDIT permission def can_edit(self, user, context): # Who can edit details of an existing place in a DES ? # (note that places can be shared among GASs) # * DES administrators # * User that created the place # * User who has updated it. How he can do it? # If a User try to create a new place with the same parameters # of an already existent one, he updates the place allowed_users = self.des.admins | self.created_by | self.updaters return user in allowed_users # Row-level DELETE permission def can_delete(self, user, context): # Who can delete an existing place from a DES ? # (note that places can be shared among GASs) # * DES administrators # * User that created the place # * User who has updated it. How he can do it? 
see can_edit above allowed_users = self.des.admins | self.created_by | self.updaters return user in allowed_users #-----------------------------------------------------# display_fields = ( name, description, address, zipcode, city, province ) # Generic workflow management class DefaultTransition(models.Model, PermissionResource): workflow = models.ForeignKey(Workflow, related_name="default_transition_set",verbose_name=_('workflow')) state = models.ForeignKey(State,verbose_name=_('state')) transition = models.ForeignKey(Transition,verbose_name=_('transition')) class Meta: verbose_name = _("default transition") verbose_name_plural = _("default transitions") db_table = 'base_defaulttransition' class WorkflowDefinition(object): """ This class encapsulates all the data and logic needed to create and setup a Workflow (as in the `django-workflows` app), including creation of States and Transitions, assignment of Transitions to States and specification of the initial state and the default Transition for each State. To setup a new Workflow, just specify the needed data in the declarative format described below, then call the `register_workflow` method. ## TODO: workflow declaration's specs go here. """ def __init__(self, workflow_name, state_list, transition_list, state_transition_map, initial_state, default_transitions): # stash the workflow specs for later use self.workflow_name = workflow_name self.state_list = state_list self.transition_list = transition_list self.state_transition_map = state_transition_map self.initial_state_name = initial_state self.default_transitions = default_transitions def register_workflow(self): # check workflow specifications for internal consistency; # return an informative error message to the user if the check fails try: self.check_workflow_specs() except ImproperlyConfigured, e: raise ImproperlyConfigured(_("Workflow specifications are not consistent.\n %s") % e) try: # Check for already existent workflow. 
Operation `register_workflow` is idempotent... Workflow.objects.get(name=self.workflow_name) except Workflow.DoesNotExist: # Initialize workflow self.workflow = Workflow.objects.create(name=self.workflow_name) ## create States objects self.states = {} # dictionary containing State objects for our Workflow for (key, name) in self.state_list: self.states[key] = State.objects.create(name=name, workflow=self.workflow) ## create Transition objects self.transitions = {} # dictionary containing Transition objects for the current Workflow for (key, transition_name, destination_name) in self.transition_list: dest_state = self.states[destination_name] self.transitions[key] = Transition.objects.create(name=transition_name, workflow=self.workflow, destination=dest_state) ## associate Transitions to States for (state_name, transition_name) in self.state_transition_map: log.debug("Workflow %(w)s, adding state=%(s)s transition=%(t)s" % { 'w' : self.workflow_name, 's' : state_name, 't' : transition_name, }) state = self.states[state_name] transition = self.transitions[transition_name] state.transitions.add(transition) ## set the initial State for the Workflow state = self.states[self.initial_state_name] self.workflow.initial_state = state self.workflow.save() ## define default Transitions for States in a Workflow, ## so we can suggest to end-users what the next "logical" State could be for (state_name, transition_name) in self.default_transitions: state = self.states[state_name] transition = self.transitions[transition_name] self.workflow.default_transition_set.add(DefaultTransition(state=state, transition=transition)) def check_workflow_specs(self): """Check the provided workflow specifications for internal consistency. Return True if the specs are fine, False otherwise. 
""" state_names = [key for (key, name) in self.state_list] transition_names = [key for (key, transition_name, destination_name) in self.transition_list] ## States have to be unique # TODO ## Transitions have to be unique # TODO ## a Transition must point to an existing State for (key, transition_name, destination_name) in self.transition_list: if destination_name not in state_names: raise ImproperlyConfigured("Transition %s points to the non-existent State %s" % (key, destination_name)) ## a Transition must be assigned to an existing State for (state_name, transition_name) in self.state_transition_map: if state_name not in state_names: raise ImproperlyConfigured("Transition %s can't be assigned to the non-existent State %s" % (transition_name, state_name)) ## initial State must exists if self.initial_state_name not in state_names: raise ImproperlyConfigured("Workflow %s: initial state %s must be included in state names %s" % (self.workflow_name, self.initial_state_name, state_names)) ## a default Transition for a State must exists and had to be previously assigned to that State for (state_name, transition_name) in self.default_transitions: if state_name not in state_names: raise ImproperlyConfigured("A default Transition can't be defined for the non-existent State %s" % state_name) elif transition_name not in transition_names: raise ImproperlyConfigured("The default Transition for the State %s can't be set to a non-existent Transitions %s" % (state_name, transition_name)) elif (state_name, transition_name) not in self.state_transition_map: raise ImproperlyConfigured("The default Transition for the State %s must be one of its valid Transitions" % state_name) #------------------------------------------------------------------------------- #This is an HACK used just because we need these users use parts of the web admin interface from consts import GAS_MEMBER , GAS_REFERRER_TECH, SUPPLIER_REFERRER from django.contrib.auth.models import Group, Permission # groups for 
users GROUP_TECHS = "techs" GROUP_SUPPLIERS = "suppliers" GROUP_REFERRER_SUPPLIERS = "gas_referrer_suppliers" GROUP_USERS = "users" GROUP_MEMBERS = "gasmembers" def init_perms_for_groups(): from gf.base.models import Person, Place, Contact from gf.gas.models import GAS, GASConfig, GASMember from gf.supplier.models import ( SupplierConfig, SupplierProductCategory, ProductCategory, SupplierStock, Product, Supplier ) from django.contrib.auth.models import User from django.contrib.auth import get_permission_codename g_techs = Group.objects.get(name=GROUP_TECHS) g_suppliers = Group.objects.get(name=GROUP_SUPPLIERS) g_referrers_suppliers = Group.objects.get(name=GROUP_REFERRER_SUPPLIERS) g_gasmembers = Group.objects.get(name=GROUP_MEMBERS) techs_perms_d = { Person : ('add', 'change', 'delete'), Place : ('add', 'change', 'delete'), Contact : ('add', 'change', 'delete'), GAS : ('change',), GASConfig : ('change',), SupplierConfig : ('change',), GASMember : ('add', 'change', 'delete'), SupplierProductCategory : ('add', 'change', 'delete'), ProductCategory : ('add', 'change', 'delete'), SupplierStock : ('add', 'change', 'delete'), Product : ('add', 'change', 'delete'), Supplier : ('add', 'change'), User : ('add', 'change',), # add User is important for Add GASMember Form! Leave it here now. 
TODO } supplier_perms_d = { Person : ('add', 'change'), Place : ('add', 'change'), Contact : ('add', 'change'), SupplierConfig : ('change',), SupplierProductCategory : ('add', 'change', 'delete'), SupplierStock : ('add', 'change', 'delete'), Product : ('add', 'change', 'delete'), Supplier : ('change',), } gas_referrer_supplier_perms_d = supplier_perms_d.copy() gas_referrer_supplier_perms_d.update({ Supplier : ('add', 'change'), }) gm_perms_d = { Person : ('change',), Place : ('add', 'change',), Contact : ('add', 'change',), } group_perms_d_tuples = ( (g_techs , techs_perms_d), (g_suppliers , supplier_perms_d), (g_referrers_suppliers , gas_referrer_supplier_perms_d), (g_gasmembers , gm_perms_d), ) for gr, perms_d in group_perms_d_tuples: for klass, actions in perms_d.items(): ctype = ContentType.objects.get_for_model(klass) for action in actions: codename = get_permission_codename(action, klass._meta) log.debug("Adding perm %s to group %s" % (codename, gr)) p = Permission.objects.get( content_type=ctype, codename=codename ) gr.permissions.add(p) def setup_data_handler(sender, instance, created, **kwargs): """ Ovverride temporarly for associating some groups to users This will be in use until some part of the interface use admin-interface. After this can be removed """ if created: # Check that groups exist. Create them the first time g_techs, created = Group.objects.get_or_create(name=GROUP_TECHS) g_suppliers, created = Group.objects.get_or_create(name=GROUP_SUPPLIERS) g_referrers_suppliers, created = Group.objects.get_or_create(name=GROUP_REFERRER_SUPPLIERS) g_gasmembers, created = Group.objects.get_or_create(name=GROUP_MEMBERS) if created: # Create all groups needed for this hack # Check only last... 
init_perms_for_groups() role_group_map = { GAS_MEMBER : g_gasmembers, GAS_REFERRER_SUPPLIER : g_referrers_suppliers, SUPPLIER_REFERRER : g_suppliers, GAS_REFERRER_TECH : g_techs, } # Set "is_staff" to access the admin inteface instance.user.is_staff = True instance.user.save() role_name = instance.role.role.name group = role_group_map.get(role_name) if group: try: instance.user.groups.add(group) except KeyError: log.debug("%s create cannot add %s's group %s(%s)" % (role_name, group, instance, instance.pk) ) # END hack #------------------------------------------------------------------------------- def validate(sender, instance, **kwargs): try: # `instance` is the model instance that has just been created instance.clean() except AttributeError: # sender model doesn't specify any sanitize operations, so just ignore the signal pass def setup_data(sender, instance, created, **kwargs): """ Setup proper data after a model instance is saved to the DB for the first time. This function just calls the `setup_data()` instance method of the sender model class (if defined); actual role-creation/setup logic is encapsulated there. """ if created: # Automatic data-setup should happen only at instance-creation time try: # `instance` is the model instance that has just been created instance.setup_data() except AttributeError: # sender model doesn't specify any data-related setup operations, so just ignore the signal pass # add `validate` function as a listener to the `pre_save` signal pre_save.connect(validate) # add `setup_data` function as a listener to the `post_save` signal post_save.connect(setup_data) post_save.connect(setup_data_handler, sender=PrincipalParamRoleRelation)
agpl-3.0
8,131,172,791,004,369,000
35.355705
190
0.618936
false
4.167992
false
false
false
bert9bert/statsmodels
statsmodels/sandbox/mcevaluate/arma.py
34
4556
import numpy as np from statsmodels.tsa.arima_process import arma_generate_sample from statsmodels.tsa.arma_mle import Arma #TODO: still refactoring problem with cov_x #copied from sandbox.tsa.arima.py def mcarma22(niter=10, nsample=1000, ar=None, ma=None, sig=0.5): '''run Monte Carlo for ARMA(2,2) DGP parameters currently hard coded also sample size `nsample` was not a self contained function, used instances from outer scope now corrected ''' #nsample = 1000 #ar = [1.0, 0, 0] if ar is None: ar = [1.0, -0.55, -0.1] #ma = [1.0, 0, 0] if ma is None: ma = [1.0, 0.3, 0.2] results = [] results_bse = [] for _ in range(niter): y2 = arma_generate_sample(ar,ma,nsample+1000, sig)[-nsample:] y2 -= y2.mean() arest2 = Arma(y2) rhohat2a, cov_x2a, infodict, mesg, ier = arest2.fit((2,2)) results.append(rhohat2a) err2a = arest2.geterrors(rhohat2a) sige2a = np.sqrt(np.dot(err2a,err2a)/nsample) #print('sige2a', sige2a, #print('cov_x2a.shape', cov_x2a.shape #results_bse.append(sige2a * np.sqrt(np.diag(cov_x2a))) if not cov_x2a is None: results_bse.append(sige2a * np.sqrt(np.diag(cov_x2a))) else: results_bse.append(np.nan + np.zeros_like(rhohat2a)) return np.r_[ar[1:], ma[1:]], np.array(results), np.array(results_bse) def mc_summary(res, rt=None): if rt is None: rt = np.zeros(res.shape[1]) nanrows = np.isnan(res).any(1) print('fractions of iterations with nans', nanrows.mean()) res = res[~nanrows] print('RMSE') print(np.sqrt(((res-rt)**2).mean(0))) print('mean bias') print((res-rt).mean(0)) print('median bias') print(np.median((res-rt),0)) print('median bias percent') print(np.median((res-rt)/rt*100,0)) print('median absolute error') print(np.median(np.abs(res-rt),0)) print('positive error fraction') print((res > rt).mean(0)) if __name__ == '__main__': #short version # true, est, bse = mcarma22(niter=50) # print(true # #print(est # print(est.mean(0) ''' niter 50, sample size=1000, 2 runs [-0.55 -0.1 0.3 0.2 ] [-0.542401 -0.09904305 0.30840599 0.2052473 ] [-0.55 -0.1 0.3 0.2 ] 
[-0.54681176 -0.09742921 0.2996297 0.20624258] niter=50, sample size=200, 3 runs [-0.55 -0.1 0.3 0.2 ] [-0.64669489 -0.01134491 0.19972259 0.20634019] [-0.55 -0.1 0.3 0.2 ] [-0.53141595 -0.10653234 0.32297968 0.20505973] [-0.55 -0.1 0.3 0.2 ] [-0.50244588 -0.125455 0.33867488 0.19498214] niter=50, sample size=100, 5 runs --> ar1 too low, ma1 too high [-0.55 -0.1 0.3 0.2 ] [-0.35715008 -0.23392766 0.48771794 0.21901059] [-0.55 -0.1 0.3 0.2 ] [-0.3554852 -0.21581914 0.51744748 0.24759245] [-0.55 -0.1 0.3 0.2 ] [-0.3737861 -0.24665911 0.48031939 0.17274438] [-0.55 -0.1 0.3 0.2 ] [-0.30015385 -0.27705506 0.56168199 0.21995759] [-0.55 -0.1 0.3 0.2 ] [-0.35879991 -0.22999604 0.4761953 0.19670835] new version, with burnin 1000 in DGP and demean [-0.55 -0.1 0.3 0.2 ] [-0.56770228 -0.00076025 0.25621825 0.24492449] [-0.55 -0.1 0.3 0.2 ] [-0.27598305 -0.2312364 0.57599134 0.23582417] [-0.55 -0.1 0.3 0.2 ] [-0.38059051 -0.17413628 0.45147109 0.20046776] [-0.55 -0.1 0.3 0.2 ] [-0.47789765 -0.08650743 0.3554441 0.24196087] ''' ar = [1.0, -0.55, -0.1] ma = [1.0, 0.3, 0.2] nsample = 200 run_mc = True#False if run_mc: for sig in [0.1, 0.5, 1.]: import time t0 = time.time() rt, res_rho, res_bse = mcarma22(niter=100, sig=sig) print('\nResults for Monte Carlo') print('true') print(rt) print('nsample =', nsample, 'sigma = ', sig) print('elapsed time for Monte Carlo', time.time()-t0) # 20 seconds for ARMA(2,2), 1000 iterations with 1000 observations #sige2a = np.sqrt(np.dot(err2a,err2a)/nsample) #print('\nbse of one sample' #print(sige2a * np.sqrt(np.diag(cov_x2a)) print('\nMC of rho versus true') mc_summary(res_rho, rt) print('\nMC of bse versus zero') # this implies inf in percent mc_summary(res_bse) print('\nMC of bse versus std') mc_summary(res_bse, res_rho.std(0))
bsd-3-clause
8,047,128,683,676,541,000
29.993197
78
0.561238
false
2.518519
false
false
false
AnhellO/DAS_Sistemas
Ene-Jun-2021/aguilar-cedillo-jonathan-ivan/primerparcial/ejercicio4/ejercicio4.py
1
2219
from abc import ABCMeta, abstractclassmethod class Cajero(metaclass=ABCMeta): #Interfaz @abstractclassmethod def next_succesor(succesor): pass @abstractclassmethod def handle(cantidad): pass class Cajero50(Cajero): def __init__(self): self._succesor = None def next_succesor(self, succesor): self._succesor = succesor def handle(self,cantidad): if cantidad >=50: num = cantidad // 50 sobrante = cantidad % 50 print(f"entregando {num} billetes de 50") if sobrante != 0: self._succesor.handle(sobrante) else: self._succesor.handle(cantidad) class Cajero20(Cajero): def __init__(self): self._succesor = None def next_succesor(self, succesor): self._succesor = succesor def handle(self,cantidad): if cantidad >= 20: num = cantidad // 20 sobrante = cantidad % 20 print(f"entregando {num} billetes de 20") if sobrante != 0: self._succesor.handle(sobrante) else: self._succesor.handle(cantidad) class Cajero10(Cajero): def __init__(self): self._succesor = None def next_succesor(self, succesor): self._succesor = succesor def handle(self,cantidad): if cantidad >= 10: num = cantidad // 10 sobrante = cantidad % 10 print(f"entregando {num} billetes de 10") if sobrante != 0: self._succesor.handle(sobrante) else: self._succesor.handle(cantidad) class CajeroATMChain: def __init__(self): self.chain1 = Cajero50() self.chain2 = Cajero20() self.chain3 = Cajero10() self.chain1.next_succesor(self.chain2) self.chain2.next_succesor(self.chain3) if __name__ == '__main__': ATM = CajeroATMChain() cantidad = int(input("que cantidad desea sacar?: ")) if cantidad < 10 or cantidad % 10 != 0: print("saldo insuficiente") exit() ATM.chain1.handle(cantidad) print(f"la cantidad de {cantidad} fue entregada...") print("que tenga un buen dia...")
mit
-3,379,360,154,780,902,000
26.395062
56
0.572781
false
3.248902
false
false
false
topic2k/EventGhost
plugins/CyberlinkUniversalRemote/__init__.py
1
4075
# -*- coding: utf-8 -*- # # This file is a plugin for EventGhost. # Copyright © 2005-2019 EventGhost Project <http://www.eventghost.org/> # # EventGhost is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free # Software Foundation, either version 2 of the License, or (at your option) # any later version. # # EventGhost is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along # with EventGhost. If not, see <http://www.gnu.org/licenses/>. ur"""<rst> Plugin for the CyberLink Universal Remote Control """ import eg eg.RegisterPlugin( name = "CyberLink Universal Remote Control", author = "Bitmonster", version = "1.0.1", kind = "remote", guid = "{097D33BE-FD65-43D2-852B-5DA8A3FBC489}", description = __doc__, hardwareId = "USB\\VID_0766&PID_0204", ) KEY_CODES_1 = { (0, 0, 30, 0, 0, 0, 0, 0): "Num1", (0, 0, 31, 0, 0, 0, 0, 0): "Num2", (0, 0, 32, 0, 0, 0, 0, 0): "Num3", (0, 0, 33, 0, 0, 0, 0, 0): "Num4", (0, 0, 34, 0, 0, 0, 0, 0): "Num5", (0, 0, 35, 0, 0, 0, 0, 0): "Num6", (0, 0, 36, 0, 0, 0, 0, 0): "Num7", (0, 0, 37, 0, 0, 0, 0, 0): "Num8", (0, 0, 38, 0, 0, 0, 0, 0): "Num9", (0, 0, 39, 0, 0, 0, 0, 0): "Num0", (0, 0, 76, 0, 0, 0, 0, 0): "Clear", (0, 0, 40, 0, 0, 0, 0, 0): "Ok", (0, 0, 79, 0, 0, 0, 0, 0): "Right", (0, 0, 80, 0, 0, 0, 0, 0): "Left", (0, 0, 81, 0, 0, 0, 0, 0): "Down", (0, 0, 82, 0, 0, 0, 0, 0): "Up", } KEY_CODES_2 = { (3, 0, 0, 2): "Info", (3, 0, 0, 4): "Rewind", (3, 0, 0, 8): "Forward", (3, 0, 0, 64): "Play", (3, 0, 0, 128): "Pause", } KEY_CODES_3 = { (3, 0, 1): "ChannelUp", (3, 0, 2): "ChannelDown", (3, 0, 4): "Back", (3, 0, 16): "Stop", (3, 1, 0): "NextTrack", (3, 2, 0): "PreviousTrack", (3, 4, 0): "Radio", (3, 16, 0): "Mute", (3, 32, 0): 
"VolumeUp", (3, 64, 0): "VolumeDown", (3, 128, 0): "Record", (4, 0, 1): "Angel", (4, 0, 2): "Language", (4, 0, 4): "DvdMenu", (4, 0, 8): "Subtitle", (4, 0, 16): "SAP", (4, 0, 32): "Teletext", (4, 0, 64): "LastChannel", (4, 1, 0): "Home", (4, 2, 0): "TV", (4, 8, 0): "Green", (4, 16, 0): "Yellow", (4, 32, 0): "Blue", (4, 128, 0): "Red", } KEY_CODES_4 = { (2, 2): "Power", } class CyberlinkUniversalRemote(eg.PluginBase): def __start__(self): self.buffer = [] self.expectedLength = 0 self.winUsb = eg.WinUsb(self) self.winUsb.Device(self.Callback, 8).AddHardwareId( "CyberLink Universal Remote Control (Keypad)", "USB\\VID_0766&PID_0204&MI_00" ) self.winUsb.Device(self.Callback, 4).AddHardwareId( "CyberLink Universal Remote Control (Buttons)", "USB\\VID_0766&PID_0204&MI_01" ) self.winUsb.Start() self.last_data = [] def __stop__(self): self.winUsb.Stop() def Callback(self, data): if self.last_data != data: # print data if data in KEY_CODES_1: self.TriggerEnduringEvent(KEY_CODES_1[data]) self.last_data = data elif data in KEY_CODES_2: self.TriggerEnduringEvent(KEY_CODES_2[data]) self.last_data = data elif data[:3] in KEY_CODES_3: self.TriggerEnduringEvent(KEY_CODES_3[data[:3]]) self.last_data = data elif data[:2] in KEY_CODES_4: self.TriggerEnduringEvent(KEY_CODES_4[data[:2]]) self.last_data = data elif len(data) == len(self.last_data): self.EndLastEvent() self.last_data = [] # print "EndLastEvent"
gpl-2.0
6,561,192,143,097,264,000
29.402985
77
0.515709
false
2.756428
false
false
false
abilian/abilian-core
src/abilian/web/tags/admin.py
1
9205
"""Admin panel for tags.""" import logging from typing import Callable, List import sqlalchemy as sa import sqlalchemy.orm from flask import current_app, flash, redirect, render_template, request from sqlalchemy.sql import functions as func from abilian.core.entities import Entity from abilian.core.extensions import db from abilian.core.models.tag import Tag, entity_tag_tbl from abilian.i18n import _, _l, _n from abilian.services import get_service from abilian.services.indexing.service import index_update from abilian.web import url_for from abilian.web.admin import AdminPanel from abilian.web.views import ObjectEdit from abilian.web.views.base import View from .forms import TagForm logger = logging.getLogger(__name__) _OBJ_COUNT = func.count(entity_tag_tbl.c.entity_id).label("obj_count") def get_entities_for_reindex(tags): """Collect entities for theses tags.""" if isinstance(tags, Tag): tags = (tags,) session = db.session() indexing = get_service("indexing") tbl = Entity.__table__ tag_ids = [t.id for t in tags] query = ( sa.sql.select([tbl.c.entity_type, tbl.c.id]) .select_from(tbl.join(entity_tag_tbl, entity_tag_tbl.c.entity_id == tbl.c.id)) .where(entity_tag_tbl.c.tag_id.in_(tag_ids)) ) entities = set() with session.no_autoflush: for entity_type, entity_id in session.execute(query): if entity_type not in indexing.adapted: logger.debug("%r is not indexed, skipping", entity_type) item = ("changed", entity_type, entity_id, ()) entities.add(item) return entities def schedule_entities_reindex(entities): """ :param entities: as returned by :func:`get_entities_for_reindex` """ entities = [(e[0], e[1], e[2], dict(e[3])) for e in entities] return index_update.apply_async(kwargs={"index": "default", "items": entities}) class NSView(View): """View a Namespace.""" def __init__(self, view_endpoint, *args, **kwargs): super().__init__(*args, **kwargs) self.__selected_tags = None self.view_endpoint = view_endpoint def prepare_args(self, args, kwargs): self.ns = 
kwargs.get("ns") self.form_errors = {} return args, kwargs def get(self, ns): tags = ( Tag.query.filter(Tag.ns == ns) .outerjoin(entity_tag_tbl, entity_tag_tbl.c.tag_id == Tag.id) .add_column(_OBJ_COUNT) .group_by(Tag) .order_by(sa.sql.func.lower(Tag.label)) ) # get a list of rows instead of (Tag, count) tuples tags = list(tags.session.execute(tags)) return render_template( "admin/tags_ns.html", ns=ns, tags=tags, errors=self.form_errors, merge_to=request.form.get("merge_to", default="__None__", type=int), selected_tags={t.id for t in self._get_selected_tags()}, ) def redirect_to_view(self): return redirect(url_for(".tags_ns", ns=self.ns)) def post(self, ns): data = request.form action = data.get("__action") if action == "delete": return self.do_delete() elif action == "merge": return self.do_merge() else: flash(_("Unknown action")) self.get(self.ns) def _get_selected_tags(self) -> List[Tag]: if self.__selected_tags is None: tag_ids = request.form.getlist("selected", type=int) if not tag_ids: self.__selected_tags = [] else: self.__selected_tags = Tag.query.filter( Tag.ns == self.ns, Tag.id.in_(tag_ids) ).all() return self.__selected_tags def do_delete(self): data = request.form confirm = data.get("confirm_delete", False, type=bool) if not confirm: flash(_("Please fix the error(s) below"), "error") self.form_errors["confirm_delete"] = _( "Must be checked to ensure you " "intent to delete these tags" ) return self.get(self.ns) session = db.session() tags = self._get_selected_tags() if not tags: flash(_("No action performed: no tags selected"), "warning") return self.redirect_to_view() count = len(tags) entities_to_reindex = get_entities_for_reindex(tags) success_message = _n( "%(tag)s deleted", "%(num)d tags deleted:\n%(tags)s", count, tag=tags[0].label, tags=", ".join(t.label for t in tags), ) for tag in tags: session.delete(tag) session.commit() flash(success_message) schedule_entities_reindex(entities_to_reindex) return self.redirect_to_view() def do_merge(self): 
target_id = request.form.get("merge_to", type=int) if not target_id: flash(_("You must select a target tag to merge to"), "error") return self.get(self.ns) target = Tag.query.filter(Tag.ns == self.ns, Tag.id == target_id).scalar() if not target: flash(_("Target tag not found, no action performed"), "error") return self.get(self.ns) merge_from = set(self._get_selected_tags()) if target in merge_from: merge_from.remove(target) if not merge_from: flash(_("No tag selected for merging"), "warning") return self.get(self.ns) session = db.session() merge_from_ids = [t.id for t in merge_from] tbl = entity_tag_tbl entities_to_reindex = get_entities_for_reindex(merge_from) already_tagged = sa.sql.select([tbl.c.entity_id]).where( tbl.c.tag_id == target.id ) del_dup = tbl.delete().where( sa.sql.and_( tbl.c.tag_id.in_(merge_from_ids), tbl.c.entity_id.in_(already_tagged) ) ) session.execute(del_dup) update = ( tbl.update() .where(tbl.c.tag_id.in_(merge_from_ids)) .values(tag_id=target.id) ) session.execute(update) for merged in merge_from: session.delete(merged) session.commit() schedule_entities_reindex(entities_to_reindex) return self.redirect_to_view() class BaseTagView: """Mixin for tag views.""" Model = Tag Form = TagForm def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.extension = current_app.extensions["tags"] def prepare_args(self, args, kwargs): self.ns = kwargs.pop("ns") return super().prepare_args(args, kwargs) def view_url(self): return url_for(".tags_ns", ns=self.ns) index_url = view_url class TagEdit(BaseTagView, ObjectEdit): _message_success = _l("Tag edited") has_changes = False _entities_to_reindex: List[Entity] = [] def after_populate_obj(self): session = sa.orm.object_session(self.obj) self.has_changes = self.obj in (session.dirty | session.deleted) if self.has_changes: # since the tag may be in pending-delete, we must collect them # before flush self._entities_to_reindex = get_entities_for_reindex(self.obj) def commit_success(self): 
if not (self.has_changes and self._entities_to_reindex): return schedule_entities_reindex(self._entities_to_reindex) class TagPanel(AdminPanel): """Tags administration.""" id = "tags" label = _l("Tags") icon = "tags" def get(self): obj_count = ( sa.sql.select( [Tag.ns, func.count(entity_tag_tbl.c.entity_id).label("obj_count")] ) .select_from(Tag.__table__.join(entity_tag_tbl)) .group_by(Tag.ns) .alias() ) ns_query = ( sa.sql.select( [Tag.ns, func.count(Tag.id).label("tag_count"), obj_count.c.obj_count], from_obj=[Tag.__table__.outerjoin(obj_count, Tag.ns == obj_count.c.ns)], ) .group_by(Tag.ns, obj_count.c.obj_count) .order_by(Tag.ns) ) session = db.session() namespaces = session.execute(ns_query) return render_template("admin/tags.html", namespaces=namespaces) def install_additional_rules(self, add_url_rule: Callable) -> None: panel_endpoint = f".{self.id}" ns_base = "/<string:ns>/" add_url_rule( ns_base, endpoint="ns", view_func=NSView.as_view("ns", view_endpoint=panel_endpoint), ) tag_base = f"{ns_base}<int:object_id>/" add_url_rule( tag_base, endpoint="tag_edit", view_func=TagEdit.as_view("tag_edit", view_endpoint=panel_endpoint), ) add_url_rule( f"{tag_base}delete", endpoint="tag_delete", view_func=TagEdit.as_view("tag_delete", view_endpoint=panel_endpoint), )
lgpl-2.1
-2,298,058,158,841,750,800
29.889262
88
0.567518
false
3.676118
false
false
false
DigitalCampus/django-swaziland-oppia
oppia/migrations/0012_auto__add_field_media_filesize__add_field_media_media_length.py
1
18168
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Media.filesize' db.add_column(u'oppia_media', 'filesize', self.gf('django.db.models.fields.BigIntegerField')(default=None, null=True, blank=True), keep_default=False) # Adding field 'Media.media_length' db.add_column(u'oppia_media', 'media_length', self.gf('django.db.models.fields.IntegerField')(default=None, null=True, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'Media.filesize' db.delete_column(u'oppia_media', 'filesize') # Deleting field 'Media.media_length' db.delete_column(u'oppia_media', 'media_length') models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], 
{'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'oppia.activity': { 'Meta': {'object_name': 'Activity'}, 'baseline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order': ('django.db.models.fields.IntegerField', [], {}), 'section': ('django.db.models.fields.related.ForeignKey', [], {'to': 
u"orm['oppia.Section']"}), 'title': ('django.db.models.fields.TextField', [], {}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}) }, u'oppia.activityschedule': { 'Meta': {'object_name': 'ActivitySchedule'}, 'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'schedule': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Schedule']"}), 'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}) }, u'oppia.award': { 'Meta': {'object_name': 'Award'}, 'award_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Badge']"}), 'description': ('django.db.models.fields.TextField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}) }, u'oppia.awardcourse': { 'Meta': {'object_name': 'AwardCourse'}, 'award': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Award']"}), 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), 'course_version': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, u'oppia.badge': { 'Meta': {'object_name': 'Badge'}, 'allow_multiple_awards': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'default_icon': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], 
{}), 'points': ('django.db.models.fields.IntegerField', [], {'default': '100'}), 'ref': ('django.db.models.fields.CharField', [], {'max_length': '20'}) }, u'oppia.cohort': { 'Meta': {'object_name': 'Cohort'}, 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), 'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'schedule': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['oppia.Schedule']", 'null': 'True', 'blank': 'True'}), 'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}) }, u'oppia.course': { 'Meta': {'object_name': 'Course'}, 'badge_icon': ('django.db.models.fields.files.FileField', [], {'default': 'None', 'max_length': '100', 'blank': 'True'}), 'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'filename': ('django.db.models.fields.CharField', [], {'max_length': '200'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'lastupdated_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'shortname': ('django.db.models.fields.CharField', [], {'max_length': '20'}), 'title': ('django.db.models.fields.TextField', [], {}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), 'version': ('django.db.models.fields.BigIntegerField', [], {}) }, u'oppia.coursedownload': { 'Meta': {'object_name': 'CourseDownload'}, 'agent': 
('django.db.models.fields.TextField', [], {'default': 'None', 'blank': 'True'}), 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), 'course_version': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}), 'download_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ip': ('django.db.models.fields.IPAddressField', [], {'default': 'None', 'max_length': '15', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}) }, u'oppia.coursetag': { 'Meta': {'object_name': 'CourseTag'}, 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Tag']"}) }, u'oppia.media': { 'Meta': {'object_name': 'Media'}, 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), 'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'download_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'filename': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'filesize': ('django.db.models.fields.BigIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'media_length': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}) }, u'oppia.message': { 'Meta': {'object_name': 'Message'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'icon': 
('django.db.models.fields.CharField', [], {'max_length': '200'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'link': ('django.db.models.fields.URLField', [], {'max_length': '255'}), 'message': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'publish_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}) }, u'oppia.participant': { 'Meta': {'object_name': 'Participant'}, 'cohort': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Cohort']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '20'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}) }, u'oppia.points': { 'Meta': {'object_name': 'Points'}, 'cohort': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Cohort']", 'null': 'True'}), 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']", 'null': 'True'}), 'data': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'description': ('django.db.models.fields.TextField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'points': ('django.db.models.fields.IntegerField', [], {}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '20'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}) }, u'oppia.schedule': { 'Meta': {'object_name': 'Schedule'}, 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), 'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'default': ('django.db.models.fields.BooleanField', [], 
{'default': 'False'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'lastupdated_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'title': ('django.db.models.fields.TextField', [], {}) }, u'oppia.section': { 'Meta': {'object_name': 'Section'}, 'course': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['oppia.Course']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order': ('django.db.models.fields.IntegerField', [], {}), 'title': ('django.db.models.fields.TextField', [], {}) }, u'oppia.tag': { 'Meta': {'object_name': 'Tag'}, 'courses': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['oppia.Course']", 'through': u"orm['oppia.CourseTag']", 'symmetrical': 'False'}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), 'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'icon': ('django.db.models.fields.files.FileField', [], {'default': 'None', 'max_length': '100', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'order_priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}) }, u'oppia.tracker': { 'Meta': {'object_name': 'Tracker'}, 'activity_title': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'agent': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'course': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['oppia.Course']", 'null': 'True', 
'blank': 'True'}), 'data': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'digest': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}), 'section_title': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'submitted_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'time_taken': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'tracker_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '10', 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), 'uuid': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}) }, u'oppia.userprofile': { 'Meta': {'object_name': 'UserProfile'}, 'about': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'job_title': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'organisation': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'}) } } complete_apps = ['oppia']
gpl-3.0
6,404,917,616,971,633,000
72.558704
195
0.545465
false
3.639423
false
false
false
tiagocardosos/stoq
plugins/optical/opticalui.py
2
14886
# -*- Mode: Python; coding: utf-8 -*- # vi:si:et:sw=4:sts=4:ts=4 ## ## Copyright (C) 2013 Async Open Source <http://www.async.com.br> ## All rights reserved ## ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 2 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program; if not, write to the Free Software ## Foundation, Inc., or visit: http://www.gnu.org/. ## ## Author(s): Stoq Team <[email protected]> ## import decimal import logging import gtk from storm.expr import LeftJoin from stoqlib.api import api from stoqlib.database.runtime import get_default_store from stoqlib.database.viewable import Viewable from stoqlib.domain.product import Product, ProductManufacturer from stoqlib.domain.sellable import Sellable from stoqlib.domain.workorder import WorkOrder from stoqlib.gui.base.dialogs import run_dialog from stoqlib.gui.editors.personeditor import ClientEditor from stoqlib.gui.editors.producteditor import ProductEditor from stoqlib.gui.editors.workordereditor import WorkOrderEditor from stoqlib.gui.events import (StartApplicationEvent, StopApplicationEvent, EditorCreateEvent, RunDialogEvent, PrintReportEvent, SearchDialogSetupSearchEvent, ApplicationSetupSearchEvent) from stoqlib.gui.search.searchcolumns import SearchColumn from stoqlib.gui.search.searchextension import SearchExtension from stoqlib.gui.utils.keybindings import add_bindings, get_accels from stoqlib.gui.utils.printing import print_report from stoqlib.gui.wizards.personwizard import PersonRoleWizard from 
stoqlib.gui.wizards.workorderquotewizard import WorkOrderQuoteWizard from stoqlib.lib.message import warning from stoqlib.lib.translation import stoqlib_gettext from stoqlib.reporting.sale import SaleOrderReport from stoq.gui.services import ServicesApp from .medicssearch import OpticalMedicSearch, MedicSalesSearch from .opticaleditor import MedicEditor, OpticalWorkOrderEditor from .opticalhistory import OpticalPatientDetails from .opticalreport import OpticalWorkOrderReceiptReport from .opticalslave import ProductOpticSlave, WorkOrderOpticalSlave from .opticalwizard import OpticalSaleQuoteWizard, MedicRoleWizard from .opticaldomain import OpticalProduct _ = stoqlib_gettext log = logging.getLogger(__name__) class ProductSearchExtention(SearchExtension): spec_attributes = dict( gf_glass_type=OpticalProduct.gf_glass_type, gf_size=OpticalProduct.gf_size, gf_lens_type=OpticalProduct.gf_lens_type, gf_color=OpticalProduct.gf_color, gl_photosensitive=OpticalProduct.gl_photosensitive, gl_anti_glare=OpticalProduct.gl_anti_glare, gl_refraction_index=OpticalProduct.gl_refraction_index, gl_classification=OpticalProduct.gl_classification, gl_addition=OpticalProduct.gl_addition, gl_diameter=OpticalProduct.gl_diameter, gl_height=OpticalProduct.gl_height, gl_availability=OpticalProduct.gl_availability, cl_degree=OpticalProduct.cl_degree, cl_classification=OpticalProduct.cl_classification, cl_lens_type=OpticalProduct.cl_lens_type, cl_discard=OpticalProduct.cl_discard, cl_addition=OpticalProduct.cl_addition, cl_cylindrical=OpticalProduct.cl_cylindrical, cl_axis=OpticalProduct.cl_axis, cl_color=OpticalProduct.cl_color, cl_curvature=OpticalProduct.cl_curvature, ) spec_joins = [ LeftJoin(OpticalProduct, OpticalProduct.product_id == Product.id) ] def get_columns(self): info_cols = { _('Frame'): [ ('gf_glass_type', _('Glass Type'), str, False), ('gf_size', _('Size'), str, False), ('gf_lens_type', _('Lens Type'), str, False), ('gf_color', _('Color'), str, False), ], _('Glass Lenses'): [ 
('gl_photosensitive', _('Photosensitive'), str, False), ('gl_anti_glare', _('Anti Glare'), str, False), ('gl_refraction_index', _('Refraction Index'), decimal.Decimal, False), ('gl_classification', _('Classification'), str, False), ('gl_addition', _('Addition'), str, False), ('gl_diameter', _('Diameter'), str, False), ('gl_height', _('Height'), str, False), ('gl_availability', _('Availability'), str, False), ], _('Contact Lenses'): [ ('cl_degree', _('Degree'), decimal.Decimal, False), ('cl_classification', _('Classification'), str, False), ('cl_lens_type', _('Lens Type'), str, False), ('cl_discard', _('Discard'), str, False), ('cl_addition', _('Addition'), str, False), ('cl_cylindrical', _('Cylindrical'), decimal.Decimal, False), ('cl_axis', _('Axis'), decimal.Decimal, False), ('cl_color', _('Color'), str, False), ('cl_curvature', _('Curvature'), str, False), ], } columns = [] for label, columns_list in info_cols.iteritems(): for c in columns_list: columns.append( SearchColumn(c[0], title='%s - %s' % (label, c[1]), data_type=c[2], visible=c[3])) return columns class ServicesSearchExtention(SearchExtension): spec_attributes = dict( manufacturer_name=ProductManufacturer.name ) spec_joins = [ LeftJoin(Product, Product.sellable_id == Sellable.id), LeftJoin(ProductManufacturer, Product.manufacturer_id == ProductManufacturer.id) ] def get_columns(self): return [ SearchColumn('manufacturer_name', title=_('Manufacturer'), data_type=str, visible=False) ] class OpticalUI(object): def __init__(self): # This will contain a mapping of (appname, uimanager) -> extra_ui # We need to store that like this because each windows has it's unique # uimanager, and we have an extra_ui for different apps self._app_ui = dict() self.default_store = get_default_store() StartApplicationEvent.connect(self._on_StartApplicationEvent) StopApplicationEvent.connect(self._on_StopApplicationEvent) EditorCreateEvent.connect(self._on_EditorCreateEvent) RunDialogEvent.connect(self._on_RunDialogEvent) 
PrintReportEvent.connect(self._on_PrintReportEvent) SearchDialogSetupSearchEvent.connect(self._on_SearchDialogSetupSearchEvent) ApplicationSetupSearchEvent.connect(self._on_ApplicationSetupSearchEvent) add_bindings([ ('plugin.optical.pre_sale', ''), ('plugin.optical.search_medics', ''), ]) # # Private # def _add_sale_menus(self, sale_app): uimanager = sale_app.uimanager ui_string = """ <ui> <menubar name="menubar"> <placeholder name="ExtraMenubarPH"> <menu action="OpticalMenu"> <menuitem action="OpticalPreSale"/> <menuitem action="OpticalMedicSaleItems"/> <menuitem action="OpticalMedicSearch"/> </menu> </placeholder> </menubar> </ui> """ group = get_accels('plugin.optical') ag = gtk.ActionGroup('OpticalSalesActions') ag.add_actions([ ('OpticalMenu', None, _(u'Optical')), ('OpticalPreSale', None, _(u'Sale with work order...'), group.get('pre_sale'), None, self._on_OpticalPreSale__activate), ('OpticalMedicSearch', None, _(u'Medics...'), group.get('search_medics'), None, self._on_MedicsSearch__activate), ('OpticalMedicSaleItems', None, _(u'Medics sold items...'), None, None, self._on_MedicSaleItems__activate), ]) uimanager.insert_action_group(ag, 0) self._app_ui[('sales', uimanager)] = uimanager.add_ui_from_string(ui_string) def _add_services_menus(self, services_app): uimanager = services_app.uimanager ui_string = """ <ui> <menubar name="menubar"> <placeholder name="AppMenubarPH"> <menu action="OrderMenu"> <separator/> <menuitem action="OpticalDetails"/> </menu> </placeholder> </menubar> <popup name="ServicesSelection"> <placeholder name="ServicesSelectionPH"> <separator/> <menuitem action="OpticalDetails"/> </placeholder> </popup> </ui> """ ag = gtk.ActionGroup('OpticalServicesActions') ag.add_actions([ ('OpticalDetails', None, _(u'Edit optical details...'), None, None, self._on_OpticalDetails__activate), ]) uimanager.insert_action_group(ag, 0) self._app_ui[('services', uimanager)] = uimanager.add_ui_from_string(ui_string) services_app.search.connect( 
'result-selection-changed', self._on_ServicesApp__result_selection_changed, uimanager) def _remove_app_ui(self, appname, uimanager): ui = self._app_ui.pop((appname, uimanager), None) if ui is not None: uimanager.remove_ui(ui) def _fix_work_order_editor(self, editor, model, store): slave = WorkOrderOpticalSlave(store, model, show_finish_date=False, visual_mode=editor.visual_mode) editor.add_extra_tab('Ótico', slave) def _print_report(button): print_report(OpticalWorkOrderReceiptReport, [model]) # Also add an print button if editor.edit_mode: print_button = editor.add_button(_('Print'), gtk.STOCK_PRINT) print_button.connect('clicked', _print_report) def _add_product_slave(self, editor, model, store): editor.add_extra_tab(ProductOpticSlave.title, ProductOpticSlave(store, model)) def _create_pre_sale(self): if self._current_app.check_open_inventory(): warning(_("You cannot create a pre-sale with an open inventory.")) return with api.new_store() as store: run_dialog(OpticalSaleQuoteWizard, self._current_app, store) if store.committed: self._current_app.refresh() def _add_patient_history_button(self, editor, model): button = editor.add_button(_(u'Patient History')) button.connect('clicked', self._on_patient_history_clicked, editor, model) # Save the button on the editor, so the tests can click on it editor.patient_history_button = button # # Events # def _on_StartApplicationEvent(self, appname, app): self._current_app = app if appname == 'sales': self._add_sale_menus(app) elif appname == 'services': self._add_services_menus(app) def _on_StopApplicationEvent(self, appname, app): self._remove_app_ui(appname, app.uimanager) def _on_EditorCreateEvent(self, editor, model, store, *args): # Use type() instead of isinstance so tab does not appear on subclasses # (unless thats the desired effect) editor_type = type(editor) if editor_type is ProductEditor: self._add_product_slave(editor, model, store) elif editor_type is WorkOrderEditor: self._fix_work_order_editor(editor, model, 
store) elif editor_type is ClientEditor: self._add_patient_history_button(editor, model) def _on_RunDialogEvent(self, dialog, parent, *args, **kwargs): # Every sale with work order should use OpticalSaleQuoteWizard instead # of WorkOrderQuoteWizard when this plugin is enabled if dialog is WorkOrderQuoteWizard: return OpticalSaleQuoteWizard elif dialog is PersonRoleWizard and MedicEditor in args: return MedicRoleWizard def _on_SearchDialogSetupSearchEvent(self, dialog): if not issubclass(dialog.search_spec, Viewable): return viewable = dialog.search_spec if (viewable.has_column(Sellable.description) and viewable.has_join_with(Product)): dialog.add_extension(ProductSearchExtention()) def _on_ApplicationSetupSearchEvent(self, app): if isinstance(app, ServicesApp): extention = ServicesSearchExtention() extention.attach(app) # # Callbacks # def _on_PrintReportEvent(self, report_class, *args, **kwargs): if issubclass(report_class, SaleOrderReport): sale = args[0] store = sale.store workorders = list(WorkOrder.find_by_sale(store, sale)) if len(workorders): print_report(OpticalWorkOrderReceiptReport, workorders) return True return False def _on_patient_history_clicked(self, widget, editor, client): run_dialog(OpticalPatientDetails, editor, client.store, client) def _on_OpticalPreSale__activate(self, action): self._create_pre_sale() def _on_MedicsSearch__activate(self, action): with api.new_store() as store: run_dialog(OpticalMedicSearch, None, store, hide_footer=True) def _on_MedicSaleItems__activate(self, action): store = api.new_store() run_dialog(MedicSalesSearch, None, store, hide_footer=True) store.rollback() def _on_OpticalDetails__activate(self, action): wo_view = self._current_app.search.get_selected_item() with api.new_store() as store: work_order = store.fetch(wo_view.work_order) run_dialog(OpticalWorkOrderEditor, None, store, work_order) def _on_ServicesApp__result_selection_changed(self, search, uimanager): optical_details = uimanager.get_action( 
'/menubar/AppMenubarPH/OrderMenu/OpticalDetails') optical_details.set_sensitive(bool(search.get_selected_item()))
gpl-2.0
6,566,709,317,868,000,000
38.693333
87
0.623043
false
3.95142
false
false
false
BechtelCIRT/cassava
cassava/Automater/outputs.py
2
33878
""" The outputs.py module represents some form of all outputs from the Automater program to include all variation of output files. Any addition to the Automater that brings any other output requirement should be programmed in this module. Class(es): SiteDetailOutput -- Wrapper class around all functions that pass #NOMOREPRINTS print output from Automater, to include standard output and file system output. Function(s): No global exportable functions are defined. Exception(s): No exceptions exported. """ import csv import socket import re from datetime import datetime from operator import attrgetter class SiteDetailOutput(object): """ SiteDetailOutput provides the capability to output information to the screen, a text file, a comma-seperated value file, or a file formatted with html markup (readable by web browsers). Public Method(s): createOutputInfo Instance variable(s): _listofsites - list storing the list of site results stored. """ def __init__(self,sitelist): """ Class constructor. Stores the incoming list of sites in the _listofsites list. Argument(s): sitelist -- list containing site result information to be printed. Return value(s): Nothing is returned from this Method. """ self._listofsites = [] self._listofsites = sitelist @property def ListOfSites(self): """ Checks instance variable _listofsites for content. Returns _listofsites if it has content or None if it does not. Argument(s): No arguments are required. Return value(s): _listofsites -- list containing list of site results if variable contains data. None -- if _listofsites is empty or not assigned. Restriction(s): This Method is tagged as a Property. """ if self._listofsites is None or len(self._listofsites) == 0: return None return self._listofsites def createOutputInfo(self,parser): """ Checks parser information calls correct pass #NOMOREPRINTS print methods based on parser requirements. Returns nothing. 
Argument(s): parser -- Parser object storing program input parameters used when program was run. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ self.PrintToScreen() if parser.hasCEFOutFile(): self.PrintToCEFFile(parser.CEFOutFile) if parser.hasTextOutFile(): self.PrintToTextFile(parser.TextOutFile) if parser.hasHTMLOutFile(): self.PrintToHTMLFile(parser.HTMLOutFile) if parser.hasCSVOutSet(): self.PrintToCSVFile(parser.CSVOutFile) def PrintToScreen(self): """ Formats site information correctly and prints it to the user's standard output. Returns nothing. Argument(s): No arguments are required. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if target != site.Target: pass #NOMOREPRINTS print "\n____________________ Results found for: " + site.Target + " ____________________" target = site.Target if siteimpprop is None or len(siteimpprop)==0: pass #NOMOREPRINTS print "No results in the " + site.FriendlyName[index] + " category" else: if siteimpprop[index] is None or len(siteimpprop[index])==0: pass #NOMOREPRINTS print "No results found for: " + site.ReportStringForResult[index] else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop[index], basestring): if "" + site.ReportStringForResult[index] + " " + str(siteimpprop) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult[index] + " " + str(siteimpprop) laststring = "" + site.ReportStringForResult[index] + " " + str(siteimpprop) #must be a list since it failed the isinstance check on string else: laststring = "" for 
siteresult in siteimpprop[index]: if "" + site.ReportStringForResult[index] + " " + str(siteresult) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult[index] + " " + str(siteresult) laststring = "" + site.ReportStringForResult[index] + " " + str(siteresult) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if target != site.Target: pass #NOMOREPRINTS print "\n____________________ Results found for: " + site.Target + " ____________________" target = site.Target if siteimpprop is None or len(siteimpprop)==0: pass #NOMOREPRINTS print "No results found in the " + site.FriendlyName else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): if "" + site.ReportStringForResult + " " + str(siteimpprop) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult + " " + str(siteimpprop) laststring = "" + site.ReportStringForResult + " " + str(siteimpprop) #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop: if "" + site.ReportStringForResult + " " + str(siteresult) != laststring: pass #NOMOREPRINTS print "" + site.ReportStringForResult + " " + str(siteresult) laststring = "" + site.ReportStringForResult + " " + str(siteresult) else: pass def PrintToCEFFile(self,cefoutfile): """ Formats site information correctly and prints it to an output file in CEF format. CEF format specification from http://mita-tac.wikispaces.com/file/view/CEF+White+Paper+071709.pdf "Jan 18 11:07:53 host message" where message: "CEF:Version|Device Vendor|Device Product|Device Version|Signature ID|Name|Severity|Extension" Returns nothing. Argument(s): cefoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. 
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) curr_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') hostname = socket.gethostname() prefix = ' '.join([curr_date,hostname]) cef_version = "CEF:Version1.1" cef_deviceVendor = "TekDefense" cef_deviceProduct = "Automater" cef_deviceVersion = "2.1" cef_SignatureID = "0" cef_Severity = "2" cef_Extension = " " cef_fields = [cef_version,cef_deviceVendor,cef_deviceProduct,cef_deviceVersion, \ cef_SignatureID, cef_Severity, cef_Extension] pattern = "^\[\+\]\s+" target = "" pass #NOMOREPRINTS print '\n[+] Generating CEF output: ' + cefoutfile f = open(cefoutfile, "wb") csv.register_dialect('escaped',delimiter='|',escapechar='\\',doublequote=False,quoting=csv.QUOTE_NONE) cefRW = csv.writer(f,'escaped') #cefRW.writerow(['Target', 'Type', 'Source', 'Result']) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: laststring = "" #if it's just a string we don't want it to output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ 
["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] " + \ re.sub(pattern,"",site.ReportStringForResult[index])+ str(siteimpprop)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + res #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult if "" + tgt + typ + source + str(res) != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+str(res)])+"] " + \ re.sub(pattern,"",site.ReportStringForResult[index])+ str(siteresult)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + str(res) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] "] + \ [1] + [tgt]) else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+res])+"] " + \ re.sub(pattern,"",site.ReportStringForResult)+ str(siteimpprop)] + \ [cef_Severity] + [tgt]) laststring = "" + tgt + typ + source + res else: laststring = "" for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult if "" + tgt + typ + source + str(res) != laststring: cefRW.writerow([prefix] + cef_fields[:5] + \ ["["+",".join(["tgt="+tgt,"typ="+typ,"src="+source,"res="+str(res)])+"] " + \ re.sub(pattern,"",site.ReportStringForResult)+ str(siteimpprop)] + \ [cef_Severity] 
+ [tgt]) laststring = "" + tgt + typ + source + str(res) f.flush() f.close() pass #NOMOREPRINTS print "" + cefoutfile + " Generated" def PrintToTextFile(self,textoutfile): """ Formats site information correctly and prints it to an output file in text format. Returns nothing. Argument(s): textoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" pass #NOMOREPRINTS print "\n[+] Generating text output: " + textoutfile f = open(textoutfile, "w") if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if target != site.Target: f.write("\n____________________ Results found for: " + site.Target + " ____________________") target = site.Target if siteimpprop is None or len(siteimpprop)==0: f.write("\nNo results in the " + site.FriendlyName[index] + " category") else: if siteimpprop[index] is None or len(siteimpprop[index])==0: f.write("\nNo results found for: " + site.ReportStringForResult[index]) else: laststring = "" #if it's just a string we don't want it to output like a list if isinstance(siteimpprop[index], basestring): if "" + site.ReportStringForResult[index] + " " + str(siteimpprop) != laststring: f.write("\n" + site.ReportStringForResult[index] + " " + str(siteimpprop)) laststring = "" + site.ReportStringForResult[index] + " " + str(siteimpprop) #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: if "" + site.ReportStringForResult[index] + " " + str(siteresult) != laststring: f.write("\n" + site.ReportStringForResult[index] + " " + str(siteresult)) laststring = "" + site.ReportStringForResult[index] + " " + 
str(siteresult) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if target != site.Target: f.write("\n____________________ Results found for: " + site.Target + " ____________________") target = site.Target if siteimpprop is None or len(siteimpprop)==0: f.write("\nNo results found in the " + site.FriendlyName) else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): if "" + site.ReportStringForResult + " " + str(siteimpprop) != laststring: f.write("\n" + site.ReportStringForResult + " " + str(siteimpprop)) laststring = "" + site.ReportStringForResult + " " + str(siteimpprop) else: laststring = "" for siteresult in siteimpprop: if "" + site.ReportStringForResult + " " + str(siteresult) != laststring: f.write("\n" + site.ReportStringForResult + " " + str(siteresult)) laststring = "" + site.ReportStringForResult + " " + str(siteresult) f.flush() f.close() pass #NOMOREPRINTS print "" + textoutfile + " Generated" def PrintToCSVFile(self,csvoutfile): """ Formats site information correctly and prints it to an output file with comma-seperators. Returns nothing. Argument(s): csvoutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. 
""" sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" pass #NOMOREPRINTS print '\n[+] Generating CSV output: ' + csvoutfile f = open(csvoutfile, "wb") csvRW = csv.writer(f, quoting=csv.QUOTE_ALL) csvRW.writerow(['Target', 'Type', 'Source', 'Result']) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: laststring = "" #if it's just a string we don't want it to output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + res #must be a list since it failed the isinstance check on string else: laststring = "" for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult if "" + tgt + typ + source + str(res) != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + str(res) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" csvRW.writerow([tgt,typ,source,res]) else: laststring = "" #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = 
site.TargetType source = site.FriendlyName res = siteimpprop if "" + tgt + typ + source + res != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + res else: laststring = "" for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult if "" + tgt + typ + source + str(res) != laststring: csvRW.writerow([tgt,typ,source,res]) laststring = "" + tgt + typ + source + str(res) f.flush() f.close() pass #NOMOREPRINTS print "" + csvoutfile + " Generated" def PrintToHTMLFile(self,htmloutfile): """ Formats site information correctly and prints it to an output file using HTML markup. Returns nothing. Argument(s): htmloutfile -- A string representation of a file that will store the output. Return value(s): Nothing is returned from this Method. Restriction(s): The Method has no restrictions. """ sites = sorted(self.ListOfSites, key=attrgetter('Target')) target = "" pass #NOMOREPRINTS print '\n[+] Generating HTML output: ' + htmloutfile f = open(htmloutfile, "w") f.write(self.getHTMLOpening()) if sites is not None: for site in sites: if not isinstance(site._regex,basestring): #this is a multisite: for index in range(len(site.RegEx)): #the regexs will ensure we have the exact number of lookups siteimpprop = site.getImportantProperty(index) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: if siteimpprop[index] is None or len(siteimpprop[index])==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: #if it's just a string we don't want it to output like a list if 
isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: for siteresult in siteimpprop[index]: tgt = site.Target typ = site.TargetType source = site.FriendlyName[index] res = siteresult tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else:#this is a singlesite siteimpprop = site.getImportantProperty(0) if siteimpprop is None or len(siteimpprop)==0: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = "No results found" tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: #if it's just a string we don't want it output like a list if isinstance(siteimpprop, basestring): tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteimpprop tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) else: for siteresult in siteimpprop: tgt = site.Target typ = site.TargetType source = site.FriendlyName res = siteresult tableData = '<tr><td>' + tgt + '</td><td>' + typ + '</td><td>' + source + '</td><td>' + str(res) + '</td></tr>' f.write(tableData) f.write(self.getHTMLClosing()) f.flush() f.close() pass #NOMOREPRINTS print "" + htmloutfile + " Generated" def getHTMLOpening(self): """ Creates HTML markup to provide correct formatting for initial HTML file requirements. Returns string that contains opening HTML markup information for HTML output file. Argument(s): No arguments required. Return value(s): string. Restriction(s): The Method has no restrictions. 
""" return '''<style type="text/css"> #table-3 { border: 1px solid #DFDFDF; background-color: #F9F9F9; width: 100%; -moz-border-radius: 3px; -webkit-border-radius: 3px; border-radius: 3px; font-family: Arial,"Bitstream Vera Sans",Helvetica,Verdana,sans-serif; color: #333; } #table-3 td, #table-3 th { border-top-color: white; border-bottom: 1px solid #DFDFDF; color: #555; } #table-3 th { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 14px; } #table-3 td { font-size: 12px; padding: 4px 7px 2px; vertical-align: top; }res h1 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: Center; line-height: 1.3em; font-size: 40px; } h2 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 16px; } h4 { text-shadow: rgba(255, 255, 255, 0.796875) 0px 1px 0px; font-family: Georgia,"Times New Roman","Bitstream Charter",Times,serif; font-weight: normal; padding: 7px 7px 8px; text-align: left; line-height: 1.3em; font-size: 10px; } </style> <html> <body> <title> Automater Results </title> <h1> Automater Results </h1> <table id="table-3"> <tr> <th>Target</th> <th>Type</th> <th>Source</th> <th>Result</th> </tr> ''' def getHTMLClosing(self): """ Creates HTML markup to provide correct formatting for closing HTML file requirements. Returns string that contains closing HTML markup information for HTML output file. Argument(s): No arguments required. Return value(s): string. Restriction(s): The Method has no restrictions. 
""" return ''' </table> <br> <br> <p>Created using Automater.py by @TekDefense <a href="http://www.tekdefense.com">http://www.tekdefense.com</a>; <a href="https://github.com/1aN0rmus/TekDefense">https://github.com/1aN0rmus/TekDefense</a></p> </body> </html> '''
mit
8,456,188,284,988,222,000
51.039939
219
0.443857
false
5.006354
false
false
false
Natalia-28028/kpk2016
Cath_the_ball/ball_1.py
2
1945
from tkinter import * from random import * root = Tk() canvas = Canvas(root) canvas.pack() def create_scores_text(): global scores_text scores_text = canvas.create_text(60, 12, text="Scores: " + str(scores), font="Sans 18") def change_scores_text(): canvas.itemconfigure(scores_text, text="Scores: " + str(scores)) def generate_random_ball_coord(): x = randint(r, screen_width-r) y = randint(r, screen_height-r) return x, y def generate_random_ball_velocity(): vx = randint(-10, +10) vy = randint(-10, +10) return vx, vy def create_ball(): global x, y, vx, vy, ball x, y = generate_random_ball_coord() vx, vy = generate_random_ball_velocity() ball = canvas.create_oval(x - r, y - r, x + r, y + r, fill="green") def move_ball(): global x, y, vx, vy new_x, new_y = x - vx, y - vy if new_x < r or new_x > screen_width - r: new_x = x # rolling back coordinate! vx = -vx if new_y < r or new_y > screen_height - r: new_y = y # rolling back coordinate! vy = -vy canvas.move(ball, new_x - x, new_y - y) x, y = new_x, new_y def flick_ball(): global x, y, vx, vy new_x, new_y = generate_random_ball_coord() vx, vy = generate_random_ball_velocity() canvas.move(ball, new_x - x, new_y - y) x, y = new_x, new_y def time_event(): move_ball() canvas.after(100, time_event) def mouse_click(event): global scores if (event.x - x)**2 + (event.y - y)**2 <= r**2: scores += 1 change_scores_text() flick_ball() scores = 0 x = y = 0 # not needed r = 50 screen_width = int(canvas["width"]) screen_height = int(canvas["height"]) create_ball() create_scores_text() canvas.bind('<Button-1>', mouse_click) time_event() # начинаю циклически запускать таймер root.mainloop()
gpl-3.0
5,534,573,525,987,266,000
22.060241
71
0.572399
false
2.91172
false
false
false
mes3hacklab/sajuke
handlers/dao.py
1
4078
import sqlalchemy from sqlalchemy import Table, MetaData, Column, Integer, Text, ForeignKey from dbconnection import dbconnection authors = Table('authors', MetaData(), Column('id', Integer, primary_key=True), Column('name', Text)) albums = Table('albums', MetaData(), Column('id', Integer, primary_key=True), Column('authorid', Integer, ForeignKey('authors.id')), Column('name', Text)) songs = Table('songs', MetaData(), Column('id', Integer, primary_key=True), Column('authorid', Integer, ForeignKey('authors.id')), Column('albumid', Integer, ForeignKey('albums.id')), Column('name', Text)) def getAnagraphicTableResults(table, query, limit, offset): """Returns a dictionary representing the results of a query on a table composed of two columns: id (Integer Primary Key) and Name (Text)""" results = [] s = sqlalchemy.sql.select([table.c.id, table.c.name]) if query is not None and query.strip() != '': s = s.where(table.c.name.like('%' + query + '%')) if limit != -1: s = s.limit(limit) if offset != -1: s = s.offset(offset) for row in dbconnection.execute(s): r = {} r['id'] = row[0] r['name'] = row[1] results.append(r) return results def getBasicSearch(query, limit, offset): """Returns a combined search of authors, albums and songs matching the query""" r = {} r['authors'] = getAuthors(query, limit, offset) r['albums'] = getAlbums(query, None, limit, offset) r['songs'] = getSongs(query, None, None, limit, offset) return r def getAuthors(query, limit, offset): """Returns a dictionary of authors array""" return getAnagraphicTableResults(authors, query, limit, offset) def getAlbums(query, authorid, limit, offset): """Returns a dictionary of albums array""" results = [] s = sqlalchemy.sql.select([albums.c.id, albums.c.name, authors.c.id, authors.c.name]).where( albums.c.authorid == authors.c.id) if query is not None and query.strip() != '': s = s.where(albums.c.name.like('%' + query + '%')) if authorid is not None and authorid.strip() != '': try: s = s.where(authors.c.id == 
int(authorid)) except ValueError: pass if limit != -1: s = s.limit(limit) if offset != -1: s = s.offset(offset) for row in dbconnection.execute(s): r = {"id": row[0], "name": row[1], "author": {"id": row[2], "name": row[3]}} results.append(r) return results def getSongs(query, authorid, albumid, limit, offset): """Returns a dictionary of songs array""" results = [] s = sqlalchemy.sql.select([songs.c.id, songs.c.name, authors.c.id, authors.c.name, albums.c.id, albums.c.name]).where( songs.c.authorid == authors.c.id).where( songs.c.albumid == albums.c.id) if query is not None and query.strip() != '': s = s.where(songs.c.name.like('%' + query + '%')) if authorid is not None and authorid.strip() != '': try: s = s.where(authors.c.id == int(authorid)) except ValueError: pass if albumid is not None and albumid.strip() != '': try: s = s.where(albums.c.id == int(albumid)) except ValueError: pass if limit != -1: s = s.limit(limit) if offset != -1: s = s.offset(offset) for row in dbconnection.execute(s): r = {"id": row[0], "name": row[1], "author": { "id": row[2], "name": row[3]}, "album": { "id": row[4], "name": row[5]}} results.append(r) return results
mit
-7,827,855,297,687,735,000
30.129771
83
0.529181
false
3.761993
false
false
false
pyladieshre/pyladies
harare/settings.py
1
4469
""" Django settings for harare project. Generated by 'django-admin startproject' using Django 1.10.3. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import os import dj_database_url # Build paths inside the project like this: os.path.join(BASE_DIR, ...) # BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'xnk36o$h1m!)p0y!b(63myjcw_69be&1k@e91(jdftia3^h1h*' # SECURITY WARNING: don't run with debug turned on in production! # The following checks to see if running on Heroku and then disables debugging. 
# http://stackoverflow.com/questions/9383450/how-can-i-detect-herokus-environment ON_HEROKU = False if 'DATABASE_URL' in os.environ: ON_HEROKU = True DEBUG = True if ON_HEROKU: DEBUG = False ALLOWED_HOSTS = ['*'] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', # third_party_apps 'crispy_forms', 'bootstrap3', 'markitup', 'autoslug', # my_apps 'pyladies_harare', 'talks', 'profiles', 'accounts', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'whitenoise.middleware.WhiteNoiseMiddleware', ] ROOT_URLCONF = 'harare.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] CRISPY_TEMPLATE_PACK = 'bootstrap3' WSGI_APPLICATION = 'harare.wsgi.application' MARKITUP_FILTER = ('markdown.markdown', {'safe_mode': True}) # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Update database configuration with $DATABASE_URL db_from_env = dj_database_url.config(conn_max_age=500) DATABASES['default'].update(db_from_env) # AUTH_USER_MODEL = 'auth.User' # Password validation # 
https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'Africa/Harare' USE_I18N = True USE_L10N = True USE_TZ = True # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ # PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles') STATIC_URL = '/static/' # Extra places for collectstatic to find static files. STATICFILES_DIRS = ( os.path.join(PROJECT_ROOT, 'static'), )
mit
-8,864,186,225,389,897,000
26.757764
91
0.692996
false
3.325149
false
false
false
romanofski/superorganism
src/superorganism/gui/keys.py
1
1061
import zope.interface import zope.event import superorganism.gui.interfaces import urwid class CharKeyPressed(object): zope.interface.implements(superorganism.gui.interfaces.ICharKeyPressEvent) def __init__(self, screen, key): self.screen = screen self.key = key class FunctionKeyPressed(CharKeyPressed): zope.interface.implements(superorganism.gui.interfaces.IFunctionKeyPressEvent) class Dispatcher(object): zope.interface.implements(superorganism.gui.interfaces.IKeyDispatcher) def __init__(self, screen): self.screen = screen def dispatch_key_events(self): keys = self.screen.get_input() for key in self.screen.get_input(): if self.is_valid_char(key): zope.event.notify( CharKeyPressed(self.screen, key)) else: zope.event.notify( FunctionKeyPressed(self.screen, key)) def is_valid_char(self, key): return urwid.util.is_wide_char(key,0) or (len(key)==1 and ord(key) >= 32)
gpl-3.0
2,086,409,582,921,099,800
26.205128
82
0.6541
false
3.762411
false
false
false
silps/solesite
solesite/settings.py
1
5012
# Django settings for SOLE project. DEBUG = True TEMPLATE_DEBUG = DEBUG import os settings_dir = os.path.dirname(__file__) PROJECT_ROOT = os.path.abspath(os.path.dirname(settings_dir)) ADMINS = ( # ('Your Name', '[email protected]'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(PROJECT_ROOT, 'private/development.db'), } } # Email settings EMAIL_HOST = '' EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_PORT = 587 EMAIL_USE_TLS = True DEFAULT_FROM_EMAIL = '' MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'public/media/') MEDIA_URL = '/media/' STATIC_ROOT = os.path.join(PROJECT_ROOT, 'public/static/') STATIC_URL = '/static/' # when running on openshift if 'OPENSHIFT_REPO_DIR' in os.environ: PROJECT_ROOT = os.path.join(os.environ.get('OPENSHIFT_REPO_DIR'), 'wsgi', 'solesite') DATA_DIR = os.path.join(os.environ['OPENSHIFT_DATA_DIR']) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(DATA_DIR, 'development.db'), } } MEDIA_ROOT = os.path.join(DATA_DIR, 'media') STATIC_ROOT = os.path.join(os.environ.get('OPENSHIFT_REPO_DIR'), 'wsgi', 'static') # Internationalization TIME_ZONE = 'America/Chicago' LANGUAGE_CODE = 'en-us' ugettext = lambda s: s LANGUAGES = ( ('--', ugettext('select here')), ('nl', ugettext('Dutch')), ('fr', ugettext('French')), ('pl', ugettext('Polish')), ('pt', ugettext('Portugese')), ('pt-br', ugettext('Brazilian Portuguese')), ('es', ugettext('Spanish')), ('el', ugettext('Greek')), ('en', ugettext('English')), ('jp', ugettext('Japanese')), ) STATICFILES_DIRS = ( os.path.join(PROJECT_ROOT, 'solesite/static/'), ) LOCALE_PATHS = ( os.path.join(PROJECT_ROOT, 'locale'), ) SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. 
USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = '_g-js)o8z#8=9pr1&amp;05h^1_#)91sbo-)g^(*=-+epxmt4kc9m#' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.locale.LocaleMiddleware', 'userena.middleware.UserenaLocaleMiddleware', ) # Add the Guardian and userena authentication backends AUTHENTICATION_BACKENDS = ( 'userena.backends.UserenaAuthenticationBackend', 'guardian.backends.ObjectPermissionBackend', 'django.contrib.auth.backends.ModelBackend', ) # Settings used by SOLE LOGIN_REDIRECT_URL = '/accounts/%(username)s/' LOGIN_URL = '/accounts/signin/' LOGOUT_URL = '/accounts/signout/' AUTH_PROFILE_MODULE = 'profiles.Profile' USERENA_DISABLE_PROFILE_LIST = False USERENA_MUGSHOT_SIZE = 140 ROOT_URLCONF = 'solesite.urls' WSGI_APPLICATION = 'solesite.wsgi.application' TEMPLATE_DIRS = ( os.path.join(PROJECT_ROOT, 'solesite/templates/'), ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admin', 'django.contrib.admindocs', 'guardian', 'south', 
'userena', 'userena.contrib.umessages', 'profiles', 'easy_thumbnails', 'jobs', ) LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } # Needed for Django guardian ANONYMOUS_USER_ID = -1 # Test runner TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
bsd-3-clause
-5,469,266,478,667,101,000
26.23913
89
0.662809
false
3.372813
false
false
false
pw31/GGchem
tools/Plot_fast.py
1
3437
import matplotlib.pyplot as plt import numpy as np from matplotlib.ticker import MultipleLocator, FormatStrFormatter, ScalarFormatter, LogLocator from matplotlib.backends.backend_pdf import PdfPages plt.rcParams['axes.linewidth'] = 1.5 pp = PdfPages('ggchem.pdf') file = 'Static_fast.dat' data = open(file) dummy = data.readline() dimens = data.readline() dimens = np.array(dimens.split()) NELEM1 = int(dimens[0]) NMOLE1 = int(dimens[1]) NDUST1 = int(dimens[2]) header = data.readline() data.close() dat1 = np.loadtxt(file,skiprows=3) keyword1 = np.array(header.split()) Tg1 = dat1[:,0] # T [K] ntot = 0.0*Tg1 for i in range(3,4+NELEM1+NMOLE1): # electrons, all atoms, ions and cations ntot = ntot + 10**dat1[:,i] lntot1 = np.log10(ntot) file = 'Static_gas.dat' data = open(file) dummy = data.readline() dimens = data.readline() dimens = np.array(dimens.split()) NELEM2 = int(dimens[0]) NMOLE2 = int(dimens[1]) NDUST2 = int(dimens[2]) header = data.readline() data.close() dat2 = np.loadtxt(file,skiprows=3) keyword2 = np.array(header.split()) Tg2 = dat2[:,0] # T [K] ntot = 0.0*Tg2 for i in range(3,4+NELEM2+NMOLE2): # electrons, all atoms, ions and cations ntot = ntot + 10**dat2[:,i] lntot2 = np.log10(ntot) file = 'Static_cond.dat' data = open(file) dummy = data.readline() dimens = data.readline() dimens = np.array(dimens.split()) NELEM3 = int(dimens[0]) NMOLE3 = int(dimens[1]) NDUST3 = int(dimens[2]) header = data.readline() data.close() dat3 = np.loadtxt(file,skiprows=3) keyword3 = np.array(header.split()) Tg3 = dat3[:,0] # T [K] ntot = 0.0*Tg3 for i in range(3,4+NELEM3+NMOLE3): # electrons, all atoms, ions and cations ntot = ntot + 10**dat3[:,i] lntot3 = np.log10(ntot) bar = 1.E+6 # 1 bar in dyn/cm2 Tmin = 500 Tmax = 3000 sep = 100 col = ['darkgoldenrod','darkgray','darkgreen','darkmagenta','red','darkorange','darkorchid','aqua','cadetblue'] col2 = 
['aquamarine','beige','darkolivegreen','bisque','burlywood','chartreuse','chocolate','coral','cornflowerblue','crimson','darkcyan','darkkhaki'] #================== some important molecules ==================== fig,ax = plt.subplots() mols = ['CO','CO2','CH4','N2','NH3','HCN','C2H2','C2H4','H2O'] mols = np.array(mols) count = 0 for mol in mols: i = np.where(mol==keyword1)[0][0] yy = dat1[:,i]-lntot1 # log10 nmol/ntot plt.plot(Tg1,yy,c=col[count],lw=3,label=mol) i = np.where(mol==keyword2)[0][0] yy = dat2[:,i]-lntot2 # log10 nmol/ntot plt.plot(Tg2,yy,c=col[count],lw=2,ls='--') i = np.where(mol==keyword3)[0][0] yy = dat3[:,i]-lntot3 # log10 nmol/ntot plt.plot(Tg3,yy,c=col[count],lw=2,ls=':') count = count + 1 plt.xlabel(r'$T\ \mathrm{[K]}$',fontsize=20) plt.ylabel(r'$\mathrm{log}_{10}\ n_\mathrm{mol}/n_\mathrm{tot}$',fontsize=20) plt.xlim(Tmin,Tmax) plt.ylim(-15,-2) plt.tick_params(axis='both', labelsize=14) plt.tick_params('both', length=6, width=1.5, which='major') plt.tick_params('both', length=3, width=1, which='minor') minorLocator = MultipleLocator(sep) ax.xaxis.set_minor_locator(minorLocator) minorLocator = MultipleLocator(1) ax.yaxis.set_minor_locator(minorLocator) plt.legend(loc='lower right',fontsize=11,fancybox=True) plt.tight_layout() plt.savefig(pp,format='pdf') plt.clf() pp.close() print '... written output to ggchem.pdf.'
gpl-3.0
-5,654,245,361,752,050,000
31.733333
150
0.643875
false
2.467337
false
false
false
schleichdi2/OPENNFR-6.1-CORE
opennfr-openembedded-core/meta/lib/oeqa/utils/metadata.py
1
3931
# Copyright (C) 2016 Intel Corporation # # Released under the MIT license (see COPYING.MIT) # # Functions to get metadata from the testing host used # for analytics of test results. from collections import OrderedDict from collections.abc import MutableMapping from xml.dom.minidom import parseString from xml.etree.ElementTree import Element, tostring from oeqa.utils.commands import runCmd, get_bb_vars def get_os_release(): """Get info from /etc/os-release as a dict""" data = OrderedDict() os_release_file = '/etc/os-release' if not os.path.exists(os_release_file): return None with open(os_release_file) as fobj: for line in fobj: key, value = line.split('=', 1) data[key.strip().lower()] = value.strip().strip('"') return data def metadata_from_bb(): """ Returns test's metadata as OrderedDict. Data will be gathered using bitbake -e thanks to get_bb_vars. """ metadata_config_vars = ('MACHINE', 'BB_NUMBER_THREADS', 'PARALLEL_MAKE') info_dict = OrderedDict() hostname = runCmd('hostname') info_dict['hostname'] = hostname.output data_dict = get_bb_vars() # Distro information info_dict['distro'] = {'id': data_dict['DISTRO'], 'version_id': data_dict['DISTRO_VERSION'], 'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])} # Host distro information os_release = get_os_release() if os_release: info_dict['host_distro'] = OrderedDict() for key in ('id', 'version_id', 'pretty_name'): if key in os_release: info_dict['host_distro'][key] = os_release[key] info_dict['layers'] = get_layers(data_dict['BBLAYERS']) info_dict['bitbake'] = git_rev_info(os.path.dirname(bb.__file__)) info_dict['config'] = OrderedDict() for var in sorted(metadata_config_vars): info_dict['config'][var] = data_dict[var] return info_dict def metadata_from_data_store(d): """ Returns test's metadata as OrderedDict. Data will be collected from the provided data store. """ # TODO: Getting metadata from the data store would # be useful when running within bitbake. 
pass def git_rev_info(path): """Get git revision information as a dict""" from git import Repo, InvalidGitRepositoryError, NoSuchPathError info = OrderedDict() try: repo = Repo(path, search_parent_directories=True) except (InvalidGitRepositoryError, NoSuchPathError): return info info['commit'] = repo.head.commit.hexsha info['commit_count'] = repo.head.commit.count() try: info['branch'] = repo.active_branch.name except TypeError: info['branch'] = '(nobranch)' return info def get_layers(layers): """Returns layer information in dict format""" layer_dict = OrderedDict() for layer in layers.split(): layer_name = os.path.basename(layer) layer_dict[layer_name] = git_rev_info(layer) return layer_dict def write_metadata_file(file_path, metadata): """ Writes metadata to a XML file in directory. """ xml = dict_to_XML('metadata', metadata) xml_doc = parseString(tostring(xml).decode('UTF-8')) with open(file_path, 'w') as f: f.write(xml_doc.toprettyxml()) def dict_to_XML(tag, dictionary, **kwargs): """ Return XML element converting dicts recursively. """ elem = Element(tag, **kwargs) for key, val in dictionary.items(): if tag == 'layers': child = (dict_to_XML('layer', val, name=key)) elif isinstance(val, MutableMapping): child = (dict_to_XML(key, val)) else: if tag == 'config': child = Element('variable', name=key) else: child = Element(key) child.text = str(val) elem.append(child) return elem
gpl-2.0
-6,370,462,564,864,132,000
32.313559
103
0.624269
false
3.779808
false
false
false
expressly/expressly-plugin-sdk-python3-core
expressly/tests/api/test_banner.py
1
1224
from unittest import TestCase from httpretty import activate, register_uri, GET from schematics.validate import validate from expressly import Api from expressly.api_responses import BannerResponse from expressly.tests import dummy_api_key, api_dev_url, dummy_campaign_uuid class BannerTest(TestCase): def setUp(self): self.api = Api(dummy_api_key, api_dev_url, False) self.dummy_email = '[email protected]' @activate def test_request(self): register_uri( GET, 'http://%s/api/v2/banner/%s?email=%s' % (api_dev_url, dummy_campaign_uuid, self.dummy_email), body=bytearray(""" { "bannerImageUrl": "https://buyexpressly.com/assets/banner/awesome-banner.jpg", "migrationLink": "https://www.myblog.com/expressly/api/3aff1880-b0f5-45bd-8f33-247f55981f2c" }""", 'utf-8'), status=200, content_type='application/json' ) response = self.api.get_banner(dummy_campaign_uuid, self.dummy_email) self.assertEqual(response.status, 200) self.assertIsInstance(response.data, BannerResponse) self.assertTrue(validate(BannerResponse, response.data))
mit
-976,098,644,903,744,400
35
108
0.651961
false
3.578947
true
false
false
SpotlightKid/mididings
mididings/live/widgets.py
2
3258
# -*- coding: utf-8 -*- # # mididings # # Copyright (C) 2008-2014 Dominic Sacré <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # import sys if sys.version_info < (3,): import Tkinter else: import tkinter as Tkinter class AutoScrollbar(Tkinter.Scrollbar): def set_show_hide(self, show, hide): self._show = show self._hide = hide def set(self, lo, hi): if float(lo) <= 0.0 and float(hi) >= 1.0: self._hide() else: self._show() Tkinter.Scrollbar.set(self, lo, hi) class LiveThemedFactory(object): def __init__(self, color, color_highlight, color_background): self.color = color self.color_highlight = color_highlight self.color_background = color_background def Tk(self, **options): w = Tkinter.Tk() w.config(background=self.color_background) w.config(**options) return w def Frame(self, master, **options): w = Tkinter.Frame(master) w.config(background=self.color) w.config(**options) return w def AutoScrollbar(self, master, **options): w = AutoScrollbar(master) w.config( background=self.color, activebackground=self.color_highlight, troughcolor=self.color_background, borderwidth=1, relief='flat', width=16, ) w.config(**options) return w def Listbox(self, master, **options): w = Tkinter.Listbox(master) w.config( background=self.color_background, foreground=self.color, selectbackground=self.color_background, selectforeground=self.color_highlight, selectborderwidth=0, borderwidth=0, ) w.config(**options) return w def Button(self, master, **options): w = Tkinter.Button(master) w.config( background=self.color_background, foreground=self.color, activebackground=self.color_background, activeforeground=self.color_highlight, borderwidth=0, highlightthickness=0, relief='flat', ) w.config(**options) return w def Canvas(self, master, **options): w = Tkinter.Canvas(master) 
w.config(background=self.color_background) w.config(**options) return w class UnthemedFactory(object): def Tk(self, **options): w = Tkinter.Tk() w.config(**options) return w def Frame(self, master, **options): return Tkinter.Frame(master, **options) def AutoScrollbar(self, master, **options): return AutoScrollbar(master, **options) def Listbox(self, master, **options): return Tkinter.Listbox(master, **options) def Button(self, master, **options): return Tkinter.Button(master, **options) def Canvas(self, master, **options): return Tkinter.Canvas(master, **options)
gpl-2.0
-8,183,129,618,553,754,000
26.837607
70
0.595333
false
3.971951
true
false
false
Skytim/nccuTEG
pybossa/api/task_run.py
4
2652
# -*- coding: utf8 -*- # This file is part of PyBossa. # # Copyright (C) 2014 SF Isle of Man Limited # # PyBossa is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # PyBossa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with PyBossa. If not, see <http://www.gnu.org/licenses/>. """ PyBossa api module for exposing domain object TaskRun via an API. This package adds GET, POST, PUT and DELETE methods for: * task_runs """ from flask import request from flask.ext.login import current_user from pybossa.model.task_run import TaskRun from werkzeug.exceptions import Forbidden, BadRequest from api_base import APIBase from pybossa.util import get_user_id_or_ip from pybossa.core import task_repo, sentinel class TaskRunAPI(APIBase): """Class API for domain object TaskRun.""" __class__ = TaskRun reserved_keys = set(['id', 'created', 'finish_time']) def _update_object(self, taskrun): """Update task_run object with user id or ip.""" # validate the task and project for that taskrun are ok task = task_repo.get_task(taskrun.task_id) if task is None: # pragma: no cover raise Forbidden('Invalid task_id') if (task.project_id != taskrun.project_id): raise Forbidden('Invalid project_id') if _check_task_requested_by_user(taskrun, sentinel.master) is False: raise Forbidden('You must request a task first!') # Add the user info so it cannot post again the same taskrun if current_user.is_anonymous(): taskrun.user_ip = request.remote_addr else: taskrun.user_id = current_user.id def _forbidden_attributes(self, data): for key in data.keys(): 
if key in self.reserved_keys: raise BadRequest("Reserved keys in payload") def _check_task_requested_by_user(taskrun, redis_conn): user_id_ip = get_user_id_or_ip() usr = user_id_ip['user_id'] or user_id_ip['user_ip'] key = 'pybossa:task_requested:user:%s:task:%s' % (usr, taskrun.task_id) task_requested = bool(redis_conn.get(key)) if user_id_ip['user_id'] is not None: redis_conn.delete(key) return task_requested
agpl-3.0
-6,216,720,680,259,834,000
35.833333
77
0.682881
false
3.642857
false
false
false
ovnicraft/odoo_addons
smile_decimal_precision/models/decimal_precision.py
5
2306
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import api, fields, models, registry, tools class DecimalPrecision(models.Model): _inherit = 'decimal.precision' display_digits = fields.Integer('Display Digits', required=True, default=2) @tools.ormcache(skiparg=3) def display_precision_get(self, cr, uid, application): cr.execute('select display_digits from decimal_precision where name=%s', (application,)) res = cr.fetchone() return res[0] if res else 2 @api.model @api.returns('self', lambda value: value.id) def create(self, vals): record = super(DecimalPrecision, self).create(vals) self.display_precision_get.clear_cache(self) return record @api.multi def write(self, vals): result = super(DecimalPrecision, self).write(vals) self.display_precision_get.clear_cache(self) return result @api.multi def unlink(self): result = super(DecimalPrecision, self).unlink() self.display_precision_get.clear_cache(self) return result @staticmethod def get_display_precision(cr, uid, application): res = 2 dp_obj = registry(cr.dbname)['decimal.precision'] if hasattr(dp_obj, 'display_precision_get'): res 
= dp_obj.display_precision_get(cr, uid, application) return 16, res
agpl-3.0
3,048,453,814,624,087,000
36.803279
96
0.629662
false
4.177536
false
false
false
lucuma/Clay
clay/server.py
1
4505
import mimetypes import socket from urllib.parse import quote import gunicorn.app.base from whitenoise import WhiteNoise from .request import Request from .utils import make_active_helper def _get_local_ip(): ip = socket.gethostbyname(socket.gethostname()) if not ip.startswith("127."): return ip sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: # doesn't even have to be reachable sock.connect(("8.8.8.8", 1)) ip = sock.getsockname()[0] except Exception: ip = "127.0.0.1" finally: sock.close() return ip DISPLAY = """ ┌─────────────────────────────────────────────────┐ │ Clay is running │ │ │ │ - Your machine: {local}│ │ - Your network: {network}│ │ │ │ Press `ctrl+c` to quit. │ └─────────────────────────────────────────────────┘ """ def _display_running_message(host, port): # pragma: no cover local = "{:<29}".format(f"http://{host}:{port}") network = "{:<29}".format(f"http://{_get_local_ip()}:{port}") print(DISPLAY.format(local=local, network=network)) def on_starting(server): """Gunicorn hook""" _display_running_message(*server.address[0]) class GunicornMiddleware(gunicorn.app.base.BaseApplication): def __init__(self, app, **options): self.app = app self.options = options super().__init__() def load_config(self): config = {key: value for key, value in self.options.items() if key in self.cfg.settings and value is not None} for key, value in config.items(): self.cfg.set(key.lower(), value) def load(self): return self.app class WSGIApp: def __init__(self, clay): self.clay = clay def __call__(self, environ, start_response): return self.wsgi(environ, start_response) def wsgi(self, environ, start_response): request = Request(environ) body, status, headers = self.call(request) if hasattr(body, "encode"): body = body.encode("utf8") headers.append(("Content-Length", str(len(body)))) start_response(status, headers) return [body] def call(self, request): path = request.path print(path) if not self.clay.file_exists(path): print("file doesnt exists", path) path += 
"/index.html" if not self.clay.file_exists(path): return self.not_found(request) active = make_active_helper(request) if request.method == "HEAD": body = "" else: print("rendering file", path) body = self.clay.render_file(path, request=request, active=active) mime = mimetypes.guess_type(path)[0] or "text/plain" response_headers = [("Content-Type", mime)] return body, "200 OK", response_headers def not_found(self, request): mime = "text/plain" body = f"File {request.path} not found." active = make_active_helper(request) for path in ["not-found.html", "_notfound.html", "404.html"]: if self.clay.file_exists(path): mime = "text/html" body = self.clay.render_file(path, request=request, active=active) break response_headers = [ ("Content-Type", mime), ("Content-Length", str(len(body))) ] return body, "404 Not Found", response_headers def redirect_to(self, path): return "", "302 Found", [("Location", quote(path.encode("utf8")))] def run(self, host, port): # pragma: no cover server = GunicornMiddleware( self, bind=f"{host}:{port}", worker_class="eventlet", accesslog="-", access_log_format="%(h)s %(m)s %(U)s -> HTTP %(s)s", on_starting=on_starting ) server.run() def make_app(clay): app = WSGIApp(clay) app.wsgi = WhiteNoise( app.wsgi, root=clay.static_path, prefix="static/", index_file=True, autorefresh=True, ) return app
apache-2.0
5,077,850,476,091,318,000
28.294521
82
0.535656
false
3.643101
false
false
false
remram44/gitobox
gitobox/watch.py
1
2264
"""Directory-watching logic. Contains :class:`~gitobox.watch.DirectoryWatcher`, the class that monitors the directory for changes. Uses `pyinotify`, so it's only available on Linux. """ from __future__ import unicode_literals import logging from watchdog.observers import Observer from watchdog.events import FileSystemEventHandler from gitobox.timer import ResettableTimer class DirectoryWatcher(FileSystemEventHandler): ALL_CHANGED = None def __init__(self, folder, callback, lock, timeout): self._callback = callback self.observer = Observer() self._folder = folder self._changes = set() self.observer.schedule(self, str(folder), recursive=True) self._timer = ResettableTimer(timeout, self._timer_expired, lock=lock) def assume_all_changed(self): self._changes.add(DirectoryWatcher.ALL_CHANGED) self._timer.start() def run(self): self.observer.start() def _timer_expired(self): changes = self._changes self._changes = set() logging.info("Directory stable, syncing...") if DirectoryWatcher.ALL_CHANGED in changes: self._callback() else: self._callback(changes) def on_moved(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Moved %s: from %s to %s", what, event.src_path, event.dest_path) self._changes.add(event.src_path) self._changes.add(event.dest_path) self._timer.start() def on_created(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Created %s: %s", what, event.src_path) self._changes.add(event.src_path) self._timer.start() def on_deleted(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Deleted %s: %s", what, event.src_path) self._changes.add(event.src_path) self._timer.start() def on_modified(self, event): what = 'directory' if event.is_directory else 'file' logging.info("Modified %s: %s", what, event.src_path) self._changes.add(event.src_path) self._timer.start()
bsd-3-clause
-7,034,034,042,734,324,000
30.887324
78
0.626325
false
3.856899
false
false
false
zo7/ios-s3-dist
ios-s3-dist.py
1
3680
''' ios-s3-dist.py Python 3 Script to assist in distributing ad-hoc and enterprise iOS builds. Uploads build to S3 and creates a manifest.plist file to install it with. Required information should be profiled in a 'config.json' file. Usage: ios-dist.py {filename or path to build} ''' def main(build_filename): import tinys3, json, os, plistlib, shutil, zipfile cnfg = json.load( open('config.json') ) # ---- Get information from build's Info.plist zfile = zipfile.ZipFile(build_filename) for name in zfile.namelist(): if name.endswith('Info.plist'): zfile.extract(name, 'temp') shutil.move('temp/'+name, 'temp/Info.plist') shutil.rmtree('temp/Payload') info = plistlib.load( open('temp/Info.plist', 'rb') ) bundle_name = info['CFBundleName'] bundle_identifier = info['CFBundleIdentifier'] bundle_version = info['CFBundleVersion'] # ---- Determine which build # this is by the number of existing builds conn = tinys3.Connection(cnfg['s3_access_key'], cnfg['s3_secret_key']) uploaded_builds = conn.list( 'b/'+bundle_name+'-'+bundle_version, cnfg['bucket_name'] ) b_num = 1 for x in uploaded_builds: b_num += 1 build_number = 'b{0}'.format(b_num) # ---- Generate filenames from extracted information # Ex: 'AppName-2.0-b5.ipa' bd_filename = bundle_name+'-'+bundle_version+'-'+build_number+'.ipa' # Ex: 'manifest-2.0-b5.plist' mn_filename = 'manifest-'+bundle_version+'-'+build_number+'.plist' # ---- Create manifest.plist file from template # {0} - URL to .ipa # {1} - Bundle identifier # {2} - Bundle version # {3} - Bundle name template_file = open('manifest-template', 'r') manifest_data = template_file.read().format( 'https://s3.amazonaws.com/'+cnfg['bucket_name']+'/b/'+bd_filename, bundle_identifier, bundle_version, bundle_name ) template_file.close() manifest_file = open('temp/manifest.plist', 'w') manifest_file.write(manifest_data) manifest_file.close() # ---- Upload build and manifest to S3 print('\nUploading build...') build_file = open(build_filename, 'rb') r = 
conn.upload(bd_filename, build_file, cnfg['bucket_name']+'/b') if r.status_code != 200: print('Error: Build upload unsuccessful (Status code {0)'\ .format(r.status_code)) shutil.rmtree('temp') return print('Uploading manifest...') manifest_file = open('temp/manifest.plist', 'rb') r = conn.upload(mn_filename, manifest_file, cnfg['bucket_name']+'/m') if r.status_code != 200: print('Error: Manifest upload unsuccessful (Status code {0)'\ .format(r.status_code)) # Try to clean up conn.delete(bd_filename, cnfg['bucket_name']+'/b') shutil.rmtree('temp') return # ---- Clean up and finish shutil.rmtree('temp') print('\nUpload successful! ({0})\n'.format(bd_filename)) aws = 'https://s3.amazonaws.com/' b_url = aws+cnfg['bucket_name']+'/b/'+bd_filename m_url = aws+cnfg['bucket_name']+'/m/'+mn_filename print('-'*32) print('Build : {0}'.format(b_url)) print('Manifest : {0}'.format(m_url)) itms = '\nitms-services://?action=download-manifest&url={0}' print(itms.format(m_url)) print('-'*32+'\n') if __name__ == '__main__': import sys if len(sys.argv) == 2: build_path = sys.argv[1] main(build_path) else: print('\nUsage:\n\tios-dist.py {filename or path to build}\n')
mit
9,146,017,136,505,671,000
25.285714
79
0.597554
false
3.4619
false
false
false
justanr/objtoolz
objtoolz/metas/memoize.py
1
1696
''' objtoolz.metas.memoized ``````````````````````` Metaclass that allows memoization of instances ''' from toolz import memoize from ..compat import wraps __all__ = ('Memoized',) def _default_cache_key(args, kwargs): """By default, toolz.memoize will only cache positional args if no cache key is passed and it can't determine if there's keyword arguments. However, this will cause memoize to cache *both* if a cache key func isn't provided. """ return (args or None, frozenset(kwargs.items()) or None) class Memoized(type): """Metaclass for memoizing object instantiation. In Python 3 a cache type and cacheing key can be specified at class creation like this: .. code-block:: python class MyClass(metaclass=Memoized, cache=OrderedDict()) However, in Python 2, they must be specified after the fact """ def __new__(mcls, name, bases, attrs, **kwargs): return super(Memoized, mcls).__new__(mcls, name, bases, attrs) def __init__(cls, name, bases, attrs, key=_default_cache_key, cache=None): if cache is None: cache = {} cls._cache = cache # wrap in staticmethod for PY2 support # otherwise it's created as an UnboundMethod cls._cache_key = staticmethod(key) return super(Memoized, cls).__init__(name, bases, attrs) def __call__(cls, *args, **kwargs): """Memoize actual object instantiation from the created class """ @wraps(cls) @memoize(cache=cls._cache, key=cls._cache_key) def rememberer(*a, **k): return super(Memoized, cls).__call__(*a, **k) return rememberer(*args, **kwargs)
mit
-205,143,517,975,639,360
31.615385
79
0.628538
false
3.845805
false
false
false
kizniche/Mycodo
mycodo/functions/examples/custom_function_simple_loop_with_status.py
1
5392
# coding=utf-8 # # custom_function_example.py - Custom function example file for importing into Mycodo # # Copyright (C) 2015-2020 Kyle T. Gabriel <[email protected]> # # This file is part of Mycodo # # Mycodo is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Mycodo is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Mycodo. If not, see <http://www.gnu.org/licenses/>. # # Contact at kylegabriel.com # import datetime import time from flask_babel import lazy_gettext from mycodo.databases.models import CustomController from mycodo.functions.base_function import AbstractFunction from mycodo.utils.constraints_pass import constraints_pass_positive_value from mycodo.utils.database import db_retrieve_table_daemon FUNCTION_INFORMATION = { 'function_name_unique': 'example_function_loop_with_status', 'function_name': 'Example: Simple Loop with Status', 'message': 'This is an example function that will increment a stored variable once every 60 seconds. ' 'A status call will be made to the function from the web UI and the return string along ' 'with the current time will be displayed for the user every Status Period. The Status ' 'Widget will also display this status.', 'options_disabled': [ 'measurements_select', 'measurements_configure' ], # These options will appear in the settings of the Function, # which the user can use to set different values and options for the Function. # These settings can only be changed when the Function is inactive. 
'custom_options': [ { 'id': 'period', 'type': 'float', 'default_value': 60, 'required': True, 'constraints_pass': constraints_pass_positive_value, 'name': lazy_gettext('Period (seconds)'), 'phrase': lazy_gettext('The duration (seconds) between measurements or actions') }, { 'id': 'start_offset', 'type': 'integer', 'default_value': 10, 'required': True, 'name': 'Start Offset', 'phrase': 'The duration (seconds) to wait before the first operation' }, { 'id': 'period_status', 'type': 'integer', 'default_value': 60, 'required': True, 'name': 'Status Period (seconds)', 'phrase': 'The duration (seconds) to update the Function status on the UI' } ] } class CustomModule(AbstractFunction): """ Class to operate custom controller """ def __init__(self, function, testing=False): super(CustomModule, self).__init__(function, testing=testing, name=__name__) self.timer_loop = None self.loop_counter = 0 # # Initialize what you defined in custom_options, above # self.period = None self.start_offset = None self.period_status = None # # Set custom options # custom_function = db_retrieve_table_daemon( CustomController, unique_id=self.unique_id) self.setup_custom_options( FUNCTION_INFORMATION['custom_options'], custom_function) if not testing: self.initialize_variables() def initialize_variables(self): # import controller-specific modules here # You may import something you defined in dependencies_module self.timer_loop = time.time() + self.start_offset def loop(self): if self.timer_loop > time.time(): return while self.timer_loop < time.time(): self.timer_loop += self.period self.logger.info( "This text will appear in the Daemon Log as an INFO line") self.logger.debug( "This text will appear in the Daemon Log as an DEBUG line and " "will only appear if Log Level: Debug is enabled") self.loop_counter += 1 self.logger.info("Loop counter: {}".format(self.loop_counter)) def function_status(self): return_dict = { 'string_status': "This info is being returned from the Function Module." 
"\nCurrent time: {}" "\nLoop count: {}".format( datetime.datetime.now(), self.loop_counter), 'error': [] } return return_dict def button_one(self, args_dict): self.logger.error("Button One Pressed!: {}".format(int(args_dict['button_one_value']))) return "Here return message will be seen in the web UI. " \ "This only works when 'wait_for_return' is set True." def button_two(self, args_dict): self.logger.error("Button Two Pressed!: {}".format(int(args_dict['button_two_value']))) return "This message will never be seen in the web UI because this process is threaded"
gpl-3.0
-7,562,899,649,561,917,000
34.708609
106
0.620549
false
4.222396
false
false
false
aholkner/bacon
bacon/window.py
2
5950
from ctypes import * import sys import os import bacon from bacon.core import lib from bacon import native from bacon import graphics class Window(object): '''Properties of the game window. The window is constructed automatically when :func:`run` is called. The :data:`window` singleton provides access to the members of this class both before and after ``run`` is called. For example, to set up some common window properties for a game:: bacon.window.title = 'Destiny of Swords' bacon.window.width = 800 bacon.window.height = 600 All properties can be modified at runtime, for example to toggle in and out of fullscreen. ''' def __init__(self): self._width = -1 self._height = -1 self._resizable = False self._fullscreen = False self._target = None # Current scale/bias to apply from window space to target space self._target_offset_x = 0.0 self._target_offset_y = 0.0 self._target_scale = 0.0 if not native._mock_native: width = c_int() height = c_int() lib.GetWindowSize(byref(width), byref(height)) self._width = width.value self._height = height.value content_scale = c_float() lib.GetWindowContentScale(byref(content_scale)) self._content_scale = content_scale.value self.title = os.path.basename(sys.argv[0]) def _get_width(self): return self._width def _set_width(self, width): lib.SetWindowSize(width, self._height) self._width = width width = property(_get_width, _set_width, doc='''Get or set the width of the drawable part of the window, in pixels.''') def _get_height(self): return self._height def _set_height(self, height): lib.SetWindowSize(self._width, height) self._height = height height = property(_get_height, _set_height, doc='''Get or set the height of the drawable part of the window, in pixels.''') def _get_title(self): return self._title def _set_title(self, title): lib.SetWindowTitle(title.encode('utf-8')) self._title = title title = property(_get_title, _set_title, doc='''Get or set the title of the window (a string)''') def _is_resizable(self): return 
self._resizable def _set_resizable(self, resizable): lib.SetWindowResizable(resizable) self._resizable = resizable resizable = property(_is_resizable, _set_resizable, doc='''If ``True`` the window can be resized and maximized by the user. See :func:`Game.on_resize`.''') def _is_fullscreen(self): return self._fullscreen def _set_fullscreen(self, fullscreen): lib.SetWindowFullscreen(fullscreen) self._fullscreen = fullscreen fullscreen = property(_is_fullscreen, _set_fullscreen, doc='''Set to ``True`` to make the game fullscreen, ``False`` to play in a window.''') def _get_target(self): return self._target def _set_target(self, target): self._target = target target = property(_get_target, _set_target, doc='''Optional image to use as the default render target. If set, all rendering will be to this image, which will appear scaled and letterboxed if necessary in the center of the window. :attr:`width`, :attr:`height` and :attr:`content_scale` will return the dimensions of this target instead of the window dimensions. :type: :class:`Image`''') def _get_content_scale(self): return self._content_scale def _set_content_scale(self, content_scale): lib.SetWindowContentScale(content_scale) self._content_scale = content_scale content_scale = property(_get_content_scale, _set_content_scale, doc='''The scaling factor applied to the window. On Windows this is always 1.0. On OS X with a retina display attached, ``content_scale`` will default to 2.0. Fonts and offscreen render targets are created at this content scale by default, to match the pixel density. You can explicitly set ``content_scale`` to 1.0, disabling the high-resolution framebuffer. You should do so before loading any assets. :type: float ''') #: The singleton :class:`Window` instance. 
window = Window() def _window_resize_event_handler(width, height): window._width = width window._height = height bacon._current_game.on_resize(width, height) _window_frame_target = None def _begin_frame(): global _window_frame_target _window_frame_target = window._target if _window_frame_target: graphics.push_target(_window_frame_target) target_aspect = _window_frame_target._width / float(_window_frame_target._height) window_aspect = window._width / float(window._height) if target_aspect > window_aspect: width = window._width height = width / target_aspect else: height = window._height width = height * target_aspect window._target_scale = width / float(_window_frame_target._width) window._target_offset_x = int(window._width / 2 - width / 2) window._target_offset_y = int(window._height / 2 - height / 2) else: window._target_scale = 1.0 window._target_offset_x = window._target_offset_y = 0.0 def _end_frame(): global _window_frame_target if _window_frame_target: graphics.pop_target() graphics.clear(0, 0, 0, 1) graphics.set_color(1, 1, 1, 1) x = window._target_offset_x y = window._target_offset_y width = _window_frame_target._width * window._target_scale height = _window_frame_target._height * window._target_scale graphics.draw_image(_window_frame_target, x, y, x + width, y + height) _window_frame_target = None
mit
-1,834,301,251,757,115,100
36.904459
160
0.635126
false
3.836235
false
false
false
GetSomeBlocks/Score_Soccer
resources/src/mythbox/ui/recordingdetails.py
5
15736
# # MythBox for XBMC - http://mythbox.googlecode.com # Copyright (C) 2011 [email protected] # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # import logging import xbmcgui import mythbox.msg as m from mythbox.bus import Event from mythbox.mythtv.db import inject_db from mythbox.mythtv.conn import inject_conn from mythbox.mythtv.domain import StatusException, Job from mythbox.mythtv.enums import JobType, JobStatus from mythbox.ui.player import MountedPlayer, StreamingPlayer, NoOpCommercialSkipper, TrackingCommercialSkipper from mythbox.ui.schedules import ScheduleDialog from mythbox.ui.toolkit import Action, BaseWindow, window_busy from mythbox.util import safe_str, catchall, catchall_ui, run_async, coalesce, to_kwargs from mythbox.ui import toolkit log = logging.getLogger('mythbox.ui') class RecordingDetailsWindow(BaseWindow): def __init__(self, *args, **kwargs): BaseWindow.__init__(self, *args, **kwargs) [setattr(self,k,v) for k,v in kwargs.iteritems() if k in ('settings', 'translator', 'platform', 'fanArt', 'cachesByName', 'programIterator', 'bus',)] [setattr(self,k,v) for k,v in self.cachesByName.iteritems() if k in ('mythChannelIconCache', 'mythThumbnailCache', 'domainCache')] self.t = self.translator.get self.program = self.programIterator.current() self.isDeleted = False self.initialized = False self.streaming = 
self.settings.getBoolean('streaming_enabled') self.channels = None @catchall_ui def onInit(self): if not self.initialized: self.initialized = True self.win = xbmcgui.Window(xbmcgui.getCurrentWindowId()) # Buttons self.playButton = self.getControl(250) self.playSkipButton = self.getControl(251) self.deleteButton = self.getControl(252) self.rerecordButton = self.getControl(253) self.firstInQueueButton = self.getControl(254) self.refreshButton = self.getControl(255) self.editScheduleButton = self.getControl(256) self.advancedButton = self.getControl(257) self.dispatcher = { self.playButton.getId() : self.play, self.playSkipButton.getId() : self.playWithCommSkip, self.deleteButton.getId() : self.delete, self.rerecordButton.getId() : self.rerecord, self.firstInQueueButton.getId(): self.moveToFrontOfJobQueue, self.refreshButton.getId() : self.refresh, self.editScheduleButton.getId(): self.editSchedule, 301:self.doCommFlag, 302:self.doTranscode, 303:self.doUserJob1, 304:self.doUserJob2, 305:self.doUserJob3, 306:self.doUserJob4, 307:self.doRefreshFanart } self.render() def doRefreshFanart(self): self.fanArt.clear(self.program) self.refresh() self.bus.publish({'id' : Event.FANART_REFRESHED, 'program' : self.program}) toolkit.showPopup('Fan Art', 'Refreshed Fan Art for %s' % self.program.title(), 5000) def doCommFlag(self): self.queueJob(JobType.COMMFLAG) def doTranscode(self): self.queueJob(JobType.TRANSCODE) def doUserJob1(self): self.queueJob(JobType.USERJOB & JobType.USERJOB1) def doUserJob2(self): self.queueJob(JobType.USERJOB & JobType.USERJOB2) def doUserJob3(self): self.queueJob(JobType.USERJOB & JobType.USERJOB3) def doUserJob4(self): self.queueJob(JobType.USERJOB & JobType.USERJOB4) @inject_db def queueJob(self, jobType): job = Job.fromProgram(self.program, jobType) self.db().addJob(job) numJobs = len(self.db().getJobs(jobStatus=JobStatus.QUEUED)) toolkit.showPopup('Job Queue', 'Queued as job %d of %d ' % (numJobs,numJobs), 5000) @inject_db def autoexpire(self): 
self.db().setRecordedAutoexpire( self.program.getChannelId(), self.program.starttime(), not self.program.isAutoExpire()) self.refresh() def delete(self): yes = True if self.settings.isConfirmOnDelete(): yes = xbmcgui.Dialog().yesno(self.t(m.CONFIRMATION), self.t(m.ASK_DELETE_RECORDING)) @run_async @catchall @inject_conn def deleteAsync(self): self.conn().deleteRecording(self.program) if yes: deleteAsync(self) self.isDeleted = True self.close() def rerecord(self): yes = True if self.settings.isConfirmOnDelete(): yes = xbmcgui.Dialog().yesno(self.t(m.CONFIRMATION), self.t(m.ASK_RERECORD_RECORDING)) @run_async @catchall @inject_conn def rerecordAsync(self): self.conn().rerecordRecording(self.program) if yes: rerecordAsync(self) self.isDeleted = True self.close() @inject_db def moveToFrontOfJobQueue(self): jobs = self.db().getJobs(program=self.program, jobStatus=JobStatus.QUEUED, jobType=JobType.COMMFLAG) if len(jobs) == 1: job = jobs[0] job.moveToFrontOfQueue() self.refresh() else: xbmcgui.Dialog().ok(self.t(m.ERROR), self.t(m.JOB_NOT_FOUND)) @inject_conn def canStream(self): # TODO: Merge with duplicate method in RecordingDetailsWindow if not self.conn().protocol.supportsStreaming(self.platform): xbmcgui.Dialog().ok(self.t(m.ERROR), 'Streaming from a MythTV %s backend to XBMC' % self.conn().protocol.mythVersion(), '%s is broken. Try playing again after deselecting' % self.platform.xbmcVersion(), 'MythBox > Settings > MythTV > Enable Streaming') return False return True @catchall_ui def play(self): log.debug("Playing %s .." 
% safe_str(self.program.title())) deps = to_kwargs(self, ['program', 'mythThumbnailCache', 'translator', 'settings', 'platform']) if self.streaming: if not self.canStream(): return # Play via myth:// p = StreamingPlayer(**deps) p.playRecording(NoOpCommercialSkipper(p, self.program, self.translator)) else: # Play via local fs p = MountedPlayer(**deps) p.playRecording(NoOpCommercialSkipper(p, self.program, self.translator)) del p def playWithCommSkip(self): log.debug("Playing with skip %s .." % safe_str(self.program.title())) deps = to_kwargs(self, ['program', 'mythThumbnailCache', 'translator', 'settings', 'platform']) if self.streaming: if not self.canStream(): return # Play via myth:// p = StreamingPlayer(**deps) p.playRecording(NoOpCommercialSkipper(p, self.program, self.translator)) else: # Play via local fs p = MountedPlayer(**deps) p.playRecording(TrackingCommercialSkipper(p, self.program, self.translator)) del p @inject_db def editSchedule(self): if self.program.getScheduleId() is None: xbmcgui.Dialog().ok(self.t(m.INFO), self.t(m.ERR_NO_RECORDING_SCHEDULE)) return schedules = self.db().getRecordingSchedules(scheduleId=self.program.getScheduleId()) if len(schedules) == 0: xbmcgui.Dialog().ok(self.t(m.INFO), self.t(m.ERR_SCHEDULE_NOT_FOUND) % self.program.getScheduleId()) return editScheduleDialog = ScheduleDialog( 'mythbox_schedule_dialog.xml', self.platform.getScriptDir(), forceFallback=True, schedule=schedules[0], **to_kwargs(self, ['translator', 'platform', 'settings', 'mythChannelIconCache'])) editScheduleDialog.doModal() if editScheduleDialog.shouldRefresh: self.render() def nextRecording(self): self.program = self.programIterator.next() self.render() def previousRecording(self): self.program = self.programIterator.previous() self.render() def isAdvancedBladeActive(self): buttonIds = [self.firstInQueueButton.getId(),300,301,302,303,304,305,306] return self.getFocusId() in buttonIds @catchall_ui def onAction(self, action): id = action.getId() if id in 
(Action.PREVIOUS_MENU, Action.PARENT_DIR): if self.isAdvancedBladeActive(): self.setFocus(self.advancedButton) else: self.close() elif id == Action.PAGE_UP: self.previousRecording() elif id == Action.PAGE_DOWN: self.nextRecording() else: log.debug('unhandled action = %s id = %s' % (action, action.getId())) def onFocus(self, controlId): pass @catchall_ui @window_busy def onClick(self, controlId): #log.debug('onClick %s ' % controlId) source = self.getControl(controlId) try: self.dispatcher[source.getId()]() return True except KeyError: return False @inject_conn def refresh(self): refreshedProgram = self.conn().getRecording(self.program.getChannelId(), self.program.recstarttime()) if refreshedProgram: self.program = refreshedProgram self.render() else: raise Exception, self.t(m.RECORDING_NOT_FOUND) % self.program.title() @window_busy def render(self): self.renderDetail() self.renderChannel() self.renderThumbnail() self.renderUserJobs() self.renderCommBreaks() # async self.renderSeasonAndEpisode(self.program) # async def renderDetail(self): s = self.program self.setWindowProperty('title', s.fullTitle()) self.setWindowProperty('airDate', s.formattedAirDateTime()) self.setWindowProperty('originalAirDate', s.formattedOriginalAirDate()) self.setWindowProperty('channel', s.formattedChannel()) self.setWindowProperty('description', s.formattedDescription()) self.setWindowProperty('category', s.category()) self.setWindowProperty('episode', '...') self.setWindowProperty('fileSize', s.formattedFileSize()) self.setWindowProperty('autoExpire', (('No', 'Yes')[s.isAutoExpire()])) self.setWindowProperty('commBreaks', '...') self.setWindowProperty('recordingNofM', self.t(m.RECORDING_N_OF_M) % (str(self.programIterator.index() + 1), str(self.programIterator.size()))) @catchall @inject_db def renderChannel(self): if not self.channels: self.channels = {} for c in self.domainCache.getChannels(): self.channels[c.getChannelId()] = c if self.program.getChannelId() in self.channels: icon = 
self.mythChannelIconCache.get(self.channels[self.program.getChannelId()]) if icon: self.setWindowProperty('channelIcon', icon) def renderThumbnail(self): thumbFile = self.mythThumbnailCache.get(self.program) self.setWindowProperty('thumbnailShadow', 'mb-DialogBack.png') if thumbFile: self.setWindowProperty('thumbnail', thumbFile) else: self.setWindowProperty('thumbnail', 'mythbox-logo.png') log.error('Recording thumbnail preview image not found: %s' % safe_str(self.program.title())) @run_async @catchall @inject_db @coalesce def renderCommBreaks(self): self.playSkipButton.setEnabled(self.program.hasCommercials()) self.firstInQueueButton.setEnabled(False) commBreaks = '-' if self.program.isCommFlagged(): if self.program.hasCommercials(): # Only move focus to Skip button if user hasn't changed the initial focus if self.getFocusId() == self.playButton.getId(): self.setFocus(self.playSkipButton) commBreaks = "%d" % len(self.program.getCommercials()) else: commBreaks = self.t(m.NONE) else: jobs = self.db().getJobs(program=self.program, jobType=JobType.COMMFLAG) if len(jobs) == 1: job = jobs[0] if job.jobStatus == JobStatus.QUEUED: position, numJobs = job.getPositionInQueue() commBreaks = self.t(m.QUEUED_N_OF_M) % (position, numJobs) if position != 1: self.firstInQueueButton.setEnabled(True) elif job.jobStatus == JobStatus.RUNNING: try: commBreaks = self.t(m.N_AT_M_FPS) % ('%d%%' % job.getPercentComplete(), '%2.0f' % job.getCommFlagRate()) except StatusException: commBreaks = job.comment else: commBreaks = job.formattedJobStatus() if log.isEnabledFor(logging.DEBUG): commBreaks += ' (%s)' % self.program.getFPS() self.setWindowProperty('commBreaks', commBreaks) @run_async @catchall @coalesce def renderSeasonAndEpisode(self, boundProgram): season, episode = None, None try: season, episode = self.fanArt.getSeasonAndEpisode(boundProgram) finally: if boundProgram == self.program: self.setWindowProperty('episode', ['-', '%sx%s' % (season, episode)][bool(season) and 
bool(episode)]) else: log.debug('Program changed since spawning...recursing...') self.renderSeasonAndEpisode(self.program) @inject_db def renderUserJobs(self): jobs = { 'UserJob1': {'control':303, 'descColumn':'UserJobDesc1'}, 'UserJob2': {'control':304, 'descColumn':'UserJobDesc2'}, 'UserJob3': {'control':305, 'descColumn':'UserJobDesc3'}, 'UserJob4': {'control':306, 'descColumn':'UserJobDesc4'} } for jobName in jobs.keys(): jobCommand = self.db().getMythSetting(jobName) jobButton = self.getControl(jobs[jobName]['control']) if jobCommand is None or len(jobCommand) == 0: jobButton.setVisible(False) else: jobButton.setLabel(self.db().getMythSetting(jobs[jobName]['descColumn']))
mit
5,722,216,857,312,534,000
39.043257
157
0.596467
false
3.983797
false
false
false
ashleywaite/django-more
django_cte/django_db_models_query.py
1
2770
import django from itertools import chain from .django_db_models_expressions import CTERef from django.db.models import sql class QuerySet: def attach(self, *querysets): clone = sql.WithQuery(self.query) for qs in querysets: clone.query.add_with(qs.query) return clone def as_insert(self, **kwargs): raise NotImplementedError("Not implemented yet") """ clone = self._clone() clone.query = self.query.clone(sql.InsertSelectQuery) self._for_write = True clone.query.add_update_values(kwargs) if fields: fields = [self.model._meta.get_field(f) for f in fields] clone.query.insert_values(fields, objs, raw=raw) return clone """ def as_update(self, **kwargs): clone = self._clone() clone.query = self.query.clone(sql.UpdateReturningQuery) print("clone is", type(clone)) print("clone query is", type(clone.query)) self._for_write = True clone.query.add_update_values(kwargs) # Clear any annotations so that they won't be present in subqueries. clone.query._annotations = None return clone def with_literals(self, qs): pass def ref(self, field): # These are not validated return CTERef(with_query=self.query, field_name=field) class LiteralQuerySet(django.db.models.QuerySet): """ CTEs can be connected to a query to enable WITH style queries """ def __init__(self, model=None, query=None, values=None, enum_field=None, *args, **kwargs): query = query or sql.LiteralQuery(model) super().__init__(model=model, query=query, *args, **kwargs) if values: self.append(values) if enum_field: self.enum_field(enum_field) def enum_field(self, field_name): self.query.enum_field = field_name return self def clear(self): self.query.clear_values() return self def append(self, values): self.query.literal_values(values) return self def defer(self, *fields): raise NotImplementedError("LiteralQuerySet does not implement defer()") def delete(self): raise TypeError("Queries with literal values can't be deleted") def order_by(self, *field_names): raise NotImplementedError("LiteralQuerySet does not implement 
order_by()") def distinct(self, *field_names): raise NotImplementedError("LiteralQuerySet does not implement distinct()") def extra(self, *args, **kwargs): raise NotImplementedError("LiteralQuerySet does not implement extra()") def reverse(self): raise NotImplementedError("LiteralQuerySet does not implement reverse()")
bsd-3-clause
2,937,635,270,745,801,700
31.209302
94
0.639711
false
4.18429
false
false
false
jdonkervliet/crazyflie-for-newbs
Android.py
1
3469
''' Created on Nov 5, 2014 @author: jesse ''' import Queue import SocketServer from BaseCommand import BaseCommand from FlyCommand import FlyCommand from TerminateCommand import TerminateCommand from getch import getch from multiprocessing import Process import multiprocessing from threading import Thread import time import math class Android(object): ''' Allows for live keyboard input to calibrate or fly the crazyflie. ''' crazyFlie = None parameters = None data = None control = {} stopThread = False def __init__(self, crazyFlie, parameters, data, queue): ''' Creates the new object. ''' self.crazyFlie = crazyFlie self.queue = queue def start(self): t = Thread(target = self.run) t.start() def run(self): acceloServer = AcceloServer() t = Thread(target = acceloServer.run) t.start() prevCommandTime = time.time() noDataCount = 0 while not self.stopThread: data = AcceloHandler.data if data is not None: command = FlyCommand() if "dir" in data: dirData = data["dir"] command.pitch = dirData[0] command.roll = dirData[1] if "thr" in data: thrData = data["thr"] command.thrust = thrData[0] command.yaw = thrData[1] print "Command pitch: " + str(command.pitch) + ", roll: " + str(command.roll) self.queue.addCommand(command) elif noDataCount < 1000: noDataCount += 1 else: print "Receiving no data, stopping." 
self.queue.addCommand(TerminateCommand()) self.stopThread = True acceloServer.stop() time.sleep(0.1) class AcceloServer(): HOST = "" PORT = 5555 server = None def __init__(self): self.server = SocketServer.UDPServer((self.HOST, self.PORT), AcceloHandler) def run(self): self.server.serve_forever() def stop(self): self.server.shutdown() class AcceloHandler(SocketServer.BaseRequestHandler): data = {} ips = {} def handle(self): addr = self.client_address[0] data = self.request[0].strip() if "dir" not in AcceloHandler.ips: print "{} controls direction.".format(addr) AcceloHandler.ips["dir"] = addr AcceloHandler.data["dir"] = {} elif "thr" not in AcceloHandler.ips and addr != AcceloHandler.ips["dir"]: print "{} controls thrust and yaw.".format(addr) AcceloHandler.ips["thr"] = addr AcceloHandler.data["thr"] = {} splitData = data.split(",") dirs = self.acceloToValue(float(splitData[2]), float(splitData[3]), float(splitData[4])) if addr == AcceloHandler.ips["dir"]: AcceloHandler.data["dir"] = dirs elif addr == AcceloHandler.ips["thr"]: # FIXME Very hacky, but see if it works. AcceloHandler.data["thr"] = (dirs[0]*-1000, 10*dirs[1]) def acceloToValue(self, ax, ay, az): if az != 0: sDir = math.atan(-ax/az) * (90 / math.pi) fDir = math.atan(-ay/az) * (90 / math.pi) else: fDir = 0 sDir = 0 return (fDir, sDir)
mit
83,067,341,520,781,780
26.531746
96
0.551167
false
3.863029
false
false
false
mesnardo/snake
snake/solutions/koumoutsakosLeonard1995.py
2
2230
""" Implementation of the class `KoumoutsakosLeonard1995` that contains the instantaneous drag coefficients of an impulsively-started 2D cylinder for Reynolds numbers 40, 550, and 3000. The drag coefficient data files are located in the folder `resources/results` of the snake package. _References:_ * Koumoutsakos, P., & Leonard, A. (1995). High-resolution simulations of the flow around an impulsively started cylinder using vortex methods. Journal of Fluid Mechanics, 296, 1-38. """ import os import numpy from snake.force import Force class KoumoutsakosLeonard1995(object): """ Container to store results from Koumoutsakos and Leonard (1995). """ def __init__(self, file_path=None, Re=None): """ Initializes. Parameters ---------- file_path: string, optional Path of the file containing the instantaneous drag coefficients; default: None. Re: float, optional Reynolds number; default: None. """ self.description = 'Koumoutsakos and Leonard (1995)' self.cd = None if file_path or Re: self.read_drag(file_path=file_path, Re=Re) def read_drag(self, file_path=None, Re=None): """ Reads the instantaneous drag coefficients from file. Parameters ---------- file_path: string, optional Path of the file containing the instantaneous drag coefficients; default: None. Re: float, optional Reynolds number; default: None. """ if not (file_path or Re): print('[error] please provide path of file of Reynolds number') return print('[info] reading drag coefficients ...'), if not file_path: file_name = ('koumoutsakos_leonard_1995_' 'cylinder_dragCoefficientRe{}.dat'.format(Re)) file_path = os.path.join(os.environ['SNAKE'], 'resources', 'results', file_name) with open(file_path, 'r') as infile: times, drag = numpy.loadtxt(infile, dtype=float, comments='#', unpack=True) self.cd = Force(0.5 * times, drag) print('done')
mit
-4,460,972,278,539,322,400
28.342105
73
0.60852
false
3.851468
false
false
false
pcapriotti/pledger
pledger/parser.py
1
5762
import itertools import re import codecs from datetime import datetime, date from .account import Account, AccountFactory from .value import Value from .ledger import Ledger from .transaction import Transaction, UndefinedTransaction, UnbalancedTransaction from .directive import Directive, UnsupportedDirective from .entry import Entry from .util import PledgerException, itersplit date_formats = { "default": "%Y/%m/%d", "year": "%Y", "month": "%b"} class MalformedHeader(PledgerException): pass class Parser(object): def __init__(self): self.precision = 2 self.repo = AccountFactory() def parse_account(self, name): return self.repo.parse(name) def parse_value(self, str): return Value.parse(str) def parse_ledger(self, filename, str=None): if str is None: str = codecs.open(filename, "r", "utf-8").read() def f(number_line): return number_line[1] == "" lines = zip(itertools.count(1), str.split("\n")) try: transactions = [self.parse_transaction( group) for group in itersplit(f, lines)] except PledgerException as e: e.filename = filename raise e return Ledger(filename, [t for t in transactions if t], self) def parse_entry(self, str): tags = self.parse_tags(str) or {} str = re.sub(";.*$", "", str) elements = [e for e in re.split(r" +", str) if e] if len(elements) >= 1: account = self.parse_account(elements[0]) amount = None if len(elements) >= 2: amount = self.parse_value(elements[1]) if account: return Entry(account, amount, tags) def parse_transaction(self, lines): if hasattr(lines, "split"): lines = list(zip(itertools.count(1), iter(lines.split("\n")))) tags = {} # discard initial comments while lines and re.match(r'\s*;', lines[0][1]): lines = lines[1:] if len(lines) == 0: return None n, header = lines[0] lines = lines[1:] # skip rules if len(header) == 0 or header[0] == "=": return None directive = self.parse_directive(header) if directive: return directive # parse transaction tags if lines: n, line = lines[0] tags = self.parse_tags(line, begin=True) if tags: lines = 
lines[1:] try: date, label, cleared = self.parse_header(header) date = self.parse_date(date) if date is None: raise MalformedHeader() entries = [self.parse_entry(line) for n, line in lines] line_numbers = [n for n, line in lines] transaction = Transaction.balanced(entries, date, label) if tags: transaction.tags = tags if cleared: transaction.tags["cleared"] = True return transaction except UnbalancedTransaction as e: e.line_number = n raise e except UndefinedTransaction as e: e.line_number = line_numbers[e.index] raise e except MalformedHeader as e: e.line_number = n raise e def parse_date(self, str, format="default"): try: return datetime.strptime(str, date_formats[format]).date() except ValueError: pass def parse_month(self, str): base = self.parse_date(str, "month") if base: return date(date.today().year, base.month, 1) def parse_year(self, str): base = self.parse_date(str, "year") if base: return date(base.year, 1, 1) def parse_fuzzy_date(self, str): for parser in [self.parse_date, self.parse_month, self.parse_year]: result = parser(str) if result: return result return None def parse_header(self, str): m = re.match(r'^(\S+)\s+(\*\s+)?(.*)$', str) if m: return m.group(1), m.group(3), m.group(2) else: raise MalformedHeader() def parse_tags(self, str, begin=False): pattern = r'\s*;\s*(.*)$' if begin: m = re.match(pattern, str) else: m = re.search(pattern, str) if m: tagstring = m.group(1) tag_dict = [] while True: result = self.parse_tag(tagstring) if result is None: break tag, index = result tag_dict.append(tag) tagstring = tagstring[index:] return dict(tag_dict) def parse_tag(self, str): m = re.match(r':?(\S+):"([^"]*)"\s*', str) if m: return ((m.group(1), m.group(2)), m.end()) m = re.match(r":?(\S+):'([^']*)'\s*", str) if m: return ((m.group(1), m.group(2)), m.end()) m = re.match(r':?(\S+):(\S*)\s*', str) if m: return ((m.group(1), m.group(2)), m.end()) m = re.match(r'\[(\S+)\]\s*', str) if m: try: return (("date", self.parse_date(m.group(1))), m.end()) except 
ValueError: pass def parse_directive(self, str): if str[0] == '!': args = str[1:].split(' ') name = args[0] args = args[1:] directive_class = Directive.directives.get(name) if directive_class: return directive_class(*args) else: raise UnsupportedDirective(name)
mit
-3,773,451,124,369,703,400
29.326316
81
0.515793
false
3.946575
false
false
false
SnabbCo/neutron
neutron/services/provider_configuration.py
20
6052
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg from neutron.common import exceptions as n_exc from neutron.openstack.common import log as logging from neutron.plugins.common import constants LOG = logging.getLogger(__name__) serviceprovider_opts = [ cfg.MultiStrOpt('service_provider', default=[], help=_('Defines providers for advanced services ' 'using the format: ' '<service_type>:<name>:<driver>[:default]')) ] cfg.CONF.register_opts(serviceprovider_opts, 'service_providers') #global scope function that should be used in service APIs def normalize_provider_name(name): return name.lower() def parse_service_provider_opt(): """Parse service definition opts and returns result.""" def validate_name(name): if len(name) > 255: raise n_exc.Invalid( _("Provider name is limited by 255 characters: %s") % name) svc_providers_opt = cfg.CONF.service_providers.service_provider res = [] for prov_def in svc_providers_opt: split = prov_def.split(':') try: svc_type, name, driver = split[:3] except ValueError: raise n_exc.Invalid(_("Invalid service provider format")) validate_name(name) name = normalize_provider_name(name) default = False if len(split) == 4 and split[3]: if split[3] == 'default': default = True else: msg = (_("Invalid provider format. 
" "Last part should be 'default' or empty: %s") % prov_def) LOG.error(msg) raise n_exc.Invalid(msg) if svc_type not in constants.ALLOWED_SERVICES: msg = (_("Service type '%(svc_type)s' is not allowed, " "allowed types: %(allowed)s") % {'svc_type': svc_type, 'allowed': constants.ALLOWED_SERVICES}) LOG.error(msg) raise n_exc.Invalid(msg) res.append({'service_type': svc_type, 'name': name, 'driver': driver, 'default': default}) return res class ServiceProviderNotFound(n_exc.InvalidInput): message = _("Service provider '%(provider)s' could not be found " "for service type %(service_type)s") class DefaultServiceProviderNotFound(n_exc.InvalidInput): message = _("Service type %(service_type)s does not have a default " "service provider") class ServiceProviderAlreadyAssociated(n_exc.Conflict): message = _("Resource '%(resource_id)s' is already associated with " "provider '%(provider)s' for service type '%(service_type)s'") class ProviderConfiguration(object): def __init__(self, prov_data): self.providers = {} for prov in prov_data: self.add_provider(prov) def _ensure_driver_unique(self, driver): for k, v in self.providers.items(): if v['driver'] == driver: msg = (_("Driver %s is not unique across providers") % driver) LOG.exception(msg) raise n_exc.Invalid(msg) def _ensure_default_unique(self, type, default): if not default: return for k, v in self.providers.items(): if k[0] == type and v['default']: msg = _("Multiple default providers " "for service %s") % type LOG.exception(msg) raise n_exc.Invalid(msg) def add_provider(self, provider): self._ensure_driver_unique(provider['driver']) self._ensure_default_unique(provider['service_type'], provider['default']) provider_type = (provider['service_type'], provider['name']) if provider_type in self.providers: msg = (_("Multiple providers specified for service " "%s") % provider['service_type']) LOG.exception(msg) raise n_exc.Invalid(msg) self.providers[provider_type] = {'driver': provider['driver'], 'default': 
provider['default']} def _check_entry(self, k, v, filters): # small helper to deal with query filters if not filters: return True for index, key in enumerate(['service_type', 'name']): if key in filters: if k[index] not in filters[key]: return False for key in ['driver', 'default']: if key in filters: if v[key] not in filters[key]: return False return True def _fields(self, resource, fields): if fields: return dict(((key, item) for key, item in resource.items() if key in fields)) return resource def get_service_providers(self, filters=None, fields=None): return [self._fields({'service_type': k[0], 'name': k[1], 'driver': v['driver'], 'default': v['default']}, fields) for k, v in self.providers.items() if self._check_entry(k, v, filters)]
apache-2.0
1,647,451,946,463,824,000
36.358025
78
0.559154
false
4.479645
false
false
false
nakednamor/naked-python
Python Crash Course/chapter 9/e6.py
1
1217
class Restaurant(): """Describes a place where you can get somethint to eat and drink""" def __init__(self, restaurant_name, cuisine_type): self.name = restaurant_name self.cuisine = cuisine_type self.number_served = 0 def describe_restaurant(self): print('name: ' + self.name) print('cuisine: ' + self.cuisine) def open_restaurant(self): print('we are open!') def set_number_served(self, number_served): self.number_served = number_served def increment_number_served(self, number_served): self.number_served += number_served class IceCreamStand(Restaurant): """A small stand where you can get some cold ice cream""" def __init__( self, restaurant_name, cuisine_type, flavors=['vanilla', 'chocolate', 'strawberry']): super().__init__(restaurant_name, cuisine_type) self.flavors = flavors def display_flavors(self): text = 'I have: \n' for flavor in self.flavors: text += '\t- ' + flavor + '\n' print(text) stand = IceCreamStand("Sam's Ice Bar", 'Snacks') stand.describe_restaurant() stand.display_flavors()
mit
-880,110,721,172,269,800
27.97619
72
0.602301
false
3.622024
false
false
false
maxamillion/product-definition-center
pdc/apps/compose/tests.py
1
97007
# -*- coding: utf-8 -*- # # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import json import mock from StringIO import StringIO from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import Client from rest_framework.test import APITestCase from rest_framework import status from pdc.apps.bindings import models as binding_models from pdc.apps.common.test_utils import create_user, TestCaseWithChangeSetMixin from pdc.apps.release.models import Release, ProductVersion from pdc.apps.component.models import (ReleaseComponent, BugzillaComponent) import pdc.apps.release.models as release_models import pdc.apps.common.models as common_models from . import models class ComposeModelTestCase(TestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def setUp(self): self.compose = models.Compose.objects.get(id=1) def test_get_rpms_existing(self): self.assertEqual(unicode(self.compose.get_rpms('bash')), '[<RPM: bash-0:1.2.3-4.b1.x86_64.rpm>]') def test_get_rpms_nonexisting(self): self.assertEqual(list(self.compose.get_rpms('foo')), []) def test_get_arch_testing_status(self): self.assertDictEqual(self.compose.get_arch_testing_status(), {'Server': {'x86_64': 'untested'}, 'Server2': {'x86_64': 'untested'}}) class VersionFinderTestCase(APITestCase): # TODO: This test case could be removed after removing endpoint 'compose/package' fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/product.json", 
"pdc/apps/release/fixtures/tests/product_version.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/more_composes.json", ] def setUp(self): self.url = reverse('findcomposewitholderpackage-list') def test_bad_args_missing_rpm_name(self): response = self.client.get(self.url, {'compose': 'compose-1'}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn('rpm_name', response.data.get('detail')) def test_bad_args_missing_release_and_compose(self): response = self.client.get(self.url, {'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn('release', response.data.get('detail')) self.assertIn('compose', response.data.get('detail')) def test_missing_previous_compose(self): response = self.client.get(self.url, {'compose': 'compose-1', 'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_previous_compose_has_same_version(self): response = self.client.get(self.url, {'compose': 'compose-2', 'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_previous_compose_has_older_rpm(self): response = self.client.get(self.url, {'compose': 'compose-3', 'rpm_name': 'bash'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('compose'), "compose-2") self.assertEqual(response.data.get('packages'), ["bash-0:1.2.3-4.b1.x86_64.rpm"]) def test_same_version_different_arch(self): """There is a previous compose with same version of package, but with different RPM.arch.""" models.ComposeRPM.objects.filter(pk=1).update(rpm=3) response = self.client.get(self.url, {'compose': 'compose-2', 'rpm_name': 
            'bash'})
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_get_for_release(self):
        # All three composes of release-1.0 contain a bash RPM.
        response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])

    def test_get_for_release_with_latest(self):
        # With latest=True only the newest matching compose is returned.
        response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0',
                                              'latest': 'True'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])

    def test_get_for_release_to_dict(self):
        # to_dict=True expands each package NEVRA into its full serialized form.
        response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0',
                                              'to_dict': True})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        expected = [
            {'compose': u'compose-1', 'packages': [
                {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1',
                 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': u'bash-0:1.2.3-4.b1.src',
                 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1,
                 'linked_composes': [u'compose-1', u'compose-2'], 'linked_releases': []}]},
            {'compose': u'compose-2', 'packages': [
                {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1',
                 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': u'bash-0:1.2.3-4.b1.src',
                 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1,
                 'linked_composes': [u'compose-1', u'compose-2'], 'linked_releases': []}]},
            {'compose': u'compose-3', 'packages': [
                {'name': u'bash', 'version': u'5.6.7', 'epoch': 0, 'release': u'8',
                 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': None,
                 'filename': 'bash-5.6.7-8.x86_64.rpm', 'id': 2,
                 'linked_composes': [u'compose-3'], 'linked_releases': []}]}
        ]
        self.assertEqual(response.data, expected)

    def test_get_for_product_version(self):
        # Link release-1.0 to product-1 so the product_version filter matches it.
        product_version = ProductVersion.objects.get(short='product', version='1')
        release = Release.objects.get(release_id='release-1.0')
        release.product_version = product_version
        release.save()
        response = self.client.get(self.url, {'rpm_name': 'bash', 'product_version': 'product-1'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])

    def test_get_for_product_version_with_latest(self):
        product_version = ProductVersion.objects.get(short='product', version='1')
        release = Release.objects.get(release_id='release-1.0')
        release.product_version = product_version
        release.save()
        response = self.client.get(self.url, {'rpm_name': 'bash', 'product_version': 'product-1',
                                              'latest': 'True'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])

    def test_get_for_included_compose_type(self):
        # Only compose-1 and compose-2 have the 'production' compose type in fixtures.
        response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0',
                                              'included_compose_type': 'production'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}])

    def test_get_for_excluded_compose_type(self):
        response = self.client.get(self.url, {'rpm_name': 'bash', 'release': 'release-1.0',
                                              'excluded_compose_type': 'production'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])


class FindComposeByReleaseRPMTestCase(APITestCase):
    """Tests for the findcomposebyrr endpoint (find composes by release id + RPM name)."""

    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
        "pdc/apps/compose/fixtures/tests/more_composes.json",
    ]

    def test_get_for_release(self):
        url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])

    def test_get_for_release_with_latest(self):
        url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'})
        response = self.client.get(url, {'latest': 'True'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])

    def test_get_for_release_to_dict(self):
        url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'})
        response = self.client.get(url, {'to_dict': True})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        expected = [
            {'compose': u'compose-1', 'packages': [
                {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1',
                 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': u'bash-0:1.2.3-4.b1.src',
                 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1,
                 'linked_composes': ['compose-1', 'compose-2'], 'linked_releases': []}]},
            {'compose': u'compose-2', 'packages': [
                {'name': u'bash', 'version': u'1.2.3', 'epoch': 0, 'release': u'4.b1',
                 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': u'bash-0:1.2.3-4.b1.src',
                 'filename': 'bash-1.2.3-4.b1.x86_64.rpm', 'id': 1,
                 'linked_composes': ['compose-1', 'compose-2'], 'linked_releases': []}]},
            {'compose': u'compose-3', 'packages': [
                {'name': u'bash', 'version': u'5.6.7', 'epoch': 0, 'release': u'8',
                 'arch': u'x86_64', 'srpm_name': u'bash', 'srpm_nevra': None,
                 'filename': 'bash-5.6.7-8.x86_64.rpm', 'id': 2,
                 'linked_composes': ['compose-3'], 'linked_releases': []}]}
        ]
        self.assertEqual(response.data, expected)

    def test_get_for_excluded_compose_type(self):
        url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'})
        response = self.client.get(url, {'excluded_compose_type': 'production'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}])

    def test_get_for_included_compose_type(self):
        url = reverse('findcomposebyrr-list', kwargs={'rpm_name': 'bash', 'release_id': 'release-1.0'})
        response = self.client.get(url, {'included_compose_type': 'production'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data,
                         [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']},
                          {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}])


class FindOlderComposeByComposeRPMTestCase(APITestCase):
    """Tests for findoldercomposebycr: locate an older compose carrying an older build of an RPM."""

    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
        "pdc/apps/compose/fixtures/tests/more_composes.json",
    ]

    def test_missing_previous_compose(self):
        # compose-1 is the oldest compose, so there is nothing older to find.
        url = reverse('findoldercomposebycr-list', kwargs={'compose_id': 'compose-1', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_previous_compose_has_same_version(self):
        # compose-1 has the same bash build as compose-2, so no *older* RPM exists.
        url = reverse('findoldercomposebycr-list', kwargs={'compose_id': 'compose-2', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_previous_compose_has_older_rpm(self):
        url = reverse('findoldercomposebycr-list', kwargs={'compose_id': 'compose-3', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('compose'), "compose-2")
        self.assertEqual(response.data.get('packages'), ["bash-0:1.2.3-4.b1.x86_64.rpm"])

    def test_previous_compose_has_older_rpm_with_to_dict(self):
        url = reverse('findoldercomposebycr-list', kwargs={'compose_id': 'compose-3', 'rpm_name': 'bash'})
        response = self.client.get(url, {'to_dict': True})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('compose'), "compose-2")
        packages = response.data.get('packages')
        self.assertEqual(len(packages), 1)
        # Pop order-sensitive / autogenerated fields before comparing the rest.
        self.assertItemsEqual(packages[0].pop('linked_composes'), ['compose-1', 'compose-2'])
        self.assertEqual(packages[0].pop('linked_releases'), [])
        packages[0].pop('id')
        self.assertDictEqual(
            dict(packages[0]),
            {'name': 'bash', 'version': '1.2.3', 'epoch': 0, 'release': '4.b1',
             'arch': 'x86_64', 'srpm_name': 'bash', 'srpm_nevra': 'bash-0:1.2.3-4.b1.src',
             'filename': 'bash-1.2.3-4.b1.x86_64.rpm'})

    def test_same_version_different_arch(self):
        """There is a previous compose with same version of package, but with different RPM.arch."""
        models.ComposeRPM.objects.filter(pk=1).update(rpm=3)
        url = reverse('findoldercomposebycr-list', kwargs={'compose_id': 'compose-2', 'rpm_name': 'bash'})
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)


class FindCompoeByProductVersionRPMTestCase(APITestCase):
    # NOTE(review): class name misspells "Compose"; kept as-is so external
    # references to the test class keep working.
    fixtures = [
"pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/product.json", "pdc/apps/release/fixtures/tests/product_version.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/more_composes.json", ] def setUp(self): product_version = ProductVersion.objects.get(short='product', version='1') release = Release.objects.get(release_id='release-1.0') release.product_version = product_version release.save() self.url = reverse('findcomposesbypvr-list', kwargs={'rpm_name': 'bash', 'product_version': 'product-1'}) def test_get_for_product_version(self): response = self.client.get(self.url) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_product_version_with_latest(self): product_version = ProductVersion.objects.get(short='product', version='1') release = Release.objects.get(release_id='release-1.0') release.product_version = product_version release.save() response = self.client.get(self.url, {'latest': 'True'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) def test_get_for_included_compose_type(self): response = self.client.get(self.url, {'included_compose_type': 'production'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-1', 'packages': ['bash-0:1.2.3-4.b1.x86_64.rpm']}, {'compose': 'compose-2', 'packages': 
['bash-0:1.2.3-4.b1.x86_64.rpm']}]) def test_get_for_excluded_compose_type(self): response = self.client.get(self.url, {'excluded_compose_type': 'production'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [{'compose': 'compose-3', 'packages': ['bash-0:5.6.7-8.x86_64.rpm']}]) class ComposeAPITestCase(TestCaseWithChangeSetMixin, APITestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def test_get_existing(self): response = self.client.get(reverse('compose-detail', args=["compose-1"])) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['sigkeys'], ['ABCDEF']) self.assertEqual(response.data['rpm_mapping_template'], 'http://testserver/rest_api/v1/composes/compose-1/rpm-mapping/{{package}}/') def test_compose_with_unsigned_package(self): crpm = models.ComposeRPM.objects.all()[0] crpm.sigkey = None crpm.save() response = self.client.get(reverse('compose-detail', args=["compose-1"])) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertItemsEqual(response.data['sigkeys'], ['ABCDEF', None]) def test_get_nonexisting(self): response = self.client.get(reverse('compose-detail', args=["does-not-exist"])) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_list(self): response = self.client.get(reverse('compose-list'), {}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_composeid(self): response = self.client.get(reverse('compose-list'), {"compose_id": "compose-1"}) self.assertEqual(response.status_code, 
status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_composeid_nonexisting(self): response = self.client.get(reverse('compose-list'), {"compose_id": "does-not-exist"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_rpmname(self): response = self.client.get(reverse('compose-list'), {"rpm_name": "bash"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_rpmname_nonexisting(self): response = self.client.get(reverse('compose-list'), {"rpm_name": "does-not-exist"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_srpmname(self): response = self.client.get(reverse('compose-list'), {"srpm_name": "bash"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_srpmname_nonexisting(self): response = self.client.get(reverse('compose-list'), {"srpm_name": "does-not-exist"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_rpmversion(self): response = self.client.get(reverse('compose-list'), {"rpm_version": "1.2.3"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_rpmversion_nonexisting(self): response = self.client.get(reverse('compose-list'), {"rpm_version": "does-not-exist"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_rpmrelease(self): response = self.client.get(reverse('compose-list'), {"rpm_release": "4.b1"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_rpmrelease_nonexisting(self): response = self.client.get(reverse('compose-list'), 
{"rpm_release": "does-not-exist"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_rpmarch(self): response = self.client.get(reverse('compose-list'), {"rpm_arch": "x86_64"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_rpmarch_nonexisting(self): response = self.client.get(reverse('compose-list'), {"rpm_arch": "does-not-exist"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_rpmnvr(self): response = self.client.get(reverse('compose-list'), {"rpm_nvr": "bash-1.2.3-4.b1"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_rpmnvr_nonexisting(self): response = self.client.get(reverse('compose-list'), {"rpm_nvr": "does-not-exist"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_rpmnvr_invalid(self): response = self.client.get(reverse('compose-list'), {"rpm_nvr": "invalid"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_query_compose_rpmnvra(self): response = self.client.get(reverse('compose-list'), {"rpm_nvra": "bash-1.2.3-4.b1.x86_64"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_rpmnvra_nonexisting(self): response = self.client.get(reverse('compose-list'), {"rpm_nvra": "does-not-exist.arch"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) def test_query_compose_rpmnvra_invalid(self): response = self.client.get(reverse('compose-list'), {"rpm_nvra": "invalid"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_query_compose_acceptance_testing(self): response = 
self.client.get(reverse('compose-list'), {"acceptance_testing": "untested"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1) def test_query_compose_acceptance_testing_nonexisting(self): response = self.client.get(reverse('compose-list'), {"acceptance_testing": "broken"}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 0) class ComposeApiOrderingTestCase(APITestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/product.json", "pdc/apps/release/fixtures/tests/product_version.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/more_composes.json", ] def test_compose_list_is_ordered(self): response = self.client.get(reverse('compose-list')) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( [x['compose_id'] for x in response.data.get('results', [])], ['compose-1', 'compose-2', 'compose-3'] ) def test_compose_in_release_are_ordered(self): response = self.client.get(reverse('release-detail', args=['release-1.0'])) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('compose_set', []), ['compose-1', 'compose-2', 'compose-3']) class ComposeUpdateTestCase(TestCaseWithChangeSetMixin, APITestCase): fixtures = [ "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/more_releases.json", ] def test_can_not_perform_full_update(self): response = 
self.client.put(reverse('compose-detail', args=['compose-1']), {}) self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) def test_can_update_acceptance_testing_state(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'acceptance_testing': 'passed'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('acceptance_testing'), 'passed') self.assertNumChanges([1]) def test_can_not_update_compose_label(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'compose_label': 'i am a label'}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_update_linked_releases(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': ['release-1.0-updates']}, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('linked_releases'), ['release-1.0-updates']) self.assertNumChanges([1]) def test_update_both_linked_release_and_acceptance(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': ['release-1.0-updates'], 'acceptance_testing': 'passed'}, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('linked_releases'), ['release-1.0-updates']) self.assertEqual(response.data.get('acceptance_testing'), 'passed') self.assertNumChanges([2]) def test_update_acceptance_preserves_links(self): self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': ['release-1.0-updates']}, format='json') response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'acceptance_testing': 'passed'}, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('linked_releases'), ['release-1.0-updates']) self.assertNumChanges([1, 1]) def 
test_update_can_not_link_to_same_release(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': ['release-1.0']}, format='json') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertIn('non_field_errors', response.data) def test_update_can_not_link_to_same_release_twice(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': ['release-1.0-updates', 'release-1.0-updates']}, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data.get('linked_releases'), ['release-1.0-updates']) def test_partial_update_empty(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {}, format='json') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_patch_linked_releases_not_a_list(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': 'release-1.0-updates'}, format='json') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.data, {'linked_releases': ['Expected a list.']}) self.assertNumChanges([]) def test_patch_linked_releases_null(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': None}, format='json') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.data, {'linked_releases': ['This field may not be null.']}) self.assertNumChanges([]) def test_patch_linked_releases_list_with_null(self): response = self.client.patch(reverse('compose-detail', args=['compose-1']), {'linked_releases': [None]}, format='json') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.data, {'linked_releases': ['Expected a string instead of <None>.']}) self.assertNumChanges([]) def test_bulk_update_put(self): response = self.client.put(reverse('compose-list'), 
                                   {'compose-1': {'linked_releases': []}}, format='json')
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
        self.assertNumChanges([])

    def test_bulk_update_patch(self):
        response = self.client.patch(reverse('compose-list'),
                                     {'compose-1': {'linked_releases': ['release-1.0-updates']}},
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertNumChanges([1])
        self.assertEqual(response.data.keys(), ['compose-1'])
        self.assertEqual(response.data['compose-1'].get('linked_releases'), ['release-1.0-updates'])

    def test_partial_update_extra_field(self):
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'foo': 'bar'}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_update_testing_status_on_arch(self):
        data = {'Server': {'x86_64': 'passed'}, 'Server2': {'x86_64': 'untested'}}
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'rtt_tested_architectures': data}, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('rtt_tested_architectures', {}), data)
        # The status change must be persisted on the variant-arch record.
        vararch = models.VariantArch.objects.get(arch__name='x86_64',
                                                 variant__variant_uid='Server',
                                                 variant__compose__compose_id='compose-1')
        self.assertEqual(vararch.rtt_testing_status.name, 'passed')
        self.assertNumChanges([1])

    def test_update_testing_status_on_non_existing_tree(self):
        inputs = [
            ({'Foo': {'x86_64': 'passed'}}, 'Foo.x86_64 not in compose compose-1.'),
            ({'Server': {'foo': 'passed'}}, 'Server.foo not in compose compose-1.'),
        ]
        for data, err in inputs:
            response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                         {'rtt_tested_architectures': data}, format='json')
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
            self.assertEqual(response.data.get('rtt_tested_architectures', ''), err)
        self.assertNumChanges([])

    def test_update_testing_status_to_non_existing_status(self):
        data = {'Server': {'x86_64': 'awesome'}}
        response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                     {'rtt_tested_architectures': data}, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data.get('rtt_tested_architectures', ''),
                         '"awesome" is not a known testing status for Server.x86_64.')

    def test_update_testing_status_with_malformed_data(self):
        inputs = [
            ({'Server': 'passed'}, 'Server: "passed" is not a dict'),
            ('passed', 'rtt_tested_architectures: "passed" is not a dict'),
        ]
        for data, err in inputs:
            response = self.client.patch(reverse('compose-detail', args=['compose-1']),
                                         {'rtt_tested_architectures': data}, format='json')
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
            self.assertEqual(response.data.get('detail', []), [err])
        self.assertNumChanges([])


class OverridesRPMAPITestCase(TestCaseWithChangeSetMixin, APITestCase):
    """CRUD and bulk-clear tests for RPM overrides."""

    fixtures = [
        'pdc/apps/release/fixtures/tests/release.json',
        'pdc/apps/compose/fixtures/tests/compose_overriderpm.json',
    ]

    def setUp(self):
        self.release = release_models.Release.objects.get(release_id='release-1.0')
        # Serialized form of the override loaded from the fixture (pk=1).
        self.override_rpm = {'id': 1, 'release': 'release-1.0', 'variant': 'Server', 'arch': 'x86_64',
                             'srpm_name': 'bash', 'rpm_name': 'bash-doc', 'rpm_arch': 'x86_64',
                             'include': False, 'comment': '', 'do_not_delete': False}
        # Serialized form of the protected override created by some tests.
        self.do_not_delete_orpm = {'release': 'release-1.0', 'variant': 'Server', 'arch': 'x86_64',
                                   'srpm_name': 'bash', 'rpm_name': 'bash-doc', 'rpm_arch': 'src',
                                   'include': True, 'comment': '', 'do_not_delete': True}

    def test_query_existing(self):
        response = self.client.get(reverse('overridesrpm-list'), {'release': 'release-1.0'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)
        self.assertEqual(response.data['results'][0], self.override_rpm)

    def test_query_nonexisting(self):
        response = self.client.get(reverse('overridesrpm-list'), {'release': 'release-1.1'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_delete_existing(self):
        response = self.client.delete(reverse('overridesrpm-detail', args=[1]))
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(models.OverrideRPM.objects.count(), 0)
        self.assertNumChanges([1])

    def test_delete_non_existing(self):
        response = self.client.delete(reverse('overridesrpm-list', args=[42]))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(models.OverrideRPM.objects.count(), 1)
        self.assertNumChanges([])

    def test_create_duplicit(self):
        response = self.client.post(reverse('overridesrpm-list'), self.override_rpm)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(models.OverrideRPM.objects.count(), 1)

    def test_create_correct(self):
        self.override_rpm["rpm_name"] = "bash-debuginfo"
        del self.override_rpm["id"]
        response = self.client.post(reverse('overridesrpm-list'), self.override_rpm)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(models.OverrideRPM.objects.count(), 2)

    def test_create_extra_field(self):
        self.override_rpm["rpm_name"] = "bash-debuginfo"
        self.override_rpm["foo"] = "bar"
        response = self.client.post(reverse('overridesrpm-list'), self.override_rpm)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_clear(self):
        response = self.client.delete(reverse('overridesrpm-list'), {'release': 'release-1.0'})
        self.assertEqual(models.OverrideRPM.objects.count(), 0)
        self.assertItemsEqual(response.data, [self.override_rpm])

    def test_clear_preserve_do_not_delete(self):
        # Overrides flagged do_not_delete survive a plain clear.
        models.OverrideRPM.objects.create(release=self.release, variant="Server", arch="x86_64",
                                          rpm_name="bash-doc", rpm_arch="src", include=True,
                                          do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'), {'release': 'release-1.0'})
        self.assertEqual(models.OverrideRPM.objects.count(), 1)
        self.assertItemsEqual(response.data, [self.override_rpm])

    def test_delete_with_extra_param(self):
        models.OverrideRPM.objects.create(release=self.release, variant="Server", arch="x86_64",
                                          rpm_name="bash-doc", rpm_arch="src", include=True,
                                          do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'), {'release': 'release-1.0',
                                                                     'variant': "Server",
                                                                     'arch': 'x86_64',
                                                                     'rpm_name': 'bash-doc',
                                                                     'rpm_arch': 'src',
                                                                     'srpm_name': 'bash'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_clear_with_extra_param(self):
        models.OverrideRPM.objects.create(release=self.release, variant="Server", arch="x86_64",
                                          rpm_name="bash-doc", rpm_arch="src", include=True,
                                          do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'), {'release': 'release-1.0',
                                                                     'srpm_name': 'bash'})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_clear_force(self):
        # force=True removes even do_not_delete overrides.
        override = models.OverrideRPM.objects.create(release=self.release, variant="Server",
                                                     arch="x86_64", rpm_name="bash-doc",
                                                     rpm_arch="src", include=True,
                                                     do_not_delete=True, srpm_name="bash")
        self.do_not_delete_orpm['id'] = override.pk
        response = self.client.delete(reverse('overridesrpm-list'), {'release': 'release-1.0',
                                                                     'force': True})
        self.assertEqual(models.OverrideRPM.objects.count(), 0)
        self.assertItemsEqual(response.data, [self.override_rpm, self.do_not_delete_orpm])

    def test_delete_two_by_id(self):
        override = models.OverrideRPM.objects.create(release=self.release, variant="Server",
                                                     arch="x86_64", rpm_name="bash-doc",
                                                     rpm_arch="src", include=True,
                                                     do_not_delete=True, srpm_name="bash")
        response = self.client.delete(reverse('overridesrpm-list'), [1, override.pk], format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertNumChanges([2])
        self.assertEqual(models.OverrideRPM.objects.count(), 0)


class ComposeRPMViewAPITestCase(TestCaseWithChangeSetMixin, APITestCase):
    """Import an RPM manifest for a compose and read it back."""

    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
    ]

    def setUp(self):
        with open('pdc/apps/release/fixtures/tests/composeinfo.json', 'r') as f:
            self.compose_info = json.loads(f.read())
        with open('pdc/apps/compose/fixtures/tests/rpm-manifest.json', 'r') as f:
            self.manifest = json.loads(f.read())
        # Caching ids makes it faster, but the cache needs to be cleared for each test.
        models.Path.CACHE = {}
        common_models.SigKey.CACHE = {}

    def test_import_and_retrieve_manifest(self):
        response = self.client.post(reverse('releaseimportcomposeinfo-list'),
                                    self.compose_info, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        response = self.client.post(reverse('composerpm-list'),
                                    {'rpm_manifest': self.manifest,
                                     'release_id': 'tp-1.0',
                                     'composeinfo': self.compose_info},
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertNumChanges([11, 5])
        self.assertEqual(models.ComposeRPM.objects.count(), 6)
        response = self.client.get(reverse('composerpm-detail', args=['TP-1.0-20150310.0']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertDictEqual(dict(response.data), self.manifest)


class ComposeImageAPITestCase(TestCaseWithChangeSetMixin, APITestCase):
    """Import an image manifest for a compose and read it back."""

    def setUp(self):
        with open('pdc/apps/release/fixtures/tests/composeinfo.json', 'r') as f:
            self.compose_info = json.loads(f.read())
        with open('pdc/apps/compose/fixtures/tests/image-manifest.json', 'r') as f:
            self.manifest = json.loads(f.read())
        self.client.post(reverse('releaseimportcomposeinfo-list'),
                         self.compose_info, format='json')
        # Caching ids makes it faster, but the cache needs to be cleared for each test.
        models.Path.CACHE = {}

    def test_import_images_by_deprecated_api(self):
        # TODO: remove this test after next release
        response = self.client.post(reverse('composeimportimages-list'),
                                    {'image_manifest': self.manifest,
                                     'release_id': 'tp-1.0',
                                     'composeinfo': self.compose_info},
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertNumChanges([11, 5])
        self.assertEqual(models.ComposeImage.objects.count(), 4)
        response = self.client.get(reverse('image-list'), {'compose': 'TP-1.0-20150310.0'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('count'), 4)

    def test_import_images(self):
        response = self.client.post(reverse('composeimage-list'),
                                    {'image_manifest': self.manifest,
                                     'release_id': 'tp-1.0',
                                     'composeinfo': self.compose_info},
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertNumChanges([11, 5])
        self.assertEqual(models.ComposeImage.objects.count(), 4)
        response = self.client.get(reverse('image-list'), {'compose': 'TP-1.0-20150310.0'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data.get('count'), 4)

    def test_import_and_retrieve_images(self):
        response = self.client.post(reverse('composeimage-list'),
                                    {'image_manifest': self.manifest,
                                     'release_id': 'tp-1.0',
                                     'composeinfo': self.compose_info},
                                    format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        response = self.client.get(reverse('composeimage-detail', args=['TP-1.0-20150310.0']))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertDictEqual(dict(response.data), self.manifest)


class RPMMappingAPITestCase(APITestCase):
    """Tests for the per-compose RPM mapping endpoint, including override handling."""

    fixtures = [
        "pdc/apps/common/fixtures/test/sigkey.json",
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/release/fixtures/tests/variant.json",
        "pdc/apps/release/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/variant_arch.json",
        "pdc/apps/compose/fixtures/tests/compose_overriderpm.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/compose/fixtures/tests/compose_composerpm.json",
    ]

    def setUp(self):
        self.release = release_models.Release.objects.latest('id')
        self.compose = models.Compose.objects.get(compose_id='compose-1')
        self.url = reverse('composerpmmapping-detail', args=[self.compose.compose_id, 'bash'])

    def test_get_rpm_mapping(self):
        response = self.client.get(self.url, {}, format='json')
        expected_data = {
            'Server': {
                'x86_64': {
                    'bash': ['x86_64'],
                }
            }
        }
        self.assertEqual(response.data, expected_data)

    def test_get_rpm_mapping_for_nonexisting_compose(self):
        url = reverse('composerpmmapping-detail', args=['foo-bar', 'bash'])
        response = self.client.get(url, {}, format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_rpm_mapping_includes_overrides(self):
        # An include=True override adds the src arch to the mapping.
        models.OverrideRPM.objects.create(variant='Server', arch='x86_64', srpm_name='bash',
                                          rpm_name='bash', rpm_arch='src', include=True,
                                          release=self.release)
        response = self.client.get(self.url, {}, format='json')
        expected_data = {
            'Server': {
                'x86_64': {
                    'bash': ['src', 'x86_64'],
                }
            }
        }
        self.assertEqual(response.data, expected_data)

    def test_rpm_mapping_can_exclude_overrides(self):
        models.OverrideRPM.objects.create(variant='Server', arch='x86_64', srpm_name='bash',
                                          rpm_name='bash', rpm_arch='src', include=True,
                                          release=self.release)
        self.url += '?disable_overrides=1'
        response = self.client.get(self.url, {}, format='json')
        expected_data = {
            'Server': {
                'x86_64': {
                    'bash': ['x86_64'],
                    'bash-doc': ['x86_64'],
                }
            }
        }
        self.assertEqual(response.data, expected_data)

    def test_does_not_return_empty_container(self):
        # When overrides remove the last RPM, the empty tree levels are dropped.
        models.OverrideRPM.objects.create(variant='Server', arch='x86_64', srpm_name='bash',
                                          rpm_name='bash', rpm_arch='x86_64', include=False,
                                          release=self.release)
        response = self.client.get(self.url, {}, format='json')
        self.assertEqual(response.data, {})

    def test_partial_update(self):
        self.client.force_authenticate(create_user("user", perms=[]))
        self.client.patch(self.url,
                          [{"action": "create", "srpm_name": "bash", "rpm_name": "bash-magic",
                            "rpm_arch": "src", "variant": "Client", "arch": "x86_64",
                            "do_not_delete": False, "comment": "", "include": True}],
                          format='json')
        orpm = models.OverrideRPM.objects.get(srpm_name="bash", rpm_name="bash-magic",
                                              rpm_arch="src", variant="Client", arch="x86_64",
                                              include=True, do_not_delete=False, comment="")
        self.assertIsNotNone(orpm)

    def test_update(self):
        # Without ?perform the changes are only reported, not persisted.
        self.client.force_authenticate(create_user("user", perms=[]))
        new_mapping = {'Server': {'x86_64': {'bash': ['x86_64', 'i386']}}}
        response = self.client.put(self.url, new_mapping, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data, [{'action': 'create', 'srpm_name': 'bash',
                                          'rpm_name': 'bash', 'rpm_arch': 'i386',
                                          'variant': 'Server', 'arch': 'x86_64',
                                          'include': True, 'release_id': 'release-1.0'}])
        self.assertEqual(0, models.OverrideRPM.objects.filter(rpm_arch='i386').count())

    def test_update_with_perform(self):
        self.client.force_authenticate(create_user("user", perms=[]))
        new_mapping = {'Server': {'x86_64': {'bash': ['x86_64', 'i386']}}}
        response = self.client.put(self.url + '?perform=1', new_mapping, format='json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data, [{'action': 'create', 'srpm_name': 'bash',
                                          'rpm_name': 'bash', 'rpm_arch': 'i386',
                                          'variant': 'Server', 'arch': 'x86_64',
                                          'include': True, 'release_id': 'release-1.0'}])
        self.assertEqual(1, models.OverrideRPM.objects.filter(rpm_arch='i386').count())


class FilterBugzillaProductsAndComponentsTestCase(APITestCase):
    """Tests for resolving bugzilla product/component info from an RPM NVR."""

    fixtures = [
        "pdc/apps/package/fixtures/test/rpm.json",
        "pdc/apps/release/fixtures/tests/release.json",
        "pdc/apps/compose/fixtures/tests/variant.json",
        "pdc/apps/compose/fixtures/tests/compose.json",
        "pdc/apps/component/fixtures/tests/release_component.json",
        "pdc/apps/component/fixtures/tests/upstream.json",
        "pdc/apps/component/fixtures/tests/global_component.json"
    ]

    def setUp(self):
        # Construct a new release and release component
        self.release = Release.objects.create(
            release_id='release-2.0',
            short='release',
            version='2.0',
            name='Awesome Release',
            release_type_id=1,
        )
        # Build a small bugzilla component tree: kernel -> {filesystems -> ext4, python -> bin}.
        self.bugzilla_component = BugzillaComponent.objects.create(name='kernel')
        filesystems = BugzillaComponent.objects.create(name='filesystems',
                                                       parent_component=self.bugzilla_component)
        BugzillaComponent.objects.create(name='ext4', parent_component=filesystems)
        pyth = BugzillaComponent.objects.create(name='python',
                                                parent_component=self.bugzilla_component)
        BugzillaComponent.objects.create(name='bin', parent_component=pyth)
        ReleaseComponent.objects.create(
            release=self.release,
            global_component_id=1,
            name='kernel',
            bugzilla_component=self.bugzilla_component
        )

    def test_filter_bugzilla_products_components_with_rpm_nvr(self):
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=bash-1.2.3-4.b1', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_filter_with_invalid_nvr(self):
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=xxx', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_filter_with_nvr_without_rpms(self):
        url = reverse('bugzilla-list')
        response = self.client.get(url + '?nvr=GConf2-3.2.6-8.el71', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_filter_without_nvr(self):
        url = reverse('bugzilla-list')
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @mock.patch('pdc.apps.compose.models.Compose.objects.filter')
    def test_filter_without_srpm_component_name_mapping(self, mock_filter):
        release_component, _ = ReleaseComponent.objects.get_or_create(
            global_component_id=1,
            release=self.release,
            bugzilla_component=self.bugzilla_component,
name='bash') mock_filter.return_value = mock.Mock() mock_filter.return_value.distinct.return_value = [mock.Mock()] mock_filter.return_value.distinct.return_value[0].release = self.release.release_id url = reverse('bugzilla-list') response = self.client.get(url + '?nvr=bash-1.2.3-4.b1', format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn('kernel', response.content) @mock.patch('pdc.apps.compose.models.Compose.objects.filter') def test_filter_with_srpm_component_name_mapping(self, mock_filter): release_component, _ = ReleaseComponent.objects.get_or_create( global_component_id=1, release=self.release, name='kernel') binding_models.ReleaseComponentSRPMNameMapping.objects.create( srpm_name='bash', release_component=release_component) mock_filter.return_value = mock.Mock() mock_filter.return_value.distinct.return_value = [mock.Mock()] mock_filter.return_value.distinct.return_value[0].release = self.release.release_id url = reverse('bugzilla-list') response = self.client.get(url + '?nvr=bash-1.2.3-4.b1', format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertIn('kernel', response.content) class RPMMappingTestCase(TestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/release/fixtures/tests/variant.json", "pdc/apps/release/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def setUp(self): self.compose = models.Compose.objects.get(compose_id='compose-1') self.mapping, _ = self.compose.get_rpm_mapping('bash') def test_compute_diff_add_new(self): new_mapping = models.ComposeRPMMapping(data={'Server': {'x86_64': {'bash': ['src', 'x86_64']}}}) 
changes = self.mapping.compute_changes(new_mapping) self.assertEqual(len(changes), 1) self.assertEqual(changes[0], {'action': 'create', 'variant': 'Server', 'arch': 'x86_64', 'include': True, 'release_id': 'release-1.0', 'rpm_name': 'bash', 'srpm_name': 'bash', 'rpm_arch': 'src'}) def test_compute_diff_add_excluded(self): new_mapping = models.ComposeRPMMapping(data={'Server': {'x86_64': {'bash': ['x86_64'], 'bash-doc': ['x86_64']}}}) changes = self.mapping.compute_changes(new_mapping) self.assertEqual(len(changes), 1) self.assertEqual(changes[0], {'action': 'delete', 'variant': 'Server', 'arch': 'x86_64', 'include': False, 'release_id': 'release-1.0', 'rpm_name': 'bash-doc', 'srpm_name': 'bash', 'rpm_arch': 'x86_64'}) def test_compute_diff_remove_existing(self): new_mapping = models.ComposeRPMMapping(data={}) changes = self.mapping.compute_changes(new_mapping) self.assertEqual(len(changes), 1) self.assertEqual(changes[0], {'action': 'create', 'variant': 'Server', 'arch': 'x86_64', 'include': False, 'release_id': 'release-1.0', 'rpm_name': 'bash', 'srpm_name': 'bash', 'rpm_arch': 'x86_64'}) class OverrideManagementTestCase(TestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/release/fixtures/tests/variant.json", "pdc/apps/release/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def setUp(self): self.initial_form_data = { 'checks-0-included': 'on', 'checks-0-variant': 'Server', 'checks-0-arch': 'x86_64', 'checks-0-rpm_name': 'bash', 'checks-0-rpm_arch': 'x86_64', 'checks-1-variant': 'Server', 'checks-1-arch': 'x86_64', 'checks-1-rpm_name': 'bash-doc', 'checks-1-rpm_arch': 'x86_64', 
'checks-MAX_NUM_FORMS': '1000', 'checks-INITIAL_FORMS': 2, 'checks-TOTAL_FORMS': 2, 'news-MAX_NUM_FORMS': '1000', 'news-INITIAL_FORMS': 1, 'news-TOTAL_FORMS': 0, 'vararch-MAX_NUM_FORMS': '1000', 'vararch-INITIAL_FORMS': 1, 'vararch-TOTAL_FORMS': 0, 'for_new_vararch-MAX_NUM_FORMS': '1000', 'for_new_vararch-INITIAL_FORMS': 0, 'for_new_vararch-TOTAL_FORMS': 0, } def test_can_access_management_form(self): client = Client() response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) self.assertEqual(response.status_code, 200) # There is one package in fixtures self.assertEqual(len(response.context['forms']), 1) def test_submit_no_changes(self): client = Client() response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 0) def test_submit_disable(self): client = Client() del self.initial_form_data['checks-0-included'] response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 1) self.assertEqual({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash', 'rpm_arch': 'x86_64', 'include': False, 'action': 'create', 'srpm_name': 'bash', 'release_id': 'release-1.0'}, data[0]) def test_submit_enable(self): client = Client() self.initial_form_data['checks-1-included'] = 'on' response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 1) self.assertEqual({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash-doc', 'rpm_arch': 'x86_64', 'include': False, 'action': 
'delete', 'srpm_name': 'bash', 'release_id': 'release-1.0', 'comment': '', 'do_not_delete': False}, data[0]) def test_submit_new_override(self): client = Client() self.initial_form_data.update({ 'news-0-variant': 'Server', 'news-0-arch': 'x86_64', 'news-0-rpm_name': 'bash-completion', 'news-0-rpm_arch': 'x86_64', 'news-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 1) self.assertEqual({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash', 'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64', 'include': True}, data[0]) def test_submit_new_override_on_new_variant(self): client = Client() self.initial_form_data.update({ 'vararch-0-variant': 'Server-optional', 'vararch-0-arch': 'x86_64', 'for_new_vararch-0-new_variant': 0, 'for_new_vararch-0-rpm_name': 'bash-completion', 'for_new_vararch-0-rpm_arch': 'x86_64', 'vararch-TOTAL_FORMS': 1, 'for_new_vararch-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 1) self.assertEqual({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash', 'variant': 'Server-optional', 'arch': 'x86_64', 'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64', 'include': True}, data[0]) def test_submit_more_different_changes(self): client = Client() del self.initial_form_data['checks-0-included'] self.initial_form_data.update({ 'news-0-variant': 'Server', 'news-0-arch': 'x86_64', 'news-0-rpm_name': 'bash-completion', 'news-0-rpm_arch': 'x86_64', 'vararch-0-variant': 'Server-optional', 'vararch-0-arch': 'x86_64', 
'for_new_vararch-0-new_variant': 0, 'for_new_vararch-0-rpm_name': 'bash-completion', 'for_new_vararch-0-rpm_arch': 'x86_64', 'news-TOTAL_FORMS': 1, 'vararch-TOTAL_FORMS': 1, 'for_new_vararch-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 3) self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash', 'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64', 'include': True}, data) self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash', 'variant': 'Server-optional', 'arch': 'x86_64', 'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64', 'include': True}, data) self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash', 'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash', 'rpm_arch': 'x86_64', 'include': False}, data) def test_submit_more_same_changes(self): client = Client() self.initial_form_data.update({ 'news-0-variant': 'Server', 'news-0-arch': 'x86_64', 'news-0-rpm_name': 'bash-completion', 'news-0-rpm_arch': 'x86_64', 'news-1-variant': 'Server', 'news-1-arch': 'x86_64', 'news-1-rpm_name': 'bash-magic', 'news-1-rpm_arch': 'src', 'news-TOTAL_FORMS': 2, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 2) self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash', 'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash-completion', 'rpm_arch': 'x86_64', 'include': True}, data) self.assertIn({'action': 'create', 'release_id': 'release-1.0', 'srpm_name': 'bash', 'variant': 'Server', 
'arch': 'x86_64', 'rpm_name': 'bash-magic', 'rpm_arch': 'src', 'include': True}, data) def test_submit_enable_and_disable(self): client = Client() del self.initial_form_data['checks-0-included'] self.initial_form_data['checks-1-included'] = 'on' response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertIn('compressed', response.context) data = json.loads(response.context['compressed']) self.assertEqual(len(data), 2) self.assertIn({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash-doc', 'rpm_arch': 'x86_64', 'include': False, 'action': 'delete', 'srpm_name': 'bash', 'release_id': 'release-1.0', 'comment': '', 'do_not_delete': False}, data) self.assertIn({'variant': 'Server', 'arch': 'x86_64', 'rpm_name': 'bash', 'rpm_arch': 'x86_64', 'include': False, 'action': 'create', 'srpm_name': 'bash', 'release_id': 'release-1.0'}, data) def test_submit_incorrect_new_override_missing_rpm_arch(self): client = Client() self.initial_form_data.update({ 'news-0-variant': 'Server', 'news-0-arch': 'x86_64', 'news-0-rpm_name': 'bash-completion', 'news-0-rpm_arch': '', 'news-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertFormsetError(response, 'override_forms', 0, None, 'Both RPM name and arch must be filled in.') self.assertContains(response, 'There are errors in the form.') def test_submit_incorrect_new_override_missing_rpm_name(self): client = Client() self.initial_form_data.update({ 'news-0-variant': 'Server', 'news-0-arch': 'x86_64', 'news-0-rpm_name': '', 'news-0-rpm_arch': 'src', 'news-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertFormsetError(response, 'override_forms', 0, None, 'Both RPM name and arch must be filled in.') 
self.assertContains(response, 'There are errors in the form.') def test_submit_incorrect_new_override_for_new_variant_missing_rpm_arch(self): client = Client() self.initial_form_data.update({ 'vararch-0-variant': 'Server-optional', 'vararch-0-arch': 'x86_64', 'for_new_vararch-0-rpm_name': 'bash-completion', 'for_new_vararch-0-rpm_arch': '', 'for_new_vararch-0-new_variant': 0, 'vararch-TOTAL_FORMS': 1, 'for_new_vararch-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertFormsetError(response, 'override_v_forms', 0, None, 'Both RPM name and arch must be filled in.') self.assertContains(response, 'There are errors in the form.') def test_submit_incorrect_new_override_for_new_variant_missing_rpm_name(self): client = Client() self.initial_form_data.update({ 'vararch-0-variant': 'Server-optional', 'vararch-0-arch': 'x86_64', 'for_new_vararch-0-rpm_name': '', 'for_new_vararch-0-rpm_arch': 'src', 'for_new_vararch-0-new_variant': 0, 'vararch-TOTAL_FORMS': 1, 'for_new_vararch-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertFormsetError(response, 'override_v_forms', 0, None, 'Both RPM name and arch must be filled in.') self.assertContains(response, 'There are errors in the form.') def test_submit_incorrect_new_override_for_new_variant_missing_variant_name(self): client = Client() self.initial_form_data.update({ 'vararch-0-variant': '', 'vararch-0-arch': 'x86_64', 'for_new_vararch-0-rpm_name': 'bash-magic', 'for_new_vararch-0-rpm_arch': 'src', 'for_new_vararch-0-new_variant': 0, 'vararch-TOTAL_FORMS': 1, 'for_new_vararch-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertFormsetError(response, 'variant_forms', 0, None, 'Both 
variant and arch must be filled in.') self.assertContains(response, 'There are errors in the form.') def test_submit_incorrect_new_override_for_new_variant_missing_variant_arch(self): client = Client() self.initial_form_data.update({ 'vararch-0-variant': 'Server-optional', 'vararch-0-arch': '', 'for_new_vararch-0-rpm_name': 'bash-magic', 'for_new_vararch-0-rpm_arch': 'src', 'for_new_vararch-0-new_variant': 0, 'vararch-TOTAL_FORMS': 1, 'for_new_vararch-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertFormsetError(response, 'variant_forms', 0, None, 'Both variant and arch must be filled in.') self.assertContains(response, 'There are errors in the form.') def test_submit_incorrect_new_override_for_new_variant_and_old_variant(self): client = Client() self.initial_form_data.update({ 'vararch-0-variant': 'Server-optional', 'vararch-0-arch': 'x86_64', 'for_new_vararch-0-rpm_name': 'bash-magic', 'for_new_vararch-0-rpm_arch': 'src', 'for_new_vararch-0-new_variant': 0, 'for_new_vararch-0-variant': 'Server', 'for_new_vararch-0-arch': 'i686', 'vararch-TOTAL_FORMS': 1, 'for_new_vararch-TOTAL_FORMS': 1, }) response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertFormsetError(response, 'override_v_forms', 0, None, 'Can not reference both old and new variant.arch.') self.assertContains(response, 'There are errors in the form.') def test_submit_preview_no_change(self): client = Client() response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertContains(response, 'No changes') class OverridePreviewTestCase(TestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", 
"pdc/apps/release/fixtures/tests/variant.json", "pdc/apps/release/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def setUp(self): self.form_data = { 'checks-0-included': 'on', 'checks-0-variant': 'Server', 'checks-0-arch': 'x86_64', 'checks-0-rpm_name': 'bash', 'checks-0-rpm_arch': 'x86_64', 'checks-1-variant': 'Server', 'checks-1-arch': 'x86_64', 'checks-1-rpm_name': 'bash-doc', 'checks-1-rpm_arch': 'x86_64', 'checks-MAX_NUM_FORMS': '1000', 'checks-INITIAL_FORMS': 2, 'checks-TOTAL_FORMS': 2, 'news-MAX_NUM_FORMS': '1000', 'news-INITIAL_FORMS': 1, 'news-TOTAL_FORMS': 0, 'vararch-MAX_NUM_FORMS': '1000', 'vararch-INITIAL_FORMS': 1, 'vararch-TOTAL_FORMS': 0, 'for_new_vararch-MAX_NUM_FORMS': '1000', 'for_new_vararch-INITIAL_FORMS': 0, 'for_new_vararch-TOTAL_FORMS': 0, } self.preview_form_data = { 'preview_submit': True, 'form-TOTAL_FORMS': 0, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': 1000, } def _populate_preview_form(self, response): """Parse response and prepare form data for preview submission.""" def set_val(dict, key, val): if isinstance(val, bool): if val: dict[key] = 'on' dict[key] = val for (i, action) in enumerate(json.loads(response.context['compressed'])): for k in action: set_val(self.preview_form_data, 'form-%d-%s' % (i, k), action[k]) self.preview_form_data['form-TOTAL_FORMS'] += 1 self.preview_form_data['initial_data'] = response.context['compressed'] def test_submit_with_comment_and_missing_do_not_delete(self): client = Client() del self.form_data['checks-0-included'] response = client.post('/override/manage/release-1.0/?package=bash', self.form_data) self._populate_preview_form(response) self.preview_form_data['form-0-comment'] = 'do not delete me' response = 
client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 200) self.assertContains(response, 'There are errors in the form.') self.assertFormsetError(response, 'forms', 0, None, 'Comment needs do_not_delete checked.') def test_submit_ok_no_comment(self): client = Client() create_user("user", perms=["pdc.overrides"]) client.login(username="user", password="user") del self.form_data['checks-0-included'] response = client.post('/override/manage/release-1.0/?package=bash', self.form_data) self._populate_preview_form(response) response = client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(models.OverrideRPM.objects.count(), 2) orpm = models.OverrideRPM.objects.latest('id') self.assertEqual(orpm.include, False) self.assertEqual(orpm.do_not_delete, False) self.assertEqual(orpm.comment, '') def test_submit_ok_with_comment(self): client = Client() create_user("user", perms=["pdc.overrides"]) client.login(username="user", password="user") del self.form_data['checks-0-included'] response = client.post('/override/manage/release-1.0/?package=bash', self.form_data) self._populate_preview_form(response) self.preview_form_data.update({ 'form-0-do_not_delete': 'on', 'form-0-comment': 'do not delete me', }) response = client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(models.OverrideRPM.objects.count(), 2) orpm = models.OverrideRPM.objects.latest('id') self.assertEqual(orpm.include, False) self.assertEqual(orpm.do_not_delete, True) self.assertEqual(orpm.comment, 'do not delete me') def test_submit_ok_should_delete(self): client = Client() create_user("user", perms=["pdc.overrides"]) client.login(username="user", password="user") self.form_data['checks-1-included'] = 'on' response = client.post('/override/manage/release-1.0/?package=bash', 
self.form_data) self._populate_preview_form(response) del self.preview_form_data['form-0-do_not_delete'] response = client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(models.OverrideRPM.objects.count(), 0) def test_submit_ok_should_set_do_not_delete(self): client = Client() create_user("user", perms=["pdc.overrides"]) client.login(username="user", password="user") self.form_data['checks-1-included'] = 'on' response = client.post('/override/manage/release-1.0/?package=bash', self.form_data) self._populate_preview_form(response) self.preview_form_data.update({ 'form-0-comment': 'comment', 'form-0-do_not_delete': 'on', }) response = client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(models.OverrideRPM.objects.count(), 1) orpm = models.OverrideRPM.objects.latest('id') self.assertEqual(orpm.do_not_delete, True) self.assertEqual(orpm.comment, 'comment') self.assertEqual(orpm.include, True) def test_submit_ok_should_remove_do_not_delete_and_delete(self): orpm = models.OverrideRPM.objects.latest('id') orpm.do_not_delete = True orpm.save() client = Client() create_user("user", perms=["pdc.overrides"]) client.login(username="user", password="user") self.form_data['checks-1-included'] = 'on' response = client.post('/override/manage/release-1.0/?package=bash', self.form_data) self._populate_preview_form(response) del self.preview_form_data['form-0-do_not_delete'] response = client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(models.OverrideRPM.objects.count(), 0) def test_submit_ok_disable_override_without_compose_rpm__should_delete(self): orpm = models.OverrideRPM.objects.latest('id') orpm.rpm_name = 'bash-magic' orpm.include = True orpm.save() client = Client() create_user("user", perms=["pdc.overrides"]) 
client.login(username="user", password="user") self.form_data.update({ 'checks-1-included': 'on', 'checks-2-variant': 'Server', 'checks-2-arch': 'x86_64', 'checks-2-rpm_name': 'bash-magic', 'checks-2-rpm_arch': 'x86_64', 'checks-TOTAL_FORMS': 3, }) response = client.post('/override/manage/release-1.0/?package=bash', self.form_data) self.assertEqual(len(response.context['forms']), 1) self._populate_preview_form(response) response = client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(models.OverrideRPM.objects.count(), 0) class OverridePreviewBulkTestCase(TestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/release/fixtures/tests/variant.json", "pdc/apps/release/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", "pdc/apps/compose/fixtures/tests/compose_composerpm_more.json", ] def setUp(self): self.initial_form_data = { 'checks-0-variant': 'Server', 'checks-0-arch': 'x86_64', 'checks-0-rpm_name': 'bash', 'checks-0-rpm_arch': 'x86_64', 'checks-1-variant': 'Server', 'checks-1-arch': 'x86_64', 'checks-1-rpm_name': 'bash-completion', 'checks-1-rpm_arch': 'x86_64', 'checks-2-included': 'on', 'checks-2-variant': 'Server', 'checks-2-arch': 'x86_64', 'checks-2-rpm_name': 'bash-debuginfo', 'checks-2-rpm_arch': 'x86_64', 'checks-3-included': 'on', 'checks-3-variant': 'Server', 'checks-3-arch': 'x86_64', 'checks-3-rpm_name': 'bash-doc', 'checks-3-rpm_arch': 'x86_64', 'checks-4-variant': 'Server', 'checks-4-arch': 'x86_64', 'checks-4-rpm_name': 'bash-magic', 'checks-4-rpm_arch': 'x86_64', 'checks-MAX_NUM_FORMS': '1000', 
'checks-INITIAL_FORMS': 5, 'checks-TOTAL_FORMS': 5, 'news-MAX_NUM_FORMS': '1000', 'news-INITIAL_FORMS': 1, 'news-TOTAL_FORMS': 0, 'vararch-MAX_NUM_FORMS': '1000', 'vararch-INITIAL_FORMS': 1, 'vararch-TOTAL_FORMS': 0, 'for_new_vararch-MAX_NUM_FORMS': '1000', 'for_new_vararch-INITIAL_FORMS': 0, 'for_new_vararch-TOTAL_FORMS': 0, } self.preview_form_data = { 'preview_submit': True, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': 1000, } def test_more_changes_at_the_same_time(self): client = Client() create_user("user", perms=["pdc.overrides"]) client.login(username="user", password="user") response = client.post('/override/manage/release-1.0/?package=bash', self.initial_form_data) self.assertEqual(response.status_code, 200) self.assertEqual(len(response.context['forms']), 5) self.preview_form_data.update({ 'initial_data': response.context['compressed'], 'form-TOTAL_FORMS': 5, 'form-0-action': 'create', 'form-0-variant': 'Server', 'form-0-arch': 'x86_64', 'form-0-rpm_name': 'bash', 'form-0-rpm_arch': 'x86_64', 'form-0-include': 'False', 'form-1-action': 'create', 'form-1-variant': 'Server', 'form-1-arch': 'x86_64', 'form-1-rpm_name': 'bash-competion', 'form-1-rpm_arch': 'x86_64', 'form-1-include': 'False', 'form-2-action': 'delete', 'form-2-variant': 'Server', 'form-2-arch': 'x86_64', 'form-2-rpm_name': 'bash-debuginfo', 'form-2-rpm_arch': 'x86_64', 'form-2-include': 'False', 'form-3-action': 'delete', 'form-3-variant': 'Server', 'form-3-arch': 'x86_64', 'form-3-rpm_name': 'bash-doc', 'form-3-rpm_arch': 'x86_64', 'form-3-include': 'False', 'form-4-action': 'delete', 'form-4-variant': 'Server', 'form-4-arch': 'x86_64', 'form-4-rpm_name': 'bash-magic', 'form-4-rpm_arch': 'x86_64', 'form-4-include': 'False', }) response = client.post('/override/manage/release-1.0/?package=bash', self.preview_form_data) self.assertEqual(response.status_code, 302) self.assertItemsEqual( [o.export() for o in models.OverrideRPM.objects.all()], [{"release_id": 'release-1.0', "variant": 'Server', 
"arch": 'x86_64', "srpm_name": 'bash', "rpm_name": 'bash', "rpm_arch": 'x86_64', "include": False, "comment": '', "do_not_delete": False}, {"release_id": 'release-1.0', "variant": 'Server', "arch": 'x86_64', "srpm_name": 'bash', "rpm_name": 'bash-completion', "rpm_arch": 'x86_64', "include": False, "comment": '', "do_not_delete": False}] ) class UselessOverrideTestCase(TestCase): fixtures = [ "pdc/apps/common/fixtures/test/sigkey.json", "pdc/apps/package/fixtures/test/rpm.json", "pdc/apps/release/fixtures/tests/release.json", "pdc/apps/release/fixtures/tests/variant.json", "pdc/apps/release/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/variant.json", "pdc/apps/compose/fixtures/tests/variant_arch.json", "pdc/apps/compose/fixtures/tests/compose_overriderpm.json", "pdc/apps/compose/fixtures/tests/compose.json", "pdc/apps/compose/fixtures/tests/compose_composerpm.json", ] def setUp(self): self.release = release_models.Release.objects.latest('id') def test_delete_unused_include_override(self): orpm = models.OverrideRPM.objects.create(release=self.release, variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='x86_64', include=True) client = Client() with mock.patch('sys.stdout', new_callable=StringIO) as out: response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) self.assertEqual(response.context['useless_overrides'], []) self.assertIn('NOTICE', out.getvalue()) self.assertIn(str(orpm), out.getvalue()) self.assertEqual(models.OverrideRPM.objects.count(), 1) def test_delete_unused_exclude_override(self): orpm = models.OverrideRPM.objects.create(release=self.release, variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash-missing', rpm_arch='x86_64', include=False) client = Client() with mock.patch('sys.stdout', new_callable=StringIO) as out: response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) self.assertEqual(response.context['useless_overrides'], []) 
self.assertIn('NOTICE', out.getvalue()) self.assertIn(str(orpm), out.getvalue()) self.assertEqual(models.OverrideRPM.objects.count(), 1) def test_delete_unused_exclude_override_on_new_variant_arch(self): orpm = models.OverrideRPM.objects.create(release=self.release, variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='rpm_arch', include=False) client = Client() with mock.patch('sys.stdout', new_callable=StringIO) as out: response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) self.assertEqual(response.context['useless_overrides'], []) self.assertIn('NOTICE', out.getvalue()) self.assertIn(str(orpm), out.getvalue()) self.assertEqual(models.OverrideRPM.objects.count(), 1) def test_do_not_delete_unused_include_override(self): orpm = models.OverrideRPM.objects.create(release=self.release, variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='x86_64', include=True, do_not_delete=True) client = Client() response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) self.assertEqual(response.context['useless_overrides'], [orpm]) self.assertEqual(models.OverrideRPM.objects.count(), 2) def test_do_not_delete_unused_exclude_override(self): orpm = models.OverrideRPM.objects.create(release=self.release, variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash-missing', rpm_arch='x86_64', include=False, do_not_delete=True) client = Client() response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) self.assertEqual(response.context['useless_overrides'], [orpm]) self.assertEqual(models.OverrideRPM.objects.count(), 2) def test_do_not_delete_unused_exclude_override_on_new_variant_arch(self): orpm = models.OverrideRPM.objects.create(release=self.release, variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='rpm_arch', include=False, do_not_delete=True) client = Client() response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) 
self.assertEqual(response.context['useless_overrides'], [orpm]) self.assertEqual(models.OverrideRPM.objects.count(), 2) def test_update_unused_override_when_creating_conflict(self): orpm = models.OverrideRPM.objects.create(release=self.release, variant='Server', arch='x86_64', srpm_name='bash', rpm_name='bash', rpm_arch='x86_64', include=True, do_not_delete=True) client = Client() create_user("user", perms=["pdc.overrides"]) client.login(username="user", password="user") response = client.get('/override/manage/release-1.0/', {'package': 'bash'}) self.assertEqual(response.context['useless_overrides'], [orpm]) form_data = { 'checks-0-variant': 'Server', 'checks-0-arch': 'x86_64', 'checks-0-rpm_name': 'bash', 'checks-0-rpm_arch': 'x86_64', 'checks-MAX_NUM_FORMS': '1000', 'checks-INITIAL_FORMS': 1, 'checks-TOTAL_FORMS': 1, 'news-MAX_NUM_FORMS': '1000', 'news-INITIAL_FORMS': 1, 'news-TOTAL_FORMS': 0, 'vararch-MAX_NUM_FORMS': '1000', 'vararch-INITIAL_FORMS': 1, 'vararch-TOTAL_FORMS': 0, 'for_new_vararch-MAX_NUM_FORMS': '1000', 'for_new_vararch-INITIAL_FORMS': 0, 'for_new_vararch-TOTAL_FORMS': 0, } response = client.post('/override/manage/release-1.0/?package=bash', form_data) self.assertContains(response, 'warning') self.assertContains(response, 'Will modify override with do_not_delete set.') preview_data = { 'preview_submit': True, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': 1000, 'form-TOTAL_FORMS': 1, 'initial_data': response.context['compressed'], 'form-0-action': 'create', 'form-0-variant': 'Server', 'form-0-arch': 'x86_64', 'form-0-rpm_name': 'bash', 'form-0-rpm_arch': 'x86_64', 'form-0-include': 'False', } response = client.post('/override/manage/release-1.0/?package=bash', preview_data) self.assertEqual(response.status_code, 302) orpm = models.OverrideRPM.objects.latest('id') self.assertFalse(orpm.include)
mit
1,463,572,128,155,624,400
49.842243
125
0.579505
false
3.655262
true
false
false