Dataset schema (one record per source file; ranges as reported by the dataset viewer):

    repo_name       string   (lengths 5 to 92)
    path            string   (lengths 4 to 232)
    copies          string   (19 classes)
    size            string   (lengths 4 to 7)
    content         string   (lengths 721 to 1.04M)
    license         string   (15 classes)
    hash            int64    (-9,223,277,421,539,062,000 to 9,223,102,107B)
    line_mean       float64  (6.51 to 99.9)
    line_max        int64    (15 to 997)
    alpha_frac      float64  (0.25 to 0.97)
    autogenerated   bool     (1 class)
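For orientation, a minimal sketch of how records with this schema might be filtered once the data is on disk; the parquet shard name and the pandas-based approach are assumptions for illustration, not part of the dataset itself:

    # Hypothetical shard name; adjust to the files actually shipped.
    import pandas as pd

    df = pd.read_parquet("train-00000-of-00001.parquet")

    # Keep permissively licensed, non-autogenerated rows, using the
    # per-record statistics described in the schema above.
    mask = (
        ~df["autogenerated"]
        & df["license"].isin(["apache-2.0", "bsd-3-clause", "mit"])
        & (df["line_max"] < 1000)
    )
    for _, row in df[mask].head(3).iterrows():
        print(row["repo_name"], row["path"], row["size"])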
repo_name: dmilos/nucleotide
path: src/nucleotide/component/windows/cygwingcc/translator.py
copies: 1
size: 1382
content:
#!/usr/bin/env python2
# Copyright 2015 Dejan D. M. Milosavljevic
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import platform

import nucleotide
import nucleotide.component
import nucleotide.component.windows
import nucleotide.component.windows.cygwingcc.atom
import nucleotide.component.windows.cygwingcc.atom.blank
import nucleotide.component.linux.gcc.translator  # needed by extend() below


## Detect GCC on Cygwin
class Translator:
    m_list = []

    def __init__(self):
        self.m_list = []

    def get(self):
        return self.m_list

    @staticmethod
    def check():
        return True

    @staticmethod
    def extend(P_options):
        nucleotide.component.windows.cygwingcc.atom.blank.Blank.extend(P_options)
        nucleotide.component.linux.gcc.translator.Translator.extend(P_options)

    @staticmethod
    def _exists(key, sub_key):
        return False
license: apache-2.0
hash: -6,342,641,710,223,210,000
line_mean: 27.404255
line_max: 82
alpha_frac: 0.68958
autogenerated: false
repo_name: datapythonista/pandas
path: pandas/tests/indexing/multiindex/test_getitem.py
copies: 2
size: 12459
content:
import numpy as np
import pytest

from pandas import (
    DataFrame,
    Index,
    MultiIndex,
    Series,
)
import pandas._testing as tm
from pandas.core.indexing import IndexingError

# ----------------------------------------------------------------------------
# test indexing of Series with multi-level Index
# ----------------------------------------------------------------------------


@pytest.mark.parametrize(
    "access_method",
    [lambda s, x: s[:, x], lambda s, x: s.loc[:, x], lambda s, x: s.xs(x, level=1)],
)
@pytest.mark.parametrize(
    "level1_value, expected",
    [(0, Series([1], index=[0])), (1, Series([2, 3], index=[1, 2]))],
)
def test_series_getitem_multiindex(access_method, level1_value, expected):
    # GH 6018
    # series regression getitem with a multi-index
    s = Series([1, 2, 3])
    s.index = MultiIndex.from_tuples([(0, 0), (1, 1), (2, 1)])
    result = access_method(s, level1_value)
    tm.assert_series_equal(result, expected)


@pytest.mark.parametrize("level0_value", ["D", "A"])
def test_series_getitem_duplicates_multiindex(level0_value):
    # GH 5725 the 'A' happens to be a valid Timestamp so this doesn't raise
    # the appropriate error, only in PY3 of course!
    index = MultiIndex(
        levels=[[level0_value, "B", "C"], [0, 26, 27, 37, 57, 67, 75, 82]],
        codes=[[0, 0, 0, 1, 2, 2, 2, 2, 2, 2], [1, 3, 4, 6, 0, 2, 2, 3, 5, 7]],
        names=["tag", "day"],
    )
    arr = np.random.randn(len(index), 1)
    df = DataFrame(arr, index=index, columns=["val"])

    # confirm indexing on missing value raises KeyError
    if level0_value != "A":
        with pytest.raises(KeyError, match=r"^'A'$"):
            df.val["A"]

    with pytest.raises(KeyError, match=r"^'X'$"):
        df.val["X"]

    result = df.val[level0_value]
    expected = Series(
        arr.ravel()[0:3], name="val", index=Index([26, 37, 57], name="day")
    )
    tm.assert_series_equal(result, expected)


def test_series_getitem(multiindex_year_month_day_dataframe_random_data, indexer_sl):
    s = multiindex_year_month_day_dataframe_random_data["A"]
    expected = s.reindex(s.index[42:65])
    expected.index = expected.index.droplevel(0).droplevel(0)

    result = indexer_sl(s)[2000, 3]
    tm.assert_series_equal(result, expected)


def test_series_getitem_returns_scalar(
    multiindex_year_month_day_dataframe_random_data, indexer_sl
):
    s = multiindex_year_month_day_dataframe_random_data["A"]
    expected = s.iloc[49]

    result = indexer_sl(s)[2000, 3, 10]
    assert result == expected


@pytest.mark.parametrize(
    "indexer,expected_error,expected_error_msg",
    [
        (lambda s: s.__getitem__((2000, 3, 4)), KeyError, r"^\(2000, 3, 4\)$"),
        (lambda s: s[(2000, 3, 4)], KeyError, r"^\(2000, 3, 4\)$"),
        (lambda s: s.loc[(2000, 3, 4)], KeyError, r"^\(2000, 3, 4\)$"),
        (lambda s: s.loc[(2000, 3, 4, 5)], IndexingError, "Too many indexers"),
        (lambda s: s.__getitem__(len(s)), KeyError, ""),  # match should include len(s)
        (lambda s: s[len(s)], KeyError, ""),  # match should include len(s)
        (
            lambda s: s.iloc[len(s)],
            IndexError,
            "single positional indexer is out-of-bounds",
        ),
    ],
)
def test_series_getitem_indexing_errors(
    multiindex_year_month_day_dataframe_random_data,
    indexer,
    expected_error,
    expected_error_msg,
):
    s = multiindex_year_month_day_dataframe_random_data["A"]
    with pytest.raises(expected_error, match=expected_error_msg):
        indexer(s)


def test_series_getitem_corner_generator(
    multiindex_year_month_day_dataframe_random_data,
):
    s = multiindex_year_month_day_dataframe_random_data["A"]

    result = s[(x > 0 for x in s)]
    expected = s[s > 0]
    tm.assert_series_equal(result, expected)


# ----------------------------------------------------------------------------
# test indexing of DataFrame with multi-level Index
# ----------------------------------------------------------------------------


def test_getitem_simple(multiindex_dataframe_random_data):
    df = multiindex_dataframe_random_data.T
    expected = df.values[:, 0]
    result = df["foo", "one"].values
    tm.assert_almost_equal(result, expected)


@pytest.mark.parametrize(
    "indexer,expected_error_msg",
    [
        (lambda df: df[("foo", "four")], r"^\('foo', 'four'\)$"),
        (lambda df: df["foobar"], r"^'foobar'$"),
    ],
)
def test_frame_getitem_simple_key_error(
    multiindex_dataframe_random_data, indexer, expected_error_msg
):
    df = multiindex_dataframe_random_data.T
    with pytest.raises(KeyError, match=expected_error_msg):
        indexer(df)


def test_frame_getitem_multicolumn_empty_level():
    df = DataFrame({"a": ["1", "2", "3"], "b": ["2", "3", "4"]})
    df.columns = [
        ["level1 item1", "level1 item2"],
        ["", "level2 item2"],
        ["level3 item1", "level3 item2"],
    ]

    result = df["level1 item1"]
    expected = DataFrame(
        [["1"], ["2"], ["3"]], index=df.index, columns=["level3 item1"]
    )
    tm.assert_frame_equal(result, expected)


@pytest.mark.parametrize(
    "indexer,expected_slice",
    [
        (lambda df: df["foo"], slice(3)),
        (lambda df: df["bar"], slice(3, 5)),
        (lambda df: df.loc[:, "bar"], slice(3, 5)),
    ],
)
def test_frame_getitem_toplevel(
    multiindex_dataframe_random_data, indexer, expected_slice
):
    df = multiindex_dataframe_random_data.T
    expected = df.reindex(columns=df.columns[expected_slice])
    expected.columns = expected.columns.droplevel(0)
    result = indexer(df)
    tm.assert_frame_equal(result, expected)


def test_frame_mixed_depth_get():
    arrays = [
        ["a", "top", "top", "routine1", "routine1", "routine2"],
        ["", "OD", "OD", "result1", "result2", "result1"],
        ["", "wx", "wy", "", "", ""],
    ]

    tuples = sorted(zip(*arrays))
    index = MultiIndex.from_tuples(tuples)
    df = DataFrame(np.random.randn(4, 6), columns=index)

    result = df["a"]
    expected = df["a", "", ""].rename("a")
    tm.assert_series_equal(result, expected)

    result = df["routine1", "result1"]
    expected = df["routine1", "result1", ""]
    expected = expected.rename(("routine1", "result1"))
    tm.assert_series_equal(result, expected)


def test_frame_getitem_nan_multiindex(nulls_fixture):
    # GH#29751
    # loc on a multiindex containing nan values
    n = nulls_fixture  # for code readability
    cols = ["a", "b", "c"]
    df = DataFrame(
        [[11, n, 13], [21, n, 23], [31, n, 33], [41, n, 43]],
        columns=cols,
        dtype="int64",
    ).set_index(["a", "b"])

    idx = (21, n)
    result = df.loc[:idx]
    expected = DataFrame(
        [[11, n, 13], [21, n, 23]], columns=cols, dtype="int64"
    ).set_index(["a", "b"])
    tm.assert_frame_equal(result, expected)

    result = df.loc[idx:]
    expected = DataFrame(
        [[21, n, 23], [31, n, 33], [41, n, 43]], columns=cols, dtype="int64"
    ).set_index(["a", "b"])
    tm.assert_frame_equal(result, expected)

    idx1, idx2 = (21, n), (31, n)
    result = df.loc[idx1:idx2]
    expected = DataFrame(
        [[21, n, 23], [31, n, 33]], columns=cols, dtype="int64"
    ).set_index(["a", "b"])
    tm.assert_frame_equal(result, expected)


@pytest.mark.parametrize(
    "indexer,expected",
    [
        (
            (["b"], ["bar", np.nan]),
            (
                DataFrame(
                    [[2, 3], [5, 6]],
                    columns=MultiIndex.from_tuples([("b", "bar"), ("b", np.nan)]),
                    dtype="int64",
                )
            ),
        ),
        (
            (["a", "b"]),
            (
                DataFrame(
                    [[1, 2, 3], [4, 5, 6]],
                    columns=MultiIndex.from_tuples(
                        [("a", "foo"), ("b", "bar"), ("b", np.nan)]
                    ),
                    dtype="int64",
                )
            ),
        ),
        (
            (["b"]),
            (
                DataFrame(
                    [[2, 3], [5, 6]],
                    columns=MultiIndex.from_tuples([("b", "bar"), ("b", np.nan)]),
                    dtype="int64",
                )
            ),
        ),
        (
            (["b"], ["bar"]),
            (
                DataFrame(
                    [[2], [5]],
                    columns=MultiIndex.from_tuples([("b", "bar")]),
                    dtype="int64",
                )
            ),
        ),
        (
            (["b"], [np.nan]),
            (
                DataFrame(
                    [[3], [6]],
                    columns=MultiIndex(
                        codes=[[1], [-1]], levels=[["a", "b"], ["bar", "foo"]]
                    ),
                    dtype="int64",
                )
            ),
        ),
        (("b", np.nan), Series([3, 6], dtype="int64", name=("b", np.nan))),
    ],
)
def test_frame_getitem_nan_cols_multiindex(
    indexer,
    expected,
    nulls_fixture,
):
    # Slicing MultiIndex including levels with nan values, for more information
    # see GH#25154
    df = DataFrame(
        [[1, 2, 3], [4, 5, 6]],
        columns=MultiIndex.from_tuples(
            [("a", "foo"), ("b", "bar"), ("b", nulls_fixture)]
        ),
        dtype="int64",
    )

    result = df.loc[:, indexer]
    tm.assert_equal(result, expected)


# ----------------------------------------------------------------------------
# test indexing of DataFrame with multi-level Index with duplicates
# ----------------------------------------------------------------------------


@pytest.fixture
def dataframe_with_duplicate_index():
    """Fixture for DataFrame used in tests for gh-4145 and gh-4146"""
    data = [["a", "d", "e", "c", "f", "b"], [1, 4, 5, 3, 6, 2], [1, 4, 5, 3, 6, 2]]
    index = ["h1", "h3", "h5"]
    columns = MultiIndex(
        levels=[["A", "B"], ["A1", "A2", "B1", "B2"]],
        codes=[[0, 0, 0, 1, 1, 1], [0, 3, 3, 0, 1, 2]],
        names=["main", "sub"],
    )
    return DataFrame(data, index=index, columns=columns)


@pytest.mark.parametrize(
    "indexer", [lambda df: df[("A", "A1")], lambda df: df.loc[:, ("A", "A1")]]
)
def test_frame_mi_access(dataframe_with_duplicate_index, indexer):
    # GH 4145
    df = dataframe_with_duplicate_index
    index = Index(["h1", "h3", "h5"])
    columns = MultiIndex.from_tuples([("A", "A1")], names=["main", "sub"])
    expected = DataFrame([["a", 1, 1]], index=columns, columns=index).T

    result = indexer(df)
    tm.assert_frame_equal(result, expected)


def test_frame_mi_access_returns_series(dataframe_with_duplicate_index):
    # GH 4146, not returning a block manager when selecting a unique index
    # from a duplicate index
    # as of 4879, this returns a Series (which is similar to what happens
    # with a non-unique)
    df = dataframe_with_duplicate_index
    expected = Series(["a", 1, 1], index=["h1", "h3", "h5"], name="A1")

    result = df["A"]["A1"]
    tm.assert_series_equal(result, expected)


def test_frame_mi_access_returns_frame(dataframe_with_duplicate_index):
    # selecting a non_unique from the 2nd level
    df = dataframe_with_duplicate_index
    expected = DataFrame(
        [["d", 4, 4], ["e", 5, 5]],
        index=Index(["B2", "B2"], name="sub"),
        columns=["h1", "h3", "h5"],
    ).T

    result = df["A"]["B2"]
    tm.assert_frame_equal(result, expected)


def test_frame_mi_empty_slice():
    # GH 15454
    df = DataFrame(0, index=range(2), columns=MultiIndex.from_product([[1], [2]]))
    result = df[[]]
    expected = DataFrame(
        index=[0, 1], columns=MultiIndex(levels=[[1], [2]], codes=[[], []])
    )
    tm.assert_frame_equal(result, expected)


def test_loc_empty_multiindex():
    # GH#36936
    arrays = [["a", "a", "b", "a"], ["a", "a", "b", "b"]]
    index = MultiIndex.from_arrays(arrays, names=("idx1", "idx2"))
    df = DataFrame([1, 2, 3, 4], index=index, columns=["value"])

    # loc on empty multiindex == loc with False mask
    empty_multiindex = df.loc[df.loc[:, "value"] == 0, :].index
    result = df.loc[empty_multiindex, :]
    expected = df.loc[[False] * len(df.index), :]
    tm.assert_frame_equal(result, expected)

    # replacing value with loc on empty multiindex
    df.loc[df.loc[df.loc[:, "value"] == 0].index, "value"] = 5
    result = df
    expected = DataFrame([1, 2, 3, 4], index=index, columns=["value"])
    tm.assert_frame_equal(result, expected)
license: bsd-3-clause
hash: -3,697,372,801,055,397,000
line_mean: 30.70229
line_max: 87
alpha_frac: 0.524841
autogenerated: false
repo_name: aiorchestra/aiorchestra-openstack-plugin
path: openstack_plugin/tasks/compute.py
copies: 1
size: 6731
content:
# Author: Denys Makogon
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from aiorchestra.core import utils

from openstack_plugin.common import clients
from openstack_plugin.compute import instances


@utils.operation
async def create(node, inputs):
    node.context.logger.info('[{0}] - Attempting to create '
                             'compute instance.'.format(node.name))
    nova = clients.openstack.nova(node)
    glance = clients.openstack.glance(node)

    host_cap = node.get_capability('host')
    if not host_cap:
        raise Exception('[{0}] - Unable to get host.flavor '
                        'capability.'.format(node.name))

    image_artifact = node.get_artifact_by_name('image_ref')
    if not image_artifact:
        raise Exception('[{0}] - Unable to get image '
                        'node artifact.'.format(node.name))

    file_injection_artifacts = setup_injections_from_artifacts(
        node.get_artifact_from_type(
            'tosca.artifacts.openstack.compute.injection_file'))

    userdata = None
    userdata_artifact = node.get_artifact_by_name('userdata')
    if userdata_artifact:
        script_path = userdata_artifact.get('script')
        with open(script_path, 'r') as u_d:
            userdata = u_d.read()

    # in case if file injection was done using dedicated node
    files = node.runtime_properties.get('injections', {})
    files.update(file_injection_artifacts)

    flavor = host_cap['flavor']
    image = image_artifact['id']
    (compute_name, compute_id, config_drive, ssh_key, nics) = (
        node.properties['compute_name'],
        node.properties.get('compute_id'),
        node.properties.get('config_drive'),
        node.runtime_properties.get(
            'ssh_keypair', {'name': None})['name'],
        node.runtime_properties.get('nics', [])
    )

    identifier = compute_name if not compute_id else compute_id
    instance = await instances.create(
        node.context, nova, glance,
        identifier, flavor, image,
        ssh_keyname=ssh_key,
        nics=nics,
        config_drive=config_drive,
        use_existing=True if compute_id else False,
        files=files,
        userdata=userdata,
    )
    networks = [port['net-id'] for port in nics]
    node.batch_update_runtime_properties(**{
        'compute_id': instance.id,
        'server': instance.__dict__,
        'status': instance.status,
        'networks': networks,
        'ports': nics,
    })


def setup_injections_from_artifacts(injection_artifacts):
    mapping = {}
    for artifact in injection_artifacts:
        source = artifact['source']
        destination = artifact['destination']
        with open(source, 'r') as s:
            mapping[destination] = s.read()
    return mapping


@utils.operation
async def setup_injection(node, inputs):
    node.context.logger.info('[{0}] - Setting up file injection.'
                             .format(node.name))
    local_file = node.properties['source']
    remote_file_path = node.properties['destination']
    with open(local_file, 'r') as injection:
        local_file_content = injection.read()
    node.update_runtime_properties(
        'injection', {remote_file_path: local_file_content})


@utils.operation
async def inject_file(source, target, inputs):
    source.context.logger.info('[{0} -----> {1}] - Injecting file to '
                               'compute instance.'
                               .format(target.name, source.name))
    files = source.runtime_properties.get('injections', {})
    files.update(target.runtime_properties['injection'])
    source.update_runtime_properties('injections', files)


@utils.operation
async def eject_file(source, target, inputs):
    source.context.logger.info('[{0} --X--> {1}] - Ejecting file from '
                               'compute instance.'
                               .format(target.name, source.name))
    if 'injections' in source.runtime_properties:
        del source.runtime_properties['injections']


@utils.operation
async def start(node, inputs):
    task_retries = inputs.get('task_retries', 10)
    task_retry_interval = inputs.get('task_retry_interval', 10)
    nova = clients.openstack.nova(node)
    use_existing = True if node.properties.get('compute_id') else False
    name_or_id = node.runtime_properties['compute_id']
    node.context.logger.info('[{0}] - Attempting to start '
                             'compute instance.'.format(node.name))
    await instances.start(
        node.context, nova,
        name_or_id,
        use_existing=use_existing,
        task_retries=task_retries,
        task_retry_interval=task_retry_interval,
    )


@utils.operation
async def delete(node, inputs):
    node.context.logger.info('[{0}] - Attempting to delete compute '
                             'instance.'.format(node.name))
    task_retries = inputs.get('task_retries', 10)
    task_retry_interval = inputs.get('task_retry_interval', 10)
    use_existing = True if node.properties.get('compute_id') else False
    name_or_id = node.runtime_properties['compute_id']
    nova = clients.openstack.nova(node)
    await instances.delete(node.context, nova, name_or_id,
                           use_existing=use_existing,
                           task_retry_interval=task_retry_interval,
                           task_retries=task_retries)
    for attr in ['id', 'server', 'status', 'networks', 'ports']:
        if attr in node.runtime_properties:
            del node.runtime_properties[attr]


@utils.operation
async def stop(node, inputs):
    node.context.logger.info('[{0}] - Attempting to stop compute '
                             'instance.'.format(node.name))
    task_retries = inputs.get('task_retries', 10)
    task_retry_interval = inputs.get('task_retry_interval', 10)
    nova = clients.openstack.nova(node)
    use_existing = True if node.properties.get('compute_id') else False
    name_or_id = node.runtime_properties['compute_id']
    await instances.stop(node.context, nova, name_or_id,
                         use_existing=use_existing,
                         task_retries=task_retries,
                         task_retry_interval=task_retry_interval)
license: apache-2.0
hash: 5,980,241,100,788,522,000
line_mean: 36.814607
line_max: 78
alpha_frac: 0.626504
autogenerated: false
repo_name: kancom/cca
path: grabber/views.py
copies: 1
size: 1536
content:
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, ButtonHolder, Submit
from crispy_forms.bootstrap import Field, InlineRadios, TabHolder, Tab
from django import forms

from . import models
from . import grabber


class NameForm(forms.ModelForm):
    class Meta:
        fields = ('url',)
        model = models.Source_site

    def __init__(self, *args, **kwargs):
        super(NameForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.layout = Layout(
            'url',
            ButtonHolder(
                Submit('start', 'Start', css_class='btn-primary')
            )
        )


def index(request):
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        form = NameForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            # process the data in form.cleaned_data as required
            # ...
            # redirect to a new URL:
            url = form.cleaned_data['url']
            grabber.grab(url)
            return HttpResponseRedirect('/admin/')
    # if a GET (or any other method) we'll create a blank form
    else:
        sources = models.Source_site.objects.get()
        form = NameForm(instance=sources)

    return render(request, 'index.html', {'form': form})
license: apache-2.0
hash: -3,808,581,804,322,514,000
line_mean: 32.391304
line_max: 76
alpha_frac: 0.617839
autogenerated: false
repo_name: ldionne/nstl-lang
path: nstl/sema/scope.py
copies: 1
size: 2717
content:
"""Interface to store lexical scope related information used during semantic analysis.""" import sys from itertools import chain from ..helpers import orderedset class Scope(object): """Parent class for all scopes. Implements generic lexical scoping operations. _parent The parent scope of this scope, or None for outermost scope. _entity The entity associated with this scope, or None. For example, the entity of a namespace scope is the namespace itself. _decls An ordered set keeping track of all declarations in this scope. """ def __init__(self, parent=None, entity=None): if not (isinstance(parent, Scope) or parent is None): raise TypeError( "invalid type {} for parent scope. must be Scope instance or None." .format(type(parent))) self._parent = parent self._entity = entity self._decls = orderedset.OrderedSet() def __contains__(self, decl): """Return whether a declaration was declared in this scope.""" return decl in self._decls def __iter__(self): """Iterate over all the declarations made in this scope. Iteration is done over the declarations in the order they were added. """ for decl in self._decls: yield decl def add(self, decl): """Add a declaration in this scope.""" self._decls.add(decl) def is_outermost(self): """Return whether this scope is an outermost scope.""" return self._parent is None @property def parent(self): """Return the direct parent of this scope, or None when outermost.""" return self._parent def parents(self): """Iterate over the parents of this scope. Iteration is done in lexical order, so innermost parents are visited first. """ if self.is_outermost(): raise StopIteration yield self._parent for parent in self._parent.parents(): yield parent def show(self, buf=sys.stdout, decls=False): """Write a formatted description of a scope and its parents to a buffer. If decls is True, the declarations contained in each scope are shown. """ lead = '' for scope in reversed(list(chain([self], self.parents()))): buf.write(lead + "scope owned by {} :\n".format(self._entity)) if decls: buf.write(lead + str(self._decls) + "\n") lead = lead + ' ' * 2 if __name__ == "__main__": pass
license: bsd-3-clause
hash: -5,247,343,988,452,406,000
line_mean: 31.73494
line_max: 80
alpha_frac: 0.577475
autogenerated: false
repo_name: DOTOCA/plugin.video.netflixbmc
path: default.py
copies: 1
size: 62469
content:
#!/usr/bin/python # -*- coding: utf-8 -*- import os import sys import re import json import time import shutil import threading import subprocess import xbmc import xbmcplugin import xbmcgui import xbmcaddon import xbmcvfs from resources.lib import chrome_cookies trace_on = False addon = xbmcaddon.Addon() if addon.getSetting("sslEnable") == "true": try: # Add support for newer SSL connections in requests # Ensure OpenSSL is installed with system package manager on linux import resources sys.path.append(os.path.dirname(resources.lib.__file__)) import resources.lib.pyOpenSSL import OpenSSL # https://urllib3.readthedocs.org/en/latest/security.html#openssl-pyopenssl import requests.packages.urllib3.contrib.pyopenssl requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3() verify_ssl = True except Exception as ex: import traceback print traceback.format_exc() print "ERROR importing OpenSSL handler" verify_ssl = False import requests import HTMLParser import urllib import socket if addon.getSetting("sslEnable") == "false": verify_ssl = False print "SSL is Disabled" #supress warnings from requests.packages.urllib3.exceptions import InsecureRequestWarning from requests.packages.urllib3.exceptions import InsecurePlatformWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) requests.packages.urllib3.disable_warnings(InsecurePlatformWarning) try: import cPickle as pickle except ImportError: import pickle socket.setdefaulttimeout(40) pluginhandle = int(sys.argv[1]) while (addon.getSetting("username") == "" or addon.getSetting("password") == ""): addon.openSettings() htmlParser = HTMLParser.HTMLParser() addonID = addon.getAddonInfo('id') osWin = xbmc.getCondVisibility('system.platform.windows') osLinux = xbmc.getCondVisibility('system.platform.linux') osOSX = xbmc.getCondVisibility('system.platform.osx') addonDir = xbmc.translatePath(addon.getAddonInfo('path')) defaultFanart = os.path.join(addonDir ,'fanart.png') addonUserDataFolder = xbmc.translatePath("special://profile/addon_data/"+addonID) icon = xbmc.translatePath('special://home/addons/'+addonID+'/icon.png') utilityPath = xbmc.translatePath('special://home/addons/'+addonID+'/resources/NetfliXBMC_Utility.exe') sendKeysPath = xbmc.translatePath('special://home/addons/'+addonID+'/resources/NetfliXBMC_SendKeys.exe') fakeVidPath = xbmc.translatePath('special://home/addons/'+addonID+'/resources/fakeVid.mp4') downloadScript = xbmc.translatePath('special://home/addons/'+addonID+'/download.py') browserScript = xbmc.translatePath('special://home/addons/'+addonID+'/browser.sh') searchHistoryFolder = os.path.join(addonUserDataFolder, "history") cacheFolder = os.path.join(addonUserDataFolder, "cache") cacheFolderCoversTMDB = os.path.join(cacheFolder, "covers") cacheFolderFanartTMDB = os.path.join(cacheFolder, "fanart") libraryFolder = xbmc.translatePath(addon.getSetting("libraryPath")) libraryFolderMovies = os.path.join(libraryFolder, "Movies") libraryFolderTV = os.path.join(libraryFolder, "TV") cookieFile = xbmc.translatePath("special://profile/addon_data/"+addonID+"/cookies") sessionFile = xbmc.translatePath("special://profile/addon_data/"+addonID+"/session") chromeUserDataFolder = os.path.join(addonUserDataFolder, "chrome-user-data") dontUseKiosk = addon.getSetting("dontUseKiosk") == "true" browseTvShows = addon.getSetting("browseTvShows") == "true" singleProfile = addon.getSetting("singleProfile") == "true" isKidsProfile = addon.getSetting('isKidsProfile') == 'true' showProfiles = addon.getSetting("showProfiles") == "true" 
forceView = addon.getSetting("forceView") == "true" useUtility = addon.getSetting("useUtility") == "true" useChromeProfile = addon.getSetting("useChromeProfile") == "true" remoteControl = addon.getSetting("remoteControl") == "true" updateDB = addon.getSetting("updateDB") == "true" useTMDb = addon.getSetting("useTMDb") == "true" username = addon.getSetting("username") password = addon.getSetting("password") viewIdVideos = addon.getSetting("viewIdVideos") viewIdEpisodes = addon.getSetting("viewIdEpisodesNew") viewIdActivity = addon.getSetting("viewIdActivity") winBrowser = int(addon.getSetting("winBrowserNew")) language = addon.getSetting("language") auth = addon.getSetting("auth") authMyList = addon.getSetting("authMyList") linuxUseShellScript = addon.getSetting("linuxUseShellScript") == "true" debug = addon.getSetting("debug") == "true" country = addon.getSetting("country") if len(country)==0 and len(language.split("-"))>1: country = language.split("-")[1] from requests.adapters import HTTPAdapter from requests.packages.urllib3.poolmanager import PoolManager import ssl class SSLAdapter(HTTPAdapter): '''An HTTPS Transport Adapter that uses an arbitrary SSL version.''' def init_poolmanager(self, connections, maxsize, block=False): ssl_version = addon.getSetting("sslSetting") ssl_version = None if ssl_version == 'Auto' else ssl_version self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, ssl_version=ssl_version) urlMain = "https://www.netflix.com" session = None def newSession(): s = requests.Session() s.mount('https://', SSLAdapter()) s.headers.update({ 'User-Agent': 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.6 Safari/537.36', }) return s session = newSession() def unescape(s): return htmlParser.unescape(s) def load(url, post = None): debug("URL: " + url) r = "" try: if post: r = session.post(url, data=post, verify=verify_ssl).text else: r = session.get(url, verify=verify_ssl).text except AttributeError: xbmc.executebuiltin('XBMC.Notification(NetfliXBMC Error: Cookies have been deleted. Please try again.,10000,'+icon+')') newSession() saveState() if post: r = session.post(url, data=post, verify=verify_ssl).text else: r = session.get(url, verify=verify_ssl).text return r.encode('utf-8') def saveState(): tempfile = sessionFile+".tmp" if xbmcvfs.exists(tempfile): xbmcvfs.delete(tempfile) ser = pickle.dumps(session) fh = xbmcvfs.File(tempfile, 'wb') fh.write(ser) fh.close() if xbmcvfs.exists(sessionFile): xbmcvfs.delete(sessionFile) xbmcvfs.rename(tempfile, sessionFile) # Load cached data if not os.path.isdir(addonUserDataFolder): os.mkdir(addonUserDataFolder) if not os.path.isdir(cacheFolder): os.mkdir(cacheFolder) if not os.path.isdir(cacheFolderCoversTMDB): os.mkdir(cacheFolderCoversTMDB) if not os.path.isdir(cacheFolderFanartTMDB): os.mkdir(cacheFolderFanartTMDB) if not os.path.isdir(libraryFolder): xbmcvfs.mkdir(libraryFolder) if not os.path.isdir(libraryFolderMovies): xbmcvfs.mkdir(libraryFolderMovies) if not os.path.isdir(libraryFolderTV): xbmcvfs.mkdir(libraryFolderTV) if os.path.exists(sessionFile): fh = xbmcvfs.File(sessionFile, 'rb') content = fh.read() fh.close() session = pickle.loads(content) if not addon.getSetting("html5MessageShown"): dialog = xbmcgui.Dialog() ok = dialog.ok('IMPORTANT!', 'NetfliXBMC >=1.3.0 only supports the new Netflix HTML5 User Interface! 
The only browsers working with HTML5 DRM playback for now are Chrome>=37 (Win/OSX/Linux) and IExplorer>=11 (Win8.1 only). Make sure you have the latest version installed and check your Netflix settings. Using Silverlight may still partially work, but its not supported anymore. The HTML5 Player is also much faster, supports 1080p and gives you a smoother playback (especially on Linux). See forum.xbmc.org for more info...') addon.setSetting("html5MessageShown", "true") def index(): if login(): addDir(translation(30011), "", 'main', "", "movie") addDir(translation(30012), "", 'main', "", "tv") addDir(translation(30143), "", 'wiHome', "", "both") if not singleProfile: profileName = addon.getSetting("profileName") addDir(translation(30113) + ' - [COLOR blue]' + profileName + '[/COLOR]', "", 'profileDisplayUpdate', 'DefaultAddonService.png', type, contextEnable=False) xbmcplugin.endOfDirectory(pluginhandle) def profileDisplayUpdate(): menuPath = xbmc.getInfoLabel('Container.FolderPath') if not showProfiles: addon.setSetting("profile", None) saveState() xbmc.executebuiltin('Container.Update('+menuPath+')') def main(type): addDir(translation(30002), urlMain+"/MyList?leid=595&link=seeall", 'listVideos', "", type) addDir(translation(30010), "", 'listViewingActivity', "", type) addDir(translation(30003), urlMain+"/WiRecentAdditionsGallery?nRR=releaseDate&nRT=all&pn=1&np=1&actionMethod=json", 'listVideos', "", type) if type=="tv": addDir(translation(30005), urlMain+"/WiGenre?agid=83", 'listVideos', "", type) addDir(translation(30007), "", 'listTvGenres', "", type) else: addDir(translation(30007), "WiGenre", 'listGenres', "", type) addDir(translation(30008), "", 'search', "", type) xbmcplugin.endOfDirectory(pluginhandle) def wiHome(type): content = load(urlMain+"/WiHome") match1 = re.compile('<div class="mrow(.+?)"><div class="hd clearfix"><h3> (.+?)</h3></div><div class="bd clearfix"><div class="slider triangleBtns " id="(.+?)"', re.DOTALL).findall(content) match2 = re.compile('class="hd clearfix"><h3><a href="(.+?)">(.+?)<', re.DOTALL).findall(content) for temp, title, sliderID in match1: if not "hide-completely" in temp: title = re.sub('<.(.+?)</.>', '', title) addDir(title.strip(), sliderID, 'listSliderVideos', "", type) for url, title in match2: if "WiAltGenre" in url or "WiSimilarsByViewType" in url or "WiRecentAdditionsGallery" in url: addDir(title.strip(), url, 'listVideos', "", type) xbmcplugin.endOfDirectory(pluginhandle) def listVideos(url, type): pDialog = xbmcgui.DialogProgress() pDialog.create('NetfliXBMC', translation(30142)+"...") pDialog.update( 0, translation(30142)+"...") xbmcplugin.setContent(pluginhandle, "movies") content = load(url) #content = load(url) # Terrible... currently first call doesn't have the content, it requires two calls.... 
if not 'id="page-LOGIN"' in content: if singleProfile and 'id="page-ProfilesGate"' in content: forceChooseProfile() else: if '<div id="queue"' in content: content = content[content.find('<div id="queue"'):] content = content.replace("\\t","").replace("\\n", "").replace("\\", "") match = None if not match: match = re.compile('<span id="dbs(.+?)_.+?alt=".+?"', re.DOTALL).findall(content) if not match: match = re.compile('<span class="title.*?"><a id="b(.+?)_', re.DOTALL).findall(content) #if not match: match = re.compile('<a href="http://dvd.netflix.com/WiPlayer\?movieid=(.+?)&', re.DOTALL).findall(content) #if not match: match = re.compile('<a class="playHover" href=".+?WiPlayer\?movieid=(.+?)&', re.DOTALL).findall(content) if not match: match = re.compile('"boxart":".+?","titleId":(.+?),', re.DOTALL).findall(content) if not match: match = re.compile('WiPlayer\?movieid=([0-9]+?)&', re.DOTALL).findall(content) i = 1 for videoID in match: pDialog.update(i*100/len(match), translation(30142)+"...") listVideo(videoID, "", "", False, False, type) i+=1 match1 = re.compile('&pn=(.+?)&', re.DOTALL).findall(url) match2 = re.compile('&from=(.+?)&', re.DOTALL).findall(url) matchApiRoot = re.compile('"API_ROOT":"(.+?)"', re.DOTALL).findall(content) matchApiBase = re.compile('"API_BASE_URL":"(.+?)"', re.DOTALL).findall(content) matchIdentifier = re.compile('"BUILD_IDENTIFIER":"(.+?)"', re.DOTALL).findall(content) if "agid=" in url and matchApiRoot and matchApiBase and matchIdentifier: genreID = url[url.find("agid=")+5:] addDir(translation(30001), matchApiRoot[0]+matchApiBase[0]+"/"+matchIdentifier[0]+"/wigenre?genreId="+genreID+"&full=false&from=51&to=100&_retry=0", 'listVideos', "", type) elif match1: currentPage = match1[0] nextPage = str(int(currentPage)+1) addDir(translation(30001), url.replace("&pn="+currentPage+"&", "&pn="+nextPage+"&"), 'listVideos', "", type) elif match2: currentFrom = match2[0] nextFrom = str(int(currentFrom)+50) currentTo = str(int(currentFrom)+49) nextTo = str(int(currentFrom)+99) addDir(translation(30001), url.replace("&from="+currentFrom+"&", "&from="+nextFrom+"&").replace("&to="+currentTo+"&", "&to="+nextTo+"&"), 'listVideos', "", type) if forceView: xbmc.executebuiltin('Container.SetViewMode('+viewIdVideos+')') xbmcplugin.endOfDirectory(pluginhandle) else: deleteCookies() xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30127))+',15000,'+icon+')') def listSliderVideos(sliderID, type): pDialog = xbmcgui.DialogProgress() pDialog.create('NetfliXBMC', translation(30142)+"...") pDialog.update( 0, translation(30142)+"...") xbmcplugin.setContent(pluginhandle, "movies") content = load(urlMain+"/WiHome") if not 'id="page-LOGIN"' in content: if singleProfile and 'id="page-ProfilesGate"' in content: forceChooseProfile() else: content = content.replace("\\t","").replace("\\n", "").replace("\\", "") contentMain = content content = content[content.find('id="'+sliderID+'"'):] content = content[:content.find('class="ft"')] match = re.compile('<span id="dbs(.+?)_', re.DOTALL).findall(content) i = 1 for videoID in match: listVideo(videoID, "", "", False, False, type) i+=1 spl = contentMain.split('"remainderHTML":') for i in range(1, len(spl), 1): entry = spl[i] entry = entry[:entry.find('"rowId":')] if '"domId":"'+sliderID+'"' in entry: match = re.compile('<span id="dbs(.+?)_', re.DOTALL).findall(entry) i = 1 for videoID in match: pDialog.update(i*100/(len(match)+10), translation(30142)+"...") listVideo(videoID, "", "", False, False, type) i+=1 if forceView: 
xbmc.executebuiltin('Container.SetViewMode('+viewIdVideos+')') xbmcplugin.endOfDirectory(pluginhandle) else: deleteCookies() xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30127))+',15000,'+icon+')') def listSearchVideos(url, type): pDialog = xbmcgui.DialogProgress() pDialog.create('NetfliXBMC', translation(30142)+"...") pDialog.update( 0, translation(30142)+"...") xbmcplugin.setContent(pluginhandle, "movies") content = load(url) content = json.loads(content) i = 1 if "galleryVideos" in content: for item in content["galleryVideos"]["items"]: pDialog.update(i*100/len(content["galleryVideos"]["items"]), translation(30142)+"...") listVideo(str(item["id"]), "", "", False, False, type) i+=1 if forceView: xbmc.executebuiltin('Container.SetViewMode('+viewIdVideos+')') xbmcplugin.endOfDirectory(pluginhandle) else: xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30146))+',5000,'+icon+')') def clean_filename(n, chars=None): if isinstance(n, str): return (''.join(c for c in unicode(n, "utf-8") if c not in '/\\:?"*|<>')).strip(chars) elif isinstance(n, unicode): return (''.join(c for c in n if c not in '/\\:?"*|<>')).strip(chars) def listVideo(videoID, title, thumbUrl, tvshowIsEpisode, hideMovies, type): videoDetails = getVideoInfo(videoID) match = re.compile('<span class="title.*?>(.+?)<', re.DOTALL).findall(videoDetails) if not title: title = match[0].strip() year = "" match = re.compile('<span class="year.*?>(.+?)<', re.DOTALL).findall(videoDetails) if match: year = match[0] if not thumbUrl: match = re.compile('src="(.+?)"', re.DOTALL).findall(videoDetails) thumbUrl = match[0].replace("/webp/","/images/").replace(".webp",".jpg") match = re.compile('<span class="mpaaRating.*?>(.+?)<', re.DOTALL).findall(videoDetails) mpaa = "" if match: mpaa = match[0].strip() match = re.compile('<span class="duration.*?>(.+?)<', re.DOTALL).findall(videoDetails) duration = "" if match: duration = match[0].lower() if duration.split(' ')[-1] in ["minutes", "minutos", "minuter", "minutter", "minuuttia", "minuten"]: videoType = "movie" videoTypeTemp = videoType duration = duration.split(" ")[0] else: videoTypeTemp = "tv" if tvshowIsEpisode: videoType = "episode" year = "" else: videoType = "tvshow" duration = "" if useTMDb: yearTemp = year titleTemp = title if " - " in titleTemp: titleTemp = titleTemp[titleTemp.find(" - ")+3:] if "-" in yearTemp: yearTemp = yearTemp.split("-")[0] filename = clean_filename(videoID)+".jpg" filenameNone = clean_filename(videoID)+".none" coverFile = os.path.join(cacheFolderCoversTMDB, filename) coverFileNone = os.path.join(cacheFolderCoversTMDB, filenameNone) if not os.path.exists(coverFile) and not os.path.exists(coverFileNone): debug("Downloading Cover art. 
videoType:"+videoTypeTemp+", videoID:" + videoID + ", title:"+titleTemp+", year:"+yearTemp) xbmc.executebuiltin('XBMC.RunScript('+downloadScript+', '+urllib.quote_plus(videoTypeTemp)+', '+urllib.quote_plus(videoID)+', '+urllib.quote_plus(titleTemp)+', '+urllib.quote_plus(yearTemp)+')') match = re.compile('src=".+?">.*?<.*?>(.+?)<', re.DOTALL).findall(videoDetails) desc = "" if match: descTemp = match[0].decode("utf-8", 'ignore') #replace all embedded unicode in unicode (Norwegian problem) descTemp = descTemp.replace('u2013', u'\u2013').replace('u2026', u'\u2026') desc = htmlParser.unescape(descTemp) match = re.compile('Director:</dt><dd>(.+?)<', re.DOTALL).findall(videoDetails) director = "" if match: director = match[0].strip() match = re.compile('<span class="genre.*?>(.+?)<', re.DOTALL).findall(videoDetails) genre = "" if match: genre = match[0] match = re.compile('<span class="rating">(.+?)<', re.DOTALL).findall(videoDetails) rating = "" if match: rating = match[0] title = htmlParser.unescape(title.decode("utf-8")) nextMode = "playVideoMain" if browseTvShows and videoType == "tvshow": nextMode = "listSeasons" added = False if "/MyList" in url and videoTypeTemp==type: addVideoDirR(title, videoID, nextMode, thumbUrl, videoType, desc, duration, year, mpaa, director, genre, rating) added = True elif videoType == "movie" and hideMovies: pass elif videoTypeTemp==type or type=="both": addVideoDir(title, videoID, nextMode, thumbUrl, videoType, desc, duration, year, mpaa, director, genre, rating) added = True return added def listGenres(type, videoType): xbmcplugin.addSortMethod(pluginhandle, xbmcplugin.SORT_METHOD_LABEL) if isKidsProfile: type = 'KidsAltGenre' content = load(urlMain+"/WiHome") match = re.compile('/'+type+'\\?agid=(.+?)">(.+?)<', re.DOTALL).findall(content) # A number of categories (especially in the Kids genres) have duplicate entires and a lot of whitespice; create a stripped unique set unique_match = set((k[0].strip(), k[1].strip()) for k in match) for genreID, title in unique_match: if not genreID=="83": if isKidsProfile: addDir(title, urlMain+"/"+type+"?agid="+genreID+"&pn=1&np=1&actionMethod=json", 'listVideos', "", videoType) else: addDir(title, urlMain+"/"+type+"?agid="+genreID, 'listVideos', "", videoType) xbmcplugin.endOfDirectory(pluginhandle) def listTvGenres(videoType): xbmcplugin.addSortMethod(pluginhandle, xbmcplugin.SORT_METHOD_LABEL) content = load(urlMain+"/WiGenre?agid=83") content = content[content.find('id="subGenres_menu"'):] content = content[:content.find('</div>')] match = re.compile('<li ><a href=".+?/WiGenre\\?agid=(.+?)&.+?"><span>(.+?)<', re.DOTALL).findall(content) for genreID, title in match: addDir(title, urlMain+"/WiGenre?agid="+genreID, 'listVideos', "", videoType) xbmcplugin.endOfDirectory(pluginhandle) def listSeasons(seriesName, seriesID, thumb): content = getSeriesInfo(seriesID) content = json.loads(content) seasons = [] for item in content["episodes"]: if item[0]["season"] not in seasons: seasons.append(item[0]["season"]) for season in seasons: addSeasonDir("Season "+str(season), str(season), 'listEpisodes', thumb, seriesName, seriesID) xbmcplugin.endOfDirectory(pluginhandle) def listEpisodes(seriesID, season): xbmcplugin.setContent(pluginhandle, "episodes") content = getSeriesInfo(seriesID) content = json.loads(content) for test in content["episodes"]: for item in test: episodeSeason = str(item["season"]) if episodeSeason == season: episodeID = str(item["episodeId"]) episodeNr = str(item["episode"]) episodeTitle = (episodeNr + ". 
" + item["title"]).encode('utf-8') duration = item["runtime"] bookmarkPosition = item["bookmarkPosition"] playcount=0 if (duration>0 and float(bookmarkPosition)/float(duration))>=0.9: playcount=1 desc = item["synopsis"].encode('utf-8') try: thumb = item["stills"][0]["url"] except: thumb = "" addEpisodeDir(episodeTitle, episodeID, 'playVideoMain', thumb, desc, str(duration), season, episodeNr, seriesID, playcount) if forceView: xbmc.executebuiltin('Container.SetViewMode('+viewIdEpisodes+')') xbmcplugin.endOfDirectory(pluginhandle) def listViewingActivity(type): pDialog = xbmcgui.DialogProgress() pDialog.create('NetfliXBMC', translation(30142)+"...") pDialog.update( 0, translation(30142)+"...") xbmcplugin.setContent(pluginhandle, "movies") content = load(urlMain+"/WiViewingActivity") count = 0 videoIDs = [] spl = re.compile('(<li .*?data-series=.*?</li>)', re.DOTALL).findall(content) #spl = content.split('') for i in range(1, len(spl), 1): entry = spl[i] pDialog.update((count+1)*100/len(spl), translation(30142)+"...") matchId = re.compile('data-movieid="(.*?)"', re.DOTALL).findall(entry) if matchId: videoID = matchId[0] match = re.compile('class="col date nowrap">(.+?)<', re.DOTALL).findall(entry) date = match[0] matchTitle1 = re.compile('class="seriestitle">(.+?)</a>', re.DOTALL).findall(entry) matchTitle2 = re.compile('class="col title">.+?>(.+?)<', re.DOTALL).findall(entry) if matchTitle1: title = htmlParser.unescape(matchTitle1[0].decode("utf-8")).replace("</span>", "").encode("utf-8") elif matchTitle2: title = matchTitle2[0] else: title = "" title = date+" - "+title if videoID not in videoIDs: videoIDs.append(videoID) # due to limitations in the netflix api, there is no way to get the seriesId of an # episode, so the 4 param is set to True to treat tv episodes the same as movies. 
added = listVideo(videoID, title, "", True, False, type) if added: count += 1 if count == 40: break if forceView: xbmc.executebuiltin('Container.SetViewMode('+viewIdActivity+')') xbmcplugin.endOfDirectory(pluginhandle) def getVideoInfo(videoID): cacheFile = os.path.join(cacheFolder, videoID+".cache") content = "" if os.path.exists(cacheFile): fh = xbmcvfs.File(cacheFile, 'r') content = fh.read() fh.close() if not content: content = load(urlMain+"/JSON/BOB?movieid="+videoID) fh = xbmcvfs.File(cacheFile, 'w') fh.write(content) fh.close() return content.replace("\\t","").replace("\\n", "").replace("\\", "") def getSeriesInfo(seriesID): cacheFile = os.path.join(cacheFolder, seriesID+"_episodes.cache") content = "" if os.path.exists(cacheFile) and (time.time()-os.path.getmtime(cacheFile) < 60*5): fh = xbmcvfs.File(cacheFile, 'r') content = fh.read() fh.close() if not content: url = "http://api-global.netflix.com/desktop/odp/episodes?languages="+language+"&forceEpisodes=true&routing=redirect&video="+seriesID+"&country="+country content = load(url) fh = xbmcvfs.File(cacheFile, 'w') fh.write(content) fh.close() # if netflix throws exception they may still return content after the exception index = content.find('{"title":') if index != -1: content = content[index:] return content def addMyListToLibrary(): if not singleProfile: token = "" if addon.getSetting("profile"): token = addon.getSetting("profile") load("https://www.netflix.com/SwitchProfile?tkn="+token) content = load(urlMain+"/MyList?leid=595&link=seeall") if not 'id="page-LOGIN"' in content: if singleProfile and 'id="page-ProfilesGate"' in content: forceChooseProfile() else: if '<div id="queue"' in content: content = content[content.find('<div id="queue"'):] content = content.replace("\\t","").replace("\\n", "").replace("\\", "") match1 = re.compile('<span id="dbs(.+?)_.+?alt=".+?"', re.DOTALL).findall(content) match2 = re.compile('<span class="title.*?"><a id="b(.+?)_', re.DOTALL).findall(content) match3 = re.compile('<a href="http://dvd.netflix.com/WiPlayer\?movieid=(.+?)&', re.DOTALL).findall(content) match4 = re.compile('<a class="playHover" href=".+?WiPlayer\?movieid=(.+?)&', re.DOTALL).findall(content) match5 = re.compile('"boxart":".+?","titleId":(.+?),', re.DOTALL).findall(content) if match1: match = match1 elif match2: match = match2 elif match3: match = match3 elif match4: match = match4 elif match5: match = match5 for videoID in match: videoDetails = getVideoInfo(videoID) match = re.compile('<span class="title ".*?>(.+?)<\/span>', re.DOTALL).findall(videoDetails) title = match[0].strip() title = htmlParser.unescape(title.decode("utf-8")) match = re.compile('<span class="year".*?>(.+?)<\/span>', re.DOTALL).findall(videoDetails) year = "" if match: year = match[0] match = re.compile('<span class="duration.*?".*?>(.+?)<\/span>', re.DOTALL).findall(videoDetails) duration = "" if match: duration = match[0].lower() if "minutes" in duration: try: if year: title = title+" ("+year+")" addMovieToLibrary(videoID, title, False) except: pass else: try: addSeriesToLibrary(videoID, title, "", False) except: pass if updateDB: xbmc.executebuiltin('UpdateLibrary(video)') def playVideo(id): listitem = xbmcgui.ListItem(path=fakeVidPath) xbmcplugin.setResolvedUrl(pluginhandle, True, listitem) playVideoMain(id) xbmc.PlayList(xbmc.PLAYLIST_VIDEO).clear() def playVideoMain(id): xbmc.Player().stop() if singleProfile: url = urlMain+"/WiPlayer?movieid="+id else: token = "" if addon.getSetting("profile"): token = addon.getSetting("profile") url = 
"https://www.netflix.com/SwitchProfile?tkn="+token+"&nextpage="+urllib.quote_plus(urlMain+"/WiPlayer?movieid="+id) if osOSX: launchChrome(url) #xbmc.executebuiltin("RunPlugin(plugin://plugin.program.chrome.launcher/?url="+urllib.quote_plus(url)+"&mode=showSite&kiosk="+kiosk+")") try: xbmc.sleep(5000) subprocess.Popen('cliclick c:500,500', shell=True) subprocess.Popen('cliclick kp:arrow-up', shell=True) xbmc.sleep(5000) subprocess.Popen('cliclick c:500,500', shell=True) subprocess.Popen('cliclick kp:arrow-up', shell=True) xbmc.sleep(5000) subprocess.Popen('cliclick c:500,500', shell=True) subprocess.Popen('cliclick kp:arrow-up', shell=True) except: pass elif osLinux: if linuxUseShellScript: xbmc.executebuiltin('LIRC.Stop') call = '"'+browserScript+'" "'+url+'"'; debug("Browser Call: " + call) subprocess.call(call, shell=True) xbmc.executebuiltin('LIRC.Start') else: launchChrome(url) #xbmc.executebuiltin("RunPlugin(plugin://plugin.program.chrome.launcher/?url="+urllib.quote_plus(url)+"&mode=showSite&kiosk="+kiosk+")") try: xbmc.sleep(5000) subprocess.Popen('xdotool mousemove 9999 9999', shell=True) xbmc.sleep(5000) subprocess.Popen('xdotool mousemove 9999 9999', shell=True) xbmc.sleep(5000) subprocess.Popen('xdotool mousemove 9999 9999', shell=True) except: pass elif osWin: if winBrowser == 1: path = 'C:\\Program Files\\Internet Explorer\\iexplore.exe' path64 = 'C:\\Program Files (x86)\\Internet Explorer\\iexplore.exe' if os.path.exists(path): subprocess.Popen('"'+path+'" -k "'+url+'"', shell=False) elif os.path.exists(path64): subprocess.Popen('"'+path64+'" -k "'+url+'"', shell=False) else: launchChrome(url) #xbmc.executebuiltin("RunPlugin(plugin://plugin.program.chrome.launcher/?url="+urllib.quote_plus(url)+"&mode=showSite&kiosk="+kiosk+")") if useUtility: subprocess.Popen('"'+utilityPath+'"', shell=False) myWindow = window('window.xml', addon.getAddonInfo('path'), 'default',) myWindow.doModal() myWindow.stopWakeupThread() # insurance, in case self.close() wasn't the method by which the window was closed def launchChrome(url): kiosk = "yes" if dontUseKiosk: kiosk = "no" profileFolder = "" if useChromeProfile: if not os.path.exists(chromeUserDataFolder): import zipfile zip = os.path.join(addonDir, "resources", "chrome-user-data.zip") with open(zip, "rb") as zf: z = zipfile.ZipFile(zf) z.extractall(addonUserDataFolder) profileFolder = "&profileFolder="+urllib.quote_plus(chromeUserDataFolder) # Inject cookies chrome_cookies.inject_cookies_into_chrome(session, os.path.join(chromeUserDataFolder, "Default", "Cookies")) xbmc.executebuiltin("RunPlugin(plugin://plugin.program.chrome.launcher/?url="+urllib.quote_plus(url)+"&mode=showSite&kiosk="+kiosk+profileFolder+")") def configureUtility(): if osWin: subprocess.Popen('"'+utilityPath+'"'+' config=yes', shell=False) def chromePluginOptions(): url = "chrome-extension://najegmllpphoobggcngjhcpknknljhkj/html/options.html" launchChrome(url) def deleteCookies(): if os.path.exists(cookieFile): os.remove(cookieFile) xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,Cookies have been deleted!,5000,'+icon+')') if os.path.exists(sessionFile): os.remove(sessionFile) xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,Session cookies have been deleted!,5000,'+icon+')') def deleteCache(): if os.path.exists(cacheFolder): try: shutil.rmtree(cacheFolder) xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,Cache has been deleted!,5000,'+icon+')') except: pass def deleteChromeUserDataFolder(): if os.path.exists(chromeUserDataFolder): try: 
shutil.rmtree(chromeUserDataFolder) xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,Chrome UserData has been deleted!,5000,'+icon+')') except: pass def resetAddon(): dialog = xbmcgui.Dialog() if dialog.yesno("NetfliXBMC:", "Really reset the addon?"): if os.path.exists(addonUserDataFolder): try: shutil.rmtree(addonUserDataFolder) xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,Addon has been reset!,5000,'+icon+')') except: pass def search(type): keyboard = xbmc.Keyboard('', translation(30008)) keyboard.doModal() if keyboard.isConfirmed() and keyboard.getText(): search_string = keyboard.getText().replace(" ", "+") listSearchVideos("http://api-global.netflix.com/desktop/search/instantsearch?esn=www&term="+search_string+"&locale="+language+"&country="+country+"&authURL="+auth+"&_retry=0&routing=redirect", type) def addToQueue(id): if authMyList: encodedAuth = urllib.urlencode({'authURL': authMyList}) load(urlMain+"/AddToQueue?movieid="+id+"&qtype=INSTANT&"+encodedAuth) xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30144))+',3000,'+icon+')') else: debug("Attempted to addToQueue without valid authMyList") def removeFromQueue(id): if authMyList: encodedAuth = urllib.urlencode({'authURL': authMyList}) load(urlMain+"/QueueDelete?"+encodedAuth+"&qtype=ED&movieid="+id) xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30145))+',3000,'+icon+')') xbmc.executebuiltin("Container.Refresh") else: debug("Attempted to removeFromQueue without valid authMyList") def displayLoginProgress(progressWindow, value, message): progressWindow.update( value, "", message, "" ) if progressWindow.iscanceled(): return False else: return True def login(): #setup login progress display loginProgress = xbmcgui.DialogProgress() loginProgress.create('NETFLIXBMC', str(translation(30216)) + '...') displayLoginProgress(loginProgress, 25, str(translation(30217))) session.cookies.clear() content = load(urlMain+"/Login") match = re.compile('"LOCALE":"(.+?)"', re.DOTALL|re.IGNORECASE).findall(content) if match and not addon.getSetting("language"): addon.setSetting("language", match[0]) if not "Sorry, Netflix is not available in your country yet." 
in content and not "Sorry, Netflix hasn't come to this part of the world yet" in content: match = re.compile('id="signout".+?authURL=(.+?)"', re.DOTALL).findall(content) if match: addon.setSetting("auth", match[0]) if 'id="page-LOGIN"' in content: match = re.compile('name="authURL" value="(.+?)"', re.DOTALL).findall(content) authUrl = match[0] addon.setSetting("auth", authUrl) #postdata = "authURL="+urllib.quote_plus(authUrl)+"&email="+urllib.quote_plus(username)+"&password="+urllib.quote_plus(password)+"&RememberMe=on" postdata ={ "authURL":authUrl, "email":username, "password":password, "RememberMe":"on" } #content = load("https://signup.netflix.com/Login", "authURL="+urllib.quote_plus(authUrl)+"&email="+urllib.quote_plus(username)+"&password="+urllib.quote_plus(password)+"&RememberMe=on") displayLoginProgress(loginProgress, 50, str(translation(30218))) content = load("https://signup.netflix.com/Login", postdata) if 'id="page-LOGIN"' in content: # Login Failed xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30127))+',15000,'+icon+')') return False match = re.compile('"LOCALE":"(.+?)"', re.DOTALL|re.IGNORECASE).findall(content) if match and not addon.getSetting("language"): addon.setSetting("language", match[0]) match = re.compile('"COUNTRY":"(.+?)"', re.DOTALL|re.IGNORECASE).findall(content) if match: # always overwrite the country code, to cater for switching regions debug("Setting Country: " + match[0]) addon.setSetting("country", match[0]) saveState() displayLoginProgress(loginProgress, 75, str(translation(30219))) if not addon.getSetting("profile") and not singleProfile: chooseProfile() elif not singleProfile and showProfiles: chooseProfile() elif not singleProfile and not showProfiles: loadProfile() else: getMyListChangeAuthorisation() if loginProgress: if not displayLoginProgress(loginProgress, 100, str(translation(30220))): return False xbmc.sleep(500) loginProgress.close() return True else: xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30126))+',10000,'+icon+')') if loginProgress: loginProgress.close() return False def debug(message): if debug: print message def loadProfile(): savedProfile = addon.getSetting("profile") if savedProfile: load("https://api-global.netflix.com/desktop/account/profiles/switch?switchProfileGuid="+savedProfile) saveState() else: debug("LoadProfile: No stored profile found") getMyListChangeAuthorisation() def chooseProfile(): content = load("https://www.netflix.com/ProfilesGate?nextpage=http%3A%2F%2Fwww.netflix.com%2FDefault") matchType = 0 match = re.compile('"profileName":"(.+?)".+?token":"(.+?)"', re.DOTALL).findall(content) if len(match): matchType = 1 if not len(match): match = re.compile('"firstName":"(.+?)".+?guid":"(.+?)".+?experience":"(.+?)"', re.DOTALL).findall(content) if len(match): matchType = 1 if not len(match): match = re.compile('"experience":"(.+?)".+?guid":"(.+?)".+?profileName":"(.+?)"', re.DOTALL).findall(content) if len(match): matchType = 2 profiles = [] # remove any duplicated profile data found during page scrape match = [item for count, item in enumerate(match) if item not in match[:count]] if matchType == 1: for p, t, e in match: profile = {'name': unescape(p), 'token': t, 'isKids': e=='jfk'} profiles.append(profile) elif matchType == 2: for e, t, p in match: profile = {'name': unescape(p), 'token': t, 'isKids': e=='jfk'} profiles.append(profile) if matchType > 0: dialog = xbmcgui.Dialog() nr = dialog.select(translation(30113), [profile['name'] for profile in profiles]) if nr >= 0: 
selectedProfile = profiles[nr] else: selectedProfile = profiles[0] load("https://api-global.netflix.com/desktop/account/profiles/switch?switchProfileGuid="+selectedProfile['token']) addon.setSetting("profile", selectedProfile['token']) addon.setSetting("isKidsProfile", 'true' if selectedProfile['isKids'] else 'false') addon.setSetting("profileName", selectedProfile['name']) saveState() getMyListChangeAuthorisation() else: debug("Netflixbmc::chooseProfile: No profiles were found") def getMyListChangeAuthorisation(): content = load(urlMain+"/WiHome") match = re.compile('"xsrf":"(.+?)"', re.DOTALL).findall(content) if match: authMyList = match[0] addon.setSetting("authMyList", match[0]) def forceChooseProfile(): addon.setSetting("singleProfile", "false") xbmc.executebuiltin('XBMC.Notification(NetfliXBMC:,'+str(translation(30111))+',5000,'+icon+')') chooseProfile() def addMovieToLibrary(movieID, title, singleUpdate=True): movieFolderName = clean_filename(title+".strm", ' .').strip(' .') dirAndFilename = os.path.join(libraryFolderMovies, movieFolderName) fh = xbmcvfs.File(dirAndFilename, 'w') fh.write("plugin://plugin.video.netflixbmc/?mode=playVideo&url="+movieID) fh.close() if updateDB and singleUpdate: xbmc.executebuiltin('UpdateLibrary(video)') def addSeriesToLibrary(seriesID, seriesTitle, season, singleUpdate=True): seriesFolderName = clean_filename(seriesTitle, ' .') seriesDir = os.path.join(libraryFolderTV, seriesFolderName) if not os.path.isdir(seriesDir): xbmcvfs.mkdir(seriesDir) content = getSeriesInfo(seriesID) content = json.loads(content) for test in content["episodes"]: for item in test: episodeSeason = str(item["season"]) seasonCheck = True if season: seasonCheck = episodeSeason == season if seasonCheck: seasonDir = os.path.join(seriesDir, "Season "+episodeSeason) if not os.path.isdir(seasonDir): xbmcvfs.mkdir(seasonDir) episodeID = str(item["episodeId"]) episodeNr = str(item["episode"]) episodeTitle = item["title"].encode('utf-8') if len(episodeNr) == 1: episodeNr = "0"+episodeNr seasonNr = episodeSeason if len(seasonNr) == 1: seasonNr = "0"+seasonNr filename = "S"+seasonNr+"E"+episodeNr+" - "+episodeTitle+".strm" filename = clean_filename(filename, ' .') fh = xbmcvfs.File(os.path.join(seasonDir, filename), 'w') fh.write("plugin://plugin.video.netflixbmc/?mode=playVideo&url="+episodeID) fh.close() if updateDB and singleUpdate: xbmc.executebuiltin('UpdateLibrary(video)') def playTrailer(title): try: content = load("http://gdata.youtube.com/feeds/api/videos?vq="+title.strip().replace(" ", "+")+"+trailer&racy=include&orderby=relevance") match = re.compile('<id>http://gdata.youtube.com/feeds/api/videos/(.+?)</id>', re.DOTALL).findall(content.split('<entry>')[2]) xbmc.Player().play("plugin://plugin.video.youtube/play/?video_id=" + match[0]) except: pass def translation(id): return addon.getLocalizedString(id).encode('utf-8') def parameters_string_to_dict(parameters): paramDict = {} if parameters: paramPairs = parameters[1:].split("&") for paramsPair in paramPairs: paramSplits = paramsPair.split('=') if (len(paramSplits)) == 2: paramDict[paramSplits[0]] = paramSplits[1] return paramDict def addDir(name, url, mode, iconimage, type="", contextEnable=True): name = htmlParser.unescape(name) u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&type="+str(type)+"&thumb="+urllib.quote_plus(iconimage) ok = True liz = xbmcgui.ListItem(name, iconImage="DefaultTVShows.png", thumbnailImage=iconimage) liz.setInfo(type="video", infoLabels={"title": name}) entries = [] if "/MyList" in 
def addDir(name, url, mode, iconimage, type="", contextEnable=True):
    name = htmlParser.unescape(name)
    u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&type="+str(type)+"&thumb="+urllib.quote_plus(iconimage)
    ok = True
    liz = xbmcgui.ListItem(name, iconImage="DefaultTVShows.png", thumbnailImage=iconimage)
    liz.setInfo(type="video", infoLabels={"title": name})
    entries = []
    if "/MyList" in url:
        entries.append((translation(30122), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=addMyListToLibrary)',))
    liz.setProperty("fanart_image", defaultFanart)
    if contextEnable:
        liz.addContextMenuItems(entries)
    else:
        emptyEntries = []
        liz.addContextMenuItems(emptyEntries, replaceItems=True)
    ok = xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=True)
    return ok


def addVideoDir(name, url, mode, iconimage, videoType="", desc="", duration="", year="", mpaa="", director="", genre="", rating=""):
    if duration:
        duration = str(int(duration) * 60)
    name = name.encode("utf-8")
    filename = clean_filename(url)+".jpg"
    coverFile = os.path.join(cacheFolderCoversTMDB, filename)
    fanartFile = os.path.join(cacheFolderFanartTMDB, filename)
    if os.path.exists(coverFile):
        iconimage = coverFile
    u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&thumb="+urllib.quote_plus(iconimage)
    ok = True
    liz = xbmcgui.ListItem(name, iconImage="DefaultTVShows.png", thumbnailImage=iconimage)
    liz.setInfo(type="video", infoLabels={"title": name, "plot": desc, "duration": duration, "year": year, "mpaa": mpaa, "director": director, "genre": genre, "rating": float(rating)})
    if os.path.exists(fanartFile):
        liz.setProperty("fanart_image", fanartFile)
    elif os.path.exists(coverFile):
        liz.setProperty("fanart_image", coverFile)
    entries = []
    if videoType == "tvshow":
        if browseTvShows:
            entries.append((translation(30121), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=playVideoMain&url='+urllib.quote_plus(url)+'&thumb='+urllib.quote_plus(iconimage)+')',))
        else:
            entries.append((translation(30118), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=listSeasons&url='+urllib.quote_plus(url)+'&thumb='+urllib.quote_plus(iconimage)+')',))
    if videoType != "episode":
        entries.append((translation(30134), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=playTrailer&url='+urllib.quote_plus(name)+')',))
    entries.append((translation(30114), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=addToQueue&url='+urllib.quote_plus(url)+')',))
    entries.append((translation(30140), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=listVideos&url='+urllib.quote_plus(urlMain+"/WiMovie/"+url)+'&type=movie)',))
    entries.append((translation(30141), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=listVideos&url='+urllib.quote_plus(urlMain+"/WiMovie/"+url)+'&type=tv)',))
    if videoType == "tvshow":
        entries.append((translation(30122), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=addSeriesToLibrary&url=&name='+urllib.quote_plus(name.strip())+'&seriesID='+urllib.quote_plus(url)+')',))
    elif videoType == "movie":
        entries.append((translation(30122), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=addMovieToLibrary&url='+urllib.quote_plus(url)+'&name='+urllib.quote_plus(name.strip()+' ('+year+')')+')',))
    liz.addContextMenuItems(entries)
    ok = xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=True)
    return ok


def addVideoDirR(name, url, mode, iconimage, videoType="", desc="", duration="", year="", mpaa="", director="", genre="", rating=""):
    if duration:
        duration = str(int(duration) * 60)
    name = name.encode("utf-8")
    filename = clean_filename(url)+".jpg"
    coverFile = os.path.join(cacheFolderCoversTMDB, filename)
    fanartFile = os.path.join(cacheFolderFanartTMDB, filename)
    if os.path.exists(coverFile):
        iconimage = coverFile
    u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&thumb="+urllib.quote_plus(iconimage)
    ok = True
    liz = xbmcgui.ListItem(name, iconImage="DefaultTVShows.png", thumbnailImage=iconimage)
    liz.setInfo(type="video", infoLabels={"title": name, "plot": desc, "duration": duration, "year": year, "mpaa": mpaa, "director": director, "genre": genre, "rating": float(rating)})
    if os.path.exists(fanartFile):
        liz.setProperty("fanart_image", fanartFile)
    elif os.path.exists(coverFile):
        liz.setProperty("fanart_image", coverFile)
    entries = []
    if videoType == "tvshow":
        if browseTvShows:
            entries.append((translation(30121), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=playVideoMain&url='+urllib.quote_plus(url)+'&thumb='+urllib.quote_plus(iconimage)+')',))
        else:
            entries.append((translation(30118), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=listSeasons&url='+urllib.quote_plus(url)+'&thumb='+urllib.quote_plus(iconimage)+')',))
    entries.append((translation(30134), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=playTrailer&url='+urllib.quote_plus(name)+')',))
    entries.append((translation(30115), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=removeFromQueue&url='+urllib.quote_plus(url)+')',))
    entries.append((translation(30140), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=listVideos&url='+urllib.quote_plus(urlMain+"/WiMovie/"+url)+'&type=movie)',))
    entries.append((translation(30141), 'Container.Update(plugin://plugin.video.netflixbmc/?mode=listVideos&url='+urllib.quote_plus(urlMain+"/WiMovie/"+url)+'&type=tv)',))
    # quote all values so titles containing spaces or '&' survive the plugin:// URL
    if videoType == "tvshow":
        entries.append((translation(30122), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=addSeriesToLibrary&url=&name='+urllib.quote_plus(name.strip())+'&seriesID='+urllib.quote_plus(url)+')',))
    elif videoType == "movie":
        entries.append((translation(30122), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=addMovieToLibrary&url='+urllib.quote_plus(url)+'&name='+urllib.quote_plus(name.strip()+' ('+year+')')+')',))
    liz.addContextMenuItems(entries)
    ok = xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=True)
    return ok


def addSeasonDir(name, url, mode, iconimage, seriesName, seriesID):
    filename = clean_filename(seriesID)+".jpg"
    fanartFile = os.path.join(cacheFolderFanartTMDB, filename)
    coverFile = os.path.join(cacheFolderCoversTMDB, filename)
    u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&seriesID="+urllib.quote_plus(seriesID)
    ok = True
    liz = xbmcgui.ListItem(name, iconImage="DefaultTVShows.png", thumbnailImage=iconimage)
    liz.setInfo(type="video", infoLabels={"title": name})
    if os.path.exists(fanartFile):
        liz.setProperty("fanart_image", fanartFile)
    elif os.path.exists(coverFile):
        liz.setProperty("fanart_image", coverFile)
    entries = []
    entries.append((translation(30122), 'RunPlugin(plugin://plugin.video.netflixbmc/?mode=addSeriesToLibrary&url='+urllib.quote_plus(url)+'&name='+urllib.quote_plus(seriesName.strip())+'&seriesID='+urllib.quote_plus(seriesID)+')',))
    liz.addContextMenuItems(entries)
    ok = xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=True)
    return ok


def addEpisodeDir(name, url, mode, iconimage, desc="", duration="", season="", episodeNr="", seriesID="", playcount=""):
    if duration:
        duration = str(int(duration) * 60)
    filename = clean_filename(seriesID)+".jpg"
    fanartFile = os.path.join(cacheFolderFanartTMDB, filename)
    coverFile = os.path.join(cacheFolderCoversTMDB, filename)
    u = sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)
    ok = True
    liz = xbmcgui.ListItem(name, iconImage="DefaultTVShows.png", thumbnailImage=iconimage)
    liz.setInfo(type="video", infoLabels={"title": name, "plot": desc, "duration": duration, "season": season, "episode": episodeNr, "playcount": playcount})
    if os.path.exists(fanartFile):
        liz.setProperty("fanart_image", fanartFile)
    elif os.path.exists(coverFile):
        liz.setProperty("fanart_image", coverFile)
    ok = xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=True)
    return ok


class window(xbmcgui.WindowXMLDialog):
    def __init__(self, *args, **kwargs):
        xbmcgui.WindowXMLDialog.__init__(self, *args, **kwargs)
        self._stopEvent = threading.Event()
        self._wakeUpThread = threading.Thread(target=self._wakeUpThreadProc)
        self._wakeUpThread.start()

    def _wakeUpThreadProc(self):
        while not self._stopEvent.is_set():
            if debug:
                print "Netflixbmc: Sending wakeup to main UI to avoid idle/DPMS..."
            xbmc.executebuiltin("playercontrol(wakeup)")
            # bit of a hack above: wakeup is actually not a valid playercontrol
            # argument, but there's no error printed if the argument isn't found
            # and any playercontrol causes the DPMS/idle timeout to reset itself
            self._stopEvent.wait(60)
        if debug:
            print "Netflixbmc: wakeup thread finishing."

    def stopWakeupThread(self):
        if debug:
            print "Netflixbmc: stopping wakeup thread"
        self._stopEvent.set()
        self._wakeUpThread.join()

    def close(self):
        if debug:
            print "Netflixbmc: closing dummy window"
        self.stopWakeupThread()
        xbmcgui.WindowXMLDialog.close(self)

    def onAction(self, action):
        ACTION_SELECT_ITEM = 7
        ACTION_PARENT_DIR = 9
        ACTION_PREVIOUS_MENU = 10
        ACTION_PAUSE = 12
        ACTION_STOP = 13
        ACTION_SHOW_INFO = 11
        ACTION_SHOW_GUI = 18
        ACTION_MOVE_LEFT = 1
        ACTION_MOVE_RIGHT = 2
        ACTION_MOVE_UP = 3
        ACTION_MOVE_DOWN = 4
        ACTION_PLAYER_PLAY = 79
        ACTION_VOLUME_UP = 88
        ACTION_VOLUME_DOWN = 89
        ACTION_MUTE = 91
        ACTION_CONTEXT_MENU = 117
        ACTION_BUILT_IN_FUNCTION = 122
        KEY_BUTTON_BACK = 275
        if not remoteControl and action != ACTION_BUILT_IN_FUNCTION:
            # if we're not passing remote control actions, any non-autogenerated
            # remote action that reaches here is a signal to close this dummy
            # window as Chrome is gone
            if debug:
                print "Netflixbmc: Closing dummy window after action %d" % (action.getId())
            self.close()
            return
        if osWin:
            proc = subprocess.Popen('WMIC PROCESS get Caption', shell=True, stdout=subprocess.PIPE)
            procAll = ""
            for line in proc.stdout:
                procAll += line
            if "chrome.exe" in procAll:
                if action in [ACTION_SHOW_INFO, ACTION_SHOW_GUI, ACTION_STOP, ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_BUTTON_BACK]:
                    subprocess.Popen('"'+sendKeysPath+'"'+' sendKey=Close', shell=False)
                    self.close()
                elif action == ACTION_SELECT_ITEM:
                    subprocess.Popen('"'+sendKeysPath+'"'+' sendKey=PlayPause', shell=False)
                elif action == ACTION_MOVE_LEFT:
                    subprocess.Popen('"'+sendKeysPath+'"'+' sendKey=SeekLeft', shell=False)
                elif action == ACTION_MOVE_RIGHT:
                    subprocess.Popen('"'+sendKeysPath+'"'+' sendKey=SeekRight', shell=False)
                elif action == ACTION_MOVE_UP:
                    subprocess.Popen('"'+sendKeysPath+'"'+' sendKey=VolumeUp', shell=False)
                elif action == ACTION_MOVE_DOWN:
                    subprocess.Popen('"'+sendKeysPath+'"'+' sendKey=VolumeDown', shell=False)
            else:
                self.close()
        elif osLinux:
            doClose = False
            key = None
            if action in [ACTION_SHOW_GUI, ACTION_STOP, ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_BUTTON_BACK]:
                key = "control+shift+q"
                doClose = True
            elif action in [ACTION_SELECT_ITEM, ACTION_PLAYER_PLAY, ACTION_PAUSE]:
                key = "space"
            elif action == ACTION_MOVE_LEFT:
                key = "Left"
            elif action == ACTION_MOVE_RIGHT:
                key = "Right"
            elif action == ACTION_SHOW_INFO:
                key = "question"
            elif action == ACTION_VOLUME_UP:
                key = "Up"
            elif action == ACTION_VOLUME_DOWN:
                key = "Down"
            elif action == ACTION_MUTE:
                key = "M"
            elif action == ACTION_CONTEXT_MENU:
                key = "ctrl+alt+shift+d"
            elif debug:
                print "Netflixbmc: unmapped key action=%d" % (action.getId())
            if key is not None:
                p = subprocess.Popen('xdotool search --onlyvisible --class "google-chrome|Chromium" key %s' % key, shell=True)
                p.wait()
                # 0 for success, 127 if xdotool not found in PATH. Return code is
                # 1 if window not found (indicating should close).
                if p.returncode not in [0, 127] or doClose:
                    self.close()
                if debug:
                    print "Netflixbmc: remote action=%d key=%s xdotool result=%d" % (action.getId(), key, p.returncode)
        elif osOSX:
            proc = subprocess.Popen('/bin/ps ax', shell=True, stdout=subprocess.PIPE)
            procAll = ""
            for line in proc.stdout:
                procAll += line
            if "chrome" in procAll:
                if action in [ACTION_SHOW_INFO, ACTION_SHOW_GUI, ACTION_STOP, ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_BUTTON_BACK]:
                    subprocess.Popen('cliclick kd:cmd t:q ku:cmd', shell=True)
                    self.close()
                elif action == ACTION_SELECT_ITEM:
                    subprocess.Popen('cliclick t:p', shell=True)
                elif action == ACTION_MOVE_LEFT:
                    subprocess.Popen('cliclick kp:arrow-left', shell=True)
                elif action == ACTION_MOVE_RIGHT:
                    subprocess.Popen('cliclick kp:arrow-right', shell=True)
                elif action == ACTION_MOVE_UP:
                    subprocess.Popen('cliclick kp:arrow-up', shell=True)
                elif action == ACTION_MOVE_DOWN:
                    subprocess.Popen('cliclick kp:arrow-down', shell=True)
            else:
                self.close()


params = parameters_string_to_dict(sys.argv[2])
mode = urllib.unquote_plus(params.get('mode', ''))
url = urllib.unquote_plus(params.get('url', ''))
thumb = urllib.unquote_plus(params.get('thumb', ''))
name = urllib.unquote_plus(params.get('name', ''))
season = urllib.unquote_plus(params.get('season', ''))
seriesID = urllib.unquote_plus(params.get('seriesID', ''))
type = urllib.unquote_plus(params.get('type', ''))

if mode == 'main':
    main(type)
elif mode == 'wiHome':
    wiHome(type)
elif mode == 'listVideos':
    listVideos(url, type)
elif mode == 'listSliderVideos':
    listSliderVideos(url, type)
elif mode == 'listSearchVideos':
    listSearchVideos(url, type)
elif mode == 'addToQueue':
    addToQueue(url)
elif mode == 'removeFromQueue':
    removeFromQueue(url)
elif mode == 'playVideo':
    playVideo(url)
elif mode == 'playVideoMain':
    playVideoMain(url)
elif mode == 'search':
    search(type)
elif mode == 'login':
    login()
elif mode == 'chooseProfile':
    chooseProfile()
elif mode == 'listGenres':
    listGenres(url, type)
elif mode == 'listTvGenres':
    listTvGenres(type)
elif mode == 'listViewingActivity':
    listViewingActivity(type)
elif mode == 'listSeasons':
    listSeasons(name, url, thumb)
elif mode == 'listEpisodes':
    listEpisodes(seriesID, url)
elif mode == 'configureUtility':
    configureUtility()
elif mode == 'chromePluginOptions':
    chromePluginOptions()
elif mode == 'deleteCookies':
    deleteCookies()
elif mode == 'deleteCache':
    deleteCache()
elif mode == 'deleteChromeUserData':
    deleteChromeUserDataFolder()
elif mode == 'resetAddon':
    resetAddon()
elif mode == 'playTrailer':
    playTrailer(url)
elif mode == 'addMyListToLibrary':
    addMyListToLibrary()
elif mode == 'addMovieToLibrary':
    addMovieToLibrary(url, name)
elif mode == 'addSeriesToLibrary':
    addSeriesToLibrary(seriesID, name, url)
elif mode == 'profileDisplayUpdate':
    profileDisplayUpdate()
else:
    index()

if trace_on:
    pydevd.stoptrace()
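# Illustration (added for clarity; not part of the original add-on): Kodi
# invokes this script with three arguments, which the dispatch block above
# consumes. The handle and IDs shown are made-up sample values:
#
#   sys.argv[0]  ->  "plugin://plugin.video.netflixbmc/"
#   sys.argv[1]  ->  "1"    (integer handle passed to xbmcplugin calls)
#   sys.argv[2]  ->  "?mode=listSeasons&url=70143836&thumb=..."
#
# so mode, url, etc. above come from parameters_string_to_dict(sys.argv[2]),
# with each value decoded via urllib.unquote_plus.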
gpl-2.0
8,642,612,235,448,023,000
44.307635
530
0.60971
false
jhseu/tensorflow
tensorflow/python/ops/summary_ops_v2.py
1
46626
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Operations to emit summaries.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import collections import functools import getpass import os import re import threading import time import six from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import summary_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.python.eager import context from tensorflow.python.eager import profiler as _profiler from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import smart_cond from tensorflow.python.framework import tensor_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import gen_summary_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import summary_op_util from tensorflow.python.platform import tf_logging as logging from tensorflow.python.training import training_util from tensorflow.python.util import deprecation from tensorflow.python.util import tf_contextlib from tensorflow.python.util.tf_export import tf_export # Name for graph collection of summary writer init ops, which is only exposed # as a legacy API for tf.contrib.summary in TF 1.x. _SUMMARY_WRITER_INIT_COLLECTION_NAME = "_SUMMARY_WRITER_V2" _EXPERIMENT_NAME_PATTERNS = re.compile(r"^[^\x00-\x1F<>]{0,256}$") _RUN_NAME_PATTERNS = re.compile(r"^[^\x00-\x1F<>]{0,512}$") _USER_NAME_PATTERNS = re.compile(r"^[a-z]([-a-z0-9]{0,29}[a-z0-9])?$", re.I) class _SummaryState(threading.local): def __init__(self): super(_SummaryState, self).__init__() self.is_recording = None # TODO(slebedev): why a separate flag for DS and is it on by default? self.is_recording_distribution_strategy = True self.writer = None self.step = None _summary_state = _SummaryState() def _should_record_summaries_internal(default_state): """Returns boolean Tensor if summaries should/shouldn't be recorded. Now the summary condition is decided by logical "and" of below conditions: First, summary writer must be set. Given this constraint is met, ctx.summary_recording and ctx.summary_recording_distribution_strategy. The former one is usually set by user, and the latter one is controlled by DistributionStrategy (tf.distribute.ReplicaContext). Args: default_state: can be True or False. The default summary behavior when summary writer is set and the user does not specify ctx.summary_recording and ctx.summary_recording_distribution_strategy is True. 
""" if _summary_state.writer is None: return constant_op.constant(False) resolve = lambda x: x() if callable(x) else x cond_distributed = resolve(_summary_state.is_recording_distribution_strategy) cond = resolve(_summary_state.is_recording) if cond is None: cond = default_state return math_ops.logical_and(cond_distributed, cond) def _should_record_summaries_v2(): """Returns boolean Tensor which is true if summaries should be recorded. If no recording status has been set, this defaults to True, unlike the public should_record_summaries(). """ return _should_record_summaries_internal(default_state=True) def should_record_summaries(): """Returns boolean Tensor which is true if summaries should be recorded.""" return _should_record_summaries_internal(default_state=False) @tf_export("summary.record_if", v1=[]) @tf_contextlib.contextmanager def record_if(condition): """Sets summary recording on or off per the provided boolean value. The provided value can be a python boolean, a scalar boolean Tensor, or or a callable providing such a value; if a callable is passed it will be invoked on-demand to determine whether summary writing will occur. Args: condition: can be True, False, a bool Tensor, or a callable providing such. Yields: Returns a context manager that sets this value on enter and restores the previous value on exit. """ old = _summary_state.is_recording try: _summary_state.is_recording = condition yield finally: _summary_state.is_recording = old # TODO(apassos) consider how to handle local step here. def record_summaries_every_n_global_steps(n, global_step=None): """Sets the should_record_summaries Tensor to true if global_step % n == 0.""" if global_step is None: global_step = training_util.get_or_create_global_step() with ops.device("cpu:0"): should = lambda: math_ops.equal(global_step % n, 0) if not context.executing_eagerly(): should = should() return record_if(should) def always_record_summaries(): """Sets the should_record_summaries Tensor to always true.""" return record_if(True) def never_record_summaries(): """Sets the should_record_summaries Tensor to always false.""" return record_if(False) @tf_export("summary.experimental.get_step", v1=[]) def get_step(): """Returns the default summary step for the current thread. Returns: The step set by `tf.summary.experimental.set_step()` if one has been set, otherwise None. """ return _summary_state.step @tf_export("summary.experimental.set_step", v1=[]) def set_step(step): """Sets the default summary step for the current thread. For convenience, this function sets a default value for the `step` parameter used in summary-writing functions elsewhere in the API so that it need not be explicitly passed in every such invocation. The value can be a constant or a variable, and can be retrieved via `tf.summary.experimental.get_step()`. Note: when using this with @tf.functions, the step value will be captured at the time the function is traced, so changes to the step outside the function will not be reflected inside the function unless using a `tf.Variable` step. Args: step: An `int64`-castable default step value, or None to unset. 
""" _summary_state.step = step @tf_export("summary.SummaryWriter", v1=[]) @six.add_metaclass(abc.ABCMeta) class SummaryWriter(object): """Interface representing a stateful summary writer object.""" @abc.abstractmethod def set_as_default(self): """Enables this summary writer for the current thread.""" raise NotImplementedError() @abc.abstractmethod @tf_contextlib.contextmanager def as_default(self): """Returns a context manager that enables summary writing.""" raise NotImplementedError() def init(self): """Initializes the summary writer.""" raise NotImplementedError() def flush(self): """Flushes any buffered data.""" raise NotImplementedError() def close(self): """Flushes and closes the summary writer.""" raise NotImplementedError() class ResourceSummaryWriter(SummaryWriter): """Implementation of SummaryWriter using a SummaryWriterInterface resource.""" def __init__(self, shared_name, init_op_fn, name=None, v2=False, metadata=None): self._resource = gen_summary_ops.summary_writer( shared_name=shared_name, name=name) # TODO(nickfelt): cache other constructed ops in graph mode self._init_op_fn = init_op_fn self._init_op = init_op_fn(self._resource) self._v2 = v2 self._metadata = {} if metadata is None else metadata self._closed = False if context.executing_eagerly(): self._resource_deleter = resource_variable_ops.EagerResourceDeleter( handle=self._resource, handle_device="cpu:0") else: ops.add_to_collection(_SUMMARY_WRITER_INIT_COLLECTION_NAME, self._init_op) def set_as_default(self): """Enables this summary writer for the current thread.""" if self._v2 and context.executing_eagerly() and self._closed: raise RuntimeError("SummaryWriter is already closed") _summary_state.writer = self @tf_contextlib.contextmanager def as_default(self): """Returns a context manager that enables summary writing.""" if self._v2 and context.executing_eagerly() and self._closed: raise RuntimeError("SummaryWriter is already closed") old = _summary_state.writer try: _summary_state.writer = self yield self # Flushes the summary writer in eager mode or in graph functions, but # not in legacy graph mode (you're on your own there). self.flush() finally: _summary_state.writer = old def init(self): """Initializes the summary writer.""" if self._v2: if context.executing_eagerly() and self._closed: raise RuntimeError("SummaryWriter is already closed") return self._init_op # Legacy behavior allows re-initializing the resource. return self._init_op_fn(self._resource) def flush(self): """Flushes any buffered data.""" if self._v2 and context.executing_eagerly() and self._closed: return return _flush_fn(writer=self) def close(self): """Flushes and closes the summary writer.""" if self._v2 and context.executing_eagerly() and self._closed: return try: with ops.control_dependencies([self.flush()]): with ops.device("cpu:0"): return gen_summary_ops.close_summary_writer(self._resource) finally: if self._v2 and context.executing_eagerly(): self._closed = True class NoopSummaryWriter(SummaryWriter): """A summary writer that does nothing, for create_noop_writer().""" def set_as_default(self): pass @tf_contextlib.contextmanager def as_default(self): yield def init(self): pass def flush(self): pass def close(self): pass @tf_export(v1=["summary.initialize"]) def initialize( graph=None, # pylint: disable=redefined-outer-name session=None): """Initializes summary writing for graph execution mode. This operation is a no-op when executing eagerly. 
This helper method provides a higher-level alternative to using `tf.contrib.summary.summary_writer_initializer_op` and `tf.contrib.summary.graph`. Most users will also want to call `tf.compat.v1.train.create_global_step` which can happen before or after this function is called. Args: graph: A `tf.Graph` or `tf.compat.v1.GraphDef` to output to the writer. This function will not write the default graph by default. When writing to an event log file, the associated step will be zero. session: So this method can call `tf.Session.run`. This defaults to `tf.compat.v1.get_default_session`. Raises: RuntimeError: If the current thread has no default `tf.contrib.summary.SummaryWriter`. ValueError: If session wasn't passed and no default session. """ if context.executing_eagerly(): return if _summary_state.writer is None: raise RuntimeError("No default tf.contrib.summary.SummaryWriter found") if session is None: session = ops.get_default_session() if session is None: raise ValueError("session must be passed if no default session exists") session.run(summary_writer_initializer_op()) if graph is not None: data = _serialize_graph(graph) x = array_ops.placeholder(dtypes.string) session.run(_graph(x, 0), feed_dict={x: data}) @tf_export("summary.create_file_writer", v1=[]) def create_file_writer_v2(logdir, max_queue=None, flush_millis=None, filename_suffix=None, name=None): """Creates a summary file writer for the given log directory. Args: logdir: a string specifying the directory in which to write an event file. max_queue: the largest number of summaries to keep in a queue; will flush once the queue gets bigger than this. Defaults to 10. flush_millis: the largest interval between flushes. Defaults to 120,000. filename_suffix: optional suffix for the event file name. Defaults to `.v2`. name: a name for the op that creates the writer. Returns: A SummaryWriter object. """ if logdir is None: raise ValueError("logdir cannot be None") inside_function = ops.inside_function() with ops.name_scope(name, "create_file_writer") as scope, ops.device("cpu:0"): # Run init inside an init_scope() to hoist it out of tf.functions. with ops.init_scope(): if context.executing_eagerly(): _check_create_file_writer_args( inside_function, logdir=logdir, max_queue=max_queue, flush_millis=flush_millis, filename_suffix=filename_suffix) logdir = ops.convert_to_tensor(logdir, dtype=dtypes.string) if max_queue is None: max_queue = constant_op.constant(10) if flush_millis is None: flush_millis = constant_op.constant(2 * 60 * 1000) if filename_suffix is None: filename_suffix = constant_op.constant(".v2") # Prepend the PID and a process-local UID to the filename suffix to avoid # filename collisions within the machine (the filename already contains # the hostname to avoid cross-machine collisions). unique_prefix = constant_op.constant(".%s.%s" % (os.getpid(), ops.uid())) filename_suffix = unique_prefix + filename_suffix # Use a unique shared_name to prevent resource sharing. 
if context.executing_eagerly(): shared_name = context.shared_name() else: shared_name = ops.name_from_scope_name(scope) # pylint: disable=protected-access return ResourceSummaryWriter( shared_name=shared_name, init_op_fn=functools.partial( gen_summary_ops.create_summary_file_writer, logdir=logdir, max_queue=max_queue, flush_millis=flush_millis, filename_suffix=filename_suffix), name=name, v2=True, metadata={"logdir": logdir}) def create_file_writer(logdir, max_queue=None, flush_millis=None, filename_suffix=None, name=None): """Creates a summary file writer in the current context under the given name. Args: logdir: a string, or None. If a string, creates a summary file writer which writes to the directory named by the string. If None, returns a mock object which acts like a summary writer but does nothing, useful to use as a context manager. max_queue: the largest number of summaries to keep in a queue; will flush once the queue gets bigger than this. Defaults to 10. flush_millis: the largest interval between flushes. Defaults to 120,000. filename_suffix: optional suffix for the event file name. Defaults to `.v2`. name: Shared name for this SummaryWriter resource stored to default Graph. Defaults to the provided logdir prefixed with `logdir:`. Note: if a summary writer resource with this shared name already exists, the returned SummaryWriter wraps that resource and the other arguments have no effect. Returns: Either a summary writer or an empty object which can be used as a summary writer. """ if logdir is None: return NoopSummaryWriter() logdir = str(logdir) with ops.device("cpu:0"): if max_queue is None: max_queue = constant_op.constant(10) if flush_millis is None: flush_millis = constant_op.constant(2 * 60 * 1000) if filename_suffix is None: filename_suffix = constant_op.constant(".v2") if name is None: name = "logdir:" + logdir return ResourceSummaryWriter( shared_name=name, init_op_fn=functools.partial( gen_summary_ops.create_summary_file_writer, logdir=logdir, max_queue=max_queue, flush_millis=flush_millis, filename_suffix=filename_suffix)) def create_db_writer(db_uri, experiment_name=None, run_name=None, user_name=None, name=None): """Creates a summary database writer in the current context. This can be used to write tensors from the execution graph directly to a database. Only SQLite is supported right now. This function will create the schema if it doesn't exist. Entries in the Users, Experiments, and Runs tables will be created automatically if they don't already exist. Args: db_uri: For example "file:/tmp/foo.sqlite". experiment_name: Defaults to YYYY-MM-DD in local time if None. Empty string means the Run will not be associated with an Experiment. Can't contain ASCII control characters or <>. Case sensitive. run_name: Defaults to HH:MM:SS in local time if None. Empty string means a Tag will not be associated with any Run. Can't contain ASCII control characters or <>. Case sensitive. user_name: Defaults to system username if None. Empty means the Experiment will not be associated with a User. Must be valid as both a DNS label and Linux username. name: Shared name for this SummaryWriter resource stored to default `tf.Graph`. Returns: A `tf.summary.SummaryWriter` instance. 
""" with ops.device("cpu:0"): if experiment_name is None: experiment_name = time.strftime("%Y-%m-%d", time.localtime(time.time())) if run_name is None: run_name = time.strftime("%H:%M:%S", time.localtime(time.time())) if user_name is None: user_name = getpass.getuser() experiment_name = _cleanse_string( "experiment_name", _EXPERIMENT_NAME_PATTERNS, experiment_name) run_name = _cleanse_string("run_name", _RUN_NAME_PATTERNS, run_name) user_name = _cleanse_string("user_name", _USER_NAME_PATTERNS, user_name) return ResourceSummaryWriter( shared_name=name, init_op_fn=functools.partial( gen_summary_ops.create_summary_db_writer, db_uri=db_uri, experiment_name=experiment_name, run_name=run_name, user_name=user_name)) @tf_export("summary.create_noop_writer", v1=[]) def create_noop_writer(): """Returns a summary writer that does nothing. This is useful as a placeholder in code that expects a context manager. """ return NoopSummaryWriter() def _cleanse_string(name, pattern, value): if isinstance(value, six.string_types) and pattern.search(value) is None: raise ValueError("%s (%s) must match %s" % (name, value, pattern.pattern)) return ops.convert_to_tensor(value, dtypes.string) def _nothing(): """Convenient else branch for when summaries do not record.""" return constant_op.constant(False) @tf_export(v1=["summary.all_v2_summary_ops"]) def all_v2_summary_ops(): """Returns all V2-style summary ops defined in the current default graph. This includes ops from TF 2.0 tf.summary and TF 1.x tf.contrib.summary (except for `tf.contrib.summary.graph` and `tf.contrib.summary.import_event`), but does *not* include TF 1.x tf.summary ops. Returns: List of summary ops, or None if called under eager execution. """ if context.executing_eagerly(): return None return ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION) # pylint: disable=protected-access def summary_writer_initializer_op(): """Graph-mode only. Returns the list of ops to create all summary writers. Returns: The initializer ops. Raises: RuntimeError: If in Eager mode. """ if context.executing_eagerly(): raise RuntimeError( "tf.contrib.summary.summary_writer_initializer_op is only " "supported in graph mode.") return ops.get_collection(_SUMMARY_WRITER_INIT_COLLECTION_NAME) _INVALID_SCOPE_CHARACTERS = re.compile(r"[^-_/.A-Za-z0-9]") @tf_export("summary.experimental.summary_scope", v1=[]) @tf_contextlib.contextmanager def summary_scope(name, default_name="summary", values=None): """Experimental context manager for use when defining a custom summary op. This behaves similarly to `tf.name_scope`, except that it returns a generated summary tag in addition to the scope name. The tag is structurally similar to the scope name - derived from the user-provided name, prefixed with enclosing name scopes if any - but we relax the constraint that it be uniquified, as well as the character set limitation (so the user-provided name can contain characters not legal for scope names; in the scope name these are removed). This makes the summary tag more predictable and consistent for the user. For example, to define a new summary op called `my_op`: ```python def my_op(name, my_value, step): with tf.summary.summary_scope(name, "MyOp", [my_value]) as (tag, scope): my_value = tf.convert_to_tensor(my_value) return tf.summary.write(tag, my_value, step=step) ``` Args: name: string name for the summary. default_name: Optional; if provided, used as default name of the summary. values: Optional; passed as `values` parameter to name_scope. 
Yields: A tuple `(tag, scope)` as described above. """ name = name or default_name current_scope = ops.get_name_scope() tag = current_scope + "/" + name if current_scope else name # Strip illegal characters from the scope name, and if that leaves nothing, # use None instead so we pick up the default name. name = _INVALID_SCOPE_CHARACTERS.sub("", name) or None with ops.name_scope(name, default_name, values, skip_on_eager=False) as scope: yield tag, scope @tf_export("summary.write", v1=[]) def write(tag, tensor, step=None, metadata=None, name=None): """Writes a generic summary to the default SummaryWriter if one exists. This exists primarily to support the definition of type-specific summary ops like scalar() and image(), and is not intended for direct use unless defining a new type-specific summary op. Args: tag: string tag used to identify the summary (e.g. in TensorBoard), usually generated with `tf.summary.summary_scope` tensor: the Tensor holding the summary data to write or a callable that returns this Tensor. If a callable is passed, it will only be called when a default SummaryWriter exists and the recording condition specified by `record_if()` is met. step: Explicit `int64`-castable monotonic step value for this summary. If omitted, this defaults to `tf.summary.experimental.get_step()`, which must not be None. metadata: Optional SummaryMetadata, as a proto or serialized bytes name: Optional string name for this op. Returns: True on success, or false if no summary was written because no default summary writer was available. Raises: ValueError: if a default writer exists, but no step was provided and `tf.summary.experimental.get_step()` is None. """ with ops.name_scope(name, "write_summary") as scope: if _summary_state.writer is None: return constant_op.constant(False) if step is None: step = get_step() if step is None: raise ValueError("No step set via 'step' argument or " "tf.summary.experimental.set_step()") if metadata is None: serialized_metadata = b"" elif hasattr(metadata, "SerializeToString"): serialized_metadata = metadata.SerializeToString() else: serialized_metadata = metadata def record(): """Record the actual summary and return True.""" # Note the identity to move the tensor to the CPU. with ops.device("cpu:0"): summary_tensor = tensor() if callable(tensor) else array_ops.identity( tensor) write_summary_op = gen_summary_ops.write_summary( _summary_state.writer._resource, # pylint: disable=protected-access step, summary_tensor, tag, serialized_metadata, name=scope) with ops.control_dependencies([write_summary_op]): return constant_op.constant(True) op = smart_cond.smart_cond( _should_record_summaries_v2(), record, _nothing, name="summary_cond") if not context.executing_eagerly(): ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op) # pylint: disable=protected-access return op @tf_export("summary.experimental.write_raw_pb", v1=[]) def write_raw_pb(tensor, step=None, name=None): """Writes a summary using raw `tf.compat.v1.Summary` protocol buffers. Experimental: this exists to support the usage of V1-style manual summary writing (via the construction of a `tf.compat.v1.Summary` protocol buffer) with the V2 summary writing API. Args: tensor: the string Tensor holding one or more serialized `Summary` protobufs step: Explicit `int64`-castable monotonic step value for this summary. If omitted, this defaults to `tf.summary.experimental.get_step()`, which must not be None. name: Optional string name for this op. 
Returns: True on success, or false if no summary was written because no default summary writer was available. Raises: ValueError: if a default writer exists, but no step was provided and `tf.summary.experimental.get_step()` is None. """ with ops.name_scope(name, "write_raw_pb") as scope: if _summary_state.writer is None: return constant_op.constant(False) if step is None: step = get_step() if step is None: raise ValueError("No step set via 'step' argument or " "tf.summary.experimental.set_step()") def record(): """Record the actual summary and return True.""" # Note the identity to move the tensor to the CPU. with ops.device("cpu:0"): raw_summary_op = gen_summary_ops.write_raw_proto_summary( _summary_state.writer._resource, # pylint: disable=protected-access step, array_ops.identity(tensor), name=scope) with ops.control_dependencies([raw_summary_op]): return constant_op.constant(True) with ops.device("cpu:0"): op = smart_cond.smart_cond( _should_record_summaries_v2(), record, _nothing, name="summary_cond") if not context.executing_eagerly(): ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op) # pylint: disable=protected-access return op def summary_writer_function(name, tensor, function, family=None): """Helper function to write summaries. Args: name: name of the summary tensor: main tensor to form the summary function: function taking a tag and a scope which writes the summary family: optional, the summary's family Returns: The result of writing the summary. """ name_scope = ops.get_name_scope() if name_scope: # Add a slash to allow reentering the name scope. name_scope += "/" def record(): with ops.name_scope(name_scope), summary_op_util.summary_scope( name, family, values=[tensor]) as (tag, scope): with ops.control_dependencies([function(tag, scope)]): return constant_op.constant(True) if _summary_state.writer is None: return control_flow_ops.no_op() with ops.device("cpu:0"): op = smart_cond.smart_cond( should_record_summaries(), record, _nothing, name="") if not context.executing_eagerly(): ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op) # pylint: disable=protected-access return op def generic(name, tensor, metadata=None, family=None, step=None): """Writes a tensor summary if possible.""" def function(tag, scope): if metadata is None: serialized_metadata = constant_op.constant("") elif hasattr(metadata, "SerializeToString"): serialized_metadata = constant_op.constant(metadata.SerializeToString()) else: serialized_metadata = metadata # Note the identity to move the tensor to the CPU. return gen_summary_ops.write_summary( _summary_state.writer._resource, # pylint: disable=protected-access _choose_step(step), array_ops.identity(tensor), tag, serialized_metadata, name=scope) return summary_writer_function(name, tensor, function, family=family) def scalar(name, tensor, family=None, step=None): """Writes a scalar summary if possible. Unlike `tf.contrib.summary.generic` this op may change the dtype depending on the writer, for both practical and efficiency concerns. Args: name: An arbitrary name for this summary. tensor: A `tf.Tensor` Must be one of the following types: `float32`, `float64`, `int32`, `int64`, `uint8`, `int16`, `int8`, `uint16`, `half`, `uint32`, `uint64`. family: Optional, the summary's family. step: The `int64` monotonic step variable, which defaults to `tf.compat.v1.train.get_global_step`. Returns: The created `tf.Operation` or a `tf.no_op` if summary writing has not been enabled for this context. 
""" def function(tag, scope): # Note the identity to move the tensor to the CPU. return gen_summary_ops.write_scalar_summary( _summary_state.writer._resource, # pylint: disable=protected-access _choose_step(step), tag, array_ops.identity(tensor), name=scope) return summary_writer_function(name, tensor, function, family=family) def histogram(name, tensor, family=None, step=None): """Writes a histogram summary if possible.""" def function(tag, scope): # Note the identity to move the tensor to the CPU. return gen_summary_ops.write_histogram_summary( _summary_state.writer._resource, # pylint: disable=protected-access _choose_step(step), tag, array_ops.identity(tensor), name=scope) return summary_writer_function(name, tensor, function, family=family) def image(name, tensor, bad_color=None, max_images=3, family=None, step=None): """Writes an image summary if possible.""" def function(tag, scope): bad_color_ = (constant_op.constant([255, 0, 0, 255], dtype=dtypes.uint8) if bad_color is None else bad_color) # Note the identity to move the tensor to the CPU. return gen_summary_ops.write_image_summary( _summary_state.writer._resource, # pylint: disable=protected-access _choose_step(step), tag, array_ops.identity(tensor), bad_color_, max_images, name=scope) return summary_writer_function(name, tensor, function, family=family) def audio(name, tensor, sample_rate, max_outputs, family=None, step=None): """Writes an audio summary if possible.""" def function(tag, scope): # Note the identity to move the tensor to the CPU. return gen_summary_ops.write_audio_summary( _summary_state.writer._resource, # pylint: disable=protected-access _choose_step(step), tag, array_ops.identity(tensor), sample_rate=sample_rate, max_outputs=max_outputs, name=scope) return summary_writer_function(name, tensor, function, family=family) def graph(param, step=None, name=None): """Writes a TensorFlow graph to the summary interface. The graph summary is, strictly speaking, not a summary. Conditions like `tf.summary.should_record_summaries` do not apply. Only a single graph can be associated with a particular run. If multiple graphs are written, then only the last one will be considered by TensorBoard. When not using eager execution mode, the user should consider passing the `graph` parameter to `tf.compat.v1.summary.initialize` instead of calling this function. Otherwise special care needs to be taken when using the graph to record the graph. Args: param: A `tf.Tensor` containing a serialized graph proto. When eager execution is enabled, this function will automatically coerce `tf.Graph`, `tf.compat.v1.GraphDef`, and string types. step: The global step variable. This doesn't have useful semantics for graph summaries, but is used anyway, due to the structure of event log files. This defaults to the global step. name: A name for the operation (optional). Returns: The created `tf.Operation` or a `tf.no_op` if summary writing has not been enabled for this context. Raises: TypeError: If `param` isn't already a `tf.Tensor` in graph mode. """ if not context.executing_eagerly() and not isinstance(param, ops.Tensor): raise TypeError("graph() needs a tf.Tensor (e.g. 
tf.placeholder) in graph " "mode, but was: %s" % type(param)) writer = _summary_state.writer if writer is None: return control_flow_ops.no_op() with ops.device("cpu:0"): if isinstance(param, (ops.Graph, graph_pb2.GraphDef)): tensor = ops.convert_to_tensor(_serialize_graph(param), dtypes.string) else: tensor = array_ops.identity(param) return gen_summary_ops.write_graph_summary( writer._resource, _choose_step(step), tensor, name=name) # pylint: disable=protected-access _graph = graph # for functions with a graph parameter def import_event(tensor, name=None): """Writes a `tf.compat.v1.Event` binary proto. This can be used to import existing event logs into a new summary writer sink. Please note that this is lower level than the other summary functions and will ignore the `tf.summary.should_record_summaries` setting. Args: tensor: A `tf.Tensor` of type `string` containing a serialized `tf.compat.v1.Event` proto. name: A name for the operation (optional). Returns: The created `tf.Operation`. """ return gen_summary_ops.import_event( _summary_state.writer._resource, tensor, name=name) # pylint: disable=protected-access @tf_export("summary.flush", v1=[]) def flush(writer=None, name=None): """Forces summary writer to send any buffered data to storage. This operation blocks until that finishes. Args: writer: The `tf.summary.SummaryWriter` resource to flush. The thread default will be used if this parameter is None. Otherwise a `tf.no_op` is returned. name: A name for the operation (optional). Returns: The created `tf.Operation`. """ if writer is None: writer = _summary_state.writer if writer is None: return control_flow_ops.no_op() if isinstance(writer, ResourceSummaryWriter): resource = writer._resource # pylint: disable=protected-access else: # Assume we were passed a raw resource tensor. resource = writer with ops.device("cpu:0"): return gen_summary_ops.flush_summary_writer(resource, name=name) _flush_fn = flush # for within SummaryWriter.flush() def eval_dir(model_dir, name=None): """Construct a logdir for an eval summary writer.""" return os.path.join(model_dir, "eval" if not name else "eval_" + name) @deprecation.deprecated(date=None, instructions="Renamed to create_file_writer().") def create_summary_file_writer(*args, **kwargs): """Please use `tf.contrib.summary.create_file_writer`.""" logging.warning("Deprecation Warning: create_summary_file_writer was renamed " "to create_file_writer") return create_file_writer(*args, **kwargs) def _serialize_graph(arbitrary_graph): if isinstance(arbitrary_graph, ops.Graph): return arbitrary_graph.as_graph_def(add_shapes=True).SerializeToString() else: return arbitrary_graph.SerializeToString() def _choose_step(step): if step is None: return training_util.get_or_create_global_step() if not isinstance(step, ops.Tensor): return ops.convert_to_tensor(step, dtypes.int64) return step def _check_create_file_writer_args(inside_function, **kwargs): """Helper to check the validity of arguments to a create_file_writer() call. Args: inside_function: whether the create_file_writer() call is in a tf.function **kwargs: the arguments to check, as kwargs to give them names. Raises: ValueError: if the arguments are graph tensors. """ for arg_name, arg in kwargs.items(): if not isinstance(arg, ops.EagerTensor) and tensor_util.is_tensor(arg): if inside_function: raise ValueError( "Invalid graph Tensor argument \"%s=%s\" to create_file_writer() " "inside an @tf.function. 
The create call will be lifted into the " "outer eager execution context, so it cannot consume graph tensors " "defined inside the function body." % (arg_name, arg)) else: raise ValueError( "Invalid graph Tensor argument \"%s=%s\" to eagerly executed " "create_file_writer()." % (arg_name, arg)) def run_metadata(name, data, step=None): """Writes entire RunMetadata summary. A RunMetadata can contain DeviceStats, partition graphs, and function graphs. Please refer to the proto for definition of each field. Args: name: A name for this summary. The summary tag used for TensorBoard will be this name prefixed by any active name scopes. data: A RunMetadata proto to write. step: Explicit `int64`-castable monotonic step value for this summary. If omitted, this defaults to `tf.summary.experimental.get_step()`, which must not be None. Returns: True on success, or false if no summary was written because no default summary writer was available. Raises: ValueError: if a default writer exists, but no step was provided and `tf.summary.experimental.get_step()` is None. """ summary_metadata = summary_pb2.SummaryMetadata() # Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for # the rationale. summary_metadata.plugin_data.plugin_name = "graph_run_metadata" # version number = 1 summary_metadata.plugin_data.content = b"1" with summary_scope(name, "graph_run_metadata_summary", [data, step]) as (tag, _): with ops.device("cpu:0"): tensor = constant_op.constant(data.SerializeToString(), dtype=dtypes.string) return write( tag=tag, tensor=tensor, step=step, metadata=summary_metadata) def run_metadata_graphs(name, data, step=None): """Writes graphs from a RunMetadata summary. Args: name: A name for this summary. The summary tag used for TensorBoard will be this name prefixed by any active name scopes. data: A RunMetadata proto to write. step: Explicit `int64`-castable monotonic step value for this summary. If omitted, this defaults to `tf.summary.experimental.get_step()`, which must not be None. Returns: True on success, or false if no summary was written because no default summary writer was available. Raises: ValueError: if a default writer exists, but no step was provided and `tf.summary.experimental.get_step()` is None. """ summary_metadata = summary_pb2.SummaryMetadata() # Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for # the rationale. summary_metadata.plugin_data.plugin_name = "graph_run_metadata_graph" # version number = 1 summary_metadata.plugin_data.content = b"1" data = config_pb2.RunMetadata( function_graphs=data.function_graphs, partition_graphs=data.partition_graphs) with summary_scope(name, "graph_run_metadata_graph_summary", [data, step]) as (tag, _): with ops.device("cpu:0"): tensor = constant_op.constant(data.SerializeToString(), dtype=dtypes.string) return write( tag=tag, tensor=tensor, step=step, metadata=summary_metadata) def keras_model(name, data, step=None): """Writes a Keras model as JSON to as a Summary. Writing the Keras model configuration allows the TensorBoard graph plugin to render a conceptual graph, as opposed to graph of ops. In case the model fails to serialze as JSON, it ignores and returns False. Args: name: A name for this summary. The summary tag used for TensorBoard will be this name prefixed by any active name scopes. data: A Keras Model to write. step: Explicit `int64`-castable monotonic step value for this summary. If omitted, this defaults to `tf.summary.experimental.get_step()`, which must not be None. 
Returns: True on success, or False if no summary was written because no default summary writer was available. Raises: ValueError: if a default writer exists, but no step was provided and `tf.summary.experimental.get_step()` is None. """ summary_metadata = summary_pb2.SummaryMetadata() # Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for # the rationale. summary_metadata.plugin_data.plugin_name = "graph_keras_model" # version number = 1 summary_metadata.plugin_data.content = b"1" try: json_string = data.to_json() except Exception as exc: # pylint: disable=broad-except # An exception should not break a model code. logging.warn("Model failed to serialize as JSON. Ignoring... %s" % exc) return False with summary_scope(name, "graph_keras_model", [data, step]) as (tag, _): with ops.device("cpu:0"): tensor = constant_op.constant(json_string, dtype=dtypes.string) return write( tag=tag, tensor=tensor, step=step, metadata=summary_metadata) _TraceContext = collections.namedtuple("TraceContext", ("graph", "profiler")) _current_trace_context_lock = threading.Lock() _current_trace_context = None @tf_export("summary.trace_on", v1=[]) def trace_on(graph=True, profiler=False): # pylint: disable=redefined-outer-name """Starts a trace to record computation graphs and profiling information. Must be invoked in eager mode. When enabled, TensorFlow runtime will collection information that can later be exported and consumed by TensorBoard. The trace is activated across the entire TensorFlow runtime and affects all threads of execution. To stop the trace and export the collected information, use `tf.summary.trace_export`. To stop the trace without exporting, use `tf.summary.trace_off`. Args: graph: If True, enables collection of executed graphs. It includes ones from tf.function invocation and ones from the legacy graph mode. The default is True. profiler: If True, enables the advanced profiler. Enabling profiler implicitly enables the graph collection. The profiler may incur a high memory overhead. The default is False. """ if ops.inside_function(): logging.warn("Cannot enable trace inside a tf.function.") return if not context.context().executing_eagerly(): logging.warn("Must enable trace in eager mode.") return global _current_trace_context with _current_trace_context_lock: if _current_trace_context: logging.warn("Trace already enabled") return if graph and not profiler: context.context().enable_graph_collection() if profiler: context.context().enable_run_metadata() _profiler.start() _current_trace_context = _TraceContext(graph=graph, profiler=profiler) @tf_export("summary.trace_export", v1=[]) def trace_export(name, step=None, profiler_outdir=None): """Stops and exports the active trace as a Summary and/or profile file. Stops the trace and exports all metadata collected during the trace to the default SummaryWriter, if one has been set. Args: name: A name for the summary to be written. step: Explicit `int64`-castable monotonic step value for this summary. If omitted, this defaults to `tf.summary.experimental.get_step()`, which must not be None. profiler_outdir: Output directory for profiler. This is only used when the profiler was enabled when the trace was started. In that case, if there is a logdir-based default SummaryWriter, this defaults to the same directory, but otherwise the argument must be passed. Raises: ValueError: if a default writer exists, but no step was provided and `tf.summary.experimental.get_step()` is None. 
""" global _current_trace_context if ops.inside_function(): logging.warn("Cannot export trace inside a tf.function.") return if not context.context().executing_eagerly(): logging.warn("Can only export trace while executing eagerly.") return with _current_trace_context_lock: if _current_trace_context is None: raise ValueError("Must enable trace before export.") graph, profiler = _current_trace_context # pylint: disable=redefined-outer-name if profiler_outdir is None \ and isinstance(_summary_state.writer, ResourceSummaryWriter): logdir = _summary_state.writer._metadata.get("logdir") # pylint: disable=protected-access if logdir is not None: profiler_outdir = logdir if profiler and profiler_outdir is None: raise ValueError("Must set profiler_outdir or " "enable summary writer with logdir.") run_meta = context.context().export_run_metadata() if graph and not profiler: run_metadata_graphs(name, run_meta, step) else: run_metadata(name, run_meta, step) if profiler: _profiler.save(profiler_outdir, _profiler.stop()) trace_off() @tf_export("summary.trace_off", v1=[]) def trace_off(): """Stops the current trace and discards any collected information.""" global _current_trace_context with _current_trace_context_lock: _current_trace_context = None # Disabling run_metadata disables graph collection as well. context.context().disable_run_metadata() # profiler only has start and stop. One needs to stop in order to export # and stopping when it is not running will raise an error. try: _profiler.stop() except _profiler.ProfilerNotRunningError: pass
apache-2.0
2,818,230,846,669,279,700
35.626866
104
0.690816
false
mldbai/mldb
testing/MLDB-1116-tokensplit.py
1
1989
# -*- coding: utf-8 -*-
#
# MLDB-1116-tokensplit.py
# Mathieu Marquis Bolduc, 2015-11-24
# This file is part of MLDB. Copyright 2015 mldb.ai inc. All rights reserved.
#

from mldb import mldb

ds1 = mldb.create_dataset({
    'type': 'sparse.mutable',
    'id': 'example'
})

# create the dataset
ds1.record_row('1', [['x', ":)", 0]])
ds1.record_row('2', [['x', ":P", 0]])
ds1.record_row('3', [['x', "(>_<)", 0]])
ds1.record_row('4', [['x', "(ノಠ益ಠ)ノ彡┻━┻", 0]])
ds1.record_row('5', [['x', "¯\_(ツ)_/¯", 0]])
ds1.record_row('6', [['x', "¯\_(ツ)_/¯¯¯¯¯¯", 0]])
ds1.commit()

result = mldb.put('/v1/functions/tokensplit_function', {
    'type': 'tokensplit',
    'params': {"tokens": "select * from example"}
})
mldb.log(result)

test_str = str(
    "whatever :P I do what ¯\_(ツ)_/¯¯¯¯¯¯ I want (>_<) (>_<) watwat :P "
    "(ノಠ益ಠ)ノ彡┻━┻ grrrr :P :P :P")

result = mldb.get(
    '/v1/query',
    q="select tokensplit_function({'" + test_str + "' as text}) as query")
mldb.log(result)

response = result.json()
assert response[0]['columns'][0][1] == test_str

test_str = str("aaahhhhh ¯\_(ツ)_/¯")

result = mldb.get(
    '/v1/query',
    q="select tokensplit_function({'" + test_str + "' as text}) as query")
mldb.log(result)

response = result.json()
assert response[0]['columns'][0][1] == test_str

query = "'test'"

config = {
    'type': 'tokensplit',
    'params': {
        'tokens': "select ':P', '(>_<)', ':-)'",
        'splitChars': ' ',  # split on spaces only
        'splitCharToInsert': ' '
    }
}

result = mldb.put('/v1/functions/split_smiley', config)
mldb.log(result)

test_str = str(":P Great day!!! (>_<) (>_<) :P :P :P :-)")

result = mldb.get(
    '/v1/query',
    q="select split_smiley({'" + test_str + "' as text}) as query")
response = result.json()
mldb.log(response)

assert response[0]['columns'][0][1] == test_str, \
    'tokenized string does not match the expected value'

request.set_return("success")
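# Illustrative sketch (added for clarity; not part of the original test): a
# rough pure-Python approximation of the 'tokensplit' behaviour exercised
# above with splitChars=' ' and splitCharToInsert=' '. The function pads each
# known token with the split character so it stands alone, which is why
# inputs that are already space-separated round-trip unchanged in the asserts
# above. This is an approximation of the observed behaviour, not MLDB's
# actual implementation.
def _approximate_tokensplit(text, tokens, split_char=' '):
    for token in sorted(tokens, key=len, reverse=True):  # longest match first
        text = text.replace(token, split_char + token + split_char)
    while split_char * 2 in text:  # collapse doubled separators
        text = text.replace(split_char * 2, split_char)
    return text.strip(split_char)

# An already-separated input comes back unchanged:
assert _approximate_tokensplit(":P Great day!!! :-)", [':P', ':-)']) == \
    ":P Great day!!! :-)"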
apache-2.0
-4,337,887,372,384,549,400
23.0875
77
0.563051
false
forkbong/qutebrowser
qutebrowser/config/websettings.py
1
9471
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2014-2021 Florian Bruhin (The Compiler) <[email protected]> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. """Bridge from QWeb(Engine)Settings to our own settings.""" import re import argparse import functools import dataclasses from typing import Any, Callable, Dict, Optional from PyQt5.QtCore import QUrl, pyqtSlot, qVersion from PyQt5.QtGui import QFont import qutebrowser from qutebrowser.config import config from qutebrowser.utils import usertypes, urlmatch, qtutils, utils from qutebrowser.misc import objects, debugcachestats UNSET = object() @dataclasses.dataclass class UserAgent: """A parsed user agent.""" os_info: str webkit_version: str upstream_browser_key: str upstream_browser_version: str qt_key: str @classmethod def parse(cls, ua: str) -> 'UserAgent': """Parse a user agent string into its components.""" comment_matches = re.finditer(r'\(([^)]*)\)', ua) os_info = list(comment_matches)[0].group(1) version_matches = re.finditer(r'(\S+)/(\S+)', ua) versions = {} for match in version_matches: versions[match.group(1)] = match.group(2) webkit_version = versions['AppleWebKit'] if 'Chrome' in versions: upstream_browser_key = 'Chrome' qt_key = 'QtWebEngine' elif 'Version' in versions: upstream_browser_key = 'Version' qt_key = 'Qt' else: raise ValueError("Invalid upstream browser key: {}".format(ua)) upstream_browser_version = versions[upstream_browser_key] return cls(os_info=os_info, webkit_version=webkit_version, upstream_browser_key=upstream_browser_key, upstream_browser_version=upstream_browser_version, qt_key=qt_key) class AttributeInfo: """Info about a settings attribute.""" def __init__(self, *attributes: Any, converter: Callable = None) -> None: self.attributes = attributes if converter is None: self.converter = lambda val: val else: self.converter = converter class AbstractSettings: """Abstract base class for settings set via QWeb(Engine)Settings.""" _ATTRIBUTES: Dict[str, AttributeInfo] = {} _FONT_SIZES: Dict[str, Any] = {} _FONT_FAMILIES: Dict[str, Any] = {} _FONT_TO_QFONT: Dict[Any, QFont.StyleHint] = {} def __init__(self, settings: Any) -> None: self._settings = settings def _assert_not_unset(self, value: Any) -> None: assert value is not usertypes.UNSET def set_attribute(self, name: str, value: Any) -> None: """Set the given QWebSettings/QWebEngineSettings attribute. If the value is usertypes.UNSET, the value is reset instead. """ info = self._ATTRIBUTES[name] for attribute in info.attributes: if value is usertypes.UNSET: self._settings.resetAttribute(attribute) else: self._settings.setAttribute(attribute, info.converter(value)) def test_attribute(self, name: str) -> bool: """Get the value for the given attribute. If the setting resolves to a list of attributes, only the first attribute is tested. 
""" info = self._ATTRIBUTES[name] return self._settings.testAttribute(info.attributes[0]) def set_font_size(self, name: str, value: int) -> None: """Set the given QWebSettings/QWebEngineSettings font size.""" self._assert_not_unset(value) family = self._FONT_SIZES[name] self._settings.setFontSize(family, value) def set_font_family(self, name: str, value: Optional[str]) -> None: """Set the given QWebSettings/QWebEngineSettings font family. With None (the default), QFont is used to get the default font for the family. """ self._assert_not_unset(value) family = self._FONT_FAMILIES[name] if value is None: font = QFont() font.setStyleHint(self._FONT_TO_QFONT[family]) value = font.defaultFamily() self._settings.setFontFamily(family, value) def set_default_text_encoding(self, encoding: str) -> None: """Set the default text encoding to use.""" self._assert_not_unset(encoding) self._settings.setDefaultTextEncoding(encoding) def _update_setting(self, setting: str, value: Any) -> bool: """Update the given setting/value. Unknown settings are ignored. Return: True if there was a change, False otherwise. """ if setting in self._ATTRIBUTES: self.set_attribute(setting, value) elif setting in self._FONT_SIZES: self.set_font_size(setting, value) elif setting in self._FONT_FAMILIES: self.set_font_family(setting, value) elif setting == 'content.default_encoding': self.set_default_text_encoding(value) return False def update_setting(self, setting: str) -> None: """Update the given setting.""" value = config.instance.get(setting) self._update_setting(setting, value) def update_for_url(self, url: QUrl) -> None: """Update settings customized for the given tab.""" qtutils.ensure_valid(url) for values in config.instance: if not values.opt.supports_pattern: continue value = values.get_for_url(url, fallback=False) self._update_setting(values.opt.name, value) def init_settings(self) -> None: """Set all supported settings correctly.""" for setting in (list(self._ATTRIBUTES) + list(self._FONT_SIZES) + list(self._FONT_FAMILIES)): self.update_setting(setting) @debugcachestats.register(name='user agent cache') @functools.lru_cache() def _format_user_agent(template: str, backend: usertypes.Backend) -> str: if backend == usertypes.Backend.QtWebEngine: from qutebrowser.browser.webengine import webenginesettings parsed = webenginesettings.parsed_user_agent else: from qutebrowser.browser.webkit import webkitsettings parsed = webkitsettings.parsed_user_agent assert parsed is not None return template.format( os_info=parsed.os_info, webkit_version=parsed.webkit_version, qt_key=parsed.qt_key, qt_version=qVersion(), upstream_browser_key=parsed.upstream_browser_key, upstream_browser_version=parsed.upstream_browser_version, qutebrowser_version=qutebrowser.__version__, ) def user_agent(url: QUrl = None) -> str: """Get the user agent for the given URL, or the global one if URL is None. Note that the given URL should always be valid. 
""" template = config.instance.get('content.headers.user_agent', url=url) return _format_user_agent(template=template, backend=objects.backend) def init(args: argparse.Namespace) -> None: """Initialize all QWeb(Engine)Settings.""" if objects.backend == usertypes.Backend.QtWebEngine: from qutebrowser.browser.webengine import webenginesettings webenginesettings.init() elif objects.backend == usertypes.Backend.QtWebKit: from qutebrowser.browser.webkit import webkitsettings webkitsettings.init() else: raise utils.Unreachable(objects.backend) # Make sure special URLs always get JS support for pattern in ['chrome://*/*', 'qute://*/*']: config.instance.set_obj('content.javascript.enabled', True, pattern=urlmatch.UrlPattern(pattern), hide_userconfig=True) def clear_private_data() -> None: """Clear cookies, cache and related data for private browsing sessions.""" if objects.backend == usertypes.Backend.QtWebEngine: from qutebrowser.browser.webengine import webenginesettings webenginesettings.init_private_profile() elif objects.backend == usertypes.Backend.QtWebKit: from qutebrowser.browser.webkit import cookies assert cookies.ram_cookie_jar is not None cookies.ram_cookie_jar.setAllCookies([]) else: raise utils.Unreachable(objects.backend) @pyqtSlot() def shutdown() -> None: """Shut down QWeb(Engine)Settings.""" if objects.backend == usertypes.Backend.QtWebEngine: from qutebrowser.browser.webengine import webenginesettings webenginesettings.shutdown() elif objects.backend == usertypes.Backend.QtWebKit: from qutebrowser.browser.webkit import webkitsettings webkitsettings.shutdown() else: raise utils.Unreachable(objects.backend)
gpl-3.0
3,179,883,137,833,848,300
34.339552
78
0.652201
false
diofant/diofant
diofant/tests/external/test_plot.py
1
10543
"""Generic plotting tests.""" from __future__ import annotations import errno import functools import os import sys import tempfile import typing import pytest from diofant import (And, I, Integral, LambertW, Piecewise, cos, exp_polar, log, meijerg, oo, pi, plot, plot3d, plot3d_parametric_line, plot3d_parametric_surface, plot_parametric, real_root, sin, sqrt, summation) from diofant.abc import x, y, z from diofant.plotting.plot import unset_show __all__ = () matplotlib = pytest.importorskip('matplotlib', minversion='1.1.0') class MockPrint: def write(self, s): pass def flush(self): pass def disable_print(func, *args, **kwargs): @functools.wraps(func) def wrapper(*args, **kwargs): sys.stdout = MockPrint() func(*args, **kwargs) sys.stdout = sys.__stdout__ return wrapper unset_show() # XXX: We could implement this as a context manager instead class TmpFileManager: tmp_files: list[typing.Any] = [] @classmethod def tmp_file(cls, name=''): cls.tmp_files.append(tempfile.NamedTemporaryFile(prefix=name, suffix='.png').name) return cls.tmp_files[-1] @classmethod def cleanup(cls): for f in cls.tmp_files: try: os.remove(f) except OSError as e: if e.errno != errno.ENOENT: raise def test_matplotlib_intro(): """Examples from the 'introduction' notebook.""" try: name = 'test' tmp_file = TmpFileManager.tmp_file p = plot(x, adaptive=False) assert str(p) == """Plot object containing: [0]: cartesian line: x for x over (-10.0, 10.0)""" p = plot(x*sin(x), x*cos(x)) p.extend(p) p[0].line_color = lambda a: a p[1].line_color = 'b' p.title = 'Big title' p.xlabel = 'the x axis' p[1].label = 'straight line' p.legend = True p.aspect_ratio = (1, 1) p.xlim = (-15, 20) p.save(tmp_file(f'{name}_basic_options_and_colors')) p.extend(plot(x + 1)) p.append(plot(x + 3, x**2)[1]) p.save(tmp_file(f'{name}_plot_extend_append')) p[2] = plot(x**2, (x, -2, 3)) p.save(tmp_file(f'{name}_plot_setitem')) del p p = plot(sin(x), (x, -2*pi, 4*pi)) p.save(tmp_file(f'{name}_line_explicit')) del p p = plot(sin(x)) p.save(tmp_file(f'{name}_line_default_range')) del p p = plot((x**2, (x, -5, 5)), (x**3, (x, -3, 3))) p.save(tmp_file(f'{name}_line_multiple_range')) del p pytest.raises(ValueError, lambda: plot(x, y)) # parametric 2d plots. # Single plot with default range. plot_parametric(sin(x), cos(x)).save(tmp_file()) # Single plot with range. p = plot_parametric(sin(x), cos(x), (x, -5, 5)) assert str(p) == """Plot object containing: [0]: parametric cartesian line: (sin(x), cos(x)) for x over (-5.0, 5.0)""" p.save(tmp_file(f'{name}_parametric_range')) del p # Multiple plots with same range. p = plot_parametric((sin(x), cos(x)), (x, sin(x))) p.save(tmp_file(f'{name}_parametric_multiple')) del p # Multiple plots with different ranges. p = plot_parametric((sin(x), cos(x), (x, -3, 3)), (x, sin(x), (x, -5, 5))) p.save(tmp_file(f'{name}_parametric_multiple_ranges')) del p # depth of recursion specified. p = plot_parametric(x, sin(x), depth=13) p.save(tmp_file(f'{name}_recursion_depth')) del p # No adaptive sampling. 
p = plot_parametric(cos(x), sin(x), adaptive=False, nb_of_points=500) p.save(tmp_file(f'{name}_adaptive')) del p # 3d parametric plots p = plot3d_parametric_line(sin(x), cos(x), x) assert str(p) == """Plot object containing: [0]: 3D parametric cartesian line: (sin(x), cos(x), x) for x over (-10.0, 10.0)""" p.save(tmp_file(f'{name}_3d_line')) del p p = plot3d_parametric_line( (sin(x), cos(x), x, (x, -5, 5)), (cos(x), sin(x), x, (x, -3, 3))) p.save(tmp_file(f'{name}_3d_line_multiple')) del p p = plot3d_parametric_line(sin(x), cos(x), x, nb_of_points=30) p.save(tmp_file(f'{name}_3d_line_points')) del p # 3d surface single plot. p = plot3d(x * y) assert str(p) in ("""Plot object containing: [0]: cartesian surface: x*y for %s over (-10.0, 10.0) and %s over (-10.0, 10.0)""" % _ for _ in [(x, y), (y, x)]) p.save(tmp_file(f'{name}_surface')) del p # Multiple 3D plots with same range. p = plot3d(-x * y, x * y, (x, -5, 5)) p.save(tmp_file(f'{name}_surface_multiple')) del p # Multiple 3D plots with different ranges. p = plot3d( (x * y, (x, -3, 3), (y, -3, 3)), (-x * y, (x, -3, 3), (y, -3, 3))) p.save(tmp_file(f'{name}_surface_multiple_ranges')) del p # Single Parametric 3D plot p = plot3d_parametric_surface(sin(x + y), cos(x - y), x - y) assert str(p) in ("""Plot object containing: [0]: parametric cartesian surface: (sin(x + y), cos(x - y), x - y) for %s over (-10.0, 10.0) and %s over (-10.0, 10.0)""" % _ for _ in [(x, y), (y, x)]) p.save(tmp_file(f'{name}_parametric_surface')) del p # Multiple Parametric 3D plots. p = plot3d_parametric_surface( (x*sin(z), x*cos(z), z, (x, -5, 5), (z, -5, 5)), (sin(x + y), cos(x - y), x - y, (x, -5, 5), (y, -5, 5))) p.save(tmp_file(f'{name}_parametric_surface')) del p # issue sympy/sympy#7140 p1 = plot(x) p2 = plot(x**2) # append a series p2.append(p1[0]) assert len(p2._series) == 2 with pytest.raises(TypeError): p1.append(p2) with pytest.raises(TypeError): p1.append(p2._series) # issue sympy/sympy#10925 f = Piecewise((-1, x < -1), (x, And(-1 <= x, x < 0)), (x**2, And(0 <= x, x < 1)), (x**3, True)) p = plot(f, (x, -3, 3)) p.save(tmp_file(f'{name}_10925')) del p finally: TmpFileManager.cleanup() def test_matplotlib_colors(): """Examples from the 'colors' notebook.""" try: name = 'test' tmp_file = TmpFileManager.tmp_file p = plot(sin(x)) p[0].line_color = lambda a: a p.save(tmp_file(f'{name}_colors_line_arity1')) p[0].line_color = lambda a, b: b p.save(tmp_file(f'{name}_colors_line_arity2')) p = plot(x*sin(x), x*cos(x), (x, 0, 10)) p[0].line_color = lambda a: a p.save(tmp_file(f'{name}_colors_param_line_arity1')) p[0].line_color = lambda a, b: a p.save(tmp_file(f'{name}_colors_param_line_arity2a')) p[0].line_color = lambda a, b: b p.save(tmp_file(f'{name}_colors_param_line_arity2b')) del p p = plot3d_parametric_line(sin(x) + 0.1*sin(x)*cos(7*x), cos(x) + 0.1*cos(x)*cos(7*x), 0.1*sin(7*x), (x, 0, 2*pi)) p[0].line_color = lambda a: sin(4*a) p.save(tmp_file(f'{name}_colors_3d_line_arity1')) p[0].line_color = lambda a, b: b p.save(tmp_file(f'{name}_colors_3d_line_arity2')) p[0].line_color = lambda a, b, c: c p.save(tmp_file(f'{name}_colors_3d_line_arity3')) del p p = plot3d(sin(x)*y, (x, 0, 6*pi), (y, -5, 5)) p[0].surface_color = lambda a: a p.save(tmp_file(f'{name}_colors_surface_arity1')) p[0].surface_color = lambda a, b: b p.save(tmp_file(f'{name}_colors_surface_arity2')) p[0].surface_color = lambda a, b, c: c p.save(tmp_file(f'{name}_colors_surface_arity3a')) p[0].surface_color = lambda a, b, c: sqrt((a - 3*pi)**2 + b**2) 
p.save(tmp_file(f'{name}_colors_surface_arity3b')) del p p = plot3d_parametric_surface(x * cos(4 * y), x * sin(4 * y), y, (x, -1, 1), (y, -1, 1)) p[0].surface_color = lambda a: a p.save(tmp_file(f'{name}_colors_param_surf_arity1')) p[0].surface_color = lambda a, b: a*b p.save(tmp_file(f'{name}_colors_param_surf_arity2')) p[0].surface_color = lambda a, b, c: sqrt(a**2 + b**2 + c**2) p.save(tmp_file(f'{name}_colors_param_surf_arity3')) del p finally: TmpFileManager.cleanup() @pytest.mark.xfail def test_matplotlib_advanced(): """Examples from the 'advanced' notebook.""" try: name = 'test' tmp_file = TmpFileManager.tmp_file s = summation(1/x**y, (x, 1, oo)) p = plot(s, (y, 2, 10)) p.save(tmp_file(f'{name}_advanced_inf_sum')) p = plot(summation(1/x, (x, 1, y)), (y, 2, 10), show=False) p[0].only_integers = True p[0].steps = True p.save(tmp_file(f'{name}_advanced_fin_sum')) ### # Test expressions that can not be translated to np and # generate complex results. ### plot(sin(x) + I*cos(x)).save(tmp_file()) plot(sqrt(sqrt(-x))).save(tmp_file()) plot(LambertW(x)).save(tmp_file()) plot(sqrt(LambertW(x))).save(tmp_file()) # Characteristic function of a StudentT distribution with nu=10 plot((meijerg(((1 / 2,), ()), ((5, 0, 1 / 2), ()), 5 * x**2 * exp_polar(-I*pi)/2) + meijerg(((1/2,), ()), ((5, 0, 1/2), ()), 5*x**2 * exp_polar(I*pi)/2)) / (48 * pi), (x, 1e-6, 1e-2)).save(tmp_file()) finally: TmpFileManager.cleanup() @pytest.mark.xfail def test_matplotlib_advanced_2(): """More examples from the 'advanced' notebook.""" try: name = 'test' tmp_file = TmpFileManager.tmp_file i = Integral(log((sin(x)**2 + 1)*sqrt(x**2 + 1)), (x, 0, y)) p = plot(i, (y, 1, 5)) p.save(tmp_file(f'{name}_advanced_integral')) finally: TmpFileManager.cleanup() @disable_print def test_sympyissue_11461(): try: name = 'test' tmp_file = TmpFileManager.tmp_file p = plot(real_root((log(x/(x-2))), 3), (x, 3, 4)) p.save(tmp_file(f'{name}_11461')) del p finally: TmpFileManager.cleanup()
bsd-3-clause
-4,773,593,944,352,649,000
30.660661
125
0.521104
false
TimBizeps/BachelorAP
V103_Biegung elastischer Stäbe/Auswertung.py
1
7054
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import scipy.constants as const
from scipy.optimize import curve_fit


def auswertung(material, querschnitt, einspannung, x, D, d, L, M):
    if einspannung == "einseitig":
        u = L*x**2 - x**3/3
        g = const.g
        F = M*g
        # d = np.mean(k)
        # Δd = np.sqrt(1/(len(k)*(len(k)-1))*sum((d-k)**2))
        if querschnitt == "kreisfoermig":
            I = np.pi/64*d**4
            # ΔI = np.pi/16*d**3*Δd
        if querschnitt == "quadratisch":
            I = d**4/12
            # ΔI = 1/3*d**3*Δd

        def f(x, m, b):
            return m*x + b

        params, cov = curve_fit(f, u, D)
        m = params[0]
        b = params[1]
        Δm = np.sqrt(cov[0][0])
        Δb = np.sqrt(cov[1][1])
        E = F/(2*I*m)
        # ΔE = np.sqrt((F/(2*I**2*m)*ΔI)**2+(F/(2*I*m**2)*Δm)**2)
        ΔE = np.sqrt((F/(2*I*m**2)*Δm)**2)
        t = np.linspace(u.min(), u.max(), 1000)
        plt.plot(u, 1000*D, 'rx', label='Measured values')
        plt.plot(t, 1000*f(t, m, b), 'k-', label='Regression line')
        plt.xlim(u.min(), u.max())
        plt.xlabel(r"$(Lx^2 - \frac{x^3}{3})/\mathrm{m}^3$")
        plt.ylabel(r"$D/\mathrm{mm}$")
        plt.legend(loc='best')
        plt.tight_layout()
        plt.savefig("build/plot_{}_{}_{}.pdf".format(material, querschnitt,
                                                     einspannung))
        plt.close()
        print(
            """
        ------------------------------------------------------------------------
        Material: {}
        Cross-section: {}
        Clamping: {}
        Diameter d: {} ± {:.5f} mm
        Length L: {} cm
        Mass M: {} kg
        Area moment of inertia I: {} ± {} mm^4
        Young's modulus E: {} ± {} N/m^2
        Slope m: {} ± {}
        Intercept b: {} ± {}
        ------------------------------------------------------------------------
        """.format(material, querschnitt, einspannung, d*1e3, 0, L*1e2, M,
                   I*1e12, 0, E*1e0, ΔE*1e0, m, Δm, b, Δb))

    if einspannung == "beidseitig":
        x1, x2 = np.array_split(x, 2)
        D1, D2 = np.array_split(D, 2)
        u1 = 3*L**2*x1 - 4*x1**3
        u2 = 4*x2**3 - 12*L*x2**2 + 9*L**2*x2 - L**3
        g = const.g
        F = M*g
        # d = np.mean(k)
        # Δd = np.sqrt(1/(len(k)*(len(k)-1))*sum((d-k)**2))
        if querschnitt == "kreisfoermig":
            I = np.pi/64*d**4
            # ΔI = np.pi/16*d**3*Δd
        if querschnitt == "quadratisch":
            I = d**4/12
            # ΔI = 1/3*d**3*Δd

        def f(x, m, b):
            return m*x + b

        params1, cov1 = curve_fit(f, u1, D1)
        params2, cov2 = curve_fit(f, u2, D2)
        m1 = params1[0]
        m2 = params2[0]
        b1 = params1[1]
        b2 = params2[1]
        Δm1 = np.sqrt(cov1[0][0])
        Δm2 = np.sqrt(cov2[0][0])
        Δb1 = np.sqrt(cov1[1][1])
        Δb2 = np.sqrt(cov2[1][1])
        E1 = F/(48*I*m1)
        E2 = F/(48*I*m2)
        # ΔE1 = np.sqrt((F/(48*I**2*m1)*ΔI)**2+(F/(48*I*m1**2)*Δm1)**2)
        ΔE1 = np.sqrt((F/(48*I*m1**2)*Δm1)**2)
        # ΔE2 = np.sqrt((F/(48*I**2*m2)*ΔI)**2+(F/(48*I*m2**2)*Δm2)**2)
        ΔE2 = np.sqrt((F/(48*I*m2**2)*Δm2)**2)
        E = (E1+E2)/2
        ΔE = np.sqrt(ΔE1**2+ΔE2**2)/2
        t = np.linspace(u1.min(), u1.max(), 1000)
        plt.plot(u1, 1000*D1, 'rx', label='Measured values')
        plt.plot(t, 1000*f(t, m1, b1), 'k-', label='Regression line')
        plt.xlim(u1.min(), u1.max())
        plt.xlabel(r"$(3L^2x - 4x^3)/\mathrm{m}^3$")
        plt.ylabel(r"$D/\mathrm{mm}$")
        plt.legend(loc='best')
        plt.tight_layout()
        plt.savefig("build/plot_{}_{}_{}_1.pdf".format(material, querschnitt,
                                                       einspannung))
        plt.close()
        t = np.linspace(u2.min(), u2.max(), 1000)
        plt.plot(u2, 1000*D2, 'rx', label='Measured values')
        plt.plot(t, 1000*f(t, m2, b2), 'k-', label='Regression line')
        plt.xlim(u2.min(), u2.max())
        plt.xlabel(r"$(4x^3 - 12Lx^2 + 9L^2x - L^3)/\mathrm{m}^3$")
        plt.ylabel(r"$D/\mathrm{mm}$")
        plt.legend(loc='best')
        plt.tight_layout()
        plt.savefig("build/plot_{}_{}_{}_2.pdf".format(material, querschnitt,
                                                       einspannung))
        plt.close()
        print("""
        ------------------------------------------------------------------------
        Material: {}
        Cross-section: {}
        Clamping: {}
        Diameter d: {} ± {} mm
        Length L: {} cm
        Mass M: {} kg
        Area moment of inertia I: {} ± {} mm^4
        Young's modulus E1: {} ± {} N/m^2
        Young's modulus E2: {} ± {} N/m^2
        Young's modulus E: {} ± {} N/m^2
        Slope m1: {} ± {}
        Slope m2: {} ± {}
        Intercept b1: {} ± {}
        Intercept b2: {} ± {}
        ------------------------------------------------------------------------
        """.format(material, querschnitt, einspannung, d*1e3, 0, L*1e2, M,
                   I*1e12, 0, E1*1e0, ΔE1*1e0, E2*1e0, ΔE2*1e0, E*1e0, ΔE*1e0,
                   m1, Δm1, m2, Δm2, b1, Δb1, b2, Δb2))


'''
############################################################################
# Test with measurement data from Philipp Leser
# Aluminium, square cross-section, clamped on both sides

# Read the data
x, D = np.loadtxt("data/daten_aluminium_quadratisch_beidseitig.txt",
                  unpack=True)
d = np.loadtxt("data/daten_aluminium_quadratisch_durchmesser.txt",
               unpack=True)
L = 55.3  # [cm]
M = 4.6944  # [kg]

# Evaluation
d *= 1e-3
L *= 1e-2
x *= 1e-2
D *= 1e-6

auswertung("Aluminium", "quadratisch", "beidseitig", x, D, d, L, M)
############################################################################
'''

# Brass, square cross-section, clamped on one side

# Read the data
x, D = np.loadtxt("Messing.txt", unpack=True)
d = 10  # [mm]
L = 40.70  # [cm]
M = 2.3606  # [kg]

# Evaluation
d *= 1e-3
L *= 1e-2
x *= 1e-2
D *= 1e-6

auswertung("Messing", "quadratisch", "einseitig", x, D, d, L, M)

# Aluminium, circular cross-section, clamped on one side

# Read the data
x, D = np.loadtxt("alurund.txt", unpack=True)
d = 10  # [mm]
L = 34.8  # [cm]
M = 1.1926  # [kg]

# Evaluation
d *= 1e-3
L *= 1e-2
x *= 1e-2
D *= 1e-6

auswertung("Aluminium", "kreisfoermig", "einseitig", x, D, d, L, M)

# Aluminium, square cross-section, clamped on both sides, without load

# Read the data
x, D = np.loadtxt("alueckig.txt", unpack=True)
d = 10  # [mm]
L = 55.3  # [cm]
M = 0  # [kg]

# Evaluation
d *= 1e-3
L *= 1e-2
x *= 1e-2
D *= 1e-6

auswertung("Aluminium", "quadratisch", "beidseitig", x, D, d, L, M)

# Aluminium, square cross-section, clamped on both sides, with load
x, D = np.loadtxt("alueckig2.txt", unpack=True)
d = 10  # [mm]
L = 55.3  # [cm]
M = 3.5312  # [kg]

# Evaluation
d *= 1e-3
L *= 1e-2
x *= 1e-2
D *= 1e-6

auswertung("Aluminium", "quadratisch", "beidseitig", x, D, d, L, M)
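

if __name__ == "__main__":
    # Quick numeric sanity check of the formulas above (assumed values, not
    # measurement data): for a rod clamped on one side the deflection is
    #     D(x) = F/(2*E*I) * (L*x**2 - x**3/3),
    # so fitting D against u = L*x**2 - x**3/3 yields the slope m = F/(2*E*I)
    # and therefore E = F/(2*I*m), exactly as used in auswertung().
    F_chk, E_chk, d_chk = 10.0, 70e9, 0.01  # N, N/m^2, m
    I_chk = np.pi/64*d_chk**4
    m_chk = F_chk/(2*E_chk*I_chk)
    print("recovered E:", F_chk/(2*I_chk*m_chk))  # ~7e10 N/m^2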
gpl-3.0
-5,941,356,961,601,821,000
31.635514
171
0.463918
false
shacknetisp/fourthevaz
modules/default/chatbot/wordai.py
1
4216
# -*- coding: utf-8 -*- from random import choice import copy import random import string import pprint import pickle class wordai: """Word AI""" def load(self): """Load the file.""" try: self.dict_file = open(self.dbfile, 'rb') self.data_dict = pickle.load(self.dict_file) self.dict_file.close() except: pass def save(self): """Save the file""" output = open(self.dbfile, 'wb') pickle.dump(self.data_dict, output) output.close() def addchoice(self, a): self.choices.append(a) def ms(self, r): exclude = set(string.punctuation) r = ''.join(ch for ch in r if ch not in exclude) inp = r.lower().split() if len(inp): if not ';start' in self.data_dict: self.data_dict[';start'] = list() if not ';end' in self.data_dict: self.data_dict[';end'] = list() if not inp[0] in self.data_dict[';start'] or True: self.data_dict[';start'].append(inp[0]) if not inp[-1] in self.data_dict[';end'] or True: self.data_dict[';end'].append(inp[-1]) for i in range(len(inp)): if not inp[i] in self.data_dict: self.data_dict[inp[i]] = list() try: if not inp[i + 1] in self.data_dict[inp[i]] or True: self.data_dict[inp[i]].append(inp[i + 1]) except IndexError: pass ret = '' try: self.choices = list() for ch in range(4): try: self.addchoice(choice(inp)) except: pass try: self.addchoice(inp[0]) except: pass for ch in range(random.randrange(8, 16)): try: self.addchoice(choice(self.data_dict[';start'])) except: pass try: self.addchoice(choice(self.data_dict[inp[0]])) except: pass first = choice(self.choices) ret += first + ' ' nextword = first for numwords in range(100): if nextword in self.data_dict: if nextword in self.data_dict[';end'] \ and (int(random.randrange(0, 100)) < 5 + numwords + self.data_dict[';end'].count(nextword) / len(self.data_dict[';end']) * 1000 or len(self.data_dict[nextword]) == 0): break cnext = choice(self.data_dict[nextword]) ret += cnext + ' ' nextword = cnext else: break except IndexError: pass except KeyError: pass try: return str(str(ret[0]).upper() + ret[1:]).strip() + '.' except IndexError: return '?' def process(self, mp): """Process <mp> and return a reply.""" out = self.ms(mp) self.save() return out def replace(self, w, n): """Replace <w> with <n> in the dictionary.""" if n != w: self.data_dict[n] = self.data_dict[w] del self.data_dict[w] for k in self.data_dict: for (index, item) in enumerate(self.data_dict[k]): if item == w: self.data_dict[k][index] = n self.save() def getdictstring(self): """Return the pprinted dictionary.""" data_dict_tmp = copy.deepcopy(self.data_dict) if ';record' in data_dict_tmp: del data_dict_tmp[';record'] return pprint.pformat(data_dict_tmp) def getwords(self): """Get the number of words.""" data_dict_tmp = copy.deepcopy(self.data_dict) if ';record' in data_dict_tmp: del data_dict_tmp[';record'] return len(data_dict_tmp) - 2 def __init__(self, dbf): self.dbfile = dbf self.choices = []
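

if __name__ == "__main__":
    # Minimal usage sketch of the wordai class above. "brain.db" is an
    # assumed path; data_dict is set by hand here because __init__ does not
    # initialise it (normally load() would populate it from an existing
    # database file).
    ai = wordai("brain.db")
    ai.data_dict = {}
    print(ai.process("the quick brown fox jumps over the lazy dog"))
    print(ai.getwords())  # number of distinct words learned so far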
mit
5,600,262,000,338,761,000
31.183206
73
0.46371
false
lukecampbell/compliance-checker
compliance_checker/cf/appendix_d.py
1
4004
#!/usr/bin/env python ''' Appendix D compliance support for CF 1.6 and CF 1.7 The definitions given here allow an application to compute dimensional coordinate values from the dimensionless ones and associated variables. The formulas are expressed for a gridpoint (n,k,j,i) where i and j are the horizontal indices, k is the vertical index and n is the time index. A coordinate variable is associated with its definition by the value of the standard_name attribute. The terms in the definition are associated with file variables by the formula_terms attribute. The formula_terms attribute takes a string value, the string being comprised of blank-separated elements of the form "term: variable", where term is a keyword that represents one of the terms in the definition, and variable is the name of the variable in a netCDF file that contains the values for that term. The order of elements is not significant. The gridpoint indices are not formally part of the definitions, but are included to illustrate the indices that might be present in the file variables. For example, a vertical coordinate whose definition contains a time index is not necessarily time dependent in all netCDF files. Also, the definitions are given in general forms that may be simplified by omitting certain terms. A term that is omitted from the formula_terms attribute should be assumed to be zero. ''' # Contains the standard name followed by a 2-tuple: # (the set of expected formula terms, set of computed_standard_name(s)). Most # vertical coordinates only have one computed_standard_name, but some have # multiple acceptable values. ocean_computed_standard_names = { 'altitude', 'height_above_geopotential_datum', 'height_above_reference_ellipsoid', 'height_above_mean_sea_level' } dimless_vertical_coordinates_1_6 = { # only for CF-1.6 "atmosphere_ln_pressure_coordinate" : ({'p0', 'lev'}, {'air_pressure'}), "atmosphere_sigma_coordinate" : ({'sigma', 'ps', 'ptop'}, {'air_pressure'}), "atmosphere_hybrid_sigma_pressure_coordinate": (({'a', 'b', 'ps'}, {'ap', 'b', 'ps'}), {'air_pressure'}), "atmosphere_hybrid_height_coordinate" : ({'a', 'b', 'orog'}, {'altitude', 'height_above_geopotential_datum'}), "atmosphere_sleve_coordinate" : ({'a', 'b1', 'b2', 'ztop', 'zsurf1', 'zsurf2'}, {'altitude', 'height_above_geopotential_datum'}), "ocean_sigma_coordinate" : ({'sigma', 'eta', 'depth'}, ocean_computed_standard_names), "ocean_s_coordinate" : ({'s', 'eta', 'depth', 'a', 'b', 'depth_c'}, ocean_computed_standard_names), "ocean_sigma_z_coordinate" : ({'sigma', 'eta', 'depth', 'depth_c', 'nsigma', 'zlev'}, ocean_computed_standard_names), "ocean_double_sigma_coordinate" : ({'sigma', 'depth', 'z1', 'z2', 'a', 'href', 'k_c'}, ocean_computed_standard_names) } dimless_vertical_coordinates_1_7 = dimless_vertical_coordinates_1_6.copy() # shallow copy dimless_vertical_coordinates_1_7.update({ # extends 1.6 "ocean_s_coordinate_g1": ({'s', 'C', 'eta', 'depth', 'depth_c'}, ocean_computed_standard_names), "ocean_s_coordinate_g2": ({'s', 'C', 'eta', 'depth', 'depth_c'}, ocean_computed_standard_names) }) def no_missing_terms(formula_name, term_set, dimless_vertical_coordinates): """ Returns true if the set is not missing terms corresponding to the entries in Appendix D, False otherwise. The set of terms should be exactly equal, and not contain more or less terms than expected. 
""" reqd_terms = dimless_vertical_coordinates[formula_name][0] def has_all_terms(reqd_termset): return len(reqd_termset ^ term_set) == 0 if isinstance(reqd_terms, set): return has_all_terms(reqd_terms) # if it's not a set, it's likely some other form of iterable with multiple # possible definitions i.e. a/ap are interchangeable in else: return any(has_all_terms(req) for req in reqd_terms)
apache-2.0
2,743,449,355,784,953,000
59.666667
148
0.692807
false
DemianWright/io_scene_blb
const.py
1
6023
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

""" Various constants used in multiple modules.

@author: Demian Wright
"""
from decimal import Decimal
from enum import Enum, IntEnum
from math import pi

# The BLB file extension.
BLB_EXT = ".blb"

# The log file extension.
LOG_EXT = ".log"

# One log indent level.
LOG_INDENT = " "

# Generic.
X = 0
Y = 1
Z = 2

# Humans have wibbly-wobbly hands.
HUMAN_BRICK_GRID_ERROR = Decimal("0.1")

# The defined height of a Blockland plate at 100% scale.
DEFAULT_PLATE_HEIGHT = Decimal("0.4")

# Blockland does not accept bricks that are wider/deeper than 64 bricks or taller than 256 plates.
MAX_BRICK_HORIZONTAL_PLATES = 64
MAX_BRICK_VERTICAL_PLATES = 256

# Blockland supports up to 10 collision cuboids per BLB.
MAX_BRICK_COLLISION_CUBOIDS = 10


class BLBQuadSection(IntEnum):
    """The quad sections in the correct order for writing to a BLB file. Indexed from 0 to 6."""
    TOP = 0
    BOTTOM = 1
    NORTH = 2
    EAST = 3
    SOUTH = 4
    WEST = 5
    OMNI = 6


class BrickTexture(Enum):
    """Valid brick texture names in alphabetical order."""
    BOTTOMEDGE = 0
    BOTTOMLOOP = 1
    PRINT = 2
    RAMP = 3
    SIDE = 4
    TOP = 5

    def __str__(self):
        """Returns the name of the enum value in uppercase characters."""
        return self.name

    @classmethod
    def as_list(cls):
        """Returns the names of the members of this enum as a list of uppercase strings."""
        return [member.name for member in BrickTexture]


# BLB file strings.
BLB_BRICK_TYPE_SPECIAL = "SPECIAL"
BLB_SECTION_SEPARATOR = "---------------- {} QUADS ----------------"
BLB_HEADER_COVERAGE = "COVERAGE:"
BLB_PREFIX_TEXTURE = "TEX:"
BLB_HEADER_POSITION = "POSITION:"
BLB_HEADER_UV = "UV COORDS:"
BLB_HEADER_COLORS = "COLORS:"
BLB_HEADER_NORMALS = "NORMALS:"

# The default coverage value = no coverage. (Number of plates that need to cover a brick side to hide it.)
# The maximum area a brick's side can cover is 64 * 256 = 16384 plates.
DEFAULT_COVERAGE = 99999

# Brick grid symbols.
GRID_INSIDE = "x"  # Disallow building inside brick.
GRID_OUTSIDE = "-"  # Allow building in empty space.
GRID_UP = "u"  # Allow placing bricks above this plate.
GRID_DOWN = "d"  # Allow placing bricks below this plate.
GRID_BOTH = "b"  # Allow placing bricks above and below this plate.

# Blender has 20 layers.
BLENDER_MAX_LAYER_IDX = 19

# Maximum number of decimal places to write to file.
MAX_FP_DECIMALS_TO_WRITE = 16

# The width and height of the default brick textures in pixels.
BRICK_TEXTURE_RESOLUTION = 512

# The UV coordinates are a single point in the middle of the image = no uv coordinates.
# The middle of the image is used instead of (0,0) due to the way Blockland brick textures are designed.
DEFAULT_UV_COORDINATES = ((0.5, 0.5),) * 4

# Often used Decimal values.
DECIMAL_ONE = Decimal("1.0")
DECIMAL_HALF = Decimal("0.5")

# Useful angles in radians.
RAD_45_DEG = pi * 0.25 RAD_135_DEG = pi - RAD_45_DEG RAD_225_DEG = pi + RAD_45_DEG RAD_315_DEG = pi + RAD_135_DEG TWO_PI = 2.0 * pi class Axis3D(Enum): """An enum with values representing each axis in three-dimensional space, indexed as follows: 0: POS_X 1: NEG_X 2: POS_Y 3: NEG_Y 4: POS_Z 5: NEG_Z """ POS_X = 0 NEG_X = 1 POS_Y = 2 NEG_Y = 3 POS_Z = 4 NEG_Z = 5 def index(self): """Determines the index of this three-dimensional axis. Returns: The index 0, 1, or 2 for the axes X, Y, and Z respectively. """ if self is Axis3D.POS_X or self is Axis3D.NEG_X: return X elif self is Axis3D.POS_Y or self is Axis3D.NEG_Y: return Y else: return Z @classmethod def from_property_name(cls, axis_name): """Parses the 3D axis from the specified string. Args: axis_name (string): The name of the axis in the same format as the axis_blb_forward Blender property. Returns: An Axis3D value corresponding to the specified axis name. """ if axis_name == "POSITIVE_X": return Axis3D.POS_X elif axis_name == "NEGATIVE_X": return Axis3D.NEG_X elif axis_name == "POSITIVE_Y": return Axis3D.POS_Y elif axis_name == "NEGATIVE_Y": return Axis3D.NEG_Y elif axis_name == "POSITIVE_Z": return Axis3D.POS_Z else: # axis_name == "NEGATIVE_Z": return Axis3D.NEG_Z def is_positive(self): """Determines if this three-dimensional axis is positive or negative. Returns: True if this value represents a positive axis. """ return self is Axis3D.POS_X or self is Axis3D.POS_Y or self is Axis3D.POS_Z class AxisPlane3D(Enum): """An enum with values representing each axis-aligned plane in three-dimensional space, indexed as follows: 0: XY-plane 1: XZ-plane 2: YZ-plane """ XY = 0 XZ = 1 YZ = 2
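

if __name__ == "__main__":
    # Minimal sketch of the Axis3D helpers above, using an assumed value of
    # the axis_blb_forward Blender property.
    forward = Axis3D.from_property_name("NEGATIVE_Y")
    print(forward)  # Axis3D.NEG_Y
    print(forward.index())  # 1 (the Y axis)
    print(forward.is_positive())  # False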
gpl-2.0
3,007,720,878,029,224,000
27.965174
113
0.619957
false
twhiteaker/pynwm
src/pynwm/hydroshare/hs_latest.py
1
2315
#!/usr/bin/python2 """Identifies the latest National Water Model files in HydroShare.""" from hs_list import list_sims, list_dates def _find_complete_sim(sims): for key in reversed(sims): sim = sims[key] if sim['is_complete']: return (key, sim) return (None, None) def find_latest_simulation(product): """Identifies files for the most recent complete simulation. As files arrive at HydroShare from NOAA, a folder for the forecast date is created although all files may have not yet arrived from NOAA. This function checks that all files for the simulation are present before returning details of that simulation. Each simulation is represented as a dictionary describing product type, simulation date, and whether all expected files are present, and it also includes a list of filenames, e.g. {'product': 'long_range_mem1', 'date': '20170401t06-00', 'is_complete': True, 'files': ['nwm...f006.conus.nc', 'nwm...f012.conus.nc', ...], 'links': ['http...', ...]} Args: product: String product name, e.g., 'short_range'. Returns: An ordered dictionary of simulation dictionaries, indexed by product and date, e.g., 'long_range_mem1_20170401t06-00', or empty dictionary if no complete simulations found. """ sims = {} if product == 'analysis_assim': # Warning: This may change with NWM v1.1 since assim has 3 files, not one all_sims = list_sims(product) key, sim = _find_complete_sim(all_sims) if key: sims[key] = sim else: dates = reversed(list_dates(product)) for date in dates: date_sims = list_sims(product, date) if product == 'long_range' and len(date_sims) == 16: is_complete = True for key, sim in date_sims.iteritems(): if not sim['is_complete']: is_complete = False break if is_complete: sims = date_sims break elif product != 'long_range': key, sim = _find_complete_sim(date_sims) if key: sims[key] = sim break return sims
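

if __name__ == '__main__':
    # Minimal usage sketch of find_latest_simulation() above (Python 2, to
    # match the rest of this module). 'short_range' is one of the documented
    # product names; a real run requires access to HydroShare.
    sims = find_latest_simulation('short_range')
    for key, sim in sims.iteritems():
        print '%s: %d files (complete: %s)' % (
            key, len(sim['files']), sim['is_complete'])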
mit
5,805,429,525,390,713,000
34.075758
81
0.580562
false
VolatileDream/zzzcron
zzzc/sleep_stats.py
1
1859
from .util import * import datetime import math def sleep_changes(input_stream): for line in input_stream: if len(line) <= 0: continue line = line.rstrip("\n") portions = line.split(" ") state = SleepState[ portions[0] ] time = datetime_from_str( portions[1] ) minutes = floor_minutes( time.minute ) time = datetime.datetime(time.year, time.month, time.day, time.hour, minutes) yield (state, time) from .algorithms import * def update_sleep_probability(input_stream, output_stream=None): config = load_config()['stats'] if not output_stream: output_stream = open(config['location'], "w") algo_class = GetAlgorithm(config['algo']) algo = algo_class( **config ) last = None for state_change in sleep_changes( input_stream ): if last: algo.add_data( last, state_change ) last = state_change data = algo.get_result() for data_point in time_iter_all(): time = time_str_from_tuple( data_point ) output_stream.write( time + " " + str(data[time]) + "\n" ) output_stream.close() import click @click.command("stats") @click.option("--input", default=None, help="input file, defaults to [config].log.location") @click.option("--update", is_flag=True, help="manually update the zzzcron sleep statistics") @click.option("--out", is_flag=True, help="output stats to stdout. this is only required with --update") def update_stats(input, update, out): conf = load_config() if input: log = input else: log = conf['log']['location'] require_file(log) with open( log ) as logFile: if update: update_sleep_probability( logFile ) # unless the user wanted to update the stats, just print them out if (update and out) or not update: import sys require_file( conf['stats']['location'] ) with open(conf['stats']['location']) as stats_file: for line in stats_file: sys.stdout.write(line)
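

if __name__ == '__main__':
    # Minimal usage sketch (illustrative): click commands are callable, so
    # this invocation updates the statistics from the configured log file
    # and prints them to stdout. It assumes the project's config, log and
    # stats files exist as described in the config.
    update_stats(['--update', '--out'])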
apache-2.0
4,434,437,958,906,205,000
22.531646
104
0.682625
false
z01nl1o02/tests
mxnet/cn/utils.py
1
15685
from math import exp
from mxnet import gluon
from mxnet import autograd
from mxnet import nd
from mxnet import image
from mxnet.gluon import nn
import mxnet as mx
import numpy as np
from time import time
import matplotlib.pyplot as plt
import matplotlib as mpl
import random
import pdb

class DataLoader(object):
    """Similar to gluon.data.DataLoader, but might be faster.

    The main difference is that this data loader tries to read more examples
    each time. But the limits are 1) all examples in the dataset have the same
    shape, 2) the data transformer needs to process multiple examples at each
    time.
    """
    def __init__(self, dataset, batch_size, shuffle, transform=None):
        self.dataset = dataset
        self.batch_size = batch_size
        self.shuffle = shuffle
        self.transform = transform

    def __iter__(self):
        data = self.dataset[:]
        X = data[0]
        y = nd.array(data[1])
        n = X.shape[0]
        if self.shuffle:
            idx = np.arange(n)
            np.random.shuffle(idx)
            X = nd.array(X.asnumpy()[idx])
            y = nd.array(y.asnumpy()[idx])

        for i in range(n//self.batch_size):
            if self.transform is not None:
                yield self.transform(X[i*self.batch_size:(i+1)*self.batch_size],
                                     y[i*self.batch_size:(i+1)*self.batch_size])
            else:
                yield (X[i*self.batch_size:(i+1)*self.batch_size],
                       y[i*self.batch_size:(i+1)*self.batch_size])

    def __len__(self):
        return len(self.dataset)//self.batch_size

def load_data_fashion_mnist(batch_size, resize=None, root="~/.mxnet/datasets/fashion-mnist"):
    """Download the fashion mnist dataset and then load it into memory."""
    def transform_mnist(data, label):
        # Transform a batch of examples.
        if resize:
            n = data.shape[0]
            new_data = nd.zeros((n, resize, resize, data.shape[3]))
            for i in range(n):
                new_data[i] = image.imresize(data[i], resize, resize)
            data = new_data
        # change data from batch x height x width x channel to batch x channel x height x width
        return nd.transpose(data.astype('float32'), (0,3,1,2))/255, label.astype('float32')

    mnist_train = gluon.data.vision.FashionMNIST(root=root, train=True, transform=None)
    mnist_test = gluon.data.vision.FashionMNIST(root=root, train=False, transform=None)
    # Transform later to avoid memory explosion.
    train_data = DataLoader(mnist_train, batch_size, shuffle=True, transform=transform_mnist)
    test_data = DataLoader(mnist_test, batch_size, shuffle=False, transform=transform_mnist)
    return (train_data, test_data)

def try_gpu():
    """If GPU is available, return mx.gpu(0); else return mx.cpu()"""
    try:
        ctx = mx.gpu()
        _ = nd.array([0], ctx=ctx)
    except:
        ctx = mx.cpu()
    return ctx

def try_all_gpus():
    """Return all available GPUs, or [mx.cpu()] if there is no GPU"""
    ctx_list = []
    try:
        for i in range(16):
            ctx = mx.gpu(i)
            _ = nd.array([0], ctx=ctx)
            ctx_list.append(ctx)
    except:
        pass
    if not ctx_list:
        ctx_list = [mx.cpu()]
    return ctx_list

def SGD(params, lr):
    for param in params:
        param[:] = param - lr * param.grad

def accuracy(output, label):
    return nd.mean(output.argmax(axis=1)==label).asscalar()

def _get_batch(batch, ctx):
    """return data and label on ctx"""
    if isinstance(batch, mx.io.DataBatch):
        data = batch.data[0]
        label = batch.label[0]
    else:
        data, label = batch
    return (gluon.utils.split_and_load(data, ctx),
            gluon.utils.split_and_load(label, ctx),
            data.shape[0])

def evaluate_accuracy(data_iterator, net, ctx=[mx.cpu()]):
    if isinstance(ctx, mx.Context):
        ctx = [ctx]
    acc = nd.array([0])
    n = 0.
if isinstance(data_iterator, mx.io.MXDataIter) or isinstance(data_iterator,mx.image.ImageIter): data_iterator.reset() for batch in data_iterator: data, label, batch_size = _get_batch(batch, ctx) for X, y in zip(data, label): y = y.astype('float32') acc += nd.sum(net(X).argmax(axis=1)==y).copyto(mx.cpu()) n += y.size acc.wait_to_read() # don't push too many operators into backend return acc.asscalar() / n def train(train_data, test_data, net, loss, trainer, ctx, num_epochs, print_batches=None): """Train a network""" print("Start training on ", ctx) if isinstance(ctx, mx.Context): ctx = [ctx] for epoch in range(num_epochs): train_loss, train_acc, n, m = 0.0, 0.0, 0.0, 0.0 if isinstance(train_data, mx.io.MXDataIter) or isinstance(train_data,mx.image.ImageIter): train_data.reset() start = time() #i = 0 for i, batch in enumerate(train_data): #pdb.set_trace() #for batch,label in train_data: data, label, batch_size = _get_batch(batch, ctx) #batch_size = batch.shape[0] losses = [] with autograd.record(): outputs = [net(X) for X in data] losses = [loss(yhat, y) for yhat, y in zip(outputs, label)] for l in losses: l.backward() train_acc += sum([(yhat.argmax(axis=1)==y).sum().asscalar() for yhat, y in zip(outputs, label)]) train_loss += sum([l.sum().asscalar() for l in losses]) trainer.step(batch_size) n += batch_size m += sum([y.size for y in label]) if print_batches and (i+1) % print_batches == 0: print("Batch %d. Loss: %f, Train acc %f" % ( n, train_loss/n, train_acc/m )) test_acc = evaluate_accuracy(test_data, net, ctx) print("Epoch %d. Loss: %.3f, Train acc %.2f, Test acc %.2f, Time %.1f sec" % ( epoch, train_loss/n, train_acc/m, test_acc, time() - start )) print("done") class Residual(nn.HybridBlock): def __init__(self, channels, same_shape=True, **kwargs): super(Residual, self).__init__(**kwargs) self.same_shape = same_shape with self.name_scope(): strides = 1 if same_shape else 2 self.conv1 = nn.Conv2D(channels, kernel_size=3, padding=1, strides=strides) self.bn1 = nn.BatchNorm() self.conv2 = nn.Conv2D(channels, kernel_size=3, padding=1) self.bn2 = nn.BatchNorm() if not same_shape: self.conv3 = nn.Conv2D(channels, kernel_size=1, strides=strides) def hybrid_forward(self, F, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) if not self.same_shape: x = self.conv3(x) return F.relu(out + x) def resnet18(num_classes): net = nn.HybridSequential() with net.name_scope(): net.add( nn.BatchNorm(), nn.Conv2D(64, kernel_size=3, strides=1), nn.MaxPool2D(pool_size=3, strides=2), Residual(64), Residual(64), Residual(128, same_shape=False), Residual(128), Residual(256, same_shape=False), Residual(256), nn.GlobalAvgPool2D(), nn.Dense(num_classes) ) return net def show_images(imgs, nrows, ncols, figsize=None): """plot a list of images""" if not figsize: figsize = (ncols, nrows) _, figs = plt.subplots(nrows, ncols, figsize=figsize) for i in range(nrows): for j in range(ncols): figs[i][j].imshow(imgs[i*ncols+j].asnumpy()) figs[i][j].axes.get_xaxis().set_visible(False) figs[i][j].axes.get_yaxis().set_visible(False) plt.show() def data_iter_random(corpus_indices, batch_size, num_steps, ctx=None): """Sample mini-batches in a random order from sequential data.""" # Subtract 1 because label indices are corresponding input indices + 1. num_examples = (len(corpus_indices) - 1) // num_steps epoch_size = num_examples // batch_size # Randomize samples. 
example_indices = list(range(num_examples)) random.shuffle(example_indices) def _data(pos): return corpus_indices[pos: pos + num_steps] for i in range(epoch_size): # Read batch_size random samples each time. i = i * batch_size batch_indices = example_indices[i: i + batch_size] data = nd.array( [_data(j * num_steps) for j in batch_indices], ctx=ctx) label = nd.array( [_data(j * num_steps + 1) for j in batch_indices], ctx=ctx) yield data, label def data_iter_consecutive(corpus_indices, batch_size, num_steps, ctx=None): """Sample mini-batches in a consecutive order from sequential data.""" corpus_indices = nd.array(corpus_indices, ctx=ctx) data_len = len(corpus_indices) batch_len = data_len // batch_size indices = corpus_indices[0: batch_size * batch_len].reshape(( batch_size, batch_len)) # Subtract 1 because label indices are corresponding input indices + 1. epoch_size = (batch_len - 1) // num_steps for i in range(epoch_size): i = i * num_steps data = indices[:, i: i + num_steps] label = indices[:, i + 1: i + num_steps + 1] yield data, label def grad_clipping(params, clipping_norm, ctx): """Gradient clipping.""" if clipping_norm is not None: norm = nd.array([0.0], ctx) for p in params: norm += nd.sum(p.grad ** 2) norm = nd.sqrt(norm).asscalar() if norm > clipping_norm: for p in params: p.grad[:] *= clipping_norm / norm def predict_rnn(rnn, prefix, num_chars, params, hidden_dim, ctx, idx_to_char, char_to_idx, get_inputs, is_lstm=False): """Predict the next chars given the prefix.""" prefix = prefix.lower() state_h = nd.zeros(shape=(1, hidden_dim), ctx=ctx) if is_lstm: state_c = nd.zeros(shape=(1, hidden_dim), ctx=ctx) output = [char_to_idx[prefix[0]]] for i in range(num_chars + len(prefix)): X = nd.array([output[-1]], ctx=ctx) if is_lstm: Y, state_h, state_c = rnn(get_inputs(X), state_h, state_c, *params) else: Y, state_h = rnn(get_inputs(X), state_h, *params) if i < len(prefix)-1: next_input = char_to_idx[prefix[i+1]] else: next_input = int(Y[0].argmax(axis=1).asscalar()) output.append(next_input) return ''.join([idx_to_char[i] for i in output]) def train_and_predict_rnn(rnn, is_random_iter, epochs, num_steps, hidden_dim, learning_rate, clipping_norm, batch_size, pred_period, pred_len, seqs, get_params, get_inputs, ctx, corpus_indices, idx_to_char, char_to_idx, is_lstm=False): """Train an RNN model and predict the next item in the sequence.""" if is_random_iter: data_iter = data_iter_random else: data_iter = data_iter_consecutive params = get_params() softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss() for e in range(1, epochs + 1): # If consecutive sampling is used, in the same epoch, the hidden state # is initialized only at the beginning of the epoch. if not is_random_iter: state_h = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx) if is_lstm: state_c = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx) train_loss, num_examples = 0, 0 for data, label in data_iter(corpus_indices, batch_size, num_steps, ctx): # If random sampling is used, the hidden state has to be # initialized for each mini-batch. if is_random_iter: state_h = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx) if is_lstm: state_c = nd.zeros(shape=(batch_size, hidden_dim), ctx=ctx) with autograd.record(): # outputs shape: (batch_size, vocab_size) if is_lstm: outputs, state_h, state_c = rnn(get_inputs(data), state_h, state_c, *params) else: outputs, state_h = rnn(get_inputs(data), state_h, *params) # Let t_ib_j be the j-th element of the mini-batch at time i. 
# label shape: (batch_size * num_steps) # label = [t_0b_0, t_0b_1, ..., t_1b_0, t_1b_1, ..., ]. label = label.T.reshape((-1,)) # Concatenate outputs: # shape: (batch_size * num_steps, vocab_size). outputs = nd.concat(*outputs, dim=0) # Now outputs and label are aligned. loss = softmax_cross_entropy(outputs, label) loss.backward() grad_clipping(params, clipping_norm, ctx) SGD(params, learning_rate) train_loss += nd.sum(loss).asscalar() num_examples += loss.size if e % pred_period == 0: print("Epoch %d. Training perplexity %f" % (e, exp(train_loss/num_examples))) for seq in seqs: print(' - ', predict_rnn(rnn, seq, pred_len, params, hidden_dim, ctx, idx_to_char, char_to_idx, get_inputs, is_lstm)) print() def set_fig_size(mpl, figsize=(3.5, 2.5)): """set output image size for matplotlib """ mpl.rcParams['figure.figsize'] = figsize def data_iter(batch_size, num_examples, X, y): """walk around dataset""" idx = list(range(num_examples)) random.shuffle(idx) for i in range(0, num_examples, batch_size): j = nd.array(idx[i: min(i + batch_size, num_examples)]) yield X.take(j), y.take(j) def linreg(X, w, b): """linear regression""" return nd.dot(X, w) + b def squared_loss(yhat, y): return (yhat - y.reshape(yhat.shape)) ** 2 / 2 def optimize(batch_size, trainer, num_epochs, decay_epoch, log_interval, X, y, net): dataset = gluon.data.ArrayDataset(X, y) data_iter = gluon.data.DataLoader(dataset, batch_size, shuffle=True) square_loss = gluon.loss.L2Loss() y_vals = [square_loss(net(X), y).mean().asnumpy()] for epoch in range(1, num_epochs + 1): #lower lr if decay_epoch and epoch > decay_epoch: trainer.set_learning_rate(trainer.learning_rate * 0.1) for batch_i, (features, label) in enumerate(data_iter): with autograd.record(): output = net(features) loss = square_loss(output, label) loss.backward() trainer.step(batch_size) if batch_i * batch_size % log_interval == 0: y_vals.append(square_loss(net(X), y).mean().asnumpy()) print('w:', net[0].weight.data(), '\nb:', net[0].bias.data(), '\n') x_vals = np.linspace(0, num_epochs, len(y_vals), endpoint=True) semilogy(x_vals, y_vals, 'epoch', 'loss') def semilogy(x_vals, y_vals, x_label, y_label, figsize=(3.5, 2.5)): """plot log(y)""" set_fig_size(mpl, figsize) plt.semilogy(x_vals, y_vals) plt.xlabel(x_label) plt.ylabel(y_label) plt.show()
gpl-2.0
8,030,618,133,173,451,000
37.070388
99
0.562576
false
yugang/crosswalk-test-suite
webapi/tct-package-tizen-tests/inst.wgt.py
1
6800
#!/usr/bin/env python

import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option


SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
SRC_DIR = "/home/app/content"
PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)


def doCMD(cmd):
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)

    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        if output_line == '' and cmd_return_code != None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)

    return (cmd_return_code, output)


def updateCMD(cmd=None):
    if "pkgcmd" in cmd:
        cmd = "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
    return cmd


def getUSERID():
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell id -u %s" % (
            PARAMETERS.device, PARAMETERS.user)
    else:
        cmd = "ssh %s \"id -u %s\"" % (
            PARAMETERS.device, PARAMETERS.user)
    return doCMD(cmd)


def getPKGID(pkg_name=None):
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    else:
        cmd = "ssh %s \"%s\"" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))

    (return_code, output) = doCMD(cmd)
    if return_code != 0:
        return None

    test_pkg_id = None
    for line in output:
        pkg_infos = line.split()
        if len(pkg_infos) == 4:
            continue
        name = pkg_infos[5]
        name = name.lstrip('[').rstrip(']')
        print "name is: %s" % name
        if pkg_name == name:
            test_pkg_id = pkg_infos[3]
            test_pkg_id = test_pkg_id.lstrip('[').rstrip(']')
            print test_pkg_id
            break
    return test_pkg_id


def doRemoteCMD(cmd=None):
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
    else:
        cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
    return doCMD(cmd)


def doRemoteCopy(src=None, dest=None):
    if PARAMETERS.mode == "SDB":
        cmd_prefix = "sdb -s %s push" % PARAMETERS.device
        cmd = "%s %s %s" % (cmd_prefix, src, dest)
    else:
        cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)

    (return_code, output) = doCMD(cmd)
    doRemoteCMD("sync")
    # Return True on success (zero exit code) so callers can use
    # "if not doRemoteCopy(...)" to detect failures.
    if return_code != 0:
        return False
    else:
        return True


def uninstPKGs():
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith(".wgt"):
                pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
                if not pkg_id:
                    action_status = False
                    continue
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -u -t wgt -q -n %s" % pkg_id)
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break

    (return_code, output) = doRemoteCMD(
        "rm -rf %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False

    (return_code, output) = doRemoteCMD(
        "rm -rf %s/Others" % SRC_DIR)
    if return_code != 0:
        action_status = False

    return action_status


def instPKGs():
    action_status = True
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    for root, dirs, files in os.walk(SCRIPT_DIR):
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith(".wgt"):
                if not doRemoteCopy(os.path.join(root, file),
                                    "%s/%s" % (SRC_DIR, file)):
                    action_status = False
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -i -t wgt -q -p %s/%s" % (SRC_DIR, file))
                doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break

    if not doRemoteCopy("%s/mediasrc" % SCRIPT_DIR, "%s/Others" % SRC_DIR):
        action_status = False

    return action_status


def main():
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-m", dest="mode", action="store", help="Specify mode")
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        opts_parser.add_option(
            "-a", dest="user", action="store", help="User name")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception, e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)

    if not PARAMETERS.user:
        PARAMETERS.user = "app"
    if not PARAMETERS.mode:
        PARAMETERS.mode = "SDB"

    if PARAMETERS.mode == "SDB":
        if not PARAMETERS.device:
            (return_code, output) = doCMD("sdb devices")
            for line in output:
                if str.find(line, "\tdevice") != -1:
                    PARAMETERS.device = line.split("\t")[0]
                    break
    else:
        PARAMETERS.mode = "SSH"

    if not PARAMETERS.device:
        print "No device provided"
        sys.exit(1)

    user_info = getUSERID()
    re_code = user_info[0]
    if re_code == 0:
        global XW_ENV
        userid = user_info[1][0]
        XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket" % str(userid)
    else:
        print "[Error] cmd commands error : %s" % str(user_info[1])
        sys.exit(1)

    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)

    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        if not instPKGs():
            sys.exit(1)


if __name__ == "__main__":
    main()
    sys.exit(0)
bsd-3-clause
-8,329,334,811,198,033,000
28.694323
106
0.540735
false
teonlamont/pyeparse
pyeparse/hd5/tests/test_hd5.py
2
1178
# -*- coding: utf-8 -*- from numpy.testing import assert_array_equal import pytest from os import path as op from pyeparse import read_raw from pyeparse.utils import (_get_test_fnames, _TempDir, _requires_h5py, _requires_edfapi) temp_dir = _TempDir() fnames = _get_test_fnames() @_requires_edfapi @_requires_h5py def test_read_write_hd5(): """Test reading and writing of HD5.""" for fname in fnames: r = read_raw(fname) out_fname = op.join(temp_dir, 'temp.hd5') r.save(out_fname, overwrite=True) pytest.raises(IOError, r.save, out_fname) # overwrite=False r2 = read_raw(out_fname) r2.save(out_fname, overwrite=True) # double write (make sure works) r2 = read_raw(out_fname) # samples assert_array_equal(r._samples, r2._samples) # times assert_array_equal(r._times, r2._times) # discrete for key in r.discrete.keys(): assert_array_equal(r.discrete[key], r2.discrete[key]) # info assert set(r.info.keys()) == set(r2.info.keys()) assert_array_equal(r.info['calibrations'], r2.info['calibrations'])
bsd-3-clause
6,864,717,756,259,743,000
31.722222
76
0.612903
false
DeveloperJose/Vision-Rat-Brain
feature_matching_v3/exp_dynamic_programming.py
1
11157
# Author: Jose G Perez
# Version 1.0
# Last Modified: January 31, 2018
import numpy as np
import pylab as plt
from skimage import color

from util_im import imshow_matches
from util_sm import load_sm, norm_sm, norm_prob_sm
from util_sift import precompute_sift, load_sift

precompute_sift('S_BB_V4', 'PW_BB_V4')
s_im, s_label, s_kp, s_des = load_sift('S_BB_V4_SIFT.npz')
pw_im, pw_label, pw_kp, pw_des = load_sift('PW_BB_V4_SIFT.npz')
sm_matches, sm_metric = load_sm('sm_v4', s_kp, pw_kp)


def idx_to_plate(labels, plate):
    return np.where(labels == plate)


def dynamic_prog(sm, pw_penalty, s_penalty):
    ed = np.zeros((sm.shape[0] + 1, sm.shape[1] + 1))
    dir = np.zeros_like(ed)
    ed[:, 0] = np.arange(ed.shape[0]) * -s_penalty
    ed[0, :] = np.arange(ed.shape[1]) * -pw_penalty
    # ed[:,0] = ed[0,:] = 0
    for i in range(1, ed.shape[0]):
        for j in range(1, ed.shape[1]):
            choices = [ed[i, j - 1] - pw_penalty,      # 0 = left (PW move)
                       ed[i - 1, j - 1] + sm[i - 1, j - 1],  # 1 = diagonal (match)
                       ed[i - 1, j] - s_penalty]       # 2 = up (S move)
            idx = np.argmax(choices)
            dir[i, j] = idx
            ed[i, j] = choices[idx]

    return ed, dir.astype(np.uint8)


def get_pairs(dir):
    sidx = dir.shape[0] - 1
    pwidx = dir.shape[1] - 1
    pairs = []
    while sidx > 0 and pwidx > 0:
        next_dir = dir[sidx, pwidx]
        pairs.append([sidx, pwidx])
        # Traceback directions mirror the choices in dynamic_prog()
        # (and the L/D/U moves in overlay() below)
        if next_dir == 0:    # came from the left: PW move
            pwidx -= 1
        elif next_dir == 1:  # diagonal: match
            sidx -= 1
            pwidx -= 1
        else:                # came from above: S move
            sidx -= 1

    return np.array(pairs)


def pair_metric(sm_metric, pairs):
    best6_pw = get_best_pw(sm_metric, pairs, 6)
    best11_pw = get_best_pw(sm_metric, pairs, 11)
    best23_pw = get_best_pw(sm_metric, pairs, 23)
    best33_pw = get_best_pw(sm_metric, pairs, 33)

    # PW8 S6, PW11 S11, PW42 S23, PW68 S33,
    # m += np.count_nonzero(best6_pw == np.where(pw_label == 8))
    # m += np.count_nonzero(best11_pw == np.where(pw_label == 11))
    # m += np.count_nonzero(best23_pw == np.where(pw_label == 42))
    # m += np.count_nonzero(best33_pw == np.where(pw_label == 68))
    return np.min(abs(best6_pw - np.where(pw_label == 8))) + \
           np.min(abs(best11_pw - np.where(pw_label == 11))) + \
           np.min(abs(best23_pw - np.where(pw_label == 42))) + \
           np.min(abs(best33_pw - np.where(pw_label == 68)))


def overlay(dir, sm):
    # bg = norm_sm(sm, 255).astype(np.uint8)
    bg = sm.astype(np.uint8)
    color_mask = np.zeros((dir.shape[0], dir.shape[1], 3))
    sidx = sm.shape[0] - 1
    pwidx = sm.shape[1] - 1
    count = 0
    path = ['START']
    pairs = []
    while sidx >= 0 and pwidx >= 0:
        count += 1
        color_mask[sidx, pwidx] = [0, 0, 255]
        bg[sidx, pwidx] = 255
        next_dir = dir[sidx, pwidx]
        pairs.append([sidx, pwidx])
        if next_dir == 0:  # Left
            pwidx -= 1
            path.append('L')
        elif next_dir == 1:  # Diagonal
            sidx -= 1
            pwidx -= 1
            path.append('D')
        else:  # Up
            sidx -= 1
            path.append('U')

    # Remove penalty row/col
    dir = dir[1:, 1:]
    color_mask = color_mask[1:, 1:, :]

    # PW8 S6, PW11 S11, PW42 S23, PW68 S33,
    color_mask[np.where(s_label == 6), np.where(pw_label == 8)] = [255, 0, 0]
    bg[np.where(s_label == 6), np.where(pw_label == 8)] = 255

    color_mask[np.where(s_label == 11), np.where(pw_label == 11)] = [255, 0, 0]
    bg[np.where(s_label == 11), np.where(pw_label == 11)] = 255

    color_mask[np.where(s_label == 23), np.where(pw_label == 42)] = [255, 0, 0]
    bg[np.where(s_label == 23), np.where(pw_label == 42)] = 255

    color_mask[np.where(s_label == 33), np.where(pw_label == 68)] = [255, 0, 0]
    bg[np.where(s_label == 33), np.where(pw_label == 68)] = 255

    print("path", count, path)
    img_color = np.stack((bg,) * 3, axis=2)
    img_hsv = color.rgb2hsv(img_color)
    color_mask_hsv = color.rgb2hsv(color_mask)
    img_hsv[..., 0] = color_mask_hsv[..., 0]
    img_hsv[..., 1] = color_mask_hsv[..., 1]
    im_overlay = color.hsv2rgb(img_hsv)
    return im_overlay, np.array(pairs)


def error(best_pw, pw_plate, s_plate):
    # s_idx = int(np.argwhere(s_label == s_plate))
    pw_idx = int(np.argwhere(pw_label == pw_plate))
    pred_sidx = best_pw[pw_idx]
    pred_s = int(np.argwhere(s_label == pred_sidx))
    return abs(pred_s - s_plate)


def get_best_pw(sm_metric, pairs, s_plate):
    # Indices start at 0, plates start at 1
    sidx = s_plate - 1
    pidx = np.where(pairs[:, 0] == sidx)
    matches = pairs[pidx, 1].flatten()
    # return pw_label[matches] if len(matches) >= 1 else -1
    return pw_label[matches] if len(matches) >= 1 else np.array([np.inf])
    # if len(matches) > 1:
    #     metrics = sm_metric[sidx, matches]
    #     best_idx = np.argmax(metrics)
    #     return int(pw_label[matches[best_idx]])
    # elif len(matches) == 1:
    #     # Convert from PW Indices to PW Labels
    #     return int(pw_label[matches])
    # else:
    #     return -1


if __name__ == '__main__':
    # lowest_error = np.inf
    # best_pw = -1
    # best_s = -1
    # for pw_penalty in np.arange(0.4, 0.5, 0.001):
    #     for s_penalty in np.arange(0.4, 0.5, 0.001):
    #         ed, dir = dynamic_prog(norm, pw_penalty=pw_penalty, s_penalty=s_penalty)
    #         pairs = get_pairs(dir)
    #         metric = pair_metric(sm_metric, pairs)
    #         if metric < lowest_error:
    #             print("New error", metric, pw_penalty, s_penalty)
    #             lowest_error = metric
    #             best_pw = pw_penalty
    #             best_s = s_penalty

    # ed, dir = dynamic_prog(norm, pw_penalty=best_pw, s_penalty=best_s)
    # im_overlay, pairs = overlay(dir, sm_metric)
    # best6_pw = get_best_pw(sm_metric, pairs, 6)
    # best11_pw = get_best_pw(sm_metric, pairs, 11)
    # best23_pw = get_best_pw(sm_metric, pairs, 23)
    # best33_pw = get_best_pw(sm_metric, pairs, 33)
    # print("[PW8=%s], [PW11=%s], [PW42=%s [PW68=%s]" % (best6_pw, best11_pw, best23_pw, best33_pw))
    #
    # imshow_matches(im_overlay, 'Dynamic Programming')

    # import pylab as plt
    # best_pw = 200
    # best_s = 220
    # ed, dir = dynamic_prog(norm, pw_penalty=best_pw, s_penalty=best_s)
    # pairs = get_pairs(dir)
    # metric = pair_metric(sm_metric, pairs)
    # im_overlay, pairs = overlay(dir, sm_metric)
    # best6_pw = get_best_pw(sm_metric, pairs, 6)
    # best11_pw = get_best_pw(sm_metric, pairs, 11)
    # best23_pw = get_best_pw(sm_metric, pairs, 23)
    # best33_pw = get_best_pw(sm_metric, pairs, 33)
    # print("[PW8=%s], [PW11=%s], [PW42=%s [PW68=%s]" % (best6_pw, best11_pw, best23_pw, best33_pw))
    #
    # imshow_matches(im_overlay, 'Dynamic Programming')
    # plt.show()

    # mat = sm_matches
    #
    # pw_penalty = 50
    # s_penalty = 50
    # ed, dir = dynamic_prog(mat, pw_penalty=pw_penalty, s_penalty=s_penalty)
    # im_overlay, pairs = overlay(dir, mat)
    # norm = norm_sm(mat)
    #
    # import pylab as plt
    # fig, axes = plt.subplots(nrows=2, ncols=2)
    # plt.subplots_adjust(left=0.25, bottom=0.25)
    # plt.set_cmap(plt.get_cmap('hot'))
    # # axes.set_title('Dynamic')
    #
    # axes[0,0].set_title('Similarity Matrix')
    # axes[0,0].imshow(mat)
    #
    # axes[0,1].set_title('SM Norm')
    # axes[0,1].imshow(norm_prob_sm(sm_matches))
    #
    # axes[1,0].set_title('ED')
    # axes[1,1].set_title('Overlay')
    #
    # # Sliders
    # axcolor = 'lightgoldenrodyellow'
    # axfreq = plt.axes([0.25, 0.1, 0.65, 0.03], facecolor=axcolor)
    # axamp = plt.axes([0.25, 0.15, 0.65, 0.03], facecolor=axcolor)
    # # s_pwp = plt.Slider(axfreq, 'PW Penalty', 0, 1, .0001, valfmt='%.8f')
    # # s_sp = plt.Slider(axamp, 'S Penalty', 0, 1, .0001, valfmt='%.8f')
    # s_pwp = plt.Slider(axfreq, 'PW Penalty', 0, 400, 10, valfmt='%.8f')
    # s_sp = plt.Slider(axamp, 'S Penalty', 0, 400, 10, valfmt='%.8f')
    #
    # def update(val):
    #     pw_penalty = s_pwp.val
    #     s_penalty = s_sp.val
    #
    #     ed, dir = dynamic_prog(mat, pw_penalty=pw_penalty, s_penalty=s_penalty)
    #     im_overlay, pairs = overlay(dir, mat)
    #
    #     best6_pw = get_best_pw(sm_metric, pairs, 6)
    #     best11_pw = get_best_pw(sm_metric, pairs, 11)
    #     best23_pw = get_best_pw(sm_metric, pairs, 23)
    #     best33_pw = get_best_pw(sm_metric, pairs, 33)
    #     print("[PW8=%s], [PW11=%s], [PW42=%s [PW68=%s]" % (best6_pw, best11_pw, best23_pw, best33_pw))
    #
    #     axes[1,0].imshow(ed)
    #     axes[1,1].imshow(im_overlay)
    #     fig.canvas.draw_idle()
    #
    # s_pwp.on_changed(update)
    # s_sp.on_changed(update)
    # plt.show()

    #%% Runtime Experiments
    mat = sm_matches
    pw_penalty = 50
    s_penalty = 50
    ed, dir = dynamic_prog(mat, pw_penalty=pw_penalty, s_penalty=s_penalty)
    im_overlay, pairs = overlay(dir, mat)

    # Figure prep
    pw_ticks_idxs = [0]
    pw_ticks_vals = [pw_label[0]]
    for x in range(len(pw_label)):
        try:
            diff = pw_label[x + 1] - pw_label[x]
            if diff > 1:
                pw_ticks_idxs.append(x)
                pw_ticks_vals.append(pw_label[x])
                # print("IDX: ", x, "DIFF:", diff)
        except:
            continue
    pw_ticks_idxs.append(len(pw_label) - 1)
    pw_ticks_vals.append(pw_label[-1])

    # Figure
    plt.figure()
    ax = plt.gca()
    ax.set_title('Dynamic Programming Back-Tracing')
    plt.setp(ax.get_xticklabels(), rotation=90, horizontalalignment='right')
    plt.imshow(im_overlay)
    plt.xticks(pw_ticks_idxs, pw_ticks_vals)
    plt.yticks(np.arange(0, len(s_label)), np.arange(1, len(s_label) + 1))
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(8)
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(8)
    plt.xlabel('PW Level')
    plt.ylabel('S Level')

    # best_pwp = 0
    # best_sps = 0
    # best_total = np.inf
    # for pw_penalty in range(0, 200):
    #     for s_penalty in range(0, 200):
    #         ed, ed2 = dynamic_prog(norm, pw_penalty=pw_penalty, s_penalty=s_penalty)
    #         best_pw = s_label[np.argmin(ed, axis=0)]
    #
    #         # PW8 S6, PW11 S11, PW42 S23, PW68 S33,
    #         e = error(best_pw, 68, 33) + \
    #             error(best_pw, 11, 11) + \
    #             error(best_pw, 42, 23) + \
    #             error(best_pw, 68, 33)
    #
    #         if e < best_total:
    #             print("New best total", e)
    #             best_total = e
    #             best_pwp = pw_penalty
    #             best_sps = s_penalty

    # best_pwp = 200
    # best_sps = 200
    # ed, ed2 = dynamic_prog(norm, pw_penalty=best_pwp, s_penalty=best_sps)
    # im_overlay = overlay(ed, norm)

    # imshow_matches(dynamic_prog(norm, pw_penalty=1, s_penalty=1)[1], '')
    # imshow_matches(overlay(dynamic_prog(sm_matches, 0.9, 0.1)[0], sm_matches), '')

    # aoi = ed[32:35, 38:41]
    # best_s = pw_label[np.argmin(ed, axis=1)]
    # print("PW68 best match", best_pw[np.where(pw_label == 68)])
    # print("S33 best match", best_s[np.where(s_label == 33)])
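    # Minimal sketch of dynamic_prog()/get_pairs() on a toy similarity matrix
    # (hypothetical input, not part of the experiments above):
    #
    #   toy = np.array([[5., 1.],
    #                   [1., 5.]])
    #   ed, dir = dynamic_prog(toy, pw_penalty=1, s_penalty=1)
    #   pairs = get_pairs(dir)   # -> [[2, 2], [1, 1]]
    #
    # With equal penalties the traceback follows the diagonal (direction 1),
    # pairing each S row with the matching PW column; the indices include the
    # penalty row/column, so plate i shows up as index i here.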
mit
-8,387,339,234,302,177,000
34.531847
104
0.553554
false
facebook/chisel
commands/FBXCTestCommands.py
1
48565
#!/usr/bin/python # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import re import fbchisellldbbase as fb import lldb NOT_FOUND = 0xFFFFFFFF # UINT32_MAX def lldbcommands(): return [FBXCPrintDebugDescription(), FBXCPrintTree(), FBXCPrintObject(), FBXCNoId()] class FBXCPrintDebugDescription(fb.FBCommand): def name(self): return "xdebug" def description(self): return "Print debug description the XCUIElement in human readable format." def args(self): return [ fb.FBCommandArgument( arg="element", type="XCUIElement*", help="The element to print debug description.", default="__default__", ) ] def run(self, arguments, options): element = arguments[0] language = fb.currentLanguage() if element == "__default__": element = ( "XCUIApplication()" if language == lldb.eLanguageTypeSwift else "(XCUIApplication *)[[XCUIApplication alloc] init]" ) if language == lldb.eLanguageTypeSwift: print( fb.evaluateExpressionValue( "{}.debugDescription".format(element), language=language ) .GetObjectDescription() .replace("\\n", "\n") .replace("\\'", "'") .strip(' "\n\t') ) else: print( fb.evaluateExpressionValue( "[{} debugDescription]".format(element) ).GetObjectDescription() ) class FBXCPrintTree(fb.FBCommand): def name(self): return "xtree" def description(self): return "Print XCUIElement subtree." def args(self): return [ fb.FBCommandArgument( arg="element", type="XCUIElement*", help="The element to print tree.", default="__default__", ) ] def options(self): return [ fb.FBCommandArgument( arg="pointer", short="-p", long="--pointer", type="BOOL", boolean=True, default=False, help="Print pointers", ), fb.FBCommandArgument( arg="trait", short="-t", long="--traits", type="BOOL", boolean=True, default=False, help="Print traits", ), fb.FBCommandArgument( arg="frame", short="-f", long="--frame", type="BOOL", boolean=True, default=False, help="Print frames", ), ] def run(self, arguments, options): element = arguments[0] language = fb.currentLanguage() if element == "__default__": element = ( "XCUIApplication()" if language == lldb.eLanguageTypeSwift else "(XCUIApplication *)[[XCUIApplication alloc] init]" ) # Evaluate object element_sbvalue = fb.evaluateExpressionValue( "{}".format(element), language=language ) """:type: lldb.SBValue""" # Get pointer value, so it will be working in Swift and Objective-C element_pointer = int(element_sbvalue.GetValue(), 16) # Get XCElementSnapshot object snapshot = take_snapshot(element_pointer) # Print tree for snapshot element snapshot_object = XCElementSnapshot(snapshot, language=language) print( snapshot_object.tree().hierarchy_text( pointer=options.pointer, trait=options.trait, frame=options.frame ) ) class FBXCPrintObject(fb.FBCommand): def name(self): return "xobject" def description(self): return "Print XCUIElement details." 
def args(self): return [ fb.FBCommandArgument( arg="element", type="XCUIElement*", help="The element to print details.", default="__default__", ) ] def run(self, arguments, options): element = arguments[0] language = fb.currentLanguage() if element == "__default__": element = ( "XCUIApplication()" if language == lldb.eLanguageTypeSwift else "(XCUIApplication *)[[XCUIApplication alloc] init]" ) # Evaluate object element_sbvalue = fb.evaluateExpressionValue( "{}".format(element), language=language ) """:type: lldb.SBValue""" # Get pointer value, so it will be working in Swift and Objective-C element_pointer = int(element_sbvalue.GetValue(), 16) # Get XCElementSnapshot object snapshot = take_snapshot(element_pointer) # Print details of snapshot element snapshot_object = XCElementSnapshot(snapshot, language=language) print(snapshot_object.detail_summary()) class FBXCNoId(fb.FBCommand): def name(self): return "xnoid" def description(self): return "Print XCUIElement objects with label but without identifier." def args(self): return [ fb.FBCommandArgument( arg="element", type="XCUIElement*", help="The element from start to.", default="__default__", ) ] def options(self): return [ fb.FBCommandArgument( arg="status_bar", short="-s", long="--status-bar", type="BOOL", boolean=True, default=False, help="Print status bar items", ), fb.FBCommandArgument( arg="pointer", short="-p", long="--pointer", type="BOOL", boolean=True, default=False, help="Print pointers", ), fb.FBCommandArgument( arg="trait", short="-t", long="--traits", type="BOOL", boolean=True, default=False, help="Print traits", ), fb.FBCommandArgument( arg="frame", short="-f", long="--frame", type="BOOL", boolean=True, default=False, help="Print frames", ), ] def run(self, arguments, options): element = arguments[0] language = fb.currentLanguage() if element == "__default__": element = ( "XCUIApplication()" if language == lldb.eLanguageTypeSwift else "(XCUIApplication *)[[XCUIApplication alloc] init]" ) # Evaluate object element_sbvalue = fb.evaluateExpressionValue( "{}".format(element), language=language ) """:type: lldb.SBValue""" # Get pointer value, so it will be working in Swift and Objective-C element_pointer = int(element_sbvalue.GetValue(), 16) # Get XCElementSnapshot object snapshot = take_snapshot(element_pointer) # Print tree for snapshot element snapshot_object = XCElementSnapshot(snapshot, language=language) elements = snapshot_object.find_missing_identifiers( status_bar=options.status_bar ) if elements is not None: print( elements.hierarchy_text( pointer=options.pointer, trait=options.trait, frame=options.frame ) ) else: print("Couldn't found elements without identifier") def take_snapshot(element): """ Takes snapshot (XCElementSnapshot) from XCUIElement (as pointer) :param int element: Pointer to the XCUIElement :return: XCElementSnapshot object :rtype: lldb.SBValue """ return fb.evaluateExpressionValue( "(XCElementSnapshot *)[[[{} query] matchingSnapshotsWithError:nil] firstObject]".format( element ) ) class _ElementList(object): """ Store element and list of children :param XCElementSnapshot element: XCElementSnapshot :param list[_ElementList] children: List of XCElementSnapshot objects """ def __init__(self, element, children): self.element = element self.children = children def text(self, pointer, trait, frame, indent): """ String representation of the element :param bool pointer: Print pointers :param bool trait: Print traits :param bool frame: Print frames :param int indent: Indention :return: String representation of 
the element :rtype: str """ indent_string = " | " * indent return "{}{}\n".format( indent_string, self.element.summary(pointer=pointer, trait=trait, frame=frame), ) def hierarchy_text(self, pointer=False, trait=False, frame=False, indent=0): """ String representation of the hierarchy of elements :param bool pointer: Print pointers :param bool trait: Print traits :param bool frame: Print frames :param int indent: Indention :return: String representation of the hierarchy of elements :rtype: str """ s = self.text(pointer=pointer, trait=trait, frame=frame, indent=indent) for e in self.children: s += e.hierarchy_text( pointer=pointer, trait=trait, frame=frame, indent=indent + 1 ) return s class XCElementSnapshot(object): """ XCElementSnapshot wrapper :param lldb.SBValue element: XCElementSnapshot object :param str element_value: Pointer to XCElementSnapshot object :param language: Project language :param lldb.SBValue _type: XCUIElement type / XCUIElementType :param lldb.SBValue _traits: UIAccessibilityTraits :param lldb.SBValue | None _frame: XCUIElement frame :param lldb.SBValue _identifier: XCUIElement identifier :param lldb.SBValue _value: XCUIElement value :param lldb.SBValue _placeholderValue: XCUIElement placeholder value :param lldb.SBValue _label: XCUIElement label :param lldb.SBValue _title: XCUIElement title :param lldb.SBValue _children: XCUIElement children :param lldb.SBValue _enabled: XCUIElement is enabled :param lldb.SBValue _selected: XCUIElement is selected :param lldb.SBValue _isMainWindow: XCUIElement is main window :param lldb.SBValue _hasKeyboardFocus: XCUIElement has keyboard focus :param lldb.SBValue _hasFocus: XCUIElement has focus :param lldb.SBValue _generation: XCUIElement generation :param lldb.SBValue _horizontalSizeClass: XCUIElement horizontal class :param lldb.SBValue _verticalSizeClass: XCUIElement vertical class """ def __init__(self, element, language): """ :param lldb.SBValue element: XCElementSnapshot object :param language: Project language """ super(XCElementSnapshot, self).__init__() self.element = element self.element_value = self.element.GetValue() self.language = language self._type = None self._traits = None self._frame = None self._identifier = None self._value = None self._placeholderValue = None self._label = None self._title = None self._children = None self._enabled = None self._selected = None self._isMainWindow = None self._hasKeyboardFocus = None self._hasFocus = None self._generation = None self._horizontalSizeClass = None self._verticalSizeClass = None @property def is_missing_identifier(self): """ Checks if element has a label but doesn't have an identifier. :return: True if element has a label but doesn't have an identifier. 
:rtype: bool """ return len(self.identifier_value) == 0 and len(self.label_value) > 0 @property def type(self): """ :return: XCUIElement type / XCUIElementType :rtype: lldb.SBValue """ if self._type is None: name = "_elementType" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._type = fb.evaluateExpressionValue( "(int)[{} elementType]".format(self.element_value) ) else: self._type = self.element.GetChildMemberWithName(name) return self._type @property def type_value(self): """ :return: XCUIElementType value :rtype: int """ return int(self.type.GetValue()) @property def type_summary(self): """ :return: XCUIElementType summary :rtype: str """ return self.get_type_value_string(self.type_value) @property def traits(self): """ :return: UIAccessibilityTraits :rtype: lldb.SBValue """ if self._traits is None: name = "_traits" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._traits = fb.evaluateExpressionValue( "(int)[{} traits]".format(self.element_value) ) else: self._traits = self.element.GetChildMemberWithName(name) return self._traits @property def traits_value(self): """ :return: UIAccessibilityTraits value :rtype: int """ return int(self.traits.GetValue()) @property def traits_summary(self): """ :return: UIAccessibilityTraits summary :rtype: str """ return self.get_traits_value_string(self.traits_value) @property def frame(self): """ :return: XCUIElement frame :rtype: lldb.SBValue """ if self._frame is None: import_uikit() name = "_frame" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._frame = fb.evaluateExpressionValue( "(CGRect)[{} frame]".format(self.element_value) ) else: self._frame = self.element.GetChildMemberWithName(name) return self._frame @property def frame_summary(self): """ :return: XCUIElement frame summary :rtype: str """ return CGRect(self.frame).summary() @property def identifier(self): """ :return: XCUIElement identifier :rtype: lldb.SBValue """ if self._identifier is None: name = "_identifier" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._identifier = fb.evaluateExpressionValue( "(NSString *)[{} identifier]".format(self.element_value) ) else: self._identifier = self.element.GetChildMemberWithName(name) return self._identifier @property def identifier_value(self): """ :return: XCUIElement identifier value :rtype: str """ return normalize_summary(self.identifier.GetSummary()) @property def identifier_summary(self): """ :return: XCUIElement identifier summary :rtype: str | None """ if len(self.identifier_value) == 0: return None return "identifier: '{}'".format(self.identifier_value) @property def value(self): """ :return: XCUIElement value :rtype: lldb.SBValue """ if self._value is None: name = "_value" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._value = fb.evaluateExpressionValue( "(NSString *)[{} value]".format(self.element_value) ) else: self._value = self.element.GetChildMemberWithName(name) return self._value @property def value_value(self): """ :return: XCUIElement value value :rtype: str """ return normalize_summary(self.value.GetSummary()) @property def value_summary(self): """ :return: XCUIElement value summary :rtype: str | None """ if len(self.value_value) == 0: return None return "value: '{}'".format(self.value_value) @property def placeholder(self): """ :return: XCUIElement placeholder value :rtype: lldb.SBValue """ if self._placeholderValue is None: name = "_placeholderValue" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._placeholderValue = 
fb.evaluateExpressionValue( "(NSString *)[{} placeholderValue]".format(self.element_value) ) else: self._placeholderValue = self.element.GetChildMemberWithName(name) return self._placeholderValue @property def placeholder_value(self): """ :return: XCUIElement placeholderValue value :rtype: str """ return normalize_summary(self.placeholder.GetSummary()) @property def placeholder_summary(self): """ :return: XCUIElement placeholderValue summary :rtype: str | None """ if len(self.placeholder_value) == 0: return None return "placeholderValue: '{}'".format(self.placeholder_value) @property def label(self): """ :return: XCUIElement label :rtype: lldb.SBValue """ if self._label is None: name = "_label" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._label = fb.evaluateExpressionValue( "(NSString *)[{} label]".format(self.element_value) ) else: self._label = self.element.GetChildMemberWithName(name) return self._label @property def label_value(self): """ :return: XCUIElement label value :rtype: str """ return normalize_summary(self.label.GetSummary()) @property def label_summary(self): """ :return: XCUIElement label summary :rtype: str | None """ if len(self.label_value) == 0: return None return "label: '{}'".format(self.label_value) @property def title(self): """ :return: XCUIElement title :rtype: lldb.SBValue """ if self._title is None: name = "_title" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._title = fb.evaluateExpressionValue( "(NSString *)[{} title]".format(self.element_value) ) else: self._title = self.element.GetChildMemberWithName(name) return self._title @property def title_value(self): """ :return: XCUIElement title value :rtype: str """ return normalize_summary(self.title.GetSummary()) @property def title_summary(self): """ :return: XCUIElement title summary :rtype: str | None """ if len(self.title_value) == 0: return None return "title: '{}'".format(self.title_value) @property def children(self): """ :return: XCUIElement children :rtype: lldb.SBValue """ if self._children is None: name = "_children" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._children = fb.evaluateExpressionValue( "(NSArray *)[{} children]".format(self.element_value) ) else: self._children = self.element.GetChildMemberWithName(name) return self._children @property def children_count(self): """ :return: XCUIElement children count :rtype: int """ return self.children.GetNumChildren() @property def children_list(self): """ :return: XCUIElement children list :rtype: list[lldb.SBValue] """ return [self.children.GetChildAtIndex(i) for i in range(self.children_count)] @property def enabled(self): """ :return: XCUIElement is enabled :rtype: lldb.SBValue """ if self._enabled is None: name = "_enabled" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._enabled = fb.evaluateExpressionValue( "(BOOL)[{} enabled]".format(self.element_value) ) else: self._enabled = self.element.GetChildMemberWithName(name) return self._enabled @property def enabled_value(self): """ :return: XCUIElement is enabled value :rtype: bool """ return bool(self.enabled.GetValueAsSigned()) @property def enabled_summary(self): """ :return: XCUIElement is enabled summary :rtype: str | None """ if not self.enabled_value: return "enabled: {}".format(self.enabled_value) return None @property def selected(self): """ :return: XCUIElement is selected :rtype: lldb.SBValue """ if self._selected is None: name = "_selected" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: 
self._selected = fb.evaluateExpressionValue( "(BOOL)[{} selected]".format(self.element_value) ) else: self._selected = self.element.GetChildMemberWithName(name) return self._selected @property def selected_value(self): """ :return: XCUIElement is selected value :rtype: bool """ return bool(self.selected.GetValueAsSigned()) @property def selected_summary(self): """ :return: XCUIElement is selected summary :rtype: str | None """ if self.selected_value: return "selected: {}".format(self.selected_value) return None @property def is_main_window(self): """ :return: XCUIElement isMainWindow :rtype: lldb.SBValue """ if self._isMainWindow is None: name = "_isMainWindow" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._isMainWindow = fb.evaluateExpressionValue( "(BOOL)[{} isMainWindow]".format(self.element_value) ) else: self._isMainWindow = self.element.GetChildMemberWithName(name) return self._isMainWindow @property def is_main_window_value(self): """ :return: XCUIElement isMainWindow value :rtype: bool """ return bool(self.is_main_window.GetValueAsSigned()) @property def is_main_window_summary(self): """ :return: XCUIElement isMainWindow summary :rtype: str | None """ if self.is_main_window_value: return "MainWindow" return None @property def keyboard_focus(self): """ :return: XCUIElement hasKeyboardFocus :rtype: lldb.SBValue """ if self._hasKeyboardFocus is None: name = "_hasKeyboardFocus" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._hasKeyboardFocus = fb.evaluateExpressionValue( "(BOOL)[{} hasKeyboardFocus]".format(self.element_value) ) else: self._hasKeyboardFocus = self.element.GetChildMemberWithName(name) return self._hasKeyboardFocus @property def keyboard_focus_value(self): """ :return: XCUIElement hasKeyboardFocus value :rtype: bool """ return bool(self.keyboard_focus.GetValueAsSigned()) @property def keyboard_focus_summary(self): """ :return: XCUIElement hasKeyboardFocus summary :rtype: str | None """ if self.keyboard_focus_value: return "hasKeyboardFocus: {}".format(self.keyboard_focus_value) return None @property def focus(self): """ :return: XCUIElement hasFocus :rtype: lldb.SBValue """ if self._hasFocus is None: name = "_hasFocus" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._hasFocus = fb.evaluateExpressionValue( "(BOOL)[{} hasFocus]".format(self.element_value) ) else: self._hasFocus = self.element.GetChildMemberWithName(name) return self._hasFocus @property def focus_value(self): """ :return: XCUIElement hasFocus value :rtype: bool """ return bool(self.focus.GetValueAsSigned()) @property def focus_summary(self): """ :return: XCUIElement hasFocus summary :rtype: str | None """ if self.focus_value: return "hasFocus: {}".format(self.focus_value) return None @property def generation(self): """ :return: XCUIElement generation :rtype: lldb.SBValue """ if self._generation is None: name = "_generation" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._generation = fb.evaluateExpressionValue( "(unsigned int)[{} generation]".format(self.element_value) ) else: self._generation = self.element.GetChildMemberWithName(name) return self._generation @property def generation_value(self): """ :return: XCUIElement generation value :rtype: int """ return int(self.generation.GetValueAsUnsigned()) @property def horizontal_size_class(self): """ :return: XCUIElement horizontal size class :rtype: lldb.SBValue """ if self._horizontalSizeClass is None: name = "_horizontalSizeClass" if self.element.GetIndexOfChildWithName(name) == 
NOT_FOUND: self._horizontalSizeClass = fb.evaluateExpressionValue( "(int)[{} horizontalSizeClass]".format(self.element_value) ) else: self._horizontalSizeClass = self.element.GetChildMemberWithName(name) return self._horizontalSizeClass @property def horizontal_size_class_value(self): """ :return: XCUIElement horizontal size class value :rtype: int """ return int(self.horizontal_size_class.GetValue()) @property def horizontal_size_class_summary(self): """ :return: XCUIElement horizontal size class summary """ return self.get_user_interface_size_class_string( self.horizontal_size_class_value ) @property def vertical_size_class(self): """ :return: XCUIElement vertical size class :rtype: lldb.SBValue """ if self._verticalSizeClass is None: name = "_verticalSizeClass" if self.element.GetIndexOfChildWithName(name) == NOT_FOUND: self._verticalSizeClass = fb.evaluateExpressionValue( "(int)[{} verticalSizeClass]".format(self.element_value) ) else: self._verticalSizeClass = self.element.GetChildMemberWithName(name) return self._verticalSizeClass @property def vertical_size_class_value(self): """ :return: XCUIElement vertical size class value :rtype: int """ return int(self.vertical_size_class.GetValue()) @property def vertical_size_class_summary(self): """ :return: XCUIElement vertical size class summary """ return self.get_user_interface_size_class_string(self.vertical_size_class_value) @property def uniquely_identifying_objective_c_code(self): """ :return: XCUIElement uniquely identifying Objective-C code :rtype: lldb.SBValue """ return fb.evaluateExpressionValue( "(id)[{} _uniquelyIdentifyingObjectiveCCode]".format(self.element_value) ) @property def uniquely_identifying_objective_c_code_value(self): """ :return: XCUIElement uniquely identifying Objective-C code value :rtype: str """ return normalize_array_description( self.uniquely_identifying_objective_c_code.GetObjectDescription() ) @property def uniquely_identifying_swift_code(self): """ :return: XCUIElement uniquely identifying Swift code :rtype: lldb.SBValue """ return fb.evaluateExpressionValue( "(id)[{} _uniquelyIdentifyingSwiftCode]".format(self.element_value) ) @property def uniquely_identifying_swift_code_value(self): """ :return: XCUIElement uniquely identifying Swift code value :rtype: str """ return normalize_array_description( self.uniquely_identifying_swift_code.GetObjectDescription() ) @property def is_touch_bar_element(self): """ :return: XCUIElement is touch bar element :rtype: lldb.SBValue """ return fb.evaluateExpressionValue( "(BOOL)[{} isTouchBarElement]".format(self.element_value) ) @property def is_touch_bar_element_value(self): """ :return: XCUIElement is touch bar element value :rtype: bool """ return bool(self.is_touch_bar_element.GetValueAsSigned()) @property def is_top_level_touch_bar_element(self): """ :return: XCUIElement is top level touch bar element :rtype: lldb.SBValue """ return fb.evaluateExpressionValue( "(BOOL)[{} isTopLevelTouchBarElement]".format(self.element_value) ) @property def is_top_level_touch_bar_element_value(self): """ :return: XCUIElement is top level touch bar element value :rtype: bool """ return bool(self.is_top_level_touch_bar_element.GetValueAsSigned()) @property def suggested_hit_points(self): """ :return: XCUIElement suggested hit points :rtype: lldb.SBValue """ return fb.evaluateExpressionValue( "(NSArray *)[{} suggestedHitpoints]".format(self.element_value) ) @property def suggested_hit_points_value(self): """ :return: XCUIElement suggested hit points :rtype: str """ return 
normalize_array_description( self.suggested_hit_points.GetObjectDescription() ) @property def visible_frame(self): """ :return: XCUIElement visible frame :rtype: lldb.SBValue """ import_uikit() return fb.evaluateExpressionValue( "(CGRect)[{} visibleFrame]".format(self.element_value) ) @property def visible_frame_summary(self): """ :return: XCUIElement visible frame :rtype: str """ return CGRect(self.visible_frame).summary() @property def depth(self): """ :return: XCUIElement depth :rtype: lldb.SBValue """ return fb.evaluateExpressionValue("(int)[{} depth]".format(self.element_value)) @property def depth_value(self): """ :return: XCUIElement depth :rtype: int """ return int(self.depth.GetValue()) @property def hit_point(self): """ :return: XCUIElement hit point :rtype: lldb.SBValue """ import_uikit() return fb.evaluateExpressionValue( "(CGPoint)[{} hitPoint]".format(self.element_value) ) @property def hit_point_value(self): """ :return: XCUIElement hit point :rtype: str """ return CGPoint(self.hit_point).summary() @property def hit_point_for_scrolling(self): """ :return: XCUIElement hit point for scrolling :rtype: lldb.SBValue """ import_uikit() return fb.evaluateExpressionValue( "(CGPoint)[{} hitPointForScrolling]".format(self.element_value) ) @property def hit_point_for_scrolling_value(self): """ :return: XCUIElement hit point for scrolling :rtype: str """ return CGPoint(self.hit_point_for_scrolling).summary() def summary(self, pointer=False, trait=False, frame=False): """ Returns XCElementSnapshot summary :param bool pointer: Print pointers :param bool trait: Print traits :param bool frame: Print frames :return: XCElementSnapshot summary :rtype: str """ type_text = self.type_summary if pointer: type_text += " {:#x}".format(int(self.element_value, 16)) if trait: type_text += " traits: {}({:#x})".format( self.traits_summary, self.traits_value ) frame_text = self.frame_summary if frame else None identifier = self.identifier_summary label = self.label_summary title = self.title_summary value = self.value_summary placeholder = self.placeholder_summary enabled = self.enabled_summary selected = self.selected_summary main_window = self.is_main_window_summary keyboard_focus = self.keyboard_focus_summary focus = self.focus_summary texts = [ t for t in [ frame_text, identifier, label, title, value, placeholder, enabled, selected, main_window, keyboard_focus, focus, ] if t is not None ] return "{}: {}".format(type_text, ", ".join(texts)) def detail_summary(self): """ Returns XCElementSnapshot detail summary :return: XCElementSnapshot detail summary :rtype: str """ texts = list() texts.append("Pointer: {:#x}".format(int(self.element_value, 16))) texts.append("Type: {}".format(self.type_summary)) texts.append("Depth: {}".format(self.depth_value)) texts.append( "Traits: {} ({:#x})".format(self.traits_summary, self.traits_value) ) texts.append("Frame: {}".format(self.frame_summary)) texts.append("Visible frame: {}".format(self.visible_frame_summary)) texts.append("Identifier: '{}'".format(self.identifier_value)) texts.append("Label: '{}'".format(self.label_value)) texts.append("Title: '{}'".format(self.title_value)) texts.append("Value: '{}'".format(self.value_value)) texts.append("Placeholder: '{}'".format(self.placeholder_value)) if self.language != lldb.eLanguageTypeSwift: # They doesn't work on Swift :( texts.append("Hit point: {}".format(self.hit_point_value)) texts.append( "Hit point for scrolling: {}".format(self.hit_point_for_scrolling_value) ) texts.append("Enabled: 
{}".format(self.enabled_value)) texts.append("Selected: {}".format(self.selected_value)) texts.append("Main Window: {}".format(self.is_main_window_value)) texts.append("Keyboard focus: {}".format(self.keyboard_focus_value)) texts.append("Focus: {}".format(self.focus_value)) texts.append("Generation: {}".format(self.generation_value)) texts.append( "Horizontal size class: {}".format(self.horizontal_size_class_summary) ) texts.append("Vertical size class: {}".format(self.vertical_size_class_summary)) texts.append("TouchBar element: {}".format(self.is_touch_bar_element_value)) texts.append( "TouchBar top level element: {}".format( self.is_top_level_touch_bar_element_value ) ) texts.append( "Unique Objective-C: {}".format( self.uniquely_identifying_objective_c_code_value ) ) texts.append( "Unique Swift: {}".format(self.uniquely_identifying_swift_code_value) ) texts.append("Suggested hit points: {}".format(self.suggested_hit_points_value)) return "\n".join(texts) def tree(self): """ Returns tree of elements in hierarchy :return: Elements hierarchy :rtype: _ElementList """ children = [ XCElementSnapshot(e, self.language).tree() for e in self.children_list ] return _ElementList(self, children) def find_missing_identifiers(self, status_bar): """ Find element which has a label but doesn't have an identifier :param bool status_bar: Print status bar items :return: Hierarchy structure with items which has a label but doesn't have an identifier :rtype: _ElementList | None """ # Do not print status bar items if status_bar is not True and self.type_value == XCUIElementType.StatusBar: return None children_missing = [ XCElementSnapshot(e, self.language).find_missing_identifiers( status_bar=status_bar ) for e in self.children_list ] children_missing = [x for x in children_missing if x is not None] # Self and its children are not missing identifiers if self.is_missing_identifier is False and len(children_missing) == 0: return None return _ElementList(self, children_missing) @staticmethod def get_type_value_string(value): """ Get element type string from XCUIElementType (as int) :param int value: XCUIElementType (as int) :return: XCUIElementType string :rtype: str """ return XCUIElementType.name_for_value(value) @staticmethod def get_traits_value_string(value): """ Get element traits string from UIAccessibilityTraits (as int) :param int value: UIAccessibilityTraits (as int) :return: UIAccessibilityTraits string :rtype: str """ return UIAccessibilityTraits.name_for_value(value) @staticmethod def get_user_interface_size_class_string(value): """ Get user interface size class string from UIUserInterfaceSizeClass (as int) :param value: UIAccessibilityTraits (as int) :return: UIUserInterfaceSizeClass string :rtype: str """ return UIUserInterfaceSizeClass.name_for_value(value) class XCUIElementType(object): """ Represents all XCUIElementType types """ Any = 0 Other = 1 Application = 2 Group = 3 Window = 4 Sheet = 5 Drawer = 6 Alert = 7 Dialog = 8 Button = 9 RadioButton = 10 RadioGroup = 11 CheckBox = 12 DisclosureTriangle = 13 PopUpButton = 14 ComboBox = 15 MenuButton = 16 ToolbarButton = 17 Popover = 18 Keyboard = 19 Key = 20 NavigationBar = 21 TabBar = 22 TabGroup = 23 Toolbar = 24 StatusBar = 25 Table = 26 TableRow = 27 TableColumn = 28 Outline = 29 OutlineRow = 30 Browser = 31 CollectionView = 32 Slider = 33 PageIndicator = 34 ProgressIndicator = 35 ActivityIndicator = 36 SegmentedControl = 37 Picker = 38 PickerWheel = 39 Switch = 40 Toggle = 41 Link = 42 Image = 43 Icon = 44 SearchField = 45 ScrollView = 46 
ScrollBar = 47 StaticText = 48 TextField = 49 SecureTextField = 50 DatePicker = 51 TextView = 52 Menu = 53 MenuItem = 54 MenuBar = 55 MenuBarItem = 56 Map = 57 WebView = 58 IncrementArrow = 59 DecrementArrow = 60 Timeline = 61 RatingIndicator = 62 ValueIndicator = 63 SplitGroup = 64 Splitter = 65 RelevanceIndicator = 66 ColorWell = 67 HelpTag = 68 Matte = 69 DockItem = 70 Ruler = 71 RulerMarker = 72 Grid = 73 LevelIndicator = 74 Cell = 75 LayoutArea = 76 LayoutItem = 77 Handle = 78 Stepper = 79 Tab = 80 TouchBar = 81 @classmethod def _attributes_by_value(cls): """ :return: Hash of all attributes and their values :rtype: dict[int, str] """ class_attributes = set(dir(cls)) - set(dir(object)) return dict( [ (getattr(cls, n), n) for n in class_attributes if not callable(getattr(cls, n)) and not n.startswith("__") ] ) @classmethod def name_for_value(cls, value): """ Get element type string from XCUIElementType (as int) :param int value: XCUIElementType (as int) :return: Name of type :rtype: str """ attributes = cls._attributes_by_value() if value in attributes: return attributes[value] else: return "Unknown ({:#x})".format(value) class UIAccessibilityTraits(object): """ Represents all UIAccessibilityTraits types """ Button = 0x0000000000000001 Link = 0x0000000000000002 Image = 0x0000000000000004 Selected = 0x0000000000000008 PlaysSound = 0x0000000000000010 KeyboardKey = 0x0000000000000020 StaticText = 0x0000000000000040 SummaryElement = 0x0000000000000080 NotEnabled = 0x0000000000000100 UpdatesFrequently = 0x0000000000000200 SearchField = 0x0000000000000400 StartsMediaSession = 0x0000000000000800 Adjustable = 0x0000000000001000 AllowsDirectInteraction = 0x0000000000002000 CausesPageTurn = 0x0000000000004000 TabBar = 0x0000000000008000 Header = 0x0000000000010000 @classmethod def _attributes_by_value(cls): """ :return: Hash of all attributes and their values :rtype: dict[int, str] """ class_attributes = set(dir(cls)) - set(dir(object)) return dict( [ (getattr(cls, n), n) for n in class_attributes if not callable(getattr(cls, n)) and not n.startswith("__") ] ) @classmethod def name_for_value(cls, value): """ Get element traits string from UIAccessibilityTraits (as int) :param int value: UIAccessibilityTraits (as int) :return: UIAccessibilityTraits string :rtype: str """ if value == 0: return "None" traits = [] attributes = cls._attributes_by_value() for k in attributes.keys(): if value & k: traits.append(attributes[k]) if len(traits) == 0: return "Unknown" else: return ", ".join(traits) class UIUserInterfaceSizeClass(object): """ Represents all UIUserInterfaceSizeClass types """ Unspecified = 0 Compact = 1 Regular = 2 @classmethod def name_for_value(cls, value): """ Get user interface size class string from UIUserInterfaceSizeClass (as int) :param int value: UIAccessibilityTraits (as int) :return: UIUserInterfaceSizeClass string :rtype: str """ if value == cls.Unspecified: return "Unspecified" elif value == cls.Compact: return "Compact" elif value == cls.Regular: return "Regular" else: return "Unknown ({:#x})".format(value) class CGRect(object): """ CGRect wrapper :param lldb.SBValue element: CGRect object """ def __init__(self, element): """ :param lldb.SBValue element: CGRect object """ super(CGRect, self).__init__() self.element = element def summary(self): """ :return: CGRect summary :rtype: str """ origin_element = self.element.GetChildMemberWithName("origin") origin = CGPoint(origin_element) size = self.element.GetChildMemberWithName("size") width = size.GetChildMemberWithName("width") 
height = size.GetChildMemberWithName("height") width_value = float(width.GetValue()) height_value = float(height.GetValue()) return "{{{}, {{{}, {}}}}}".format(origin.summary(), width_value, height_value) class CGPoint(object): """ CGPoint wrapper :param lldb.SBValue element: CGPoint object """ def __init__(self, element): super(CGPoint, self).__init__() self.element = element def summary(self): """ :return: CGPoint summary :rtype: str """ x = self.element.GetChildMemberWithName("x") y = self.element.GetChildMemberWithName("y") x_value = float(x.GetValue()) y_value = float(y.GetValue()) return "{{{}, {}}}".format(x_value, y_value) def normalize_summary(summary): """ Normalize summary by removing "'" and "@" characters :param str summary: Summary string to normalize :return: Normalized summary string :rtype: str """ return summary.lstrip("@").strip('"') def normalize_array_description(description): """ Normalize array object description by removing "<" and ">" characters and content between them. :param str description: Array object description :return: Normalized array object description string :rtype: str """ return re.sub("^(<.*>)", "", description).strip() _uikit_imported = False def import_uikit(): """ Import UIKit framework to the debugger """ global _uikit_imported if _uikit_imported: return _uikit_imported = True fb.evaluateExpressionValue("@import UIKit") def debug(element): """ Debug helper :param lldb.SBValue element: Element to debug """ print("---") print("element: {}".format(element)) print("element class: {}".format(element.__class__)) print("element name: {}".format(element.GetName())) print("element type name: {}".format(element.GetTypeName())) print("element value: {}".format(element.GetValue())) print("element value class: {}".format(element.GetValue().__class__)) print("element value type: {}".format(element.GetValueType())) print("element value signed: {0}({0:#x})".format(element.GetValueAsSigned())) print("element value unsigned: {0}({0:#x})".format(element.GetValueAsUnsigned())) print("element summary: {}".format(element.GetSummary())) print("element description: {}".format(element.GetObjectDescription())) print("element children num: {}".format(element.GetNumChildren())) for i in range(0, element.GetNumChildren()): child = element.GetChildAtIndex(i) """:type: lldb.SBValue""" print("element child {:02}: {}".format(i, child.GetName())) print("===")
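# Illustrative lldb session using the commands and flags defined above
# (output elided); the element argument defaults to XCUIApplication():
#
#   (lldb) xdebug          # human-readable debugDescription of the app
#   (lldb) xtree -p -f     # snapshot tree with pointers and frames
#   (lldb) xobject         # detail summary for a single element snapshot
#   (lldb) xnoid -s        # labelled elements missing accessibility
#                          # identifiers, including status-bar items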
mit
-1,635,572,801,278,517,000
28.776211
99
0.564789
false
fossfreedom/indicator-sysmonitor
sensors.py
1
22495
#!/usr/bin/python3 # coding: utf-8 # # A simple indicator applet displaying cpu and memory information # # Author: Alex Eftimie <[email protected]> # Fork Author: fossfreedom <[email protected]> # Original Homepage: http://launchpad.net/indicator-sysmonitor # Fork Homepage: https://github.com/fossfreedom/indicator-sysmonitor # License: GPL v3 import json import time from threading import Thread from threading import Event import subprocess import copy import logging import re import os import platform from gettext import gettext as _ from gi.repository import GLib import psutil as ps ps_v1_api = int(ps.__version__.split('.')[0]) <= 1 B_UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB'] cpu_load = [] def bytes_to_human(num): for unit in B_UNITS: if abs(num) < 1000.0: return "%3.2f %s" % (num, unit) num /= 1000.0 return "%.2f %s" % (num, 'YB') class ISMError(Exception): """General exception.""" def __init__(self, msg): Exception.__init__(self, msg) class SensorManager(object): """Singleton""" _instance = None SETTINGS_FILE = os.getenv("HOME") + '/.indicator-sysmonitor.json' digit_regex = re.compile(r'''\d+''') class __impl: settings = { 'custom_text': 'cpu: {cpu} mem: {mem}', 'interval': 2, 'on_startup': False, 'sensors': { # 'name' => (desc, cmd) } } supported_sensors = None def __init__(self): self.sensor_instances = [CPUSensor(), NvGPUSensor(), MemSensor(), NetSensor(), NetCompSensor(), TotalNetSensor(), BatSensor(), FSSensor(), SwapSensor(), UporDownSensor(), PublicIPSensor(), CPUTemp(), NvGPUTemp()] for sensor in self.sensor_instances: self.settings['sensors'][sensor.name] = (sensor.desc, sensor.cmd) self._last_net_usage = [0, 0] # (up, down) self._fetcher = None # @staticmethod @classmethod def update_regex(self, names=None): if names is None: names = list(self.settings["sensors"].keys()) reg = '|'.join(names) reg = "\A({})\Z".format(reg) # global supported_sensors self.supported_sensors = re.compile("{}".format(reg)) def get(self, name): """ :param name: of the sensor :return: the sensor instance """ for sensor in self.sensor_instances: if sensor.check(name) is not None: return sensor return None # @staticmethod def exists(self, name): """Checks if the sensor name exists""" return bool(self.supported_sensors.match(name)) # @staticmethod def check(self, sensor_string): for sensor in self.sensor_instances: sensor.check(sensor_string) def add(self, name, desc, cmd): """Adds a custom sensors.""" if self.exists(name): raise ISMError(_("Sensor name already in use.")) self.settings["sensors"][name] = (desc, cmd) self.update_regex() def delete(self, name): """Deletes a custom sensors.""" sensors = self.settings['sensors'] names = list(sensors.keys()) if name not in names: raise ISMError(_("Sensor is not defined.")) _desc, default = sensors[name] if default is True: raise ISMError(_("Can not delete default sensors.")) del sensors[name] self.update_regex() def edit(self, name, newname, desc, cmd): """Edits a custom sensors.""" try: sensors = self.settings['sensors'] _desc, default = sensors[name] except KeyError: raise ISMError(_("Sensor does not exists.")) if default is True: raise ISMError(_("Can not edit default sensors.")) if newname != name: if newname in list(sensors.keys()): raise ISMError(_("Sensor name already in use.")) sensors[newname] = (desc, cmd) del sensors[name] self.settings["custom_text"] = self.settings["custom_text"].replace( name, newname) self.update_regex() def load_settings(self): """It gets the settings from the config file and sets them to the correct vars""" try: 
            with open(SensorManager.SETTINGS_FILE, 'r') as f:
                cfg = json.load(f)

                if cfg['custom_text'] is not None:
                    self.settings['custom_text'] = cfg['custom_text']
                if cfg['interval'] is not None:
                    self.settings['interval'] = cfg['interval']
                if cfg['on_startup'] is not None:
                    self.settings['on_startup'] = cfg['on_startup']
                if cfg['sensors'] is not None:
                    # need to merge our current list of sensors with what was
                    # previously saved
                    newcopy = self.settings['sensors']
                    newcopy.update(cfg['sensors'])
                    self.settings['sensors'] = newcopy

                self.update_regex()

        except Exception as ex:
            logging.exception(ex)
            logging.error('Reading settings failed')

    def save_settings(self):
        """It stores the current settings in the config file."""
        # TODO: use gsettings
        try:
            with open(SensorManager.SETTINGS_FILE, 'w') as f:
                f.write(json.dumps(self.settings))

        except Exception as ex:
            logging.exception(ex)
            logging.error('Writing settings failed')

    def get_guide(self):
        """Updates the label guide from appindicator."""
        # foss - I'm doubtful any of this guide stuff works - this needs to
        # be recoded - each sensor needs a sensor guide
        data = self._fetcher.fetch()

        for key in data:
            if key.startswith('fs'):
                data[key] = '000gB'
                break

        data['mem'] = data['cpu'] = data['bat'] = '000%'
        data['net'] = '↓666kB/s ↑666kB/s'

        return self.settings['custom_text'].format(**data)

    def get_label(self, data):
        """It updates the appindicator text with the values from data."""
        try:
            label = self.settings["custom_text"].format(**data) if len(data) \
                else _("(no output)")

        except KeyError as ex:
            label = _("Invalid Sensor: {}").format(ex)
        except Exception as ex:
            logging.exception(ex)
            label = _("Unknown error: {}").format(ex)

        return label

    def initiate_fetcher(self, parent):
        if self._fetcher is not None:
            self._fetcher.stop()

        self._fetcher = StatusFetcher(parent)
        self._fetcher.start()
        logging.info("Fetcher started")

    def fill_liststore(self, list_store):
        sensors = self.settings['sensors']
        for name in list(sensors.keys()):
            list_store.append([name, sensors[name][0]])

    def get_command(self, name):
        cmd = self.settings["sensors"][name][1]
        return cmd

    def set_custom_text(self, custom_text):
        self.settings["custom_text"] = custom_text

    def get_custom_text(self):
        return self.settings["custom_text"]

    def set_interval(self, interval):
        self.settings["interval"] = interval

    def get_interval(self):
        return self.settings["interval"]

    def get_results(self):
        """Return a dict whose elements are the sensors and their values."""
        res = {}
        from preferences import Preferences

        # We call this only once per update
        global cpu_load
        cpu_load = ps.cpu_percent(interval=0, percpu=True)

        # custom_text is the full visible string seen in the Preferences
        # edit field
        for sensor in Preferences.sensors_regex.findall(
                self.settings["custom_text"]):

            sensor = sensor[1:-1]
            instance = self.get(sensor)

            if instance:
                value = instance.get_value(sensor)
                if value:
                    res[sensor] = value

            else:  # custom sensor
                res[sensor] = BaseSensor.script_exec(
                    self.settings["sensors"][sensor][1])

        return res

    def __init__(self):
        if SensorManager._instance is None:
            SensorManager._instance = SensorManager.__impl()

        # Store instance reference as the only member in the handle
        self.__dict__['_SensorManager__instance'] = SensorManager._instance

    def __getattr__(self, attr):
        """ Delegate access to implementation """
        return getattr(self.__instance, attr)

    def __setattr__(self, attr, value):
        """ Delegate access to implementation """
        return setattr(self.__instance, attr, value)


class BaseSensor(object):
    name = ''
    desc = ''
    cmd = True

    def check(self, sensor):
        """Check whether the sensor string passed in is valid.

        :param sensor: string representation of the sensor
        :return: True if the sensor is understood and passes the check, or
            an Exception if the format of the sensor string is wrong.
            None is returned if the sensor string has nothing to do with
            the Sensor name.
        """
        if sensor == self.name:
            return True

    def get_value(self, sensor_data):
        return None

    @staticmethod
    def script_exec(command):
        """Execute a custom command."""
        try:
            output = subprocess.Popen(command, stdout=subprocess.PIPE,
                                      shell=True).communicate()[0].strip()
        except Exception:
            logging.error(_("Error running: {}").format(command))
            return _("Error")

        return output.decode('utf-8') if output else _("(no output)")


class NvGPUSensor(BaseSensor):
    name = 'nvgpu'
    desc = _('Nvidia GPU utilization')

    def get_value(self, sensor):
        if sensor == 'nvgpu':
            return "{:02.0f}%".format(self._fetch_gpu())

    def _fetch_gpu(self):
        result = subprocess.check_output(
            ['nvidia-smi', '--query-gpu=utilization.gpu', '--format=csv'])
        # the second line holds the value, e.g. '85 %'; strip the ' %' suffix
        perc = result.splitlines()[1].decode('utf-8')
        perc = perc[:-2]
        return int(perc)


class NvGPUTemp(BaseSensor):
    """Return GPU temperature expressed in Celsius."""
    name = 'nvgputemp'
    desc = _('Nvidia GPU Temperature')

    def get_value(self, sensor):
        # degrees symbol is unicode U+00B0
        return "{}\u00B0C".format(self._fetch_gputemp())

    def _fetch_gputemp(self):
        result = subprocess.check_output(
            ['nvidia-smi', '--query-gpu=temperature.gpu', '--format=csv'])
        perc = result.splitlines()[1].decode('utf-8')
        return int(perc)


class CPUSensor(BaseSensor):
    name = r'cpu\d*'
    desc = _('Average CPU usage')
    cpus = re.compile(r"\Acpu\d*\Z")
    last = None

    if ps_v1_api:
        cpu_count = ps.NUM_CPUS
    else:
        cpu_count = ps.cpu_count()

    def check(self, sensor):
        if self.cpus.match(sensor):
            # 'cpu' means the average over all CPUs, 'cpuN' a single core
            nber = int(sensor[3:]) if len(sensor) > 3 else 0
            if nber >= self.cpu_count:
                raise ISMError(_("Invalid number of CPUs."))
            return True

    def get_value(self, sensor):
        if sensor == 'cpu':
            return "{:02.0f}%".format(self._fetch_cpu())
        elif CPUSensor.cpus.match(sensor):
            cpus = self._fetch_cpu(percpu=True)
            return "{:02.0f}%".format(cpus[int(sensor[3:])])

        return None

    def _fetch_cpu(self, percpu=False):
        if percpu:
            return cpu_load

        r = 0.0
        for i in cpu_load:
            r += i
        r /= self.cpu_count

        return r


class MemSensor(BaseSensor):
    name = 'mem'
    desc = _('Physical memory in use.')

    def get_value(self, sensor_data):
        return '{:02.0f}%'.format(self._fetch_mem())

    def _fetch_mem(self):
        """It gets the total memory info and returns the used percentage."""
        def grep(pattern, word_list):
            expr = re.compile(pattern)
            arr = [elem for elem in word_list if expr.match(elem)]
            return arr[0]

        with open('/proc/meminfo') as meminfofile:
            meminfo = meminfofile.readlines()

        total = SensorManager.digit_regex.findall(grep("MemTotal", meminfo))[0]

        release = re.split(r'\.', platform.release())
        major_version = int(release[0])
        minor_version = int(re.search(r'\d+', release[1]).group())

        if (minor_version >= 16 and major_version == 3) or (major_version > 3):
            available = SensorManager.digit_regex.findall(
                grep("MemAvailable", meminfo))[0]
            return 100 - 100 * int(available) / float(total)
        else:
            free = SensorManager.digit_regex.findall(
                grep("MemFree", meminfo))[0]
            cached = SensorManager.digit_regex.findall(
                grep("Cached", meminfo))[0]
            free = int(free) + int(cached)
            return 100 - 100 * free / float(total)


class NetSensor(BaseSensor):
    name = 'net'
    desc = _('Network activity.')
    _last_net_usage = [0, 0]  # (up, down)

    def get_value(self, sensor_data):
        return self._fetch_net()

    def _fetch_net(self):
        """It returns the bytes sent and received in bytes/second."""
        current = [0, 0]
        for _, iostat in list(ps.net_io_counters(pernic=True).items()):
            current[0] += iostat.bytes_recv
            current[1] += iostat.bytes_sent
        dummy = copy.deepcopy(current)

        current[0] -= self._last_net_usage[0]
        current[1] -= self._last_net_usage[1]
        self._last_net_usage = dummy
        mgr = SensorManager()
        current[0] /= mgr.get_interval()
        current[1] /= mgr.get_interval()
        return '↓ {:>9s}/s ↑ {:>9s}/s'.format(bytes_to_human(current[0]),
                                              bytes_to_human(current[1]))


class NetCompSensor(BaseSensor):
    name = 'netcomp'
    desc = _('Network activity in Compact form.')
    _last_net_usage = [0, 0]  # (up, down)

    def get_value(self, sensor_data):
        return self._fetch_net()

    def _fetch_net(self):
        """It returns the bytes sent and received in bytes/second."""
        current = [0, 0]
        for _, iostat in list(ps.net_io_counters(pernic=True).items()):
            current[0] += iostat.bytes_recv
            current[1] += iostat.bytes_sent
        dummy = copy.deepcopy(current)

        current[0] -= self._last_net_usage[0]
        current[1] -= self._last_net_usage[1]
        self._last_net_usage = dummy
        mgr = SensorManager()
        current[0] /= mgr.get_interval()
        current[1] /= mgr.get_interval()
        return '⇵ {:>9s}/s'.format(bytes_to_human(current[0] + current[1]))


class TotalNetSensor(BaseSensor):
    name = 'totalnet'
    desc = _('Total Network activity.')

    def get_value(self, sensor_data):
        return self._fetch_net()

    def _fetch_net(self):
        """It returns the total number of bytes sent and received."""
        current = [0, 0]
        for _, iostat in list(ps.net_io_counters(pernic=True).items()):
            current[0] += iostat.bytes_recv
            current[1] += iostat.bytes_sent

        mgr = SensorManager()
        current[0] /= mgr.get_interval()
        current[1] /= mgr.get_interval()
        return ' Σ {:>9s}'.format(bytes_to_human(current[0] + current[1]))


class BatSensor(BaseSensor):
    name = r'bat\d*'
    desc = _('Battery capacity.')
    bat = re.compile(r"\Abat\d*\Z")

    def check(self, sensor):
        if self.bat.match(sensor):
            bat_id = int(sensor[3:]) if len(sensor) > 3 else 0
            if not os.path.exists(
                    "/sys/class/power_supply/BAT{}".format(bat_id)):
                raise ISMError(
                    _("Invalid number returned for the Battery sensor."))
            return True

    def get_value(self, sensor):
        if BatSensor.bat.match(sensor):
            bat_id = int(sensor[3:]) if len(sensor) > 3 else 0
            return '{:02.0f}%'.format(self._fetch_bat(bat_id))

        return None

    def _fetch_bat(self, batid):
        """Fetch the amount of remaining battery charge."""
        try:
            with open("/sys/class/power_supply/BAT{}/capacity".format(
                    batid)) as state:
                capacity = int(state.readline())
        except IOError:
            return "N/A"

        return capacity


class FSSensor(BaseSensor):
    name = 'fs//.+'
    desc = _('Available space in file system.')

    def check(self, sensor):
        if sensor.startswith("fs//"):
            path = sensor.split("//")[1]
            if not os.path.exists(path):
                raise ISMError(_("Path: {} doesn't exist.").format(path))
            return True

    def get_value(self, sensor):
        if sensor.startswith('fs//'):
            parts = sensor.split('//')
            return self._fetch_fs(parts[1])

        return None

    def _fetch_fs(self, mount_point):
        """It returns the amount of bytes available in the fs in
        a human-readable format."""
        if not os.access(mount_point, os.F_OK):
            return None

        stat = os.statvfs(mount_point)
        bytes_ = stat.f_bavail * stat.f_frsize

        for unit in B_UNITS:
            if bytes_ < 1024:
                return "{} {}".format(round(bytes_, 2), unit)
            bytes_ /= 1024


class SwapSensor(BaseSensor):
    name = 'swap'
    desc = _("Average swap usage")

    def get_value(self, sensor):
        return '{:02.0f}%'.format(self._fetch_swap())

    def _fetch_swap(self):
        """Return the swap usage in percent."""
        usage = 0
        total = 0
        try:
            with open("/proc/swaps") as swaps:
                swaps.readline()
                for line in swaps.readlines():
                    _name, _type, total_, usage_, _prio = line.split()
                    total += int(total_)
                    usage += int(usage_)

                if total == 0:
                    return 0
                else:
                    return usage * 100 / total

        except IOError:
            return "N/A"


class UporDownSensor(BaseSensor):
    name = 'upordown'
    desc = _("Display if your internet connection is up or down")

    command = 'if wget -qO /dev/null google.com > /dev/null; then echo "☺"; else echo "☹"; fi'
    current_val = ""
    lasttime = 0  # we refresh this every 10 seconds

    def get_value(self, sensor):
        if self.current_val == "" or self.lasttime == 0 or \
                (time.time() - self.lasttime) > 10:
            self.current_val = self.script_exec(self.command)
            self.lasttime = time.time()

        return self.current_val


class PublicIPSensor(BaseSensor):
    name = 'publicip'
    desc = _("Display your public IP address")

    command = 'curl ipv4.icanhazip.com'
    current_ip = ""
    lasttime = 0  # we refresh this every 10 minutes

    def get_value(self, sensor):
        if self.current_ip == "" or self.lasttime == 0 or \
                (time.time() - self.lasttime) > 600:
            self.current_ip = self.script_exec(self.command)
            self.lasttime = time.time()

        return self.current_ip


class CPUTemp(BaseSensor):
    """Return CPU temperature expressed in Celsius."""
    name = 'cputemp'
    desc = _('CPU temperature')

    def get_value(self, sensor):
        # degrees symbol is unicode U+00B0
        return "{:02.0f}\u00B0C".format(self._fetch_cputemp())

    def _fetch_cputemp(self):
        # http://www.mjmwired.net/kernel/Documentation/hwmon/sysfs-interface
        # first try the following sys file
        # /sys/class/thermal/thermal_zone0/temp
        # if that fails try various hwmon files

        def cat(file):
            with open(file, 'r') as f:
                return f.read().strip()

        ret = None
        zone = "/sys/class/thermal/thermal_zone0/"
        try:
            ret = int(cat(os.path.join(zone, 'temp'))) / 1000
        except (IOError, OSError, ValueError):
            pass

        if ret:
            return ret

        base = '/sys/class/hwmon/'
        ls = sorted(os.listdir(base))
        assert ls, "%r is empty" % base
        for hwmon in ls:
            hwmon = os.path.join(base, hwmon)

            try:
                ret = int(cat(os.path.join(hwmon, 'temp1_input'))) / 1000
                break
            except (IOError, OSError, ValueError):
                pass

        # if fahrenheit:
        #     digits = [(x * 1.8) + 32 for x in digits]

        return ret


class StatusFetcher(Thread):
    """It collects the info about the sensors."""
    def __init__(self, parent):
        Thread.__init__(self)
        self._parent = parent
        self.mgr = SensorManager()
        self.alive = Event()
        self.alive.set()
        GLib.timeout_add_seconds(self.mgr.get_interval(), self.run)

    def fetch(self):
        return self.mgr.get_results()

    def stop(self):
        self.alive.clear()

    def run(self):
        data = self.fetch()
        self._parent.update(data)
        if self.alive.is_set():
            return True
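
# --- Hedged usage sketch (not part of the original file) ---
# A minimal custom sensor following the pattern above: ``name`` is the
# token users put between braces in the custom text, the inherited
# ``check()`` accepts it, and ``get_value()`` returns the rendered
# string. The class name ``UptimeSensor`` and the /proc/uptime parsing
# are illustrative assumptions, not part of the original codebase.
class UptimeSensor(BaseSensor):
    name = 'uptime'
    desc = _('System uptime in hours')

    def get_value(self, sensor):
        if sensor == 'uptime':
            # first field of /proc/uptime is uptime in seconds
            with open('/proc/uptime') as f:
                seconds = float(f.read().split()[0])
            return '{:.1f}h'.format(seconds / 3600.0)
        return None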
gpl-3.0
-5,165,070,681,103,870,000
30.178918
120
0.532117
false
mbodenhamer/syn
syn/base/b/tests/test_meta.py
1
12550
import six from nose.tools import assert_raises from syn.base_utils import GroupDict, AttrDict, assert_type_equivalent, \ ReflexiveDict, SeqDict, getfunc, Precedes from syn.type.a import AnyType, TypeType from syn.base.b.meta import Attr, Attrs, Meta, Data from syn.base.a.meta import mro from syn.base.b.meta import create_hook, pre_create_hook from functools import partial def Prec(x): return partial(Precedes, x) #------------------------------------------------------------------------------- # Data Object def test_data(): d = Data() d.a = 1 assert d.a == 1 assert d.b == [] #------------------------------------------------------------------------------- # Attr def test_attr(): a = Attr() assert isinstance(a.type, AnyType) assert a.default is None assert a.doc == '' assert a.optional is False assert a.call is None assert a.group is None assert a.groups is None a = Attr(int, 1, 'A number', optional=True, call=int, group='c', groups=('a', 'b')) assert isinstance(a.type, TypeType) assert a.type.type is int assert a.default == 1 assert a.doc == 'A number' assert a.optional is True assert a.call is int assert a.group == 'c' assert a.groups == ('a', 'b') #------------------------------------------------------------------------------- # Attrs def test_attrs(): fe = lambda self: self.a + 1 attrs = Attrs(a = Attr(int, doc='value 1', group='a'), b = Attr(float, 3.4, group='b'), c = Attr(str, doc='value 2', optional=True, groups=['a', 'b']), d = Attr(list, doc='value 3', optional=True, call=list), e = Attr(int, internal=True, init=fe), ) assert attrs.attrs == set(['a', 'b', 'c', 'd', 'e']) assert attrs.types['a'].type is int assert attrs.types['b'].type is float assert attrs.types['c'].type is str assert attrs.types['d'].type is list assert attrs.types['e'].type is int assert attrs.required == {'a', 'b', 'e'} assert attrs.optional == {'c', 'd'} assert attrs.defaults == dict(b = 3.4) assert attrs.doc == dict(a = 'value 1', c = 'value 2', d = 'value 3') assert attrs.call == dict(d = list) assert attrs.init == dict(e = fe) assert attrs.internal == set(['e']) assert attrs.groups == GroupDict(a = set(['a', 'c']), b = set(['b', 'c'])) #------------------------------------------------------------------------------- # Meta def test_meta(): foo = lambda x: x @six.add_metaclass(Meta) class A(object): _groups = ReflexiveDict('g1', 'g2') _attrs = Attrs(a = Attr(int, doc='value 1', group=_groups.g1), b = Attr(float, 3.4, group=_groups.g2), c = Attr(str, doc='value 2', optional=True, init=foo) ) _opts = AttrDict(x = 1, y = 2.3) class B(A): _attrs = dict(c = Attr(dict, group=A.groups_enum().g1), d = Attr(list, default=[1, 2])) _opts = dict(y = 3.4, z = 'abc') class B2(A): _attrs = dict(c = Attr(dict, group=A.groups_enum().g1, override_parent=True), d = Attr(list, default=[1, 2])) _opts = dict(y = 3.4, z = 'abc') assert A._attrs.types['a'].type is int assert A._attrs.types['b'].type is float assert A._attrs.types['c'].type is str assert A._attrs.required == {'a', 'b'} assert A._attrs.optional == {'c'} assert A._attrs.defaults == dict(b = 3.4) assert A._attrs.init == dict(c = foo) assert A._attrs.doc == dict(a = 'value 1', c = 'value 2') assert A._attrs.attrs == set(['a', 'b', 'c']) assert_type_equivalent(A._groups, GroupDict(_all = set(['a', 'b', 'c']), _internal = set([]), g1 = set(['a']), g2 = set(['b']))) assert B._attrs.types['a'].type is int assert B._attrs.types['b'].type is float assert B._attrs.types['c'].type is dict assert B._attrs.types['d'].type is list assert B._attrs.required == {'a', 'b', 'd'} assert 
B._attrs.optional == set(['c']) assert B._attrs.defaults == dict(b = 3.4, d = [1, 2]) assert B._attrs.init == dict(c = foo) assert B._attrs.doc == dict(a = 'value 1', c = 'value 2') assert B._attrs.attrs == set(['a', 'b', 'c', 'd']) assert_type_equivalent(B._groups, GroupDict(_all = set(['a', 'b', 'c', 'd']), _internal = set([]), g1 = set(['a', 'c']), g2 = set(['b']))) assert B2._attrs.types['a'].type is int assert B2._attrs.types['b'].type is float assert B2._attrs.types['c'].type is dict assert B2._attrs.types['d'].type is list assert B2._attrs.required == {'a', 'b', 'c', 'd'} assert B2._attrs.optional == set([]) assert B2._attrs.defaults == dict(b = 3.4, d = [1, 2]) assert B2._attrs.init == dict() assert B2._attrs.doc == dict(a = 'value 1') assert B2._attrs.attrs == set(['a', 'b', 'c', 'd']) assert_type_equivalent(B2._groups, GroupDict(_all = set(['a', 'b', 'c', 'd']), _internal = set([]), g1 = set(['a', 'c']), g2 = set(['b']))) assert A._opts == dict(x = 1, y = 2.3) assert B._opts == dict(x = 1, y = 3.4, z = 'abc') assert B2._opts == dict(x = 1, y = 3.4, z = 'abc') class M1(type): pass @six.add_metaclass(M1) class C(object): _attrs = Attrs(a = Attr(float), e = Attr(set, internal=True)) class M2(Meta, M1): pass class D(six.with_metaclass(M2, B, C)): _groups = ReflexiveDict('g3') assert mro(D) == [D, B, A, C, object] assert D._attrs.types['a'].type is int assert D._attrs.types['b'].type is float assert D._attrs.types['c'].type is dict assert D._attrs.types['d'].type is list assert D._attrs.types['e'].type is set assert D._attrs.required == {'a', 'b', 'd', 'e'} assert D._attrs.optional == set(['c']) assert D._attrs.defaults == dict(b = 3.4, d = [1, 2]) assert D._attrs.doc == dict(a = 'value 1', c = 'value 2') assert_type_equivalent(D._groups, GroupDict(_all = set(['a', 'b', 'c', 'd', 'e']), _internal = set(['e']), g1 = set(['a', 'c']), g2 = set(['b']), g3 = set())) assert_type_equivalent(D._opts, AttrDict(x = 1, y = 3.4, z = 'abc')) # Test default blank attrs @six.add_metaclass(Meta) class E(object): pass assert_type_equivalent(E._opts, AttrDict()) assert_type_equivalent(E._attrs, Attrs()) assert_type_equivalent(E._groups, GroupDict()) #------------------------------------------------------------------------------- # Test _get_opt @six.add_metaclass(Meta) class GetOpt(object): _opts = dict(a = 1, b = 2) _seq_opts = dict(a = [1, 2], b = (3, 4)) def test_getopt(): assert GetOpt._get_opt('a') == 1 assert GetOpt._get_opt('b') == 2 assert_raises(AttributeError, GetOpt._get_opt, 'c') assert GetOpt._get_opt('c', default=1) == 1 assert GetOpt._get_opt('c', default=list) == [] #------------------------------------------------------------------------------- # Test _populate_data @six.add_metaclass(Meta) class PopTest(object): _seq_opts = SeqDict(a = ('a1', 'a2'), b = ['b1', 'b2'], metaclass_lookup = ('a', 'b')) a1 = 1 def a2(self): return self.a1 + 1 @classmethod def b1(cls): return cls.a1 + 2 @staticmethod def b2(x): return x + 3 class PopTest2(PopTest): @classmethod def b1(cls): return super(PopTest2, cls).b1() + 4 def test__populate_data(): pt = PopTest() assert isinstance(PopTest._data.a, tuple) assert len(PopTest._data.a) == 2 assert PopTest._data.a[0] == 1 == PopTest.a1 a2 = PopTest._data.a[1] assert a2(pt) == 2 assert isinstance(PopTest._data.b, list) assert len(PopTest._data.b) == 2 b1 = PopTest._data.b[0] b2 = PopTest._data.b[1] assert b1() == 3 assert b2(3) == 6 pt = PopTest2() assert isinstance(PopTest2._data.a, tuple) assert len(PopTest2._data.a) == 2 assert PopTest2._data.a[0] == 1 == 
PopTest2.a1 a2 = PopTest2._data.a[1] assert a2(pt) == 2 assert isinstance(PopTest2._data.b, list) assert len(PopTest2._data.b) == 2 b1 = PopTest2._data.b[0] b2 = PopTest2._data.b[1] assert b1() == 7 assert b2(3) == 6 #------------------------------------------------------------------------------- # Test create hooks @six.add_metaclass(Meta) class CreateHooks(object): a = 1 @classmethod @create_hook def hook1(cls): cls.a *= 2 class CHA(CreateHooks): _seq_opts = dict(create_hooks = ['hook2'], metaclass_lookup = ['create_hooks']) a = 3 @classmethod def hook2(cls): cls.b = 5 class PseudoHook(object): def __getattr__(self, attr): return type(self) def __call__(self): raise TypeError("Not Callable!!") class CHBad(CreateHooks): a = 5 b = PseudoHook() def test_create_hooks(): assert CreateHooks.a == 2 assert CHA.a == 6 assert CHA.b == 5 ph = PseudoHook() assert_raises(TypeError, ph) assert ph.foobar is PseudoHook assert CHBad.a == 10 assert isinstance(CHBad.b, PseudoHook) #------------------------------------------------------------------------------- # Test Pre-Create Hooks @six.add_metaclass(Meta) class PCHooks(object): a = 1 b = 3 @pre_create_hook def hook1(clsdata): dct = clsdata['dct'] if 'a' in dct: dct['a'] *= 2 @pre_create_hook(order=Prec('hook1'), persist=False) def hook2(clsdata): clsdata['dct']['b'] += 2 class PC2(PCHooks): b = 2 class PC3(PC2): a = 1 class PC4(PC3): a = 10 @pre_create_hook def hook1(clsdata): getfunc(PC3.hook1)(clsdata) clsdata['dct']['a'] //= 2 class PC5(PCHooks): a = 1 @pre_create_hook(order=Prec('hook1')) def hook3(clsdata): clsdata['dct']['c'] = clsdata['dct']['a'] * 3 class PC6(PC4, PC5): a = 10 class PC7(PC5, PC4): a = 10 def test_preprocess_hooks(): assert PCHooks.a == 2 assert PC2.a == 2 assert PC3.a == 2 assert PC4.a == 10 assert PC5.a == 2 assert PC6.a == 10 assert PC7.a == 20 assert PCHooks.b == 5 assert PC2.b == 2 assert PC5.b == 5 assert PC6.b == 2 assert PC7.b == 2 assert PC5.c == 6 assert PC6.c == 30 assert PC7.c == 60 #------------------------------------------------------------------------------- # Test register_subclasses @six.add_metaclass(Meta) class Reg1(object): _opts = dict(register_subclasses = True) class Reg2(Reg1): pass class Reg3(Reg2): _opts = dict(register_subclasses = False) @six.add_metaclass(Meta) class Reg4(object): pass class Reg5(Reg4): _opts = dict(register_subclasses = True) class Reg6(object): pass class Reg7(Reg5, Reg6): pass def test_register_subclasses(): assert Reg1._data.subclasses == [Reg1, Reg2] assert Reg2._data.subclasses == [Reg2] assert Reg3._data.subclasses == [] assert Reg4._data.subclasses == [] assert Reg5._data.subclasses == [Reg5, Reg7] assert not hasattr(Reg6, '_data') assert Reg7._data.subclasses == [Reg7] #------------------------------------------------------------------------------- if __name__ == '__main__': # pragma: no cover from syn.base_utils import run_all_tests run_all_tests(globals(), verbose=True, print_errors=False)
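
#-------------------------------------------------------------------------------
# Hedged sketch (not part of the original test module): isolates the
# persist=False behavior exercised by PCHooks/PC2 above. The hook mutates the
# class dict once at creation time and, because it does not persist, is not
# re-applied when a subclass redefines the attribute. The names PCDemo and
# PCDemoChild are illustrative assumptions, as is calling pre_create_hook
# with only the persist keyword (the parametrized form above suggests this
# is accepted).

@six.add_metaclass(Meta)
class PCDemo(object):
    b = 1

    @pre_create_hook(persist=False)
    def bump(clsdata):
        clsdata['dct']['b'] += 10

class PCDemoChild(PCDemo):
    b = 2

def test_pcdemo():
    assert PCDemo.b == 11       # hook ran once when PCDemo was created
    assert PCDemoChild.b == 2   # persist=False: not applied to the subclass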
mit
5,729,047,410,053,840,000
27.522727
81
0.478725
false
skirpichev/omg
diofant/combinatorics/polyhedron.py
1
27227
from ..core import Basic, Tuple
from ..core.compatibility import as_int
from ..sets import FiniteSet
from ..utilities import flatten, unflatten
from ..utilities.iterables import minlex
from .perm_groups import PermutationGroup
from .permutations import Permutation

rmul = Permutation.rmul


class Polyhedron(Basic):
    """
    Represents the polyhedral symmetry group (PSG).

    The PSG is one of the symmetry groups of the Platonic solids.
    There are three polyhedral groups: the tetrahedral group of order 12,
    the octahedral group of order 24, and the icosahedral group of order 60.

    All doctests have been given in the docstring of the constructor
    of the object.

    References
    ==========

    http://mathworld.wolfram.com/PolyhedralGroup.html
    """

    _edges = None

    def __new__(cls, corners, faces=[], pgroup=[]):
        """
        The constructor of the Polyhedron group object.

        It takes up to three parameters: the corners, faces, and
        allowed transformations.

        The corners/vertices are entered as a list of arbitrary
        expressions that are used to identify each vertex.

        The faces are entered as a list of tuples of indices; a tuple
        of indices identifies the vertices which define the face. They
        should be entered in a cw or ccw order; they will be standardized
        by reversal and rotation to give the lowest lexical ordering.
        If no faces are given then no edges will be computed.

            >>> Polyhedron(list('abc'), [(1, 2, 0)]).faces
            {(0, 1, 2)}
            >>> Polyhedron(list('abc'), [(1, 0, 2)]).faces
            {(0, 1, 2)}

        The allowed transformations are entered as allowable permutations
        of the vertices for the polyhedron. Instances of Permutation
        (as with faces) should refer to the supplied vertices by index.
        These permutations are stored as a PermutationGroup.

        Examples
        ========

        >>> Permutation.print_cyclic = False
        >>> from diofant.abc import w

        Here we construct the Polyhedron object for a tetrahedron.

        >>> corners = [w, x, y, z]
        >>> faces = [(0, 1, 2), (0, 2, 3), (0, 3, 1), (1, 2, 3)]

        Next, allowed transformations of the polyhedron must be given. This
        is given as permutations of vertices.

        Although the vertices of a tetrahedron can be numbered in 24 (4!)
        different ways, there are only 12 different orientations for a
        physical tetrahedron. The following permutations, applied once or
        twice, will generate all 12 of the orientations. (The identity
        permutation, Permutation(range(4)), is not included since it does
        not change the orientation of the vertices.)

        >>> pgroup = [Permutation([[0, 1, 2], [3]]),
        ...           Permutation([[0, 1, 3], [2]]),
        ...           Permutation([[0, 2, 3], [1]]),
        ...           Permutation([[1, 2, 3], [0]]),
        ...           Permutation([[0, 1], [2, 3]]),
        ...           Permutation([[0, 2], [1, 3]]),
        ...           Permutation([[0, 3], [1, 2]])]

        The Polyhedron is now constructed and demonstrated:

        >>> tetra = Polyhedron(corners, faces, pgroup)
        >>> tetra.size
        4
        >>> tetra.edges
        {(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)}
        >>> tetra.corners
        (w, x, y, z)

        It can be rotated with an arbitrary permutation of vertices, e.g.
        the following permutation is not in the pgroup:

        >>> tetra.rotate(Permutation([0, 1, 3, 2]))
        >>> tetra.corners
        (w, x, z, y)

        An allowed permutation of the vertices can be constructed by
        repeatedly applying permutations from the pgroup to the vertices.
        Here is a demonstration that applying p and p**2 for every p in
        pgroup generates all the orientations of a tetrahedron and no others:

        >>> all = ((w, x, y, z),
        ...        (x, y, w, z),
        ...        (y, w, x, z),
        ...        (w, z, x, y),
        ...        (z, w, y, x),
        ...        (w, y, z, x),
        ...        (y, z, w, x),
        ...        (x, z, y, w),
        ...        (z, y, x, w),
        ...        (y, x, z, w),
        ...        (x, w, z, y),
        ...        (z, x, w, y))

        >>> got = []
        >>> for p in (pgroup + [p**2 for p in pgroup]):
        ...     h = Polyhedron(corners)
        ...     h.rotate(p)
        ...     got.append(h.corners)
        ...
        >>> set(got) == set(all)
        True

        The make_perm method of a PermutationGroup will randomly pick
        permutations, multiply them together, and return the permutation that
        can be applied to the polyhedron to give the orientation produced
        by those individual permutations.

        Here, 3 permutations are used:

        >>> tetra.pgroup.make_perm(3)  # doctest: +SKIP
        Permutation([0, 3, 1, 2])

        To select the permutations that should be used, supply a list
        of indices to the permutations in pgroup in the order they should
        be applied:

        >>> use = [0, 0, 2]
        >>> p002 = tetra.pgroup.make_perm(3, use)
        >>> p002
        Permutation([1, 0, 3, 2])

        Apply them one at a time:

        >>> tetra.reset()
        >>> for i in use:
        ...     tetra.rotate(pgroup[i])
        ...
        >>> tetra.vertices
        (x, w, z, y)
        >>> sequentially = tetra.vertices

        Apply the composite permutation:

        >>> tetra.reset()
        >>> tetra.rotate(p002)
        >>> tetra.corners
        (x, w, z, y)
        >>> tetra.corners in all and tetra.corners == sequentially
        True

        Notes
        =====

        Defining permutation groups
        ---------------------------

        It is not necessary to enter any permutations, nor is it necessary
        to enter a complete set of transformations. In fact, for a
        polyhedron, all configurations can be constructed from just two
        permutations. For example, the orientations of a tetrahedron can be
        generated from an axis passing through a vertex and face, and
        another axis passing through a different vertex, or from an axis
        passing through the midpoints of two edges opposite of each other.

        For simplicity of presentation, consider a square --
        not a cube -- with vertices 1, 2, 3, and 4:

        1-----2  We could think of axes of rotation being:
        |     |  1) through the face
        |     |  2) from midpoint 1-2 to 3-4 or 1-3 to 2-4
        3-----4  3) lines 1-4 or 2-3

        To determine how to write the permutations, imagine 4 cameras,
        one at each corner, labeled A-D:

        A       B          A       B
         1-----2            1-----3             vertex index:
         |     |            |     |                 1   0
         |     |            |     |                 2   1
         3-----4            2-----4                 3   2
        C       D          C       D                4   3

        original           after rotation
                           along 1-4

        A diagonal and a face axis will be chosen for the "permutation group"
        from which any orientation can be constructed.

        >>> pgroup = []

        Imagine a clockwise rotation when viewing 1-4 from camera A. The new
        orientation is (in camera-order): 1, 3, 2, 4 so the permutation is
        given using the *indices* of the vertices as:

        >>> pgroup.append(Permutation((0, 2, 1, 3)))

        Now imagine rotating clockwise when looking down an axis entering the
        center of the square as viewed. The new camera-order would be
        3, 1, 4, 2 so the permutation is (using indices):

        >>> pgroup.append(Permutation((2, 0, 3, 1)))

        The square can now be constructed:
            ** use real-world labels for the vertices, entering them in
               camera order
            ** for the faces we use zero-based indices of the vertices
               in *edge-order* as the face is traversed; neither the
               direction nor the starting point matter -- the faces are
               only used to define edges (if so desired).
>>> square = Polyhedron((1, 2, 3, 4), [(0, 1, 3, 2)], pgroup) To rotate the square with a single permutation we can do: >>> square.rotate(square.pgroup[0]) >>> square.corners (1, 3, 2, 4) To use more than one permutation (or to use one permutation more than once) it is more convenient to use the make_perm method: >>> p011 = square.pgroup.make_perm([0, 1, 1]) # diag flip + 2 rotations >>> square.reset() # return to initial orientation >>> square.rotate(p011) >>> square.corners (4, 2, 3, 1) Thinking outside the box ------------------------ Although the Polyhedron object has a direct physical meaning, it actually has broader application. In the most general sense it is just a decorated PermutationGroup, allowing one to connect the permutations to something physical. For example, a Rubik's cube is not a proper polyhedron, but the Polyhedron class can be used to represent it in a way that helps to visualize the Rubik's cube. >>> facelets = flatten([symbols(s+'1:5') for s in 'UFRBLD']) >>> def show(): ... pairs = unflatten(r2.corners, 2) ... print(sstr(pairs[::2])) ... print(sstr(pairs[1::2])) ... >>> r2 = Polyhedron(facelets, pgroup=RubikGroup(2)) >>> show() [(U1, U2), (F1, F2), (R1, R2), (B1, B2), (L1, L2), (D1, D2)] [(U3, U4), (F3, F4), (R3, R4), (B3, B4), (L3, L4), (D3, D4)] >>> r2.rotate(0) # cw rotation of F >>> show() [(U1, U2), (F3, F1), (U3, R2), (B1, B2), (L1, D1), (R3, R1)] [(L4, L2), (F4, F2), (U4, R4), (B3, B4), (L3, D2), (D3, D4)] Predefined Polyhedra ==================== For convenience, the vertices and faces are defined for the following standard solids along with a permutation group for transformations. When the polyhedron is oriented as indicated below, the vertices in a given horizontal plane are numbered in ccw direction, starting from the vertex that will give the lowest indices in a given face. (In the net of the vertices, indices preceded by "-" indicate replication of the lhs index in the net.) 
tetrahedron, tetrahedron_faces ------------------------------ 4 vertices (vertex up) net: 0 0-0 1 2 3-1 4 faces: (0,1,2) (0,2,3) (0,3,1) (1,2,3) cube, cube_faces ---------------- 8 vertices (face up) net: 0 1 2 3-0 4 5 6 7-4 6 faces: (0,1,2,3) (0,1,5,4) (1,2,6,5) (2,3,7,6) (0,3,7,4) (4,5,6,7) octahedron, octahedron_faces ---------------------------- 6 vertices (vertex up) net: 0 0 0-0 1 2 3 4-1 5 5 5-5 8 faces: (0,1,2) (0,2,3) (0,3,4) (0,1,4) (1,2,5) (2,3,5) (3,4,5) (1,4,5) dodecahedron, dodecahedron_faces -------------------------------- 20 vertices (vertex up) net: 0 1 2 3 4 -0 5 6 7 8 9 -5 14 10 11 12 13-14 15 16 17 18 19-15 12 faces: (0,1,2,3,4) (0,1,6,10,5) (1,2,7,11,6) (2,3,8,12,7) (3,4,9,13,8) (0,4,9,14,5) (5,10,16,15,14) ( 6,10,16,17,11) (7,11,17,18,12) (8,12,18,19,13) (9,13,19,15,14) (15,16,17,18,19) icosahedron, icosahedron_faces ------------------------------ 12 vertices (face up) net: 0 0 0 0 -0 1 2 3 4 5 -1 6 7 8 9 10 -6 11 11 11 11 -11 20 faces: (0,1,2) (0,2,3) (0,3,4) (0,4,5) (0,1,5) (1,2,6) (2,3,7) (3,4,8) (4,5,9) (1,5,10) (2,6,7) (3,7,8) (4,8,9) (5,9,10) (1,6,10) (6,7,11,) (7,8,11) (8,9,11) (9,10,11) (6,10,11) >>> cube.edges {(0, 1), (0, 3), (0, 4), ..., (4, 7), (5, 6), (6, 7)} If you want to use letters or other names for the corners you can still use the pre-calculated faces: >>> corners = list('abcdefgh') >>> Polyhedron(corners, cube.faces).corners (a, b, c, d, e, f, g, h) References ========== [1] www.ocf.berkeley.edu/~wwu/articles/platonicsolids.pdf """ faces = [minlex(f, directed=False, is_set=True) for f in faces] corners, faces, pgroup = args = \ [Tuple(*a) for a in (corners, faces, pgroup)] obj = Basic.__new__(cls, *args) obj._corners = tuple(corners) # in order given obj._faces = FiniteSet(*faces) if pgroup and pgroup[0].size != len(corners): raise ValueError('Permutation size unequal to number of corners.') # use the identity permutation if none are given obj._pgroup = PermutationGroup(( pgroup or [Permutation(range(len(corners)))] )) return obj @property def corners(self): """ Get the corners of the Polyhedron. The method ``vertices`` is an alias for ``corners``. Examples ======== >>> p = Polyhedron(list('abcd')) >>> p.corners == p.vertices == (a, b, c, d) True See Also ======== array_form, cyclic_form """ return self._corners vertices = corners @property def array_form(self): """Return the indices of the corners. The indices are given relative to the original position of corners. Examples ======== >>> tetrahedron.array_form [0, 1, 2, 3] >>> tetrahedron.rotate(0) >>> tetrahedron.array_form [0, 2, 3, 1] >>> tetrahedron.pgroup[0].array_form [0, 2, 3, 1] >>> tetrahedron.reset() See Also ======== corners, cyclic_form """ corners = list(self.args[0]) return [corners.index(c) for c in self.corners] @property def cyclic_form(self): """Return the indices of the corners in cyclic notation. The indices are given relative to the original position of corners. See Also ======== corners, array_form """ return Permutation._af_new(self.array_form).cyclic_form @property def size(self): """Get the number of corners of the Polyhedron.""" return len(self._corners) @property def faces(self): """Get the faces of the Polyhedron.""" return self._faces @property def pgroup(self): """Get the permutations of the Polyhedron.""" return self._pgroup @property def edges(self): """ Given the faces of the polyhedra we can get the edges. 
        Examples
        ========

        >>> corners = (a, b, c)
        >>> faces = [(0, 1, 2)]
        >>> Polyhedron(corners, faces).edges
        {(0, 1), (0, 2), (1, 2)}

        """
        if self._edges is None:
            output = set()
            for face in self.faces:
                for i in range(len(face)):
                    edge = tuple(sorted([face[i], face[i - 1]]))
                    output.add(edge)
            self._edges = FiniteSet(*output)
        return self._edges

    def rotate(self, perm):
        """
        Apply a permutation to the polyhedron *in place*. The permutation
        may be given as a Permutation instance or an integer indicating
        which permutation from pgroup of the Polyhedron should be
        applied.

        This is an operation that is analogous to rotation about
        an axis by a fixed increment.

        Notes
        =====

        When a Permutation is applied, no check is done to see if that
        is a valid permutation for the Polyhedron. For example, a cube
        could be given a permutation which effectively swaps only 2
        vertices. A valid permutation (that rotates the object in a
        physical way) will be obtained if one only uses
        permutations from the ``pgroup`` of the Polyhedron. On the other
        hand, allowing arbitrary rotations (applications of permutations)
        gives a way to follow named elements rather than indices since
        Polyhedron allows vertices to be named while Permutation works
        only with indices.

        Examples
        ========

        >>> cube.corners
        (0, 1, 2, 3, 4, 5, 6, 7)
        >>> cube.rotate(0)
        >>> cube.corners
        (1, 2, 3, 0, 5, 6, 7, 4)

        A non-physical "rotation" that is not prohibited by this method:

        >>> cube.reset()
        >>> cube.rotate(Permutation([[1, 2]], size=8))
        >>> cube.corners
        (0, 2, 1, 3, 4, 5, 6, 7)

        Polyhedron can be used to follow elements of a set that are
        identified by letters instead of integers:

        >>> shadow = h5 = Polyhedron(list('abcde'))
        >>> p = Permutation([3, 0, 1, 2, 4])
        >>> h5.rotate(p)
        >>> h5.corners
        (d, a, b, c, e)
        >>> _ == shadow.corners
        True
        >>> copy = h5.copy()
        >>> h5.rotate(p)
        >>> h5.corners == copy.corners
        False

        """
        if not isinstance(perm, Permutation):
            perm = self.pgroup[perm]  # and we know it's valid
        else:
            if perm.size != self.size:
                raise ValueError('Polyhedron and Permutation sizes differ.')
        a = perm.array_form
        corners = [self.corners[a[i]] for i in range(len(self.corners))]
        self._corners = tuple(corners)

    def reset(self):
        """Return corners to their original positions.

        Examples
        ========

        >>> tetrahedron.corners
        (0, 1, 2, 3)
        >>> tetrahedron.rotate(0)
        >>> tetrahedron.corners
        (0, 2, 3, 1)
        >>> tetrahedron.reset()
        >>> tetrahedron.corners
        (0, 1, 2, 3)
        """
        self._corners = self.args[0]


def _pgroup_calcs():
    """Return the permutation groups for each of the polyhedra and the face
    definitions: tetrahedron, cube, octahedron, dodecahedron, icosahedron,
    tetrahedron_faces, cube_faces, octahedron_faces, dodecahedron_faces,
    icosahedron_faces

    (This author didn't find and didn't know of a better way to do it though
    there likely is such a way.)

    Although only 2 permutations are needed for a polyhedron in order to
    generate all the possible orientations, a group of permutations is
    provided instead. A set of permutations is called a "group" if::

        a*b = c (for any pair of permutations in the group, a and b, their
        product, c, is in the group)

        a*(b*c) = (a*b)*c (for any 3 permutations in the group,
        associativity holds)

        there is an identity permutation, I, such that I*a = a*I for all
        elements in the group

        a*b = I (the inverse of each permutation is also in the group)

    None of the generator sets selected below are themselves groups by
    these definitions. Instead, they are selected to contain those
    permutations whose powers alone will construct all orientations of the
    polyhedron, i.e. for permutations ``a``, ``b``, etc...
    in the group, ``a, a**2, ..., a**o_a``, ``b, b**2, ..., b**o_b``, etc...
    (where ``o_i`` is the order of permutation ``i``) generate all
    permutations of the polyhedron instead of mixed products like
    ``a*b``, ``a*b**2``, etc....

    Note that for a polyhedron with n vertices, the valid permutations of the
    vertices exclude those that do not maintain its faces. e.g. the
    permutation BCDE of a square's four corners, ABCD, is a valid
    permutation while CBDE is not (because this would twist the square).

    Examples
    ========

    The is_group checks for: closure, the presence of the Identity
    permutation, and the presence of the inverse for each of the elements
    in the group. Because the PermutationGroup constructor closes the
    supplied generators under multiplication, each pgroup is a true group
    even though the generator sets alone are not:

    >>> polyhedra = (tetrahedron, cube, octahedron, dodecahedron, icosahedron)
    >>> [h.pgroup.is_group for h in polyhedra]
    [True, True, True, True, True]

    Although tests in polyhedron's test suite check that powers of the
    permutations in the groups generate all permutations of the vertices of
    the polyhedron, here we also demonstrate the powers of the given
    permutations create a complete group for the tetrahedron:

    >>> for h in polyhedra[:1]:
    ...     G = h.pgroup
    ...     perms = set()
    ...     for g in G:
    ...         for e in range(g.order()):
    ...             p = tuple((g**e).array_form)
    ...             perms.add(p)
    ...
    ...     perms = [Permutation(p) for p in perms]
    ...     assert PermutationGroup(perms).is_group

    In addition to doing the above, the tests in the suite confirm that the
    faces are all present after the application of each permutation.

    References
    ==========

    http://dogschool.tripod.com/trianglegroup.html

    """
    def _pgroup_of_double(polyh, ordered_faces, pgroup):
        n = len(ordered_faces[0])
        # the vertices of the double which sits inside a given polyhedron
        # can be found by tracking the faces of the outer polyhedron.
# A map between face and the vertex of the double is made so that # after rotation the position of the vertices can be located fmap = dict(zip(ordered_faces, range(len(ordered_faces)))) flat_faces = flatten(ordered_faces) new_pgroup = [] for i, p in enumerate(pgroup): h = polyh.copy() h.rotate(p) c = h.corners # reorder corners in the order they should appear when # enumerating the faces reorder = unflatten([c[j] for j in flat_faces], n) # make them canonical reorder = [tuple(map(as_int, minlex(f, directed=False, is_set=True))) for f in reorder] # map face to vertex: the resulting list of vertices are the # permutation that we seek for the double new_pgroup.append(Permutation([fmap[f] for f in reorder])) return new_pgroup tetrahedron_faces = [ (0, 1, 2), (0, 2, 3), (0, 3, 1), # upper 3 (1, 2, 3), # bottom ] # cw from top # _t_pgroup = [ Permutation([[1, 2, 3], [0]]), # cw from top Permutation([[0, 1, 2], [3]]), # cw from front face Permutation([[0, 3, 2], [1]]), # cw from back right face Permutation([[0, 3, 1], [2]]), # cw from back left face Permutation([[0, 1], [2, 3]]), # through front left edge Permutation([[0, 2], [1, 3]]), # through front right edge Permutation([[0, 3], [1, 2]]), # through back edge ] tetrahedron = Polyhedron( range(4), tetrahedron_faces, _t_pgroup) cube_faces = [ (0, 1, 2, 3), # upper (0, 1, 5, 4), (1, 2, 6, 5), (2, 3, 7, 6), (0, 3, 7, 4), # middle 4 (4, 5, 6, 7), # lower ] # U, D, F, B, L, R = up, down, front, back, left, right _c_pgroup = [Permutation(p) for p in [ [1, 2, 3, 0, 5, 6, 7, 4], # cw from top, U [4, 0, 3, 7, 5, 1, 2, 6], # cw from F face [4, 5, 1, 0, 7, 6, 2, 3], # cw from R face [1, 0, 4, 5, 2, 3, 7, 6], # cw through UF edge [6, 2, 1, 5, 7, 3, 0, 4], # cw through UR edge [6, 7, 3, 2, 5, 4, 0, 1], # cw through UB edge [3, 7, 4, 0, 2, 6, 5, 1], # cw through UL edge [4, 7, 6, 5, 0, 3, 2, 1], # cw through FL edge [6, 5, 4, 7, 2, 1, 0, 3], # cw through FR edge [0, 3, 7, 4, 1, 2, 6, 5], # cw through UFL vertex [5, 1, 0, 4, 6, 2, 3, 7], # cw through UFR vertex [5, 6, 2, 1, 4, 7, 3, 0], # cw through UBR vertex [7, 4, 0, 3, 6, 5, 1, 2], # cw through UBL ]] cube = Polyhedron( range(8), cube_faces, _c_pgroup) octahedron_faces = [ (0, 1, 2), (0, 2, 3), (0, 3, 4), (0, 1, 4), # top 4 (1, 2, 5), (2, 3, 5), (3, 4, 5), (1, 4, 5), # bottom 4 ] octahedron = Polyhedron( range(6), octahedron_faces, _pgroup_of_double(cube, cube_faces, _c_pgroup)) dodecahedron_faces = [ (0, 1, 2, 3, 4), # top (0, 1, 6, 10, 5), (1, 2, 7, 11, 6), (2, 3, 8, 12, 7), # upper 5 (3, 4, 9, 13, 8), (0, 4, 9, 14, 5), (5, 10, 16, 15, 14), (6, 10, 16, 17, 11), (7, 11, 17, 18, 12), # lower 5 (8, 12, 18, 19, 13), (9, 13, 19, 15, 14), (15, 16, 17, 18, 19) # bottom ] def _string_to_perm(s): rv = [Permutation(range(20))] p = None for si in s: if si not in '01': count = int(si) - 1 else: count = 1 if si == '0': p = _f0 else: p = _f1 rv.extend([p]*count) return Permutation.rmul(*rv) # top face cw _f0 = Permutation([ 1, 2, 3, 4, 0, 6, 7, 8, 9, 5, 11, 12, 13, 14, 10, 16, 17, 18, 19, 15]) # front face cw _f1 = Permutation([ 5, 0, 4, 9, 14, 10, 1, 3, 13, 15, 6, 2, 8, 19, 16, 17, 11, 7, 12, 18]) # the strings below, like 0104 are shorthand for F0*F1*F0**4 and are # the remaining 4 face rotations, 15 edge permutations, and the # 10 vertex rotations. 
_dodeca_pgroup = [_f0, _f1] + [_string_to_perm(s) for s in """ 0104 140 014 0410 010 1403 03104 04103 102 120 1304 01303 021302 03130 0412041 041204103 04120410 041204104 041204102 10 01 1402 0140 04102 0412 1204 1302 0130 03120""".strip().split()] dodecahedron = Polyhedron( range(20), dodecahedron_faces, _dodeca_pgroup) icosahedron_faces = [ [0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 5], [0, 1, 5], [1, 6, 7], [1, 2, 7], [2, 7, 8], [2, 3, 8], [3, 8, 9], [3, 4, 9], [4, 9, 10 ], [4, 5, 10], [5, 6, 10], [1, 5, 6], [6, 7, 11], [7, 8, 11], [8, 9, 11], [9, 10, 11], [6, 10, 11]] icosahedron = Polyhedron( range(12), icosahedron_faces, _pgroup_of_double( dodecahedron, dodecahedron_faces, _dodeca_pgroup)) return (tetrahedron, cube, octahedron, dodecahedron, icosahedron, tetrahedron_faces, cube_faces, octahedron_faces, dodecahedron_faces, icosahedron_faces) (tetrahedron, cube, octahedron, dodecahedron, icosahedron, tetrahedron_faces, cube_faces, octahedron_faces, dodecahedron_faces, icosahedron_faces) = _pgroup_calcs()
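
# --- Hedged usage sketch (not part of the original module) ---
# Demonstrates the rotate()/reset() cycle on the predefined ``cube``
# built above. Iterating over ``cube.pgroup.generators`` (assumed here to
# expose the supplied rotation permutations) applies each rotation and
# then restores the original corner order; the helper name
# ``_demo_cube_rotations`` is an illustrative assumption.
def _demo_cube_rotations():
    start = cube.corners
    seen = set()
    for p in cube.pgroup.generators:
        cube.rotate(p)          # apply one of the physical rotations
        seen.add(cube.corners)  # record the orientation reached
        cube.reset()            # restore the original corner order
    assert cube.corners == start
    return seen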
bsd-3-clause
3,605,767,470,753,817,000
32.738538
80
0.530356
false
strands-project/robomongo
src/third-party/qscintilla/Python/configure.py
1
29817
# This script configures QScintilla for PyQt v4.10 and later. It will fall # back to the old script if an earlier version of PyQt is found. # # Copyright (c) 2012 Riverbank Computing Limited <[email protected]> # # This file is part of QScintilla. # # This file may be used under the terms of the GNU General Public # License versions 2.0 or 3.0 as published by the Free Software # Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3 # included in the packaging of this file. Alternatively you may (at # your option) use any later version of the GNU General Public # License if such license has been publicly approved by Riverbank # Computing Limited (or its successors, if any) and the KDE Free Qt # Foundation. In addition, as a special exception, Riverbank gives you # certain additional rights. These rights are described in the Riverbank # GPL Exception version 1.1, which can be found in the file # GPL_EXCEPTION.txt in this package. # # If you are unsure which license is appropriate for your use, please # contact the sales department at [email protected]. # # This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE # WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. import sys import os import glob import optparse try: import sysconfig except ImportError: from distutils import sysconfig # Initialise the constants. SIP_MIN_VERSION = '4.12.0' # This must be kept in sync with qscintilla.pro, Qt4Qt5/application.pro and # Qt4Qt5/designer.pro. QSCI_API_MAJOR = 9 def error(msg): """ Display an error message and terminate. msg is the text of the error message. """ sys.stderr.write(format("Error: " + msg) + "\n") sys.exit(1) def inform(msg): """ Display an information message. msg is the text of the error message. """ sys.stdout.write(format(msg) + "\n") def format(msg, left_margin=0, right_margin=78): """ Format a message by inserting line breaks at appropriate places. msg is the text of the message. left_margin is the position of the left margin. right_margin is the position of the right margin. Returns the formatted message. """ curs = left_margin fmsg = " " * left_margin for w in msg.split(): l = len(w) if curs != left_margin and curs + l > right_margin: fmsg = fmsg + "\n" + (" " * left_margin) curs = left_margin if curs > left_margin: fmsg = fmsg + " " curs = curs + 1 fmsg = fmsg + w curs = curs + l return fmsg class HostPythonConfiguration: """ A container for the host Python configuration. """ def __init__(self): """ Initialise the configuration. """ self.platform = sys.platform self.version = sys.hexversion >> 8 if hasattr(sysconfig, 'get_path'): # The modern API. self.inc_dir = sysconfig.get_path('include') self.module_dir = sysconfig.get_path('platlib') else: # The legacy distutils API. self.inc_dir = sysconfig.get_python_inc(plat_specific=1) self.module_dir = sysconfig.get_python_lib(plat_specific=1) if sys.platform == 'win32': self.data_dir = sys.prefix self.lib_dir = sys.prefix + '\\libs' else: self.data_dir = sys.prefix + '/share' self.lib_dir = sys.prefix + '/lib' class TargetQtConfiguration: """ A container for the target Qt configuration. """ def __init__(self, qmake): """ Initialise the configuration. qmake is the full pathname of the qmake executable that will provide the configuration. 
""" pipe = os.popen(' '.join([qmake, '-query'])) for l in pipe: l = l.strip() tokens = l.split(':', 1) if isinstance(tokens, list): if len(tokens) != 2: error("Unexpected output from qmake: '%s'\n" % l) name, value = tokens else: name = tokens value = None name = name.replace('/', '_') setattr(self, name, value) pipe.close() class TargetConfiguration: """ A container for the target configuration. """ def __init__(self): """ Initialise the configuration with default values. """ # Values based on the host Python configuration. py_config = HostPythonConfiguration() self.py_module_dir = py_config.module_dir self.py_inc_dir = py_config.inc_dir self.py_lib_dir = py_config.lib_dir self.py_platform = py_config.platform self.py_sip_dir = os.path.join(py_config.data_dir, 'sip') self.py_version = py_config.version self.sip_inc_dir = py_config.inc_dir # The default qmake spec. if self.py_platform == 'win32': if self.py_version >= 0x030300: self.qmake_spec = 'win32-msvc2010' elif self.py_version >= 0x020600: self.qmake_spec = 'win32-msvc2008' elif self.py_version >= 0x020400: self.qmake_spec = 'win32-msvc.net' else: self.qmake_spec = 'win32-msvc' else: # Use the Qt default. (We may update it for MacOS/X later.) self.qmake_spec = '' # Remaining values. self.pyqt_sip_flags = '' self.pyqt_version = '' self.qmake = self._find_exe('qmake') self.sip = self._find_exe('sip') self.prot_is_public = (self.py_platform.startswith('linux') or self.py_platform == 'darwin') self.qscintilla_is_dll = (self.py_platform == 'win32') self.module_dir = os.path.join(py_config.module_dir, 'PyQt4') self.pyqt_sip_dir = os.path.join(self.py_sip_dir, 'PyQt4') self.qsci_sip_dir = self.pyqt_sip_dir def from_configuration_file(self, config_file): """ Initialise the configuration with values from a file. config_file is the name of the configuration file. """ inform("Reading configuration from %s..." % config_file) cfg = open(config_file) line_nr = 0 for l in cfg: line_nr += 1 l = l.strip() if len(l) == 0 or l[0] == '#': continue eq = l.find('=') if eq > 0: name = l[:eq - 1].rstrip() value = l[eq + 1:].lstrip() else: name = value = '' if name == '' or value == '': error("%s:%d: Invalid line." % (config_file, line_nr)) default_value = getattr(self, name, None) if default_value is None: error( "%s:%d: Unknown item: %s." % (config_file, line_nr, name)) if isinstance(default_value, int): if value.startswith('0x'): value = int(value, 16) else: value = int(value) setattr(self, name, value) cfg.close() def from_introspection(self, pyqt_package): """ Initialise the configuration by introspecting the system. pyqt_package is the name of the PyQt package we are building against. """ if pyqt_package == 'PyQt5': try: from PyQt5 import QtCore except ImportError: error( "Unable to import PyQt5.QtCore. Make sure PyQt5 is " "installed.") else: try: from PyQt4 import QtCore except ImportError: error( "Unable to import PyQt4.QtCore. Make sure PyQt4 is " "installed.") inform("PyQt %s is being used." % QtCore.PYQT_VERSION_STR) inform("Qt %s is being used." % QtCore.QT_VERSION_STR) # See if we have a PyQt that embeds its configuration. try: pyqt_config = QtCore.PYQT_CONFIGURATION except AttributeError: pyqt_config = None if pyqt_config is None: # Fallback to the old configuration script. 
config_script = sys.argv[0].replace('configure', 'configure-old') args = [sys.executable, config_script] + sys.argv[1:] try: os.execv(sys.executable, args) except OSError: pass error("Unable to execute '%s'\n" % config_script) self.pyqt_sip_flags = pyqt_config['sip_flags'] def get_qt_configuration(self, opts): """ Get the Qt configuration that can be extracted from qmake. opts are the command line options. """ try: qmake = opts.qmake except AttributeError: # Windows. qmake = None if qmake is not None: self.qmake = qmake elif self.qmake is None: # Under Windows qmake and the Qt DLLs must be on the system PATH # otherwise the dynamic linker won't be able to resolve the # symbols. On other systems we assume we can just run qmake by # using its full pathname. if sys.platform == 'win32': error("Make sure you have a working Qt qmake on your PATH.") else: error( "Make sure you have a working Qt qmake on your PATH " "or use the --qmake argument to explicitly specify a " "working Qt qmake.") # Query qmake. qt_config = TargetQtConfiguration(self.qmake) # The binary MacOS/X Qt installer defaults to XCode. If this is what # we might have then use macx-clang (Qt v5) or macx-g++ (Qt v4). if sys.platform == 'darwin': try: # Qt v5. if qt_config.QMAKE_SPEC == 'macx-xcode': # This will exist (and we can't check anyway). self.qmake_spec = 'macx-clang' else: # No need to explicitly name the default. self.qmake_spec = '' except AttributeError: # Qt v4. self.qmake_spec = 'macx-g++' self.api_dir = qt_config.QT_INSTALL_DATA self.qsci_inc_dir = qt_config.QT_INSTALL_HEADERS self.qsci_lib_dir = qt_config.QT_INSTALL_LIBS def override_defaults(self, opts): """ Override the defaults from the command line. opts are the command line options. """ if opts.apidir is not None: self.api_dir = opts.apidir if opts.destdir is not None: self.module_dir = opts.destdir else: self.module_dir = os.path.join(self.py_module_dir, opts.pyqt_package) if opts.qmakespec is not None: self.qmake_spec = opts.qmakespec if opts.prot_is_public is not None: self.prot_is_public = opts.prot_is_public if opts.qsci_inc_dir is not None: self.qsci_inc_dir = opts.qsci_inc_dir if opts.qsci_lib_dir is not None: self.qsci_lib_dir = opts.qsci_lib_dir if opts.sip_inc_dir is not None: self.sip_inc_dir = opts.sip_inc_dir if opts.pyqt_sip_dir is not None: self.pyqt_sip_dir = opts.pyqt_sip_dir else: self.pyqt_sip_dir = os.path.join(self.py_sip_dir, opts.pyqt_package) if opts.qsci_sip_dir is not None: self.qsci_sip_dir = opts.qsci_sip_dir else: self.qsci_sip_dir = self.pyqt_sip_dir if opts.sip is not None: self.sip = opts.sip if opts.is_dll is not None: self.qscintilla_is_dll = opts.is_dll @staticmethod def _find_exe(exe): """ Find an executable, ie. the first on the path. """ try: path = os.environ['PATH'] except KeyError: path = '' if sys.platform == 'win32': exe = exe + '.exe' for d in path.split(os.pathsep): exe_path = os.path.join(d, exe) if os.access(exe_path, os.X_OK): return exe_path return None def create_optparser(target_config): """ Create the parser for the command line. target_config is the target configuration containing default values. 
""" def store_abspath(option, opt_str, value, parser): setattr(parser.values, option.dest, os.path.abspath(value)) def store_abspath_dir(option, opt_str, value, parser): if not os.path.isdir(value): raise optparse.OptionValueError("'%s' is not a directory" % value) setattr(parser.values, option.dest, os.path.abspath(value)) def store_abspath_exe(option, opt_str, value, parser): if not os.access(value, os.X_OK): raise optparse.OptionValueError("'%s' is not an executable" % value) setattr(parser.values, option.dest, os.path.abspath(value)) p = optparse.OptionParser(usage="python %prog [options]", version="2.7.2") p.add_option("--spec", dest='qmakespec', default=None, action='store', metavar="SPEC", help="pass -spec SPEC to qmake [default: %s]" % "don't pass -spec" if target_config.qmake_spec == '' else target_config.qmake_spec) p.add_option("--apidir", "-a", dest='apidir', type='string', default=None, action='callback', callback=store_abspath, metavar="DIR", help="the QScintilla API file will be installed in DIR [default: " "QT_INSTALL_DATA/qsci]") p.add_option("--configuration", dest='config_file', type='string', default=None, action='callback', callback=store_abspath, metavar="FILE", help="FILE defines the target configuration") p.add_option("--destdir", "-d", dest='destdir', type='string', default=None, action='callback', callback=store_abspath, metavar="DIR", help="install the QScintilla module in DIR [default: " "%s]" % target_config.module_dir) p.add_option("--protected-is-public", dest='prot_is_public', default=None, action='store_true', help="enable building with 'protected' redefined as 'public' " "[default: %s]" % target_config.prot_is_public) p.add_option("--protected-not-public", dest='prot_is_public', action='store_false', help="disable building with 'protected' redefined as 'public'") p.add_option("--pyqt", dest='pyqt_package', type='choice', choices=['PyQt4', 'PyQt5'], default='PyQt4', action='store', metavar="PyQtn", help="configure for PyQt4 or PyQt5 [default: PyQt4]") if sys.platform != 'win32': p.add_option("--qmake", "-q", dest='qmake', type='string', default=None, action='callback', callback=store_abspath_exe, metavar="FILE", help="the pathname of qmake is FILE [default: " "%s]" % (target_config.qmake or "None")) p.add_option("--qsci-incdir", "-n", dest='qsci_inc_dir', type='string', default=None, action='callback', callback=store_abspath_dir, metavar="DIR", help="the directory containing the QScintilla Qsci header file " "directory is DIR [default: QT_INSTALL_HEADERS]") p.add_option("--qsci-libdir", "-o", dest='qsci_lib_dir', type='string', default=None, action='callback', callback=store_abspath_dir, metavar="DIR", help="the directory containing the QScintilla library is DIR " "[default: QT_INSTALL_LIBS]") p.add_option("--sip", dest='sip', type='string', default=None, action='callback', callback=store_abspath_exe, metavar="FILE", help="the pathname of sip is FILE [default: " "%s]" % (target_config.sip or "None")) p.add_option("--sip-incdir", dest='sip_inc_dir', type='string', default=None, action='callback', callback=store_abspath_dir, metavar="DIR", help="the directory containing the sip.h header file file is DIR " "[default: %s]" % target_config.sip_inc_dir) p.add_option("--pyqt-sipdir", dest='pyqt_sip_dir', type='string', default=None, action='callback', callback=store_abspath_dir, metavar="DIR", help="the directory containing the PyQt .sip files is DIR " "[default: %s]" % target_config.pyqt_sip_dir) p.add_option("--qsci-sipdir", "-v", dest='qsci_sip_dir', 
type='string', default=None, action='callback', callback=store_abspath_dir, metavar="DIR", help="the QScintilla .sip files will be installed in DIR " "[default: %s]" % target_config.qsci_sip_dir) p.add_option("--concatenate", "-c", dest='concat', default=False, action='store_true', help="concatenate the C++ source files") p.add_option("--concatenate-split", "-j", dest='split', type='int', default=1, metavar="N", help="split the concatenated C++ source files into N pieces " "[default: 1]") p.add_option("--static", "-k", dest='static', default=False, action='store_true', help="build the QScintilla module as a static library") p.add_option("--no-docstrings", dest='no_docstrings', default=False, action='store_true', help="disable the generation of docstrings") p.add_option("--trace", "-r", dest='tracing', default=False, action='store_true', help="build the QScintilla module with tracing enabled") p.add_option("--no-dll", "-s", dest='is_dll', default=None, action='store_false', help="QScintilla is a static library and not a Windows DLL") p.add_option("--debug", "-u", default=False, action='store_true', help="build the QScintilla module with debugging symbols") p.add_option("--no-timestamp", "-T", dest='no_timestamp', default=False, action='store_true', help="suppress timestamps in the header comments of generated " "code [default: include timestamps]") return p def inform_user(target_config): """ Tell the user the values that are going to be used. target_config is the target configuration. """ inform("The sip executable is %s." % target_config.sip) inform("The QScintilla module will be installed in %s." % target_config.module_dir) if target_config.prot_is_public: inform("The QScintilla module is being built with 'protected' " "redefined as 'public'.") inform("The QScintilla .sip files will be installed in %s." % target_config.qsci_sip_dir) inform("The QScintilla API file will be installed in %s." % os.path.join(target_config.api_dir, 'api', 'python')) def check_qscintilla(target_config): """ See if QScintilla can be found and what its version is. target_config is the target configuration. """ # Find the QScintilla header files. sciglobal = os.path.join(target_config.qsci_inc_dir, 'Qsci', 'qsciglobal.h') if not os.access(sciglobal, os.F_OK): error("Qsci/qsciglobal.h could not be found in %s. If QScintilla is installed then use the --qsci-incdir argument to explicitly specify the correct directory." % target_config.qsci_inc_dir) # Get the QScintilla version string. sciversstr = read_define(sciglobal, 'QSCINTILLA_VERSION_STR') if sciversstr is None: error( "The QScintilla version number could not be determined by " "reading %s." % sciglobal) if not glob.glob(os.path.join(target_config.qsci_lib_dir, '*qscintilla2*')): error("The QScintilla library could not be found in %s. If QScintilla is installed then use the --qsci-libdir argument to explicitly specify the correct directory." % target_config.qsci_lib_dir) # Because we include the Python bindings with the C++ code we can # reasonably force the same version to be used and not bother about # versioning. if sciversstr != '2.7.2': error("QScintilla %s is being used but the Python bindings 2.7.2 are being built. Please use matching versions." % sciversstr) inform("QScintilla %s is being used." % sciversstr) def read_define(filename, define): """ Read the value of a #define from a file. filename is the name of the file. define is the name of the #define. None is returned if there was no such #define. 
""" f = open(filename) for l in f: wl = l.split() if len(wl) >= 3 and wl[0] == "#define" and wl[1] == define: # Take account of embedded spaces. value = ' '.join(wl[2:])[1:-1] break else: value = None f.close() return value def sip_flags(target_config): """ Return the SIP flags. target_config is the target configuration. """ # Get the flags used for the main PyQt module. flags = target_config.pyqt_sip_flags.split() # Generate the API file. flags.append('-a') flags.append('QScintilla2.api') # Add PyQt's .sip files to the search path. flags.append('-I') flags.append(target_config.pyqt_sip_dir) return flags def generate_code(target_config, opts): """ Generate the code for the QScintilla module. target_config is the target configuration. opts are the command line options. """ inform("Generating the C++ source for the Qsci module...") # Build the SIP command line. argv = [target_config.sip] argv.extend(sip_flags(target_config)) if opts.no_timestamp: argv.append('-T') if not opts.no_docstrings: argv.append('-o'); if target_config.prot_is_public: argv.append('-P'); if opts.concat: argv.append('-j') argv.append(str(opts.split)) if opts.tracing: argv.append('-r') argv.append('-c') argv.append('.') if opts.pyqt_package == 'PyQt5': argv.append('sip/qscimod5.sip') else: argv.append('sip/qscimod4.sip') rc = os.spawnv(os.P_WAIT, target_config.sip, argv) if rc != 0: error("%s returned exit code %d." % (target_config.sip, rc)) # Generate the .pro file. generate_pro(target_config, opts) # Generate the Makefile. inform("Creating the Makefile for the Qsci module...") qmake_args = ['qmake'] if target_config.qmake_spec != '': qmake_args.append('-spec') qmake_args.append(target_config.qmake_spec) qmake_args.append('Qsci.pro') rc = os.spawnv(os.P_WAIT, target_config.qmake, qmake_args) if rc != 0: error("%s returned exit code %d." % (target_config.qmake, rc)) def generate_pro(target_config, opts): """ Generate the .pro file for the QScintilla module. target_config is the target configuration. opts are the command line options. """ # Without the 'no_check_exist' magic the target.files must exist when qmake # is run otherwise the install and uninstall targets are not generated. inform("Generating the .pro file for the Qsci module...") pro = open('Qsci.pro', 'w') pro.write('TEMPLATE = lib\n') pro.write('CONFIG += %s\n' % ('debug' if opts.debug else 'release')) pro.write('CONFIG += %s\n' % ('staticlib' if opts.static else 'plugin')) pro.write(''' greaterThan(QT_MAJOR_VERSION, 4) { QT += widgets printsupport } ''') if not opts.static: # I don't really understand why the linker needs to find the Python # .lib file. pro.write(''' win32 { PY_MODULE = Qsci.pyd target.files = Qsci.pyd LIBS += -L%s } else { PY_MODULE = Qsci.so target.files = Qsci.so } target.CONFIG = no_check_exist ''' % target_config.py_lib_dir) pro.write(''' target.path = %s INSTALLS += target ''' % target_config.module_dir) pro.write(''' api.path = %s/api/python api.files = QScintilla2.api INSTALLS += api ''' % target_config.api_dir) pro.write(''' sip.path = %s/Qsci sip.files =''' % target_config.qsci_sip_dir) for s in glob.glob('sip/*.sip'): pro.write(' \\\n %s' % s) pro.write(''' INSTALLS += sip ''') pro.write('\n') # These optimisations could apply to other platforms. 
if target_config.py_platform == 'darwin': pro.write('QMAKE_CXXFLAGS += -fno-exceptions\n') if target_config.py_platform.startswith('linux'): pro.write('QMAKE_CXXFLAGS += -fno-exceptions\n') if not opts.static: if target_config.py_version >= 0x030000: entry_point = 'PyInit_Qsci' else: entry_point = 'initQsci' exp = open('Qsci.exp', 'wt') exp.write('{ global: %s; local: *; };' % entry_point) exp.close() pro.write('QMAKE_LFLAGS += -Wl,--version-script=Qsci.exp\n') if target_config.prot_is_public: pro.write('DEFINES += SIP_PROTECTED_IS_PUBLIC protected=public\n') if target_config.qscintilla_is_dll: pro.write('DEFINES += QSCINTILLA_DLL\n') pro.write('INCLUDEPATH += %s\n' % target_config.qsci_inc_dir) pro.write('INCLUDEPATH += %s\n' % target_config.py_inc_dir) if target_config.py_inc_dir != target_config.sip_inc_dir: pro.write('INCLUDEPATH += %s\n' % target_config.sip_inc_dir) pro.write('LIBS += -L%s -lqscintilla2\n' % target_config.qsci_lib_dir) if not opts.static: pro.write(''' win32 { QMAKE_POST_LINK = $(COPY_FILE) $(DESTDIR_TARGET) $$PY_MODULE } else { QMAKE_POST_LINK = $(COPY_FILE) $(TARGET) $$PY_MODULE } macx { QMAKE_LFLAGS += "-undefined dynamic_lookup" QMAKE_POST_LINK = $$QMAKE_POST_LINK$$escape_expand(\\\\n\\\\t)$$quote(install_name_tool -change libqscintilla2.%s.dylib %s/libqscintilla2.%s.dylib $$PY_MODULE) } ''' % (QSCI_API_MAJOR, target_config.qsci_lib_dir, QSCI_API_MAJOR)) pro.write('\n') pro.write('TARGET = Qsci\n') pro.write('HEADERS = sipAPIQsci.h\n') pro.write('SOURCES =') for s in glob.glob('*.cpp'): pro.write(' \\\n %s' % s) pro.write('\n') pro.close() def check_sip(target_config): """ Check that the version of sip is good enough. target_config is the target configuration. """ if target_config.sip is None: error( "Make sure you have a working sip on your PATH or use the " "--sip argument to explicitly specify a working sip.") pipe = os.popen(' '.join([target_config.sip, '-V'])) for l in pipe: version_str = l.strip() break else: error("'%s -V' did not generate any output." % target_config.sip) pipe.close() if 'snapshot' not in version_str: version = version_from_string(version_str) if version is None: error( "'%s -V' generated unexpected output: '%s'." % ( target_config.sip, version_str)) min_version = version_from_string(SIP_MIN_VERSION) if version < min_version: error( "This version of QScintilla requires sip %s or later." % SIP_MIN_VERSION) inform("sip %s is being used." % version_str) def version_from_string(version_str): """ Convert a version string of the form m.n or m.n.o to an encoded version number (or None if it was an invalid format). version_str is the version string. """ parts = version_str.split('.') if not isinstance(parts, list): return None if len(parts) == 2: parts.append('0') if len(parts) != 3: return None version = 0 for part in parts: try: v = int(part) except ValueError: return None version = (version << 8) + v return version def main(argv): """ Create the configuration module module. argv is the list of command line arguments. """ # Create the default target configuration. target_config = TargetConfiguration() # Parse the command line. p = create_optparser(target_config) opts, args = p.parse_args() if args: p.print_help() sys.exit(2) # Query qmake for the basic configuration information. target_config.get_qt_configuration(opts) # Update the target configuration. 
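    # An explicit configuration file takes precedence over introspection of
    # the PyQt installation; command line options are applied last, so they
    # override both (see override_defaults() below).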
    if opts.config_file is not None:
        target_config.from_configuration_file(opts.config_file)
    else:
        target_config.from_introspection(opts.pyqt_package)

    target_config.override_defaults(opts)

    # Check SIP is new enough.
    check_sip(target_config)

    # Check for QScintilla.
    check_qscintilla(target_config)

    # Tell the user what's been found.
    inform_user(target_config)

    # Generate the code.
    generate_code(target_config, opts)


###############################################################################
# The script starts here.
###############################################################################

if __name__ == '__main__':
    try:
        main(sys.argv)
    except SystemExit:
        raise
    except:
        sys.stderr.write(
"""An internal error occurred.  Please report all the output from the program,
including the following traceback, to [email protected].
""")
        raise
gpl-3.0
5,562,037,454,009,413,000
32.691525
202
0.583325
false
emilioramirez/aacv2016
lab1/lab1.py
1
14650
# -*- coding: utf-8 -*-

'''
AAVC, FaMAF-UNC, 20-SEP-2016

==============================================
Lab 1: Image classification using BoVW models
==============================================

0) Get familiar with the code and with the dataset.
   Svetlana Lazebnik, Cordelia Schmid, and Jean Ponce. (2006) Beyond Bags of
   Features: Spatial Pyramid Matching for Recognizing Natural Scene
   Categories. In: CVPR.
   Link: http://www-cvr.ai.uiuc.edu/ponce_grp/data/scene_categories/scene_categories.zip

1) Find the best value for the C parameter of the classifier (linear SVM)
   via 5-fold cross-validation on the training set (plot) of one of the
   folds. Once the parameter has been chosen, report the mean and standard
   deviation of the accuracy on the test set.
   Hsu, C. W., Chang, C. C., & Lin, C. J. (2003). A practical guide to
   support vector classification.

2) Evaluate accuracy vs. n_clusters. What happens to the BoVW computation
   times? Plots. 50 - 90 - 140
   Given L2-normalized local descriptors: how can their assignment to
   dictionary words be optimized? (hint: express the Euclidean distance
   between two vectors in terms of dot products between them)

3) Transformations on descriptors / BoVW vectors: evaluate the impact of the
   sqrt transform and of L2 normalization.
   Arandjelović, R., & Zisserman, A. (2012). Three things everyone should
   know to improve object retrieval. In: CVPR.

4) Non-linear kernels: intersection (BoVW: norm=1) and RBF, tuning the
   parameters via cross-validation on a validation set.

5*) Implement "spatial augmentation": append the (relative) spatial
   coordinates to each descriptor, i.e. the descriptor d=(d1,...,dn) is
   transformed into d'=(d1,...,dn, x/W-0.5, y/H-0.5), where H and W are the
   image height and width, respectively.
   Sánchez, J., Perronnin, F., & De Campos, T. (2012). Modeling the spatial
   layout of images beyond spatial pyramids. In: PRL

6*) Use a "vocabulary tree". Explain how it affects the assignment of local
   descriptors to dictionary words.
   Nister, D., & Stewenius, H. (2006). Scalable recognition with a
   vocabulary tree. In: CVPR

7*) Replace BoVW with VLAD (implement it).
   Arandjelovic, R., & Zisserman, A. (2013). All about VLAD. In: CVPR

8*) Work on the MIT-IndoorScenes dataset (67 classes).
   A. Quattoni, and A. Torralba (2009). Recognizing Indoor Scenes. In: CVPR.
   link: http://web.mit.edu/torralba/www/indoor.html
   Some remarks:
   - The dataset provides a standard train/test split, so a parser has to be
     written that reads the .txt files and builds the dictionary
     corresponding to the dataset.
   - It is 2.4G of images, so keeping every BoVW vector in memory becomes
     difficult. In that case, training has to be done via SGD
     (sklearn.linear_model.SGDClassifier). Pay attention to the update
     scheme.
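
A minimal sketch of exercise 5's spatial augmentation (illustrative only,
not part of the provided code; `coords` is a hypothetical (N, 2) array of
(x, y) keypoint positions that the feature extractor would have to return):

    aug = np.column_stack([feat,
                           coords[:, 0] / W - 0.5,
                           coords[:, 1] / H - 0.5]).astype(np.float32)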
'''

from __future__ import print_function
from __future__ import division

import sys
from os import listdir, makedirs
from os.path import join, splitext, abspath, split, exists

import numpy as np
np.seterr(all='raise')

import cv2

from skimage.feature import daisy
from skimage.transform import rescale

from sklearn.svm import LinearSVC, SVC

from scipy.spatial import distance
from scipy.io import savemat, loadmat

DAISY_L2 = '-daisy:L2'
DAISY_sqrt = '-daisy:sqrt'
DAISY_L2_sqrt = '-daisy:L2+sqrt'
DAISY_NONE = '-daisy:None'
DAISY_L1 = '-daisy:L1'

EXT_FEAT = '{}.feat'
VOCABYLARY = 'vocabulary{:d}{}.dat'
SAMPLE = 'sample{:d}{}.feat'

CURRENT_DAISY = DAISY_L1

BOVW_L2 = '-bovw:L2'
BOVW_sqrt = '-bovw:sqrt'
BOVW_L2_sqrt = '-bovw:L2+sqrt'
BOVW_None = '-bovw:None'
BOVW_L1 = '-bovw:L2'  # labelled L2 but it is actually L1 (value kept as-is so cached file names still match)

CURRENT_BOVW = BOVW_L1

EXT_BOVW = '{}{}.bovw'


def save_data(data, filename, force_overwrite=False):
    # if dir/subdir doesn't exist, create it
    dirname = split(filename)[0]
    if not exists(dirname):
        makedirs(dirname)
    savemat(filename, {'data': data}, appendmat=False)


def load_data(filename):
    return loadmat(filename, appendmat=False)['data'].squeeze()


def load_scene_categories(path, random_state=None):
    cname = sorted(listdir(path))  # human-readable names
    cid = []    # class id wrt cname list
    fname = []  # relative file paths
    for i, cls in enumerate(cname):
        for img in listdir(join(path, cls)):
            if splitext(img)[1] not in ('.jpeg', '.jpg', '.png'):
                continue
            fname.append(join(cls, img))
            cid.append(i)
    return {'cname': cname, 'cid': cid, 'fname': fname}


def n_per_class_split(dataset, n=100, random_state=None):
    # set RNG
    if random_state is None:
        random_state = np.random.RandomState()

    n_classes = len(dataset['cname'])
    cid = dataset['cid']
    fname = dataset['fname']

    train_set = []
    test_set = []
    for id_ in range(n_classes):
        idxs = [i for i, j in enumerate(cid) if j == id_]
        random_state.shuffle(idxs)

        # train samples
        for i in idxs[:n]:
            train_set.append((fname[i], cid[i]))

        # test samples
        for i in idxs[n:]:
            test_set.append((fname[i], cid[i]))

    random_state.shuffle(train_set)
    random_state.shuffle(test_set)
    return train_set, test_set


SCALES_3 = [1.0, 0.5, 0.25]
SCALES_5 = [1.0, 0.707, 0.5, 0.354, 0.25]


def extract_multiscale_dense_features(imfile, step=8, scales=SCALES_3):
    im = cv2.imread(imfile, cv2.IMREAD_GRAYSCALE)
    feat_all = []
    for sc in scales:
        # cv2.resize expects dsize as (width, height) while im.shape is
        # (height, width); the original code had the two swapped
        dsize = (int(sc * im.shape[1]), int(sc * im.shape[0]))
        im_scaled = cv2.resize(im, dsize, interpolation=cv2.INTER_LINEAR)
        feat = daisy(im_scaled, step=step)
        if feat.size == 0:
            break
        ndim = feat.shape[2]
        feat = np.atleast_2d(feat.reshape(-1, ndim))
        feat_all.append(feat)
    return np.row_stack(feat_all).astype(np.float32)


def compute_features(base_path, im_list, output_path):
    # compute and store low level features for all images
    for fname in im_list:
        # image full path
        imfile = join(base_path, fname)

        # check if destination file already exists
        featfile = join(output_path, splitext(fname)[0] + EXT_FEAT.format(CURRENT_DAISY))
        current_file = featfile
        if exists(current_file):
            print('{} already exists'.format(current_file))
            continue

        feat = extract_multiscale_dense_features(imfile)
        save_data(feat, featfile)

        # feat_l2 = join(output_path, splitext(fname)[0] + EXT_FEAT.format(CURRENT_DAISY))
        # current_file = feat_l2
        # save_data(normalize_L2(feat), feat_l2)

        # feat_sqrt = join(output_path, splitext(fname)[0] + EXT_FEAT.format(CURRENT_DAISY))
        # current_file = feat_sqrt
        # save_data(normalize_sqrt(feat), feat_sqrt)

        # feat_l2_sqrt = join(output_path, splitext(fname)[0] +
EXT_FEAT.format(CURRENT_DAISY)) # current_file = feat_l2_sqrt # save_data(normalize_L2(normalize_sqrt(feat)), feat_l2_sqrt) print('{}: {} features'.format(featfile, feat.shape[0])) def normalize_L2(vector, norm=2): nrm = np.linalg.norm(vector, ord=norm) return vector / (nrm + 1e-7) def normalize_sqrt(vector): return np.sign(vector) * np.sqrt(np.absolute(vector)) def sample_feature_set(base_path, im_list, output_path, n_samples, random_state=None): if random_state is None: random_state = np.random.RandomState() n_per_file = 100 sample_file = join(output_path, SAMPLE.format(n_samples, CURRENT_DAISY)) if exists(sample_file): sample = load_data(sample_file) else: sample = [] while len(sample) < n_samples: i = random_state.randint(0, len(im_list)) featfile = join(base_path, splitext(im_list[i])[0] + EXT_FEAT.format(CURRENT_DAISY)) feat = load_data(featfile) idxs = random_state.choice(range(feat.shape[0]), 100) sample += [feat[i] for i in idxs] print('\r{}/{} samples'.format(len(sample), n_samples), end='') sys.stdout.flush() sample = np.row_stack(sample) save_data(sample, sample_file) print('\r{}: {} features'.format(sample_file, sample.shape[0])) return sample def kmeans_fit(samples, n_clusters, maxiter=100, tol=1e-4, random_state=None): if random_state is None: random_state = np.random.RandomState() n_samples = samples.shape[0] # chose random samples as initial estimates idxs = random_state.randint(0, n_samples, n_clusters) centroids = samples[idxs, :] J_old = np.inf for iter_ in range(maxiter): # SAMPLE-TO-CLUSTER ASSIGNMENT # cdist returns a matrix of size n_samples x n_clusters, where the i-th # row stores the (squared) distance from sample i to all centroids dist2 = distance.cdist(samples, centroids, metric='sqeuclidean') # argmin over columns of the distance matrix assignment = np.argmin(dist2, axis=1) # CENTROIDS UPDATE (+ EVAL DISTORTION) J_new = 0. 
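        # Recompute each centroid as the mean of the samples assigned to it,
        # accumulating the distortion J along the way; the outer loop stops
        # once the relative improvement in J drops below tol.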
        for k in range(n_clusters):
            idxs = np.where(assignment == k)[0]
            if len(idxs) == 0:
                raise RuntimeError('k-means crash!')
            centroids[k, :] = np.mean(samples[idxs], axis=0).astype(np.float32)
            J_new += np.sum(dist2[idxs, assignment[idxs]])
        J_new /= float(n_samples)

        print('iteration {}, potential={:.3e}'.format(iter_, J_new))
        if (J_old - J_new) / J_new < tol:
            print('STOP')
            break
        J_old = J_new

    return centroids


def compute_bovw(vocabulary, features, norm=2):
    if vocabulary.shape[1] != features.shape[1]:
        raise RuntimeError('something is wrong with the data dimensionality')
    dist2 = distance.cdist(features, vocabulary, metric='sqeuclidean')
    assignments = np.argmin(dist2, axis=1)
    # one bin per visual word: vocabulary.shape[0] is n_clusters (the
    # original used shape[1], the descriptor dimension, and also lost the
    # last bin by omitting the final edge)
    bovw, _ = np.histogram(assignments, bins=range(vocabulary.shape[0] + 1))
    return bovw


def split_into_X_y(dataset, output_path='cache'):
    X, y = [], []
    for fname, cid in dataset:
        bovwfile = join(output_path, splitext(fname)[0] + EXT_BOVW.format(CURRENT_DAISY, CURRENT_BOVW))
        #X.append(pickle.load(open(bovwfile, 'rb')))
        X.append(load_data(bovwfile))
        y.append(cid)
    return (np.array(X), np.array(y))


if __name__ == "__main__":
    random_state = np.random.RandomState(12345)

    # ----------------
    # DATA PREPARATION
    # ----------------
    N_CLUSTER = 100

    # paths
    dataset_path = abspath('scene_categories')
    output_path = 'cache'

    # load dataset
    dataset = load_scene_categories(dataset_path)
    n_classes = len(dataset['cname'])
    n_images = len(dataset['fname'])
    print('{} images of {} categories'.format(n_images, n_classes))

    # train-test split
    train_set, test_set = n_per_class_split(dataset, n=100)
    n_train = len(train_set)
    n_test = len(test_set)
    print('{} training samples / {} testing samples'.format(n_train, n_test))

    # compute and store low level features for all images
    # compute_features(dataset_path, dataset['fname'], output_path)

    # --------------------------------
    # UNSUPERVISED DICTIONARY LEARNING
    # --------------------------------
    # n_samples = int(1e5)
    # n_clusters = N_CLUSTER
    # vocabulary_file = join(output_path, VOCABYLARY.format(n_clusters, CURRENT_DAISY))
    # if exists(vocabulary_file):
    #     #vocabulary = pickle.load(open(vocabulary_file, 'rb'))
    #     vocabulary = load_data(vocabulary_file)
    # else:
    #     train_files = [fname for (fname, cid) in train_set]
    #     sample = sample_feature_set(output_path, train_files, output_path,
    #                                 n_samples, random_state=random_state)
    #     vocabulary = kmeans_fit(sample, n_clusters=n_clusters,
    #                             random_state=random_state)
    #     save_data(vocabulary, vocabulary_file)
    # print('{}: {} clusters'.format(vocabulary_file, vocabulary.shape[0]))

    # --------------------
    # COMPUTE BoVW VECTORS
    # --------------------
    # from datetime import datetime
    # start_time = datetime.now()
    # for fname in dataset['fname']:
    #     # low-level features file
    #     featfile = join(output_path, splitext(fname)[0] + EXT_FEAT.format(CURRENT_DAISY))

    #     # check if destination file already exists
    #     bovwfile = join(output_path, splitext(fname)[0] + EXT_BOVW.format(CURRENT_DAISY, CURRENT_BOVW))
    #     if exists(bovwfile):
    #         print('{} already exists'.format(bovwfile))
    #         continue

    #     #feat = pickle.load(open(featfile, 'rb'))
    #     feat = load_data(featfile)
    #     bovw = compute_bovw(vocabulary, feat)
    #     #save_data(bovw, bovwfile)

    #     # print("the correct normalization of CURRENT_BOVW was NOT saved")
    #     # bovwfile_l2 = join(output_path, splitext(fname)[0] + EXT_BOVW.format(CURRENT_DAISY, BOVW_L2))
    #     # bovw_l2 = normalize_L2(bovw)
    #     # save_data(bovw_l2, bovwfile_l2)

    #     # bovwfile_l1 = join(output_path, splitext(fname)[0] + EXT_BOVW.format(CURRENT_DAISY, BOVW_L2))
    #     # bovw_l1 = normalize_L2(bovw, norm=1)
    #     # save_data(bovw_l1, bovwfile_l1)

    #     # bovwfile_sqrt = join(output_path, splitext(fname)[0] + EXT_BOVW.format(CURRENT_DAISY, BOVW_sqrt))
    #     # bovw_sqrt = normalize_sqrt(bovw)
    #     # save_data(bovw_sqrt, bovwfile_sqrt)

    #     # bovwfile_l2_sqrt = join(output_path, splitext(fname)[0] + EXT_BOVW.format(CURRENT_DAISY, BOVW_L2_sqrt))
    #     # bovw_l2_sqrt = normalize_L2(normalize_sqrt(bovw))
    #     # save_data(bovw_l2_sqrt, bovwfile_l2_sqrt)

    #     print('{}'.format(bovwfile))

    # stop_time = datetime.now()
    # time_lapse = stop_time - start_time
    # print("time lapse on bovw for {} clusters:".format(n_clusters), time_lapse.total_seconds())

    # -----------------
    # TRAIN CLASSIFIERS
    # -----------------

    # setup training data
    X_train, y_train = split_into_X_y(train_set)

    # svm = LinearSVC(C=10.0)
    # # svm = SVC(C=10.0, gamma=10)
    # svm.fit(X_train, y_train)

    # # setup testing data
    # X_test, y_test = split_into_X_y(test_set)

    # y_pred = svm.predict(X_test)
    # tp = np.sum(y_test == y_pred)
    # print('accuracy = {:.3f}'.format(float(tp) / len(y_test)))

    from utils import cross_validation
    print(cross_validation(X_train, y_train))
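
    # Exercise 3 sketch (an assumption about the intended usage, not part of
    # the original lab code): the sqrt + L2 transform can be applied row-wise
    # to the BoVW matrix with the helpers defined above, e.g.
    #
    #   X_train = np.apply_along_axis(
    #       lambda v: normalize_L2(normalize_sqrt(v)), 1, X_train)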
gpl-3.0
8,645,061,960,172,530,000
32.851852
115
0.627735
false
sassoftware/mint
mint/django_rest/rbuilder/platforms/models.py
1
5423
# # Copyright (c) SAS Institute Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from django.db import models from mint.django_rest.rbuilder import modellib from mint.django_rest.rbuilder.modellib import basemodels # hack, because of modellib in Platform import sys from xobj import xobj from mint.django_rest.deco import D ## TODO: Change SyntheticFields to correct type (mostly CharFields/BooleanFields/FK's) ## once the schema is updated (if need be). Some of the models are listed as ## abstract as they lack the necessary tables in the db -- and some of the fields on ## those models are temporarily synthetic because we can't have FK's to abstract models. class Platforms(modellib.Collection): class Meta: abstract = True _xobj = xobj.XObjMetadata(tag='platforms') list_fields = ['platform'] class Platform(modellib.XObjIdModel): class Meta: db_table = 'platforms' ordering = [ 'platform_id' ] _xobj = xobj.XObjMetadata(tag='platform') _MODE_CHOICES = (('manual', 'manual'), ('auto', 'auto')) platform_id = D(models.AutoField(primary_key=True, db_column='platformid'), 'ID of the platform') label = D(models.CharField(max_length=1026, unique=True), 'Platform label, must be unique') mode = D(models.CharField(max_length=1026, default='manual', choices=_MODE_CHOICES), 'Charfield, defaults to "manual"') enabled = D(models.IntegerField(default=1), 'Is enabled, defaults to integer 1') project = D(modellib.DeferredForeignKey('projects.Project', db_column='projectid', null=True), 'Project attached to the platform, cannot be null') platform_name = D(models.CharField(max_length=1026, db_column='platformname'), 'Name of the platform') configurable = D(models.BooleanField(default=False), 'Boolean, defaults to False') abstract = D(models.BooleanField(default=False), 'Boolean, defaults to False') is_from_disk = D(models.BooleanField(default=False, db_column='isfromdisk'), 'Boolean, defaults to False') hidden = D(models.BooleanField(default=False), 'Boolean, defaults to False') upstream_url = D(models.TextField(), "Upstream repository URL used when creating external project for this platform") time_refreshed = D(basemodels.DateTimeUtcField(auto_now_add=True), 'Time at which the platform was refreshed') # hack, modellib keeps evaluating to None # SyntheticFields -- fields with no column in the db # most of these are deferred fk's, M2M's, or CharFields in the old code platform_trove_name = modellib.SyntheticField() # charfield repository_host_name = modellib.SyntheticField() # charfield repository_api = modellib.SyntheticField(modellib.HrefField()) # genuine synthetic field product_version = modellib.SyntheticField() # fk platform_versions = modellib.SyntheticField(modellib.HrefField()) # fk, is this different from product_version ? 
platform_usage_terms = modellib.SyntheticField() # charfield mirror_permission = modellib.SyntheticField() # boolean platform_type = modellib.SyntheticField() # charfield load = modellib.SyntheticField() # fk image_type_definitions = modellib.SyntheticField() # fk platform_status = modellib.SyntheticField() # fk is_platform = modellib.SyntheticField() # booleanfield def computeSyntheticFields(self, sender, **kwargs): # Platform has yet to be enabled. if self.project is None: return self._computeRepositoryAPI() self._computePlatformVersions() def _computeRepositoryAPI(self): self.repository_api = modellib.HrefField( href='/repos/%s/api' % self.project.short_name, ) def _computePlatformVersions(self): self.platform_versions = modellib.HrefField( href='/api/platforms/%s/platformVersions' % self.platform_id, ) class PlatformVersions(modellib.Collection): class Meta: abstract = True list_fields = ['platform_version'] class PlatformVersion(modellib.XObjIdModel): class Meta: abstract = True name = D(models.CharField(max_length=1026), 'Platform version name') version = D(models.CharField(max_length=1026), 'Is the platform version') revision = D(models.CharField(max_length=1026), 'Is the platform revision') label = models.CharField(max_length=1026) ordering = D(models.DecimalField(), 'Ordering of the version, is a decimal') class PlatformBuildTemplates(modellib.Collection): class Meta: abstract = True list_fields = ['platform_build_template'] class PlatformBuildTemplate(modellib.XObjIdModel): class Meta: abstract = True pass for mod_obj in sys.modules[__name__].__dict__.values(): if hasattr(mod_obj, '_xobj'): if mod_obj._xobj.tag: modellib.type_map[mod_obj._xobj.tag] = mod_obj
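
# The loop above scans every object defined in this module and registers each
# model that declares an xobj tag in modellib's global tag-to-model map, so
# code elsewhere can look a model class up by its XML tag.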
apache-2.0
-8,816,140,235,265,148,000
40.083333
121
0.702379
false
pbs/django-cms
cms/utils/page.py
1
3242
# -*- coding: utf-8 -*- from django.conf import settings from django.db.models import Q from django.core.exceptions import ValidationError import re APPEND_TO_SLUG = "-copy" COPY_SLUG_REGEX = re.compile(r'^.*-copy(?:-(\d+)*)?$') def is_valid_page_slug(page, parent, lang, slug, site, path=None): """Validates given slug depending on settings. """ from cms.models import Title # Exclude the page with the publisher_state == page.PUBLISHER_STATE_DELETE qs = Title.objects.filter(page__site=site) if page.id: qs = qs.exclude( Q(page=page) | Q(page=page.publisher_public) | Q(page__publisher_state=page.PUBLISHER_STATE_DELETE) ) if settings.i18n_installed: qs = qs.filter(language=lang) if not settings.CMS_FLAT_URLS: if parent: if parent.is_home(): qs = qs.filter(Q(page__parent=parent) | Q(page__parent__isnull=True)) else: qs = qs.filter(page__parent=parent) else: qs = qs.filter(page__parent__isnull=True) if page.pk: qs = qs.exclude(language=lang, page=page) ## Check for slugs if qs.filter(slug=slug).count(): return False ## Check for path if path and qs.filter(path=path).count(): return False return True def get_available_slug(title, new_slug=None): """Smart function generates slug for title if current title slug cannot be used. Appends APPEND_TO_SLUG to slug and checks it again. (Used in page copy function) Returns: slug """ rewrite_slug = False slug = new_slug or title.slug # We need the full path for the title to check for conflicting urls title.slug = slug title.update_path() path = title.path # This checks for conflicting slugs/overwrite_url, for both published and unpublished pages # This is a simpler check than in page_resolver.is_valid_url which # takes into account actualy page URL if not is_valid_page_slug(title.page, title.page.parent, title.language, slug, title.page.site, path): if title.has_url_overwrite and is_valid_page_slug(title.page, title.page.parent, title.language, slug, title.page.site, None): # The title has an overwrite url so a slug change will not change the path and # the validation fails only because the path already exists. return slug # add nice copy attribute, first is -copy, then -copy-2, -copy-3, .... match = COPY_SLUG_REGEX.match(slug) if match: try: next = int(match.groups()[0]) + 1 slug = "-".join(slug.split('-')[:-1]) + "-%d" % next except TypeError: slug = slug + "-2" else: slug = slug + APPEND_TO_SLUG return get_available_slug(title, slug) else: return slug def check_title_slugs(page): """Checks page title slugs for duplicity if required, used after page move/ cut/paste. """ for title in page.title_set.all(): old_slug = title.slug title.slug = get_available_slug(title) if title.slug != old_slug: title.save()
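
# Example of the sequence produced by get_available_slug() when a page is
# copied repeatedly: 'about' -> 'about-copy' -> 'about-copy-2' ->
# 'about-copy-3' -> ... (driven by APPEND_TO_SLUG and COPY_SLUG_REGEX above).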
bsd-3-clause
-1,183,574,578,454,097,700
34.23913
134
0.607958
false
yothenberg/mcxapi
mcxapi/api.py
1
20155
import logging import requests import re from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry from datetime import datetime, timezone, timedelta from collections import namedtuple from anytree import RenderTree, NodeMixin from .exceptions import McxNetworkError, McxParsingError Inbox = namedtuple('Inbox', 'ids fieldnames cases') def ordinal(n): if 10 <= n % 100 < 20: return str(n) + 'th' else: return str(n) + {1: 'st', 2: 'nd', 3: 'rd'}.get(n % 10, "th") def parse_date(date): # Weird date format /Date(milliseconds-since-epoch-+tzoffset)/ # /Date(1486742990423-0600)/ # /Date(1486664366563+0100)/ r = re.compile(r'/Date\((\d+)([-+])(\d{2,2})(\d{2,2})\)/') m = r.match(date) if m is None: return "Unknown Date Format" else: milliseconds, sign, tzhours, tzminutes = m.groups() seconds = int(milliseconds) / 1000.0 sign = -1 if sign == '-' else 1 tzinfo = timezone(sign * timedelta(hours=int(tzhours), minutes=int(tzminutes))) return datetime.fromtimestamp(seconds, tzinfo).strftime('%Y-%m-%d %H:%M%z') class McxApi: BASE_URL = "https://{}.mcxplatform.de/CaseManagement.svc/{}" TIMEOUT = 45 RETRY_COUNT = 3 PASSWORD_KEY = "password" TOKEN_KEY = "token" PAGE_SIZE = 500 PAGES = 199 def __init__(self, instance, company, user, password, headers=None, pool_connections=50): self.instance = instance self.company = company self.user = user self.password = password self.session = requests.Session() # 500 Internal Service Error # 501 Not Implemented # 502 Bad Gateway # 503 Service Unavailable # 504 Gateway Timeout retries = Retry(total=self.RETRY_COUNT, backoff_factor=1, status_forcelist=[500, 501, 502, 503, 504], method_whitelist=['GET', 'POST']) adapter = HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_connections, max_retries=retries) self.session.mount('http://', adapter) self.session.mount('https://', adapter) self.session.headers = headers self.token = None print("HTTP connection timeout: {}, retry count: {}".format(self.TIMEOUT, self.RETRY_COUNT)) def _sanitize_json_for_logging(self, json): json_copy = json.copy() if self.PASSWORD_KEY in json_copy: json_copy[self.PASSWORD_KEY] = "*****" if self.TOKEN_KEY in json: json_copy[self.TOKEN_KEY] = "*****" return json_copy def _url(self, endpoint): return self.BASE_URL.format(self.instance, endpoint) def _post(self, url, params=None, json={}): if self.token: json[self.TOKEN_KEY] = self.token logging.info("POST: url: {} json: {}".format(url, self._sanitize_json_for_logging(json))) try: r = self.session.post(url, params=params, json=json, timeout=self.TIMEOUT) r.raise_for_status() except requests.exceptions.RequestException as e: raise McxNetworkError(url, json=self._sanitize_json_for_logging(json)) from e return r.json() def auth(self): url = self._url("authenticate") payload = {'userName': self.user, self.PASSWORD_KEY: self.password, 'companyName': self.company} json = self._post(url, json=payload) result = json["AuthenticateResult"] if "token" in result: self.token = result["token"] def get_case_inbox(self): """ Fetches active cases assigned to the user """ case_ids = [] fieldnames = [] cases = [] url = self._url("getMobileCaseInboxItems") # Fetches 50 at a time up to a maximum of 100,000 cases for p in range(0, self.PAGES): start_count = len(case_ids) payload = {'startPage': p, 'pageSize': self.PAGE_SIZE} print("Fetching {} {} case_ids from inbox".format(ordinal(p + 1), self.PAGE_SIZE)) json = self._post(url, json=payload) self.parse_case_inbox(json, case_ids, fieldnames, cases) if len(case_ids) == 
start_count: break fieldnames.sort() return Inbox(ids=case_ids, fieldnames=fieldnames, cases=cases) def parse_case_inbox(self, json, case_ids, fieldnames, cases): rows = json["GetMobileCaseInboxItemsResult"]["caseMobileInboxData"]["Rows"] try: for row in rows: case = {} case_id = None row["Inbox Owner"] = self.user for key, val in row.items(): # special case for the nested list of n columns if key == "Columns": for column in val: column_name = column["ColumnName"] if column_name not in fieldnames: fieldnames.append(column_name) case[column_name] = column["ColumnValue"] else: if key not in fieldnames: fieldnames.append(key) if key == "CaseId": case_id = val case[key] = val if case_id not in case_ids: # Dedupes the cases in case the same case_id is exported multiple times because of paging case_ids.append(case_id) cases.append(case) except Exception as e: raise McxParsingError(json, "Unable to parse inbox") from e def get_case(self, case_id): """ Fetches detailed information about a case """ url = self._url("getCaseView") payload = {'caseId': case_id} json = self._post(url, json=payload) try: case = Case(json["GetCaseViewResult"]) except Exception as e: raise McxParsingError(json, "Unable to parse case {}".format(case_id)) from e return case class Case: """ A Case """ def __init__(self, case_view): values = case_view["viewValues"] self.case_id = values["CaseId"] self.alert_name = values["AlertName"] self.owner = values["OwnerFullName"] self.time_to_close = values["TimeToCloseDisplay"] self.time_to_close_goal = values["TimeToCloseGoalDisplay"] self.time_to_respond = values["TimeToRespondDisplay"] self.time_to_respond_goal = values["TimeToRespondGoalDisplay"] self.status_id = values["CaseStatusId"] self.priority_id = values["CasePriorityId"] self.respondent_id = values["RespondentId"] self.survey_id = values["SurveyId"] self.survey_name = values["SurveyName"] self.status = "" self.priority = "" self.activity_notes = [] self.items = [] self.source_responses = [] items = case_view["caseView"]["CaseViewItems"] self._parse_items(items) self._parse_item_answers(values["ItemAnswers"]) self._parse_root_cause_answers(values["CaseRootCauseAnswers"]) self._parse_activity_notes(values["ActivityNotes"]) self._parse_source_responses(values["SourceResponses"]) self.status = self._lookup_item_dropdown_value(Item.STATUS, self.status_id) self.priority = self._lookup_item_dropdown_value(Item.PRIORITY, self.priority_id) def __str__(self): items = "\n".join([str(a) for a in self.items]) activity_notes = "\n".join([str(n) for n in self.activity_notes]) source_responses = "\n".join([str(s) for s in self.source_responses]) return "id:{} owner:{} status:{} priority:{}\nACTIVITY NOTES:\n{}\n\nITEMS:\n{}\n\nRESPONSES:\n{}".format(self.case_id, self.owner, self.status, self.priority, activity_notes, items, source_responses) @property def dict(self): """ Returns a dictionary representation of the standard properties, source_responses, and items with an answer """ COL_CASE_ID = "Case ID" COL_OWNER = "Owner" COL_TIME_TO_CLOSE = "Time To Close" COL_TIME_TO_CLOSE_GOAL = "Time to Goal Close" COL_TIME_TO_RESPOND = "Time To Respond" COL_TIME_TO_RESPOND_GOAL = "Time To Goal Respond" COL_STATUS = "Status" COL_PRIORITY = "Priority" COL_RESPONDEND_ID = "Respondent Id" COL_SURVEY_ID = "Survey Id" COL_SURVEY_NAME = "Survey Name" case = {COL_CASE_ID: self.case_id, COL_OWNER: self.owner, COL_TIME_TO_CLOSE: self.time_to_close, COL_TIME_TO_CLOSE_GOAL: self.time_to_close_goal, COL_TIME_TO_RESPOND: self.time_to_respond, 
COL_TIME_TO_RESPOND_GOAL: self.time_to_respond_goal, COL_STATUS: self.status, COL_PRIORITY: self.priority, COL_RESPONDEND_ID: self.respondent_id, COL_SURVEY_ID: self.survey_id, COL_SURVEY_NAME: self.survey_name} for item in self.items: if item.answer or item.root_cause_answers: case[item.case_item_text] = item.display_answer # Activity notes are exported one per column i = 1 COL_ACTIVITY_NOTES = "Activity Note {}" for activity_note in self.activity_notes: case[COL_ACTIVITY_NOTES.format(i)] = "{} @ {}: {}".format(activity_note.full_name, parse_date(activity_note.date), activity_note.note) i += 1 for source_response in self.source_responses: # sometimes the source responses don't have a question text so we use the case_item_id for the column header if source_response.question_text: case[source_response.question_text] = source_response.answer_text else: case[str(source_response.case_item_id)] = source_response.answer_text return case def _lookup_item_dropdown_value(self, case_question_type_id, value): item = self._find_item_by_type(case_question_type_id) if item: dropdown = item._find_dropdown(value) if dropdown: return dropdown.text return None def _parse_items(self, items): for item_dict in items: item = Item(item_dict) self.items.append(item) def _parse_activity_notes(self, activity_notes): for note_dict in activity_notes: self.activity_notes.append(ActivityNote(note_dict)) def _parse_item_answers(self, item_answers): for item_answer_dict in item_answers: item = self._find_item(item_answer_dict["CaseItemId"]) if item: item.add_answer(item_answer_dict) def _parse_root_cause_answers(self, root_cause_answers): for root_cause_answer_dict in root_cause_answers: item = self._find_item(root_cause_answer_dict["CaseItemId"]) if item: item.add_root_cause_answer(root_cause_answer_dict) def _parse_source_responses(self, source_responses): for source_response_dict in source_responses: self.source_responses.append(SourceResponse(source_response_dict)) def _find_item(self, case_item_id): return next((x for x in self.items if x.case_item_id == case_item_id), None) def _find_item_by_type(self, case_question_type_id): return next((x for x in self.items if x.case_question_type_id == case_question_type_id), None) class Item: def __init__(self, values): self.case_item_id = values["CaseItemId"] self.case_question_type_id = values["CaseQuestionTypeId"] self.case_item_text = values["CaseItemText"] self.dropdown_values = [] self.root_cause_values = [] self.root_cause_answers = [] self.answer = None self.display_answer = "" self._parse_dropdown_values(values["DropdownValues"]) self._parse_root_cause_values(values["RootCauseValues"]) self._build_root_cause_tree() def __str__(self): dropdowns = ", ".join([str(d) for d in self.dropdown_values]) root_causes = self._draw_root_cause_tree() root_causes_answers = self._draw_root_cause_answers() return """\n==========\nitem_id:{} question_type: {} text:{} display:{}\n dropdown:\n{}\n rootcauses:\n{}\n rootcause_answers:\n{}\n answer:\n{}""".format(self.case_item_id, self.case_question_type_id, self.case_item_text, self.display_answer, dropdowns, root_causes, root_causes_answers, self.answer) def _draw_root_cause_tree(self): roots = [r for r in self.root_cause_values if r.is_root is True] tree = "" for root in roots: for pre, _, node in RenderTree(root): tree = "{}{}{}\n".format(tree, pre, node.root_cause_name) return tree def _draw_root_cause_answers(self): answers = "" leaf_answers = [a for a in self.root_cause_answers if a.root_cause.is_leaf] for leaf_answer in 
leaf_answers: leaf = leaf_answer.root_cause.root_cause_name ancestors = " > ".join([c.root_cause_name for c in leaf_answer.root_cause.anchestors]) answers = "{}{} > {}\n".format(answers, ancestors, leaf) return answers def _parse_dropdown_values(self, dropdown_values): for dropdown_dict in dropdown_values: dropdown = Dropdown(dropdown_dict) self.dropdown_values.append(dropdown) def _parse_root_cause_values(self, root_cause_values): for root_cause_dict in root_cause_values: root_cause = RootCause(root_cause_dict) self.root_cause_values.append(root_cause) def _build_root_cause_tree(self): # assign parents for root_cause in self.root_cause_values: if root_cause.parent_tree_id != "#": root_cause.parent = self._find_root_cause(root_cause.parent_tree_id) def _find_root_cause(self, tree_id): return next((r for r in self.root_cause_values if r.tree_id == tree_id), None) # case_question_type_ids CASE_ID = 1 PROGRAM_NAME = 2 CREATED_DATE = 3 STATUS = 4 PRIORITY = 5 ROOT_CAUSE = 6 ACTIVITY_NOTES = 7 OWNER = 9 ALERT_NAME = 10 SHORT_TEXT_BOX = 11 LONG_TEXT_BOX = 12 DROPDOWN = 13 SURVEY_EXCERPT = 15 CLOSED_DATE = 16 SURVEY_NAME = 17 TIME_TO_RESPOND = 18 TIME_TO_CLOSE = 19 EXPLANATION_TEXT = 20 DIVIDER = 21 WATCHERS = 22 LAST_MODIFIED = 25 DATE_PICKER = 26 NUMERIC = 27 def _find_dropdown(self, value): return next((x for x in self.dropdown_values if x.id == value), None) def add_answer(self, values): self.answer = Answer(values) if self.answer.is_empty: self.display_value = "" elif self.case_question_type_id in [self.SHORT_TEXT_BOX, self.LONG_TEXT_BOX, self.DATE_PICKER]: self.display_answer = self.answer.text_value elif self.case_question_type_id == self.NUMERIC: self.display_answer = self.answer.double_value elif self.case_question_type_id == self.DROPDOWN: dropdown = self._find_dropdown(self.answer.int_value) if dropdown: self.display_answer = dropdown.text def add_root_cause_answer(self, values): answer = RootCauseAnswer(values) answer.root_cause = self._find_root_cause(answer.tree_id) self.root_cause_answers.append(answer) self.display_answer = self._draw_root_cause_answers() class ActivityNote: def __init__(self, values): self.note = values["ActivityNote"] self.date = values["ActivityNoteDate"] self.full_name = values["FullName"] def __str__(self): return "{}@{}: {}".format(self.full_name, self.date, self.note) class Dropdown: def __init__(self, values): self.id = values["Id"] self.text = values["Text"] def __str__(self): return "{}:{}".format(self.id, self.text) class RootCause(NodeMixin): def __init__(self, values): self.case_item_id = values["CaseItemId"] self.case_root_cause_id = values["CaseRootCauseId"] self.root_cause_name = values["RootCauseName"] self.parent_tree_id = values["ParentTreeId"] self.tree_id = values["TreeId"] self.parent = None def __str__(self): return "item_id:{} root_cause_id:{} root_cause_name:{} parent_tree_id:{} tree_id:{}".format(self.case_item_id, self.case_root_cause_id, self.root_cause_name, self.parent_tree_id, self.tree_id) class RootCauseAnswer: def __init__(self, values): self.case_item_id = values["CaseItemId"] self.case_root_cause_id = values["CaseRootCauseId"] self.tree_id = values["TreeId"] self.root_cause = None def __str__(self): return "item_id:{} root_cause_id:{} tree_id:{}".format(self.case_item_id, self.case_root_cause_id, self.tree_id) class Answer: def __init__(self, values): self.case_item_answer_id = values["CaseItemAnswerId"] self.case_item_id = values["CaseItemId"] self.case_question_type_id = values["CaseQuestionTypeId"] self.is_empty = 
values["IsEmpty"] self.bool_value = values["BoolValue"] self.double_value = values["DoubleValue"] self.int_value = values["IntValue"] self.text_value = values["TextValue"] self.time_value = values["TimeValue"] def __str__(self): return "id:{} question_type:{} bool:{} double:{} int:{} text:{} time:{}".format(self.case_item_answer_id, self.case_question_type_id, self.bool_value, self.double_value, self.int_value, self.text_value, self.time_value) class SourceResponse: def __init__(self, values): self.case_item_id = values["Key"] self.question_text = values["Value"]["QuestionText"] self.answer_text = values["Value"]["AnswerText"] def __str__(self): return "item_id:{} text:{} answer:{}".format(self.case_item_id, self.question_text, self.answer_text)
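
# Sketch of typical usage (hypothetical instance name and credentials):
#
#   api = McxApi('acme', 'Acme Corp', 'jdoe', 's3cret')
#   api.auth()
#   inbox = api.get_case_inbox()
#   case = api.get_case(inbox.ids[0])
#   print(case.dict)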
mit
4,570,036,573,036,988,400
39.149402
143
0.541851
false
clebersfonseca/google-python-exercises
basic/list1.py
1
3265
#!/usr/bin/python -tt # Copyright 2010 Google Inc. # Licensed under the Apache License, Version 2.0 # http://www.apache.org/licenses/LICENSE-2.0 # Google's Python Class # http://code.google.com/edu/languages/google-python-class/ # Basic list exercises # Fill in the code for the functions below. main() is already set up # to call the functions with a few different inputs, # printing 'OK' when each function is correct. # The starter code for each function includes a 'return' # which is just a placeholder for your code. # It's ok if you do not complete all the functions, and there # are some additional functions to try in list2.py. # A. match_ends # Given a list of strings, return the count of the number of # strings where the string length is 2 or more and the first # and last chars of the string are the same. # Note: python does not have a ++ operator, but += works. def match_ends(words): t = 0 for w in words: if len(w) >= 2 and w[0] == w[-1]: t += 1 return t # B. front_x # Given a list of strings, return a list with the strings # in sorted order, except group all the strings that begin with 'x' first. # e.g. ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields # ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'] # Hint: this can be done by making 2 lists and sorting each of them # before combining them. def front_x(words): listx = [] listall = [] for w in words: if w[0] == 'x': listx.append(w) else: listall.append(w) listx.sort() listx.extend(sorted(listall)) return listx # C. sort_last # Given a list of non-empty tuples, return a list sorted in increasing # order by the last element in each tuple. # e.g. [(1, 7), (1, 3), (3, 4, 5), (2, 2)] yields # [(2, 2), (1, 3), (3, 4, 5), (1, 7)] # Hint: use a custom key= function to extract the last element form each tuple. def sort_last(tuples): def get_last(t): return t[-1] return sorted(tuples, key=get_last) # Simple provided test() function used in main() to print # what each function returns vs. what it's supposed to return. def test(got, expected): if got == expected: prefix = ' OK ' else: prefix = ' X ' print('%s got: %s expected: %s' % (prefix, repr(got), repr(expected))) # Calls the above functions with interesting inputs. def main(): print('match_ends') test(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3) test(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2) test(match_ends(['aaa', 'be', 'abc', 'hello']), 1) print() print('front_x') test(front_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']), ['xaa', 'xzz', 'axx', 'bbb', 'ccc']) test(front_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']), ['xaa', 'xcc', 'aaa', 'bbb', 'ccc']) test(front_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']), ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']) print() print('sort_last') test(sort_last([(1, 3), (3, 2), (2, 1)]), [(2, 1), (3, 2), (1, 3)]) test(sort_last([(2, 3), (1, 2), (3, 1)]), [(3, 1), (1, 2), (2, 3)]) test(sort_last([(1, 7), (1, 3), (3, 4, 5), (2, 2)]), [(2, 2), (1, 3), (3, 4, 5), (1, 7)]) if __name__ == '__main__': main()
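
# Note: sort_last could equivalently be written with an inline lambda,
# e.g. sorted(tuples, key=lambda t: t[-1]).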
apache-2.0
1,280,166,186,578,082,600
29.514019
79
0.580398
false
cemsbr/aioshell
setup.py
1
1619
"""Publishing to pypi.""" from setuptools import setup, find_packages from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name="aioshell", version="1.0", packages=find_packages(), author="Carlos Eduardo Moreira dos Santos", author_email="[email protected]", description="Run single-threaded concurrent shell and ssh commands with" + "few keystrokes.", long_description=long_description, license="GPLv3", keywords="shell ssh async asyncio asynchronous", url="https://github.com/cemsbr/aioshell", classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 4 - Beta', # Indicate who your project is intended for 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Topic :: System :: Installation/Setup', 'Topic :: System :: Systems Administration', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], )
gpl-3.0
-1,064,534,596,124,871,000
33.446809
78
0.631254
false
NicovincX2/Python-3.5
Physique/Mouvement/Dynamique/Systèmes oscillants/Oscillateur harmonique/oscillateur_harmonique_periode_animation.py
1
2155
# -*- coding: utf-8 -*-

import os

'''
Simple numerical integration of the harmonic oscillator equation,
illustrating that the oscillations are isochronous whatever the initial
amplitude, with an animation over time.
'''

import numpy as np               # for np.linspace
import scipy as sp               # usual short alias
import scipy.integrate           # for the integration
import matplotlib.pyplot as plt  # for the plots
from matplotlib import animation # for the progressive animation

omega0 = 1  # the natural angular frequency


def equadiff(y, t):
    '''Returns the action of the system dx/dt = vx and dvx/dt = -omega0**2 * x,
    i.e. the harmonic oscillator x'' + omega0**2 * x = 0.'''
    x, vx = y  # y holds position and velocity
    return [vx, - omega0**2 * x]  # a pair for [dx/dt, dvx/dt]

nb_CI = 10  # number of initial conditions to explore

t = np.linspace(0, 10, 1000)    # the total integration time
x0 = np.linspace(-5, 5, nb_CI)  # the chosen initial positions
v0 = [0] * nb_CI                # the chosen initial velocities

oscillateurs = []
lignes = []
fig = plt.figure(figsize=(10, 8))

for i in range(nb_CI):  # for each initial condition
    # the integration itself
    sol = sp.integrate.odeint(equadiff, [x0[i], v0[i]], t)
    x = sol[:, 0]        # retrieve the position
    l, = plt.plot(t, x)  # and plot it
    oscillateurs.append(x)
    lignes.append(l)


def init():
    for l in lignes:
        l.set_xdata([])
        l.set_ydata([])


def animate(i):
    for l, x in zip(lignes, oscillateurs):
        l.set_ydata(x[:i])
        l.set_xdata(t[:i])

# only the cosmetic touches are left
plt.title('Harmonic oscillator for different initial amplitudes')
plt.ylabel('Position (arbitrary units)')
plt.xlabel('Time (arbitrary units)')

anim = animation.FuncAnimation(fig, animate, len(t), interval=20,
                               init_func=init, blit=False)

plt.show()
# plt.savefig('PNG/S01_oscillateur_harmonique_periode.png')

os.system("pause")
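
# Sanity check (not part of the original script): with v0 = 0 the exact
# solution is x(t) = x0 * cos(omega0 * t), so any of the curves above can be
# compared against the closed form, e.g.:
#
#   x_exact = x0[i] * np.cos(omega0 * t)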
gpl-3.0
-7,314,351,340,730,637,000
29.169014
79
0.634921
false
flophx/coccitools
lib/python/configuration.py
1
2265
#!/usr/bin/python

# This file is part of Coccitools.

# Copyright (C) 2010 Florian MANY

# Coccitools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# Coccitools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.

# You should have received a copy of the GNU Lesser General Public License
# along with Coccitools. If not, see <http://www.gnu.org/licenses/>.


import os, ConfigParser

## Function called by the cocci module if the file "coccitools.conf" does not exist.
# This function generates a "coccitools.conf" file that stores the installation
# settings: the installation paths, the path of the cocci tree and the path of
# the project tree.
# @param path installation path of coccitools
def initConfig(path):

    # When adding sections or items, add them in the reverse order of
    # how you want them to be displayed in the actual file.
    # In addition, please note that using RawConfigParser's and the raw
    # mode of ConfigParser's respective set functions, you can assign
    # non-string values to keys internally, but will receive an error
    # when attempting to write to a file or when you get it in non-raw
    # mode. SafeConfigParser does not allow such assignments to take place.
    config = ConfigParser.RawConfigParser()
    #
    config.add_section('Environment')
    config.set('Environment', 'lib_python_path', path + "lib/python/")
    config.set('Environment', 'libexec_python_path', path + "libexec/python/")
    config.set('Environment', 'installation_path', path)
    #
    config.add_section('Projects')
    config.set('Projects', 'default_project', '')
    config.set('Projects', 'project_path', path + "projects/")
    #
    config.add_section('Cocci')
    config.set('Cocci', 'cocci_path', path + "cocci/")

    # Writing our configuration file to "coccitools.conf"
    with open(path + "coccitools.conf", 'wb') as configfile:
        config.write(configfile)
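
# Example (illustrative; the path is whatever was passed to initConfig) of
# reading the generated file back:
#
#   import ConfigParser
#   config = ConfigParser.RawConfigParser()
#   config.read(path + "coccitools.conf")
#   cocci_path = config.get('Cocci', 'cocci_path')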
lgpl-3.0
2,813,691,682,660,676,600
39.446429
159
0.723179
false
RichardLMR/generic-qsar-py-utils
code/tests/test_4/test_4.py
1
10100
######################################################################################################### # test_4.py # Implements unit tests for the genericQSARpyUtils project (see below). # # ######################################## # #test_4.py: Key documentation :Contents# # ######################################## # #1. Overview of this project. # #2. IMPORTANT LEGAL ISSUES # #<N.B.: Check this section ("IMPORTANT LEGAL ISSUES") to see whether - and how - you ARE ALLOWED TO use this code!> # #<N.B.: Includes contact details.> # ############################## # #1. Overview of this project.# # ############################## # #Project name: genericQSARpyUtils # #Purpose of this project: To provide a set of Python functions # #(or classes with associated methods) that can be used to perform a variety of tasks # #which are relevant to generating input files, from cheminformatics datasets, which can be used to build and # #validate QSAR models (generated using Machine Learning methods implemented in other software packages) # #on such datasets. # #To this end, two Python modules are currently provided. # #(1) ml_input_utils.py # #Defines the following class: # #descriptorsFilesProcessor: This contains methods which can be used to prepare datasets in either CSV or svmlight format, including converting between these formats, based upon previously calculated fingerprints (expressed as a set of tab separated text strings for each instance) or numeric descriptors. # #(2) ml_functions.py # #Defines a set of functions which can be used to carry out univariate feature selection,cross-validation etc. for Machine Learning model input files in svmlight format. # ########################### # #2. IMPORTANT LEGAL ISSUES# # ########################### # Copyright Syngenta Limited 2013 #Copyright (c) 2013-2016 Liverpool John Moores University # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or (at # your option) any later version. # THIS PROGRAM IS MADE AVAILABLE FOR DISTRIBUTION WITHOUT ANY FORM OF WARRANTY TO THE # EXTENT PERMITTED BY APPLICABLE LAW. THE COPYRIGHT HOLDER PROVIDES THE PROGRAM \"AS IS\" # WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM LIES # WITH THE USER. SHOULD THE PROGRAM PROVE DEFECTIVE IN ANY WAY, THE USER ASSUMES THE # COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. THE COPYRIGHT HOLDER IS NOT # RESPONSIBLE FOR ANY AMENDMENT, MODIFICATION OR OTHER ENHANCEMENT MADE TO THE PROGRAM # BY ANY USER WHO REDISTRIBUTES THE PROGRAM SO AMENDED, MODIFIED OR ENHANCED. # IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL THE # COPYRIGHT HOLDER BE LIABLE TO ANY USER FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, # INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE # PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE # OR LOSSES SUSTAINED BY THE USER OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO # OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER HAS BEEN ADVISED OF THE # POSSIBILITY OF SUCH DAMAGES. 
# You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. # #################### # See also: http://www.gnu.org/licenses/ (last accessed 14/01/2013) # Contact: # 1. [email protected] # or if this fails # 2. [email protected] # ##################### ######################################################################################################### ################################# #N.B. 02/06/13: new output files found to be inconsistent with old output files. #Hence, (1) commented out the new vs. old, and clean up new, part of the code, (2) checked all new output files were consistent with expectations[DONE=><OK>; However, perhaps the following message from run_tests.log indicates a possible cause of inconsistency with old results?:"C:\Python27\lib\site-packages\sklearn\feature_selection\univariate_selection.py:271: UserWarning: Duplicate p-values. Result may depend on feature ordering.There are probably duplicate features, or you used a classification score for a regression task. warn("Duplicate p-values. Result may depend on feature ordering."], (3) copied the new output files , (4) uncommented the the new vs. old, and clean up new, part of the code, (5) re-ran tests. ################################## import sys,re,os,glob project_name = 'genericQSARpyUtils' project_modules_to_test_dir = "\\".join(os.path.abspath(__file__).split('\\')[:-3]) sys.path.append(project_modules_to_test_dir) import unittest class test_4(unittest.TestCase): def clean_up_if_all_checks_passed(self,specific_files_not_to_delete): all_files_to_delete = [file_name for file_name in glob.glob(r'%s\*' % "\\".join(os.path.abspath(__file__).split('\\')[:-1])) if not re.search('(.\py$)',file_name) and not file_name in specific_files_not_to_delete] for FILE_TO_DELETE in all_files_to_delete: os.remove(FILE_TO_DELETE) assert not os.path.exists(FILE_TO_DELETE), " This still exists: \n %s" % FILE_TO_DELETE print 'Removed this temporary file: ', FILE_TO_DELETE def compareOriginalAndNewFiles(self,orig_file,new_file): print '-'*50 print 'Comparing: ' print orig_file print 'to:' print new_file print '-'*50 file2Contents = {} for file_name in [orig_file,new_file]: f_in = open(file_name) try: file2Contents[file_name] = ''.join([re.sub(r'\r|\n','<EOL>',LINE) for LINE in f_in.readlines()]) del LINE finally: f_in.close() del f_in del file_name assert file2Contents[orig_file] == file2Contents[new_file] def test_default_training_set_based_feature_selection_for_raw_fingerprint_representations_of_training_and_test_set(self): ############################## print 'Running unittests for this project: ', project_name print 'Running this unittest: ', self._testMethodName ################################## from ml_input_utils import descriptorsFilesProcessor from ml_functions import filter_features_for_svmlight_format_files id2TrainClass = {'mA':1,'mB':1,'mC':0,'mD':1,'mE':0,'mG':0,'mF':0,'mH':0} #trying to make sure (in train_fp_file) one feature (f1) is only found in class 1, not class 0, hence it should be selected, but that this feature (f1) is not found in the test set! id2TestClass = {'mX':1,'mY':1,'mZ':1} #Note to self: as ever, following file names need to be adjusted to make sure files in the directory of this test code Python file are parsed. 
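        # The "\\".join(os.path.abspath(__file__).split('\\')[:-1]) idiom used
        # below resolves the directory that contains this test module
        # (Windows-style path separators are assumed throughout this file).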
train_fp_file = r'%s\contrived_fp_train_file.txt' % "\\".join(os.path.abspath(__file__).split('\\')[:-1]) test_fp_file = r'%s\contrived_fp_test_file.txt' % "\\".join(os.path.abspath(__file__).split('\\')[:-1]) all_feats_svmlight_train_file = r'%s\contrived_svmlight_train_file.txt' % "\\".join(os.path.abspath(__file__).split('\\')[:-1]) all_feats_svmlight_test_file = r'%s\contrived_svmlight_test_file.txt' % "\\".join(os.path.abspath(__file__).split('\\')[:-1]) print 'Preparing original files (pre-feature selection) in svmlight format.' our_descriptorsFilesProcessor = descriptorsFilesProcessor() record_of_all_feat2IndexFiles = [None] for TRAIN_OR_TEST_LABEL in ['Train','Test']: if 'Train' == TRAIN_OR_TEST_LABEL: id2class = id2TrainClass fp_file = train_fp_file all_feats_svmlight_file = all_feats_svmlight_train_file else: assert 'Test' == TRAIN_OR_TEST_LABEL id2class = id2TestClass fp_file = test_fp_file all_feats_svmlight_file = all_feats_svmlight_test_file record_of_all_feat2IndexFiles = our_descriptorsFilesProcessor.write_svmlight_format_modellingFile_from_multiple_descriptors_files(list_of_descriptors_files=[fp_file],corresponding_list_of_whether_descriptors_file_is_actually_a_raw_fp_file=[True],corresponding_list_of_whether_descriptors_file_is_actually_a_jCompoundMapperStringFeatures_file=[False],descriptors_file_name=all_feats_svmlight_file,id2responseVariable=id2class,corresponding_list_of_unique_features_files=record_of_all_feat2IndexFiles) del our_descriptorsFilesProcessor print 'PREPARED original files (pre-feature selection) in svmlight format.' filter_features_for_svmlight_format_files(svmlight_format_train_file=all_feats_svmlight_train_file,svmlight_format_test_file=all_feats_svmlight_test_file,number_of_features_to_retain=2) filtered_feats_svmlight_train_file = r'%s\contrived_svmlight_train_file_fs_chi2_top_2.txt' % "\\".join(os.path.abspath(__file__).split('\\')[:-1]) filtered_feats_svmlight_test_file = r'%s\contrived_svmlight_test_file_fs_chi2_top_2.txt' % "\\".join(os.path.abspath(__file__).split('\\')[:-1]) training_set_feature_name_to_feature_index_file = r'%s\contrived_fp_train_file_fpFeat2InitialIndex.csv' % "\\".join(os.path.abspath(__file__).split('\\')[:-1]) all_input_files_required_for_unittesting = [train_fp_file,test_fp_file] all_orig_output_files_to_be_compared_as_required_for_unittesting = [] for new_file in [all_feats_svmlight_train_file,all_feats_svmlight_test_file,filtered_feats_svmlight_train_file,filtered_feats_svmlight_test_file,training_set_feature_name_to_feature_index_file]: file_ext = new_file.split('.')[-1] orig_file = re.sub('(\.%s$)' % file_ext,' - Copy.%s' % file_ext,new_file) all_orig_output_files_to_be_compared_as_required_for_unittesting.append(orig_file) self.compareOriginalAndNewFiles(orig_file,new_file) files_not_to_delete = all_input_files_required_for_unittesting+all_orig_output_files_to_be_compared_as_required_for_unittesting self.clean_up_if_all_checks_passed(specific_files_not_to_delete=files_not_to_delete)
gpl-2.0
-8,071,070,186,998,198,000
57.381503
726
0.694554
false
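As orientation for readers of the record above: the project's filter_features_for_svmlight_format_files is not included in this excerpt, but the '_fs_chi2_top_2' output names and the sklearn UserWarning quoted in the comment block suggest a chi2 top-k selection fitted on the training set only. A minimal sketch of that general technique with scikit-learn; the function name and output-suffix convention here are illustrative assumptions, not the project's API:

from sklearn.datasets import load_svmlight_file, dump_svmlight_file
from sklearn.feature_selection import SelectKBest, chi2

def filter_top_k_features(train_file, test_file, k=2):
    # Load both sets into one shared feature index space.
    X_train, y_train = load_svmlight_file(train_file)
    X_test, y_test = load_svmlight_file(test_file, n_features=X_train.shape[1])
    # Fit chi2 scores on the training set only, then apply the same mask to both sets.
    selector = SelectKBest(chi2, k=k).fit(X_train, y_train)
    dump_svmlight_file(selector.transform(X_train), y_train, train_file + '.fs_chi2_top_%d' % k)
    dump_svmlight_file(selector.transform(X_test), y_test, test_file + '.fs_chi2_top_%d' % k)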
dufferzafar/critiquebrainz
critiquebrainz/frontend/apis/relationships/artist.py
1
3750
""" Relationship processor for artist entity. """ from urlparse import urlparse from flask_babel import gettext import urllib def process(artist): """Handles processing supported relation lists.""" if 'artist-relation-list' in artist and artist['artist-relation-list']: artist['band-members'] = _artist(artist['artist-relation-list']) if 'url-relation-list' in artist and artist['url-relation-list']: artist['external-urls'] = _url(artist['url-relation-list']) return artist def _artist(list): """Processor for Artist-Artist relationship. :returns Band members. """ band_members = [] for relation in list: if relation['type'] == 'member of band': band_members.append(relation) return band_members def _url(list): """Processor for Artist-URL relationship.""" basic_types = { 'wikidata': {'name': gettext('Wikidata'), 'icon': 'wikidata-16.png', }, 'discogs': {'name': gettext('Discogs'), 'icon': 'discogs-16.png', }, 'allmusic': {'name': gettext('Allmusic'), 'icon': 'allmusic-16.png', }, 'bandcamp': {'name': gettext('Bandcamp'), 'icon': 'bandcamp-16.png', }, 'official homepage': {'name': gettext('Official homepage'), 'icon': 'home-16.png', }, 'BBC Music page': {'name': gettext('BBC Music'), }, } external_urls = [] for relation in list: if relation['type'] in basic_types: external_urls.append(dict(relation.items() + basic_types[relation['type']].items())) else: try: target = urlparse(relation['target']) if relation['type'] == 'lyrics': external_urls.append(dict( relation.items() + { 'name': gettext('Lyrics'), 'disambiguation': target.netloc, }.items())) elif relation['type'] == 'wikipedia': external_urls.append(dict( relation.items() + { 'name': gettext('Wikipedia'), 'disambiguation': target.netloc.split('.')[0] + ':' + urllib.unquote(target.path.split('/')[2]).decode('utf8').replace("_", " "), 'icon': 'wikipedia-16.png', }.items())) elif relation['type'] == 'youtube': path = target.path.split('/') if path[1] == 'user' or path[1] == 'channel': disambiguation = path[2] else: disambiguation = path[1] external_urls.append(dict( relation.items() + { 'name': gettext('YouTube'), 'disambiguation': disambiguation, 'icon': 'youtube-16.png', }.items())) elif relation['type'] == 'social network': if target.netloc == 'twitter.com': external_urls.append(dict( relation.items() + { 'name': gettext('Twitter'), 'disambiguation': target.path.split('/')[1], 'icon': 'twitter-16.png', }.items())) else: # TODO(roman): Process other types here pass except Exception as e: # FIXME(roman): Too broad exception clause. # TODO(roman): Log error. pass external_urls.sort() return external_urls
gpl-2.0
5,770,933,709,101,641,000
42.103448
121
0.476
false
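A quick illustration of the Wikipedia branch above, which is easy to misread: the disambiguation label is the language prefix of the host plus the URL-unquoted article title. A minimal Python 3 sketch of the same derivation (the record itself is Python 2 and uses urlparse/urllib.unquote):

from urllib.parse import urlparse, unquote

def wikipedia_label(url):
    target = urlparse(url)
    language = target.netloc.split('.')[0]                         # 'en' from 'en.wikipedia.org'
    title = unquote(target.path.split('/')[2]).replace('_', ' ')   # article title from '/wiki/<title>'
    return language + ':' + title

# wikipedia_label('https://en.wikipedia.org/wiki/The_Beatles')  ->  'en:The Beatles'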
electrumalt/electrum-ixc
gui/qt/transaction_dialog.py
1
7687
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import sys, time, datetime, re, threading
from electrum_ixc.i18n import _, set_language
from electrum_ixc.util import print_error, print_msg
import os.path, json, ast, traceback
import shutil
import StringIO

try:
    import PyQt4
except Exception:
    sys.exit("Error: Could not import PyQt4. On Linux systems, you may try 'sudo apt-get install python-qt4'")

from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore

from electrum_ixc import transaction
from electrum_ixc.plugins import run_hook
from util import MyTreeWidget
from util import MONOSPACE_FONT


class TxDialog(QDialog):

    def __init__(self, tx, parent):
        self.tx = tx
        tx_dict = tx.as_dict()
        self.parent = parent
        self.wallet = parent.wallet

        QDialog.__init__(self)
        self.setMinimumWidth(600)
        self.setWindowTitle(_("Transaction"))
        self.setModal(True)

        vbox = QVBoxLayout()
        self.setLayout(vbox)

        vbox.addWidget(QLabel(_("Transaction ID:")))
        self.tx_hash_e = QLineEdit()
        self.tx_hash_e.setReadOnly(True)
        vbox.addWidget(self.tx_hash_e)
        self.status_label = QLabel()
        vbox.addWidget(self.status_label)

        self.date_label = QLabel()
        vbox.addWidget(self.date_label)
        self.amount_label = QLabel()
        vbox.addWidget(self.amount_label)
        self.fee_label = QLabel()
        vbox.addWidget(self.fee_label)

        self.add_io(vbox)

        vbox.addStretch(1)

        self.buttons = buttons = QHBoxLayout()
        vbox.addLayout( buttons )

        buttons.addStretch(1)

        self.sign_button = b = QPushButton(_("Sign"))
        b.clicked.connect(self.sign)
        buttons.addWidget(b)

        self.broadcast_button = b = QPushButton(_("Broadcast"))
        b.clicked.connect(lambda: self.parent.broadcast_transaction(self.tx))
        b.hide()
        buttons.addWidget(b)

        self.save_button = b = QPushButton(_("Save"))
        b.clicked.connect(self.save)
        buttons.addWidget(b)

        cancelButton = QPushButton(_("Close"))
        cancelButton.clicked.connect(lambda: self.done(0))
        buttons.addWidget(cancelButton)
        cancelButton.setDefault(True)

        b = QPushButton()
        b.setIcon(QIcon(":icons/qrcode.png"))
        b.clicked.connect(self.show_qr)
        buttons.insertWidget(1, b)

        run_hook('transaction_dialog', self)
        self.update()

    def show_qr(self):
        text = self.tx.raw.decode('hex')
        try:
            self.parent.show_qrcode(text, 'Transaction')
        except Exception as e:
            self.show_message(str(e))

    def sign(self):
        self.parent.sign_raw_transaction(self.tx)
        self.update()

    def save(self):
        name = 'signed_%s.txn' % (self.tx.hash()[0:8]) if self.tx.is_complete() else 'unsigned.txn'
        fileName = self.parent.getSaveFileName(_("Select where to save your signed transaction"), name, "*.txn")
        if fileName:
            with open(fileName, "w+") as f:
                f.write(json.dumps(self.tx.as_dict(), indent=4) + '\n')
            self.show_message(_("Transaction saved successfully"))

    def update(self):
        is_relevant, is_mine, v, fee = self.wallet.get_tx_value(self.tx)

        if self.wallet.can_sign(self.tx):
            self.sign_button.show()
        else:
            self.sign_button.hide()

        if self.tx.is_complete():
            status = _("Signed")
            tx_hash = self.tx.hash()

            if tx_hash in self.wallet.transactions.keys():
                conf, timestamp = self.wallet.verifier.get_confirmations(tx_hash)
                if timestamp:
                    time_str = datetime.datetime.fromtimestamp(timestamp).isoformat(' ')[:-3]
                else:
                    time_str = 'pending'
                status = _("%d confirmations") % conf
                self.broadcast_button.hide()
            else:
                time_str = None
                conf = 0
                self.broadcast_button.show()
        else:
            s, r = self.tx.signature_count()
            # Format AFTER the gettext lookup, so the untranslated template is the msgid.
            status = _("Unsigned") if s == 0 else _('Partially signed (%d/%d)') % (s, r)
            time_str = None
            self.broadcast_button.hide()
            tx_hash = 'unknown'

        self.tx_hash_e.setText(tx_hash)
        self.status_label.setText(_('Status:') + ' ' + status)

        if time_str is not None:
            self.date_label.setText(_("Date: %s") % time_str)
            self.date_label.show()
        else:
            self.date_label.hide()

        # if we are not synchronized, we cannot tell
        if self.parent.network is None or not self.parent.network.is_running() or not self.parent.network.is_connected():
            return
        if not self.wallet.up_to_date:
            return

        if is_relevant:
            if is_mine:
                if fee is not None:
                    self.amount_label.setText(_("Amount sent:") + ' %s' % self.parent.format_amount(v - fee) + ' ' + self.parent.base_unit())
                    self.fee_label.setText(_("Transaction fee") + ': %s' % self.parent.format_amount(fee) + ' ' + self.parent.base_unit())
                else:
                    self.amount_label.setText(_("Amount sent:") + ' %s' % self.parent.format_amount(v) + ' ' + self.parent.base_unit())
                    self.fee_label.setText(_("Transaction fee") + ': ' + _("unknown"))
            else:
                self.amount_label.setText(_("Amount received:") + ' %s' % self.parent.format_amount(v) + ' ' + self.parent.base_unit())
        else:
            self.amount_label.setText(_("Transaction unrelated to your wallet"))

        run_hook('transaction_dialog_update', self)

    def add_io(self, vbox):

        if self.tx.locktime > 0:
            vbox.addWidget(QLabel("LockTime: %d\n" % self.tx.locktime))

        vbox.addWidget(QLabel(_("Inputs")))

        def format_input(x):
            if x.get('is_coinbase'):
                return 'coinbase'
            else:
                _hash = x.get('prevout_hash')
                return _hash[0:8] + '...' + _hash[-8:] + ":%d" % x.get('prevout_n') + u'\t' + "%s" % x.get('address')
        lines = map(format_input, self.tx.inputs)
        i_text = QTextEdit()
        i_text.setFont(QFont(MONOSPACE_FONT))
        i_text.setText('\n'.join(lines))
        i_text.setReadOnly(True)
        i_text.setMaximumHeight(100)
        vbox.addWidget(i_text)

        vbox.addWidget(QLabel(_("Outputs")))
        lines = map(lambda x: x[0] + u'\t\t' + self.parent.format_amount(x[1]) if x[1] else x[0], self.tx.get_outputs())
        o_text = QTextEdit()
        o_text.setFont(QFont(MONOSPACE_FONT))
        o_text.setText('\n'.join(lines))
        o_text.setReadOnly(True)
        o_text.setMaximumHeight(100)
        vbox.addWidget(o_text)

    def show_message(self, msg):
        QMessageBox.information(self, _('Message'), msg, _('OK'))
gpl-3.0
436,080,397,216,934,200
32.714912
136
0.589046
false
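One detail in the record above worth calling out: the original code computed _('Partially signed (%d/%d)' % (s, r)), i.e. it formatted the string before the gettext lookup, so the catalog was queried for 'Partially signed (1/2)' instead of the template and translation silently failed. A minimal runnable sketch of the difference (stdlib gettext only):

from gettext import gettext as _

s, r = 1, 2
label = _('Partially signed (%d/%d)') % (s, r)    # look up the template, then format
# wrong: _('Partially signed (%d/%d)' % (s, r))   # looks up an already-formatted msgid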
DerThorsten/nifty
src/python/test/deprecated/test_mcgala.py
1
6827
from __future__ import print_function

import nifty
import numpy
import vigra
from functools import partial

nrag = nifty.graph.rag
ngala = nifty.graph.gala
ngraph = nifty.graph
G = nifty.graph.UndirectedGraph


def make_dataset(numberOfImages=10, noise=1.0, shape=(100, 100)):
    # NOTE: the 'noise' argument is currently unused; the corruption below comes from
    # the random binary masks and the small uniform offset.
    numpy.random.seed(42)
    imgs = []
    gts = []
    for i in range(numberOfImages):
        gtImg = numpy.zeros(shape)
        gtImg[0:shape[0]/2, :] = 1
        gtImg[shape[0]/4: 3*shape[0]/4, shape[0]/4: 3*shape[0]/4] = 2

        ra = numpy.random.randint(180)
        #print ra
        gtImg = vigra.sampling.rotateImageDegree(gtImg.astype(numpy.float32), int(ra), splineOrder=0)

        grad = vigra.filters.gaussianGradientMagnitude(gtImg.astype('float32'), 1.0)
        for x in range(6):
            ra2 = numpy.random.randint(low=0, high=2, size=gtImg.size)
            ra2 = ra2.reshape(gtImg.shape)
            #print ra2
            grad *= ra2

        grad = grad.squeeze()
        img = numpy.random.random(shape)*float(0.00001)
        grad += img
        #if i<1 :
        #    vigra.imshow(img)
        #    vigra.show()

        imgs.append(grad.astype('float32'))
        gts.append(gtImg)

    return imgs, gts


def makeRag(raw, showSeg=False):
    #raw = vigra.gaussianSmoothing(raw,1.0)
    ew = vigra.filters.hessianOfGaussianEigenvalues(-1.0*raw, 2.3)[:, :, 0]
    seg, nseg = vigra.analysis.watershedsNew(ew)
    #seg, nseg = vigra.analysis.slicSuperpixels(raw,intensityScaling=4.5, seedDistance=20)
    seg = seg.squeeze()
    if showSeg:
        vigra.segShow(raw, seg)
        vigra.show()

    # get the rag; labels must be dense and start at zero
    seg -= 1
    assert seg.min() == 0
    assert seg.max() == nseg - 1
    return nifty.graph.rag.gridRag(seg)


def makeFeatureOpFromChannel(rag, data, minVal=None, maxVal=None):
    # feature accumulators
    if minVal is None:
        minVal = float(data.min())
    if maxVal is None:
        maxVal = float(data.max())

    edgeFeatures = nifty.graph.rag.defaultAccEdgeMap(rag, minVal, maxVal)
    nodeFeatures = nifty.graph.rag.defaultAccNodeMap(rag, minVal, maxVal)

    # accumulate features
    nrag.gridRagAccumulateFeatures(graph=rag, data=data, edgeMap=edgeFeatures,
                                   nodeMap=nodeFeatures)
    fOp = ngala.galaDefaultAccFeature(graph=rag, edgeFeatures=edgeFeatures,
                                      nodeFeatures=nodeFeatures)
    return fOp, minVal, maxVal


def makeFeatureOp(rag, raw, minVals=None, maxVals=None):
    filterFuncs = [
        partial(vigra.filters.gaussianSmoothing, sigma=0.5),
        partial(vigra.filters.gaussianSmoothing, sigma=1.0),
        partial(vigra.filters.gaussianSmoothing, sigma=2.0),
        partial(vigra.filters.gaussianSmoothing, sigma=4.0),
        partial(vigra.filters.gaussianGradientMagnitude, sigma=1.0),
        partial(vigra.filters.gaussianGradientMagnitude, sigma=2.0),
        partial(vigra.filters.gaussianGradientMagnitude, sigma=4.0),
        partial(vigra.filters.hessianOfGaussianEigenvalues, scale=1.0),
        partial(vigra.filters.hessianOfGaussianEigenvalues, scale=2.0),
        partial(vigra.filters.hessianOfGaussianEigenvalues, scale=4.0),
        partial(vigra.filters.structureTensorEigenvalues, innerScale=1.0, outerScale=2.0),
        partial(vigra.filters.structureTensorEigenvalues, innerScale=2.0, outerScale=4.0),
        partial(vigra.filters.structureTensorEigenvalues, innerScale=4.0, outerScale=8.0)
    ]

    fCollection = ngala.galaFeatureCollection(rag)
    minVals_ = []
    maxVals_ = []
    # One global channel counter across ALL filter responses. The previous version reused
    # the per-filter loop variable 'c', so passed-in minVals/maxVals were indexed
    # incorrectly for every filter after the first one.
    channelIndex = 0
    for f in filterFuncs:
        res = f(raw).squeeze()
        if res.ndim == 2:
            res = res[:, :, None]
        for c in range(res.shape[2]):
            resC = res[:, :, c]
            if minVals is not None:
                minv = minVals[channelIndex]
            else:
                minv = resC.min()
            if maxVals is not None:
                maxv = maxVals[channelIndex]
            else:
                maxv = resC.max()
            minVals_.append(minv)
            maxVals_.append(maxv)
            op, _minVal, _maxVal = makeFeatureOpFromChannel(rag, resC, minVal=minv, maxVal=maxv)
            fCollection.addFeatures(op)
            channelIndex += 1

    return fCollection, minVals_, maxVals_


def makeEdgeGt(rag, gt):
    # get the gt: an edge is a "cut" edge iff its two nodes carry different labels
    nodeGt = nrag.gridRagAccumulateLabels(rag, gt)
    uvIds = rag.uvIds()
    edgeGt = (nodeGt[uvIds[:, 0]] != nodeGt[uvIds[:, 1]]).astype('double')
    return edgeGt


def test_mcgala():
    # get the dataset
    imgs, gts = make_dataset(10, noise=4.5, shape=(200, 200))

    Obj = G.MulticutObjective
    CG = G.EdgeContractionGraph
    CGObj = CG.MulticutObjective

    greedyFactory = Obj.greedyAdditiveFactory()
    ilpFactory = Obj.multicutIlpFactory(
        ilpSolver='cplex',
        addThreeCyclesConstraints=False,
        addOnlyViolatedThreeCyclesConstraints=False
        #memLimit= 0.01
    )

    fmFactoryA = CGObj.fusionMoveBasedFactory(
        #fusionMove=CGObj.fusionMoveSettings(mcFactory=greedyFactory),
        fusionMove=CGObj.fusionMoveSettings(mcFactory=ilpFactory),
        #proposalGen=nifty.greedyAdditiveProposals(sigma=30,nodeNumStopCond=-1,weightStopCond=0.0),
        proposalGen=CGObj.watershedProposals(sigma=1, seedFraction=0.1),
        numberOfIterations=10,
        numberOfParallelProposals=40,  # no effect if nThreads equals 0 or 1
        numberOfThreads=40,
        stopIfNoImprovement=2,
        fuseN=2,
    )

    ragTrain = makeRag(imgs[0], showSeg=True)
    fOpTrain, minVals, maxVals = makeFeatureOp(ragTrain, imgs[0])
    edgeGt = makeEdgeGt(ragTrain, gts[0])

    cOrderSettings = G.galaContractionOrderSettings(
        mcMapFactory=fmFactoryA, runMcMapEachNthTime=10)

    # gala class
    settings = G.galaSettings(threshold0=0.1, threshold1=0.9, thresholdU=0.1,
                              numberOfEpochs=1, numberOfTrees=200,
                              contractionOrderSettings=cOrderSettings
                              #mapFactory=fmFactoryA,
                              #perturbAndMapFactory=fmFactoryB
                              )
    gala = G.gala(settings)

    trainingInstance = ngala.galaTrainingInstance(ragTrain, fOpTrain, edgeGt)
    gala.addTrainingInstance(trainingInstance)
    gala.train()

    for x in range(3, 10):
        ragTest = makeRag(imgs[x], showSeg=False)
        # Reuse the training set's per-channel min/max so both feature spaces match.
        fOpTest, minVals, maxVals = makeFeatureOp(ragTest, imgs[x], minVals, maxVals)
        instance = ngala.galaInstance(ragTest, fOpTest)
        edgeGt = makeEdgeGt(ragTest, gts[x])
        nodeRes = gala.predict(instance)

        pixelNodeRes = nrag.projectScalarNodeDataToPixels(ragTest, nodeRes, -1)
        vigra.segShow(imgs[x], pixelNodeRes)
        vigra.show()

        #for edge, uv in enumerate(ragTest.uvIds()):
        #    print(edge,edgeGt[edge],nodeRes[uv[0]]!=nodeRes[uv[1]])

test_mcgala()
mit
9,194,367,776,170,195,000
29.07489
102
0.640398
false
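The channel-bookkeeping fix above is worth isolating, because the pattern recurs whenever per-channel statistics from a training image must be replayed on a test image. A minimal numpy-only sketch (the filter list and shapes are illustrative):

import numpy as np

def per_channel_ranges(image, filters, known_mins=None, known_maxs=None):
    mins, maxs = [], []
    channel_index = 0                      # global across ALL filters, never reset
    for f in filters:
        res = f(image)
        if res.ndim == 2:
            res = res[:, :, None]          # unify single- and multi-channel responses
        for c in range(res.shape[2]):
            res_c = res[:, :, c]
            mins.append(known_mins[channel_index] if known_mins is not None else float(res_c.min()))
            maxs.append(known_maxs[channel_index] if known_maxs is not None else float(res_c.max()))
            channel_index += 1
    return mins, maxs                      # index i is the i-th channel over the whole filter bank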
fungos/gemuo
src/gemuo/engine/relpor.py
1
1926
#
# GemUO
#
# (c) 2005-2012 Max Kellermann <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#

import re
from twisted.python import log
import uo.packets as p
from gemuo.engine import Engine

tilepic_re = re.compile(r'\{ tilepic \d+ \d+ (\d+) \}')

class RelPorCaptcha(Engine):
    """Responds to the captcha gumps on the Rel Por freeshard."""

    def _on_captcha(self, packet):
        tiles = []
        total = 0
        for m in tilepic_re.findall(packet.layout):
            value = int(m)
            total += value
            tiles.append(value)

        log.msg("Captcha: " + ','.join(map(hex, tiles)))

        if len(tiles) == 0:
            return

        # see which tile id deviates the most from the mean; true division keeps
        # the mean exact even though the tile ids are integers
        avg = total / float(len(tiles))
        d = map(lambda value: abs(avg - value), tiles)
        m = max(zip(d, range(len(d))), key=lambda value: value[0])

        # pick this tile
        response = m[1]
        log.msg("Captcha response: %#x" % tiles[response])

        # and send the gump response
        self._client.send(p.GumpResponse(serial=packet.serial,
                                         gump_id=packet.gump_id,
                                         button_id=1,
                                         switches=[response]))

    def on_packet(self, packet):
        if isinstance(packet, p.DisplayGumpPacked) and \
               len(packet.text) == 1 and \
               'Which of these things is not like the others' in packet.text[0]:
            self._on_captcha(packet)
gpl-2.0
277,350,442,926,236,400
32.789474
80
0.590343
false
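Stripped of the protocol plumbing, the captcha heuristic in the record above is just "return the index of the tile id farthest from the mean". A minimal pure-Python sketch with a worked example:

def odd_one_out(tiles):
    avg = sum(tiles) / float(len(tiles))
    deviations = [abs(avg - t) for t in tiles]
    return deviations.index(max(deviations))   # index of the most deviant tile id

# odd_one_out([0x1f4, 0x1f6, 0x9c1, 0x1f5])  ->  2   (0x9c1 is the outlier)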
nlsynth/iroha
examples/config-examples.py
1
1102
#!/usr/bin/python
# Run this to generate Makefile, then run 'make'

EXAMPLES = {
    'minimum' : {'minimum'},
    'copy' : {'copy'},
    'loop' : {'loop', 'example_common'},
    'xorshift' : {'xorshift', 'example_common'},
}
EXAMPLE_OBJS = ['example_common', 'copy', 'loop', 'minimum', 'xorshift']

OUTPUT = 'Makefile'

ofh = open(OUTPUT, 'w')
ofh.write('# Generated by config-examples.py\n\n')
ofh.write('all\t: ' + ' '.join(EXAMPLES) + '\n\n')
ofh.write('clean\t:\n')
ofh.write('\trm -f *.o\n')
ofh.write('\trm -f ' + ' '.join(EXAMPLES) + '\n')

# One compile rule per object file.
for e in EXAMPLE_OBJS:
    ofh.write(e + '.o\t: ' + e + '.cpp\n')
    ofh.write('\tg++ -std=c++11 -Wall -g -I../src -c ' + e + '.cpp\n\n')

# One link rule per example binary.
for k, v in EXAMPLES.iteritems():
    objs = []
    for o in v:
        objs.append(o + '.o')
    obj_lst = ' '.join(objs)
    ofh.write(k + '\t: ../src/out/Default/obj.target/src/libiroha.a ' + obj_lst + '\n')
    ofh.write('\tg++ -o ' + k + ' ' + obj_lst + ' -L../src/out/Default/obj.target/src/ -liroha -lverilog_writer -lnumeric -liroha\n')

# Close explicitly so the Makefile is fully flushed before 'make' can be run.
ofh.close()

print('Generated Makefile. Please run \'make\'')
bsd-3-clause
-4,760,997,667,858,220,000
29.611111
133
0.554446
false
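The generator above originally left the file handle open until interpreter exit; the explicit close added here guarantees the Makefile is flushed. In Python 3 the same pattern would usually be written with a context manager and dict.items() (iteritems() is Python-2-only). A minimal sketch:

EXAMPLES = {'minimum': {'minimum'}, 'loop': {'loop', 'example_common'}}

with open('Makefile', 'w') as ofh:                  # closed (and flushed) automatically
    ofh.write('all\t: ' + ' '.join(EXAMPLES) + '\n\n')
    for target, objs in EXAMPLES.items():
        obj_lst = ' '.join(o + '.o' for o in sorted(objs))
        ofh.write('%s\t: %s\n\tg++ -o %s %s\n\n' % (target, obj_lst, target, obj_lst))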
syscoin/syscoin
test/functional/rpc_rawtransaction.py
1
30253
#!/usr/bin/env python3 # Copyright (c) 2014-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the rawtransaction RPCs. Test the following RPCs: - createrawtransaction - signrawtransactionwithwallet - sendrawtransaction - decoderawtransaction - getrawtransaction """ from collections import OrderedDict from decimal import Decimal from io import BytesIO from test_framework.messages import CTransaction, ToHex from test_framework.test_framework import SyscoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, find_vout_for_address, hex_str_to_bytes, ) class multidict(dict): """Dictionary that allows duplicate keys. Constructed with a list of (key, value) tuples. When dumped by the json module, will output invalid json with repeated keys, eg: >>> json.dumps(multidict([(1,2),(1,2)]) '{"1": 2, "1": 2}' Used to test calls to rpc methods with repeated keys in the json object.""" def __init__(self, x): dict.__init__(self, x) self.x = x def items(self): return self.x # Create one-input, one-output, no-fee transaction: class RawTransactionsTest(SyscoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 self.extra_args = [ ["-txindex"], ["-txindex"], ["-txindex"], ] self.supports_cli = False def skip_test_if_missing_module(self): self.skip_if_no_wallet() def setup_network(self): super().setup_network() self.connect_nodes(0, 2) def run_test(self): self.log.info('prepare some coins for multiple *rawtransaction commands') self.nodes[2].generate(1) self.sync_all() self.nodes[0].generate(101) self.sync_all() self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0) self.sync_all() self.nodes[0].generate(5) self.sync_all() self.log.info('Test getrawtransaction on genesis block coinbase returns an error') block = self.nodes[0].getblock(self.nodes[0].getblockhash(0)) assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot']) self.log.info('Check parameter types and required parameters of createrawtransaction') # Test `createrawtransaction` required parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction) assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, []) # Test `createrawtransaction` invalid extra parameters assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo') # Test `createrawtransaction` invalid `inputs` txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {}) assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {}) assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {}) assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {}) assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 
'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {}) assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {}) assert_raises_rpc_error(-8, "Invalid parameter, vout cannot be negative", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {}) assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {}) # Test `createrawtransaction` invalid `outputs` address = self.nodes[0].getnewaddress() address2 = self.nodes[0].getnewaddress() assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo') self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility self.nodes[0].createrawtransaction(inputs=[], outputs=[]) assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'}) assert_raises_rpc_error(-5, "Invalid Syscoin address", self.nodes[0].createrawtransaction, [], {'foo': 0}) assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'}) assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1}) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)])) assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}]) assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}]) assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")])) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}]) assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']]) # Test `createrawtransaction` invalid `locktime` assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo') assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1) assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296) # Test `createrawtransaction` invalid `replaceable` assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo') self.log.info('Check that createrawtransaction accepts an array and object as outputs') tx = CTransaction() # One output tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99})))) assert_equal(len(tx.vout), 1) assert_equal( tx.serialize().hex(), self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]), ) # Two outputs tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], 
outputs=OrderedDict([(address, 99), (address2, 99)])))))
        assert_equal(len(tx.vout), 2)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
        )
        # Multiple mixed outputs
        tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
        assert_equal(len(tx.vout), 3)
        assert_equal(
            tx.serialize().hex(),
            self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
        )

        for type in ["bech32", "p2sh-segwit", "legacy"]:
            addr = self.nodes[0].getnewaddress("", type)
            addrinfo = self.nodes[0].getaddressinfo(addr)
            pubkey = addrinfo["scriptPubKey"]

            self.log.info('sendrawtransaction with missing prevtx info (%s)' %(type))

            # Test `signrawtransactionwithwallet` invalid `prevtxs`
            inputs  = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}]
            outputs = { self.nodes[0].getnewaddress() : 1 }
            rawtx   = self.nodes[0].createrawtransaction(inputs, outputs)

            prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
            succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
            assert succ["complete"]

            if type == "legacy":
                del prevtx["amount"]
                succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
                assert succ["complete"]

            if type != "legacy":
                assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                    {
                        "txid": txid,
                        "scriptPubKey": pubkey,
                        "vout": 3,
                    }
                ])

            assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "txid": txid,
                    "scriptPubKey": pubkey,
                    "amount": 1,
                }
            ])
            assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "scriptPubKey": pubkey,
                    "vout": 3,
                    "amount": 1,
                }
            ])
            assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
                {
                    "txid": txid,
                    "vout": 3,
                    "amount": 1
                }
            ])

        #########################################
        # sendrawtransaction with missing input #
        #########################################

        self.log.info('sendrawtransaction with missing input')
        inputs  = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] # won't exist
        outputs = { self.nodes[0].getnewaddress() : 4.998 }
        rawtx   = self.nodes[2].createrawtransaction(inputs, outputs)
        rawtx   = self.nodes[2].signrawtransactionwithwallet(rawtx)

        # This will raise an exception since there are missing inputs
        assert_raises_rpc_error(-25, "bad-txns-inputs-missingorspent", self.nodes[2].sendrawtransaction, rawtx['hex'])

        #####################################
        # getrawtransaction with block hash #
        #####################################

        # make a tx by sending, then generate 2 blocks; block1 has the tx in it
        tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
        block1, block2 = self.nodes[2].generate(2)
        self.sync_all()
        # We should be able to get the raw transaction by providing the correct block
        gottx = self.nodes[0].getrawtransaction(tx, True, block1)
        assert_equal(gottx['txid'], tx)
        assert_equal(gottx['in_active_chain'], True)
        # We should have the 'in_active_chain' flag when we don't provide a block due to blockindexdb
        gottx = self.nodes[0].getrawtransaction(tx, True)
        assert_equal(gottx['txid'], tx)
        # SYSCOIN
        assert 'in_active_chain' in gottx
        # We should not get the tx if we provide an unrelated block
        assert_raises_rpc_error(-5, "No such transaction found",
self.nodes[0].getrawtransaction, tx, True, block2) # An invalid block hash should raise the correct errors assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True) assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar") assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234") assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000") assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000") # Undo the blocks and check in_active_chain self.nodes[0].invalidateblock(block1) gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1) assert_equal(gottx['in_active_chain'], False) self.nodes[0].reconsiderblock(block1) assert_equal(self.nodes[0].getbestblockhash(), block2) if not self.options.descriptors: # The traditional multisig workflow does not work with descriptor wallets so these are legacy only. # The multisig workflow with descriptor wallets uses PSBTs and is tested elsewhere, no need to do them here. ######################### # RAW TX MULTISIG TESTS # ######################### # 2of2 test addr1 = self.nodes[2].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[2].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) # Tests for createmultisig and addmultisigaddress assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"]) self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here. 
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']

            # use balance deltas instead of absolute values
            bal = self.nodes[2].getbalance()

            # send 1.2 SYS to msig addr
            txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
            self.sync_all()
            self.nodes[0].generate(1)
            self.sync_all()
            assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) # node2 has both keys of the 2of2 ms addr., tx should affect the balance


            # 2of3 test from different nodes
            bal = self.nodes[2].getbalance()
            addr1 = self.nodes[1].getnewaddress()
            addr2 = self.nodes[2].getnewaddress()
            addr3 = self.nodes[2].getnewaddress()

            addr1Obj = self.nodes[1].getaddressinfo(addr1)
            addr2Obj = self.nodes[2].getaddressinfo(addr2)
            addr3Obj = self.nodes[2].getaddressinfo(addr3)

            mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']

            txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
            decTx = self.nodes[0].gettransaction(txId)
            rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
            self.sync_all()
            self.nodes[0].generate(1)
            self.sync_all()

            # THIS IS AN INCOMPLETE FEATURE
            # NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND COUNT IN THE BALANCE CALCULATION
            assert_equal(self.nodes[2].getbalance(), bal) # for now, assume the funds of a 2of3 multisig tx are not marked as spendable

            txDetails = self.nodes[0].gettransaction(txId, True)
            rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
            vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000'))

            bal = self.nodes[0].getbalance()
            inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}]
            outputs = { self.nodes[0].getnewaddress() : 2.19 }
            rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
            rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
            assert_equal(rawTxPartialSigned['complete'], False) # node1 only has one key, can't comp.
sign the tx rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs) assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys self.nodes[2].sendrawtransaction(rawTxSigned['hex']) rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx # 2of2 test for combining transactions bal = self.nodes[2].getbalance() addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[1].getaddressinfo(addr1) addr2Obj = self.nodes[2].getaddressinfo(addr2) self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] mSigObjValid = self.nodes[2].getaddressinfo(mSigObj) txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) decTx = self.nodes[0].gettransaction(txId) rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable txDetails = self.nodes[0].gettransaction(txId, True) rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex']) vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000')) bal = self.nodes[0].getbalance() inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}] outputs = { self.nodes[0].getnewaddress() : 2.19 } rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs) rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs) self.log.debug(rawTxPartialSigned1) assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs) self.log.debug(rawTxPartialSigned2) assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. 
sign the tx rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']]) self.log.debug(rawTxComb) self.nodes[2].sendrawtransaction(rawTxComb) rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx # decoderawtransaction tests # witness transaction encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000" decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction # non-witness transaction encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000" decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) # known ambiguous transaction in the chain (see https://github.com/bitcoin/bitcoin/issues/20579) encrawtx = "020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff4b03c68708046ff8415c622f4254432e434f4d2ffabe6d6de1965d02c68f928e5b244ab1965115a36f56eb997633c7f690124bbf43644e23080000000ca3d3af6d005a65ff0200fd00000000ffffffff03f4c1fb4b0000000016001497cfc76442fe717f2a3f0cc9c175f7561b6619970000000000000000266a24aa21a9ed957d1036a80343e0d1b659497e1b48a38ebe876a056d45965fac4a85cda84e1900000000000000002952534b424c4f434b3a8e092581ab01986cbadc84f4b43f4fa4bb9e7a2e2a0caf9b7cf64d939028e22c0120000000000000000000000000000000000000000000000000000000000000000000000000" decrawtx = self.nodes[0].decoderawtransaction(encrawtx) decrawtx_wit = self.nodes[0].decoderawtransaction(encrawtx, True) assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # fails to decode as non-witness transaction assert_equal(decrawtx, decrawtx_wit) # the witness interpretation should be chosen assert_equal(decrawtx['vin'][0]['coinbase'], "03c68708046ff8415c622f4254432e434f4d2ffabe6d6de1965d02c68f928e5b244ab1965115a36f56eb997633c7f690124bbf43644e23080000000ca3d3af6d005a65ff0200fd00000000") # Basic signrawtransaction test addr = self.nodes[1].getnewaddress() txid = self.nodes[0].sendtoaddress(addr, 10) self.nodes[0].generate(1) self.sync_all() vout = find_vout_for_address(self.nodes[1], txid, addr) rawTx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): 9.999}) rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx) txId = self.nodes[1].sendrawtransaction(rawTxSigned['hex']) self.nodes[0].generate(1) self.sync_all() # getrawtransaction tests # 1. valid parameters - only supply txid assert_equal(self.nodes[0].getrawtransaction(txId), rawTxSigned['hex']) # 2. valid parameters - supply txid and 0 for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex']) # 3. valid parameters - supply txid and False for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned['hex']) # 4. valid parameters - supply txid and 1 for verbose. 
# We only check the "hex" field of the output so we don't need to update this test every time the output format changes. assert_equal(self.nodes[0].getrawtransaction(txId, 1)["hex"], rawTxSigned['hex']) # 5. valid parameters - supply txid and True for non-verbose assert_equal(self.nodes[0].getrawtransaction(txId, True)["hex"], rawTxSigned['hex']) # 6. invalid parameters - supply txid and string "Flase" assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, "Flase") # 7. invalid parameters - supply txid and empty array assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, []) # 8. invalid parameters - supply txid and empty dict assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, {}) inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}] outputs = { self.nodes[0].getnewaddress() : 1 } rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx= self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 1000) # 9. invalid parameters - sequence number out of range inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}] outputs = { self.nodes[0].getnewaddress() : 1 } assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) # 10. invalid parameters - sequence number out of range inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}] outputs = { self.nodes[0].getnewaddress() : 1 } assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}] outputs = { self.nodes[0].getnewaddress() : 1 } rawtx = self.nodes[0].createrawtransaction(inputs, outputs) decrawtx= self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['vin'][0]['sequence'], 4294967294) #################################### # TRANSACTION VERSION NUMBER TESTS # #################################### # Test the minimum transaction version number that fits in a signed 32-bit integer. # As transaction version is unsigned, this should convert to its unsigned equivalent. tx = CTransaction() tx.nVersion = -0x80000000 rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], 0x80000000) # Test the maximum transaction version number that fits in a signed 32-bit integer. tx = CTransaction() tx.nVersion = 0x7fffffff rawtx = ToHex(tx) decrawtx = self.nodes[0].decoderawtransaction(rawtx) assert_equal(decrawtx['version'], 0x7fffffff) self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate') # Test a transaction with a small fee. 
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) rawTx = self.nodes[0].getrawtransaction(txId, True) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000')) self.sync_all() inputs = [{ "txid" : txId, "vout" : vout['n'] }] # Fee 10,000 satoshis, (1 - (10000 sat * 0.00000001 SYS/sat)) = 0.9999 outputs = { self.nodes[0].getnewaddress() : Decimal("0.99990000") } rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) assert_equal(rawTxSigned['complete'], True) # Fee 10,000 satoshis, ~100 b transaction, fee rate should land around 100 sat/byte = 0.00100000 SYS/kB # Thus, testmempoolaccept should reject testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']], 0.00001000)[0] assert_equal(testres['allowed'], False) assert_equal(testres['reject-reason'], 'max-fee-exceeded') # and sendrawtransaction should throw assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000) # and the following calls should both succeed testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']])[0] assert_equal(testres['allowed'], True) self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex']) # Test a transaction with a large fee. txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) rawTx = self.nodes[0].getrawtransaction(txId, True) vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000')) self.sync_all() inputs = [{ "txid" : txId, "vout" : vout['n'] }] # Fee 2,000,000 satoshis, (1 - (2000000 sat * 0.00000001 SYS/sat)) = 0.98 outputs = { self.nodes[0].getnewaddress() : Decimal("0.98000000") } rawTx = self.nodes[2].createrawtransaction(inputs, outputs) rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx) assert_equal(rawTxSigned['complete'], True) # Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 0.20000000 SYS/kB # Thus, testmempoolaccept should reject testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0] assert_equal(testres['allowed'], False) assert_equal(testres['reject-reason'], 'max-fee-exceeded') # and sendrawtransaction should throw assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex']) # and the following calls should both succeed testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']], maxfeerate='0.20000000')[0] assert_equal(testres['allowed'], True) self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'], maxfeerate='0.20000000') if __name__ == '__main__': RawTransactionsTest().main()
mit
-4,894,850,434,724,789,000
57.743689
601
0.645556
false
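One helper in the record above that trips up readers is multidict: json.dumps() serialises a dict subclass through its items() method, so overriding items() lets the tests emit deliberately invalid JSON with repeated keys. A minimal standalone sketch:

import json

class multidict(dict):
    """Dict that reports duplicate keys through items()."""
    def __init__(self, x):
        dict.__init__(self, x)
        self.x = x
    def items(self):
        return self.x

print(json.dumps(multidict([('data', 'aa'), ('data', 'bb')])))
# -> {"data": "aa", "data": "bb"}   (invalid JSON, exactly what the duplicate-key tests need)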
jbaayen/sympy
sympy/thirdparty/pyglet/pyglet/font/__init__.py
1
21133
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2007 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materials provided with the
#    distribution.
#  * Neither the name of the pyglet nor the names of its
#    contributors may be used to endorse or promote products
#    derived from this software without specific prior written
#    permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------

'''Load fonts and render text.

This is a fairly low-level interface to text rendering.  Obtain a font using
`load`::

    from pyglet import font
    arial = font.load('Arial', 14, bold=True, italic=False)

pyglet will load any system-installed fonts.  You can add additional fonts
(for example, from your program resources) using `add_file` or
`add_directory`.

Obtain a list of `Glyph` objects for a string of text using the `Font`
object::

    text = 'Hello, world!'
    glyphs = arial.get_glyphs(text)

The most efficient way to render these glyphs is with a `GlyphString`::

    glyph_string = GlyphString(text, glyphs)
    glyph_string.draw()

There are also a variety of methods in both `Font` and `GlyphString` to
facilitate word-wrapping.

A convenient way to render a string of text is with a `Text`::

    text = Text(font, text)
    text.draw()

See the `pyglet.font.base` module for documentation on the base classes used
by this package.
'''

__docformat__ = 'restructuredtext'
__version__ = '$Id: __init__.py 1493 2007-12-08 09:20:38Z Alex.Holkner $'

import sys
import os
import math

import pyglet
from pyglet.gl import *
from pyglet import window
from pyglet import image

class GlyphString(object):
    '''An immutable string of glyphs that can be rendered quickly.

    This class is ideal for quickly rendering single or multi-line strings
    of text that use the same font.  To wrap text using a glyph string,
    call `get_break_index` to find the optimal breakpoint for each line,
    then repeatedly call `draw` for each breakpoint.
    '''

    def __init__(self, text, glyphs, x=0, y=0):
        '''Create a glyph string.

        The `text` string is used to determine valid breakpoints; all
        glyphs must have already been determined using
        `pyglet.font.base.Font.get_glyphs`.  The string will be positioned
        with the baseline of the left-most glyph at the given coordinates.
        :Parameters:
            `text` : str or unicode
                String to represent.
            `glyphs` : list of `pyglet.font.base.Glyph`
                Glyphs representing `text`.
            `x` : float
                X coordinate of the left-side bearing of the left-most glyph.
            `y` : float
                Y coordinate of the baseline.

        '''
        # Create an interleaved array in GL_T4F_V4F format (four texture
        # coordinates plus four vertex coordinates per vertex, matching the
        # glInterleavedArrays call in draw) and determine the state changes
        # required.
        lst = []
        texture = None
        self.text = text
        self.states = []
        self.cumulative_advance = []  # for fast post-string breaking
        state_from = 0
        state_length = 0
        for i, glyph in enumerate(glyphs):
            if glyph.owner != texture:
                if state_length:
                    self.states.append((state_from, state_length, texture))
                texture = glyph.owner
                state_from = i
                state_length = 0
            state_length += 1
            t = glyph.tex_coords
            lst += [t[0], t[1], t[2], 1.,
                    x + glyph.vertices[0], y + glyph.vertices[1], 0., 1.,
                    t[3], t[4], t[5], 1.,
                    x + glyph.vertices[2], y + glyph.vertices[1], 0., 1.,
                    t[6], t[7], t[8], 1.,
                    x + glyph.vertices[2], y + glyph.vertices[3], 0., 1.,
                    t[9], t[10], t[11], 1.,
                    x + glyph.vertices[0], y + glyph.vertices[3], 0., 1.]
            x += glyph.advance
            self.cumulative_advance.append(x)
        self.states.append((state_from, state_length, texture))

        self.array = (c_float * len(lst))(*lst)
        self.width = x

    def get_break_index(self, from_index, width):
        '''Find a breakpoint within the text for a given width.

        Returns a valid breakpoint after `from_index` so that the text
        between `from_index` and the breakpoint fits within `width` pixels.

        This method uses precomputed cumulative glyph widths to give a quick
        answer, and so is much faster than
        `pyglet.font.base.Font.get_glyphs_for_width`.

        :Parameters:
            `from_index` : int
                Index of text to begin at, or 0 for the beginning of the
                string.
            `width` : float
                Maximum width to use.

        :rtype: int
        :return: the index of text which will be used as the breakpoint, or
            `from_index` if there is no valid breakpoint.
        '''
        to_index = from_index
        if from_index >= len(self.text):
            return from_index
        if from_index:
            width += self.cumulative_advance[from_index-1]
        for i, (c, w) in enumerate(
                zip(self.text[from_index:],
                    self.cumulative_advance[from_index:])):
            if c in u'\u0020\u200b':
                to_index = i + from_index + 1
            if c == '\n':
                return i + from_index + 1
            if w > width:
                return to_index
        return to_index

    def get_subwidth(self, from_index, to_index):
        '''Return the width of a slice of this string.

        :Parameters:
            `from_index` : int
                The start index of the string to measure.
            `to_index` : int
                The end index (exclusive) of the string to measure.

        :rtype: float
        '''
        if to_index <= from_index:
            return 0
        width = self.cumulative_advance[to_index-1]
        if from_index:
            width -= self.cumulative_advance[from_index-1]
        return width

    def draw(self, from_index=0, to_index=None):
        '''Draw a region of the glyph string.

        Assumes texture state is enabled.  To enable the texture state::

            from pyglet.gl import *
            glEnable(GL_TEXTURE_2D)

        :Parameters:
            `from_index` : int
                Start index of text to render.
            `to_index` : int
                End index (exclusive) of text to render.

        '''
        if from_index >= len(self.text) or \
           from_index == to_index or \
           not self.text:
            return

        # XXX Safe to assume all required textures will use same blend state I
        # think.
(otherwise move this into loop) self.states[0][2].apply_blend_state() if from_index: glPushMatrix() glTranslatef(-self.cumulative_advance[from_index-1], 0, 0) if to_index is None: to_index = len(self.text) glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT) glInterleavedArrays(GL_T4F_V4F, 0, self.array) for state_from, state_length, texture in self.states: if state_from + state_length < from_index: continue state_from = max(state_from, from_index) state_length = min(state_length, to_index - state_from) if state_length <= 0: break glBindTexture(GL_TEXTURE_2D, texture.id) glDrawArrays(GL_QUADS, state_from * 4, state_length * 4) glPopClientAttrib() if from_index: glPopMatrix() class Text(object): '''Simple displayable text. This is a convenience class for rendering strings of text. It takes care of caching the vertices so the text can be rendered every frame with little performance penalty. Text can be word-wrapped by specifying a `width` to wrap into. If the width is not specified, it gives the width of the text as laid out. :Ivariables: `x` : int X coordinate of the text `y` : int Y coordinate of the text ''' _layout_width = None # Width to layout text to _text_width = 0 # Calculated width of text _text_height = 0 # Calculated height of text (bottom descender to top # ascender) _dirty = False # Flag if require layout # Alignment constants #: Align the left edge of the text to the given X coordinate. LEFT = 'left' #: Align the horizontal center of the text to the given X coordinate. CENTER = 'center' #: Align the right edge of the text to the given X coordinate. RIGHT = 'right' #: Align the bottom of the descender of the final line of text with the #: given Y coordinate. BOTTOM = 'bottom' #: Align the baseline of the first line of text with the given Y #: coordinate. BASELINE = 'baseline' #: Align the top of the ascender of the first line of text with the given #: Y coordinate. TOP = 'top' _halign = LEFT _valign = BASELINE def __init__(self, font, text='', x=0, y=0, z=0, color=(1,1,1,1), width=None, halign=LEFT, valign=BASELINE): '''Create displayable text. :Parameters: `font` : `Font` Font to render the text in. `text` : str Initial string to render. `x` : float X coordinate of the left edge of the text. `y` : float Y coordinate of the baseline of the text. If the text is word-wrapped, this refers to the first line of text. `z` : float Z coordinate of the text plane. `color` : 4-tuple of float Color to render the text in. Alpha values can be specified in the fourth component. `width` : float Width to limit the rendering to. Text will be word-wrapped if necessary. `halign` : str Alignment of the text. See `Text.halign` for details. `valign` : str Controls positioning of the text based off the y coordinate. One of BASELINE, BOTTOM, CENTER or TOP. Defaults to BASELINE. ''' self._dirty = True self.font = font self._text = text self.color = color self.x = x self.y = y self.leading = 0 self._layout_width = width self._halign = halign self._valign = valign def _clean(self): '''Resolve changed layout''' # Adding a space to the end of the text simplifies the inner loop # of the wrapping layout. It ensures there is a breakpoint returned at # the end of the string (GlyphString cannot guarantee this otherwise # it would not be useable with styled layout algorithms). 
        text = self._text + ' '
        glyphs = self.font.get_glyphs(text)
        self._glyph_string = GlyphString(text, glyphs)

        self.lines = []
        i = 0
        if self._layout_width is None:
            self._text_width = 0
            while '\n' in text[i:]:
                end = text.index('\n', i)
                self.lines.append((i, end))
                self._text_width = max(self._text_width,
                                       self._glyph_string.get_subwidth(i, end))
                i = end + 1
            # Discard the artificial appended space.
            end = len(text) - 1
            if i < end:
                self.lines.append((i, end))
                self._text_width = max(self._text_width,
                                       self._glyph_string.get_subwidth(i, end))
        else:
            bp = self._glyph_string.get_break_index(i, self._layout_width)
            while i < len(text) and bp > i:
                if text[bp-1] == '\n':
                    self.lines.append((i, bp - 1))
                else:
                    self.lines.append((i, bp))
                i = bp
                bp = self._glyph_string.get_break_index(i, self._layout_width)
            if i < len(text) - 1:
                self.lines.append((i, len(text)))

        self.line_height = self.font.ascent - self.font.descent + self.leading
        self._text_height = self.line_height * len(self.lines)

        self._dirty = False

    def draw(self):
        '''Render the text.

        This method makes no assumptions about the projection.  Using the
        default projection set up by pyglet, coordinates refer to
        window-space and the text will be aligned to the window.  Other
        projections can be used to render text into 3D space.

        The OpenGL state is not modified by this method.
        '''
        if self._dirty:
            self._clean()

        y = self.y
        if self._valign == self.BOTTOM:
            y += self.height - self.font.ascent
        elif self._valign == self.CENTER:
            y += self.height // 2 - self.font.ascent
        elif self._valign == self.TOP:
            y -= self.font.ascent

        glPushAttrib(GL_CURRENT_BIT | GL_ENABLE_BIT)
        glEnable(GL_TEXTURE_2D)
        glColor4f(*self.color)
        glPushMatrix()
        glTranslatef(0, y, 0)
        for start, end in self.lines:
            width = self._glyph_string.get_subwidth(start, end)

            x = self.x
            align_width = self._layout_width or 0
            if self._halign == self.RIGHT:
                x += align_width - width
            elif self._halign == self.CENTER:
                x += align_width // 2 - width // 2

            glTranslatef(x, 0, 0)
            self._glyph_string.draw(start, end)
            glTranslatef(-x, -self.line_height, 0)
        glPopMatrix()
        glPopAttrib()

    def _get_width(self):
        if self._dirty:
            self._clean()
        if self._layout_width:
            return self._layout_width
        return self._text_width

    def _set_width(self, width):
        self._layout_width = width
        self._dirty = True

    width = property(_get_width, _set_width,
        doc='''Width of the text.

        When set, this enables word-wrapping to the specified width.
        Otherwise, the width of the text as it will be rendered can be
        determined.

        :type: float
        ''')

    def _get_height(self):
        if self._dirty:
            self._clean()
        return self._text_height

    height = property(_get_height,
        doc='''Height of the text.

        This property is the ascent minus the descent of the font, unless
        there is more than one line of word-wrapped text, in which case the
        height takes into account the line leading.  Read-only.

        :type: float
        ''')

    def _set_text(self, text):
        self._text = text
        self._dirty = True

    text = property(lambda self: self._text, _set_text,
        doc='''Text to render.

        The glyph vertices are only recalculated as needed, so multiple
        changes to the text can be performed with no performance penalty.

        :type: str
        ''')

    def _set_halign(self, halign):
        self._halign = halign
        self._dirty = True

    halign = property(lambda self: self._halign, _set_halign,
        doc='''Horizontal alignment of the text.

        The text is positioned relative to `x` and `width` according to this
        property, which must be one of the alignment constants `LEFT`,
        `CENTER` or `RIGHT`.
:type: str ''') def _set_valign(self, valign): self._valign = valign self._dirty = True valign = property(lambda self: self._valign, _set_valign, doc='''Vertical alignment of the text. The text is positioned relative to `y` according to this property, which must be one of the alignment constants `BOTTOM`, `BASELINE`, `CENTER` or `TOP`. :type: str ''') if not getattr(sys, 'is_epydoc', False): if sys.platform == 'darwin': from pyglet.font.carbon import CarbonFont _font_class = CarbonFont elif sys.platform in ('win32', 'cygwin'): if pyglet.options['font'][0] == 'win32': from pyglet.font.win32 import Win32Font _font_class = Win32Font elif pyglet.options['font'][0] == 'gdiplus': from pyglet.font.win32 import GDIPlusFont _font_class = GDIPlusFont else: assert False, 'Unknown font driver' else: from pyglet.font.freetype import FreeTypeFont _font_class = FreeTypeFont def load(name, size, bold=False, italic=False, dpi=None): '''Load a font for rendering. :Parameters: `name` : str, or list of str Font family, for example, "Times New Roman". If a list of names is provided, the first one matching a known font is used. If no font can be matched to the name(s), a default font is used. `size` : float Size of the font, in points. The returned font may be an exact match or the closest available. `bold` : bool If True, a bold variant is returned, if one exists for the given family and size. `italic` : bool If True, an italic variant is returned, if one exists for the given family and size. `dpi` : float If specified, the assumed resolution of the display device, for the purposes of determining the pixel size of the font. If not specified, the platform's native resolution is used (72 DPI on Mac OS X, 96 DPI on Windows, 120 DPI on Windows with large fonts, and user-settable on Linux). :rtype: `Font` ''' # Find first matching name if type(name) in (tuple, list): for n in name: if _font_class.have_font(n): name = n break else: name = None # Locate or create font cache shared_object_space = get_current_context().object_space if not hasattr(shared_object_space, 'pyglet_font_font_cache'): shared_object_space.pyglet_font_font_cache = {} font_cache = shared_object_space.pyglet_font_font_cache # Look for font name in font cache descriptor = (name, size, bold, italic, dpi) if descriptor in font_cache: return font_cache[descriptor] # Not in cache, create from scratch font = _font_class(name, size, bold=bold, italic=italic, dpi=dpi) font_cache[descriptor] = font return font def add_file(font): '''Add a font to pyglet's search path. In order to load a font that is not installed on the system, you must call this method to tell pyglet that it exists. You can supply either a filename or any file-like object. The font format is platform-dependent, but is typically a TrueType font file containing a single font face. Note that to load this file after adding it you must specify the face name to `load`, not the filename. :Parameters: `font` : str or file Filename or file-like object to load fonts from. ''' if type(font) in (str, unicode): font = open(font, 'rb') if hasattr(font, 'read'): font = font.read() _font_class.add_font_data(font) def add_directory(dir): '''Add a directory of fonts to pyglet's search path. This function simply calls `add_file` for each file with a ``.ttf`` extension in the given directory. Subdirectories are not searched. :Parameters: `dir` : str Directory that contains font files. ''' import os for file in os.listdir(dir): if file[-4:].lower() == '.ttf': add_file(os.path.join(dir, file))
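# A minimal usage sketch for the API above, not part of the module itself.
# It uses the pyglet 1.0-era manual event loop; 'Arial' is an illustrative
# face name and falls back to a default font if not installed.
import pyglet
from pyglet import font
from pyglet.gl import *

window = pyglet.window.Window()
fnt = font.load('Arial', 14)
label = font.Text(fnt, 'Hello, world', x=10, y=10,
                  halign=font.Text.LEFT, valign=font.Text.BASELINE)

while not window.has_exit:
    window.dispatch_events()
    glClear(GL_COLOR_BUFFER_BIT)
    label.draw()     # glyph layout is cached; re-done only when text changes
    window.flip()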
bsd-3-clause
-2,993,675,939,295,804,400
34.221667
79
0.58288
false
GabrielNicolasAvellaneda/dd-agent
checks.d/wmi_check.py
1
5343
''' Windows Only. Generic WMI check. This check allows you to specify particular metrics that you want from WMI in your configuration. Check wmi_check.yaml.example in your conf.d directory for more details on configuration. ''' # 3rd party import wmi # project from checks import AgentCheck UP_METRIC = 'Up' SEARCH_WILDCARD = '*' class WMICheck(AgentCheck): def __init__(self, name, init_config, agentConfig, instances): AgentCheck.__init__(self, name, init_config, agentConfig, instances) self.wmi_conns = {} def _get_wmi_conn(self, host, user, password): key = "%s:%s:%s" % (host, user, password) if key not in self.wmi_conns: self.wmi_conns[key] = wmi.WMI(host, user=user, password=password) return self.wmi_conns[key] def check(self, instance): host = instance.get('host', None) user = instance.get('username', None) password = instance.get('password', None) w = self._get_wmi_conn(host, user, password) wmi_class = instance.get('class') metrics = instance.get('metrics') filters = instance.get('filters') tag_by = instance.get('tag_by') tag_queries = instance.get('tag_queries') constant_tags = instance.get('constant_tags') if not wmi_class: raise Exception('WMI instance is missing a value for `class` in wmi_check.yaml') # If there are filters, we need one query per filter. if filters: for f in filters: prop = f.keys()[0] search = f.values()[0] if SEARCH_WILDCARD in search: search = search.replace(SEARCH_WILDCARD, '%') wql = "SELECT * FROM %s WHERE %s LIKE '%s'" \ % (wmi_class, prop, search) results = w.query(wql) else: results = getattr(w, wmi_class)(**f) self._extract_metrics(results, metrics, tag_by, w, tag_queries, constant_tags) else: results = getattr(w, wmi_class)() self._extract_metrics(results, metrics, tag_by, w, tag_queries, constant_tags) def _extract_metrics(self, results, metrics, tag_by, wmi, tag_queries, constant_tags): if len(results) > 1 and tag_by is None: raise Exception('WMI query returned multiple rows but no `tag_by` value was given. ' 'metrics=%s' % metrics) for res in results: tags = [] # include any constant tags... if constant_tags: tags.extend(constant_tags) # if tag_queries is specified then get attributes from other classes and use as a tags if tag_queries: for query in tag_queries: link_source_property = int(getattr(res, query[0])) target_class = query[1] link_target_class_property = query[2] target_property = query[3] link_results = \ wmi.query("SELECT {0} FROM {1} WHERE {2} = {3}" .format(target_property, target_class, link_target_class_property, link_source_property)) if len(link_results) != 1: self.log.warning("Failed to find {0} for {1} {2}. No metrics gathered" .format(target_class, link_target_class_property, link_source_property)) continue link_value = str(getattr(link_results[0], target_property)).lower() tags.append("{0}:{1}".format(target_property.lower(), "_".join(link_value.split()))) # Grab the tag from the result if there's a `tag_by` value (e.g.: "name:jenkins") # Strip any #instance off the value when `tag_queries` is set (gives us unique tags) if tag_by: tag_value = str(getattr(res, tag_by)).lower() if tag_queries and tag_value.find("#") > 0: tag_value = tag_value[:tag_value.find("#")] tags.append('%s:%s' % (tag_by.lower(), tag_value)) if len(tags) == 0: tags = None for wmi_property, name, mtype in metrics: if wmi_property == UP_METRIC: # Special-case metric will just submit 1 for every value # returned in the result. 
val = 1 elif getattr(res, wmi_property): val = float(getattr(res, wmi_property)) else: self.log.warning("When extracting metrics with wmi, found a null value" " for property '{0}'. Metric type of property is {1}." .format(wmi_property, mtype)) continue # Submit the metric to Datadog try: func = getattr(self, mtype) except AttributeError: raise Exception('Invalid metric type: {0}'.format(mtype)) func(name, val, tags=tags)
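# A hedged sketch of an instance dictionary carrying the keys that check()
# reads above; not part of the check itself.  The WMI class, property and
# metric names are illustrative, and the authoritative format lives in
# wmi_check.yaml.example.
EXAMPLE_INSTANCE = {
    'host': '.',                # local machine
    'username': None,
    'password': None,
    'class': 'Win32_PerfFormattedData_PerfOS_Processor',
    'filters': [{'Name': '_Total'}],
    'metrics': [
        # [WMI property, Datadog metric name, AgentCheck method to call]
        ['PercentProcessorTime', 'system.cpu.pct', 'gauge'],
    ],
    'tag_by': 'Name',
    'tag_queries': None,
    'constant_tags': ['env:example'],
}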
bsd-3-clause
658,768,032,123,503,700
41.404762
98
0.518061
false
JackDanger/sentry
tests/acceptance/test_project_keys.py
1
2933
from __future__ import absolute_import from datetime import datetime from django.utils import timezone from sentry.models import ProjectKey from sentry.testutils import AcceptanceTestCase class ProjectKeysTest(AcceptanceTestCase): def setUp(self): super(ProjectKeysTest, self).setUp() self.user = self.create_user('[email protected]') self.org = self.create_organization( name='Rowdy Tiger', owner=None, ) self.team = self.create_team( organization=self.org, name='Mariachi Band' ) self.project = self.create_project( organization=self.org, team=self.team, name='Bengal', ) self.create_member( user=self.user, organization=self.org, role='owner', teams=[self.team], ) ProjectKey.objects.filter(project=self.project).delete() ProjectKey.objects.create( project=self.project, label='Default', public_key='5cc0482a13d248ff99f9717101dd6356', secret_key='410fd998318844b8894775f36184ec28', ) self.login_as(self.user) self.path = '/{}/{}/settings/keys/'.format(self.org.slug, self.project.slug) def test_simple(self): self.browser.get(self.path) self.browser.wait_until_not('.loading-indicator') self.browser.snapshot('project keys') self.browser.wait_until('.ref-keys') class ProjectKeyDetailsTest(AcceptanceTestCase): def setUp(self): super(ProjectKeyDetailsTest, self).setUp() self.user = self.create_user('[email protected]') self.org = self.create_organization( name='Rowdy Tiger', owner=None, ) self.team = self.create_team( organization=self.org, name='Mariachi Band' ) self.project = self.create_project( organization=self.org, team=self.team, name='Bengal', ) self.create_member( user=self.user, organization=self.org, role='owner', teams=[self.team], ) self.pk = ProjectKey.objects.create( project=self.project, label='Default', public_key='5cc0482a13d248ff99f9717101dd6356', secret_key='410fd998318844b8894775f36184ec28', date_added=datetime(2015, 10, 1, 21, 19, 5, 648517, tzinfo=timezone.utc), ) self.login_as(self.user) self.path = '/{}/{}/settings/keys/{}/'.format( self.org.slug, self.project.slug, self.pk.public_key, ) def test_simple(self): self.browser.get(self.path) self.browser.wait_until_not('.loading-indicator') self.browser.snapshot('project key details') self.browser.wait_until('.ref-key-details')
bsd-3-clause
2,673,051,990,279,457,300
30.537634
85
0.582339
false
sgibbes/zonal_stats_app
utilities/zstats_subprocess.py
1
2378
import sys import os import sqlite3 import prep_shapefile import arcpy from arcpy.sa import * import datetime import simpledbf arcpy.CheckOutExtension("Spatial") value = sys.argv[1] zone = sys.argv[2] final_aoi = sys.argv[3] cellsize = sys.argv[4] analysis = sys.argv[5] start = int(sys.argv[6]) stop = int(sys.argv[7]) arcpy.env.overwriteOutput = True for i in range(start, stop): print("prepping feature id {}".format(i)) # select one individual feature from the input shapefile mask = prep_shapefile.zonal_stats_mask(final_aoi, i) scratch_wkspc = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'scratch.gdb') # set environments arcpy.env.extent = mask arcpy.env.mask = mask arcpy.env.cellSize = cellsize arcpy.env.snapRaster = value arcpy.env.scratchWorkspace = scratch_wkspc arcpy.env.workspace = scratch_wkspc tables_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'tables') z_stats_tbl = os.path.join(tables_dir, 'output_{}.dbf'.format(i)) start_time = datetime.datetime.now() print("running zstats") outzstats = ZonalStatisticsAsTable(zone, "VALUE", value, z_stats_tbl, "DATA", "SUM") end_time = datetime.datetime.now() - start_time print("debug:time elapsed: {}".format(end_time)) # convert the output zstats table into a pandas DF dbf = simpledbf.Dbf5(z_stats_tbl) df = dbf.to_dataframe() # populate a new field "id" with the FID and analysis with the sum df['ID'] = i df[analysis] = df['SUM'] # sometimes this value came back as an object, so here we are fixing that bug df.VALUE = df.VALUE.astype(int) # name of the sql database to store the sql table zstats_results_db = os.path.join(tables_dir, 'zstats_results_db.db') # create a connection to the sql database conn = sqlite3.connect(zstats_results_db) # append the dataframe to the database df.to_sql(analysis, conn, if_exists='append') # delete these because they create a lock del df del dbf os.remove(z_stats_tbl) # reset these environments. Otherwise the shapefile is redefined based on features within the extent arcpy.env.extent = None arcpy.env.mask = None arcpy.env.cellSize = None arcpy.env.snapRaster = None print('process succeeded for id {0}'.format(i))
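# A sketch of how a parent process might launch this worker; not part of
# the script.  The seven positional arguments mirror sys.argv[1:8] parsed
# at the top of the file, and all paths and names below are hypothetical.
import subprocess
import sys

subprocess.check_call([
    sys.executable, 'utilities/zstats_subprocess.py',
    r'C:\data\value_raster.tif',   # value raster
    r'C:\data\zone_raster.tif',    # zone raster
    r'C:\data\aoi.shp',            # final_aoi shapefile
    '30',                          # cellsize
    'loss',                        # analysis (output column name)
    '0',                           # start feature id
    '10',                          # stop feature id
])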
apache-2.0
-2,103,962,230,845,745,000
28.358025
108
0.687132
false
JordanP/openstack-snippets
ospurge/ospurge/main.py
1
8851
#!/usr/bin/env python3 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import argparse import concurrent.futures import logging import operator import sys import threading import typing import os_client_config import shade from ospurge import exceptions from ospurge.resources.base import ServiceResource from ospurge import utils if typing.TYPE_CHECKING: # pragma: no cover from typing import Optional # noqa: F401 def configure_logging(verbose: bool) -> None: log_level = logging.INFO if verbose else logging.WARNING logging.basicConfig( format='%(levelname)s:%(name)s:%(asctime)s:%(message)s', level=log_level ) logging.getLogger( 'requests.packages.urllib3.connectionpool').setLevel(logging.WARNING) def create_argument_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser( description="Purge resources from an Openstack project." ) parser.add_argument( "--verbose", action="store_true", help="Make output verbose" ) parser.add_argument( "--dry-run", action="store_true", help="List project's resources" ) parser.add_argument( "--delete-shared-resources", action="store_true", help="Whether to delete shared resources (public images and external " "networks)" ) parser.add_argument( "--admin-role-name", default="admin", help="Name of admin role. Defaults to 'admin'. This role will be " "temporarily granted on the project to purge to the " "authenticated user." ) group = parser.add_mutually_exclusive_group(required=True) group.add_argument( "--purge-project", metavar="ID_OR_NAME", help="ID or Name of project to purge. This option requires " "to authenticate with admin credentials." ) group.add_argument( "--purge-own-project", action="store_true", help="Purge resources of the project used to authenticate. Useful " "if you don't have the admin credentials of the cloud." ) return parser class CredentialsManager(object): def __init__(self, options: argparse.Namespace) -> None: self.options = options self.revoke_role_after_purge = False self.disable_project_after_purge = False self.cloud = None # type: Optional[shade.OpenStackCloud] self.operator_cloud = None # type: Optional[shade.OperatorCloud] if options.purge_own_project: self.cloud = shade.openstack_cloud(argparse=options) self.user_id = self.cloud.keystone_session.get_user_id() self.project_id = self.cloud.keystone_session.get_project_id() else: self.operator_cloud = shade.operator_cloud(argparse=options) self.user_id = self.operator_cloud.keystone_session.get_user_id() project = self.operator_cloud.get_project(options.purge_project) if not project: raise exceptions.OSProjectNotFound( "Unable to find project '{}'".format(options.purge_project) ) self.project_id = project['id'] # If project is not enabled, we must disable it after purge. self.disable_project_after_purge = not project.enabled # Reuse the information passed to get the `OperatorCloud` but # change the project. This way we bind/re-scope to the project # we want to purge, not the project we authenticated to. 
self.cloud = shade.openstack_cloud( **utils.replace_project_info( self.operator_cloud.cloud_config.config, self.project_id ) ) auth_args = self.cloud.cloud_config.get_auth_args() logging.warning( "Going to list and/or delete resources from project '%s'", options.purge_project or auth_args.get('project_name') or auth_args.get('project_id') ) def ensure_role_on_project(self) -> None: if self.operator_cloud and self.operator_cloud.grant_role( self.options.admin_role_name, project=self.options.purge_project, user=self.user_id ): logging.warning( "Role 'Member' granted to user '%s' on project '%s'", self.user_id, self.options.purge_project ) self.revoke_role_after_purge = True def revoke_role_on_project(self) -> None: self.operator_cloud.revoke_role( self.options.admin_role_name, user=self.user_id, project=self.options.purge_project) logging.warning( "Role 'Member' revoked from user '%s' on project '%s'", self.user_id, self.options.purge_project ) def ensure_enabled_project(self) -> None: if self.operator_cloud and self.disable_project_after_purge: self.operator_cloud.update_project(self.project_id, enabled=True) logging.warning("Project '%s' was disabled before purge and it is " "now enabled", self.options.purge_project) def disable_project(self) -> None: self.operator_cloud.update_project(self.project_id, enabled=False) logging.warning("Project '%s' was disabled before purge and it is " "now also disabled", self.options.purge_project) @utils.monkeypatch_oscc_logging_warning def runner( resource_mngr: ServiceResource, options: argparse.Namespace, exit: threading.Event ) -> None: try: if not options.dry_run: resource_mngr.wait_for_check_prerequisite(exit) for resource in resource_mngr.list(): # No need to continue if requested to exit. if exit.is_set(): return if resource_mngr.should_delete(resource): logging.info("Going to delete %s", resource_mngr.to_str(resource)) if options.dry_run: continue utils.call_and_ignore_notfound(resource_mngr.delete, resource) except Exception as exc: log = logging.error recoverable = False if hasattr(exc, 'inner_exception'): # inner_exception is a tuple (type, value, traceback) # mypy complains: "Exception" has no attribute "inner_exception" exc_info = exc.inner_exception # type: ignore if exc_info[0].__name__.lower().endswith('endpointnotfound'): log = logging.info recoverable = True log("Can't deal with %s: %r", resource_mngr.__class__.__name__, exc) if not recoverable: exit.set() def main() -> None: parser = create_argument_parser() cloud_config = os_client_config.OpenStackConfig() cloud_config.register_argparse_arguments(parser, sys.argv) options = parser.parse_args() configure_logging(options.verbose) creds_manager = CredentialsManager(options=options) creds_manager.ensure_enabled_project() creds_manager.ensure_role_on_project() resource_managers = sorted( [cls(creds_manager) for cls in utils.get_all_resource_classes()], key=operator.methodcaller('order') ) # This is an `Event` used to signal whether one of the threads encountered # an unrecoverable error, at which point all threads should exit because # otherwise there's a chance the cleanup process never finishes. exit = threading.Event() # Dummy function to work around `ThreadPoolExecutor.map()` not accepting # a callable with arguments. 
def partial_runner(resource_manager: ServiceResource) -> None: runner(resource_manager, options=options, exit=exit) # pragma: no cover try: with concurrent.futures.ThreadPoolExecutor(8) as executor: executor.map(partial_runner, resource_managers) except KeyboardInterrupt: exit.set() if creds_manager.revoke_role_after_purge: creds_manager.revoke_role_on_project() if creds_manager.disable_project_after_purge: creds_manager.disable_project() sys.exit(int(exit.is_set())) if __name__ == "__main__": # pragma: no cover main()
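# A usage sketch, not part of the module: main() can also be exercised
# programmatically, e.g. from a smoke test.  This assumes credentials are
# resolvable by os-client-config (environment variables or clouds.yaml),
# exactly as in the CLI path above.
import sys
from ospurge.main import main

sys.argv = ['ospurge', '--verbose', '--dry-run', '--purge-own-project']
try:
    main()            # --dry-run only lists what would be deleted
except SystemExit:
    pass              # main() always finishes with sys.exit()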
apache-2.0
-7,863,330,502,847,431,000
35.726141
79
0.632471
false
ideascube/ideascube
ideascube/blog/migrations/0001_initial.py
1
1992
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings import taggit.managers class Migration(migrations.Migration): dependencies = [ ('taggit', '0002_auto_20150616_2121'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Content', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created_at', models.DateTimeField(auto_now_add=True)), ('modified_at', models.DateTimeField(auto_now=True)), ('title', models.CharField(max_length=100, verbose_name='title')), ('author_text', models.CharField(max_length=300, verbose_name='author text', blank=True)), ('summary', models.CharField(max_length=300, verbose_name='summary')), ('image', models.ImageField(upload_to=b'blog/image', verbose_name='image', blank=True)), ('text', models.TextField(verbose_name='text')), ('published_at', models.DateTimeField(verbose_name='publication date')), ('status', models.PositiveSmallIntegerField(default=1, verbose_name='Status', choices=[(1, 'draft'), (2, 'published'), (3, 'deleted')])), ('lang', models.CharField(default=b'en', max_length=10, verbose_name='Language', choices=[(b'en', b'English'), (b'fr', 'Fran\xe7ais'), (b'ar', '\u0627\u0644\u0639\u0631\u0628\u064a\u0629')])), ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')), ], options={ 'abstract': False, }, ), ]
agpl-3.0
-7,513,129,243,814,382,000
51.421053
208
0.601908
false
chenzilin/git-repo
git_refs.py
1
3980
# # Copyright (C) 2009 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from trace import Trace import platform_utils HEAD = 'HEAD' R_CHANGES = 'refs/changes/' R_HEADS = 'refs/heads/' R_TAGS = 'refs/tags/' R_PUB = 'refs/published/' R_M = 'refs/remotes/m/' class GitRefs(object): def __init__(self, gitdir): self._gitdir = gitdir self._phyref = None self._symref = None self._mtime = {} @property def all(self): self._EnsureLoaded() return self._phyref def get(self, name): try: return self.all[name] except KeyError: return '' def deleted(self, name): if self._phyref is not None: if name in self._phyref: del self._phyref[name] if name in self._symref: del self._symref[name] if name in self._mtime: del self._mtime[name] def symref(self, name): try: self._EnsureLoaded() return self._symref[name] except KeyError: return '' def _EnsureLoaded(self): if self._phyref is None or self._NeedUpdate(): self._LoadAll() def _NeedUpdate(self): Trace(': scan refs %s', self._gitdir) for name, mtime in self._mtime.items(): try: if mtime != os.path.getmtime(os.path.join(self._gitdir, name)): return True except OSError: return True return False def _LoadAll(self): Trace(': load refs %s', self._gitdir) self._phyref = {} self._symref = {} self._mtime = {} self._ReadPackedRefs() self._ReadLoose('refs/') self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD) scan = self._symref attempts = 0 while scan and attempts < 5: scan_next = {} for name, dest in scan.items(): if dest in self._phyref: self._phyref[name] = self._phyref[dest] else: scan_next[name] = dest scan = scan_next attempts += 1 def _ReadPackedRefs(self): path = os.path.join(self._gitdir, 'packed-refs') try: fd = open(path, 'r') mtime = os.path.getmtime(path) except IOError: return except OSError: return try: for line in fd: line = str(line) if line[0] == '#': continue if line[0] == '^': continue line = line[:-1] p = line.split(' ') ref_id = p[0] name = p[1] self._phyref[name] = ref_id finally: fd.close() self._mtime['packed-refs'] = mtime def _ReadLoose(self, prefix): base = os.path.join(self._gitdir, prefix) for name in platform_utils.listdir(base): p = os.path.join(base, name) if platform_utils.isdir(p): self._mtime[prefix] = os.path.getmtime(base) self._ReadLoose(prefix + name + '/') elif name.endswith('.lock'): pass else: self._ReadLoose1(p, prefix + name) def _ReadLoose1(self, path, name): try: fd = open(path) except IOError: return try: try: mtime = os.path.getmtime(path) ref_id = fd.readline() except (IOError, OSError): return finally: fd.close() try: ref_id = ref_id.decode() except AttributeError: pass if not ref_id: return ref_id = ref_id[:-1] if ref_id.startswith('ref: '): self._symref[name] = ref_id[5:] else: self._phyref[name] = ref_id self._mtime[name] = mtime
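# A minimal usage sketch for the class above, not part of the module; the
# repository path is hypothetical.
refs = GitRefs('/path/to/project/.git')

head_target = refs.symref(HEAD)             # e.g. 'refs/heads/master'
master_sha = refs.get(R_HEADS + 'master')   # '' if the ref does not exist
all_refs = refs.all                         # {ref name: sha}, loaded lazily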
apache-2.0
-7,995,023,122,377,508,000
22.690476
74
0.585427
false
remibergsma/cosmic
cosmic-core/systemvm/patches/debian/config/opt/cloud/bin/cs/CsConfig.py
1
2202
# -- coding: utf-8 -- from CsAddress import CsAddress from CsDatabag import CsCmdLine class CsConfig(object): """ A class to cache all the stuff that the other classes need """ __LOG_FILE = "/var/log/cloud.log" __LOG_LEVEL = "DEBUG" __LOG_FORMAT = "%(asctime)s %(levelname)-8s %(message)s" cl = None def __init__(self): self.fw = [] self.ingress_rules = {} def set_address(self): self.ips = CsAddress("ips", self) @classmethod def get_cmdline_instance(cls): if cls.cl is None: cls.cl = CsCmdLine("cmdline") return cls.cl def cmdline(self): return self.get_cmdline_instance() def address(self): return self.ips def get_fw(self): return self.fw def get_ingress_rules(self, key): if self.ingress_rules.has_key(key): return self.ingress_rules[key] return None def set_ingress_rules(self, key, ingress_rules): self.ingress_rules[key] = ingress_rules def get_logger(self): return self.__LOG_FILE def get_level(self): return self.__LOG_LEVEL def is_vpc(self): return self.cl.get_type() == 'vpcrouter' def is_router(self): return self.cl.get_type() == 'router' def is_dhcp(self): return self.cl.get_type() == 'dhcpsrvr' def has_dns(self): return not self.use_extdns() def has_metadata(self): return any((self.is_vpc(), self.is_router(), self.is_dhcp())) def use_extdns(self): return self.cmdline().idata().get('useextdns', 'false') == 'true' def get_domain(self): return self.cl.get_domain() def get_dns(self): conf = self.cmdline().idata() dns = [] if not self.use_extdns(): if not self.is_vpc() and self.cl.is_redundant() and self.cl.get_guest_gw(): dns.append(self.cl.get_guest_gw()) else: dns.append(self.address().get_guest_ip()) for name in ["dns1", "dns2"]: if name in conf: dns.append(conf[name]) return dns def get_format(self): return self.__LOG_FORMAT
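# A sketch of how the other Cs* classes typically consume this cache; not
# part of the module.  It assumes the cmdline and ips data bags exist on
# the system VM, as CsCmdLine and CsAddress expect.
config = CsConfig()
config.cmdline()        # lazily builds the shared CsCmdLine instance
config.set_address()    # wires CsAddress up under config.ips

if not config.use_extdns():
    dns_servers = config.get_dns()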
apache-2.0
761,955,633,890,424,600
24.022727
87
0.563124
false
salkinium/bachelor
experiment_control/commands/base.py
1
1510
# -*- coding: utf-8 -*- # Copyright (c) 2014, Niklas Hauser # All rights reserved. # # The file is part of my bachelor thesis and is released under the 3-clause BSD # license. See the file `LICENSE` for the full license governing this code. # ----------------------------------------------------------------------------- import logging import os class BaseCommand(object): logger_initialized = False def __init__(self, arguments=None, log_path='/var/log/boxmanager'): super(BaseCommand, self).__init__() self.arguments = arguments if arguments else [] self.log_path = log_path self.logger = logging.getLogger('Command') if not BaseCommand.logger_initialized: self.logger.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') # console logging self.ch = logging.StreamHandler() self.ch.setLevel(logging.DEBUG) self.ch.setFormatter(formatter) self.logger.addHandler(self.ch) # file logging fh = logging.FileHandler(os.path.join(self.log_path, 'scriptmanager.log')) fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) self.logger.addHandler(fh) BaseCommand.logger_initialized = True def execute(self, _): return True def __repr__(self): return self.__str__() def __str__(self): return "BaseCommand()"
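# A hypothetical subclassing sketch, not part of the module.  execute()
# receives one argument (the `_` placeholder above) and returns a success
# flag; '/tmp' is an illustrative log path.
class RebootCommand(BaseCommand):
    def execute(self, box):
        self.logger.info("rebooting %s", box)
        return True

    def __str__(self):
        return "RebootCommand()"

cmd = RebootCommand(arguments=['now'], log_path='/tmp')
cmd.execute(None)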
bsd-2-clause
-1,765,331,765,717,810,700
29.2
97
0.580795
false
elitegreg/mudpy
tyderium/socket.py
1
2832
from . import lib from .timeout import Timeout import greenlet import errno import socket as stdsocket from socket import * # for convenience from socket import timeout as timeout_error class socket(stdsocket.socket): __slots__ = () def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.setblocking(False) def __wait(self, events, timeout=None): try: with Timeout(timeout if timeout else super().gettimeout()): lib.Io(fd=self.fileno(), events=events).start() except TimeoutError: raise timeout_error def connect(self, addr, timeout=None): ret = self.connect_ex(addr) if ret == 0: return if ret != errno.EINPROGRESS: raise stdsocket.error(ret) self.__wait(lib.EV_WRITE, timeout) def send(self, value, timeout=None, *args, **kwargs): while True: try: return super().send(value, *args, **kwargs) except stdsocket.error as err: if err.errno not in (errno.EWOULDBLOCK, errno.EAGAIN, errno.EINTR): raise self.__wait(lib.EV_WRITE, timeout) def sendall(self, value, timeout=None, *args, **kwargs): while True: bytes = self.send(value, timeout, *args, **kwargs) if bytes >= len(value): return value = value[bytes:] def recv(self, size, timeout=None, *args, **kwargs): while True: fd = self.fileno() if fd < 0: return b'' self.__wait(lib.EV_READ, timeout) try: return super().recv(size, *args, **kwargs) except stdsocket.error as err: if err.errno in (errno.EWOULDBLOCK, errno.EAGAIN, errno.EINTR): continue raise def accept(self, timeout=None): while True: self.__wait(lib.EV_READ, timeout) try: sock, addr = super().accept() sock.setblocking(False) sock.__class__ = socket return sock, addr except stdsocket.error as err: if err.errno in (errno.EWOULDBLOCK, errno.EAGAIN, errno.EINTR): continue raise if __name__ == '__main__': from .hub import Hub def get(): sock = socket(AF_INET, SOCK_STREAM) sock.connect(('127.0.0.1', 8000)) sock.send(b'GET / HTTP/1.0\r\n\r\n') # wrong but ok for sample sock.shutdown(SHUT_WR) while True: data = sock.recv(4096) if not data: break print(data) while True: with Hub() as hub: hub.spawn(get) hub.switch()
gpl-3.0
5,062,856,902,460,464,000
29.782609
83
0.521893
false
drm343/HalfDragon_Bot
v2/main.py
1
5127
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import androidhelper
import requests
import json

import telebot
import telebot.util as util

import sys
import time
import os


# `my_token` was used without ever being defined; read the bot token from
# the environment instead (the variable name TELEGRAM_BOT_TOKEN is an
# assumption, not part of the original code).
my_token = os.environ.get("TELEGRAM_BOT_TOKEN", "")
bot = telebot.TeleBot(my_token)


class MachineStatus:
    def __init__(self):
        self.phone = androidhelper.Android()
        self.phone.batteryStartMonitoring()
        self.chat_id = False
        self.less_40 = False
        self.less_20 = False

    @util.async()
    def monitor(self):
        while True:
            time.sleep(5)
            battery = self.phone.batteryGetLevel()[1]

            if ((battery / 10) <= 4) and self.chat_id and not self.less_40:
                bot.send_message(self.chat_id, "低於 40%")
                self.less_40 = True
            elif ((battery / 10) <= 1) and self.chat_id and not self.less_20:
                bot.send_message(self.chat_id, "快沒電了")
                self.less_20 = True
            elif ((battery / 10) >= 10) and self.chat_id and (self.less_20 or self.less_40):
                bot.send_message(self.chat_id, "充電完畢")
                self.less_20 = False
                self.less_40 = False

status = MachineStatus()
status.monitor()

def is_someuser(username):
    return lambda message: (message.chat.username == username)

def get_parameters(message):
    try:
        return message.split(" ", 1)[1]
    except:
        return ""

is_drm343 = is_someuser("drm343")
is_DummyData = is_someuser("DummyData")

class FTP:
    def __init__(self):
        self.host = "ftp://domain"
        self.port = 22
        self.username = "username"
        self.password = "password"

    def split_host_and_port(self, message):
        host, self.port = message.split(":", 1)
        self.host = "ftp://{0}".format(host)

    def message(self, message):
        return message.format(self.host, self.port, self.username, self.password)

    def change_start(self, message):
        msg = bot.reply_to(message, "是否更改主機位置或 port?目前為 {0}:{1} (y/N)".format(self.host, self.port))
        bot.register_next_step_handler(msg, self.pre_change_host)

    def pre_change_host(self, message):
        Y = message.text
        if (Y == "Y") or (Y == "y"):
            msg = bot.reply_to(message, """\
請輸入新的主機位置
格式為「主機:port」
不需要輸入開頭 ftp://""".format(self.host))
            bot.register_next_step_handler(msg, self.post_change_host)
        else:
            self.pre_change_username(message)

    def post_change_host(self, message):
        self.split_host_and_port(message.text)
        self.pre_change_username(message)

    def pre_change_username(self, message):
        msg = bot.reply_to(message, "請輸入帳號?目前為:{0}".format(self.username))
        bot.register_next_step_handler(msg, self.post_change_username)

    def post_change_username(self, message):
        self.username = message.text
        self.pre_change_password(message)

    def pre_change_password(self, message):
        msg = bot.reply_to(message, "請輸入密碼?目前為:\"{0}\"(沒有\")".format(self.password))
        bot.register_next_step_handler(msg, self.post_change_password)

    def post_change_password(self, message):
        self.password = message.text
        chat_id = message.chat.id
        bot.send_message(chat_id, "更新完成")

ftp = FTP()

class HalfDragonBot:
    # command
    @bot.message_handler(commands=["start"])
    def start_bot(message):
        if is_drm343(message):
            status.chat_id = message.chat.id
            bot.send_message(status.chat_id, "已設定完成")

    @bot.message_handler(commands=["rules"])
    def show_rules(message):
        HELP_MESSAGE = """\
目前僅供半龍史萊姆群組使用(暫定)
加入第一句話必須說「我是新手」
邀請連結
某甲髒髒ftp
主機位置: {0}
Port: {1}
帳號: {2}
密碼: {3}
半龍史萊姆論壇"""
        bot.reply_to(message, ftp.message(HELP_MESSAGE))

    @bot.message_handler(commands=["set_ftp"])
    def set_ftp(message):
        if is_drm343(message) or is_DummyData(message):
            ftp.change_start(message)
        else:
            bot.reply_to(message, "你沒有修改權限")

    @bot.message_handler(commands=["output"])
    def count_probability(message):
        result = my_test(get_parameters(message.text))
        response_message = ""

        try:
            response_message = "結果 | 機率\n"
            next_message = "{0} | {1}\n"

            for item in result["distributions"]["data"][0]:
response_message = response_message + next_message.format(item[0], item[1]) except: response_message = result["error"]["message"] bot.reply_to(message, response_message) def my_test(message): parameters = {"program":"output+{0}".format(message)} response = requests.post("http://anydice.com/calculator_limited.php", data = parameters) result = json.loads(response.text) return result if __name__ == '__main__': while True: try: bot.polling() except: pass
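# An illustrative call to the anydice helper above, not part of the bot.
# The response layout mirrors the keys that count_probability consumes;
# actual values depend on the remote service.
result = my_test("1d6")
try:
    for value, probability in result["distributions"]["data"][0]:
        print("{0} | {1}".format(value, probability))
except KeyError:
    print(result["error"]["message"])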
mit
-4,120,896,432,437,278,700
25.972222
100
0.59691
false
llinmeng/PythonStudy
python_project/23/newsagent2.py
1
4783
# -*- coding: utf-8 -*-
from nntplib import NNTP
from time import strftime, time, localtime
from email import message_from_string
from urllib import urlopen
import textwrap
import re

day = 24 * 60 * 60  # Number of seconds in one day

def wrap(string, max=70):
    """
    Wrap a string to a maximum line width
    :param string:
    :param max:
    :return:
    """
    return '\n'.join(textwrap.wrap(string, max)) + '\n'


class NewsAgent:
    """
    An object that can fetch news items from news sources and publish
    them to news destinations
    """
    def __init__(self):
        self.sources = []
        self.destination = []

    def addSource(self, source):
        self.sources.append(source)

    def addDestination(self, dest):
        self.destination.append(dest)

    def distribute(self):
        """
        Fetch all news items from all sources and publish them to all
        destinations
        """
        items = []
        for source in self.sources:
            items.extend(source.getItems())
        for dest in self.destination:
            dest.receiveItems(items)

class NewsItem:
    """
    A simple news item consisting of a title and body text
    """
    def __init__(self, title, body):
        self.title = title
        self.body = body

class NNTPSource:
    """
    A news source that fetches news items from an NNTP group
    """
    def __init__(self, servername, group, window):
        self.servername = servername
        self.group = group
        self.window = window

    def getItems(self):
        start = localtime(time() - self.window * day)
        date = strftime('%y%m%d', start)
        hour = strftime('%H%M%S', start)

        server = NNTP(self.servername)
        ids = server.newnews(self.group, date, hour)[1]

        for id in ids:
            lines = server.article(id)[3]
            message = message_from_string('\n'.join(lines))

            title = message['subject']
            body = message.get_payload()
            if message.is_multipart():
                body = body[0]

            yield NewsItem(title, body)
        server.quit()

class SimpleWebSource:
    """
    A news source that uses regular expressions to extract news items
    from a web page
    """
    def __init__(self, url, titlePattern, bodyPattern):
        self.url = url
        self.titlePattern = re.compile(titlePattern)
        self.bodyPattern = re.compile(bodyPattern)

    def getItems(self):
        text = urlopen(self.url).read()
        titles = self.titlePattern.findall(text)
        bodies = self.bodyPattern.findall(text)
        for title, body in zip(titles, bodies):
            yield NewsItem(title, wrap(body))

class PlainDestination:
    """
    A news destination class that formats all news items as plain text
    """
    def receiveItems(self, items):
        for item in items:
            print (item.title)
            print ('-'*len(item.title))
            print (item.body)

class HTMLDestination:
    """
    A destination class that formats all news items as HTML
    """
    def __init__(self, filename):
        self.filename = filename

    def receiveItems(self, items):
        out = open(self.filename, 'w')
        print >> out, """
        <html>
        <head>
        <title>Today's News</title>
        </head>
        <body>
        <h1>Today's News</h1>
        """
        print >> out, '<ul>'
        id = 0
        for item in items:
            id += 1
            print >> out, '<li><a href = "#%i">%s</a></li>' % (id, item.title)
        print >> out, '</ul>'

        id = 0
        for item in items:
            id += 1
            print >> out, '<h2><a name = "%i">%s</a></h2>' % (id, item.title)
            print >> out, '<pre>%s</pre>' % item.body
        print >> out, """
        </body>
        </html>
        """

def runDefaultSetup():
    """
    Default setup of sources and destinations; modify as needed
    :return:
    """
    agent = NewsAgent()

    # A SimpleWebSource that fetches news from the XXX news site:
    bbc_url = 'http://news.bbc.co.uk/text_only.stm'
    bbc_title = r'(?s)a href="[^"]*">\s*<b>\s*(.*?)\s*</b>'
    bbc_body = r'(?s)</a>\s*<br />\s*(.*?)\s*<'
    bbc = SimpleWebSource(bbc_url, bbc_title, bbc_body)
    agent.addSource(bbc)

    # An NNTPSource that fetches news from comp.lang.python.announce
    clpa_server = 'news.foo.bar'  # Insert real server name
    clpa_group = 'comp.lang.python.announce'
    clpa_window = 1
    clpa = NNTPSource(clpa_server, clpa_group, clpa_window)
    agent.addSource(clpa)

    # Add a plain-text destination and an HTML destination
    agent.addDestination(PlainDestination())
    agent.addDestination(HTMLDestination('news.html'))

    # Distribute the news items
    agent.distribute()

if __name__ == '__main__':
    runDefaultSetup()
mit
-7,183,054,910,938,134,000
22.989071
83
0.525177
false
MattDevo/edk2
BaseTools/Source/Python/Eot/EotMain.py
1
69215
## @file # This file is used to be the main entrance of EOT tool # # Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR> # This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ## # Import Modules # from __future__ import absolute_import import Common.LongFilePathOs as os, time, glob import Common.EdkLogger as EdkLogger import Eot.EotGlobalData as EotGlobalData from optparse import OptionParser from Common.StringUtils import NormPath from Common import BuildToolError from Common.Misc import GuidStructureStringToGuidString, sdict from Eot.Parser import * from Eot.InfParserLite import EdkInfParser from Common.StringUtils import GetSplitValueList from Eot import c from Eot import Database from array import array from Eot.Report import Report from Common.BuildVersion import gBUILD_VERSION from Eot.Parser import ConvertGuid from Common.LongFilePathSupport import OpenLongFilePath as open import struct import uuid import copy import codecs from GenFds.AprioriSection import DXE_APRIORI_GUID, PEI_APRIORI_GUID gGuidStringFormat = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" gIndention = -4 class Image(array): _HEADER_ = struct.Struct("") _HEADER_SIZE_ = _HEADER_.size def __new__(cls, *args, **kwargs): return array.__new__(cls, 'B') def __init__(self, ID=None): if ID is None: self._ID_ = str(uuid.uuid1()).upper() else: self._ID_ = ID self._BUF_ = None self._LEN_ = None self._OFF_ = None self._SubImages = sdict() # {offset: Image()} array.__init__(self) def __repr__(self): return self._ID_ def __len__(self): Len = array.__len__(self) for Offset in self._SubImages.keys(): Len += len(self._SubImages[Offset]) return Len def _Unpack(self): self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _Pack(self, PadByte=0xFF): raise NotImplementedError def frombuffer(self, Buffer, Offset=0, Size=None): self._BUF_ = Buffer self._OFF_ = Offset # we may need the Size information in advance if it's given self._LEN_ = Size self._LEN_ = self._Unpack() def empty(self): del self[0:] def GetField(self, FieldStruct, Offset=0): return FieldStruct.unpack_from(self, Offset) def SetField(self, FieldStruct, Offset, *args): # check if there's enough space Size = FieldStruct.size if Size > len(self): self.extend([0] * (Size - len(self))) FieldStruct.pack_into(self, Offset, *args) def _SetData(self, Data): if len(self) < self._HEADER_SIZE_: self.extend([0] * (self._HEADER_SIZE_ - len(self))) else: del self[self._HEADER_SIZE_:] self.extend(Data) def _GetData(self): if len(self) > self._HEADER_SIZE_: return self[self._HEADER_SIZE_:] return None Data = property(_GetData, _SetData) ## CompressedImage() class # # A class for Compressed Image # class CompressedImage(Image): # UncompressedLength = 4-byte # CompressionType = 1-byte _HEADER_ = struct.Struct("1I 1B") _HEADER_SIZE_ = _HEADER_.size _ORIG_SIZE_ = struct.Struct("1I") _CMPRS_TYPE_ = struct.Struct("4x 1B") def __init__(self, CompressedData=None, CompressionType=None, UncompressedLength=None): Image.__init__(self) if UncompressedLength is not None: self.UncompressedLength = UncompressedLength if CompressionType is not None: self.CompressionType = CompressionType if 
CompressedData is not None: self.Data = CompressedData def __str__(self): global gIndention S = "algorithm=%s uncompressed=%x" % (self.CompressionType, self.UncompressedLength) for Sec in self.Sections: S += '\n' + str(Sec) return S def _SetOriginalSize(self, Size): self.SetField(self._ORIG_SIZE_, 0, Size) def _GetOriginalSize(self): return self.GetField(self._ORIG_SIZE_)[0] def _SetCompressionType(self, Type): self.SetField(self._CMPRS_TYPE_, 0, Type) def _GetCompressionType(self): return self.GetField(self._CMPRS_TYPE_)[0] def _GetSections(self): try: TmpData = DeCompress('Efi', self[self._HEADER_SIZE_:]) DecData = array('B') DecData.fromstring(TmpData) except: TmpData = DeCompress('Framework', self[self._HEADER_SIZE_:]) DecData = array('B') DecData.fromstring(TmpData) SectionList = [] Offset = 0 while Offset < len(DecData): Sec = Section() try: Sec.frombuffer(DecData, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary except: break SectionList.append(Sec) return SectionList UncompressedLength = property(_GetOriginalSize, _SetOriginalSize) CompressionType = property(_GetCompressionType, _SetCompressionType) Sections = property(_GetSections) ## Ui() class # # A class for Ui # class Ui(Image): _HEADER_ = struct.Struct("") _HEADER_SIZE_ = 0 def __init__(self): Image.__init__(self) def __str__(self): return self.String def _Unpack(self): # keep header in this Image object self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _GetUiString(self): return codecs.utf_16_decode(self[0:-2].tostring())[0] String = property(_GetUiString) ## Depex() class # # A class for Depex # class Depex(Image): _HEADER_ = struct.Struct("") _HEADER_SIZE_ = 0 _GUID_ = struct.Struct("1I2H8B") _OPCODE_ = struct.Struct("1B") _OPCODE_STRING_ = { 0x00 : "BEFORE", 0x01 : "AFTER", 0x02 : "PUSH", 0x03 : "AND", 0x04 : "OR", 0x05 : "NOT", 0x06 : "TRUE", 0x07 : "FALSE", 0x08 : "END", 0x09 : "SOR" } _NEXT_ = { -1 : _OPCODE_, # first one in depex must be an opcdoe 0x00 : _GUID_, #"BEFORE", 0x01 : _GUID_, #"AFTER", 0x02 : _GUID_, #"PUSH", 0x03 : _OPCODE_, #"AND", 0x04 : _OPCODE_, #"OR", 0x05 : _OPCODE_, #"NOT", 0x06 : _OPCODE_, #"TRUE", 0x07 : _OPCODE_, #"FALSE", 0x08 : None, #"END", 0x09 : _OPCODE_, #"SOR" } def __init__(self): Image.__init__(self) self._ExprList = [] def __str__(self): global gIndention gIndention += 4 Indention = ' ' * gIndention S = '\n' for T in self.Expression: if T in self._OPCODE_STRING_: S += Indention + self._OPCODE_STRING_[T] if T not in [0x00, 0x01, 0x02]: S += '\n' else: S += ' ' + gGuidStringFormat % T + '\n' gIndention -= 4 return S def _Unpack(self): # keep header in this Image object self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _GetExpression(self): if self._ExprList == []: Offset = 0 CurrentData = self._OPCODE_ while Offset < len(self): Token = CurrentData.unpack_from(self, Offset) Offset += CurrentData.size if len(Token) == 1: Token = Token[0] if Token in self._NEXT_: CurrentData = self._NEXT_[Token] else: CurrentData = self._GUID_ else: CurrentData = self._OPCODE_ self._ExprList.append(Token) if CurrentData is None: break return self._ExprList Expression = property(_GetExpression) # # FirmwareVolume() class # # A class for Firmware Volume # class FirmwareVolume(Image): # Read FvLength, Attributes, HeaderLength, Checksum _HEADER_ = struct.Struct("16x 1I2H8B 1Q 4x 1I 1H 1H") _HEADER_SIZE_ = _HEADER_.size _FfsGuid = "8C8CE578-8A3D-4F1C-9935-896185C32DD3" _GUID_ = struct.Struct("16x 
1I2H8B") _LENGTH_ = struct.Struct("16x 16x 1Q") _SIG_ = struct.Struct("16x 16x 8x 1I") _ATTR_ = struct.Struct("16x 16x 8x 4x 1I") _HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H") _CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H") def __init__(self, Name=''): Image.__init__(self) self.Name = Name self.FfsDict = sdict() self.OrderedFfsDict = sdict() self.UnDispatchedFfsDict = sdict() self.ProtocolList = sdict() def CheckArchProtocol(self): for Item in EotGlobalData.gArchProtocolGuids: if Item.lower() not in EotGlobalData.gProtocolList: return False return True def ParseDepex(self, Depex, Type): List = None if Type == 'Ppi': List = EotGlobalData.gPpiList if Type == 'Protocol': List = EotGlobalData.gProtocolList DepexStack = [] DepexList = [] DepexString = '' FileDepex = None CouldBeLoaded = True for Index in range(0, len(Depex.Expression)): Item = Depex.Expression[Index] if Item == 0x00: Index = Index + 1 Guid = gGuidStringFormat % Depex.Expression[Index] if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08: return (True, 'BEFORE %s' % Guid, [Guid, 'BEFORE']) elif Item == 0x01: Index = Index + 1 Guid = gGuidStringFormat % Depex.Expression[Index] if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08: return (True, 'AFTER %s' % Guid, [Guid, 'AFTER']) elif Item == 0x02: Index = Index + 1 Guid = gGuidStringFormat % Depex.Expression[Index] if Guid.lower() in List: DepexStack.append(True) DepexList.append(Guid) else: DepexStack.append(False) DepexList.append(Guid) continue elif Item == 0x03 or Item == 0x04: DepexStack.append(eval(str(DepexStack.pop()) + ' ' + Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop()))) DepexList.append(str(DepexList.pop()) + ' ' + Depex._OPCODE_STRING_[Item].upper() + ' ' + str(DepexList.pop())) elif Item == 0x05: DepexStack.append(eval(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop()))) DepexList.append(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexList.pop())) elif Item == 0x06: DepexStack.append(True) DepexList.append('TRUE') DepexString = DepexString + 'TRUE' + ' ' elif Item == 0x07: DepexStack.append(False) DepexList.append('False') DepexString = DepexString + 'FALSE' + ' ' elif Item == 0x08: if Index != len(Depex.Expression) - 1: CouldBeLoaded = False else: CouldBeLoaded = DepexStack.pop() else: CouldBeLoaded = False if DepexList != []: DepexString = DepexList[0].strip() return (CouldBeLoaded, DepexString, FileDepex) def Dispatch(self, Db=None): if Db is None: return False self.UnDispatchedFfsDict = copy.copy(self.FfsDict) # Find PeiCore, DexCore, PeiPriori, DxePriori first FfsSecCoreGuid = None FfsPeiCoreGuid = None FfsDxeCoreGuid = None FfsPeiPrioriGuid = None FfsDxePrioriGuid = None for FfsID in self.UnDispatchedFfsDict.keys(): Ffs = self.UnDispatchedFfsDict[FfsID] if Ffs.Type == 0x03: FfsSecCoreGuid = FfsID continue if Ffs.Type == 0x04: FfsPeiCoreGuid = FfsID continue if Ffs.Type == 0x05: FfsDxeCoreGuid = FfsID continue if Ffs.Guid.lower() == PEI_APRIORI_GUID.lower(): FfsPeiPrioriGuid = FfsID continue if Ffs.Guid.lower() == DXE_APRIORI_GUID.lower(): FfsDxePrioriGuid = FfsID continue # Parse SEC_CORE first if FfsSecCoreGuid is not None: self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid) self.LoadPpi(Db, FfsSecCoreGuid) # Parse PEI first if FfsPeiCoreGuid is not None: self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid) self.LoadPpi(Db, FfsPeiCoreGuid) if FfsPeiPrioriGuid is not None: # Load PEIM described in priori file FfsPeiPriori 
= self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid) if len(FfsPeiPriori.Sections) == 1: Section = FfsPeiPriori.Sections.popitem()[1] if Section.Type == 0x19: GuidStruct = struct.Struct('1I2H8B') Start = 4 while len(Section) > Start: Guid = GuidStruct.unpack_from(Section[Start : Start + 16]) GuidString = gGuidStringFormat % Guid Start = Start + 16 if GuidString in self.UnDispatchedFfsDict: self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString) self.LoadPpi(Db, GuidString) self.DisPatchPei(Db) # Parse DXE then if FfsDxeCoreGuid is not None: self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid) self.LoadProtocol(Db, FfsDxeCoreGuid) if FfsDxePrioriGuid is not None: # Load PEIM described in priori file FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid) if len(FfsDxePriori.Sections) == 1: Section = FfsDxePriori.Sections.popitem()[1] if Section.Type == 0x19: GuidStruct = struct.Struct('1I2H8B') Start = 4 while len(Section) > Start: Guid = GuidStruct.unpack_from(Section[Start : Start + 16]) GuidString = gGuidStringFormat % Guid Start = Start + 16 if GuidString in self.UnDispatchedFfsDict: self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString) self.LoadProtocol(Db, GuidString) self.DisPatchDxe(Db) def LoadProtocol(self, Db, ModuleGuid): SqlCommand = """select GuidValue from Report where SourceFileFullPath in (select Value1 from Inf where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s) and Model = %s) and ItemType = 'Protocol' and ItemMode = 'Produced'""" \ % (ModuleGuid, 5001, 3007) RecordSet = Db.TblReport.Exec(SqlCommand) for Record in RecordSet: SqlCommand = """select Value2 from Inf where BelongsToFile = (select DISTINCT BelongsToFile from Inf where Value1 = (select SourceFileFullPath from Report where GuidValue like '%s' and ItemMode = 'Callback')) and Value1 = 'FILE_GUID'""" % Record[0] CallBackSet = Db.TblReport.Exec(SqlCommand) if CallBackSet != []: EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid else: EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid def LoadPpi(self, Db, ModuleGuid): SqlCommand = """select GuidValue from Report where SourceFileFullPath in (select Value1 from Inf where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s) and Model = %s) and ItemType = 'Ppi' and ItemMode = 'Produced'""" \ % (ModuleGuid, 5001, 3007) RecordSet = Db.TblReport.Exec(SqlCommand) for Record in RecordSet: EotGlobalData.gPpiList[Record[0].lower()] = ModuleGuid def DisPatchDxe(self, Db): IsInstalled = False ScheduleList = sdict() for FfsID in self.UnDispatchedFfsDict.keys(): CouldBeLoaded = False DepexString = '' FileDepex = None Ffs = self.UnDispatchedFfsDict[FfsID] if Ffs.Type == 0x07: # Get Depex IsFoundDepex = False for Section in Ffs.Sections.values(): # Find Depex if Section.Type == 0x13: IsFoundDepex = True CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Protocol') break if Section.Type == 0x01: CompressSections = Section._SubImages[4] for CompressSection in CompressSections.Sections: if CompressSection.Type == 0x13: IsFoundDepex = True CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Protocol') break if CompressSection.Type == 0x02: NewSections = CompressSection._SubImages[4] for NewSection in NewSections.Sections: if NewSection.Type == 0x13: IsFoundDepex = True CouldBeLoaded, DepexString, FileDepex = 
self.ParseDepex(NewSection._SubImages[4], 'Protocol') break # Not find Depex if not IsFoundDepex: CouldBeLoaded = self.CheckArchProtocol() DepexString = '' FileDepex = None # Append New Ffs if CouldBeLoaded: IsInstalled = True NewFfs = self.UnDispatchedFfsDict.pop(FfsID) NewFfs.Depex = DepexString if FileDepex is not None: ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0]) else: ScheduleList[FfsID] = NewFfs else: self.UnDispatchedFfsDict[FfsID].Depex = DepexString for FfsID in ScheduleList.keys(): NewFfs = ScheduleList.pop(FfsID) FfsName = 'UnKnown' self.OrderedFfsDict[FfsID] = NewFfs self.LoadProtocol(Db, FfsID) SqlCommand = """select Value2 from Inf where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s) and Model = %s and Value1='BASE_NAME'""" % (FfsID, 5001, 5001) RecordSet = Db.TblReport.Exec(SqlCommand) if RecordSet != []: FfsName = RecordSet[0][0] if IsInstalled: self.DisPatchDxe(Db) def DisPatchPei(self, Db): IsInstalled = False for FfsID in self.UnDispatchedFfsDict.keys(): CouldBeLoaded = True DepexString = '' FileDepex = None Ffs = self.UnDispatchedFfsDict[FfsID] if Ffs.Type == 0x06 or Ffs.Type == 0x08: # Get Depex for Section in Ffs.Sections.values(): if Section.Type == 0x1B: CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi') break if Section.Type == 0x01: CompressSections = Section._SubImages[4] for CompressSection in CompressSections.Sections: if CompressSection.Type == 0x1B: CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Ppi') break if CompressSection.Type == 0x02: NewSections = CompressSection._SubImages[4] for NewSection in NewSections.Sections: if NewSection.Type == 0x1B: CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Ppi') break # Append New Ffs if CouldBeLoaded: IsInstalled = True NewFfs = self.UnDispatchedFfsDict.pop(FfsID) NewFfs.Depex = DepexString self.OrderedFfsDict[FfsID] = NewFfs self.LoadPpi(Db, FfsID) else: self.UnDispatchedFfsDict[FfsID].Depex = DepexString if IsInstalled: self.DisPatchPei(Db) def __str__(self): global gIndention gIndention += 4 FvInfo = '\n' + ' ' * gIndention FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum) FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict]) gIndention -= 4 return FvInfo + FfsInfo def _Unpack(self): Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0] self.empty() self.extend(self._BUF_[self._OFF_:self._OFF_ + Size]) # traverse the FFS EndOfFv = Size FfsStartAddress = self.HeaderSize LastFfsObj = None while FfsStartAddress < EndOfFv: FfsObj = Ffs() FfsObj.frombuffer(self, FfsStartAddress) FfsId = repr(FfsObj) if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \ or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0): if LastFfsObj is not None: LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj) else: if FfsId in self.FfsDict: EdkLogger.error("FV", 0, "Duplicate GUID in FFS", ExtraData="\t%s @ %s\n\t%s @ %s" \ % (FfsObj.Guid, FfsObj.Offset, self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset)) self.FfsDict[FfsId] = FfsObj if LastFfsObj is not None: LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj) FfsStartAddress += len(FfsObj) # # align to next 8-byte aligned address: A = (A + 8 - 1) & (~(8 - 1)) # The next FFS must be at the latest next 8-byte aligned address # 
FfsStartAddress = (FfsStartAddress + 7) & (~7) LastFfsObj = FfsObj def _GetAttributes(self): return self.GetField(self._ATTR_, 0)[0] def _GetSize(self): return self.GetField(self._LENGTH_, 0)[0] def _GetChecksum(self): return self.GetField(self._CHECKSUM_, 0)[0] def _GetHeaderLength(self): return self.GetField(self._HLEN_, 0)[0] def _GetFileSystemGuid(self): return gGuidStringFormat % self.GetField(self._GUID_, 0) Attributes = property(_GetAttributes) Size = property(_GetSize) Checksum = property(_GetChecksum) HeaderSize = property(_GetHeaderLength) FileSystemGuid = property(_GetFileSystemGuid) ## GuidDefinedImage() class # # A class for GUID Defined Image # class GuidDefinedImage(Image): _HEADER_ = struct.Struct("1I2H8B 1H 1H") _HEADER_SIZE_ = _HEADER_.size _GUID_ = struct.Struct("1I2H8B") _DATA_OFFSET_ = struct.Struct("16x 1H") _ATTR_ = struct.Struct("18x 1H") CRC32_GUID = "FC1BCDB0-7D31-49AA-936A-A4600D9DD083" TIANO_COMPRESS_GUID = 'A31280AD-481E-41B6-95E8-127F4C984779' LZMA_COMPRESS_GUID = 'EE4E5898-3914-4259-9D6E-DC7BD79403CF' def __init__(self, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None): Image.__init__(self) if SectionDefinitionGuid is not None: self.SectionDefinitionGuid = SectionDefinitionGuid if DataOffset is not None: self.DataOffset = DataOffset if Attributes is not None: self.Attributes = Attributes if Data is not None: self.Data = Data def __str__(self): S = "guid=%s" % (gGuidStringFormat % self.SectionDefinitionGuid) for Sec in self.Sections: S += "\n" + str(Sec) return S def _Unpack(self): # keep header in this Image object self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _SetAttribute(self, Attribute): self.SetField(self._ATTR_, 0, Attribute) def _GetAttribute(self): return self.GetField(self._ATTR_)[0] def _SetGuid(self, Guid): self.SetField(self._GUID_, 0, Guid) def _GetGuid(self): return self.GetField(self._GUID_) def _SetDataOffset(self, Offset): self.SetField(self._DATA_OFFSET_, 0, Offset) def _GetDataOffset(self): return self.GetField(self._DATA_OFFSET_)[0] def _GetSections(self): SectionList = [] Guid = gGuidStringFormat % self.SectionDefinitionGuid if Guid == self.CRC32_GUID: # skip the CRC32 value, we don't do CRC32 verification here Offset = self.DataOffset - 4 while Offset < len(self): Sec = Section() try: Sec.frombuffer(self, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary Offset = (Offset + 3) & (~3) except: break SectionList.append(Sec) elif Guid == self.TIANO_COMPRESS_GUID: try: # skip the header Offset = self.DataOffset - 4 TmpData = DeCompress('Framework', self[self.Offset:]) DecData = array('B') DecData.fromstring(TmpData) Offset = 0 while Offset < len(DecData): Sec = Section() try: Sec.frombuffer(DecData, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary Offset = (Offset + 3) & (~3) except: break SectionList.append(Sec) except: pass elif Guid == self.LZMA_COMPRESS_GUID: try: # skip the header Offset = self.DataOffset - 4 TmpData = DeCompress('Lzma', self[self.Offset:]) DecData = array('B') DecData.fromstring(TmpData) Offset = 0 while Offset < len(DecData): Sec = Section() try: Sec.frombuffer(DecData, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary Offset = (Offset + 3) & (~3) except: break SectionList.append(Sec) except: pass return SectionList Attributes = property(_GetAttribute, _SetAttribute) SectionDefinitionGuid = property(_GetGuid, _SetGuid) DataOffset = property(_GetDataOffset, _SetDataOffset) Sections = 
property(_GetSections) ## Section() class # # A class for Section # class Section(Image): _TypeName = { 0x00 : "<unknown>", 0x01 : "COMPRESSION", 0x02 : "GUID_DEFINED", 0x10 : "PE32", 0x11 : "PIC", 0x12 : "TE", 0x13 : "DXE_DEPEX", 0x14 : "VERSION", 0x15 : "USER_INTERFACE", 0x16 : "COMPATIBILITY16", 0x17 : "FIRMWARE_VOLUME_IMAGE", 0x18 : "FREEFORM_SUBTYPE_GUID", 0x19 : "RAW", 0x1B : "PEI_DEPEX" } _SectionSubImages = { 0x01 : CompressedImage, 0x02 : GuidDefinedImage, 0x17 : FirmwareVolume, 0x13 : Depex, 0x1B : Depex, 0x15 : Ui } # Size = 3-byte # Type = 1-byte _HEADER_ = struct.Struct("3B 1B") _HEADER_SIZE_ = _HEADER_.size # SubTypeGuid # _FREE_FORM_SUBTYPE_GUID_HEADER_ = struct.Struct("1I2H8B") _SIZE_ = struct.Struct("3B") _TYPE_ = struct.Struct("3x 1B") def __init__(self, Type=None, Size=None): Image.__init__(self) self._Alignment = 1 if Type is not None: self.Type = Type if Size is not None: self.Size = Size def __str__(self): global gIndention gIndention += 4 SectionInfo = ' ' * gIndention if self.Type in self._TypeName: SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size) else: SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size) for Offset in self._SubImages.keys(): SectionInfo += ", " + str(self._SubImages[Offset]) gIndention -= 4 return SectionInfo def _Unpack(self): self.empty() Type, = self._TYPE_.unpack_from(self._BUF_, self._OFF_) Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_) Size = Size1 + (Size2 << 8) + (Size3 << 16) if Type not in self._SectionSubImages: # no need to extract sub-image, keep all in this Image object self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size]) else: # keep header in this Image object self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._HEADER_SIZE_]) # # use new Image object to represent payload, which may be another kind # of image such as PE32 # PayloadOffset = self._HEADER_SIZE_ PayloadLen = self.Size - self._HEADER_SIZE_ Payload = self._SectionSubImages[self.Type]() Payload.frombuffer(self._BUF_, self._OFF_ + self._HEADER_SIZE_, PayloadLen) self._SubImages[PayloadOffset] = Payload return Size def _SetSize(self, Size): Size1 = Size & 0xFF Size2 = (Size & 0xFF00) >> 8 Size3 = (Size & 0xFF0000) >> 16 self.SetField(self._SIZE_, 0, Size1, Size2, Size3) def _GetSize(self): Size1, Size2, Size3 = self.GetField(self._SIZE_) return Size1 + (Size2 << 8) + (Size3 << 16) def _SetType(self, Type): self.SetField(self._TYPE_, 0, Type) def _GetType(self): return self.GetField(self._TYPE_)[0] def _GetAlignment(self): return self._Alignment def _SetAlignment(self, Alignment): self._Alignment = Alignment AlignmentMask = Alignment - 1 # section alignment is actually for payload, so we need to add header size PayloadOffset = self._OFF_ + self._HEADER_SIZE_ if (PayloadOffset & (~AlignmentMask)) == 0: return NewOffset = (PayloadOffset + AlignmentMask) & (~AlignmentMask) while (NewOffset - PayloadOffset) < self._HEADER_SIZE_: NewOffset += self._Alignment def tofile(self, f): self.Size = len(self) Image.tofile(self, f) for Offset in self._SubImages: self._SubImages[Offset].tofile(f) Type = property(_GetType, _SetType) Size = property(_GetSize, _SetSize) Alignment = property(_GetAlignment, _SetAlignment) ## Ffs() class # # A class for Ffs Section # class Ffs(Image): _FfsFormat = "24B%(payload_size)sB" # skip IntegrityCheck _HEADER_ = struct.Struct("1I2H8B 2x 1B 1B 3B 1B") _HEADER_SIZE_ = _HEADER_.size _NAME_ = struct.Struct("1I2H8B") _INT_CHECK_ = 
struct.Struct("16x 1H") _TYPE_ = struct.Struct("18x 1B") _ATTR_ = struct.Struct("19x 1B") _SIZE_ = struct.Struct("20x 3B") _STATE_ = struct.Struct("23x 1B") VTF_GUID = "1BA0062E-C779-4582-8566-336AE8F78F09" FFS_ATTRIB_FIXED = 0x04 FFS_ATTRIB_DATA_ALIGNMENT = 0x38 FFS_ATTRIB_CHECKSUM = 0x40 _TypeName = { 0x00 : "<unknown>", 0x01 : "RAW", 0x02 : "FREEFORM", 0x03 : "SECURITY_CORE", 0x04 : "PEI_CORE", 0x05 : "DXE_CORE", 0x06 : "PEIM", 0x07 : "DRIVER", 0x08 : "COMBINED_PEIM_DRIVER", 0x09 : "APPLICATION", 0x0A : "SMM", 0x0B : "FIRMWARE_VOLUME_IMAGE", 0x0C : "COMBINED_SMM_DXE", 0x0D : "SMM_CORE", 0x0E : "MM_STANDALONE", 0x0F : "MM_CORE_STANDALONE", 0xc0 : "OEM_MIN", 0xdf : "OEM_MAX", 0xe0 : "DEBUG_MIN", 0xef : "DEBUG_MAX", 0xf0 : "FFS_MIN", 0xff : "FFS_MAX", 0xf0 : "FFS_PAD", } def __init__(self): Image.__init__(self) self.FreeSpace = 0 self.Sections = sdict() self.Depex = '' self.__ID__ = None def __str__(self): global gIndention gIndention += 4 Indention = ' ' * gIndention FfsInfo = Indention FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \ (Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment) SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections.keys()]) gIndention -= 4 return FfsInfo + SectionInfo + "\n" def __len__(self): return self.Size def __repr__(self): return self.__ID__ def _Unpack(self): Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_) Size = Size1 + (Size2 << 8) + (Size3 << 16) self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size]) # Pad FFS may use the same GUID. We need to avoid it. if self.Type == 0xf0: self.__ID__ = str(uuid.uuid1()).upper() else: self.__ID__ = self.Guid # Traverse the SECTION. RAW and PAD do not have sections if self.Type not in [0xf0, 0x01] and Size > 0 and Size < 0xFFFFFF: EndOfFfs = Size SectionStartAddress = self._HEADER_SIZE_ while SectionStartAddress < EndOfFfs: SectionObj = Section() SectionObj.frombuffer(self, SectionStartAddress) #f = open(repr(SectionObj), 'wb') #SectionObj.Size = 0 #SectionObj.tofile(f) #f.close() self.Sections[SectionStartAddress] = SectionObj SectionStartAddress += len(SectionObj) SectionStartAddress = (SectionStartAddress + 3) & (~3) def Pack(self): pass def SetFreeSpace(self, Size): self.FreeSpace = Size def _GetGuid(self): return gGuidStringFormat % self.Name def _SetName(self, Value): # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11 self.SetField(self._NAME_, 0, Value) def _GetName(self): # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11 return self.GetField(self._NAME_) def _SetSize(self, Size): Size1 = Size & 0xFF Size2 = (Size & 0xFF00) >> 8 Size3 = (Size & 0xFF0000) >> 16 self.SetField(self._SIZE_, 0, Size1, Size2, Size3) def _GetSize(self): Size1, Size2, Size3 = self.GetField(self._SIZE_) return Size1 + (Size2 << 8) + (Size3 << 16) def _SetType(self, Type): self.SetField(self._TYPE_, 0, Type) def _GetType(self): return self.GetField(self._TYPE_)[0] def _SetAttributes(self, Value): self.SetField(self._ATTR_, 0, Value) def _GetAttributes(self): return self.GetField(self._ATTR_)[0] def _GetFixed(self): if (self.Attributes & self.FFS_ATTRIB_FIXED) != 0: return True return False def _GetCheckSum(self): if (self.Attributes & self.FFS_ATTRIB_CHECKSUM) != 0: return True return False def _GetAlignment(self): return (self.Attributes & self.FFS_ATTRIB_DATA_ALIGNMENT) >> 3 def _SetState(self, Value): self.SetField(self._STATE_, 0, Value) def 
_GetState(self): return self.GetField(self._STATE_)[0] Name = property(_GetName, _SetName) Guid = property(_GetGuid) Type = property(_GetType, _SetType) Size = property(_GetSize, _SetSize) Attributes = property(_GetAttributes, _SetAttributes) Fixed = property(_GetFixed) Checksum = property(_GetCheckSum) Alignment = property(_GetAlignment) State = property(_GetState, _SetState) ## MultipleFv() class # # A class for Multiple FV # class MultipleFv(FirmwareVolume): def __init__(self, FvList): FirmwareVolume.__init__(self) self.BasicInfo = [] for FvPath in FvList: Fd = None FvName = os.path.splitext(os.path.split(FvPath)[1])[0] if FvPath.strip(): Fd = open(FvPath, 'rb') Buf = array('B') try: Buf.fromfile(Fd, os.path.getsize(FvPath)) except EOFError: pass Fv = FirmwareVolume(FvName) Fv.frombuffer(Buf, 0, len(Buf)) self.BasicInfo.append([Fv.Name, Fv.FileSystemGuid, Fv.Size]) self.FfsDict.append(Fv.FfsDict) ## Class Eot # # This class is used to define Eot main entrance # # @param object: Inherited from object class # class Eot(object): ## The constructor # # @param self: The object pointer # def __init__(self, CommandLineOption=True, IsInit=True, SourceFileList=None, \ IncludeDirList=None, DecFileList=None, GuidList=None, LogFile=None, FvFileList="", MapFileList="", Report='Report.html', Dispatch=None): # Version and Copyright self.VersionNumber = ("0.02" + " " + gBUILD_VERSION) self.Version = "%prog Version " + self.VersionNumber self.Copyright = "Copyright (c) 2008 - 2018, Intel Corporation All rights reserved." self.Report = Report self.IsInit = IsInit self.SourceFileList = SourceFileList self.IncludeDirList = IncludeDirList self.DecFileList = DecFileList self.GuidList = GuidList self.LogFile = LogFile self.FvFileList = FvFileList self.MapFileList = MapFileList self.Dispatch = Dispatch # Check workspace environment if "EFI_SOURCE" not in os.environ: if "EDK_SOURCE" not in os.environ: pass else: EotGlobalData.gEDK_SOURCE = os.path.normpath(os.getenv("EDK_SOURCE")) else: EotGlobalData.gEFI_SOURCE = os.path.normpath(os.getenv("EFI_SOURCE")) EotGlobalData.gEDK_SOURCE = os.path.join(EotGlobalData.gEFI_SOURCE, 'Edk') if "WORKSPACE" not in os.environ: EdkLogger.error("EOT", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found", ExtraData="WORKSPACE") else: EotGlobalData.gWORKSPACE = os.path.normpath(os.getenv("WORKSPACE")) EotGlobalData.gMACRO['WORKSPACE'] = EotGlobalData.gWORKSPACE EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gEFI_SOURCE EotGlobalData.gMACRO['EDK_SOURCE'] = EotGlobalData.gEDK_SOURCE # Parse the options and args if CommandLineOption: self.ParseOption() if self.FvFileList: for FvFile in GetSplitValueList(self.FvFileList, ' '): FvFile = os.path.normpath(FvFile) if not os.path.isfile(FvFile): EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % FvFile) EotGlobalData.gFV_FILE.append(FvFile) else: EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "The fv file list of target platform was not specified") if self.MapFileList: for MapFile in GetSplitValueList(self.MapFileList, ' '): MapFile = os.path.normpath(MapFile) if not os.path.isfile(MapFile): EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % MapFile) EotGlobalData.gMAP_FILE.append(MapFile) # Generate source file list self.GenerateSourceFileList(self.SourceFileList, self.IncludeDirList) # Generate guid list of dec file list self.ParseDecFile(self.DecFileList) # Generate guid list from GUID list file self.ParseGuidList(self.GuidList) # Init Eot database EotGlobalData.gDb = 
Database.Database(Database.DATABASE_PATH) EotGlobalData.gDb.InitDatabase(self.IsInit) # Build ECC database self.BuildDatabase() # Parse Ppi/Protocol self.ParseExecutionOrder() # Merge Identifier tables self.GenerateQueryTable() # Generate report database self.GenerateReportDatabase() # Load Fv Info self.LoadFvInfo() # Load Map Info self.LoadMapInfo() # Generate Report self.GenerateReport() # Convert log file self.ConvertLogFile(self.LogFile) # DONE EdkLogger.quiet("EOT FINISHED!") # Close Database EotGlobalData.gDb.Close() ## ParseDecFile() method # # parse DEC file and get all GUID names with GUID values as {GuidName : GuidValue} # The Dict is stored in EotGlobalData.gGuidDict # # @param self: The object pointer # @param DecFileList: A list of all DEC files # def ParseDecFile(self, DecFileList): if DecFileList: path = os.path.normpath(DecFileList) lfr = open(path, 'rb') for line in lfr: path = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) if os.path.exists(path): dfr = open(path, 'rb') for line in dfr: line = CleanString(line) list = line.split('=') if len(list) == 2: EotGlobalData.gGuidDict[list[0].strip()] = GuidStructureStringToGuidString(list[1].strip()) ## ParseGuidList() method # # Parse Guid list and get all GUID names with GUID values as {GuidName : GuidValue} # The Dict is stored in EotGlobalData.gGuidDict # # @param self: The object pointer # @param GuidList: A list of all GUID and its value # def ParseGuidList(self, GuidList): Path = os.path.join(EotGlobalData.gWORKSPACE, GuidList) if os.path.isfile(Path): for Line in open(Path): if Line.strip(): (GuidName, GuidValue) = Line.split() EotGlobalData.gGuidDict[GuidName] = GuidValue ## ConvertLogFile() method # # Parse a real running log file to get real dispatch order # The result is saved to old file name + '.new' # # @param self: The object pointer # @param LogFile: A real running log file name # def ConvertLogFile(self, LogFile): newline = [] lfr = None lfw = None if LogFile: lfr = open(LogFile, 'rb') lfw = open(LogFile + '.new', 'wb') for line in lfr: line = line.strip() line = line.replace('.efi', '') index = line.find("Loading PEIM at ") if index > -1: newline.append(line[index + 55 : ]) continue index = line.find("Loading driver at ") if index > -1: newline.append(line[index + 57 : ]) continue for line in newline: lfw.write(line + '\r\n') if lfr: lfr.close() if lfw: lfw.close() ## GenerateSourceFileList() method # # Generate a list of all source files # 1. Search the file list one by one # 2. Store inf file name with source file names under it like # { INF file name: [source file1, source file2, ...]} # 3. Search the include list to find all .h files # 4. Store source file list to EotGlobalData.gSOURCE_FILES # 5. Store INF file list to EotGlobalData.gINF_FILES # # @param self: The object pointer # @param SourceFileList: A list of all source files # @param IncludeFileList: A list of all include files # def GenerateSourceFileList(self, SourceFileList, IncludeFileList): EdkLogger.quiet("Generating source files list ... 
") mSourceFileList = [] mInfFileList = [] mDecFileList = [] mFileList = {} mCurrentInfFile = '' mCurrentSourceFileList = [] if SourceFileList: sfl = open(SourceFileList, 'r') for line in sfl: line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) if line[-2:].upper() == '.C' or line[-2:].upper() == '.H': if line not in mCurrentSourceFileList: mCurrentSourceFileList.append(line) mSourceFileList.append(line) EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % line) if line[-4:].upper() == '.INF': if mCurrentInfFile != '': mFileList[mCurrentInfFile] = mCurrentSourceFileList mCurrentSourceFileList = [] mCurrentInfFile = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line)) EotGlobalData.gOP_INF.write('%s\n' % mCurrentInfFile) if mCurrentInfFile not in mFileList: mFileList[mCurrentInfFile] = mCurrentSourceFileList # Get all include files from packages if IncludeFileList: ifl = open(IncludeFileList, 'rb') for line in ifl: if not line.strip(): continue newline = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) for Root, Dirs, Files in os.walk(str(newline)): for File in Files: FullPath = os.path.normpath(os.path.join(Root, File)) if FullPath not in mSourceFileList and File[-2:].upper() == '.H': mSourceFileList.append(FullPath) EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % FullPath) if FullPath not in mDecFileList and File.upper().find('.DEC') > -1: mDecFileList.append(FullPath) EotGlobalData.gSOURCE_FILES = mSourceFileList EotGlobalData.gOP_SOURCE_FILES.close() EotGlobalData.gINF_FILES = mFileList EotGlobalData.gOP_INF.close() ## GenerateReport() method # # Generate final HTML report # # @param self: The object pointer # def GenerateReport(self): EdkLogger.quiet("Generating report file ... ") Rep = Report(self.Report, EotGlobalData.gFV, self.Dispatch) Rep.GenerateReport() ## LoadMapInfo() method # # Load map files and parse them # # @param self: The object pointer # def LoadMapInfo(self): if EotGlobalData.gMAP_FILE != []: EdkLogger.quiet("Parsing Map file ... ") EotGlobalData.gMap = ParseMapFile(EotGlobalData.gMAP_FILE) ## LoadFvInfo() method # # Load FV binary files and parse them # # @param self: The object pointer # def LoadFvInfo(self): EdkLogger.quiet("Parsing FV file ... ") EotGlobalData.gFV = MultipleFv(EotGlobalData.gFV_FILE) EotGlobalData.gFV.Dispatch(EotGlobalData.gDb) for Protocol in EotGlobalData.gProtocolList: EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write('%s\n' %Protocol) ## GenerateReportDatabase() method # # Generate data for the information needed by report # 1. Update name, macro and value of all found PPI/PROTOCOL GUID # 2. Install hard coded PPI/PROTOCOL # # @param self: The object pointer # def GenerateReportDatabase(self): EdkLogger.quiet("Generating the cross-reference table of GUID for Ppi/Protocol ... 
") # Update Protocol/Ppi Guid SqlCommand = """select DISTINCT GuidName from Report""" RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) for Record in RecordSet: GuidName = Record[0] GuidMacro = '' GuidMacro2 = '' GuidValue = '' # Find guid value defined in Dec file if GuidName in EotGlobalData.gGuidDict: GuidValue = EotGlobalData.gGuidDict[GuidName] SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName) EotGlobalData.gDb.TblReport.Exec(SqlCommand) continue # Search defined Macros for guid name SqlCommand ="""select DISTINCT Value, Modifier from Query where Name like '%s'""" % GuidName GuidMacroSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) # Ignore NULL result if not GuidMacroSet: continue GuidMacro = GuidMacroSet[0][0].strip() if not GuidMacro: continue # Find Guid value of Guid Macro SqlCommand ="""select DISTINCT Value from Query2 where Value like '%%%s%%' and Model = %s""" % (GuidMacro, MODEL_IDENTIFIER_MACRO_DEFINE) GuidValueSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) if GuidValueSet != []: GuidValue = GuidValueSet[0][0] GuidValue = GuidValue[GuidValue.find(GuidMacro) + len(GuidMacro) :] GuidValue = GuidValue.lower().replace('\\', '').replace('\r', '').replace('\n', '').replace('l', '').strip() GuidValue = GuidStructureStringToGuidString(GuidValue) SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName) EotGlobalData.gDb.TblReport.Exec(SqlCommand) continue # Update Hard Coded Ppi/Protocol SqlCommand = """select DISTINCT GuidValue, ItemType from Report where ModuleID = -2 and ItemMode = 'Produced'""" RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) for Record in RecordSet: if Record[1] == 'Ppi': EotGlobalData.gPpiList[Record[0].lower()] = -2 if Record[1] == 'Protocol': EotGlobalData.gProtocolList[Record[0].lower()] = -2 ## GenerateQueryTable() method # # Generate two tables improve query performance # # @param self: The object pointer # def GenerateQueryTable(self): EdkLogger.quiet("Generating temp query table for analysis ... ") for Identifier in EotGlobalData.gIdentifierTableList: SqlCommand = """insert into Query (Name, Modifier, Value, Model) select Name, Modifier, Value, Model from %s where (Model = %s or Model = %s)""" \ % (Identifier[0], MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION) EotGlobalData.gDb.TblReport.Exec(SqlCommand) SqlCommand = """insert into Query2 (Name, Modifier, Value, Model) select Name, Modifier, Value, Model from %s where Model = %s""" \ % (Identifier[0], MODEL_IDENTIFIER_MACRO_DEFINE) EotGlobalData.gDb.TblReport.Exec(SqlCommand) ## ParseExecutionOrder() method # # Get final execution order # 1. Search all PPI # 2. Search all PROTOCOL # # @param self: The object pointer # def ParseExecutionOrder(self): EdkLogger.quiet("Searching Ppi/Protocol ... 
") for Identifier in EotGlobalData.gIdentifierTableList: ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled = \ -1, '', '', -1, '', '', '', '', '', '', '', '', 0 SourceFileID = Identifier[0].replace('Identifier', '') SourceFileFullPath = Identifier[1] Identifier = Identifier[0] # Find Ppis ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallPpi', '->InstallPpi', 'PeiInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.ReInstallPpi', '->ReInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2) SearchPpiCallFunction(Identifier, SourceFileID, SourceFileFullPath, ItemMode) ItemMode = 'Consumed' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.LocatePpi', '->LocatePpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Ppi', ItemMode) ItemMode = 'Callback' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.NotifyPpi', '->NotifyPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) # Find Procotols ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallProtocolInterface', '.ReInstallProtocolInterface', '->InstallProtocolInterface', '->ReInstallProtocolInterface', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1) SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallMultipleProtocolInterfaces', '->InstallMultipleProtocolInterfaces', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) ItemMode = 'Consumed' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.LocateProtocol', '->LocateProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0) SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.HandleProtocol', '->HandleProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1) 
SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) ItemMode = 'Callback' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.RegisterProtocolNotify', '->RegisterProtocolNotify', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) # Hard Code EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiSecPlatformInformationPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiNtLoadAsDllPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtPeiLoadFileGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtAutoScanPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtFwhPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtThunkPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiPlatformTypePpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiFrequencySelectionCpuPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiCachePpiGuid', '', '', '', 0) EotGlobalData.gDb.Conn.commit() ## BuildDatabase() methoc # # Build the database for target # # @param self: The object pointer # def BuildDatabase(self): # Clean report table EotGlobalData.gDb.TblReport.Drop() EotGlobalData.gDb.TblReport.Create() # Build database if self.IsInit: self.BuildMetaDataFileDatabase(EotGlobalData.gINF_FILES) EdkLogger.quiet("Building database for source code ...") c.CreateCCodeDB(EotGlobalData.gSOURCE_FILES) EdkLogger.quiet("Building database for source code done!") EotGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EotGlobalData.gDb) ## BuildMetaDataFileDatabase() method # # Build the database for meta data files # # @param self: The object pointer # @param Inf_Files: A list for all INF files # def BuildMetaDataFileDatabase(self, Inf_Files): EdkLogger.quiet("Building database for meta data files ...") for InfFile in Inf_Files: if not InfFile: continue EdkLogger.quiet("Parsing %s ..." 
% str(InfFile)) EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile], '') EotGlobalData.gDb.Conn.commit() EdkLogger.quiet("Building database for meta data files done!") ## ParseOption() method # # Parse command line options # # @param self: The object pointer # def ParseOption(self): (Options, Target) = self.EotOptionParser() # Set log level self.SetLogLevel(Options) if Options.FvFileList: self.FvFileList = Options.FvFileList if Options.MapFileList: self.MapFileList = Options.FvMapFileList if Options.SourceFileList: self.SourceFileList = Options.SourceFileList if Options.IncludeDirList: self.IncludeDirList = Options.IncludeDirList if Options.DecFileList: self.DecFileList = Options.DecFileList if Options.GuidList: self.GuidList = Options.GuidList if Options.LogFile: self.LogFile = Options.LogFile if Options.keepdatabase: self.IsInit = False ## SetLogLevel() method # # Set current log level of the tool based on args # # @param self: The object pointer # @param Option: The option list including log level setting # def SetLogLevel(self, Option): if Option.verbose is not None: EdkLogger.SetLevel(EdkLogger.VERBOSE) elif Option.quiet is not None: EdkLogger.SetLevel(EdkLogger.QUIET) elif Option.debug is not None: EdkLogger.SetLevel(Option.debug + 1) else: EdkLogger.SetLevel(EdkLogger.INFO) ## EotOptionParser() method # # Using standard Python module optparse to parse command line option of this tool. # # @param self: The object pointer # # @retval Opt A optparse.Values object containing the parsed options # @retval Args Target of build command # def EotOptionParser(self): Parser = OptionParser(description = self.Copyright, version = self.Version, prog = "Eot.exe", usage = "%prog [options]") Parser.add_option("-m", "--makefile filename", action="store", type="string", dest='MakeFile', help="Specify a makefile for the platform.") Parser.add_option("-c", "--dsc filename", action="store", type="string", dest="DscFile", help="Specify a dsc file for the platform.") Parser.add_option("-f", "--fv filename", action="store", type="string", dest="FvFileList", help="Specify fv file list, quoted by \"\".") Parser.add_option("-a", "--map filename", action="store", type="string", dest="MapFileList", help="Specify map file list, quoted by \"\".") Parser.add_option("-s", "--source files", action="store", type="string", dest="SourceFileList", help="Specify source file list by a file") Parser.add_option("-i", "--include dirs", action="store", type="string", dest="IncludeDirList", help="Specify include dir list by a file") Parser.add_option("-e", "--dec files", action="store", type="string", dest="DecFileList", help="Specify dec file list by a file") Parser.add_option("-g", "--guid list", action="store", type="string", dest="GuidList", help="Specify guid file list by a file") Parser.add_option("-l", "--log filename", action="store", type="string", dest="LogFile", help="Specify real execution log file") Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Eot database will not be cleaned except report information if this option is specified.") Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.") Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\ "including library instances selected, final dependency expression, "\ "and warning messages, etc.") Parser.add_option("-d", "--debug", action="store", type="int", help="Enable 
debug messages at specified level.") (Opt, Args)=Parser.parse_args() return (Opt, Args) ## # # This acts like the main() function for the script, unless it is 'import'ed into another # script. # if __name__ == '__main__': # Initialize log system EdkLogger.Initialize() EdkLogger.IsRaiseError = False EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n") StartTime = time.clock() Eot = Eot(CommandLineOption=False, SourceFileList=r'C:\TestEot\Source.txt', GuidList=r'C:\TestEot\Guid.txt', FvFileList=r'C:\TestEot\FVRECOVERY.Fv') FinishTime = time.clock() BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime)))) EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
bsd-2-clause
-5,653,460,506,086,089,000
38.241279
199
0.54166
false
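The parsers in the record above repeatedly decode a 24-bit little-endian size field (Size1 + (Size2 << 8) + (Size3 << 16)) and realign offsets to 8-byte (FFS) or 4-byte (section) boundaries. The sketch below isolates both conventions; the helper names are hypothetical, for demonstration only, and are not part of the Eot code.

def unpack_size24(b0, b1, b2):
    """Combine the three stored size bytes into one integer, low byte first."""
    return b0 + (b1 << 8) + (b2 << 16)


def pack_size24(size):
    """Split an integer back into the three bytes kept in the FFS/Section header."""
    return (size & 0xFF, (size & 0xFF00) >> 8, (size & 0xFF0000) >> 16)


def align_up(address, alignment):
    """Round address up to the next multiple of a power-of-two alignment."""
    mask = alignment - 1
    return (address + mask) & ~mask


assert unpack_size24(*pack_size24(0x123456)) == 0x123456
assert align_up(0x11, 8) == 0x18   # FFS files are 8-byte aligned
assert align_up(0x11, 4) == 0x14   # sections are 4-byte aligned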
nilouco/dpAutoRigSystem
dpAutoRigSystem/Modules/dpWheel.py
1
35159
# Thanks to Andrew Christophersen # Maya Wheel Rig with World Vectors video tutorial # https://youtu.be/QpDc93br3dM # importing libraries: import maya.cmds as cmds from Library import dpUtils as utils import dpBaseClass as Base import dpLayoutClass as Layout # global variables to this module: CLASS_NAME = "Wheel" TITLE = "m156_wheel" DESCRIPTION = "m157_wheelDesc" ICON = "/Icons/dp_wheel.png" class Wheel(Base.StartClass, Layout.LayoutClass): def __init__(self, *args, **kwargs): #Add the needed parameter to the kwargs dict to be able to maintain the parameter order kwargs["CLASS_NAME"] = CLASS_NAME kwargs["TITLE"] = TITLE kwargs["DESCRIPTION"] = DESCRIPTION kwargs["ICON"] = ICON Base.StartClass.__init__(self, *args, **kwargs) def createModuleLayout(self, *args): Base.StartClass.createModuleLayout(self) Layout.LayoutClass.basicModuleLayout(self) def createGuide(self, *args): Base.StartClass.createGuide(self) # Custom GUIDE: cmds.addAttr(self.moduleGrp, longName="flip", attributeType='bool') cmds.addAttr(self.moduleGrp, longName="geo", dataType='string') cmds.addAttr(self.moduleGrp, longName="startFrame", attributeType='long', defaultValue=1) cmds.addAttr(self.moduleGrp, longName="showControls", attributeType='bool') cmds.addAttr(self.moduleGrp, longName="steering", attributeType='bool') cmds.setAttr(self.moduleGrp+".flip", 0) cmds.setAttr(self.moduleGrp+".showControls", 1) cmds.setAttr(self.moduleGrp+".steering", 0) cmds.setAttr(self.moduleGrp+".moduleNamespace", self.moduleGrp[:self.moduleGrp.rfind(":")], type='string') self.cvCenterLoc = self.ctrls.cvJointLoc(ctrlName=self.guideName+"_CenterLoc", r=0.6, d=1, rot=(90, 0, 90), guide=True) self.jGuideCenter = cmds.joint(name=self.guideName+"_JGuideCenter", radius=0.001) cmds.setAttr(self.jGuideCenter+".template", 1) cmds.parent(self.jGuideCenter, self.moduleGrp, relative=True) self.cvFrontLoc = self.ctrls.cvControl("id_059_AimLoc", ctrlName=self.guideName+"_FrontLoc", r=0.3, d=1, rot=(0, 0, 90)) self.ctrls.colorShape([self.cvFrontLoc], "blue") shapeSizeCH = self.ctrls.shapeSizeSetup(self.cvFrontLoc) cmds.parent(self.cvFrontLoc, self.cvCenterLoc) cmds.setAttr(self.cvFrontLoc+".tx", 1.3) self.jGuideFront = cmds.joint(name=self.guideName+"_JGuideFront", radius=0.001) cmds.setAttr(self.jGuideFront+".template", 1) cmds.transformLimits(self.cvFrontLoc, translationX=(1, 1), enableTranslationX=(True, False)) radiusCtrl = self.moduleGrp+"_RadiusCtrl" cvFrontLocPosNode = cmds.createNode("plusMinusAverage", name=self.cvFrontLoc+"_Pos_PMA") cmds.setAttr(cvFrontLocPosNode+".input1D[0]", -0.5) cmds.connectAttr(radiusCtrl+".translateX", cvFrontLocPosNode+".input1D[1]") cmds.connectAttr(cvFrontLocPosNode+".output1D", self.cvFrontLoc+".tx") self.ctrls.setLockHide([self.cvCenterLoc, self.cvFrontLoc], ['tx', 'ty', 'tz', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz']) self.cvInsideLoc = self.ctrls.cvLocator(ctrlName=self.guideName+"_InsideLoc", r=0.2, d=1, guide=True) cmds.parent(self.cvInsideLoc, self.cvCenterLoc) cmds.setAttr(self.cvInsideLoc+".tz", 0.3) self.jGuideInside = cmds.joint(name=self.guideName+"_JGuideInside", radius=0.001) cmds.setAttr(self.jGuideInside+".template", 1) cmds.transformLimits(self.cvInsideLoc, tz=(0.01, 1), etz=(True, False)) inverseRadius = cmds.createNode("multiplyDivide", name=self.moduleGrp+"_Radius_Inv_MD") cmds.setAttr(inverseRadius+".input2X", -1) cmds.connectAttr(radiusCtrl+".translateX", inverseRadius+".input1X") cmds.connectAttr(inverseRadius+".outputX", self.cvInsideLoc+".translateY") self.ctrls.setLockHide([self.cvInsideLoc], ['tx', 
'ty', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz']) self.cvOutsideLoc = self.ctrls.cvLocator(ctrlName=self.guideName+"_OutsideLoc", r=0.2, d=1, guide=True) cmds.parent(self.cvOutsideLoc, self.cvCenterLoc) cmds.setAttr(self.cvOutsideLoc+".tz", -0.3) self.jGuideOutside = cmds.joint(name=self.guideName+"_JGuideOutside", radius=0.001) cmds.setAttr(self.jGuideOutside+".template", 1) cmds.transformLimits(self.cvOutsideLoc, tz=(-1, 0.01), etz=(False, True)) cmds.connectAttr(inverseRadius+".outputX", self.cvOutsideLoc+".translateY") self.ctrls.setLockHide([self.cvOutsideLoc], ['tx', 'ty', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz']) cmds.parent(self.cvCenterLoc, self.moduleGrp) cmds.parent(self.jGuideFront, self.jGuideInside, self.jGuideOutside, self.jGuideCenter) cmds.parentConstraint(self.cvCenterLoc, self.jGuideCenter, maintainOffset=False, name=self.jGuideCenter+"_PaC") cmds.parentConstraint(self.cvFrontLoc, self.jGuideFront, maintainOffset=False, name=self.jGuideFront+"_PaC") cmds.parentConstraint(self.cvInsideLoc, self.jGuideInside, maintainOffset=False, name=self.cvInsideLoc+"_PaC") cmds.parentConstraint(self.cvOutsideLoc, self.jGuideOutside, maintainOffset=False, name=self.cvOutsideLoc+"_PaC") def changeStartFrame(self, *args): """ Update moduleGrp startFrame attribute from UI. """ newStartFrameValue = cmds.intField(self.startFrameIF, query=True, value=True) cmds.setAttr(self.moduleGrp+".startFrame", newStartFrameValue) def changeSteering(self, *args): """ Update moduleGrp steering attribute from UI. """ newSterringValue = cmds.checkBox(self.steeringCB, query=True, value=True) cmds.setAttr(self.moduleGrp+".steering", newSterringValue) def changeShowControls(self, *args): """ Update moduleGrp showControls attribute from UI. """ newShowControlsValue = cmds.checkBox(self.showControlsCB, query=True, value=True) cmds.setAttr(self.moduleGrp+".showControls", newShowControlsValue) def changeGeo(self, *args): """ Update moduleGrp geo attribute from UI textField. 
""" newGeoValue = cmds.textField(self.geoTF, query=True, text=True) cmds.setAttr(self.moduleGrp+".geo", newGeoValue, type='string') def rigModule(self, *args): Base.StartClass.rigModule(self) # verify if the guide exists: if cmds.objExists(self.moduleGrp): try: hideJoints = cmds.checkBox('hideJointsCB', query=True, value=True) except: hideJoints = 1 # declare lists to store names and attributes: self.mainCtrlList, self.wheelCtrlList, self.steeringGrpList, self.ctrlHookGrpList = [], [], [], [] # start as no having mirror: sideList = [""] # analisys the mirror module: self.mirrorAxis = cmds.getAttr(self.moduleGrp+".mirrorAxis") if self.mirrorAxis != 'off': # get rigs names: self.mirrorNames = cmds.getAttr(self.moduleGrp+".mirrorName") # get first and last letters to use as side initials (prefix): sideList = [ self.mirrorNames[0]+'_', self.mirrorNames[len(self.mirrorNames)-1]+'_' ] for s, side in enumerate(sideList): duplicated = cmds.duplicate(self.moduleGrp, name=side+self.userGuideName+'_Guide_Base')[0] allGuideList = cmds.listRelatives(duplicated, allDescendents=True) for item in allGuideList: cmds.rename(item, side+self.userGuideName+"_"+item) self.mirrorGrp = cmds.group(name="Guide_Base_Grp", empty=True) cmds.parent(side+self.userGuideName+'_Guide_Base', self.mirrorGrp, absolute=True) # re-rename grp: cmds.rename(self.mirrorGrp, side+self.userGuideName+'_'+self.mirrorGrp) # do a group mirror with negative scaling: if s == 1: if cmds.getAttr(self.moduleGrp+".flip") == 0: for axis in self.mirrorAxis: gotValue = cmds.getAttr(side+self.userGuideName+"_Guide_Base.translate"+axis) flipedValue = gotValue*(-2) cmds.setAttr(side+self.userGuideName+'_'+self.mirrorGrp+'.translate'+axis, flipedValue) else: for axis in self.mirrorAxis: cmds.setAttr(side+self.userGuideName+'_'+self.mirrorGrp+'.scale'+axis, -1) # joint labelling: jointLabelAdd = 1 else: # if not mirror: duplicated = cmds.duplicate(self.moduleGrp, name=self.userGuideName+'_Guide_Base')[0] allGuideList = cmds.listRelatives(duplicated, allDescendents=True) for item in allGuideList: cmds.rename(item, self.userGuideName+"_"+item) self.mirrorGrp = cmds.group(self.userGuideName+'_Guide_Base', name="Guide_Base_Grp", relative=True) #for Maya2012: self.userGuideName+'_'+self.moduleGrp+"_Grp" # re-rename grp: cmds.rename(self.mirrorGrp, self.userGuideName+'_'+self.mirrorGrp) # joint labelling: jointLabelAdd = 0 # store the number of this guide by module type dpAR_count = utils.findModuleLastNumber(CLASS_NAME, "dpAR_type") + 1 # run for all sides for s, side in enumerate(sideList): # declare guides: self.base = side+self.userGuideName+'_Guide_Base' self.cvCenterLoc = side+self.userGuideName+"_Guide_CenterLoc" self.cvFrontLoc = side+self.userGuideName+"_Guide_FrontLoc" self.cvInsideLoc = side+self.userGuideName+"_Guide_InsideLoc" self.cvOutsideLoc = side+self.userGuideName+"_Guide_OutsideLoc" self.radiusGuide = side+self.userGuideName+"_Guide_Base_RadiusCtrl" # create a joint: cmds.select(clear=True) # center joint: self.centerJoint = cmds.joint(name=side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_Jnt", scaleCompensate=False) cmds.addAttr(self.centerJoint, longName='dpAR_joint', attributeType='float', keyable=False) # joint labelling: utils.setJointLabel(self.centerJoint, s+jointLabelAdd, 18, self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']) # create end joint: self.endJoint = cmds.joint(name=side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_JEnd", radius=0.5) # main joint: 
cmds.select(clear=True) self.mainJoint = cmds.joint(name=side+self.userGuideName+"_"+self.langDic[self.langName]['c058_main']+"_Jnt", scaleCompensate=False) cmds.addAttr(self.mainJoint, longName='dpAR_joint', attributeType='float', keyable=False) # joint labelling: utils.setJointLabel(self.mainJoint, s+jointLabelAdd, 18, self.userGuideName+"_"+self.langDic[self.langName]['c058_main']) # create end joint: self.mainEndJoint = cmds.joint(name=side+self.userGuideName+"_"+self.langDic[self.langName]['c058_main']+"_JEnd", radius=0.5) # create controls: self.wheelCtrl = self.ctrls.cvControl("id_060_WheelCenter", side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_Ctrl", r=self.ctrlRadius, d=self.curveDegree) self.mainCtrl = self.ctrls.cvControl("id_061_WheelMain", side+self.userGuideName+"_"+self.langDic[self.langName]['c058_main']+"_Ctrl", r=self.ctrlRadius*0.4, d=self.curveDegree) self.insideCtrl = self.ctrls.cvControl("id_062_WheelPivot", side+self.userGuideName+"_"+self.langDic[self.langName]['c011_RevFoot_B'].capitalize()+"_Ctrl", r=self.ctrlRadius*0.2, d=self.curveDegree, rot=(0, 90, 0)) self.outsideCtrl = self.ctrls.cvControl("id_062_WheelPivot", side+self.userGuideName+"_"+self.langDic[self.langName]['c010_RevFoot_A'].capitalize()+"_Ctrl", r=self.ctrlRadius*0.2, d=self.curveDegree, rot=(0, 90, 0)) self.mainCtrlList.append(self.mainCtrl) self.wheelCtrlList.append(self.wheelCtrl) # origined from attributes: utils.originedFrom(objName=self.mainCtrl, attrString=self.base+";"+self.cvCenterLoc+";"+self.cvFrontLoc+";"+self.radiusGuide) utils.originedFrom(objName=self.insideCtrl, attrString=self.cvInsideLoc) utils.originedFrom(objName=self.outsideCtrl, attrString=self.cvOutsideLoc) # prepare group to receive steering wheel connection: self.toSteeringGrp = cmds.group(self.insideCtrl, name=side+self.userGuideName+"_"+self.langDic[self.langName]['c070_steering'].capitalize()+"_Grp") cmds.addAttr(self.toSteeringGrp, longName=self.langDic[self.langName]['c070_steering'], attributeType='bool', keyable=True) cmds.addAttr(self.toSteeringGrp, longName=self.langDic[self.langName]['c070_steering']+self.langDic[self.langName]['m151_invert'], attributeType='bool', keyable=True) cmds.setAttr(self.toSteeringGrp+"."+self.langDic[self.langName]['c070_steering'], 1) self.steeringGrpList.append(self.toSteeringGrp) # position and orientation of joint and control: cmds.delete(cmds.parentConstraint(self.cvCenterLoc, self.centerJoint, maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvFrontLoc, self.endJoint, maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvCenterLoc, self.wheelCtrl, maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvCenterLoc, self.mainCtrl, maintainOffset=False)) cmds.parentConstraint(self.mainCtrl, self.mainJoint, maintainOffset=False, name=self.mainJoint+"_PaC") cmds.delete(cmds.parentConstraint(self.cvFrontLoc, self.mainEndJoint, maintainOffset=False)) if s == 1 and cmds.getAttr(self.moduleGrp+".flip") == 1: cmds.move(self.ctrlRadius, self.mainCtrl, moveY=True, relative=True, objectSpace=True, worldSpaceDistance=True) else: cmds.move(-self.ctrlRadius, self.mainCtrl, moveY=True, relative=True, objectSpace=True, worldSpaceDistance=True) cmds.delete(cmds.parentConstraint(self.cvInsideLoc, self.toSteeringGrp, maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvOutsideLoc, self.outsideCtrl, maintainOffset=False)) # zeroOut controls: zeroGrpList = utils.zeroOut([self.mainCtrl, self.wheelCtrl, self.toSteeringGrp, self.outsideCtrl]) 
wheelAutoGrp = utils.zeroOut([self.wheelCtrl]) wheelAutoGrp = cmds.rename(wheelAutoGrp, side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_Auto_Grp") # fixing flip mirror: if s == 1: if cmds.getAttr(self.moduleGrp+".flip") == 1: for zeroOutGrp in zeroGrpList: cmds.setAttr(zeroOutGrp+".scaleX", -1) cmds.setAttr(zeroOutGrp+".scaleY", -1) cmds.setAttr(zeroOutGrp+".scaleZ", -1) cmds.addAttr(self.wheelCtrl, longName='scaleCompensate', attributeType="bool", keyable=False) cmds.setAttr(self.wheelCtrl+".scaleCompensate", 1, channelBox=True) cmds.connectAttr(self.wheelCtrl+".scaleCompensate", self.centerJoint+".segmentScaleCompensate", force=True) cmds.addAttr(self.mainCtrl, longName='scaleCompensate', attributeType="bool", keyable=False) cmds.setAttr(self.mainCtrl+".scaleCompensate", 1, channelBox=True) cmds.connectAttr(self.mainCtrl+".scaleCompensate", self.mainJoint+".segmentScaleCompensate", force=True) # hide visibility attributes: self.ctrls.setLockHide([self.mainCtrl, self.insideCtrl, self.outsideCtrl], ['v']) self.ctrls.setLockHide([self.wheelCtrl], ['tx', 'ty', 'tz', 'rx', 'ry', 'sx', 'sy', 'sz', 'v']) # grouping: cmds.parentConstraint(self.wheelCtrl, self.centerJoint, maintainOffset=False, name=self.centerJoint+"_PaC") cmds.scaleConstraint(self.wheelCtrl, self.centerJoint, maintainOffset=True, name=self.centerJoint+"_ScC") cmds.parent(zeroGrpList[1], self.mainCtrl, absolute=True) cmds.parent(zeroGrpList[0], self.outsideCtrl, absolute=True) cmds.parent(zeroGrpList[3], self.insideCtrl, absolute=True) # add attributes: cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c047_autoRotate'], attributeType="bool", defaultValue=1, keyable=True) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c068_startFrame'], attributeType="long", defaultValue=1, keyable=False) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c067_radius'], attributeType="float", min=0.01, defaultValue=self.ctrlRadius, keyable=True) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c069_radiusScale'], attributeType="float", defaultValue=1, keyable=False) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c021_showControls'], attributeType="long", min=0, max=1, defaultValue=0, keyable=True) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c070_steering'], attributeType="bool", defaultValue=0, keyable=True) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['i037_to']+self.langDic[self.langName]['c070_steering'].capitalize(), attributeType="float", defaultValue=0, keyable=False) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c070_steering']+self.langDic[self.langName]['c053_invert'].capitalize(), attributeType="long", min=0, max=1, defaultValue=1, keyable=False) cmds.addAttr(self.wheelCtrl, longName=self.langDic[self.langName]['c093_tryKeepUndo'], attributeType="long", min=0, max=1, defaultValue=1, keyable=False) # get stored values by user: startFrameValue = cmds.getAttr(self.moduleGrp+".startFrame") steeringValue = cmds.getAttr(self.moduleGrp+".steering") showControlsValue = cmds.getAttr(self.moduleGrp+".showControls") cmds.setAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c068_startFrame'], startFrameValue, channelBox=True) cmds.setAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c070_steering'], steeringValue, channelBox=True) cmds.setAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c021_showControls'], showControlsValue, channelBox=True) 
cmds.setAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c070_steering']+self.langDic[self.langName]['c053_invert'].capitalize(), 1, channelBox=True) cmds.setAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c093_tryKeepUndo'], 1, channelBox=True) if s == 1: if cmds.getAttr(self.moduleGrp+".flip") == 1: cmds.setAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c070_steering']+self.langDic[self.langName]['c053_invert'].capitalize(), 0) # automatic rotation wheel setup: receptSteeringMD = cmds.createNode('multiplyDivide', name=side+self.userGuideName+"_"+self.langDic[self.langName]['c070_steering']+"_MD") inverseSteeringMD = cmds.createNode('multiplyDivide', name=side+self.userGuideName+"_"+self.langDic[self.langName]['c070_steering']+"_Inv_MD") steeringInvCnd = cmds.createNode('condition', name=side+self.userGuideName+"_"+self.langDic[self.langName]['c070_steering']+"_Inv_Cnd") cmds.setAttr(steeringInvCnd+".colorIfTrueR", 1) cmds.setAttr(steeringInvCnd+".colorIfFalseR", -1) cmds.connectAttr(self.wheelCtrl+"."+self.langDic[self.langName]['i037_to']+self.langDic[self.langName]['c070_steering'].capitalize(), receptSteeringMD+".input1X", force=True) cmds.connectAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c070_steering'], receptSteeringMD+".input2X", force=True) cmds.connectAttr(receptSteeringMD+".outputX", inverseSteeringMD+".input1X", force=True) cmds.connectAttr(steeringInvCnd+".outColorR", inverseSteeringMD+".input2X", force=True) cmds.connectAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c070_steering']+self.langDic[self.langName]['c053_invert'].capitalize(), steeringInvCnd+".firstTerm", force=True) cmds.connectAttr(inverseSteeringMD+".outputX", self.toSteeringGrp+".rotateY", force=True) # create locators (frontLoc to get direction and oldLoc to store wheel old position): self.frontLoc = cmds.spaceLocator(name=side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_Front_Loc")[0] self.oldLoc = cmds.spaceLocator(name=side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_Old_Loc")[0] cmds.delete(cmds.parentConstraint(self.cvFrontLoc, self.frontLoc, maintainOffset=False)) cmds.parent(self.frontLoc, self.mainCtrl) cmds.delete(cmds.parentConstraint(self.cvCenterLoc, self.oldLoc, maintainOffset=False)) cmds.setAttr(self.frontLoc+".visibility", 0, lock=True) cmds.setAttr(self.oldLoc+".visibility", 0, lock=True) # this wheel auto group locator could be replaced by a decomposeMatrix to get the translation in world space of the Wheel_Auto_Ctrl_Grp instead: self.wheelAutoGrpLoc = cmds.spaceLocator(name=side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_Auto_Loc")[0] cmds.pointConstraint(wheelAutoGrp, self.wheelAutoGrpLoc, maintainOffset=False, name=self.wheelAutoGrpLoc+"_PoC") cmds.setAttr(self.wheelAutoGrpLoc+".visibility", 0, lock=True) expString = "if ("+self.wheelCtrl+"."+self.langDic[self.langName]['c047_autoRotate']+" == 1) {"+\ "\nif ("+self.wheelCtrl+"."+self.langDic[self.langName]['c093_tryKeepUndo']+" == 1) { undoInfo -stateWithoutFlush 0; };"+\ "\nfloat $radius = "+self.wheelCtrl+"."+self.langDic[self.langName]['c067_radius']+" * "+self.wheelCtrl+"."+self.langDic[self.langName]['c069_radiusScale']+\ ";\nvector $moveVectorOld = `xform -q -ws -t \""+self.oldLoc+\ "\"`;\nvector $moveVector = << "+self.wheelAutoGrpLoc+".translateX, "+self.wheelAutoGrpLoc+".translateY, "+self.wheelAutoGrpLoc+".translateZ >>;"+\ "\nvector $dirVector = `xform -q -ws -t \""+self.frontLoc+\ "\"`;\nvector $wheelVector = ($dirVector - 
$moveVector);"+\ "\nvector $motionVector = ($moveVector - $moveVectorOld);"+\ "\nfloat $distance = mag($motionVector);"+\ "\n$dot = dotProduct($motionVector, $wheelVector, 1);\n"+\ wheelAutoGrp+".rotateZ = "+wheelAutoGrp+".rotateZ - 360 / (6.283*$radius) * ($dot*$distance);"+\ "\nxform -t ($moveVector.x) ($moveVector.y) ($moveVector.z) "+self.oldLoc+\ ";\nif (frame == "+self.wheelCtrl+"."+self.langDic[self.langName]['c068_startFrame']+") { "+wheelAutoGrp+".rotateZ = 0; };"+\ "\nif ("+self.wheelCtrl+"."+self.langDic[self.langName]['c093_tryKeepUndo']+" == 1) { undoInfo -stateWithoutFlush 1; };};" # expression: cmds.expression(name=side+self.userGuideName+"_"+self.langDic[self.langName]['m156_wheel']+"_Exp", object=self.frontLoc, string=expString) self.ctrls.setLockHide([self.frontLoc, self.wheelAutoGrpLoc], ['tx', 'ty', 'tz', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz', 'v']) # deformers: self.loadedGeo = cmds.getAttr(self.moduleGrp+".geo") # geometry holder: self.geoHolder = cmds.polyCube(name=side+self.userGuideName+"_"+self.langDic[self.langName]['c046_holder']+"_Geo", constructionHistory=False)[0] cmds.delete(cmds.parentConstraint(self.cvCenterLoc, self.geoHolder, maintainOffset=False)) cmds.setAttr(self.geoHolder+".visibility", 0, lock=True) # skinning: cmds.skinCluster(self.centerJoint, self.geoHolder, toSelectedBones=True, dropoffRate=4.0, maximumInfluences=3, skinMethod=0, normalizeWeights=1, removeUnusedInfluence=False, name=side+self.userGuideName+"_"+self.langDic[self.langName]['c046_holder']+"_SC") if self.loadedGeo: if cmds.objExists(self.loadedGeo): baseName = utils.extractSuffix(self.loadedGeo) skinClusterName = baseName+"_SC" if "|" in skinClusterName: skinClusterName = skinClusterName[skinClusterName.rfind("|")+1:] try: cmds.skinCluster(self.centerJoint, self.loadedGeo, toSelectedBones=True, dropoffRate=4.0, maximumInfluences=3, skinMethod=0, normalizeWeights=1, removeUnusedInfluence=False, name=skinClusterName) except: childList = cmds.listRelatives(self.loadedGeo, children=True, allDescendents=True) if childList: for item in childList: itemType = cmds.objectType(item) if itemType == "mesh" or itemType == "nurbsSurface": try: skinClusterName = utils.extractSuffix(item)+"_SC" cmds.skinCluster(self.centerJoint, item, toSelectedBones=True, dropoffRate=4.0, maximumInfluences=3, skinMethod=0, normalizeWeights=1, removeUnusedInfluence=False, name=skinClusterName) except: pass # lattice: latticeList = cmds.lattice(self.geoHolder, divisions=(6, 6, 6), outsideLattice=2, outsideFalloffDistance=1, position=(0, 0, 0), scale=(self.ctrlRadius*2, self.ctrlRadius*2, self.ctrlRadius*2), name=side+self.userGuideName+"_FFD") #[deformer, lattice, base] cmds.scale(self.ctrlRadius*2, self.ctrlRadius*2, self.ctrlRadius*2, latticeList[2]) # clusters: upperClusterList = cmds.cluster(latticeList[1]+".pt[0:5][4:5][0:5]", relative=True, name=side+self.userGuideName+"_"+self.langDic[self.langName]['c044_upper']+"_Cls") #[deform, handle] middleClusterList = cmds.cluster(latticeList[1]+".pt[0:5][2:3][0:5]", relative=True, name=side+self.userGuideName+"_"+self.langDic[self.langName]['m033_middle']+"_Cls") #[deform, handle] lowerClusterList = cmds.cluster(latticeList[1]+".pt[0:5][0:1][0:5]", relative=True, name=side+self.userGuideName+"_"+self.langDic[self.langName]['c045_lower']+"_Cls") #[deform, handle] clusterGrpList = utils.zeroOut([upperClusterList[1], middleClusterList[1], lowerClusterList[1]]) clustersGrp = cmds.group(clusterGrpList, name=side+self.userGuideName+"_Clusters_Grp") # deform controls: 
upperDefCtrl = self.ctrls.cvControl("id_063_WheelDeform", side+self.userGuideName+"_"+self.langDic[self.langName]['c044_upper']+"_Ctrl", r=self.ctrlRadius*0.5, d=self.curveDegree) middleDefCtrl = self.ctrls.cvControl("id_064_WheelMiddle", side+self.userGuideName+"_"+self.langDic[self.langName]['m033_middle']+"_Ctrl", r=self.ctrlRadius*0.5, d=self.curveDegree) lowerDefCtrl = self.ctrls.cvControl("id_063_WheelDeform", side+self.userGuideName+"_"+self.langDic[self.langName]['c045_lower']+"_Ctrl", r=self.ctrlRadius*0.5, d=self.curveDegree, rot=(0, 0, 180)) defCtrlGrpList = utils.zeroOut([upperDefCtrl, middleDefCtrl, lowerDefCtrl]) defCtrlGrp = cmds.group(defCtrlGrpList, name=side+self.userGuideName+"_Ctrl_Grp") # positions: cmds.delete(cmds.parentConstraint(upperClusterList[1], defCtrlGrpList[0], maintainOffset=False)) cmds.delete(cmds.parentConstraint(middleClusterList[1], defCtrlGrpList[1], maintainOffset=False)) cmds.delete(cmds.parentConstraint(lowerClusterList[1], defCtrlGrpList[2], maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvCenterLoc, latticeList[1], maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvCenterLoc, latticeList[2], maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvCenterLoc, clustersGrp, maintainOffset=False)) cmds.delete(cmds.parentConstraint(self.cvCenterLoc, defCtrlGrp, maintainOffset=False)) outsideDist = cmds.getAttr(self.cvOutsideLoc+".tz") if s == 1: if cmds.getAttr(self.moduleGrp+".flip") == 1: outsideDist = -outsideDist cmds.move(outsideDist, defCtrlGrp, moveZ=True, relative=True, objectSpace=True, worldSpaceDistance=True) self.ctrls.directConnect(upperDefCtrl, upperClusterList[1]) self.ctrls.directConnect(middleDefCtrl, middleClusterList[1]) self.ctrls.directConnect(lowerDefCtrl, lowerClusterList[1]) # grouping deformers: if self.loadedGeo: if cmds.objExists(self.loadedGeo): cmds.lattice(latticeList[0], edit=True, geometry=self.loadedGeo) defGrp = cmds.group(latticeList[1], latticeList[2], clustersGrp, name=side+self.userGuideName+"_Deform_Grp") cmds.parentConstraint(self.mainCtrl, defGrp, maintainOffset=True, name=defGrp+"_PaC") cmds.scaleConstraint(self.mainCtrl, defGrp, maintainOffset=True, name=defGrp+"_ScC") cmds.parent(defCtrlGrp, self.mainCtrl) cmds.connectAttr(self.wheelCtrl+"."+self.langDic[self.langName]['c021_showControls'], defCtrlGrp+".visibility", force=True) # create a masterModuleGrp to be checked if this rig exists: self.toCtrlHookGrp = cmds.group(zeroGrpList[2], name=side+self.userGuideName+"_Control_Grp") self.toScalableHookGrp = cmds.group(self.centerJoint, self.mainJoint, defGrp, name=side+self.userGuideName+"_Joint_Grp") self.toStaticHookGrp = cmds.group(self.toCtrlHookGrp, self.toScalableHookGrp, self.oldLoc, self.wheelAutoGrpLoc, self.geoHolder, name=side+self.userGuideName+"_Grp") # add hook attributes to be read when rigging integrated modules: utils.addHook(objName=self.toCtrlHookGrp, hookType='ctrlHook') utils.addHook(objName=self.toScalableHookGrp, hookType='scalableHook') utils.addHook(objName=self.toStaticHookGrp, hookType='staticHook') cmds.addAttr(self.toStaticHookGrp, longName="dpAR_name", dataType="string") cmds.addAttr(self.toStaticHookGrp, longName="dpAR_type", dataType="string") cmds.setAttr(self.toStaticHookGrp+".dpAR_name", self.userGuideName, type="string") cmds.setAttr(self.toStaticHookGrp+".dpAR_type", CLASS_NAME, type="string") # add module type counter value cmds.addAttr(self.toStaticHookGrp, longName='dpAR_count', attributeType='long', keyable=False) 
cmds.setAttr(self.toStaticHookGrp+'.dpAR_count', dpAR_count) self.ctrlHookGrpList.append(self.toCtrlHookGrp) if hideJoints: cmds.setAttr(self.toScalableHookGrp+".visibility", 0) # delete duplicated group for side (mirror): cmds.delete(side+self.userGuideName+'_'+self.mirrorGrp) # finalize this rig: self.integratingInfo() cmds.select(clear=True) # delete UI (moduleLayout), GUIDE and moduleInstance namespace: self.deleteModule() def integratingInfo(self, *args): Base.StartClass.integratingInfo(self) """ This method will create a dictionary with informations about integrations system between modules. """ self.integratedActionsDic = { "module": { "mainCtrlList" : self.mainCtrlList, "wheelCtrlList" : self.wheelCtrlList, "steeringGrpList" : self.steeringGrpList, "ctrlHookGrpList" : self.ctrlHookGrpList, } } ### # # Wheel Auto Rotation Expression: # # if (WHEEL_CTRL.AUTO_ROTATE == 1) { # if (WHEEL_CTRL.TRYKEEPUNDO == 1) { undoInfo -stateWithoutFlush 0; }; # float $radius = WHEEL_CTRL.RADIUS * WHEEL_CTRL.RADIUSSCALE; # vector $moveVectorOld = `xform -q -ws -t "OLD_LOC"`; # vector $moveVector = << AUTO_GRP_LOC.translateX, AUTO_GRP_LOC.translateY, AUTO_GRP_LOC.translateZ >>; # vector $dirVector = `xform -q -ws -t "FRONT_LOC"`; # vector $wheelVector = ($dirVector - $moveVector); # vector $motionVector = ($moveVector - $moveVectorOld); # float $distance = mag($motionVector); # $dot = dotProduct($motionVector, $wheelVector, 1); # AUTO_GRP.rotateZ = AUTO_GRP.rotateZ - 360 / (6.283*$radius) * ($dot*$distance); # xform -t ($moveVector.x) ($moveVector.y) ($moveVector.z) OLD_LOC; # if (frame == WHEEL_CTRL.START_FRAME) { AUTO_GRP.rotateZ = 0; }; # if (WHEEL_CTRL.TRYKEEPUNDO == 1) { undoInfo -stateWithoutFlush 1; };}; # ###
gpl-2.0
6,195,987,666,483,919,000
73.649682
272
0.622401
false
rjw57/rbc
test/test_switch.py
1
1234
def test_basic_switch(check_output):
    check_output('''
    main() {
        extrn putchar;
        auto i;
        i = 0;
        while(i <= 4) {
            describe(i);
            putchar('*n');
            ++i;
        }
    }

    describe(val) {
        extrn putstr, putnumb;
        putnumb(val);
        putstr(" is ");
        switch(val) {
            case 0:
                putstr("zero");
                break;
            case 1:
                putstr("one");
                break;
            default:
                putstr("many");
                break;
        }
    }
    ''', '0 is zero\n1 is one\n2 is many\n3 is many\n4 is many\n')


def test_fallthrough(check_output):
    check_output('''
    main() {
        extrn putchar;
        auto i;
        i = 0;
        while(i <= 4) {
            describe(i);
            putchar('*n');
            ++i;
        }
    }

    describe(val) {
        extrn putstr, putnumb;
        putnumb(val);
        putstr(" is ");
        switch(val) {
            case 0:
                putstr("zero");
            case 1:
                putstr("one");
                break;
            default:
                putstr("many");
        }
    }
    ''', '0 is zeroone\n1 is one\n2 is many\n3 is many\n4 is many\n')
mit
4,395,934,646,460,936,000
27.045455
69
0.389789
false
Nikola-K/tp_smapi_pyqt
design.py
1
28157
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'design.ui' # # Created: Sat Feb 7 18:23:12 2015 # by: PyQt4 UI code generator 4.11.2 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName(_fromUtf8("MainWindow")) MainWindow.resize(421, 565) self.centralwidget = QtGui.QWidget(MainWindow) self.centralwidget.setObjectName(_fromUtf8("centralwidget")) self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget) self.verticalLayout.setContentsMargins(3, 5, 3, -1) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) self.label = QtGui.QLabel(self.centralwidget) self.label.setObjectName(_fromUtf8("label")) self.horizontalLayout.addWidget(self.label) self.batteryComboBox = QtGui.QComboBox(self.centralwidget) self.batteryComboBox.setObjectName(_fromUtf8("batteryComboBox")) self.batteryComboBox.addItem(_fromUtf8("")) self.batteryComboBox.addItem(_fromUtf8("")) self.horizontalLayout.addWidget(self.batteryComboBox) self.line = QtGui.QFrame(self.centralwidget) self.line.setFrameShape(QtGui.QFrame.VLine) self.line.setFrameShadow(QtGui.QFrame.Sunken) self.line.setObjectName(_fromUtf8("line")) self.horizontalLayout.addWidget(self.line) self.label_2 = QtGui.QLabel(self.centralwidget) self.label_2.setObjectName(_fromUtf8("label_2")) self.horizontalLayout.addWidget(self.label_2) self.installed = QtGui.QLabel(self.centralwidget) self.installed.setFrameShape(QtGui.QFrame.Box) self.installed.setFrameShadow(QtGui.QFrame.Plain) self.installed.setAlignment(QtCore.Qt.AlignCenter) self.installed.setObjectName(_fromUtf8("installed")) self.horizontalLayout.addWidget(self.installed) self.verticalLayout.addLayout(self.horizontalLayout) self.horizontalLayout_2 = QtGui.QHBoxLayout() self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2")) self.label_25 = QtGui.QLabel(self.centralwidget) self.label_25.setObjectName(_fromUtf8("label_25")) self.horizontalLayout_2.addWidget(self.label_25) self.ac_connected = QtGui.QLabel(self.centralwidget) self.ac_connected.setFrameShape(QtGui.QFrame.Box) self.ac_connected.setAlignment(QtCore.Qt.AlignCenter) self.ac_connected.setObjectName(_fromUtf8("ac_connected")) self.horizontalLayout_2.addWidget(self.ac_connected) self.verticalLayout.addLayout(self.horizontalLayout_2) self.horizontalLayout_4 = QtGui.QHBoxLayout() self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4")) self.formLayout = QtGui.QFormLayout() self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout.setObjectName(_fromUtf8("formLayout")) self.label_3 = QtGui.QLabel(self.centralwidget) self.label_3.setObjectName(_fromUtf8("label_3")) self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_3) self.state = QtGui.QLabel(self.centralwidget) self.state.setFrameShape(QtGui.QFrame.Box) self.state.setAlignment(QtCore.Qt.AlignCenter) self.state.setObjectName(_fromUtf8("state")) self.formLayout.setWidget(0, 
QtGui.QFormLayout.FieldRole, self.state) self.label_4 = QtGui.QLabel(self.centralwidget) self.label_4.setObjectName(_fromUtf8("label_4")) self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_4) self.cycle_count = QtGui.QLabel(self.centralwidget) self.cycle_count.setFrameShape(QtGui.QFrame.Box) self.cycle_count.setAlignment(QtCore.Qt.AlignCenter) self.cycle_count.setObjectName(_fromUtf8("cycle_count")) self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.cycle_count) self.label_5 = QtGui.QLabel(self.centralwidget) self.label_5.setObjectName(_fromUtf8("label_5")) self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_5) self.current_now = QtGui.QLabel(self.centralwidget) self.current_now.setFrameShape(QtGui.QFrame.Box) self.current_now.setAlignment(QtCore.Qt.AlignCenter) self.current_now.setObjectName(_fromUtf8("current_now")) self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.current_now) self.label_6 = QtGui.QLabel(self.centralwidget) self.label_6.setObjectName(_fromUtf8("label_6")) self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_6) self.current_avg = QtGui.QLabel(self.centralwidget) self.current_avg.setFrameShape(QtGui.QFrame.Box) self.current_avg.setAlignment(QtCore.Qt.AlignCenter) self.current_avg.setObjectName(_fromUtf8("current_avg")) self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.current_avg) self.label_7 = QtGui.QLabel(self.centralwidget) self.label_7.setObjectName(_fromUtf8("label_7")) self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.label_7) self.power_now = QtGui.QLabel(self.centralwidget) self.power_now.setFrameShape(QtGui.QFrame.Box) self.power_now.setAlignment(QtCore.Qt.AlignCenter) self.power_now.setObjectName(_fromUtf8("power_now")) self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.power_now) self.label_8 = QtGui.QLabel(self.centralwidget) self.label_8.setObjectName(_fromUtf8("label_8")) self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.label_8) self.power_avg = QtGui.QLabel(self.centralwidget) self.power_avg.setFrameShape(QtGui.QFrame.Box) self.power_avg.setAlignment(QtCore.Qt.AlignCenter) self.power_avg.setObjectName(_fromUtf8("power_avg")) self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.power_avg) self.label_9 = QtGui.QLabel(self.centralwidget) self.label_9.setObjectName(_fromUtf8("label_9")) self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_9) self.last_full_capacity = QtGui.QLabel(self.centralwidget) self.last_full_capacity.setFrameShape(QtGui.QFrame.Box) self.last_full_capacity.setAlignment(QtCore.Qt.AlignCenter) self.last_full_capacity.setObjectName(_fromUtf8("last_full_capacity")) self.formLayout.setWidget(6, QtGui.QFormLayout.FieldRole, self.last_full_capacity) self.label_10 = QtGui.QLabel(self.centralwidget) self.label_10.setObjectName(_fromUtf8("label_10")) self.formLayout.setWidget(7, QtGui.QFormLayout.LabelRole, self.label_10) self.remaining_percent = QtGui.QLabel(self.centralwidget) self.remaining_percent.setFrameShape(QtGui.QFrame.Box) self.remaining_percent.setAlignment(QtCore.Qt.AlignCenter) self.remaining_percent.setObjectName(_fromUtf8("remaining_percent")) self.formLayout.setWidget(7, QtGui.QFormLayout.FieldRole, self.remaining_percent) self.label_11 = QtGui.QLabel(self.centralwidget) self.label_11.setObjectName(_fromUtf8("label_11")) self.formLayout.setWidget(8, QtGui.QFormLayout.LabelRole, self.label_11) self.remaining_running_time = QtGui.QLabel(self.centralwidget) 
self.remaining_running_time.setFrameShape(QtGui.QFrame.Box) self.remaining_running_time.setAlignment(QtCore.Qt.AlignCenter) self.remaining_running_time.setObjectName(_fromUtf8("remaining_running_time")) self.formLayout.setWidget(8, QtGui.QFormLayout.FieldRole, self.remaining_running_time) self.label_12 = QtGui.QLabel(self.centralwidget) self.label_12.setObjectName(_fromUtf8("label_12")) self.formLayout.setWidget(9, QtGui.QFormLayout.LabelRole, self.label_12) self.remaining_charge_time = QtGui.QLabel(self.centralwidget) self.remaining_charge_time.setFrameShape(QtGui.QFrame.Box) self.remaining_charge_time.setAlignment(QtCore.Qt.AlignCenter) self.remaining_charge_time.setObjectName(_fromUtf8("remaining_charge_time")) self.formLayout.setWidget(9, QtGui.QFormLayout.FieldRole, self.remaining_charge_time) self.label_13 = QtGui.QLabel(self.centralwidget) self.label_13.setObjectName(_fromUtf8("label_13")) self.formLayout.setWidget(10, QtGui.QFormLayout.LabelRole, self.label_13) self.remaining_capacity = QtGui.QLabel(self.centralwidget) self.remaining_capacity.setFrameShape(QtGui.QFrame.Box) self.remaining_capacity.setAlignment(QtCore.Qt.AlignCenter) self.remaining_capacity.setObjectName(_fromUtf8("remaining_capacity")) self.formLayout.setWidget(10, QtGui.QFormLayout.FieldRole, self.remaining_capacity) self.horizontalLayout_4.addLayout(self.formLayout) self.formLayout_2 = QtGui.QFormLayout() self.formLayout_2.setObjectName(_fromUtf8("formLayout_2")) self.label_24 = QtGui.QLabel(self.centralwidget) self.label_24.setObjectName(_fromUtf8("label_24")) self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_24) self.label_14 = QtGui.QLabel(self.centralwidget) self.label_14.setObjectName(_fromUtf8("label_14")) self.formLayout_2.setWidget(10, QtGui.QFormLayout.LabelRole, self.label_14) self.label_22 = QtGui.QLabel(self.centralwidget) self.label_22.setObjectName(_fromUtf8("label_22")) self.formLayout_2.setWidget(9, QtGui.QFormLayout.LabelRole, self.label_22) self.label_20 = QtGui.QLabel(self.centralwidget) self.label_20.setObjectName(_fromUtf8("label_20")) self.formLayout_2.setWidget(8, QtGui.QFormLayout.LabelRole, self.label_20) self.label_17 = QtGui.QLabel(self.centralwidget) self.label_17.setObjectName(_fromUtf8("label_17")) self.formLayout_2.setWidget(7, QtGui.QFormLayout.LabelRole, self.label_17) self.label_16 = QtGui.QLabel(self.centralwidget) self.label_16.setObjectName(_fromUtf8("label_16")) self.formLayout_2.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_16) self.label_18 = QtGui.QLabel(self.centralwidget) self.label_18.setObjectName(_fromUtf8("label_18")) self.formLayout_2.setWidget(5, QtGui.QFormLayout.LabelRole, self.label_18) self.label_23 = QtGui.QLabel(self.centralwidget) self.label_23.setObjectName(_fromUtf8("label_23")) self.formLayout_2.setWidget(4, QtGui.QFormLayout.LabelRole, self.label_23) self.label_21 = QtGui.QLabel(self.centralwidget) self.label_21.setObjectName(_fromUtf8("label_21")) self.formLayout_2.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_21) self.label_19 = QtGui.QLabel(self.centralwidget) self.label_19.setObjectName(_fromUtf8("label_19")) self.formLayout_2.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_19) self.label_15 = QtGui.QLabel(self.centralwidget) self.label_15.setObjectName(_fromUtf8("label_15")) self.formLayout_2.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_15) self.design_capacity = QtGui.QLabel(self.centralwidget) self.design_capacity.setFrameShape(QtGui.QFrame.Box) 
self.design_capacity.setAlignment(QtCore.Qt.AlignCenter) self.design_capacity.setObjectName(_fromUtf8("design_capacity")) self.formLayout_2.setWidget(10, QtGui.QFormLayout.FieldRole, self.design_capacity) self.manufacturing_date = QtGui.QLabel(self.centralwidget) self.manufacturing_date.setFrameShape(QtGui.QFrame.Box) self.manufacturing_date.setAlignment(QtCore.Qt.AlignCenter) self.manufacturing_date.setObjectName(_fromUtf8("manufacturing_date")) self.formLayout_2.setWidget(9, QtGui.QFormLayout.FieldRole, self.manufacturing_date) self.chemistry = QtGui.QLabel(self.centralwidget) self.chemistry.setFrameShape(QtGui.QFrame.Box) self.chemistry.setAlignment(QtCore.Qt.AlignCenter) self.chemistry.setObjectName(_fromUtf8("chemistry")) self.formLayout_2.setWidget(8, QtGui.QFormLayout.FieldRole, self.chemistry) self.manufacturer = QtGui.QLabel(self.centralwidget) self.manufacturer.setFrameShape(QtGui.QFrame.Box) self.manufacturer.setAlignment(QtCore.Qt.AlignCenter) self.manufacturer.setObjectName(_fromUtf8("manufacturer")) self.formLayout_2.setWidget(7, QtGui.QFormLayout.FieldRole, self.manufacturer) self.design_voltage = QtGui.QLabel(self.centralwidget) self.design_voltage.setFrameShape(QtGui.QFrame.Box) self.design_voltage.setAlignment(QtCore.Qt.AlignCenter) self.design_voltage.setObjectName(_fromUtf8("design_voltage")) self.formLayout_2.setWidget(6, QtGui.QFormLayout.FieldRole, self.design_voltage) self.model = QtGui.QLabel(self.centralwidget) self.model.setFrameShape(QtGui.QFrame.Box) self.model.setAlignment(QtCore.Qt.AlignCenter) self.model.setObjectName(_fromUtf8("model")) self.formLayout_2.setWidget(5, QtGui.QFormLayout.FieldRole, self.model) self.first_use_date = QtGui.QLabel(self.centralwidget) self.first_use_date.setFrameShape(QtGui.QFrame.Box) self.first_use_date.setAlignment(QtCore.Qt.AlignCenter) self.first_use_date.setObjectName(_fromUtf8("first_use_date")) self.formLayout_2.setWidget(4, QtGui.QFormLayout.FieldRole, self.first_use_date) self.serial = QtGui.QLabel(self.centralwidget) self.serial.setFrameShape(QtGui.QFrame.Box) self.serial.setAlignment(QtCore.Qt.AlignCenter) self.serial.setObjectName(_fromUtf8("serial")) self.formLayout_2.setWidget(3, QtGui.QFormLayout.FieldRole, self.serial) self.barcoding = QtGui.QLabel(self.centralwidget) self.barcoding.setFrameShape(QtGui.QFrame.Box) self.barcoding.setAlignment(QtCore.Qt.AlignCenter) self.barcoding.setObjectName(_fromUtf8("barcoding")) self.formLayout_2.setWidget(2, QtGui.QFormLayout.FieldRole, self.barcoding) self.voltage = QtGui.QLabel(self.centralwidget) self.voltage.setFrameShape(QtGui.QFrame.Box) self.voltage.setAlignment(QtCore.Qt.AlignCenter) self.voltage.setObjectName(_fromUtf8("voltage")) self.formLayout_2.setWidget(1, QtGui.QFormLayout.FieldRole, self.voltage) self.temperature = QtGui.QLabel(self.centralwidget) self.temperature.setFrameShape(QtGui.QFrame.Box) self.temperature.setAlignment(QtCore.Qt.AlignCenter) self.temperature.setObjectName(_fromUtf8("temperature")) self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.temperature) self.horizontalLayout_4.addLayout(self.formLayout_2) self.verticalLayout.addLayout(self.horizontalLayout_4) self.groupBox = QtGui.QGroupBox(self.centralwidget) self.groupBox.setObjectName(_fromUtf8("groupBox")) self.verticalLayout_3 = QtGui.QVBoxLayout(self.groupBox) self.verticalLayout_3.setContentsMargins(0, 4, 0, 0) self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3")) self.horizontalLayout_6 = QtGui.QHBoxLayout() 
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6")) self.label_49 = QtGui.QLabel(self.groupBox) self.label_49.setObjectName(_fromUtf8("label_49")) self.horizontalLayout_6.addWidget(self.label_49) self.start_charge_slider = QtGui.QSlider(self.groupBox) self.start_charge_slider.setSliderPosition(1) self.start_charge_slider.setOrientation(QtCore.Qt.Horizontal) self.start_charge_slider.setTickPosition(QtGui.QSlider.TicksBelow) self.start_charge_slider.setTickInterval(25) self.start_charge_slider.setObjectName(_fromUtf8("start_charge_slider")) self.horizontalLayout_6.addWidget(self.start_charge_slider) self.start_charge_spinbox = QtGui.QSpinBox(self.groupBox) self.start_charge_spinbox.setMinimum(1) self.start_charge_spinbox.setObjectName(_fromUtf8("start_charge_spinbox")) self.horizontalLayout_6.addWidget(self.start_charge_spinbox) self.verticalLayout_3.addLayout(self.horizontalLayout_6) self.horizontalLayout_5 = QtGui.QHBoxLayout() self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5")) self.label_50 = QtGui.QLabel(self.groupBox) self.label_50.setObjectName(_fromUtf8("label_50")) self.horizontalLayout_5.addWidget(self.label_50) self.stop_charge_slider = QtGui.QSlider(self.groupBox) self.stop_charge_slider.setMinimum(1) self.stop_charge_slider.setMaximum(100) self.stop_charge_slider.setOrientation(QtCore.Qt.Horizontal) self.stop_charge_slider.setTickPosition(QtGui.QSlider.TicksBelow) self.stop_charge_slider.setTickInterval(25) self.stop_charge_slider.setObjectName(_fromUtf8("stop_charge_slider")) self.horizontalLayout_5.addWidget(self.stop_charge_slider) self.stop_charge_spinbox = QtGui.QSpinBox(self.groupBox) self.stop_charge_spinbox.setMinimum(1) self.stop_charge_spinbox.setMaximum(100) self.stop_charge_spinbox.setObjectName(_fromUtf8("stop_charge_spinbox")) self.horizontalLayout_5.addWidget(self.stop_charge_spinbox) self.verticalLayout_3.addLayout(self.horizontalLayout_5) self.label_26 = QtGui.QLabel(self.groupBox) font = QtGui.QFont() font.setPointSize(10) self.label_26.setFont(font) self.label_26.setWordWrap(True) self.label_26.setObjectName(_fromUtf8("label_26")) self.verticalLayout_3.addWidget(self.label_26) self.line_3 = QtGui.QFrame(self.groupBox) self.line_3.setFrameShape(QtGui.QFrame.HLine) self.line_3.setFrameShadow(QtGui.QFrame.Sunken) self.line_3.setObjectName(_fromUtf8("line_3")) self.verticalLayout_3.addWidget(self.line_3) self.horizontalLayout_8 = QtGui.QHBoxLayout() self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8")) self.label_27 = QtGui.QLabel(self.groupBox) self.label_27.setObjectName(_fromUtf8("label_27")) self.horizontalLayout_8.addWidget(self.label_27) self.inhibit_charge_slider = QtGui.QSlider(self.groupBox) self.inhibit_charge_slider.setMaximum(120) self.inhibit_charge_slider.setOrientation(QtCore.Qt.Horizontal) self.inhibit_charge_slider.setObjectName(_fromUtf8("inhibit_charge_slider")) self.horizontalLayout_8.addWidget(self.inhibit_charge_slider) self.inhibit_charge_spinbox = QtGui.QSpinBox(self.groupBox) self.inhibit_charge_spinbox.setMaximum(120) self.inhibit_charge_spinbox.setObjectName(_fromUtf8("inhibit_charge_spinbox")) self.horizontalLayout_8.addWidget(self.inhibit_charge_spinbox) self.verticalLayout_3.addLayout(self.horizontalLayout_8) self.label_28 = QtGui.QLabel(self.groupBox) self.label_28.setWordWrap(True) self.label_28.setObjectName(_fromUtf8("label_28")) self.verticalLayout_3.addWidget(self.label_28) self.verticalLayout.addWidget(self.groupBox) spacerItem = QtGui.QSpacerItem(20, 40, 
QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.verticalLayout.addItem(spacerItem) self.line_2 = QtGui.QFrame(self.centralwidget) self.line_2.setFrameShape(QtGui.QFrame.HLine) self.line_2.setFrameShadow(QtGui.QFrame.Sunken) self.line_2.setObjectName(_fromUtf8("line_2")) self.verticalLayout.addWidget(self.line_2) self.horizontalLayout_7 = QtGui.QHBoxLayout() self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7")) self.btn_reload = QtGui.QPushButton(self.centralwidget) self.btn_reload.setObjectName(_fromUtf8("btn_reload")) self.horizontalLayout_7.addWidget(self.btn_reload) self.btn_write = QtGui.QPushButton(self.centralwidget) self.btn_write.setObjectName(_fromUtf8("btn_write")) self.horizontalLayout_7.addWidget(self.btn_write) self.verticalLayout.addLayout(self.horizontalLayout_7) MainWindow.setCentralWidget(self.centralwidget) self.retranslateUi(MainWindow) QtCore.QObject.connect(self.start_charge_slider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.start_charge_spinbox.setValue) QtCore.QObject.connect(self.stop_charge_slider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.stop_charge_spinbox.setValue) QtCore.QObject.connect(self.start_charge_spinbox, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.start_charge_slider.setValue) QtCore.QObject.connect(self.stop_charge_spinbox, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.stop_charge_slider.setValue) QtCore.QObject.connect(self.inhibit_charge_slider, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.inhibit_charge_spinbox.setValue) QtCore.QObject.connect(self.inhibit_charge_spinbox, QtCore.SIGNAL(_fromUtf8("valueChanged(int)")), self.inhibit_charge_slider.setValue) QtCore.QMetaObject.connectSlotsByName(MainWindow) def retranslateUi(self, MainWindow): MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None)) self.label.setText(_translate("MainWindow", "Battery:", None)) self.batteryComboBox.setItemText(0, _translate("MainWindow", "BAT0", None)) self.batteryComboBox.setItemText(1, _translate("MainWindow", "BAT1", None)) self.label_2.setText(_translate("MainWindow", "Battery Installed:", None)) self.installed.setText(_translate("MainWindow", "N/A", None)) self.label_25.setText(_translate("MainWindow", "AC Connected:", None)) self.ac_connected.setText(_translate("MainWindow", "N/A", None)) self.label_3.setText(_translate("MainWindow", "State:", None)) self.state.setText(_translate("MainWindow", "N/A", None)) self.label_4.setText(_translate("MainWindow", "Cycle Count:", None)) self.cycle_count.setText(_translate("MainWindow", "N/A", None)) self.label_5.setText(_translate("MainWindow", "Current Now:", None)) self.current_now.setText(_translate("MainWindow", "N/A", None)) self.label_6.setText(_translate("MainWindow", "Current Avg.:", None)) self.current_avg.setText(_translate("MainWindow", "N/A", None)) self.label_7.setText(_translate("MainWindow", "Power Now:", None)) self.power_now.setText(_translate("MainWindow", "N/A", None)) self.label_8.setText(_translate("MainWindow", "Power Avg.:", None)) self.power_avg.setText(_translate("MainWindow", "N/A", None)) self.label_9.setText(_translate("MainWindow", "Last Full Capacity:", None)) self.last_full_capacity.setText(_translate("MainWindow", "N/A", None)) self.label_10.setText(_translate("MainWindow", "Remaining Percent:", None)) self.remaining_percent.setText(_translate("MainWindow", "N/A", None)) self.label_11.setText(_translate("MainWindow", "Rem. 
Running Time:", None)) self.remaining_running_time.setText(_translate("MainWindow", "N/A", None)) self.label_12.setText(_translate("MainWindow", "Rem. Charge Time:", None)) self.remaining_charge_time.setText(_translate("MainWindow", "N/A", None)) self.label_13.setText(_translate("MainWindow", "Remaining Capacity:", None)) self.remaining_capacity.setText(_translate("MainWindow", "N/A", None)) self.label_24.setText(_translate("MainWindow", "Temperature:", None)) self.label_14.setText(_translate("MainWindow", "Design Capacity:", None)) self.label_22.setText(_translate("MainWindow", "Manufacturing Date:", None)) self.label_20.setText(_translate("MainWindow", "Chemistry:", None)) self.label_17.setText(_translate("MainWindow", "Manufacturer:", None)) self.label_16.setText(_translate("MainWindow", "Design Voltage:", None)) self.label_18.setText(_translate("MainWindow", "Model:", None)) self.label_23.setText(_translate("MainWindow", "First Use Date:", None)) self.label_21.setText(_translate("MainWindow", "Serial:", None)) self.label_19.setText(_translate("MainWindow", "Barcoding:", None)) self.label_15.setText(_translate("MainWindow", "Voltage:", None)) self.design_capacity.setText(_translate("MainWindow", "N/A", None)) self.manufacturing_date.setText(_translate("MainWindow", "N/A", None)) self.chemistry.setText(_translate("MainWindow", "N/A", None)) self.manufacturer.setText(_translate("MainWindow", "N/A", None)) self.design_voltage.setText(_translate("MainWindow", "N/A", None)) self.model.setText(_translate("MainWindow", "N/A", None)) self.first_use_date.setText(_translate("MainWindow", "N/A", None)) self.serial.setText(_translate("MainWindow", "N/A", None)) self.barcoding.setText(_translate("MainWindow", "N/A", None)) self.voltage.setText(_translate("MainWindow", "N/A", None)) self.temperature.setText(_translate("MainWindow", "N/A", None)) self.groupBox.setTitle(_translate("MainWindow", "Charging Options", None)) self.label_49.setText(_translate("MainWindow", "Start Charge Thresh:", None)) self.label_50.setText(_translate("MainWindow", "Stop Charge Thresh:", None)) self.label_26.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'sans-serif\'; font-size:8pt; font-weight:600; color:#252525; background-color:#d5f0d0;\">Hint:</span><span style=\" font-family:\'sans-serif\'; font-size:8pt; color:#000000; background-color:#d5f0d0;\"/><span style=\" font-family:\'sans-serif\'; font-size:8pt; color:#252525;\">Battery charging thresholds can be used to keep Li-Ion and Li-Polymer batteries partially charged, in order to </span><a href=\"http://www.thinkwiki.org/wiki/Maintenance#Battery_treatment\"><span style=\" font-size:8pt; text-decoration: underline; color:#0000ff;\">increase their lifetime</span></a><span style=\" font-family:\'sans-serif\'; font-size:8pt; color:#252525;\">.</span></p></body></html>", None)) self.label_27.setText(_translate("MainWindow", "Inhibit Charge (min.): ", None)) self.label_28.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-family:\'sans-serif\'; font-size:8pt; font-weight:600; color:#252525; background-color:#d5f0d0;\">Hint:</span><span style=\" font-family:\'sans-serif\'; font-size:8pt; color:#252525;\">Charge inhibiting can be used to reduce the power draw of the laptop, in order to use an under-spec power supply that can\'t handle the combined power draw of running and charging. 
It can also be used to control which battery is charged when </span><a href=\"http://www.thinkwiki.org/wiki/How_to_use_UltraBay_batteries\"><span style=\" font-size:8pt; text-decoration: underline; color:#0000ff;\">using an Ultrabay battery</span></a><span style=\" font-family:\'sans-serif\'; font-size:8pt; color:#252525;\">.</span></p></body></html>", None)) self.btn_reload.setText(_translate("MainWindow", "Reload Settings", None)) self.btn_write.setText(_translate("MainWindow", "Write Settings", None))
mit
4,475,475,535,524,859,400
66.358852
825
0.707949
false
brunoabud/ic
gui/vertical_scroll_area.py
1
1965
# coding: utf-8
# Copyright (C) 2016 Bruno Abude Cardoso
#
# Imagem Cinemática is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Imagem Cinemática is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from PyQt4.QtGui import QScrollArea
from PyQt4.QtCore import QSize, QEvent


class VerticalScrollArea(QScrollArea):
    """QScrollArea optimized for a vertical scrolling area."""

    def __init__(self, *args, **kwargs):
        super(VerticalScrollArea, self).__init__(*args, **kwargs)
        self.setMinimumSize(QSize(100, 100))

    def _updateWidth(self):
        total_width = self.widget().minimumSizeHint().width()
        total_width += self.frameWidth()
        total_width += self.verticalScrollBar().sizeHint().width() if self.verticalScrollBar().isVisible() else 0
        self.setMinimumWidth(total_width)
        self.setMaximumWidth(total_width)

    def setWidget(self, widget):
        if self.widget() is not None:
            self.widget().removeEventFilter(self)
        widget.installEventFilter(self)
        super(VerticalScrollArea, self).setWidget(widget)

    def eventFilter(self, obj, event):
        if obj is self.widget() and (event.type() == QEvent.Resize or
                                     event.type() == QEvent.ChildAdded or
                                     event.type() == QEvent.ChildRemoved):
            self._updateWidth()
        return False

    def resizeEvent(self, event):
        self._updateWidth()
        super(VerticalScrollArea, self).resizeEvent(event)
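# A hedged usage sketch, appended for illustration only (not in the original
# file); the `panel` contents are hypothetical. setWidget() installs the event
# filter, so the scroll area keeps its width synced to the panel from then on.
if __name__ == "__main__":
    from PyQt4.QtGui import QApplication, QWidget, QVBoxLayout, QPushButton

    app = QApplication([])
    area = VerticalScrollArea()
    panel = QWidget()
    layout = QVBoxLayout(panel)
    for i in range(30):
        layout.addWidget(QPushButton("Item %d" % i))
    area.setWidget(panel)
    area.show()
    app.exec_()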
gpl-3.0
2,268,670,650,056,277,000
40.765957
113
0.695364
false
explosion/ml-datasets
ml_datasets/loaders/dbpedia.py
1
1090
from pathlib import Path
import csv
import random

from ..util import get_file
from .._registry import register_loader

# DBPedia Ontology from https://course.fast.ai/datasets
DBPEDIA_ONTOLOGY_URL = "https://s3.amazonaws.com/fast-ai-nlp/dbpedia_csv.tgz"


@register_loader("dbpedia")
def dbpedia(loc=None, *, train_limit=0, dev_limit=0):
    if loc is None:
        loc = get_file("dbpedia_csv", DBPEDIA_ONTOLOGY_URL, untar=True, unzip=True)
    train_loc = Path(loc) / "train.csv"
    test_loc = Path(loc) / "test.csv"
    return (
        read_dbpedia_ontology(train_loc, limit=train_limit),
        read_dbpedia_ontology(test_loc, limit=dev_limit),
    )


def read_dbpedia_ontology(data_file, *, limit=0):
    examples = []
    with open(data_file, newline="", encoding="utf-8") as f:
        reader = csv.reader(f)
        for row in reader:
            label = row[0]
            title = row[1]
            text = row[2]
            examples.append((title + "\n" + text, label))
    random.shuffle(examples)
    if limit >= 1:
        examples = examples[:limit]
    return examples
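# A hedged usage sketch (appended, not part of the original loader). It
# assumes the fast.ai mirror above is reachable so `get_file` can fetch and
# unpack the archive on first use.
if __name__ == "__main__":
    train, dev = dbpedia(train_limit=5, dev_limit=5)
    for text, label in train:
        # Each example is ("<title>\n<abstract>", label); show label and title.
        print(label, "->", text.splitlines()[0])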
mit
-6,058,549,363,968,170,000
28.459459
83
0.62844
false
asadoughi/python-neutronclient
neutronclient/neutron/v2_0/vpn/vpnservice.py
1
3371
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Swaminathan Vasudevan, Hewlett-Packard.
#

import logging

from neutronclient.neutron import v2_0 as neutronv20
from neutronclient.openstack.common.gettextutils import _


class ListVPNService(neutronv20.ListCommand):
    """List VPNService configurations that belong to a given tenant."""

    resource = 'vpnservice'
    log = logging.getLogger(__name__ + '.ListVPNService')
    list_columns = ['id', 'name', 'router_id', 'status']
    _formatters = {}
    pagination_support = True
    sorting_support = True


class ShowVPNService(neutronv20.ShowCommand):
    """Show information of a given VPNService."""

    resource = 'vpnservice'
    log = logging.getLogger(__name__ + '.ShowVPNService')


class CreateVPNService(neutronv20.CreateCommand):
    """Create a VPNService."""

    resource = 'vpnservice'
    log = logging.getLogger(__name__ + '.CreateVPNService')

    def add_known_arguments(self, parser):
        parser.add_argument(
            '--admin-state-down',
            dest='admin_state', action='store_false',
            help=_('Set admin state up to false'))
        parser.add_argument(
            '--name',
            help=_('Set a name for the vpnservice'))
        parser.add_argument(
            '--description',
            help=_('Set a description for the vpnservice'))
        parser.add_argument(
            'router', metavar='ROUTER',
            help=_('Router unique identifier for the vpnservice'))
        parser.add_argument(
            'subnet', metavar='SUBNET',
            help=_('Subnet unique identifier for the vpnservice deployment'))

    def args2body(self, parsed_args):
        _subnet_id = neutronv20.find_resourceid_by_name_or_id(
            self.get_client(), 'subnet', parsed_args.subnet)
        _router_id = neutronv20.find_resourceid_by_name_or_id(
            self.get_client(), 'router', parsed_args.router)
        body = {self.resource: {'subnet_id': _subnet_id,
                                'router_id': _router_id,
                                'admin_state_up': parsed_args.admin_state}, }
        neutronv20.update_dict(parsed_args, body[self.resource],
                               ['name', 'description', 'tenant_id'])
        return body


class UpdateVPNService(neutronv20.UpdateCommand):
    """Update a given VPNService."""

    resource = 'vpnservice'
    log = logging.getLogger(__name__ + '.UpdateVPNService')


class DeleteVPNService(neutronv20.DeleteCommand):
    """Delete a given VPNService."""

    resource = 'vpnservice'
    log = logging.getLogger(__name__ + '.DeleteVPNService')
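# A hedged illustration (appended, not part of the original file) of the body
# CreateVPNService.args2body assembles for a call like
# `neutron vpn-service-create --name svc1 ROUTER SUBNET`. The UUIDs are
# hypothetical placeholders returned by find_resourceid_by_name_or_id;
# update_dict then copies name/description/tenant_id when supplied:
#
#     {"vpnservice": {"subnet_id": "a3f0...",
#                     "router_id": "9c1d...",
#                     "admin_state_up": True,
#                     "name": "svc1"}}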
apache-2.0
8,309,112,606,520,467,000
33.050505
78
0.627707
false
Brian151/OpenShockwave
tools/imports/shockabsorber/model/cast.py
1
1698
class CastLibraryTable:  #------------------------------
    def __init__(self, castlibs):
        self.by_nr = {}
        self.by_assoc_id = {}
        for cl in castlibs:
            self.by_nr[cl.nr] = cl
            if cl.assoc_id > 0:
                self.by_assoc_id[cl.assoc_id] = cl

    def iter_by_nr(self):
        return self.by_nr.itervalues()

    def get_cast_library(self, lib_nr):
        return self.by_nr[lib_nr]

    def get_cast_member(self, lib_nr, member_nr):
        # .get() so a missing library yields None instead of raising KeyError,
        # which is what the None check below expects.
        cast_lib = self.by_nr.get(lib_nr)
        return cast_lib.get_cast_member(member_nr) if cast_lib is not None else None
#--------------------------------------------------

class CastLibrary:  #------------------------------
    def __init__(self, nr, name, path, assoc_id, idx_range, self_idx):
        self.nr = nr
        self.name = name
        self.path = path
        self.assoc_id = assoc_id
        self.idx_range = idx_range
        self.self_idx = self_idx
        self.castmember_table = None

    def __repr__(self):
        return "<CastLibrary #%d name=\"%s\" size=%d>" % (
            self.nr, self.name,
            len(self.castmember_table) if self.castmember_table is not None else -1)

    def get_path(self):
        return self.path

    def castmember_table_is_set(self):
        return self.castmember_table is not None

    def get_castmember_table(self):
        return self.castmember_table

    def set_castmember_table(self, table):
        self.castmember_table = table

    def get_cast_member(self, member_nr):
        if self.castmember_table is None:
            return None  # TODO: Ensure loaded
        return self.castmember_table[member_nr - 1]
#--------------------------------------------------
apache-2.0
846,849,474,389,359,700
36.733333
126
0.526502
false
praekelt/vumi-twilio-api
vxtwinio/twilio_api.py
1
26195
from datetime import datetime from dateutil.tz import tzutc import json from klein import Klein from math import ceil import os import re import treq from twisted.internet.defer import inlineCallbacks, returnValue import uuid from vumi.application import ApplicationWorker from vumi.components.session import SessionManager from vumi.config import ConfigDict, ConfigInt, ConfigText from vumi.message import TransportUserMessage from vumi.persist.txredis_manager import TxRedisManager import xml.etree.ElementTree as ET from vxtwinio.twiml_parser import TwiMLParser c2s = re.compile('(?!^)([A-Z+])') def camel_to_snake(string): return c2s.sub(r'_\1', string).lower() def convert_dict_keys(dct): res = {} for key, value in dct.iteritems(): if isinstance(value, dict): res[camel_to_snake(key)] = convert_dict_keys(value) else: res[camel_to_snake(key)] = value return res class SessionIDLookup(object): def __init__(self, redis_manager, expiry_time, namespace): self._redis_manager = redis_manager self._namespace = namespace self._expiry_time = expiry_time def _get_key(self, message_id): return "%s:%s" % (self._namespace, message_id) def set_id(self, message_id, address): return self._redis_manager.setex( self._get_key(message_id), self._expiry_time, address) def get_address(self, message_id): return self._redis_manager.get(self._get_key(message_id)) def delete_id(self, message_id): return self._redis_manager.delete(self._get_key(message_id)) class TwilioAPIConfig(ApplicationWorker.CONFIG_CLASS): """Config for the Twilio API worker""" web_path = ConfigText( "The path the worker should expose the API on", required=True, static=True) web_port = ConfigInt( "The port the worker should open for the API", required=True, static=True) api_version = ConfigText( "The version of the API, used in the api URL", default="2010-04-01", static=True) redis_manager = ConfigDict("Redis config.", required=True, static=True) redis_timeout = ConfigInt( "Expiry time in seconds for redis keys", default=3600, static=True) session_lookup_namespace = ConfigText( "The redis namespace to use for storing session ID lookups", default="session_id", static=True) client_path = ConfigText( "The web path that the API worker should send requests to", required=True) client_method = ConfigText( "The HTTP method that the API worker uses when sending requests", default='POST') status_callback_path = ConfigText( "The web path that the API sends a request to when the call ends", default=None) status_callback_method = ConfigText( "The HTTP method to use when sending the callback status", default='POST') class TwilioAPIWorker(ApplicationWorker): """Emulates the Twilio API to use vumi as if it was Twilio""" CONFIG_CLASS = TwilioAPIConfig @inlineCallbacks def setup_application(self): """Application specific setup""" self.app_config = self.get_static_config() self.server = TwilioAPIServer(self, self.app_config.api_version) path = os.path.join( self.app_config.web_path, self.app_config.api_version) self.webserver = self.start_web_resources([ (self.server.app.resource(), path)], self.app_config.web_port) redis = yield TxRedisManager.from_config(self.app_config.redis_manager) self.session_manager = SessionManager( redis, self.app_config.redis_timeout) self.session_lookup = SessionIDLookup( redis, self.app_config.redis_timeout, self.app_config.session_lookup_namespace) @inlineCallbacks def teardown_application(self): """Clean-up of setup done in `setup_application`""" yield self.webserver.loseConnection() yield self.session_manager.stop() def 
_http_request(self, url='', method='GET', data={}): return treq.request(method, url, persistent=False, data=data) def _request_data_from_session(self, session): return { 'CallSid': session['CallId'], 'AccountSid': session['AccountSid'], 'From': session['From'], 'To': session['To'], 'CallStatus': session['Status'], 'ApiVersion': self.app_config.api_version, 'Direction': session['Direction'], } @inlineCallbacks def _get_twiml_from_client(self, session, data=None): if data is None: data = self._request_data_from_session(session) twiml_raw = yield self._http_request( session['Url'], session['Method'], data) if twiml_raw.code < 200 or twiml_raw.code >= 300: twiml_raw = yield self._http_request( session['FallbackUrl'], session['FallbackMethod'], data) twiml_raw = yield twiml_raw.content() twiml_parser = TwiMLParser(session['Url']) returnValue(twiml_parser.parse(twiml_raw)) @inlineCallbacks def _handle_connected_call( self, session_id, session, status='in-progress', twiml=None): # TODO: Support sending ForwardedFrom parameter # TODO: Support sending CallerName parameter # TODO: Support sending geographic data parameters session['Status'] = status self.session_manager.save_session(session_id, session) if twiml is None: twiml = yield self._get_twiml_from_client(session) for verb in twiml: if verb.name == "Play": # TODO: Support loop and digit attributes yield self._send_message(verb.nouns[0], session) elif verb.name == "Hangup": yield self._send_message( None, session, TransportUserMessage.SESSION_CLOSE) yield self.session_manager.clear_session(session_id) break elif verb.name == "Gather": # TODO: Support timeout and numDigits attributes msgs = [] for subverb in verb.nouns: # TODO: Support Say and Pause subverbs if subverb.name == "Play": msgs.append({'speech_url': subverb.nouns[0]}) session['Gather_Action'] = verb.attributes['action'] session['Gather_Method'] = verb.attributes['method'] yield self.session_manager.save_session(session_id, session) if len(msgs) == 0: msgs.append({'speech_url': None}) msgs[-1]['wait_for'] = verb.attributes['finishOnKey'] for msg in msgs: yield self._send_message( msg['speech_url'], session, wait_for=msg.get('wait_for')) break def _send_message(self, url, session, session_event=None, wait_for=None): helper_metadata = {'voice': {}} if url is not None: helper_metadata['voice']['speech_url'] = url if wait_for is not None: helper_metadata['voice']['wait_for'] = wait_for return self.send_to( session['To'], None, from_addr=session['From'], session_event=session_event, to_addr_type=TransportUserMessage.AT_MSISDN, from_addr_type=TransportUserMessage.AT_MSISDN, helper_metadata=helper_metadata) @inlineCallbacks def consume_user_message(self, message): # At the moment there is no way to determine whether or not a message # is the result of a wait_for or just a single digit, so if the Gather # data exists inside the current session data, then we assume that it # is the result of a Gather # TODO: Fix this session = yield self.session_manager.load_session(message['from_addr']) if session.get('Gather_Action') and session.get('Gather_Method'): data = self._request_data_from_session(session) data['Digits'] = message['content'] twiml = yield self._get_twiml_from_client({ 'Url': session['Gather_Action'], 'Method': session['Gather_Method'], 'Fallback_Url': None, 'Fallback_Method': None, }, data=data) yield self._handle_connected_call( message['from_addr'], session, twiml=twiml) @inlineCallbacks def consume_ack(self, event): message_id = event['user_message_id'] session_id = yield 
self.session_lookup.get_address(message_id) yield self.session_lookup.delete_id(message_id) session = yield self.session_manager.load_session(session_id) if session['Status'] == 'queued': yield self._handle_connected_call(session_id, session) @inlineCallbacks def consume_nack(self, event): message_id = event['user_message_id'] session_id = yield self.session_lookup.get_address(message_id) yield self.session_lookup.delete_id(message_id) session = yield self.session_manager.load_session(session_id) if session['Status'] == 'queued': yield self._handle_connected_call( session_id, session, status='failed') @inlineCallbacks def new_session(self, message): yield self.session_lookup.set_id( message['message_id'], message['from_addr']) config = yield self.get_config(message) session = { 'CallId': self.server._get_sid(), 'AccountSid': self.server._get_sid(), 'From': message['from_addr'], 'To': message['to_addr'], 'Status': 'in-progress', 'Direction': 'inbound', 'Url': config.client_path, 'Method': config.client_method, 'StatusCallback': config.status_callback_path, 'StatusCallbackMethod': config.status_callback_method, } yield self.session_manager.create_session( message['from_addr'], **session) twiml = yield self._get_twiml_from_client(session) for verb in twiml: if verb.name == "Play": yield self.reply_to(message, None, helper_metadata={ 'voice': { 'speech_url': verb.nouns[0], } }) elif verb.name == "Hangup": yield self.reply_to( message, None, session_event=TransportUserMessage.SESSION_CLOSE) yield self.session_manager.clear_session(message['from_addr']) break elif verb.name == "Gather": # TODO: Support timeout and numDigits attributes msgs = [] for subverb in verb.nouns: # TODO: Support Say and Pause subverbs if subverb.name == "Play": msgs.append({'speech_url': subverb.nouns[0]}) session['Gather_Action'] = verb.attributes['action'] session['Gather_Method'] = verb.attributes['method'] yield self.session_manager.save_session( message['from_addr'], session) if len(msgs) == 0: msgs.append({'speech_url': None}) msgs[-1]['wait_for'] = verb.attributes['finishOnKey'] for msg in msgs: yield self.reply_to(message, None, helper_metadata={ 'voice': { 'speech_url': msg.get('speech_url'), 'wait_for': msg.get('wait_for'), }}) break @inlineCallbacks def close_session(self, message): # TODO: Implement call duration parameters # TODO: Implement recording parameters session = yield self.session_manager.load_session(message['from_addr']) yield self.session_manager.clear_session(message['from_addr']) url = session.get('StatusCallback') if url and url != 'None': session['Status'] = 'completed' data = self._request_data_from_session(session) yield self._http_request( session['StatusCallback'], session['StatusCallbackMethod'], data) class TwilioAPIUsageException(Exception): """Called when in incorrect query is sent to the API""" def __init__(self, message, format_='xml'): super(TwilioAPIUsageException, self).__init__(message) self.format_ = format_ class Response(object): """Base Response object used for HTTP responses""" name = 'Response' def __init__(self, **kw): self._data = kw @property def xml(self): response = ET.Element("TwilioResponse") root = ET.SubElement(response, self.name) def format_xml_rec(dct, root): for key, value in dct.iteritems(): if isinstance(value, dict): sub = ET.SubElement(root, key) format_xml_rec(value, sub) else: sub = ET.SubElement(root, key) sub.text = value return root format_xml_rec(self._data, root) return response def format_xml(self): return ET.tostring(self.xml) @property def 
dictionary(self): return convert_dict_keys(self._data) def format_json(self): return json.dumps(self.dictionary) @property def sid(self): return self._data.get("Sid") class ListResponse(object): """Used for responding to API requests with a paginated list""" name = 'ListResponse' def __init__(self, items): """ :param int pagesize: The number of elements in each returned page :param list items: A list of Response items to be returned """ self.items = sorted(items, key=lambda k: k.sid) def _get_page_attributes(self, uri, page, pagesize, aftersid): pagesize = min(pagesize, 1000) numpages = int(ceil(len(self.items) * 1.0 / pagesize)) or 1 if aftersid is not None: start = ( n for n, i in enumerate(self.items) if i.sid > aftersid).next() page = int(start/pagesize) else: start = page * pagesize page_items = self.items[start:start + pagesize] base_uri = uri.split('?')[0] if len(page_items) < pagesize: nextpageuri = None else: nextpageuri = "%s?Page=%s&PageSize=%s&AfterSid=%s" % ( base_uri, page + 1, pagesize, page_items[-1].sid) if page == 0: prevpageuri = None else: prevpageuri = "%s?Page=%s&PageSize=%s" % ( base_uri, page-1, pagesize) last = numpages - 1 attributes = { 'page': page, 'num_pages': numpages, 'page_size': pagesize, 'total': len(self.items), 'start': start, 'end': start + len(page_items), 'uri': uri, 'first_page_uri': '%s?Page=%s&PageSize=%s' % ( base_uri, page, pagesize), 'next_page_uri': nextpageuri, 'previous_page_uri': prevpageuri, 'last_page_uri': '%s?Page=%s&PageSize=%s' % ( base_uri, last, pagesize), } return (attributes, page_items) def _format_attributes_for_xml(self, dic): """XML attributes must be strings""" ret = {} for key, value in dic.iteritems(): if value is None: value = '' ret[key.replace('_', '')] = str(value) return ret def format_xml(self, uri, page=0, pagesize=50, aftersid=None): response = ET.Element("TwilioResponse") root = ET.SubElement(response, self.name) root.attrib, page_items = self._get_page_attributes( uri, page, pagesize, aftersid) root.attrib = self._format_attributes_for_xml(root.attrib) for obj in page_items: [item] = obj.xml root.append(item) return ET.tostring(response) def format_json(self, uri, page=0, pagesize=50, aftersid=None): attrib, page_items = self._get_page_attributes( uri, page, pagesize, aftersid) page_items = [item.dictionary for item in page_items] attrib[camel_to_snake(self.name)] = page_items return json.dumps(attrib) class Applications(ListResponse): """Used for responding with a list of Applications for the Applications resource""" name = 'Applications' def __init__(self, url, applications): self.url = url super(Applications, self).__init__(applications) def format_xml(self): return super(Applications, self).format_xml(self.url) def format_json(self): return super(Applications, self).format_json(self.url) class Application(Response): """A single Application object""" name = 'Application' class Error(Response): """Error HTTP response object, returned for incorred API queries""" name = 'Error' def __init__(self, error_type, error_message): super(Error, self).__init__( error_type=error_type, error_message=error_message) @classmethod def from_exception(cls, exception): return cls(exception.__class__.__name__, exception.message) class Version(Response): """Version HTTP response object, returned for root resource""" name = 'Version' def __init__(self, name, uri, **kwargs): super(Version, self).__init__( Name=name, Uri=uri, SubresourceUris=kwargs) class Call(Response): """Call HTTP response object, returned for the Calls resource""" 
name = 'Call' class TwilioAPIServer(object): app = Klein() def __init__(self, vumi_worker, version): self.vumi_worker = vumi_worker self.version = version def _format_response(self, request, response, format_): format_ = str(format_.lstrip('.').lower()) or 'xml' func = getattr( response, 'format_' + format_, None) if not func: raise TwilioAPIUsageException( '%r is not a valid request format' % format_) request.setHeader('Content-Type', 'application/%s' % format_) return func() @app.handle_errors(TwilioAPIUsageException) def usage_exception(self, request, failure): request.setResponseCode(400) return self._format_response( request, Error.from_exception(failure.value), failure.value.format_) @app.route('/', defaults={'format_': ''}, methods=['GET']) @app.route('/<string:format_>', methods=['GET']) def root(self, request, format_): version = Version( self.version, '/%s%s' % (self.version, format_), Accounts='/%s/Accounts%s' % (self.version, format_)) return self._format_response(request, version, format_) @app.route( '/Accounts/<string:account_sid>/Applications', defaults={'format_': ''}, methods=['GET']) @app.route( '/Accounts/<string:account_sid>/Applications<string:format_>', methods=['GET']) def get_applications(self, request, account_sid, format_): application = Application( # Application sid the same as Account sid to ensure consistency # between calls. Sid=account_sid, DateCreated=self._get_timestamp(), DateUpdated=self._get_timestamp(), AccountSid=account_sid, FriendlyName=self._get_field(request, 'FriendlyName'), ApiVersion=self.version, VoiceUrl=None, VoiceMethod='POST', VoiceFallbackUrl=None, VoiceFallbackMethod='POST', StatusCallback=None, StatusCallbackMethod=None, VoiceCallerIdLookup=False, SmsUrl=None, SmsMethod='POST', SmsFallbackUrl=None, SmsFallbackMethod='POST', SmsStatusCallback=None, Uri='/Accounts/%s/Applications/%s%s' % ( account_sid, account_sid, format_)) applications = Applications(request.uri, [application]) return self._format_response(request, applications, format_) @app.route( '/Accounts/<string:account_sid>/Calls', defaults={'format_': ''}, methods=['POST']) @app.route( '/Accounts/<string:account_sid>/Calls<string:format_>', methods=['POST']) @inlineCallbacks def make_call(self, request, account_sid, format_): """Making calls endpoint https://www.twilio.com/docs/api/rest/making-calls""" # TODO: Support ApplicationSid field # TODO: Support SendDigits field # TODO: Support IfMachine field # TODO: Support Timeout field # TODO: Support Record field fields = self._validate_make_call_fields(request, format_) fields['AccountSid'] = account_sid fields['CallId'] = self._get_sid() fields['DateCreated'] = self._get_timestamp() fields['Uri'] = '/%s/Accounts/%s/Calls/%s' % ( self.version, account_sid, fields['CallId']) fields['Status'] = 'queued' fields['Direction'] = 'outbound-api' message = yield self.vumi_worker.send_to( fields['To'], '', from_addr=fields['From'], session_event=TransportUserMessage.SESSION_NEW, to_addr_type=TransportUserMessage.AT_MSISDN, from_addr_type=TransportUserMessage.AT_MSISDN ) yield self.vumi_worker.session_lookup.set_id( message['message_id'], message['to_addr']) yield self.vumi_worker.session_manager.create_session( message['to_addr'], **fields) returnValue(self._format_response(request, Call( **{ 'Sid': fields['CallId'], 'DateCreated': fields['DateCreated'], 'DateUpdated': fields['DateCreated'], 'ParentCallSid': None, 'AccountSid': account_sid, 'To': fields['To'], 'FormattedTo': fields['To'], 'From': fields['From'], 'FormattedFrom': 
fields['From'], 'PhoneNumberSid': None, 'Status': fields['Status'], 'StartTime': None, 'EndTime': None, 'Duration': None, 'Price': None, 'Direction': fields['Direction'], 'AnsweredBy': None, 'ApiVersion': self.version, 'ForwardedFrom': None, 'CallerName': None, 'Uri': '%s%s' % (fields['Uri'], format_), 'SubresourceUris': { 'Notifications': '%s/Notifications%s' % ( fields['Uri'], format_), 'Recordings': '%s/Recordings%s' % (fields['Uri'], format_), } }), format_)) def _get_sid(self): return str(uuid.uuid4()).replace('-', '') def _get_timestamp(self): return datetime.now(tzutc()).strftime('%a, %d %b %Y %H:%M:%S %z') def _get_field(self, request, field, default=None): return request.args.get(field, [default])[0] def _validate_make_call_required_fields(self, request, format_): """Validates the required fields as detailed by https://www.twilio.com/docs/api/rest/making-calls#post-parameters-required """ fields = {} for field in ['From', 'To', 'Url', 'ApplicationSid']: fields[field] = self._get_field(request, field) for field in ['From', 'To']: if not fields[field]: raise TwilioAPIUsageException( 'Required field %r not supplied' % field, format_) if not (fields['Url'] or fields['ApplicationSid']): raise TwilioAPIUsageException( "Request must have an 'Url' or an 'ApplicationSid' field", format_) return fields def _validate_make_call_optional_fields(self, request, format_): """Validates the required fields as detailed by https://www.twilio.com/docs/api/rest/making-calls#post-parameters-optional """ fields = {} for field, default in [ ('Method', 'POST'), ('FallbackMethod', 'POST'), ('StatusCallbackMethod', 'POST'), ('Timeout', 60), ('Record', False)]: fields[field] = self._get_field(request, field, default) for field in [ 'FallbackUrl', 'StatusCallback', 'SendDigits', 'IfMachine']: fields[field] = self._get_field(request, field) if fields['SendDigits']: if not all(re.match('[0-9#*w]', c) for c in fields['SendDigits']): raise TwilioAPIUsageException( "SendDigits value %r is not valid. May only contain the " "characters (0-9), '#', '*' and 'w'" % ( fields['SendDigits']), format_) valid_fields_IfMachine = [None, 'Continue', 'Hangup'] if fields['IfMachine'] not in valid_fields_IfMachine: raise TwilioAPIUsageException( "IfMachine value must be one of %r" % valid_fields_IfMachine, format_) return fields def _validate_make_call_fields(self, request, format_): """Validates the fields sent to the request according to https://www.twilio.com/docs/api/rest/making-calls""" fields = self._validate_make_call_required_fields(request, format_) fields.update( self._validate_make_call_optional_fields(request, format_)) return fields
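# A hedged illustration (appended, not in the original file) of the
# `camel_to_snake` helper defined at the top of this module: the regex
# '(?!^)([A-Z+])' prefixes every capital (or '+') after the first character
# with '_' before the result is lowercased, and `convert_dict_keys` applies
# it recursively.
if __name__ == "__main__":
    assert camel_to_snake("CallSid") == "call_sid"
    assert camel_to_snake("StatusCallbackMethod") == "status_callback_method"
    assert convert_dict_keys(
        {"CallSid": "123", "SubresourceUris": {"Recordings": "/r"}}) == \
        {"call_sid": "123", "subresource_uris": {"recordings": "/r"}}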
bsd-3-clause
7,689,144,784,915,018,000
37.296784
82
0.574003
false
rande/python-element
element/plugins/seo/seo.py
1
3479
#
# Copyright 2014 Thomas Rabaix <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import element.node
from ioc.helper import deepcopy


class SeoPage(object):
    def __init__(self, title_pattern="%s", metas=None, keywords=None):
        self.title_pattern = title_pattern
        self.metas = metas or {}
        self.keywords = keywords or []


class SeoListener(object):
    def __init__(self, seo_page):
        self.seo_page = seo_page

    def listener(self, event):
        """Listen to the element.seo.headers event and return a node with SEO
        information only.

        The subject should be a NodeContext object.
        """
        if not event.has('subject'):
            return

        node = element.node.Node('seo://%s' % event.get('subject').id, {
            'type': 'seo.headers',
            'seo': self.build_seo(event.get('subject')),
        })

        event.set('node', node)

    def build_seo(self, context):
        """Build the SEO information from the provided context."""
        seo = {
            'title': None,
            'metas': {}
        }

        self.configure_title(context, seo)
        self.configure_metas(context, seo)

        return seo

    def get_title(self, title):
        return self.seo_page.title_pattern % title

    def configure_title(self, context, seo):
        if 'seo' in context.settings and 'title' in context.settings['seo']:
            seo['title'] = self.get_title(context.settings['seo']['title'])
            return

        for field in ['title', 'name']:
            if context[field]:
                seo['title'] = self.get_title(context[field])
                return

        # no title defined!
        seo['title'] = self.get_title(u"\u2605")

    def configure_metas(self, context, seo):
        if 'seo' not in context.settings or 'metas' not in context.settings['seo']:
            seo['metas'] = deepcopy(self.seo_page.metas)
            return

        seo['metas'] = deepcopy(context.settings['seo']['metas'])

        for pname, pmetas in deepcopy(self.seo_page.metas).iteritems():
            if pname not in seo['metas']:
                seo['metas'][pname] = pmetas
                continue

            # merge values: page-level defaults fill in any missing metas
            for mname, mvalue in pmetas.iteritems():
                if mname not in seo['metas'][pname]:
                    seo['metas'][pname][mname] = mvalue


class SeoHandler(element.node.NodeHandler):
    def __init__(self, templating):
        self.templating = templating

    def get_defaults(self, node):
        return {
            'template': 'element.plugins.seo:headers.html'
        }

    def get_name(self):
        return 'Seo'

    def execute(self, request_handler, context):
        return self.render(request_handler, self.templating, context.settings['template'], {
            'context': context,
            'seo': context.seo
        })
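# A hedged sketch (appended, not part of the original plugin) of the title
# pattern behaviour; "My Site" is a hypothetical pattern value.
if __name__ == "__main__":
    listener = SeoListener(SeoPage(title_pattern="%s - My Site"))
    assert listener.get_title("Home") == "Home - My Site"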
apache-2.0
-5,339,116,559,207,831,000
30.342342
92
0.5904
false
Kongsea/tensorflow
tensorflow/contrib/model_pruning/python/pruning_test.py
1
6703
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for the key functions in pruning library.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.contrib.model_pruning.python import pruning from tensorflow.python.ops import math_ops from tensorflow.python.ops import partitioned_variables from tensorflow.python.ops import random_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.training import training_util class PruningHParamsTest(test.TestCase): PARAM_LIST = [ "name=test", "threshold_decay=0.9", "pruning_frequency=10", "do_not_prune=[conv1,conv2]", "sparsity_function_end_step=100", "target_sparsity=0.9" ] TEST_HPARAMS = ",".join(PARAM_LIST) def setUp(self): super(PruningHParamsTest, self).setUp() # Add global step variable to the graph self.global_step = training_util.get_or_create_global_step() # Add sparsity self.sparsity = variables.Variable(0.5, name="sparsity") # Parse hparams self.pruning_hparams = pruning.get_pruning_hparams().parse( self.TEST_HPARAMS) def testInit(self): p = pruning.Pruning(self.pruning_hparams) self.assertEqual(p._spec.name, "test") self.assertAlmostEqual(p._spec.threshold_decay, 0.9) self.assertEqual(p._spec.pruning_frequency, 10) self.assertAllEqual(p._spec.do_not_prune, ["conv1", "conv2"]) self.assertEqual(p._spec.sparsity_function_end_step, 100) self.assertAlmostEqual(p._spec.target_sparsity, 0.9) def testInitWithExternalSparsity(self): with self.test_session(): p = pruning.Pruning(spec=self.pruning_hparams, sparsity=self.sparsity) variables.global_variables_initializer().run() sparsity = p._sparsity.eval() self.assertAlmostEqual(sparsity, 0.5) def testInitWithVariableReuse(self): with self.test_session(): p = pruning.Pruning(spec=self.pruning_hparams, sparsity=self.sparsity) p_copy = pruning.Pruning( spec=self.pruning_hparams, sparsity=self.sparsity) variables.global_variables_initializer().run() sparsity = p._sparsity.eval() self.assertAlmostEqual(sparsity, 0.5) self.assertEqual(p._sparsity.eval(), p_copy._sparsity.eval()) class PruningTest(test.TestCase): def setUp(self): super(PruningTest, self).setUp() self.global_step = training_util.get_or_create_global_step() def testCreateMask2D(self): width = 10 height = 20 with self.test_session(): weights = variables.Variable( random_ops.random_normal([width, height], stddev=1), name="weights") masked_weights = pruning.apply_mask(weights, variable_scope.get_variable_scope()) variables.global_variables_initializer().run() weights_val = weights.eval() masked_weights_val = masked_weights.eval() self.assertAllEqual(weights_val, masked_weights_val) def testUpdateSingleMask(self): with self.test_session() as session: weights = 
variables.Variable(
          math_ops.linspace(1.0, 100.0, 100), name="weights")
      masked_weights = pruning.apply_mask(weights)
      sparsity = variables.Variable(0.5, name="sparsity")
      p = pruning.Pruning(sparsity=sparsity)
      p._spec.threshold_decay = 0.0
      mask_update_op = p.mask_update_op()
      variables.global_variables_initializer().run()
      masked_weights_val = masked_weights.eval()
      self.assertAllEqual(np.count_nonzero(masked_weights_val), 100)
      session.run(mask_update_op)
      masked_weights_val = masked_weights.eval()
      self.assertAllEqual(np.count_nonzero(masked_weights_val), 51)

  def testPartitionedVariableMasking(self):
    partitioner = partitioned_variables.variable_axis_size_partitioner(40)
    with self.test_session() as session:
      with variable_scope.variable_scope("", partitioner=partitioner):
        sparsity = variables.Variable(0.5, name="Sparsity")
        weights = variable_scope.get_variable(
            "weights", initializer=math_ops.linspace(1.0, 100.0, 100))
        masked_weights = pruning.apply_mask(
            weights, scope=variable_scope.get_variable_scope())
      p = pruning.Pruning(sparsity=sparsity)
      p._spec.threshold_decay = 0.0
      mask_update_op = p.mask_update_op()
      variables.global_variables_initializer().run()
      masked_weights_val = masked_weights.eval()
      session.run(mask_update_op)
      masked_weights_val = masked_weights.eval()
      self.assertAllEqual(np.count_nonzero(masked_weights_val), 51)

  def testConditionalMaskUpdate(self):
    param_list = [
        "pruning_frequency=2", "begin_pruning_step=1", "end_pruning_step=6"
    ]
    test_spec = ",".join(param_list)
    pruning_hparams = pruning.get_pruning_hparams().parse(test_spec)
    weights = variables.Variable(
        math_ops.linspace(1.0, 100.0, 100), name="weights")
    masked_weights = pruning.apply_mask(weights)
    sparsity = variables.Variable(0.00, name="sparsity")
    # Set up pruning
    p = pruning.Pruning(pruning_hparams, sparsity=sparsity)
    p._spec.threshold_decay = 0.0
    mask_update_op = p.conditional_mask_update_op()
    sparsity_val = math_ops.linspace(0.0, 0.9, 10)
    increment_global_step = state_ops.assign_add(self.global_step, 1)
    non_zero_count = []
    with self.test_session() as session:
      variables.global_variables_initializer().run()
      for i in range(10):
        session.run(state_ops.assign(sparsity, sparsity_val[i]))
        session.run(mask_update_op)
        session.run(increment_global_step)
        non_zero_count.append(np.count_nonzero(masked_weights.eval()))
    # Weights pruned at steps 0, 2, 4, and 6
    expected_non_zero_count = [100, 100, 80, 80, 60, 60, 40, 40, 40, 40]
    self.assertAllEqual(expected_non_zero_count, non_zero_count)


if __name__ == "__main__":
  test.main()
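# Hedged usage sketch (not part of the original tests): a rough outline of how
# apply_mask() and Pruning are typically wired into a training graph. The layer
# shape and the hparams string below are illustrative assumptions.
def _example_masked_layer():
  weights = variables.Variable(
      random_ops.random_normal([128, 64], stddev=0.1), name="example_weights")
  masked = pruning.apply_mask(weights)  # adds mask and threshold variables
  hparams = pruning.get_pruning_hparams().parse("target_sparsity=0.5")
  p = pruning.Pruning(
      hparams, global_step=training_util.get_or_create_global_step())
  # Run the returned op alongside the train op to keep the mask updated.
  return masked, p.conditional_mask_update_op()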
apache-2.0
-7,917,016,903,297,070,000
40.376543
80
0.687453
false
praekelt/go-contacts-api
go_contacts/handlers/contacts_for_group.py
1
1590
from cyclone.web import HTTPError from go_api.cyclone.handlers import BaseHandler from go_api.collections.errors import ( CollectionUsageError, CollectionObjectNotFound) from twisted.internet.defer import maybeDeferred class ContactsForGroupHandler(BaseHandler): """ Handler for getting all contacts for a group Methods supported: * ``GET /:group_id/contacts`` - retrieve all contacts of a group. """ route_suffix = ":group_id/contacts" model_alias = "collection" def get(self, group_id): query = self.get_argument('query', default=None) stream = self.get_argument('stream', default='false') if stream == 'true': d = maybeDeferred(self.collection.stream, group_id, query) d.addCallback(self.write_queue) else: cursor = self.get_argument('cursor', default=None) max_results = self.get_argument('max_results', default=None) try: max_results = max_results and int(max_results) except ValueError: raise HTTPError(400, "max_results must be an integer") d = maybeDeferred( self.collection.page, group_id, cursor=cursor, max_results=max_results, query=query) d.addCallback(self.write_page) d.addErrback(self.catch_err, 404, CollectionObjectNotFound) d.addErrback(self.catch_err, 400, CollectionUsageError) d.addErrback( self.raise_err, 500, "Failed to retrieve contacts for group %r." % group_id) return d
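# Hedged usage sketch (not part of the original handler): example requests this
# route is meant to serve; the group id and cursor values below are made up.
#
#   GET /<group_id>/contacts?max_results=50           -> first page of contacts
#   GET /<group_id>/contacts?cursor=abc123&query=...  -> subsequent page
#   GET /<group_id>/contacts?stream=true              -> stream all contacts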
bsd-3-clause
5,880,230,675,900,789,000
35.976744
72
0.633962
false
sdague/gatemine
gatemine/results.py
1
1881
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. DEFAULT_PAGE_SIZE = 100 class ESQuery(object): """An encapsulation of an elastic search query""" query = "" pagesize = DEFAULT_PAGE_SIZE page = 0 def __init__(self, query, size=DEFAULT_PAGE_SIZE): self.query = query self.pagesize = size class ResultSet(object): """A container for results""" results = [] pagesize = DEFAULT_PAGE_SIZE page = 0 took = 0 timed_out = False size = 0 def __init__(self, data): self.results = [] self._parse(data) def _parse(self, data): self.took = data['took'] self.timed_out = data['timed_out'] self.size = data['hits']['total'] for r in data['hits']['hits']: self.results.append(Result(r)) def next(self, pagesize=None): """Eventually used to load the next page of results""" pass def __iter__(self): return iter(self.results) class Result(object): """A single log stash result""" def __init__(self, data): self._data = data def __str__(self): return str(self._data)
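# Hedged usage sketch (not part of the original module): the dict below mimics
# the subset of an elasticsearch/logstash response that ResultSet._parse reads
# ('took', 'timed_out' and 'hits'); the field values are made up.
if __name__ == '__main__':
    sample_response = {
        'took': 12,
        'timed_out': False,
        'hits': {
            'total': 2,
            'hits': [
                {'_source': {'message': 'first log line'}},
                {'_source': {'message': 'second log line'}},
            ],
        },
    }
    result_set = ResultSet(sample_response)
    print('%d hits in %dms' % (result_set.size, result_set.took))
    for result in result_set:
        print(result)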
apache-2.0
-6,920,318,627,922,871,000
26.26087
78
0.636364
false
nealp9084/hw3
negotiator_framework.py
1
6109
from csv import DictReader import csv from sys import argv, exit # from negotiator import Negotiator # from door_in_face_negotiator import DoorInFaceNegotiator # from greedy_negotiator import GreedyNegotiator # from door_in_face_dummy import DIFDummyNegotiator # from negotiator_a import Negotiator_A from random import seed, randint from nap7jz import Negotiator as Nap7jz # read_scenario(parameterfile_name : String) --> (int, list(dict)) # Utility function to read in a single scenario from a csv file # Expects a single int on the first line, specifying the iteration limit, # and then an arbitrary number of rows of three comma-separated columns, # specifying the name of each item, its rank (where 1 is best) for negotiator A, # and the same for negotiator B def read_scenario(parameterfile_name): # Open the file for reading with open(parameterfile_name, 'r') as parameterfile: # Consume the first line, getting the iteration limit number_iterations = parameterfile.readline() return ( int(number_iterations), # Use Python's builtin CSV reader to read the rest of the file as specified list(DictReader(parameterfile, fieldnames=["item_name", "negotiator_a", "negotiator_b"])) ) # negotiate(num_iterations : Int, negotiator_a : BaseNegotiator, negotiator_b : BaseNegotiator) --> (Boolean, list(String), Int) # The main negotiation function, responsible for running a single scenario & coordinating interactions between the two # negotiators. def negotiate(num_iterations, negotiator_a, negotiator_b): # Get the initial offer from negotiator a - we pass in None to signify that no previous opposing offers have been made (offer_a, offer_b) = (negotiator_a.make_offer(None), None) # We scale the reported utility by a random factor a_scale = randint(1, 11) b_scale = randint(1, 11) #print("scales are %f %f" % (a_scale, b_scale)) # Keep trading offers until we reach an agreement or the iteration limit, whichever comes first for i in range(num_iterations): print(offer_a, offer_b) # Get from each negotiator the utility it received from the offer it most recently gave utility = (a_scale * negotiator_a.utility(), b_scale * negotiator_b.utility()) # Send b the latest offer from a and allow it to rebut negotiator_b.receive_utility(utility[0]) offer_b = negotiator_b.make_offer(offer_a) # We signify agreement by both offers being structurally equal if offer_a == offer_b: return (True, offer_a, i) # If we didn't agree, let a respond to b's offer, recalculate utility and send 'a' the info utility = (a_scale * negotiator_a.utility(), b_scale * negotiator_b.utility()) negotiator_a.receive_utility(utility[1]) offer_a = negotiator_a.make_offer(offer_b) if offer_a == offer_b: return (True, offer_a, i) # If we failed overall, then there's no ordering to return return (False, None, num_iterations) if __name__ == "__main__": # We can't run without at least one scenario. 
We can, however, run with multiple provided scenarios
    if len(argv) < 2:
        print("Please provide at least one scenario file, in csv format.")
        exit(-42)

    score_a = score_b = 0
    # We will replace Negotiator here with <your id>_Negotiator, as specified in the Readme
    negotiator_a = Nap7jz()
    negotiator_b = Nap7jz()
    count = randint(0, 1)
    for scenario in argv[1:]:
        # Get the scenario parameters
        (num_iters, mapping) = read_scenario(scenario)

        # Separate the mapping out for each negotiator, and sort the items from it into a list
        # based upon the preferences of each negotiator
        a_mapping = {item["item_name"]: int(item["negotiator_a"]) for item in mapping}
        a_order = sorted(a_mapping, key=a_mapping.get, reverse=True)
        b_mapping = {item["item_name"]: int(item["negotiator_b"]) for item in mapping}
        b_order = sorted(b_mapping, key=b_mapping.get, reverse=True)

        # Give each negotiator their preferred item ordering
        negotiator_a.initialize(a_order, num_iters)
        negotiator_b.initialize(b_order, num_iters)

        # Get the result of the negotiation and SWAP TURNS.
        # num_rounds holds the iteration count returned by negotiate();
        # `count` tracks whose turn it is to go first.
        if count % 2 == 0:
            print("A (random) is going first")
            print("A's prefs: " + str(negotiator_a.preferences))
            print("B's prefs: " + str(negotiator_b.preferences))
            (result, order, num_rounds) = negotiate(num_iters, negotiator_a, negotiator_b)
        else:
            print("B (you) is going first (so your offers are the first column)")
            print("A's prefs: " + str(negotiator_a.preferences))
            print("B's prefs: " + str(negotiator_b.preferences))
            (result, order, num_rounds) = negotiate(num_iters, negotiator_b, negotiator_a)

        # Assign points to each negotiator. Note that if the negotiation failed, each negotiator receives a negative penalty.
        # However, it is also possible in a "successful" negotiation for a given negotiator to receive negative points
        (points_a, points_b) = (negotiator_a.utility(), negotiator_b.utility()) if result else (-len(a_order), -len(b_order))
        results = (result, points_a, points_b, num_rounds)
        score_a += points_a
        score_b += points_b
        # Update each negotiator with the final result, points assigned, and number of iterations taken to reach an agreement
        negotiator_a.receive_results(results)
        negotiator_b.receive_results(results)
        print("{} negotiation:\n\tNegotiator A: {}\n\tNegotiator B: {}".format("Successful" if result else "Failed", points_a, points_b))
        # Swap turns.
        count = count + 1

    print("Final result:\n\tNegotiator A (random one): {}\n\tNegotiator B (us): {}".format(score_a, score_b))
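# Hedged example (not part of the original script): a scenario file in the
# format read_scenario() expects -- an iteration limit on the first line, then
# rows of item_name,rank_for_a,rank_for_b. The items and ranks are made up.
#
#   10
#   apple,1,3
#   banana,2,1
#   cherry,3,2
#
# Run as: python negotiator_framework.py scenario1.csv scenario2.csv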
mit
-8,557,401,489,509,834,000
50.336134
141
0.650188
false
mkoura/dump2polarion
dump2polarion/exporters/xunit_exporter.py
1
12507
""" Dump testcases results to XUnit file for submitting to the Polarion XUnit Importer. Example of input tests_records: tests_records = ImportedData( results=[ { "title": "test_post_single[vim-common-2:7.4.160-1.el7.x86_64.rpm]", "classname": "vmaas.tests.test_packages.TestPackagesQuery", "verdict": "passed", "time": "0.00890207290649414", "file": "vmaas/tests/test_packages.py", "params": {"pkg": "some_package"}, } ], testrun=None, ) """ import datetime import logging from typing import Callable, List, NamedTuple, Optional from lxml import etree from dump2polarion import utils from dump2polarion.exceptions import Dump2PolarionException, NothingToDoException from dump2polarion.exporters import transform_projects from dump2polarion.exporters.verdicts import Verdicts LOGGER = logging.getLogger(__name__) ImportedData = NamedTuple("ImportedData", [("results", List[dict]), ("testrun", Optional[str])]) class XunitExport: """Export testcases results into Polarion XUnit.""" def __init__( self, testrun_id: str, tests_records: ImportedData, config: dict, transform_func: Optional[Callable] = None, ) -> None: self.testrun_id = testrun_id self.tests_records = tests_records self.config = config or {} self._lookup_prop = "" self._transform_func = transform_func or transform_projects.get_xunit_transform(config) def _top_element(self) -> etree.Element: """Return top XML element.""" top = etree.Element("testsuites") comment = etree.Comment("Generated for testrun {}".format(self.testrun_id)) top.append(comment) return top def _properties_element(self, parent_element: etree.Element) -> etree.Element: """Return properties XML element.""" testsuites_properties = etree.SubElement(parent_element, "properties") etree.SubElement( testsuites_properties, "property", {"name": "polarion-testrun-id", "value": str(self.testrun_id)}, ) etree.SubElement( testsuites_properties, "property", {"name": "polarion-project-id", "value": str(self.config["polarion-project-id"])}, ) for name, value in sorted(self.config["xunit_import_properties"].items()): if name == "polarion-lookup-method": lookup_prop = str(value).lower() if lookup_prop not in ("id", "name", "custom"): raise Dump2PolarionException( "Invalid value '{}' for the 'polarion-lookup-method' property".format( str(value) ) ) self._lookup_prop = lookup_prop elif name in ("polarion-testrun-id", "polarion-project-id"): # this was already set continue else: etree.SubElement( testsuites_properties, "property", {"name": name, "value": str(value)} ) return testsuites_properties def _fill_lookup_prop(self, testsuites_properties: etree.Element) -> None: """Fill the polarion-lookup-method property.""" if not self._lookup_prop: raise Dump2PolarionException("Failed to set the 'polarion-lookup-method' property") etree.SubElement( testsuites_properties, "property", {"name": "polarion-lookup-method", "value": self._lookup_prop}, ) def _testsuite_element(self, parent_element: etree.Element) -> etree.Element: """Return testsuite XML element.""" testsuite = etree.SubElement( parent_element, "testsuite", { "name": "Import for {} - {} testrun".format( self.config["polarion-project-id"], self.testrun_id ) }, ) return testsuite @staticmethod def _fill_verdict(verdict: str, result: dict, testcase: etree.Element, records: dict) -> None: # XUnit Pass maps to Passed in Polarion if verdict in Verdicts.PASS: records["passed"] += 1 # XUnit Failure maps to Failed in Polarion elif verdict in Verdicts.FAIL: records["failures"] += 1 verdict_data = {"type": "failure"} if result.get("comment"): 
verdict_data["message"] = utils.get_unicode_str(result["comment"]) etree.SubElement(testcase, "failure", utils.sorted_dict(verdict_data)) # XUnit Error maps to Blocked in Polarion elif verdict in Verdicts.SKIP: records["skipped"] += 1 verdict_data = {"type": "error"} if result.get("comment"): verdict_data["message"] = utils.get_unicode_str(result["comment"]) etree.SubElement(testcase, "error", utils.sorted_dict(verdict_data)) # XUnit Skipped maps to Waiting in Polarion elif verdict in Verdicts.WAIT: records["waiting"] += 1 verdict_data = {"type": "skipped"} if result.get("comment"): verdict_data["message"] = utils.get_unicode_str(result["comment"]) etree.SubElement(testcase, "skipped", utils.sorted_dict(verdict_data)) def _transform_result(self, result: dict) -> dict: """Call transform function on result.""" if self._transform_func: result = self._transform_func(result) return result or {} @staticmethod def _get_verdict(result: dict): """Get verdict of the testcase.""" verdict = result.get("verdict") if not verdict: return None verdict = verdict.strip().lower() if verdict not in Verdicts.PASS + Verdicts.FAIL + Verdicts.SKIP + Verdicts.WAIT: return None return verdict def _set_lookup_prop(self, result_data: dict) -> None: """Set lookup property based on processed testcases if not configured.""" if self._lookup_prop: return if result_data.get("id"): self._lookup_prop = "id" elif result_data.get("title"): self._lookup_prop = "name" else: return LOGGER.debug("Setting lookup method for xunit to `%s`", self._lookup_prop) def _check_lookup_prop(self, result_data: dict) -> bool: """Check that selected lookup property can be used for this testcase.""" if not self._lookup_prop: return False if not result_data.get("id") and self._lookup_prop != "name": return False if not result_data.get("title") and self._lookup_prop == "name": return False return True @staticmethod def _testcase_element( parent_element: etree.Element, result: dict, records: dict, testcase_id: Optional[str], testcase_title: Optional[str], ) -> etree.Element: """Create XML element for given testcase result and update testcases records.""" name = testcase_title or testcase_id if not name: raise Dump2PolarionException( "Neither `testcase_id` not `testcase_title` has valid value." ) testcase_time = float(result.get("time") or result.get("duration") or 0) records["time"] += testcase_time testcase_data = {"name": name, "time": str(testcase_time)} if result.get("classname"): testcase_data["classname"] = result["classname"] testcase = etree.SubElement(parent_element, "testcase", utils.sorted_dict(testcase_data)) return testcase @staticmethod def _fill_out_err(result: dict, testcase: etree.Element) -> None: """Add stdout and stderr if present.""" if result.get("stdout"): system_out = etree.SubElement(testcase, "system-out") system_out.text = utils.get_unicode_str(result["stdout"]) if result.get("stderr"): system_err = etree.SubElement(testcase, "system-err") system_err.text = utils.get_unicode_str(result["stderr"]) @staticmethod def _fill_properties( verdict: str, result: dict, testcase: etree.Element, testcase_id: Optional[str], testcase_title: Optional[str], ) -> None: """Add properties into testcase element.""" id_value = testcase_id or testcase_title if not id_value: raise Dump2PolarionException( "Neither `testcase_id` not `testcase_title` has valid value." 
)

        properties = etree.SubElement(testcase, "properties")
        etree.SubElement(
            properties, "property", {"name": "polarion-testcase-id", "value": id_value}
        )
        if verdict in Verdicts.PASS and result.get("comment"):
            etree.SubElement(
                properties,
                "property",
                {
                    "name": "polarion-testcase-comment",
                    "value": utils.get_unicode_str(result["comment"]),
                },
            )

        params = result.get("params") or {}
        for param, value in params.items():
            etree.SubElement(
                properties,
                "property",
                {
                    "name": "polarion-parameter-{}".format(param),
                    "value": utils.get_unicode_str(value),
                },
            )

    def _gen_testcase(self, parent_element: etree.Element, result: dict, records: dict) -> None:
        """Create record for given testcase result."""
        result = self._transform_result(result)
        if not result:
            return

        if result.get("ignored"):
            LOGGER.debug("Skipping ignored testcase")
            return

        verdict = self._get_verdict(result)
        if not verdict:
            LOGGER.warning("Skipping testcase, verdict is missing or invalid")
            return

        testcase_id = result.get("id")
        testcase_title = result.get("title")

        self._set_lookup_prop(result)
        if not self._check_lookup_prop(result):
            LOGGER.warning(
                "Skipping testcase `%s`, data missing for selected lookup method",
                testcase_id or testcase_title,
            )
            return

        testcase = self._testcase_element(
            parent_element, result, records, testcase_id, testcase_title
        )
        self._fill_verdict(verdict, result, testcase, records)
        self._fill_out_err(result, testcase)
        self._fill_properties(verdict, result, testcase, testcase_id, testcase_title)

    def _fill_tests_results(self, testsuite_element: etree.Element) -> None:
        """Create records for all testcases results."""
        if not self.tests_records.results:
            raise NothingToDoException("Nothing to export")

        records = {"passed": 0, "skipped": 0, "failures": 0, "waiting": 0, "time": 0.0}
        for testcase_result in self.tests_records.results:
            self._gen_testcase(testsuite_element, testcase_result, records)

        tests_num = (
            records["passed"] + records["skipped"] + records["failures"] + records["waiting"]
        )
        if tests_num == 0:
            raise NothingToDoException("Nothing to export")

        testsuite_element.set("errors", str(records["skipped"]))
        testsuite_element.set("failures", str(records["failures"]))
        testsuite_element.set("skipped", str(records["waiting"]))
        testsuite_element.set("time", "{:.4f}".format(records["time"]))
        testsuite_element.set("tests", str(tests_num))

    def export(self) -> str:
        """Return XUnit XML."""
        top = self._top_element()
        properties = self._properties_element(top)
        testsuite = self._testsuite_element(top)
        self._fill_tests_results(testsuite)
        self._fill_lookup_prop(properties)
        return utils.prettify_xml(top)

    def write_xml(self, xml_str: str, output_file: Optional[str] = None) -> None:
        """Output the XML content into a file."""
        gen_filename = "testrun_{}-{:%Y%m%d%H%M%S}.xml".format(
            self.testrun_id, datetime.datetime.now()
        )
        utils.write_xml(xml_str, output_loc=output_file, filename=gen_filename)
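# Hedged usage sketch (not part of the original module): the config keys below
# are minimal assumptions based on the properties read above, and the identity
# transform_func bypasses the project-specific default transformation.
if __name__ == "__main__":
    records = ImportedData(
        results=[{"title": "test_one", "verdict": "passed", "time": "0.1"}],
        testrun=None,
    )
    config = {
        "polarion-project-id": "MYPROJECT",
        "xunit_import_properties": {"polarion-lookup-method": "name"},
    }
    exporter = XunitExport(
        "my-testrun-1", records, config, transform_func=lambda result: result)
    print(exporter.export())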
gpl-2.0
-2,698,110,121,219,784,000
36.785498
98
0.579515
false
cherishing78/BSVer
Yintest/noloop.py
1
3693
import numpy as np


def Initial_diag(dim):
    conv = np.diag(np.random.rand(dim))
    return conv


def Convergence(matrix):
    delta = (np.abs(matrix).max(axis=0)).max(axis=0)
    return delta


def Train(trainingset, label):
    (imagenum, dim) = trainingset.shape
    # Each column vector stands for an image.
    dataset = np.transpose(trainingset)
    label.shape = (-1,)
    peoplenum = label[-1]
    m = np.zeros(peoplenum, dtype=np.uint16)
    # m[i] stands for the number of images the i-th person has.
    # The elements in label start with 1.
    for i in label:
        m[i-1] += 1
    # Delete the repetitive elements and get the m_set list.
    m_set = set(list(m))
    m_max = max(m_set)
    print '------ m_set Accomplished ------'
    print m_set

    # Initialization
    Su = Initial_diag(dim)
    Se = Initial_diag(dim)
    print '------ Initialization Accomplished ------'

    # Iteration
    epsilon = 1e-4
    Delta_Su = Su
    Delta_Se = Se
    Iter = 0
    Delta = max(Convergence(Delta_Su), Convergence(Delta_Se))
    print '------ Training Process ------'
    while Delta > epsilon:
        print '------ Delta=%f in %dth Iteration ------' % (Delta, Iter)
        # Compute F and all kinds of G in each iteration.
        F = np.linalg.pinv(Se)
        # In case no person has m[k] images.
        G_class = [0 for i in range(m_max)]
        for i in range(1, m_max+1):
            if i in m_set:
                # Compute the various G in advance for the sake of convenience.
                G_class[i-1] = -np.dot(np.linalg.pinv((i+1)*Su+Se), np.dot(Su, F))
                print '------ G_class[%d] Accomplished in the %dth Iteration ------' % (i-1, Iter)
        # Compute u[i] for each person and e[i,j] for each image.
        # Initialize the pointer to each person's images.
        m_index = 0
        Su_new = 0
        Se_new = 0
        print '------ Compute the Su_new and Se_new in %dth Iteration ------' % Iter
        for i in range(peoplenum):
            u = 0
            e = 0
            # Compute the constant term for e[i,j].
            constant = 0
            for j in range(m_index, m_index+m[i]):
                constant += np.dot(Se, np.dot(G_class[m[i]-1], dataset[:, j]))
            # Compute Su_new and Se_new.
            for j in range(m_index, m_index+m[i]):
                u += np.dot(Su, np.dot((F+(m[i]+1)*G_class[m[i]-1]), dataset[:, j]))
                eij = np.dot(Se, dataset[:, j])+constant
                # np.outer keeps the rank-one update a dim x dim matrix
                # (np.dot on 1-D vectors would collapse it to a scalar).
                Se_new += np.outer(eij, eij)/m[i]/peoplenum
            Su_new += np.outer(u, u)/peoplenum
            # The pointer moves on.
            m_index += m[i]
        Delta_Su = Su_new.__sub__(Su)
        Delta_Se = Se_new.__sub__(Se)
        Delta = max(Convergence(Delta_Su), Convergence(Delta_Se))
        Su = Su_new
        Se = Se_new
        print '------ %dth iteration accomplished ------' % Iter
        Iter += 1
        if Iter > 10:
            break
    # Get the matrices in need.
    F = np.linalg.pinv(Se)
    # Save the memory.
    if 1 not in m_set:
        G_class[0] = -np.dot(np.dot(np.linalg.pinv(2*Su+Se), Su), F)
    A = np.linalg.pinv(Su+Se)-F-G_class[0]
    # G_class[0] holds the pair-wise G consumed by Verify().
    return A, G_class[0]


def Noloop(trainingset, label):
    (imagenum, dim) = trainingset.shape
    # Each column vector stands for an image.
    # For the dim aligning.
    trainingset.shape = (imagenum, dim, 1)
    label.shape = (-1,)
    peoplenum = label[-1]
    m = np.zeros(peoplenum, dtype=np.uint16)
    # m[i] stands for the number of images the i-th person has.
    # The elements in label start with 1.
    for i in label:
        m[i-1] += 1
    # Delete the repetitive elements and get the m_set list.
    m_set = set(list(m))
    m_max = max(m_set)
    print '------ m_set Accomplished ------'
    print m_set
    m_index = 0
    print '------ Compute Su ------'
    Su = 0
    Se = 0
    for i in range(peoplenum):
        u = 0
        e = 0
        for j in range(m_index, m_index+m[i]):
            u += trainingset[j]/m[i]
        for j in range(m_index, m_index+m[i]):
            Se += np.dot((trainingset[j]-u), np.transpose(trainingset[j]-u))/m[i]/(peoplenum-1)
        Su += np.dot(u, np.transpose(u))/(peoplenum-1)
        # The pointer moves on to the next person's images.
        m_index += m[i]
    return Su, Se


def Verify(A, G, x1, x2):
    x1.shape = (-1, 1)
    x2.shape = (-1, 1)
    ratio = np.dot(np.dot(np.transpose(x1), A), x1)+np.dot(np.dot(np.transpose(x2), A), x2)-2*np.dot(np.dot(np.transpose(x1), G), x2)
    return ratio
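# Hedged usage sketch (not part of the original module): random features stand
# in for a real training set; 4 people with 3 images each, 5-dim features.
if __name__ == '__main__':
    trainingset = np.random.rand(12, 5)
    label = np.array([1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])
    Su, Se = Noloop(trainingset, label)
    print Su.shape, Se.shape  # both (5, 5)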
bsd-3-clause
7,908,429,308,505,981,000
29.02439
122
0.655835
false
psychopy/psychopy
psychopy/experiment/components/joyButtons/virtualJoyButtons.py
1
1281
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2021 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).

# Support for a fake joystick/gamepad during development:
# if no 'real' joystick/gamepad is available, use keyboard emulation
# 'ctrl' + 'alt' + numberKey

from __future__ import absolute_import, division, print_function

from psychopy import event


class VirtualJoyButtons(object):
    def __init__(self, device_number):
        self.device_number = device_number
        self.numberKeys = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
        self.modifierKeys = ['ctrl', 'alt']
        # Hide the cursor while the virtual device is in use.
        self.mouse = event.Mouse(visible=False)

    def getNumButtons(self):
        return(len(self.numberKeys))

    def getAllButtons(self):
        keys = event.getKeys(keyList=self.numberKeys, modifiers=True)
        values = [key for key, modifiers in keys
                  if all([modifiers[modKey] for modKey in self.modifierKeys])]
        self.state = [key in values for key in self.numberKeys]
        mouseButtons = self.mouse.getPressed()
        self.state[:len(mouseButtons)] = [a or b != 0 for (a, b)
                                          in zip(self.state, mouseButtons)]
        return(self.state)
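# Hedged usage sketch (not part of the original module): a polling loop an
# experiment script might run; event.Mouse() needs an open PsychoPy window,
# so this is illustrative only.
#
#   buttons = VirtualJoyButtons(device_number=0)
#   while not any(buttons.getAllButtons()):  # ctrl+alt+<digit> presses a button
#       pass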
gpl-3.0
-8,302,396,992,503,322,000
39.03125
109
0.67057
false
deepmind/reverb
reverb/client_test.py
1
17392
# Lint as: python3
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for python client."""

import collections
import multiprocessing.dummy as multithreading
import pickle

from absl.testing import absltest
import numpy as np
from reverb import client
from reverb import errors
from reverb import item_selectors
from reverb import rate_limiters
from reverb import server
import tensorflow.compat.v1 as tf
import tree

TABLE_NAME = 'table'
NESTED_SIGNATURE_TABLE_NAME = 'nested_signature_table'
SIMPLE_QUEUE_NAME = 'simple_queue'
QUEUE_SIGNATURE = {
    'a': tf.TensorSpec(dtype=tf.int64, shape=(3,)),
    'b': tf.TensorSpec(dtype=tf.float32, shape=(3, 2, 2)),
}


class ClientTest(absltest.TestCase):

  @classmethod
  def setUpClass(cls):
    super().setUpClass()
    cls.server = server.Server(
        tables=[
            server.Table(
                name=TABLE_NAME,
                sampler=item_selectors.Prioritized(1),
                remover=item_selectors.Fifo(),
                max_size=1000,
                rate_limiter=rate_limiters.MinSize(3),
                signature=tf.TensorSpec(dtype=tf.int64, shape=[]),
            ),
            server.Table.queue(
                name=NESTED_SIGNATURE_TABLE_NAME,
                max_size=10,
                signature=QUEUE_SIGNATURE,
            ),
            server.Table.queue(SIMPLE_QUEUE_NAME, 10),
        ],
        port=None)
    cls.client = client.Client(f'localhost:{cls.server.port}')

  def tearDown(self):
    self.client.reset(TABLE_NAME)
    self.client.reset(NESTED_SIGNATURE_TABLE_NAME)
    self.client.reset(SIMPLE_QUEUE_NAME)
    super().tearDown()

  @classmethod
  def tearDownClass(cls):
    cls.server.stop()
    super().tearDownClass()

  def _get_sample_frequency(self, n=10000):
    keys = [sample[0].info.key for sample in self.client.sample(TABLE_NAME, n)]
    counter = collections.Counter(keys)
    return [count / n for _, count in counter.most_common()]

  def test_sample_sets_table_size(self):
    for i in range(1, 11):
      self.client.insert(i, {TABLE_NAME: 1.0})
      if i >= 3:
        sample = next(self.client.sample(TABLE_NAME, 1))[0]
        self.assertEqual(sample.info.table_size, i)

  def test_sample_sets_probability(self):
    for i in range(1, 11):
      self.client.insert(i, {TABLE_NAME: 1.0})
      if i >= 3:
        sample = next(self.client.sample(TABLE_NAME, 1))[0]
        self.assertAlmostEqual(sample.info.probability, 1.0 / i, delta=0.01)

  def test_sample_sets_priority(self):
    # Set the test context by manually mutating priorities to known ones.
    for i in range(10):
      self.client.insert(i, {TABLE_NAME: 1000.0})

    def _sample_priorities(n=100):
      return {
          sample[0].info.key: sample[0].info.priority
          for sample in self.client.sample(TABLE_NAME, n)
      }

    original_priorities = _sample_priorities(n=100)
    self.assertNotEmpty(original_priorities)
    self.assertSequenceAlmostEqual([1000.0] * len(original_priorities),
                                   original_priorities.values())

    expected_priorities = {
        key: float(i) for i, key in enumerate(original_priorities)
    }
    self.client.mutate_priorities(TABLE_NAME, updates=expected_priorities)

    # Resample and check priorities.
sampled_priorities = _sample_priorities(n=100) self.assertNotEmpty(sampled_priorities) for key, priority in sampled_priorities.items(): if key in expected_priorities: self.assertAlmostEqual(expected_priorities[key], priority) def test_insert_raises_if_priorities_empty(self): with self.assertRaises(ValueError): self.client.insert([1], {}) def test_insert(self): self.client.insert(1, {TABLE_NAME: 1.0}) # This should be sampled often. self.client.insert(2, {TABLE_NAME: 0.1}) # This should be sampled rarely. self.client.insert(3, {TABLE_NAME: 0.0}) # This should never be sampled. freqs = self._get_sample_frequency() self.assertLen(freqs, 2) self.assertAlmostEqual(freqs[0], 0.9, delta=0.05) self.assertAlmostEqual(freqs[1], 0.1, delta=0.05) def test_writer_raises_if_max_sequence_length_lt_1(self): with self.assertRaises(ValueError): self.client.writer(0) def test_writer_raises_if_chunk_length_lt_1(self): self.client.writer(2, chunk_length=1) # Should be fine. for chunk_length in [0, -1]: with self.assertRaises(ValueError): self.client.writer(2, chunk_length=chunk_length) def test_writer_raises_if_chunk_length_gt_max_sequence_length(self): self.client.writer(2, chunk_length=1) # lt should be fine. self.client.writer(2, chunk_length=2) # eq should be fine. with self.assertRaises(ValueError): self.client.writer(2, chunk_length=3) def test_writer_raises_if_max_in_flight_items_lt_1(self): self.client.writer(1, max_in_flight_items=1) self.client.writer(1, max_in_flight_items=2) self.client.writer(1, max_in_flight_items=None) with self.assertRaises(ValueError): self.client.writer(1, max_in_flight_items=-1) def test_writer_works_with_no_retries(self): # If the server responds correctly, the writer ignores the no retries arg. writer = self.client.writer(2) writer.append([0]) writer.create_item(TABLE_NAME, 1, 1.0) writer.close(retry_on_unavailable=False) def test_writer(self): with self.client.writer(2) as writer: writer.append([0]) writer.create_item(TABLE_NAME, 1, 1.0) writer.append([1]) writer.create_item(TABLE_NAME, 2, 1.0) writer.append([2]) writer.create_item(TABLE_NAME, 1, 1.0) writer.append_sequence([np.array([3, 4])]) writer.create_item(TABLE_NAME, 2, 1.0) freqs = self._get_sample_frequency() self.assertLen(freqs, 4) for freq in freqs: self.assertAlmostEqual(freq, 0.25, delta=0.05) def test_write_and_sample_different_shapes_and_dtypes(self): trajectories = [ np.ones([], np.int64), np.ones([2, 2], np.float32), np.ones([3, 3], np.int32), ] for trajectory in trajectories: self.client.insert(trajectory, {SIMPLE_QUEUE_NAME: 1.0}) for i, [sample] in enumerate(self.client.sample(SIMPLE_QUEUE_NAME, 3)): np.testing.assert_array_equal(trajectories[i], sample.data[0]) def test_mutate_priorities_update(self): self.client.insert([0], {TABLE_NAME: 1.0}) self.client.insert([0], {TABLE_NAME: 1.0}) self.client.insert([0], {TABLE_NAME: 1.0}) before = self._get_sample_frequency() self.assertLen(before, 3) for freq in before: self.assertAlmostEqual(freq, 0.33, delta=0.05) key = next(self.client.sample(TABLE_NAME, 1))[0].info.key self.client.mutate_priorities(TABLE_NAME, updates={key: 0.5}) after = self._get_sample_frequency() self.assertLen(after, 3) self.assertAlmostEqual(after[0], 0.4, delta=0.05) self.assertAlmostEqual(after[1], 0.4, delta=0.05) self.assertAlmostEqual(after[2], 0.2, delta=0.05) def test_mutate_priorities_delete(self): self.client.insert([0], {TABLE_NAME: 1.0}) self.client.insert([0], {TABLE_NAME: 1.0}) self.client.insert([0], {TABLE_NAME: 1.0}) self.client.insert([0], {TABLE_NAME: 1.0}) before = 
self._get_sample_frequency()
    self.assertLen(before, 4)

    key = next(self.client.sample(TABLE_NAME, 1))[0].info.key
    self.client.mutate_priorities(TABLE_NAME, deletes=[key])

    after = self._get_sample_frequency()
    self.assertLen(after, 3)

  def test_reset(self):
    self.client.insert([0], {TABLE_NAME: 1.0})
    self.client.insert([0], {TABLE_NAME: 1.0})
    self.client.insert([0], {TABLE_NAME: 1.0})

    keys_before = set(
        sample[0].info.key for sample in self.client.sample(TABLE_NAME, 1000))
    self.assertLen(keys_before, 3)

    self.client.reset(TABLE_NAME)

    self.client.insert([0], {TABLE_NAME: 1.0})
    self.client.insert([0], {TABLE_NAME: 1.0})
    self.client.insert([0], {TABLE_NAME: 1.0})

    keys_after = set(
        sample[0].info.key for sample in self.client.sample(TABLE_NAME, 1000))
    self.assertLen(keys_after, 3)

    self.assertTrue(keys_after.isdisjoint(keys_before))

  def test_server_info(self):
    self.client.insert([0], {TABLE_NAME: 1.0})
    self.client.insert([0], {TABLE_NAME: 1.0})
    self.client.insert([0], {TABLE_NAME: 1.0})
    list(self.client.sample(TABLE_NAME, 1))

    server_info = self.client.server_info()
    self.assertLen(server_info, 3)

    self.assertIn(TABLE_NAME, server_info)
    table = server_info[TABLE_NAME]
    self.assertEqual(table.current_size, 3)
    self.assertEqual(table.num_unique_samples, 1)
    self.assertEqual(table.max_size, 1000)
    self.assertEqual(table.sampler_options.prioritized.priority_exponent, 1)
    self.assertTrue(table.remover_options.fifo)
    self.assertEqual(table.signature, tf.TensorSpec(dtype=tf.int64, shape=[]))

    self.assertIn(NESTED_SIGNATURE_TABLE_NAME, server_info)
    queue = server_info[NESTED_SIGNATURE_TABLE_NAME]
    self.assertEqual(queue.current_size, 0)
    self.assertEqual(queue.num_unique_samples, 0)
    self.assertEqual(queue.max_size, 10)
    self.assertTrue(queue.sampler_options.fifo)
    self.assertTrue(queue.remover_options.fifo)
    self.assertEqual(queue.signature, QUEUE_SIGNATURE)

    self.assertIn(SIMPLE_QUEUE_NAME, server_info)
    info = server_info[SIMPLE_QUEUE_NAME]
    self.assertEqual(info.current_size, 0)
    self.assertEqual(info.num_unique_samples, 0)
    self.assertEqual(info.max_size, 10)
    self.assertTrue(info.sampler_options.fifo)
    self.assertTrue(info.remover_options.fifo)
    self.assertIsNone(info.signature)

  def test_sample_trajectory_with_signature(self):
    with self.client.trajectory_writer(3) as writer:
      for _ in range(3):
        writer.append({
            'a': np.ones([], np.int64),
            'b': np.ones([2, 2], np.float32),
        })
      writer.create_item(
          table=NESTED_SIGNATURE_TABLE_NAME,
          priority=1.0,
          trajectory={
              'a': writer.history['a'][:],
              'b': writer.history['b'][:],
          })

    sample = next(self.client.sample(NESTED_SIGNATURE_TABLE_NAME,
                                     emit_timesteps=False,
                                     unpack_as_table_signature=True))

    # The data should be unpacked as the structure of the table.
    want = {
        'a': np.ones([3], np.int64),
        'b': np.ones([3, 2, 2], np.float32),
    }
    tree.map_structure(np.testing.assert_array_equal, sample.data, want)

    # The info fields should all be scalars (i.e not batched by time).
self.assertIsInstance(sample.info.key, int) self.assertIsInstance(sample.info.probability, float) self.assertIsInstance(sample.info.table_size, int) self.assertIsInstance(sample.info.priority, float) def test_sample_trajectory_without_signature(self): with self.client.trajectory_writer(3) as writer: for _ in range(3): writer.append({ 'a': np.ones([], np.int64), 'b': np.ones([2, 2], np.float32), }) writer.create_item( table=SIMPLE_QUEUE_NAME, priority=1.0, trajectory={ 'a': writer.history['a'][:], 'b': writer.history['b'][:], }) sample = next(self.client.sample(SIMPLE_QUEUE_NAME, emit_timesteps=False, unpack_as_table_signature=True)) # The data should be flat as the table has no signature. Each element within # the flat data should represent the entire column (i.e not just one step). want = [np.ones([3], np.int64), np.ones([3, 2, 2], np.float32)] tree.map_structure(np.testing.assert_array_equal, sample.data, want) # The info fields should all be scalars (i.e not batched by time). self.assertIsInstance(sample.info.key, int) self.assertIsInstance(sample.info.probability, float) self.assertIsInstance(sample.info.table_size, int) self.assertIsInstance(sample.info.priority, float) def test_sample_trajectory_as_flat_data(self): with self.client.trajectory_writer(3) as writer: for _ in range(3): writer.append({ 'a': np.ones([], np.int64), 'b': np.ones([2, 2], np.float32), }) writer.create_item( table=NESTED_SIGNATURE_TABLE_NAME, priority=1.0, trajectory={ 'a': writer.history['a'][:], 'b': writer.history['b'][:], }) sample = next(self.client.sample(NESTED_SIGNATURE_TABLE_NAME, emit_timesteps=False, unpack_as_table_signature=False)) # The table has a signature but we requested the data to be flat. want = [np.ones([3], np.int64), np.ones([3, 2, 2], np.float32)] tree.map_structure(np.testing.assert_array_equal, sample.data, want) # The info fields should all be scalars (i.e not batched by time). self.assertIsInstance(sample.info.key, int) self.assertIsInstance(sample.info.probability, float) self.assertIsInstance(sample.info.table_size, int) self.assertIsInstance(sample.info.priority, float) def test_sample_trajectory_written_with_insert(self): self.client.insert(np.ones([3, 3], np.int32), {SIMPLE_QUEUE_NAME: 1.0}) sample = next(self.client.sample(SIMPLE_QUEUE_NAME, emit_timesteps=False)) # An extra batch dimension should have been added to the inserted data as # it is a trajectory of length 1. want = [np.ones([1, 3, 3], np.int32)] tree.map_structure(np.testing.assert_array_equal, sample.data, want) # The info fields should all be scalars (i.e not batched by time). self.assertIsInstance(sample.info.key, int) self.assertIsInstance(sample.info.probability, float) self.assertIsInstance(sample.info.table_size, int) self.assertIsInstance(sample.info.priority, float) def test_sample_trajectory_written_with_legacy_writer(self): with self.client.writer(3) as writer: for i in range(3): writer.append([i, np.ones([2, 2], np.float64)]) writer.create_item(SIMPLE_QUEUE_NAME, 3, 1.0) sample = next(self.client.sample(SIMPLE_QUEUE_NAME, emit_timesteps=False)) # The time dimension should have been added to all fields. want = [np.array([0, 1, 2]), np.ones([3, 2, 2], np.float64)] tree.map_structure(np.testing.assert_array_equal, sample.data, want) # The info fields should all be scalars (i.e not batched by time). 
self.assertIsInstance(sample.info.key, int) self.assertIsInstance(sample.info.probability, float) self.assertIsInstance(sample.info.table_size, int) self.assertIsInstance(sample.info.priority, float) def test_server_info_timeout(self): # Setup a client that doesn't actually connect to anything. dummy_client = client.Client(f'localhost:{self.server.port + 1}') with self.assertRaises( errors.DeadlineExceededError, msg='ServerInfo call did not complete within provided timeout of 1s'): dummy_client.server_info(timeout=1) def test_pickle(self): loaded_client = pickle.loads(pickle.dumps(self.client)) self.assertEqual(loaded_client._server_address, self.client._server_address) loaded_client.insert([0], {TABLE_NAME: 1.0}) def test_multithreaded_writer_using_flush(self): # Ensure that we don't have any errors caused by multithreaded use of # writers or clients. pool = multithreading.Pool(64) def _write(i): with self.client.writer(1) as writer: writer.append([i]) # Make sure that flush before create_item doesn't create trouble. writer.flush() writer.create_item(TABLE_NAME, 1, 1.0) writer.flush() for _ in range(5): pool.map(_write, list(range(256))) info = self.client.server_info()[TABLE_NAME] self.assertEqual(info.current_size, 1000) pool.close() pool.join() def test_multithreaded_writer_using_scope(self): # Ensure that we don't have any errors caused by multithreaded use of # writers or clients. pool = multithreading.Pool(64) def _write(i): with self.client.writer(1) as writer: writer.append([i]) writer.create_item(TABLE_NAME, 1, 1.0) for _ in range(5): pool.map(_write, list(range(256))) info = self.client.server_info()[TABLE_NAME] self.assertEqual(info.current_size, 1000) pool.close() pool.join() def test_validates_trajectory_writer_config(self): with self.assertRaises(ValueError): self.client.trajectory_writer(0) with self.assertRaises(ValueError): self.client.trajectory_writer(-1) if __name__ == '__main__': absltest.main()
apache-2.0
5,739,390,927,420,527,000
35.308977
80
0.652196
false
mauriciogtec/PropedeuticoDataScience2017
Alumnos/Leonardo_Marin/Tarea 2 {spyder}.py
1
2336
#######################################################################
### Part 2

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from PIL import Image

#####################################################################
## Exercise 1

# Import the image
imagen = Image.open('C:/Users/Data Mining/Documents/ITAM/Propedeutico/Alumnos/PropedeuticoDataScience2017/Alumnos/Leonardo_Marin/black_and_white.jpg')

imagen_gris = imagen.convert('LA')  ## Convert to grayscale

## Convert the image to a matrix

imagen_mat = np.array(list(imagen_gris.getdata(band=0)), float)
imagen_mat.shape = (imagen_gris.size[1], imagen_gris.size[0])
imagen_mat = np.matrix(imagen_mat)
plt.figure(figsize=(9, 6))
plt.imshow(imagen_mat, cmap='gray')

## Singular value decomposition
U, sigma, V = np.linalg.svd(imagen_mat)

## Try the visualization with the first n vectors

# n = 1
j = 1
matriz_equivalente = np.matrix(U[:, :j]) * np.diag(sigma[:j]) * np.matrix(V[:j, :])
plt.figure(figsize=(9, 6))
plt.imshow(matriz_equivalente, cmap='gray')

# n = 5
j = 5
matriz_equivalente = np.matrix(U[:, :j]) * np.diag(sigma[:j]) * np.matrix(V[:j, :])
plt.figure(figsize=(9, 6))
plt.imshow(matriz_equivalente, cmap='gray')

# n = 25
j = 25
matriz_equivalente = np.matrix(U[:, :j]) * np.diag(sigma[:j]) * np.matrix(V[:j, :])
plt.figure(figsize=(9, 6))
plt.imshow(matriz_equivalente, cmap='gray')

# n = 50
j = 50
matriz_equivalente = np.matrix(U[:, :j]) * np.diag(sigma[:j]) * np.matrix(V[:j, :])
plt.figure(figsize=(9, 6))
plt.imshow(matriz_equivalente, cmap='gray')

## We can see how the image can be reconstructed without using all the
## information in the original matrix.

#####################################################################
## Exercise 2

A = np.array([[1, 0], [1, 2]])
A

def pseudoinversa(A):
    U, s, V = np.linalg.svd(A)
    # np.linalg.svd returns V already transposed (V^H), so it must be
    # transposed back for the pseudoinverse A+ = V diag(1/s) U^T.
    D1 = np.dot(V.T, np.diag(1/s))
    pseudoinv = np.dot(D1, U.T)
    return pseudoinv

B = pseudoinversa(A)
B

def sistema_ecuaciones(A, b):
    # Solves a system of equations: A is the matrix with the coefficients
    # of the equations, b is the vector of results.
    x = np.dot(pseudoinversa(A), b.T)
    return(x)

A = np.array([[1, 0], [1, 2]])
A
A.shape

b = np.array([[5, 3]])
b
b.shape

## Try the function
sistema_ecuaciones(A, b)

##
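## Hedged check (not part of the original homework): compare pseudoinversa()
## against NumPy's reference implementation on a random matrix.
M = np.random.rand(4, 4)
print(np.allclose(pseudoinversa(M), np.linalg.pinv(M)))  # should print True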
mit
2,516,087,497,806,065,700
18.272727
150
0.606775
false
nemesisdesign/openwisp2
openwisp_controller/vpn_backends.py
1
1648
from copy import deepcopy from netjsonconfig import OpenVpn as BaseOpenVpn # adapt OpenVPN schema in order to limit it to 1 item only limited_schema = deepcopy(BaseOpenVpn.schema) limited_schema['properties']['openvpn'].update( {'additionalItems': False, 'minItems': 1, 'maxItems': 1} ) # server mode only limited_schema['properties']['openvpn']['items'].update( { 'oneOf': [ {'$ref': '#/definitions/server_bridged'}, {'$ref': '#/definitions/server_routed'}, {'$ref': '#/definitions/server_manual'}, ] } ) limited_schema['required'] = limited_schema.get('required', []) limited_schema['required'].append('openvpn') # default values for ca, cert and key limited_schema['definitions']['tunnel']['properties']['ca']['default'] = 'ca.pem' limited_schema['definitions']['tunnel']['properties']['cert']['default'] = 'cert.pem' limited_schema['definitions']['tunnel']['properties']['key']['default'] = 'key.pem' limited_schema['definitions']['server']['properties']['dh']['default'] = 'dh.pem' limited_schema['properties']['files']['default'] = [ {'path': 'ca.pem', 'mode': '0644', 'contents': '{{ ca }}'}, {'path': 'cert.pem', 'mode': '0644', 'contents': '{{ cert }}'}, {'path': 'key.pem', 'mode': '0644', 'contents': '{{ key }}'}, {'path': 'dh.pem', 'mode': '0644', 'contents': '{{ dh }}'}, ] class OpenVpn(BaseOpenVpn): """ modified OpenVpn backend its schema is adapted to be used as a VPN Server backend: * shows server only * allows only 1 vpn * adds default values for ca, cert, key and dh """ schema = limited_schema
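# Hedged usage sketch (not part of the original module): the limited schema
# accepts exactly one server-mode tunnel; the config keys below are
# illustrative and additional required fields may apply depending on the
# netjsonconfig version in use.
#
#   backend = OpenVpn({'openvpn': [{'name': 'example-vpn', 'mode': 'server',
#                                   'proto': 'udp', 'dev': 'tun0',
#                                   'dev_type': 'tun'}]})
#   backend.validate()  # raises if there is more than one tunnel or client mode
#   print(backend.render())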
gpl-3.0
8,774,440,878,577,652,000
35.622222
85
0.613471
false
doconix/django-mako-plus
django_mako_plus/tags.py
1
2889
from django.template import engines
from django.template import TemplateDoesNotExist
from mako.runtime import supports_caller


###
### Mako-style tags that DMP provides
###


###############################################################
### Include Django templates

def django_include(context, template_name, **kwargs):
    '''
    Mako tag to include a Django template within the current DMP (Mako) template.
    Since this is a Django template, it is searched for using the Django search
    algorithm (instead of the DMP app-based concept).
    See https://docs.djangoproject.com/en/2.1/topics/templates/.

    The current context is sent to the included template, which makes all
    context variables available to the Django template. Any additional kwargs
    are added to the context.
    '''
    try:
        djengine = engines['django']
    except KeyError as e:
        raise TemplateDoesNotExist("Django template engine not configured in settings, so template cannot be found: {}".format(template_name)) from e
    djtemplate = djengine.get_template(template_name)
    djcontext = {}
    djcontext.update(context)
    djcontext.update(kwargs)
    return djtemplate.render(djcontext, context['request'])


#########################################################
### Template autoescaping on/off

# attaching to `caller_stack` because it's the same object
# throughout rendering of a template inheritance
AUTOESCAPE_KEY = '__dmp_autoescape'

def is_autoescape(context):
    return bool(getattr(context.caller_stack, AUTOESCAPE_KEY, True))


def _toggle_autoescape(context, escape_on=True):
    '''
    Internal method to toggle autoescaping on or off. This function needs
    access to the caller, so the calling method must be decorated with
    @supports_caller.
    '''
    previous = is_autoescape(context)
    setattr(context.caller_stack, AUTOESCAPE_KEY, escape_on)
    try:
        context['caller'].body()
    finally:
        setattr(context.caller_stack, AUTOESCAPE_KEY, previous)


@supports_caller
def autoescape_on(context):
    '''
    Mako tag to enable autoescaping for a given block within a template
    (individual filters can still override with ${ somevar | n }).

    Example:
        <%namespace name="dmp" module="django_mako_plus.tags"/>
        <%dmp:autoescape_on>
            ${ somevar } will be autoescaped.
        </%dmp:autoescape_on>
    '''
    _toggle_autoescape(context, True)
    return ''


@supports_caller
def autoescape_off(context):
    '''
    Mako tag to disable autoescaping for a given block within a template
    (individual filters can still override with ${ somevar | h }).

    Example:
        <%namespace name="dmp" module="django_mako_plus.tags"/>
        <%dmp:autoescape_off>
            ${ somevar } will not be autoescaped.
        </%dmp:autoescape_off>
    '''
    _toggle_autoescape(context, False)
    return ''
apache-2.0
-1,902,192,503,060,209,200
30.747253
149
0.66009
false
opticode/eve
eve/__init__.py
1
1947
# -*- coding: utf-8 -*-

"""
    Eve
    ~~~

    An out-of-the-box REST Web API that's as dangerous as you want it to be.

    :copyright: (c) 2014 by Nicola Iarocci.
    :license: BSD, see LICENSE for more details.

    .. versionchanged:: 0.5
       'QUERY_WHERE' added.
       'QUERY_SORT' added.
       'QUERY_PAGE' added.
       'QUERY_MAX_RESULTS' added.
       'QUERY_PROJECTION' added.
       'QUERY_EMBEDDED' added.
       'RFC1123_DATE_FORMAT' added.

    .. versionchanged:: 0.4
       'META' defaults to '_meta'.
       'ERROR' defaults to '_error'.
       Remove unnecessary commented code.

    .. versionchanged:: 0.2
       'LINKS' defaults to '_links'.
       'ITEMS' defaults to '_items'.
       'STATUS' defaults to 'status'.
       'ISSUES' defaults to 'issues'.

    .. versionchanged:: 0.1.1
       'SERVER_NAME' defaults to None.

    .. versionchanged:: 0.0.9
       'DATE_FORMAT' now using GMT instead of UTC.

"""

__version__ = '0.5-dev'

# RFC 1123 (ex RFC 822)
DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'
RFC1123_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S GMT'

URL_PREFIX = ''
API_VERSION = ''
SERVER_NAME = None
PAGINATION = True
PAGINATION_LIMIT = 50
PAGINATION_DEFAULT = 25
ID_FIELD = '_id'
CACHE_CONTROL = 'max-age=10,must-revalidate'  # TODO confirm this value
CACHE_EXPIRES = 10

RESOURCE_METHODS = ['GET']
ITEM_METHODS = ['GET']
ITEM_LOOKUP = True
ITEM_LOOKUP_FIELD = ID_FIELD
ITEM_URL = '[a-f0-9]{24}'

STATUS_OK = "OK"
STATUS_ERR = "ERR"
LAST_UPDATED = '_updated'
DATE_CREATED = '_created'
ISSUES = '_issues'
STATUS = '_status'
ERROR = '_error'
ITEMS = '_items'
LINKS = '_links'
ETAG = '_etag'
VERSION = '_version'
META = '_meta'

QUERY_WHERE = 'where'
QUERY_SORT = 'sort'
QUERY_PAGE = 'page'
QUERY_MAX_RESULTS = 'max_results'
QUERY_EMBEDDED = 'embedded'
QUERY_PROJECTION = 'projection'

VALIDATION_ERROR_STATUS = 422

# must be the last line (will raise W402 on pyflakes)
from eve.flaskapp import Eve  # noqa
bsd-3-clause
683,748,090,322,254,000
21.905882
77
0.618901
false
openstack/vitrage
vitrage/tests/unit/datasources/static/test_static_transformer.py
1
5948
# Copyright 2016 - Nokia # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime from oslo_config import cfg from vitrage.common.constants import DatasourceOpts as DSOpts from vitrage.common.constants import DatasourceProperties as DSProps from vitrage.common.constants import EntityCategory from vitrage.common.constants import UpdateMethod from vitrage.common.constants import VertexProperties as VProps from vitrage.datasources.nova.host import NOVA_HOST_DATASOURCE from vitrage.datasources.nova.host.transformer import HostTransformer from vitrage.datasources.static import STATIC_DATASOURCE from vitrage.datasources.static import StaticFields from vitrage.datasources.static.transformer import StaticTransformer from vitrage.datasources.transformer_base import TransformerBase from vitrage.tests import base from vitrage.tests.mocks import mock_driver class TestStaticTransformer(base.BaseTest): OPTS = [ cfg.StrOpt(DSOpts.UPDATE_METHOD, default=UpdateMethod.PULL), ] # noinspection PyAttributeOutsideInit,PyPep8Naming @classmethod def setUpClass(cls): super(TestStaticTransformer, cls).setUpClass() cls.transformers = {} cls.conf = cfg.ConfigOpts() cls.conf.register_opts(cls.OPTS, group=STATIC_DATASOURCE) cls.transformer = StaticTransformer(cls.transformers) cls.transformers[STATIC_DATASOURCE] = cls.transformer cls.transformers[NOVA_HOST_DATASOURCE] = \ HostTransformer(cls.transformers) # noinspection PyAttributeOutsideInit def setUp(self): super(TestStaticTransformer, self).setUp() self.entity_type = STATIC_DATASOURCE self.entity_id = '12345' self.timestamp = datetime.datetime.utcnow() def test_create_placeholder_vertex(self): properties = { VProps.VITRAGE_TYPE: self.entity_type, VProps.ID: self.entity_id, VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE, VProps.VITRAGE_SAMPLE_TIMESTAMP: self.timestamp } placeholder = self.transformer.create_neighbor_placeholder_vertex( **properties) observed_entity_id = placeholder.vertex_id expected_entity_id = \ TransformerBase.uuid_from_deprecated_vitrage_id( 'RESOURCE:static:12345') self.assertEqual(expected_entity_id, observed_entity_id) observed_time = placeholder.get(VProps.VITRAGE_SAMPLE_TIMESTAMP) self.assertEqual(self.timestamp, observed_time) observed_subtype = placeholder.get(VProps.VITRAGE_TYPE) self.assertEqual(self.entity_type, observed_subtype) observed_entity_id = placeholder.get(VProps.ID) self.assertEqual(self.entity_id, observed_entity_id) observed_vitrage_category = placeholder.get(VProps.VITRAGE_CATEGORY) self.assertEqual(EntityCategory.RESOURCE, observed_vitrage_category) vitrage_is_placeholder = placeholder.get(VProps.VITRAGE_IS_PLACEHOLDER) self.assertTrue(vitrage_is_placeholder) def test_snapshot_transform(self): vals_list = mock_driver.simple_static_generators(snapshot_events=1) events = mock_driver.generate_random_events_list(vals_list) self._event_transform_test(events) def test_update_transform(self): vals_list = mock_driver.simple_static_generators(update_events=1) events = mock_driver.generate_random_events_list(vals_list) 
        self._event_transform_test(events)

    def _event_transform_test(self, events):
        for event in events:
            wrapper = self.transformer.transform(event)

            vertex = wrapper.vertex
            self._validate_vertex(vertex, event)

            neighbors = wrapper.neighbors
            self._validate_neighbors(neighbors, vertex.vertex_id, event)

    def _validate_vertex(self, vertex, event):
        self._validate_common_props(vertex, event)

        self.assertEqual(vertex[VProps.VITRAGE_SAMPLE_TIMESTAMP],
                         event[DSProps.SAMPLE_DATE])

        # iterate (key, value) pairs explicitly: iterating the dict itself
        # yields only keys, so the (k, v) unpacking would fail
        for k, v in event.get(StaticFields.METADATA, {}).items():
            self.assertEqual(v, vertex[k])

    def _validate_common_props(self, vertex, event):
        self.assertEqual(vertex[VProps.VITRAGE_CATEGORY],
                         EntityCategory.RESOURCE)
        self.assertEqual(vertex[VProps.VITRAGE_TYPE], event[StaticFields.TYPE])
        self.assertEqual(vertex[VProps.ID], event[StaticFields.ID])
        self.assertFalse(vertex[VProps.VITRAGE_IS_DELETED])

    def _validate_neighbors(self, neighbors, vertex_id, event):
        for i in range(len(neighbors)):
            self._validate_neighbor(
                neighbors[i],
                event[StaticFields.RELATIONSHIPS][i],
                vertex_id)

    def _validate_neighbor(self, neighbor, rel, vertex_id):
        vertex = neighbor.vertex
        self._validate_neighbor_vertex_props(
            vertex, rel[StaticFields.TARGET])

        edge = neighbor.edge
        self.assertEqual(edge.source_id, vertex_id)
        self.assertEqual(edge.target_id, neighbor.vertex.vertex_id)
        self.assertEqual(edge.label, rel[StaticFields.RELATIONSHIP_TYPE])

    def _validate_neighbor_vertex_props(self, vertex, event):
        self._validate_common_props(vertex, event)
        self.assertTrue(vertex[VProps.VITRAGE_IS_PLACEHOLDER])
apache-2.0
-4,519,157,993,563,326,000
39.189189
79
0.691829
false
anushbmx/kitsune
kitsune/questions/config.py
1
16005
from collections import OrderedDict from django.utils.translation import ugettext_lazy as _lazy # The number of answers per page. ANSWERS_PER_PAGE = 20 # The number of questions per page. QUESTIONS_PER_PAGE = 20 # Highest ranking to show for a user HIGHEST_RANKING = 100 # Special tag names: ESCALATE_TAG_NAME = 'escalate' NEEDS_INFO_TAG_NAME = 'needsinfo' OFFTOPIC_TAG_NAME = 'offtopic' # Escalation config ESCALATE_EXCLUDE_PRODUCTS = ['thunderbird', 'webmaker', 'open-badges'] # How long until a question is automatically taken away from a user TAKE_TIMEOUT = 600 # AAQ config: products = OrderedDict([ ('desktop', { 'name': _lazy(u'Firefox'), 'subtitle': _lazy(u'Web browser for Windows, Mac and Linux'), 'extra_fields': ['troubleshooting', 'ff_version', 'os', 'plugins'], 'tags': ['desktop'], 'product': 'firefox', 'categories': OrderedDict([ # TODO: Just use the IA topics for this. # See bug 979397 ('download-and-install', { 'name': _lazy(u'Download, install and migration'), 'topic': 'download-and-install', 'tags': ['download-and-install'], }), ('privacy-and-security', { 'name': _lazy(u'Privacy and security settings'), 'topic': 'privacy-and-security', 'tags': ['privacy-and-security'], }), ('customize', { 'name': _lazy(u'Customize controls, options and add-ons'), 'topic': 'customize', 'tags': ['customize'], }), ('fix-problems', { 'name': _lazy(u'Fix slowness, crashing, error messages and ' u'other problems'), 'topic': 'fix-problems', 'tags': ['fix-problems'], }), ('tips', { 'name': _lazy(u'Tips and tricks'), 'topic': 'tips', 'tags': ['tips'], }), ('bookmarks', { 'name': _lazy(u'Bookmarks'), 'topic': 'bookmarks', 'tags': ['bookmarks'], }), ('cookies', { 'name': _lazy(u'Cookies'), 'topic': 'cookies', 'tags': ['cookies'], }), ('tabs', { 'name': _lazy(u'Tabs'), 'topic': 'tabs', 'tags': ['tabs'], }), ('websites', { 'name': _lazy(u'Websites'), 'topic': 'websites', 'tags': ['websites'], }), ('sync', { 'name': _lazy(u'Firefox Sync'), 'topic': 'sync', 'tags': ['sync'], }), ('other', { 'name': _lazy(u'Other'), 'topic': 'other', 'tags': ['other'], }), ]) }), ('mobile', { 'name': _lazy(u'Firefox for Android'), 'subtitle': _lazy(u'Web browser for Android smartphones and tablets'), 'extra_fields': ['ff_version', 'os', 'plugins'], 'tags': ['mobile'], 'product': 'mobile', 'categories': OrderedDict([ # TODO: Just use the IA topics for this. 
# See bug 979397 ('download-and-install', { 'name': _lazy(u'Download, install and migration'), 'topic': 'download-and-install', 'tags': ['download-and-install'], }), ('privacy-and-security', { 'name': _lazy(u'Privacy and security settings'), 'topic': 'privacy-and-security', 'tags': ['privacy-and-security'], }), ('customize', { 'name': _lazy(u'Customize controls, options and add-ons'), 'topic': 'customize', 'tags': ['customize'], }), ('fix-problems', { 'name': _lazy(u'Fix slowness, crashing, error messages and ' u'other problems'), 'topic': 'fix-problems', 'tags': ['fix-problems'], }), ('tips', { 'name': _lazy(u'Tips and tricks'), 'topic': 'tips', 'tags': ['tips'], }), ('bookmarks', { 'name': _lazy(u'Bookmarks'), 'topic': 'bookmarks', 'tags': ['bookmarks'], }), ('cookies', { 'name': _lazy(u'Cookies'), 'topic': 'cookies', 'tags': ['cookies'], }), ('tabs', { 'name': _lazy(u'Tabs'), 'topic': 'tabs', 'tags': ['tabs'], }), ('websites', { 'name': _lazy(u'Websites'), 'topic': 'websites', 'tags': ['websites'], }), ('sync', { 'name': _lazy(u'Firefox Sync'), 'topic': 'sync', 'tags': ['sync'], }), ('other', { 'name': _lazy(u'Other'), 'topic': 'other', 'tags': ['other'], }), ]) }), ('ios', { 'name': _lazy(u'Firefox for iOS'), 'subtitle': _lazy(u'Firefox for iPhone, iPad and iPod touch devices'), 'extra_fields': ['ff_version', 'os', 'plugins'], 'tags': ['ios'], 'product': 'ios', 'categories': OrderedDict([ ('install-and-update-firefox-ios', { 'name': _lazy(u'Install and Update'), 'topic': 'install-and-update-firefox-ios', 'tags': ['install-and-update-firefox-ios'] }), ('how-to-use-firefox-ios', { 'name': _lazy(u'How to use Firefox for iOS'), 'topic': 'how-to-use-firefox-ios', 'tags': ['how-to-use-firefox-ios'] }), ('firefox-ios-not-working-expected', { 'name': _lazy(u'Firefox for iOS is not working as expected'), 'topic': 'firefox-ios-not-working-expected', 'tags': ['firefox-ios-not-working-expected'] }), ]) }), ('focus', { 'name': _lazy(u'Firefox Focus'), 'subtitle': _lazy(u'Automatic privacy browser and content blocker'), 'extra_fields': [], 'tags': ['focus-firefox'], 'product': 'focus-firefox', 'categories': OrderedDict([ ('Focus-ios', { 'name': _lazy(u'Firefox Focus for iOS'), 'topic': 'Focus-ios', 'tags': ['Focus-ios'] }), ('firefox-focus-android', { 'name': _lazy(u'Firefox Focus for Android'), 'topic': 'firefox-focus-android', 'tags': ['firefox-focus-android'] }), ]) }), ('firefox-amazon-devices', { 'name': _lazy(u'Firefox for Amazon Devices'), 'subtitle': _lazy(u'Browser for Amazon devices'), 'extra_fields': [], 'tags': ['firefox-amazon'], 'product': 'firefox-amazon-devices', 'categories': OrderedDict([ ('firefox-fire-tv', { 'name': _lazy(u'Firefox for Fire TV'), 'topic': 'firefox-fire-tv', 'tags': ['firefox-fire-tv'] }), ('firefox-echo-show', { 'name': _lazy(u'Firefox for Echo Show'), 'topic': 'firefox-echo-show', 'tags': ['firefox-echo-show'] }), ]) }), ('thunderbird', { 'name': _lazy(u'Thunderbird'), 'subtitle': _lazy(u'Email software for Windows, Mac and Linux'), 'extra_fields': [], 'tags': [], 'product': 'thunderbird', 'categories': OrderedDict([ # TODO: Just use the IA topics for this. 
# See bug 979397 ('download-and-install', { 'name': _lazy(u'Download, install and migration'), 'topic': 'download-install-and-migration', 'tags': ['download-and-install'], }), ('privacy-and-security', { 'name': _lazy(u'Privacy and security settings'), 'topic': 'privacy-and-security-settings', 'tags': ['privacy-and-security'], }), ('customize', { 'name': _lazy(u'Customize controls, options and add-ons'), 'topic': 'customize-controls-options-and-add-ons', 'tags': ['customize'], }), ('fix-problems', { 'name': _lazy(u'Fix slowness, crashing, error messages and ' u'other problems'), 'topic': 'fix-slowness-crashing-error-messages-and-other-' 'problems', 'tags': ['fix-problems'], }), ('calendar', { 'name': _lazy(u'Calendar'), 'topic': 'calendar', 'tags': ['calendar'], }), ('other', { 'name': _lazy(u'Other'), 'topic': 'other', 'tags': ['other'], }), ]) }), ('firefox-lite', { 'name': _lazy(u'Firefox Lite'), 'subtitle': _lazy(u'Mobile browser for Indonesia'), 'extra_fields': [], 'tags': ['firefox-lite'], 'product': 'firefox-lite', 'categories': OrderedDict([ ('get-started', { 'name': _lazy(u'Get started'), 'topic': 'get-started', 'tags': ['get-started'] }), ('fix-problems', { 'name': _lazy(u'Fix problems'), 'topic': 'fix-problems', 'tags': ['fix-problems'] }), ]) }), ('firefox-enterprise', { 'name': _lazy(u'Firefox for Enterprise'), 'subtitle': _lazy(u'Enterprise version of Firefox'), 'extra_fields': [], 'tags': [], 'product': 'firefox-enterprise', 'categories': OrderedDict([ ('deploy-firefox-for-enterprise', { 'name': _lazy(u'Deploy'), 'topic': 'deploy-firefox-for-enterprise', 'tags': ['deployment'], }), ('policies-customization-enterprise', { 'name': _lazy(u'Manage updates, policies & customization'), 'topic': 'policies-customization-enterprise', 'tags': ['customization'], }), ('manage-add-ons-enterprise', { 'name': _lazy(u'Manage add-ons'), 'topic': 'manage-add-ons-enterprise', 'tags': ['customization'], }), ('manage-certificates-firefox-enterprise', { 'name': _lazy(u'Manage certificates'), 'topic': 'manage-certificates-firefox-enterprise', 'tags': ['customization'], }), ]) }), ('firefox-reality', { 'name': _lazy(u'Firefox Reality'), 'subtitle': _lazy(u'Firefox for Virtual Reality'), 'extra_fields': [], 'tags': [], 'product': 'firefox-reality', 'categories': OrderedDict([ ('get-started', { 'name': _lazy(u'Get started with Firefox Reality'), 'topic': 'get-started', 'tags': ['get-started'], }), ('troubleshooting-reality', { 'name': _lazy(u'Troubleshooting Firefox Reality'), 'topic': 'troubleshooting-reality', 'tags': ['troubleshooting'], }), ]) }), ('firefox-preview', { 'name': _lazy(u'Firefox Preview'), 'subtitle': _lazy(u'Firefox for Android'), 'extra_fields': [], 'tags': [], 'product': 'firefox-preview', 'categories': OrderedDict([ ('install-and-update-firefox-preview', { 'name': _lazy(u'Install and Update'), 'topic': 'install-and-update', 'tags': ['download-and-install'], }), ('how-to-use-firefox-preview', { 'name': _lazy(u'How do I use Firefox Preview'), 'topic': 'how-do-i-use-firefox-preview', 'tags': ['tips'], }), ('browsing-firefox-preview', { 'name': _lazy(u'Browsing'), 'topic': 'browsing-preview', 'tags': ['tips'], }), ('library-firefox-preview', { 'name': _lazy(u'Library'), 'topic': 'library', 'tags': ['library'], }), ('sync-firefox-preview', { 'name': _lazy(u'Sync'), 'topic': 'sync-preview', 'tags': ['sync'], }), ('privacy-and-security-firefox-preview', { 'name': _lazy(u'Privacy and Security'), 'topic': 'privacy-and-security', 'tags': ['privacy-and-security'], }), 
('fix-problems-with-firefox-preview', { 'name': _lazy(u'Fix problems with Firefox Preview'), 'topic': 'fix-problems-firefox-preview', 'tags': ['fix-problems'], }), ('settings-and-preferences-firefox-preview', { 'name': _lazy(u'Settings and Preferences'), 'topic': 'settings-prefs-preview', 'tags': ['customize'], }), ('advanced-settings-firefox-preview', { 'name': _lazy(u'Advanced Settings'), 'topic': 'advanced-settings-preview', 'tags': ['customize'], }), ]) }), ('firefox-lockwise', { 'name': _lazy(u'Firefox Lockwise'), 'subtitle': _lazy(u'Firefox Lockwise'), 'extra_fields': [], 'tags': [], 'product': 'firefox-lockwise', 'categories': OrderedDict([ ('install-and-set-up', { 'name': _lazy(u'Install and set up'), 'topic': 'install-lockwise', 'tags': ['install-and-set-up'], }), ('manage-settings-and-logins', { 'name': _lazy(u'Manage settings and logins'), 'topic': 'lockwise-settings', 'tags': ['settings-and-logins'], }), ('fix-problems-with-firefox-lockwise', { 'name': _lazy(u'Fix problems with Firefox Lockwise'), 'topic': 'fix-problems-lockwise', 'tags': ['fix-problems'], }), ]) }), ('other', { 'name': _lazy(u'Other Mozilla products'), 'subtitle': '', 'product': '', 'html': _lazy(u'This site only provides support for some of our products. ' u'For other support, please find your product below.' u'<ul class="product-support">' u'<li><a href="http://www.seamonkey-project.org/doc/">' u'SeaMonkey support</a></li>' u'<li><a ' u'href="/questions/new/thunderbird">' u'Lightning support</a></li>' u'</ul>'), 'categories': OrderedDict([]), 'deadend': True, }), ]) def add_backtrack_keys(products): """Insert 'key' keys so we can go from product or category back to key.""" for p_k, p_v in products.iteritems(): p_v['key'] = p_k for c_k, c_v in p_v['categories'].iteritems(): c_v['key'] = c_k add_backtrack_keys(products)
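
# --- Editor's hedged sketch (not part of the original config) ---
# It illustrates what add_backtrack_keys() buys you: once it has run, every
# product and category dict carries a 'key' entry pointing back to its own
# OrderedDict key, so code holding only the nested dict can rebuild slugs.
# All names below come from the `products` mapping defined above.

def _demo_backtrack_keys():
    desktop = products['desktop']
    assert desktop['key'] == 'desktop'
    category = desktop['categories']['fix-problems']
    assert category['key'] == 'fix-problems'
    # e.g. rebuild an AAQ URL fragment from the backtrack keys alone
    return '/questions/new/%s/%s' % (desktop['key'], category['key'])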
bsd-3-clause
1,854,020,737,929,702,700
34.64588
83
0.436114
false
chaen/DIRAC
ResourceStatusSystem/Service/ResourceManagementHandler.py
1
6218
'''
  ResourceManagementHandler

  Module that allows users to access the ResourceManagementDB remotely.

'''

from DIRAC import gConfig, S_OK, gLogger
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.ResourceStatusSystem.Utilities import Synchronizer
from DIRAC.ResourceStatusSystem.DB.ResourceManagementDB import ResourceManagementDB

__RCSID__ = '$Id$'


def initializeResourceManagementHandler(_serviceInfo):
  '''
    Handler initialization, where we set the ResourceManagementDB as global db.
  '''

  global db
  db = ResourceManagementDB()

  syncObject = Synchronizer.Synchronizer()
  gConfig.addListenerToNewVersionEvent(syncObject.sync)

  return S_OK()

################################################################################


class ResourceManagementHandler(RequestHandler):
  '''
  The ResourceManagementHandler exposes the DB front-end functions through an
  XML-RPC server, functionalities inherited from
  :class:`DIRAC.Core.DISET.RequestHandler.RequestHandler`

  According to the ResourceManagementDB philosophy, only functions of the type:
  - insert
  - select
  - delete
  - addOrModify

  are exposed. If you need anything more complicated, either look for it on the
  :class:`ResourceManagementClient`, or code it yourself. This way the DB and
  the Service are kept clean and tidied.

  You can use this service directly, but you MUST NOT DO IT. Use it through the
  :class:`ResourceManagementClient`. It offers in the worst case as good
  performance as the :class:`ResourceManagementHandler`, if not better.

   >>> from DIRAC.Core.DISET.RPCClient import RPCClient
   >>> server = RPCClient("ResourceStatus/ResourceManagement")
  '''

  def __init__(self, *args, **kwargs):
    super(ResourceManagementHandler, self).__init__(*args, **kwargs)

  @staticmethod
  def __logResult(methodName, result):
    '''
      Method that writes error messages to the log
    '''

    if not result['OK']:
      gLogger.error('%s : %s' % (methodName, result['Message']))

  @staticmethod
  def setDatabase(database):
    '''
    This method lets us inherit from this class and overwrite the database
    object without having problems with the global variables.

    :Parameters:
      **database** - `MySQL`
        database used by this handler

    :return: None
    '''
    global db
    db = database

  types_insert = [basestring, dict]

  def export_insert(self, table, params):
    '''
    This method is a bridge to access :class:`ResourceManagementDB` remotely.
    It adds neither processing nor validation. If you need to know more about
    this method, keep reading the database documentation.

    :Parameters:
      **table** - `string` or `dict`
        should contain the table from which querying
        if it's a `dict` the query comes from a client prior to v6r18

      **params** - `dict`
        arguments for the mysql query. Currently it is being used only for column selection.
        For example: meta = { 'columns' : [ 'Name' ] } will return only the 'Name' column.

    :return: S_OK() || S_ERROR()
    '''

    gLogger.info('insert: %s %s' % (table, params))

    # remove unnecessary key generated by locals()
    del params['self']

    res = db.insert(table, params)
    self.__logResult('insert', res)

    return res

  types_select = [[basestring, dict], dict]

  def export_select(self, table, params):
    '''
    This method is a bridge to access :class:`ResourceManagementDB` remotely.
    It adds neither processing nor validation. If you need to know more about
    this method, keep reading the database documentation.

    :Parameters:
      **table** - `string` or `dict`
        should contain the table from which querying
        if it's a `dict` the query comes from a client prior to v6r18

      **params** - `dict`
        arguments for the mysql query.
        Currently it is being used only for column selection.
        For example: meta = { 'columns' : [ 'Name' ] } will return only the 'Name' column.

    :return: S_OK() || S_ERROR()
    '''

    gLogger.info('select: %s %s' % (table, params))

    res = db.select(table, params)
    self.__logResult('select', res)

    return res

  types_delete = [[basestring, dict], dict]

  def export_delete(self, table, params):
    '''
    This method is a bridge to access :class:`ResourceManagementDB` remotely.
    It adds neither processing nor validation. If you need to know more about
    this method, keep reading the database documentation.

    :Parameters:
      **table** - `string` or `dict`
        should contain the table from which querying
        if it's a `dict` the query comes from a client prior to v6r18

      **params** - `dict`
        arguments for the mysql query. Currently it is being used only for column selection.
        For example: meta = { 'columns' : [ 'Name' ] } will return only the 'Name' column.

    :return: S_OK() || S_ERROR()
    '''

    gLogger.info('delete: %s %s' % (table, params))

    res = db.delete(table, params)
    self.__logResult('delete', res)

    return res

  types_addOrModify = [[basestring, dict], dict]

  def export_addOrModify(self, table, params):
    '''
    This method is a bridge to access :class:`ResourceManagementDB` remotely.
    It adds neither processing nor validation. If you need to know more about
    this method, keep reading the database documentation.

    :Parameters:
      **table** - `string` or `dict`
        should contain the table from which querying
        if it's a `dict` the query comes from a client prior to v6r18

      **params** - `dict`
        arguments for the mysql query. Currently it is being used only for column selection.
        For example: meta = { 'columns' : [ 'Name' ] } will return only the 'Name' column.

    :return: S_OK() || S_ERROR()
    '''

    gLogger.info('addOrModify: %s %s' % (table, params))

    res = db.addOrModify(table, params)
    self.__logResult('addOrModify', res)

    return res

################################################################################
# EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
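
# --- Editor's hedged usage sketch (not part of the handler) ---
# A raw RPC round-trip against this service looks roughly like the snippet
# below, mirroring the class docstring. The table name 'AccountingCache' and
# the meta dict are illustrative assumptions, not values mandated by the DB;
# production code should go through ResourceManagementClient instead.
#
#   >>> from DIRAC.Core.DISET.RPCClient import RPCClient
#   >>> server = RPCClient('ResourceStatus/ResourceManagement')
#   >>> result = server.select('AccountingCache', {'meta': {'columns': ['Name']}})
#   >>> if result['OK']:
#   ...     print result['Value']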
gpl-3.0
3,929,100,881,457,097,700
30.72449
92
0.660663
false
fcopantoja/djangomx
djangomx/blog/models.py
1
3689
# coding: utf-8 import os from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.core import urlresolvers from django.db import models from django.utils.translation import ugettext as _ from core.utils import get_filename class Category(models.Model): """ Category Model """ title = models.CharField( verbose_name=_(u'Título'), help_text=_(u' '), max_length=255 ) slug = models.SlugField( verbose_name=_(u'Slug'), help_text=_(u'Identificador Uri'), max_length=255, unique=True ) description = models.CharField( verbose_name=_(u'Descripción'), help_text=_(u' '), max_length=255, blank=True ) is_active = models.BooleanField(default=True) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) class Meta: verbose_name = _(u'Categoría') verbose_name_plural = _(u'Categorías') def __unicode__(self): return "%s" % (self.title,) def get_img_path(instance, filename): name, ext = os.path.splitext(filename) return 'blog/%s' % get_filename(ext) class Post(models.Model): """ Post Model """ title = models.CharField( verbose_name=_(u'Título'), help_text=_(u' '), max_length=255 ) description = models.TextField( blank=True, null=True, help_text=u'Descripción usada para SEO' ) slug = models.SlugField( verbose_name=_(u'Slug'), help_text=_(u'Identificador Uri'), max_length=255, unique=True ) image = models.ImageField( verbose_name=_(u'Imágen'), help_text=_(u'Imagen destacada'), blank=True, upload_to=get_img_path ) content = models.TextField(help_text=_(u'Este es el contenido de el Post'),) extract = models.TextField( blank=True, help_text=_(u'Este es solo un resumen de el Post que se muestra en la \ lista de posts'), ) category = models.ForeignKey( Category, verbose_name=_(u'Categoría'), null=True, blank=True ) author = models.ForeignKey(User, verbose_name=_(u'Autor')) published_at = models.DateTimeField( verbose_name=_(u'Fecha de publicación') ) likes = models.PositiveIntegerField(verbose_name=_(u'Likes'), default=0) is_active = models.BooleanField(default=True) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) class Meta: verbose_name = _(u'Posts') verbose_name_plural = _(u'Posts') ordering = ["-created_at"] def __unicode__(self): return "%s" % (self.title,) def get_admin_url(self): content_type = ContentType.objects.get_for_model(self.__class__) return urlresolvers.reverse( "admin:%s_%s_change" % ( content_type.app_label, content_type.model ), args=(self.id,) ) def get_absolute_url(self): from django.core.urlresolvers import reverse return reverse('blog:view_post', args=[str(self.slug)]) @property def full_url(self): current_site = Site.objects.get_current() return '{}{}'.format(current_site.domain, self.get_absolute_url()) @property def img_full_url(self): if self.image: current_site = Site.objects.get_current() return '{}{}'.format(current_site.domain, self.image.url) else: return ''
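
# --- Editor's hedged usage sketch (not part of the original models) ---
# `get_img_path` gives Django's ImageField a randomized basename under blog/
# while keeping the upload's extension; `get_filename` (from core.utils) is
# assumed to return a unique name for the given extension. The slug and file
# name below are purely illustrative.
#
#   post = Post(title=u'Hola', slug='hola', ...)
#   get_img_path(post, 'portada.png')   # -> 'blog/<random-name>.png'
#   post.get_absolute_url()             # -> reverse('blog:view_post', ['hola'])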
mit
760,292,171,597,744,800
28.44
80
0.602717
false
marvin-ai/marvin-python-toolbox
marvin_python_toolbox/engine_base/engine_base_prediction.py
1
1656
#!/usr/bin/env python # coding=utf-8 # Copyright [2017] [B2W Digital] # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from abc import ABCMeta from .._compatibility import six from .._logging import get_logger from .engine_base_action import EngineBaseOnlineAction __all__ = ['EngineBasePrediction'] logger = get_logger('engine_base_prediction') class EngineBasePrediction(EngineBaseOnlineAction): __metaclass__ = ABCMeta _model = None _metrics = None def __init__(self, **kwargs): self._model = self._get_arg(kwargs=kwargs, arg='model') self._metrics = self._get_arg(kwargs=kwargs, arg='metrics') super(EngineBasePrediction, self).__init__(**kwargs) @property def marvin_model(self): return self._load_obj(object_reference='_model') @marvin_model.setter def marvin_model(self, model): self._save_obj(object_reference='_model', obj=model) @property def marvin_metrics(self): return self._load_obj(object_reference='_metrics') @marvin_metrics.setter def marvin_metrics(self, metrics): self._save_obj(object_reference='_metrics', obj=metrics)
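
# --- Editor's hedged sketch (not part of the toolbox) ---
# EngineBasePrediction is abstract: a concrete engine subclasses it and works
# through the marvin_model / marvin_metrics properties, which lazily load and
# persist the artifacts via _load_obj/_save_obj. The execute() signature below
# is an assumption for illustration, not a documented contract.

class ExamplePredictor(EngineBasePrediction):
    def execute(self, input_message, params, **kwargs):
        # first access loads the persisted model through _load_obj('_model')
        model = self.marvin_model
        return model.predict(input_message)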
apache-2.0
1,667,712,753,690,461,200
28.571429
74
0.701087
false
weiweihuanghuang/wei-glyphs-scripts
Spacing/Show Kerning Pairs Exception.py
1
3326
#MenuTitle: Show Kerning Pairs Exception # -*- coding: utf-8 -*- __doc__=""" Show Kerning Exception Pairs for this glyph in a new tab. """ import GlyphsApp thisFont = Glyphs.font Doc = Glyphs.currentDocument selectedLayers = thisFont.selectedLayers namesOfSelectedGlyphs = [ l.parent.name for l in selectedLayers if hasattr(l.parent, 'name')] namesOfSelectedGlyphs = [i for i in namesOfSelectedGlyphs if i != "/space"] selectedMaster = thisFont.selectedFontMaster masterID = selectedMaster.id # Look for: # New Tab for every glyph # to make it every glyph new tab def nameMaker(kernGlyphOrGroup, side): # if this is a kerning group if kernGlyphOrGroup[0] == "@": for g in thisFont.glyphs: # right glyph if side == "right": # left side of right glyph if g.leftKerningGroup == kernGlyphOrGroup[7:]: return g.name if side == "left": # right side of left glyph if g.rightKerningGroup == kernGlyphOrGroup[7:]: return g.name else: return thisFont.glyphForId_(kernGlyphOrGroup).name # One Tab for all editString = u"""""" for thisGlyphName in namesOfSelectedGlyphs: # New Tab for every glyph # editString = u"""""" thisGlyph = thisFont.glyphs[thisGlyphName] rGroupName = str(thisGlyph.rightKerningGroup) lGroupName = str(thisGlyph.leftKerningGroup) for L in thisFont.kerning[ masterID ]: try: # If L matches thisGlyph or its right side group # @L R # if L[0] == "@" and rGroupName == L[7:] or rGroupName == thisFont.glyphForId_(L).name: # # for every R counterpart to L in the kerning pairs of rGroupName # for R in thisFont.kerning[masterID][L]: # # R is not group kerning # if thisFont.kerning[masterID][L][R] != 0 and R[0] != "@": # print "L: @L R\t\t", L, R # print "\t", "%s, %s" % (thisGlyphName, nameMaker(R, "right")) # kernPair = "/%s/%s " % (thisGlyphName, nameMaker(R, "right")) # editString += kernPair # L @R, L R if thisFont.glyphForId_(L).name == thisGlyph.name: # for every R counterpart to L in the kerning pairs of rGroupName for R in thisFont.kerning[masterID][L]: if thisFont.kerning[masterID][L][R] < 8e+10: # print "L: L @R, L R\t", L, R # print "\t", "%s, %s" % (thisGlyphName, nameMaker(R, "right")) kernPair = "/%s/%s " % (thisGlyphName, nameMaker(R, "right")) editString += kernPair except: pass for R in thisFont.kerning[masterID][L]: try: # If R matches thisGlyph or its left side group # L @R # if R[0] == "@" and lGroupName == R[7:] or lGroupName == thisFont.glyphForId_(R).name: # if thisFont.kerning[masterID][L][R] != 0 and L[0] != "@": # print "R: L @R\t\t", L, R # print "\t", "%s, %s" % (nameMaker(L, "left"), thisGlyphName) # kernPair = "/%s/%s " % (nameMaker(L, "left"), thisGlyphName) # editString += kernPair # @L R, L R if thisFont.glyphForId_(R).name == thisGlyph.name: if thisFont.kerning[masterID][L][R] < 8e+10: # print "R: @L R, L R\t", L, R # print "\t", "%s, %s" % (nameMaker(L, "left"), thisGlyphName) kernPair = "/%s/%s " % (nameMaker(L, "left"), thisGlyphName) editString += kernPair except: pass # New Tab for every glyph # thisFont.newTab(editString) # One Tab for all # editString += "\n" thisFont.newTab(editString)
apache-2.0
-6,634,094,482,841,212,000
31.617647
93
0.634396
false
a2bondar/UWaterlooDriver
tests/test_terms.py
1
6323
from pytest import fixture import vcr from uwaterloodriver import UW_Driver @fixture def list_keys(): # Responsible for only returning the test data return ['current_term', 'previous_term', 'next_term', 'listings'] @fixture def courses_keys(): return ['units', 'catalog_number', 'subject', 'title'] @fixture def examschedule_keys(): return ['course', 'sections'] @fixture def examschedule_sections_keys(): return ['section', 'day', 'date', 'start_time', 'end_time', 'location', 'notes'] @fixture def schedule_keys(): return ['subject', 'catalog_number', 'units', 'title', 'note', 'class_number', 'section', 'campus', 'associated_class', 'related_component_1', 'related_component_2', 'enrollment_capacity', 'enrollment_total', 'waiting_capacity', 'waiting_total', 'topic', 'reserves', 'classes', 'held_with', 'term', 'academic_level', 'last_updated'] @fixture def enrollment_keys(): return ['subject', 'catalog_number', 'class_number', 'section', 'enrollment_capacity', 'enrollment_total', 'waiting_capacity', 'waiting_total', 'last_updated'] @fixture def importantdates_keys(): return ['id', 'title', 'body', 'body_raw', 'special_notes', 'special_notes_raw', 'audience', 'term', 'term_id', 'start_date', 'end_date', 'date_tbd', 'date_na', 'link', 'site', 'vid', 'updated'] @fixture def infosessions_keys(): return ['id', 'employer', 'date', 'start_time', 'end_time', 'description', 'website', 'building', 'audience', 'link'] @vcr.use_cassette('vcr_cassettes/terms/terms_list.yml', filter_query_parameters=['key']) def test_terms_list(list_keys): """Tests an API call to /terms/list endpoint.""" uw_driver = UW_Driver() response = uw_driver.terms_list() assert isinstance(response, dict) assert set(list_keys).issubset(response.keys()), "All terms_list keys should be present." @vcr.use_cassette('vcr_cassettes/terms/terms_courses.yml', filter_query_parameters=['key']) def test_terms_courses(courses_keys): """Tests an API call to /terms/{term_id}/courses endpoint. term_id = 1161""" uw_driver = UW_Driver() response = uw_driver.terms_courses(1161) assert isinstance(response, list) assert isinstance(response[0], dict) assert set(courses_keys).issubset(response[0].keys()), "All terms_courses keys should be present." @vcr.use_cassette('vcr_cassettes/terms/terms_examschedule.yml', filter_query_parameters=['key']) def test_terms_examschedule(examschedule_keys, examschedule_sections_keys): """Tests an API call to /terms/{term}/examschedule endpoint. term_id = 1139""" uw_driver = UW_Driver() response = uw_driver.terms_examschedule(1139) assert isinstance(response, list) assert isinstance(response[0], dict) assert set(examschedule_keys).issubset(response[0].keys()), "All terms_examschedule keys should be present." assert set(examschedule_sections_keys).issubset(response[0]["sections"][0].keys()), "All terms_examschedule_sections keys should be present." @vcr.use_cassette('vcr_cassettes/terms/terms_schedule.yml', filter_query_parameters=['key']) def test_terms_schedule(schedule_keys): """Tests an API call to /terms/{term}/{subject}/schedule endpoint. term_id = 1139, subject = MATH""" uw_driver = UW_Driver() response = uw_driver.terms_schedule(term_id=1139, subject="MATH") assert isinstance(response, list) assert isinstance(response[0], dict) assert set(schedule_keys).issubset(response[0].keys()), "All terms_schedule keys should be present." 
@vcr.use_cassette('vcr_cassettes/terms/terms_schedule_with_catalog.yml', filter_query_parameters=['key'])
def test_terms_schedule_catalog(schedule_keys):
    """Tests an API call to /terms/{term}/{subject}/{catalog_number}/schedule endpoint.
    term_id = 1139, subject = MATH, catalog_number = 115"""

    uw_driver = UW_Driver()
    response = uw_driver.terms_schedule(term_id=1139, subject="MATH", catalog_number=115)

    assert isinstance(response, list)
    assert isinstance(response[0], dict)
    assert set(schedule_keys).issubset(response[0].keys()), "All terms_schedule keys should be present."


@vcr.use_cassette('vcr_cassettes/terms/terms_enrollment.yml', filter_query_parameters=['key'])
def test_terms_enrollment(enrollment_keys):
    """Tests an API call to /terms/{term}/enrollment endpoint.
    term_id = 1159"""

    uw_driver = UW_Driver()
    response = uw_driver.terms_enrollment(1159)

    assert isinstance(response, list)
    assert isinstance(response[0], dict)
    assert set(enrollment_keys).issubset(response[0].keys()), "All terms_enrollment keys should be present."


@vcr.use_cassette('vcr_cassettes/terms/terms_enrollment_with_subject.yml', filter_query_parameters=['key'])
def test_terms_enrollment_subject(enrollment_keys):
    """Tests an API call to /terms/{term}/{subject}/enrollment endpoint.
    term_id = 1159, subject = ITAL"""

    uw_driver = UW_Driver()
    response = uw_driver.terms_enrollment(1159, subject="ITAL")

    assert isinstance(response, list)
    assert isinstance(response[0], dict)
    assert set(enrollment_keys).issubset(response[0].keys()), "All terms_enrollment keys should be present."


@vcr.use_cassette('vcr_cassettes/terms/terms_importantdates.yml', filter_query_parameters=['key'])
def test_terms_importantdates(importantdates_keys):
    """Tests an API call to /terms/{term}/importantdates endpoint.
    term_id = 1179"""

    uw_driver = UW_Driver()
    response = uw_driver.terms_importantdates(1179)

    assert isinstance(response, list)
    assert isinstance(response[0], dict)
    assert set(importantdates_keys).issubset(response[0].keys()), "All terms_importantdates keys should be present."


@vcr.use_cassette('vcr_cassettes/terms/terms_infosessions.yml', filter_query_parameters=['key'])
def test_terms_infosessions(infosessions_keys):
    """Tests an API call to /terms/{term}/infosessions endpoint.
    term_id = 1141"""

    uw_driver = UW_Driver()
    response = uw_driver.terms_infosessions(1141)

    assert isinstance(response, list)
    assert isinstance(response[0], dict)
    assert set(infosessions_keys).issubset(response[0].keys()), "All terms_infosessions keys should be present."
mit
-4,708,381,211,107,855,000
38.767296
145
0.692235
false
1upon0/rfid-auth-system
GUI/printer/Pillow-2.7.0/Tests/bench_cffi_access.py
1
1597
from helper import unittest, PillowTestCase, hopper # Not running this test by default. No DOS against Travis CI. from PIL import PyAccess import time def iterate_get(size, access): (w, h) = size for x in range(w): for y in range(h): access[(x, y)] def iterate_set(size, access): (w, h) = size for x in range(w): for y in range(h): access[(x, y)] = (x % 256, y % 256, 0) def timer(func, label, *args): iterations = 5000 starttime = time.time() for x in range(iterations): func(*args) if time.time()-starttime > 10: print("%s: breaking at %s iterations, %.6f per iteration" % ( label, x+1, (time.time()-starttime)/(x+1.0))) break if x == iterations-1: endtime = time.time() print("%s: %.4f s %.6f per iteration" % ( label, endtime-starttime, (endtime-starttime)/(x+1.0))) class BenchCffiAccess(PillowTestCase): def test_direct(self): im = hopper() im.load() # im = Image.new( "RGB", (2000, 2000), (1, 3, 2)) caccess = im.im.pixel_access(False) access = PyAccess.new(im, False) self.assertEqual(caccess[(0, 0)], access[(0, 0)]) print ("Size: %sx%s" % im.size) timer(iterate_get, 'PyAccess - get', im.size, access) timer(iterate_set, 'PyAccess - set', im.size, access) timer(iterate_get, 'C-api - get', im.size, caccess) timer(iterate_set, 'C-api - set', im.size, caccess) if __name__ == '__main__': unittest.main() # End of file
apache-2.0
6,695,364,870,849,879,000
25.616667
73
0.555416
false
reinbach/mdbase
tests/test_worker.py
1
1252
import pytest import zmq from mdbase import constants from mdbase.broker import MajorDomoBroker from mdbase.worker import MajorDomoWorker @pytest.fixture def broker_url(): return "tcp://localhost:6666" class TestMajorDomoWorker(): def test_instantiate(self, broker_url): """Test instantiating worker model""" service_name = b"echo" verbose = False w = MajorDomoWorker(broker_url, service_name, verbose) assert w.broker == broker_url assert w.service == service_name assert w.verbose == verbose assert isinstance(w.ctx, zmq.Context) assert isinstance(w.poller, zmq.Poller) @pytest.mark.xfail #TODO determine how to mock the necessary parts def test_send_to_broker_model(self, broker_url): """Test send message to broker""" b = MajorDomoBroker(False) w = MajorDomoWorker(broker_url, b"echo", False) w.send_to_broker(constants.W_REQUEST, b.service, [b"test"]) @pytest.mark.xfail #TODO need to test send to broker method first def test_reconnect_to_broker_model(self, broker_url): """Test reconnecting to broker""" b = MajorDomoBroker(False) w = MajorDomoWorker(broker_url, b"echo", False)
bsd-3-clause
4,145,378,568,227,254,000
31.947368
67
0.669329
false
ddico/odoo
addons/hr_recruitment/models/hr_job.py
1
6632
# Part of Odoo. See LICENSE file for full copyright and licensing details. import ast from odoo import api, fields, models, _ class Job(models.Model): _name = "hr.job" _inherit = ["mail.alias.mixin", "hr.job"] _order = "state desc, name asc" @api.model def _default_address_id(self): return self.env.company.partner_id def _get_default_favorite_user_ids(self): return [(6, 0, [self.env.uid])] address_id = fields.Many2one( 'res.partner', "Job Location", default=_default_address_id, domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]", help="Address where employees are working") application_ids = fields.One2many('hr.applicant', 'job_id', "Applications") application_count = fields.Integer(compute='_compute_application_count', string="Application Count") new_application_count = fields.Integer( compute='_compute_new_application_count', string="New Application", help="Number of applications that are new in the flow (typically at first step of the flow)") manager_id = fields.Many2one( 'hr.employee', related='department_id.manager_id', string="Department Manager", readonly=True, store=True) user_id = fields.Many2one('res.users', "Responsible", tracking=True) hr_responsible_id = fields.Many2one( 'res.users', "HR Responsible", tracking=True, help="Person responsible of validating the employee's contracts.") document_ids = fields.One2many('ir.attachment', compute='_compute_document_ids', string="Documents") documents_count = fields.Integer(compute='_compute_document_ids', string="Document Count") alias_id = fields.Many2one( 'mail.alias', "Alias", ondelete="restrict", required=True, help="Email alias for this job position. New emails will automatically create new applicants for this job position.") color = fields.Integer("Color Index") is_favorite = fields.Boolean(compute='_compute_is_favorite', inverse='_inverse_is_favorite') favorite_user_ids = fields.Many2many('res.users', 'job_favorite_user_rel', 'job_id', 'user_id', default=_get_default_favorite_user_ids) def _compute_is_favorite(self): for job in self: job.is_favorite = self.env.user in job.favorite_user_ids def _inverse_is_favorite(self): unfavorited_jobs = favorited_jobs = self.env['hr.job'] for job in self: if self.env.user in job.favorite_user_ids: unfavorited_jobs |= job else: favorited_jobs |= job favorited_jobs.write({'favorite_user_ids': [(4, self.env.uid)]}) unfavorited_jobs.write({'favorite_user_ids': [(3, self.env.uid)]}) def _compute_document_ids(self): applicants = self.mapped('application_ids').filtered(lambda self: not self.emp_id) app_to_job = dict((applicant.id, applicant.job_id.id) for applicant in applicants) attachments = self.env['ir.attachment'].search([ '|', '&', ('res_model', '=', 'hr.job'), ('res_id', 'in', self.ids), '&', ('res_model', '=', 'hr.applicant'), ('res_id', 'in', applicants.ids)]) result = dict.fromkeys(self.ids, self.env['ir.attachment']) for attachment in attachments: if attachment.res_model == 'hr.applicant': result[app_to_job[attachment.res_id]] |= attachment else: result[attachment.res_id] |= attachment for job in self: job.document_ids = result[job.id] job.documents_count = len(job.document_ids) def _compute_application_count(self): read_group_result = self.env['hr.applicant'].read_group([('job_id', 'in', self.ids)], ['job_id'], ['job_id']) result = dict((data['job_id'][0], data['job_id_count']) for data in read_group_result) for job in self: job.application_count = result.get(job.id, 0) def _get_first_stage(self): self.ensure_one() return self.env['hr.recruitment.stage'].search([ 
'|', ('job_ids', '=', False), ('job_ids', '=', self.id)], order='sequence asc', limit=1) def _compute_new_application_count(self): for job in self: job.new_application_count = self.env["hr.applicant"].search_count( [("job_id", "=", job.id), ("stage_id", "=", job._get_first_stage().id)] ) def _alias_get_creation_values(self): values = super(Job, self)._alias_get_creation_values() values['alias_model_id'] = self.env['ir.model']._get('hr.applicant').id if self.id: values['alias_defaults'] = defaults = ast.literal_eval(self.alias_defaults or "{}") defaults.update({ 'job_id': self.id, 'department_id': self.department_id.id, 'company_id': self.department_id.company_id.id if self.department_id else self.company_id.id, }) return values @api.model def create(self, vals): vals['favorite_user_ids'] = vals.get('favorite_user_ids', []) + [(4, self.env.uid)] new_job = super(Job, self).create(vals) utm_linkedin = self.env.ref("utm.utm_source_linkedin", raise_if_not_found=False) if utm_linkedin: source_vals = { 'source_id': utm_linkedin.id, 'job_id': new_job.id, } self.env['hr.recruitment.source'].create(source_vals) return new_job def _creation_subtype(self): return self.env.ref('hr_recruitment.mt_job_new') def action_get_attachment_tree_view(self): action = self.env.ref('base.action_attachment').read()[0] action['context'] = { 'default_res_model': self._name, 'default_res_id': self.ids[0] } action['search_view_id'] = (self.env.ref('hr_recruitment.ir_attachment_view_search_inherit_hr_recruitment').id, ) action['domain'] = ['|', '&', ('res_model', '=', 'hr.job'), ('res_id', 'in', self.ids), '&', ('res_model', '=', 'hr.applicant'), ('res_id', 'in', self.mapped('application_ids').ids)] return action def close_dialog(self): return {'type': 'ir.actions.act_window_close'} def edit_dialog(self): form_view = self.env.ref('hr.view_hr_job_form') return { 'name': _('Job'), 'res_model': 'hr.job', 'res_id': self.id, 'views': [(form_view.id, 'form'),], 'type': 'ir.actions.act_window', 'target': 'inline' }
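
# --- Editor's hedged illustration (not part of the module) ---
# _compute_application_count above leans on read_group()'s output shape: one
# dict per job_id group carrying a '<field>_count' entry. Schematically, for
# two jobs with ids 1 and 2:
#
#   read_group_result = [
#       {'job_id': (1, 'Developer'), 'job_id_count': 4, '__domain': [...]},
#       {'job_id': (2, 'Designer'), 'job_id_count': 1, '__domain': [...]},
#   ]
#   result = {1: 4, 2: 1}   # what the dict comprehension builds
#   # jobs absent from the mapping fall back to 0 via result.get(job.id, 0)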
agpl-3.0
-2,389,517,044,412,755,000
44.424658
190
0.592129
false
emijrp/wmcharts
wmchart0004.py
1
3064
# -*- coding: utf-8 -*- # Copyright (C) 2011-2014 emijrp <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from wmchart0000 import * def main(): filename = 'wmchart0004.php' title = 'Deletions and restorations' description = "This chart shows how many deletions and restorations were made in the last days." projectdbs = getProjectDatabases() queries = [ ["Deletions", "SELECT CONCAT(YEAR(log_timestamp),'-',LPAD(MONTH(log_timestamp),2,'0'),'-',LPAD(DAY(log_timestamp),2,'0'),'T00:00:00Z') AS date, COUNT(*) AS count FROM logging WHERE log_timestamp>=DATE_ADD(NOW(), INTERVAL -%d DAY) AND log_action='delete' GROUP BY date ORDER BY date ASC" % (lastdays)], ["Article deletions", "SELECT CONCAT(YEAR(log_timestamp),'-',LPAD(MONTH(log_timestamp),2,'0'),'-',LPAD(DAY(log_timestamp),2,'0'),'T00:00:00Z') AS date, COUNT(*) AS count FROM logging WHERE log_namespace=0 AND log_timestamp>=DATE_ADD(NOW(), INTERVAL -%d DAY) AND log_action='delete' GROUP BY date ORDER BY date ASC" % (lastdays)], ["Restorations", "SELECT CONCAT(YEAR(log_timestamp),'-',LPAD(MONTH(log_timestamp),2,'0'),'-',LPAD(DAY(log_timestamp),2,'0'),'T00:00:00Z') AS date, COUNT(*) AS count FROM logging WHERE log_timestamp>=DATE_ADD(NOW(), INTERVAL -%d DAY) AND log_action='restore' GROUP BY date ORDER BY date ASC" % (lastdays)], ] projects = runQueries(projectdbs=projectdbs, queries=queries) select = generateHTMLSelect(projects) var1 = [] var2 = [] var3 = [] for project, values in projects: var1.append(values["Deletions"]) var2.append(values["Article deletions"]) var3.append(values["Restorations"]) js = """function p() { var d1 = %s; var d2 = %s; var d3 = %s; var placeholder = $("#placeholder"); var selected = document.getElementById('projects').selectedIndex; var data = [{ data: d1[selected], label: "Deletions"}, { data: d2[selected], label: "Article deletions"}, { data: d3[selected], label: "Restorations"}]; var options = { xaxis: { mode: "time" }, lines: {show: true}, points: {show: true}, legend: {noColumns: 3}, grid: { hoverable: true }, }; $.plot(placeholder, data, options); } p();""" % (str(var1), str(var2), str(var3)) output = generateHTML(title=title, description=description, select=select, js=js) writeHTML(filename=filename, output=output) if __name__ == '__main__': main()
gpl-3.0
4,346,915,588,700,290,000
51.827586
337
0.666123
false
talkoopaiva/talkoohakemisto-api
tests/views/test_types.py
1
3151
import operator from flask import url_for import pytest from talkoohakemisto import serializers from talkoohakemisto.extensions import db from tests import factories @pytest.mark.usefixtures('request_ctx', 'database') class TestTypeIndex(object): @pytest.fixture def types(self): types = [ factories.VoluntaryWorkTypeFactory(), factories.VoluntaryWorkTypeFactory(), ] db.session.commit() return types @pytest.fixture def response(self, client, types): return client.get(url_for('type.index')) def test_url(self): assert url_for('type.index') == '/types' def test_returns_200(self, response): assert response.status_code == 200 def test_response_has_proper_content_type(self, response): assert response.mimetype == 'application/vnd.api+json' def test_returns_types_as_json(self, response, types): serializer = serializers.VoluntaryWorkTypeSerializer( sorted(types, key=operator.attrgetter('name')), many=True ) assert response.json == { 'types': serializer.data } @pytest.mark.usefixtures('request_ctx', 'database') class TestTypeGetSingle(object): @pytest.fixture def type(self): type = factories.VoluntaryWorkTypeFactory() db.session.commit() return type @pytest.fixture def response(self, client, type): return client.get(url_for('type.get', id=type.id)) def test_url(self): assert url_for('type.get', id=123) == '/types/123' def test_returns_200(self, response): assert response.status_code == 200 def test_response_has_proper_content_type(self, response): assert response.mimetype == 'application/vnd.api+json' def test_returns_type_as_json(self, response, type): serializer = serializers.VoluntaryWorkTypeSerializer( [type], many=True ) assert response.json == { 'types': serializer.data } @pytest.mark.usefixtures('request_ctx', 'database') class TestTypeGetSingleWhenNotFound(object): @pytest.fixture def response(self, client): return client.get(url_for('type.get', id=12345)) def test_returns_404(self, response): assert response.status_code == 404 def test_response_has_proper_content_type(self, response): assert response.mimetype == 'application/vnd.api+json' def test_returns_error_as_json(self, response): assert response.json == { 'message': 'Not found' } @pytest.mark.usefixtures('request_ctx', 'database') class TestTypeGetSingleWithNonIntegerID(object): @pytest.fixture def response(self, client): return client.get('/types/foobar') def test_returns_404(self, response): assert response.status_code == 404 def test_response_has_proper_content_type(self, response): assert response.mimetype == 'application/vnd.api+json' def test_returns_error_as_json(self, response): assert response.json == { 'message': 'Not found' }
mit
-4,321,390,514,477,213,000
27.908257
62
0.645509
false
jmescuderojustel/codeyourblogin-python-django-1.7
src/blog/tools.py
1
1304
from django.conf import settings
import math
from django.core.exceptions import PermissionDenied
from django.shortcuts import render, redirect


class Pager:

    def __init__(self, page, count):

        if page is None or int(page) < 1:
            page = 1
        else:
            page = int(page)

        self.currentPage = page
        self.downLimit = (page - 1) * settings.PAGE_SIZE
        self.upLimit = page * settings.PAGE_SIZE
        self.pages = [page-2, page-1, page, page+1, page+2]
        self.finalPage = int(math.ceil(float(count) / float(settings.PAGE_SIZE)))


def buildPager(page, count):
    return Pager(page, count)


def render_with_user(request, url, template, data, requires_user=True):

    data['currentUrl'] = url

    current_user = request.session['currentUser']

    if current_user is not None:
        data['current_user'] = current_user['name']
        return render(request, template, data)
    elif requires_user is False:
        data['current_user'] = ''
        return render(request, template, data)
    else:
        return redirect('/user/login')


def render_with_user_opt(request, url, template, data):
    return render_with_user(request, url, template, data, False)


def is_user(request):
    return (request.session['currentUser'] is not None)
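
# --- Editor's hedged usage sketch (not part of the original module) ---
# Assuming settings.PAGE_SIZE = 10 (an illustrative value), a pager for page 3
# of 47 items slices rows 20..30 and exposes a window of nearby page numbers:

def _demo_pager():
    pager = buildPager(page=3, count=47)
    assert pager.downLimit == 20 and pager.upLimit == 30
    assert pager.pages == [1, 2, 3, 4, 5]
    assert pager.finalPage == 5   # ceil(47 / 10)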
mit
731,154,896,484,565,500
22.727273
81
0.644939
false
maxalbert/colormap-selector
color_transformations_skimage.py
1
1823
import numpy as np import matplotlib.colors as mcolors from skimage.color import rgb2lab as rgb2lab_skimage from skimage.color import lab2rgb as lab2rgb_skimage class RGBRangeError(Exception): pass def rgb2lab(rgb): rgb = np.asarray(rgb).reshape(1, 1, 3) lab = rgb2lab_skimage(rgb).reshape(3) return lab def lab2rgb(lab, assert_valid=False, clip=False): lab = np.asarray(lab).reshape(1, 1, 3) rgb = lab2rgb_skimage(lab).reshape(3) if assert_valid and ((rgb < 0.0).any() or (rgb > 1.0).any()): raise RGBRangeError() if clip: rgb = np.clip(rgb, 0., 1.) return rgb def lab2rgba(lab, assert_valid=False, clip=False): r, g, b = lab2rgb(lab, assert_valid=assert_valid, clip=clip) return np.array([r, g, b, 1.]) def linear_colormap(pt1, pt2, coordspace='RGB'): """ Define a perceptually linear colormap defined through a line in the CIELab [1] color space. The line is defined by its endpoints `pt1`, `pt2`. The argument `coordspace` can be either `RGB` (the default) or `lab` and specifies whether the coordinates of `pt1`, `pt2` are given in RGB or Lab coordinates. [1] http://dba.med.sc.edu/price/irf/Adobe_tg/models/cielab.html """ if coordspace == 'RGB': pt1 = np.array(rgb2lab(pt1)) pt2 = np.array(rgb2lab(pt2)) elif coordspace == 'Lab': pt1 = np.array(pt1) pt2 = np.array(pt2) else: raise ValueError("Argument 'coordspace' must be either 'RGB' " "or 'Lab'. Got: {}".format(coordspace)) tvals = np.linspace(0, 1, 256) path_vals = np.array([(1-t) * pt1 + t * pt2 for t in tvals]) cmap_vals = np.array([lab2rgb(pt) for pt in path_vals]) #print np.where(cmap_vals < 0) cmap = mcolors.ListedColormap(cmap_vals) return cmap
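
# --- Editor's hedged usage sketch (not part of the original module) ---
# Renders a gradient through a perceptually linear colormap; the two endpoint
# colors are arbitrary choices for illustration.

if __name__ == '__main__':
    import matplotlib.pyplot as plt

    cmap = linear_colormap((0.0, 0.0, 0.5), (1.0, 1.0, 0.6), coordspace='RGB')
    gradient = np.outer(np.ones(10), np.linspace(0.0, 1.0, 256))
    plt.imshow(gradient, aspect='auto', cmap=cmap)
    plt.show()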
mit
1,315,041,971,995,979,500
30.982456
71
0.635217
false
improve-project/platform
models/RehabilitationSetClass.py
1
1448
__author__ = 'tommipor'

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Text, DateTime

Base = declarative_base()


class RehabilitationSetClass(Base):
    __tablename__ = 'RehabilitationSet'

    rehabilitationSetID = Column(String(255), primary_key=True)
    allowedOrganizations = Column(Text)
    exerciseResultIDs = Column(Text)
    patientConditionIDs = Column(Text)
    patientInformationID = Column(String(255))

    def __init__(self, rehabilitationSetID, allowedOrganizations, exerciseResultIDs, patientConditionIDs, patientInformationID):
        self.rehabilitationSetID = rehabilitationSetID
        self.exerciseResultIDs = exerciseResultIDs
        self.patientConditionIDs = patientConditionIDs
        self.patientInformationID = patientInformationID
        self.allowedOrganizations = allowedOrganizations

    def __repr__(self):
        return "<RehabilitationSet(%s, %s, %s, %s, %s)>" % (self.rehabilitationSetID, self.allowedOrganizations, self.exerciseResultIDs, self.patientConditionIDs, self.patientInformationID)

    @property
    def columns(self):
        return [c.name for c in self.__table__.columns]

    @property
    def columnitems(self):
        return dict([(c, getattr(self, c)) for c in self.columns])

    def tojson(self):
        return self.columnitems
bsd-3-clause
-780,814,773,737,549,400
41.617647
197
0.679558
false
SonyCSL/CSLAIER
src/common/nvidia_devices_info.py
1
11234
#!/usr/bin/env python2 import ctypes import platform from logging import getLogger logger = getLogger(__name__) class c_cudaDeviceProp(ctypes.Structure): """ Passed to cudart.cudaGetDeviceProperties() """ _fields_ = [ ('name', ctypes.c_char * 256), ('totalGlobalMem', ctypes.c_size_t), ('sharedMemPerBlock', ctypes.c_size_t), ('regsPerBlock', ctypes.c_int), ('warpSize', ctypes.c_int), ('memPitch', ctypes.c_size_t), ('maxThreadsPerBlock', ctypes.c_int), ('maxThreadsDim', ctypes.c_int * 3), ('maxGridSize', ctypes.c_int * 3), ('clockRate', ctypes.c_int), ('totalConstMem', ctypes.c_size_t), ('major', ctypes.c_int), ('minor', ctypes.c_int), ('textureAlignment', ctypes.c_size_t), ('texturePitchAlignment', ctypes.c_size_t), ('deviceOverlap', ctypes.c_int), ('multiProcessorCount', ctypes.c_int), ('kernelExecTimeoutEnabled', ctypes.c_int), ('integrated', ctypes.c_int), ('canMapHostMemory', ctypes.c_int), ('computeMode', ctypes.c_int), ('maxTexture1D', ctypes.c_int), ('maxTexture1DMipmap', ctypes.c_int), ('maxTexture1DLinear', ctypes.c_int), ('maxTexture2D', ctypes.c_int * 2), ('maxTexture2DMipmap', ctypes.c_int * 2), ('maxTexture2DLinear', ctypes.c_int * 3), ('maxTexture2DGather', ctypes.c_int * 2), ('maxTexture3D', ctypes.c_int * 3), ('maxTexture3DAlt', ctypes.c_int * 3), ('maxTextureCubemap', ctypes.c_int), ('maxTexture1DLayered', ctypes.c_int * 2), ('maxTexture2DLayered', ctypes.c_int * 3), ('maxTextureCubemapLayered', ctypes.c_int * 2), ('maxSurface1D', ctypes.c_int), ('maxSurface2D', ctypes.c_int * 2), ('maxSurface3D', ctypes.c_int * 3), ('maxSurface1DLayered', ctypes.c_int * 2), ('maxSurface2DLayered', ctypes.c_int * 3), ('maxSurfaceCubemap', ctypes.c_int), ('maxSurfaceCubemapLayered', ctypes.c_int * 2), ('surfaceAlignment', ctypes.c_size_t), ('concurrentKernels', ctypes.c_int), ('ECCEnabled', ctypes.c_int), ('pciBusID', ctypes.c_int), ('pciDeviceID', ctypes.c_int), ('pciDomainID', ctypes.c_int), ('tccDriver', ctypes.c_int), ('asyncEngineCount', ctypes.c_int), ('unifiedAddressing', ctypes.c_int), ('memoryClockRate', ctypes.c_int), ('memoryBusWidth', ctypes.c_int), ('l2CacheSize', ctypes.c_int), ('maxThreadsPerMultiProcessor', ctypes.c_int), ('streamPrioritiesSupported', ctypes.c_int), ('globalL1CacheSupported', ctypes.c_int), ('localL1CacheSupported', ctypes.c_int), ('sharedMemPerMultiprocessor', ctypes.c_size_t), ('regsPerMultiprocessor', ctypes.c_int), ('managedMemSupported', ctypes.c_int), ('isMultiGpuBoard', ctypes.c_int), ('multiGpuBoardGroupID', ctypes.c_int), # Extra space for new fields in future toolkits ('__future_buffer', ctypes.c_int * 128), # added later with cudart.cudaDeviceGetPCIBusId # (needed by NVML) ('pciBusID_str', ctypes.c_char * 16), ] class struct_c_nvmlDevice_t(ctypes.Structure): """ Handle to a device in NVML """ pass # opaque handle c_nvmlDevice_t = ctypes.POINTER(struct_c_nvmlDevice_t) class c_nvmlMemory_t(ctypes.Structure): """ Passed to nvml.nvmlDeviceGetMemoryInfo() """ _fields_ = [ ('total', ctypes.c_ulonglong), ('free', ctypes.c_ulonglong), ('used', ctypes.c_ulonglong), # Extra space for new fields in future toolkits ('__future_buffer', ctypes.c_ulonglong * 8), ] class c_nvmlUtilization_t(ctypes.Structure): """ Passed to nvml.nvmlDeviceGetUtilizationRates() """ _fields_ = [ ('gpu', ctypes.c_uint), ('memory', ctypes.c_uint), # Extra space for new fields in future toolkits ('__future_buffer', ctypes.c_uint * 8), ] def get_library(name): """ Returns a ctypes.CDLL or None """ try: if platform.system() == 'Windows': return ctypes.windll.LoadLibrary(name) else: return 
ctypes.cdll.LoadLibrary(name)
    except OSError:
        pass
    return None


def get_cudart():
    """
    Return the ctypes.DLL object for cudart or None
    """
    if platform.system() == 'Windows':
        arch = platform.architecture()[0]
        for ver in range(90, 50, -5):
            cudart = get_library('cudart%s_%d.dll' % (arch[:2], ver))
            if cudart is not None:
                return cudart
    elif platform.system() == 'Darwin':
        for major in xrange(9, 5, -1):
            for minor in (5, 0):
                cudart = get_library('libcudart.%d.%d.dylib' % (major, minor))
                if cudart is not None:
                    return cudart
        return get_library('libcudart.dylib')
    else:
        for major in xrange(9, 5, -1):
            for minor in (5, 0):
                cudart = get_library('libcudart.so.%d.%d' % (major, minor))
                if cudart is not None:
                    return cudart
        return get_library('libcudart.so')
    return None


def get_nvml():
    """
    Return the ctypes.DLL object for NVML or None
    """
    if platform.system() == 'Windows':
        return get_library('nvml.dll')
    else:
        for name in (
                'libnvidia-ml.so.1',
                'libnvidia-ml.so',
                'nvml.so'):
            nvml = get_library(name)
            if nvml is not None:
                return nvml
        return None


devices = None


def get_devices(force_reload=False):
    """
    Returns a list of c_cudaDeviceProp's
    Prints an error and returns an empty list if something goes wrong

    Keyword arguments:
    force_reload -- if False, return the previously loaded list of devices
    """
    global devices
    if not force_reload and devices is not None:
        # Only query CUDA once
        return devices

    devices = []

    cudart = get_cudart()
    if cudart is None:
        return []

    # check CUDA version
    cuda_version = ctypes.c_int()
    rc = cudart.cudaRuntimeGetVersion(ctypes.byref(cuda_version))
    if rc != 0:
        logger.error('cudaRuntimeGetVersion() failed with error #%s' % rc)
        return []
    if cuda_version.value < 6050:
        # c_int exposes its payload through .value
        logger.error('ERROR: Cuda version must be >= 6.5, not "%s"' % cuda_version.value)
        return []

    # get number of devices
    num_devices = ctypes.c_int()
    rc = cudart.cudaGetDeviceCount(ctypes.byref(num_devices))
    if rc != 0:
        logger.error('cudaGetDeviceCount() failed with error #%s' % rc)
        return []

    # query devices
    for x in xrange(num_devices.value):
        properties = c_cudaDeviceProp()
        rc = cudart.cudaGetDeviceProperties(ctypes.byref(properties), x)
        if rc == 0:
            pciBusID_str = ' ' * 16
            # also save the string representation of the PCI bus ID
            # (needed by NVML)
            rc = cudart.cudaDeviceGetPCIBusId(ctypes.c_char_p(pciBusID_str), 16, x)
            if rc == 0:
                properties.pciBusID_str = pciBusID_str
            devices.append(properties)
        else:
            logger.error('cudaGetDeviceProperties() failed with error #%s' % rc)
            del properties

    return devices


def get_device(device_id):
    """
    Returns a c_cudaDeviceProp
    """
    return get_devices()[int(device_id)]


def get_nvml_info(device_id):
    """
    Gets info from NVML for the given device
    Returns a dict of dicts from different NVML functions
    """
    device = get_device(device_id)
    if device is None:
        return None

    nvml = get_nvml()
    if nvml is None:
        return None

    rc = nvml.nvmlInit()
    if rc != 0:
        raise RuntimeError('nvmlInit() failed with error #%s' % rc)

    try:
        # get device handle
        handle = c_nvmlDevice_t()
        rc = nvml.nvmlDeviceGetHandleByPciBusId(ctypes.c_char_p(device.pciBusID_str), ctypes.byref(handle))
        if rc != 0:
            raise RuntimeError('nvmlDeviceGetHandleByPciBusId() failed with error #%s' % rc)

        # Grab info for this device from NVML
        info = {
            'minor_number': device_id,
            'product_name': device.name
        }

        uuid = ' ' * 41
        rc = nvml.nvmlDeviceGetUUID(handle, ctypes.c_char_p(uuid), 41)
        if rc == 0:
            info['uuid'] = uuid[:-1]

        temperature = ctypes.c_int()
        rc = nvml.nvmlDeviceGetTemperature(handle, 0, ctypes.byref(temperature))
        if rc == 0:
            info['temperature'] = temperature.value

        speed = ctypes.c_uint()
        rc =
nvml.nvmlDeviceGetFanSpeed(handle, ctypes.byref(speed)) if rc == 0: info['fan'] = speed.value power_draw = ctypes.c_uint() rc = nvml.nvmlDeviceGetPowerUsage(handle, ctypes.byref(power_draw)) if rc == 0: info['power_draw'] = power_draw.value power_limit = ctypes.c_uint() rc = nvml.nvmlDeviceGetPowerManagementLimit(handle, ctypes.byref(power_limit)) if rc == 0: info['power_limit'] = power_limit.value memory = c_nvmlMemory_t() rc = nvml.nvmlDeviceGetMemoryInfo(handle, ctypes.byref(memory)) if rc == 0: info['memory_total'] = memory.total info['memory_used'] = memory.used utilization = c_nvmlUtilization_t() rc = nvml.nvmlDeviceGetUtilizationRates(handle, ctypes.byref(utilization)) if rc == 0: info['gpu_util'] = utilization.gpu return info finally: rc = nvml.nvmlShutdown() if rc != 0: pass def add_unit(data): temperature = 'temperature' if temperature in data: data[temperature] = '{} C'.format(data[temperature]) fan = 'fan' if fan in data: data[fan] = '{} %'.format(data[fan]) power_draw = 'power_draw' if power_draw in data: data[power_draw] = '{:.2f} W'.format(float(data[power_draw]) / pow(10, 3)) power_limit = 'power_limit' if power_limit in data: data[power_limit] = '{:.2f} W'.format(float(data[power_limit]) / pow(10, 3)) memory_total = 'memory_total' if memory_total in data: data[memory_total] = '{} MiB'.format(data[memory_total] / pow(2, 20)) memory_used = 'memory_used' if memory_used in data: data[memory_used] = '{} MiB'.format(data[memory_used] / pow(2, 20)) gpu_util = 'gpu_util' if gpu_util in data: data[gpu_util] = '{} %'.format(data[gpu_util]) def get_devices_info(): if not len(get_devices()): return None nvml = get_nvml() nvml.nvmlInit() version = ' ' * 80 nvml.nvmlSystemGetDriverVersion(ctypes.c_char_p(version), 80) version = version.strip()[:-1] gpus = [] for i, device in enumerate(get_devices()): info = get_nvml_info(i) if info: gpus.append(info) for gpu in gpus: add_unit(gpu) return { 'gpus': gpus, 'driver_version': version }
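
# Usage sketch (illustrative only; not part of the original module):
# iterate over the detected CUDA devices and print a couple of the NVML
# stats that get_nvml_info()/add_unit() produce above.
#
# if __name__ == '__main__':
#     for i, dev in enumerate(get_devices()):
#         print 'Device %d: %s (compute %d.%d)' % (i, dev.name, dev.major, dev.minor)
#         info = get_nvml_info(i)
#         if info:
#             add_unit(info)
#             print '  temperature:', info.get('temperature')
#             print '  memory used:', info.get('memory_used')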
mit
-8,568,659,771,402,383,000
30.205556
107
0.57504
false
ekholabs/ekholabs-es
service/ElasticsearchService.py
1
1093
from ElasticsearchConnection import Resource
from uuid import uuid4


class ElasticsearchIndex:
    """Thin static wrapper around the Elasticsearch client for common index and document operations."""

    @staticmethod
    def create(index_name, settings):
        """Create an index with the given settings; a 400 (index already exists) is ignored."""
        es = Resource().connect()
        index = es.indices.create(index=index_name, ignore=400, body=settings)
        return index

    @staticmethod
    def delete_index(index_name):
        """Delete an index; 400 and 404 responses are ignored so the call is idempotent."""
        es = Resource().connect()
        index = es.indices.delete(index=index_name, ignore=[400, 404])
        return index

    @staticmethod
    def index(index_name, document_type, payload):
        """Index a document under a freshly generated UUID and return the response."""
        es = Resource().connect()
        index = es.index(index=index_name, doc_type=document_type, id=uuid4(), body=payload)
        return index

    @staticmethod
    def query(index_name, query_criteria):
        """Run a search against the index with the given query body."""
        es = Resource().connect()
        index = es.search(index=index_name, body=query_criteria)
        return index

    @staticmethod
    def delete_document(index_name, document_type, document_id):
        """Delete a single document by id."""
        es = Resource().connect()
        index = es.delete(index=index_name, doc_type=document_type, id=document_id)
        return index
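
# Usage sketch (illustrative; the index and type names are invented, and
# ElasticsearchConnection.Resource must point at a reachable cluster):
#
# settings = {'settings': {'number_of_shards': 1, 'number_of_replicas': 0}}
# ElasticsearchIndex.create('articles', settings)
# result = ElasticsearchIndex.index('articles', 'article', {'title': 'Hello'})
# hits = ElasticsearchIndex.query('articles', {'query': {'match_all': {}}})
# ElasticsearchIndex.delete_document('articles', 'article', result['_id'])
# ElasticsearchIndex.delete_index('articles')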
mit
-8,110,233,009,886,834,000
27.025641
92
0.651418
false
lexelby/apiary
historical/mysql_watcher/dblibs/dbutil.py
1
47774
#!/usr/bin/env python # # $LicenseInfo:firstyear=2007&license=mit$ # # Copyright (c) 2007-2010, Linden Research, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # $/LicenseInfo$ # # # Utility classes that allow us to monitor and keep track of databases # import array import binascii import gzip import math import os import re import socket import string import struct import sys import time from llbase import llsd def asciify(str): "Lame ASCIIfication of a string to keep various things from barfing" out_str = "" for ch in str: if (ch >= chr(0x9)) and (ch <= '~'): out_str += ch else: out_str += "." return out_str def all_as_maps(cursor): """Return all of the cursor with maps for each row instead of sequences""" all_seq = cursor.fetchall() ret_all = [] descs = cursor.description for row in all_seq: new_row = {} count = 0 for desc in descs: new_row[desc[0]] = row[count] count += 1 ret_all.append(new_row) return ret_all # # Cache IP to string lookup to make it faster # ip_table = {} def lookup_ip_string(ip_bin): if not ip_bin in ip_table: ip_table[ip_bin] = "%d.%d.%d.%d" % ((ip_bin & 0xff000000L) >> 24, (ip_bin & 0x00ff0000L) >> 16, (ip_bin & 0x0000ff00L) >> 8, ip_bin & 0x000000ffL) return ip_table[ip_bin] def llquery_from_llsd(query_llsd): # Hack, fill in arbitary data for info that isn't serialized query = LLQuery(None, None, query_llsd['query'], 0.0) query.mData['host_clean'] = query_llsd['host_clean'] query.mData['query_clean'] = query_llsd['query_clean'] # Hack, keeps correctOutliers from trashing the data #query.mNumQueries = query_llsd['num_queries'] #query.mTotalTime = query_llsd['total_time'] try: query.mNumQueriesCorrected = query_llsd['num_queries_corrected'] query.mTotalTimeCorrected = query_llsd['total_time_corrected'] except: # Hack for old output which didn't generate this data query.mNumQueriesCorrected = query_llsd['num_queries'] query.mTotalTimeCorrected = query_llsd['total_time'] return query def get_query_tables(query): "Return the list of tables in a query" # # Really dumb method, literally iterates through a bunch of regular expressions to pull this out. # There are probably better methods out there. 
# out_tables = [] # Clean up the query query = query.replace('\n',' ') query = re.sub('\s+', ' ', query) m = LLQuery.sSelectWhereRE.match(query) if m: # Split apart by commas tables = m.group(1).split(',') for table in tables: # Take the first part (which is table name) out_tables.append(string.strip(table.split()[0])) return out_tables m = LLQuery.sSelectRE.match(query) if m: out_tables.append(string.strip(m.group(1))) return out_tables m = LLQuery.sUpdateRE.match(query) if m: # Split apart by commas tables = m.group(1).split(',') for table in tables: # Take the first part (which is table name) out_tables.append(string.strip(table.split()[0])) return out_tables m = LLQuery.sReplaceRE.match(query) if m: out_tables.append(string.strip(m.group(1))) return out_tables m = LLQuery.sInsertRE.match(query) if m: out_tables.append(string.strip(m.group(1))) return out_tables m = LLQuery.sDeleteRE.match(query) if m: out_tables.append(string.strip(m.group(1))) return out_tables return out_tables MIN_BIN=-15 MAX_BIN=10 class LLQuery: "Represents all of the data associated with a query" fromLLSDStats = staticmethod(llquery_from_llsd) def __init__(self, host, port, query, start_time): # Store information which will be serialized for metadata in a map self.mData = {} self.mData['host'] = host self.mData['port'] = port self.mData['query'] = query # Metadata self.mData['host_clean'] = None self.mData['host_full'] = None self.mData['query_clean'] = None self.mData['tables'] = [] # # Stats information # self.mNumQueries = 0 self.mTotalTime = 0.0 self.mOutQueries = 0 self.mTotalTimeCorrected = 0.0 # Corrected to remove outliers self.mNumQueriesCorrected = 0 # Corrected to remove outliers # LLQueryStatBins for the query time histogram, as well as corrected time # Query times are collected into bins based on power of 2 execution times (in seconds). # Each bin collects the number of queries and total execution time. 
See LLQueryStatBin
        # for more details
        self.mBins = {} # Bins for histogram

        # This stuff doesn't usually get serialized
        self.mQueryLen = len(query)
        self.mStartTime = start_time
        self.mResponseTime = start_time

    def __hash__(self):
        return (self.mData['host_clean'] + ":" + self.mData['query_clean']).__hash__()

    def __eq__(self, other):
        # Note, this matches on clean, not strictly correct
        if ((self.mData['query_clean'] == other.mData['query_clean']) and
                (self.mData['host_clean'] == other.mData['host_clean'])):
            return True
        return False

    def getKey(self):
        # The string key is just the clean host and query, concatenated
        return self.mData['host_clean'] + ":" + self.mData['query_clean']

    def clean(self):
        "Generate the clean query so it can be used for statistics"
        if not self.mData['host_clean']:
            (self.mData['host_clean'], self.mData['host_full']) = get_host_type(self.mData['host'])
            self.mData['query_clean'] = clean_query(self.mData['query'], 0)

    def getAvgTimeCorrected(self):
        "Average time per query, corrected for outliers"
        return self.mTotalTimeCorrected/self.mNumQueriesCorrected

    def queryStart(self):
        "When collecting query stats, use this when the query is received"
        self.mNumQueries += 1
        self.mOutQueries += 1

    def queryResponse(self, elapsed):
        "When collecting stats, use this when the response is received"
        self.mTotalTime += elapsed
        self.mOutQueries -= 1

        # Determine which stat bin this query is in
        bin = MIN_BIN
        if elapsed:
            bin = int(math.log(elapsed, 2))
        bin = max(MIN_BIN, bin)
        bin = min(MAX_BIN, bin)
        if bin not in self.mBins:
            self.mBins[bin] = LLQueryStatBin(bin)
        self.mBins[bin].accumulate(elapsed)

    def correctOutliers(self):
        "Find outlier bins and calculate corrected results"
        # Outlier bins have query counts which are 3 orders of magnitude less than the total count for that query
        if not self.mNumQueries:
            # FIXME: This is a hack because we don't save this information in the query count dump
            return
        min_queries = self.mNumQueries/100
        self.mTotalTimeCorrected = 0.0
        self.mNumQueriesCorrected = 0
        for i in self.mBins.keys():
            if self.mBins[i].mNumQueries < min_queries:
                # Outlier, flag as such.
                self.mBins[i].mOutlier = True
            else:
                self.mTotalTimeCorrected += self.mBins[i].mTotalTime
                self.mNumQueriesCorrected += self.mBins[i].mNumQueries
        if self.mNumQueriesCorrected == 0:
            # HACK: Deal with divide by zero
            self.mNumQueriesCorrected = 1

    # Miscellaneous regular expressions to analyze the query type
    sReadRE = re.compile("(SELECT.*)|(USE.*)", re.IGNORECASE)
    sSelectWhereRE = re.compile("\(?\s*?SELECT.+?FROM\s+\(?(.*?)\)?\s+WHERE.*", re.IGNORECASE)
    sSelectRE = re.compile("\(?\s*?SELECT.+?FROM\s+(.+)(?:\s+LIMIT.*|.*)", re.IGNORECASE)
    sUpdateRE = re.compile("UPDATE\s+(.+?)\s+SET.*", re.IGNORECASE)
    sReplaceRE = re.compile("REPLACE INTO\s+(.+?)(?:\s*\(|\s+SET).*", re.IGNORECASE)
    sInsertRE = re.compile("INSERT.+?INTO\s+(.+?)(?:\s*\(|\s+SET).*", re.IGNORECASE)
    sDeleteRE = re.compile("DELETE.+?FROM\s+(.+?)\s+WHERE.*", re.IGNORECASE)

    def analyze(self):
        "Does some query analysis on the query"
        query = self.mData['query_clean']
        self.mData['tables'] = get_query_tables(query)
        if 'type' in self.mData:
            # Already analyzed
            return
        if LLQuery.sReadRE.match(query):
            self.mData['type'] = 'read'
        else:
            self.mData['type'] = 'write'

    def dumpLine(self, elapsed, query_len = 0):
        "Dump a semi-human-readable stats line for reporting"
        bin_str = ''
        for i in range(MIN_BIN, MAX_BIN+1):
            if i in self.mBins:
                if self.mBins[i].mOutlier:
                    bin_str += '*'
                else:
                    bin_str += str(int(math.log10(self.mBins[i].mNumQueries)))
            else:
                bin_str += '.'
if not query_len: query_len = 4096 num_queries = self.mNumQueriesCorrected if not num_queries: num_queries = 1 return ("%s\t%5d\t%6.2f\t%6.2f\t%1.4f\t%s\t" % (bin_str, num_queries, num_queries/elapsed, self.mTotalTimeCorrected, self.mTotalTimeCorrected/num_queries, self.mData['host_clean'])) \ + self.mData['query_clean'][0:query_len] def as_map(self): "Make an LLSD map version of data that can be used for merging" self.analyze() self.mData['num_queries'] = self.mNumQueries self.mData['total_time'] = self.mTotalTime self.mData['num_queries_corrected'] = self.mNumQueriesCorrected self.mData['total_time_corrected'] = self.mTotalTimeCorrected return self.mData class LLConnStatus: "Keeps track of the status of a connection talking to mysql" def __init__(self, ip_port, start_time): self.mLastMysqlPacketNumber = 0 self.mNumPackets = 0 self.mIPPort = ip_port self.mStartTime = start_time self.mLastUpdate = start_time self.mCurState = "" self.mLastQuery = None self.mNumQueries = 0 def quit(self, src_ip, src_port, pkt_time): query = LLQuery(src_ip, src_port, "Quit", pkt_time) query.clean() self.mLastUpdate = pkt_time self.mLastQuery = query self.mNumPackets += 1 def queryStart(self, src_ip, src_port, pkt_time, raw, pkt_len, offset): query_len = pkt_len - 1 query = LLQuery(src_ip, src_port, raw[offset:offset + (pkt_len - 1)], pkt_time) self.mLastUpdate = pkt_time # Packet length includes the command, offset into raw doesn't if query_len > (len(raw) - offset): query.mQueryLen = query_len self.mCurState = "SendingQuery" else: self.mCurState = "QuerySent" query.clean() self.mNumQueries += 1 self.mLastQuery = query self.mNumPackets += 1 def queryStartProcessed(self, src_ip, src_port, pkt_time, query_str): query = LLQuery(src_ip, src_port, query_str, pkt_time) query.clean() self.mLastUpdate = pkt_time self.mCurState = "QuerySent" self.mNumQueries += 1 self.mLastQuery = query self.mNumPackets += 1 def updateNonCommand(self, pkt_time, raw): # Clean up an existing query if you get a non-command. self.mNumPackets += 1 self.mLastUpdate = pkt_time if self.mLastQuery: if self.mCurState == "SendingQuery": # We're continuing a query # We won't generate a new clean version, because it'll $!@# up all the sorting. self.mLastQuery.mData['query'] += raw if len(self.mLastQuery.mData['query']) == self.mLastQuery.mQueryLen: self.mCurState = "QuerySent" self.mLastQuery.clean() return else: # # A non-command that's continuing a query. Not sure why this is happening, # but clear the last query to avoid generating inadvertent long query results. 
# self.mLastQuery = None # Default to setting state to "NonCommand" self.mCurState = "NonCommand" def updateResponse(self, pkt_time, result_type): # If we've got a query running, accumulate the elapsed time start_query_response = False if self.mCurState == "QuerySent": lq = self.mLastQuery if lq: if lq.mStartTime == 0.0: lq.mStartTime = pkt_time lq.mResponseTime = pkt_time start_query_response = True self.mLastUpdate = pkt_time if result_type == 0: self.mCurState = "Result:RecvOK" elif result_type == 0xff: self.mCurState = "Result:Error" elif result_type == 0xfe: self.mCurState = "Result:EOF" elif result_type == 0x01: self.mCurState = "Result:Header" else: self.mCurState = "Result:Data" return start_query_response def dump(self): if self.mLastQuery: print "%s: NumQ: %d State:%s\n\tLast: %s" % (self.mIPPort, self.mNumQueries, self.mCurState, self.mLastQuery.mData['query_clean'][0:40]) else: print "%s: NumQ: %d State:%s\n\tLast: None" % (self.mIPPort, self.mNumQueries, self.mCurState) class LLQueryStatBin: "Keeps track of statistics for one query bin" def __init__(self, power): self.mMinTime = pow(2, power) self.mMaxTime = pow(2, power+1) self.mTotalTime = 0 self.mNumQueries = 0 self.mOutlier = False def accumulate(self, elapsed): self.mTotalTime += elapsed self.mNumQueries += 1 def dump_query_stat_header(): return "LogHistogram (-15:10) \tCount\tQPS\tTotal\tAvg\tHost\tQuery" class LLQueryStatMap: def __init__(self, description, start_time): self.mDescription = description self.mQueryMap = {} self.mStartTime = start_time self.mFinalTime = 0 self.mLastTime = self.mStartTime self.mQueryStartCount = 0 self.mQueryResponseCount = 0 def load(self, fn): "Load dumped query stats from an LLSD file" # Read in metadata in_file = open(fn) in_string = in_file.read() in_file.close() in_llsd = llsd.LLSD.parse(in_string) info = in_llsd[0] query_list = in_llsd[1] self.mDescription = info['description'] self.mStartTime = info['start_time'] self.mLastTime = info['last_time'] self.mFinalTime = info['last_time'] self.mQueryStartCount = info['query_start_count'] self.mQueryResponseCount = info['query_response_count'] # Iterate through all the queries, and populate the query map. 
for query_row in query_list: query = LLQuery.fromLLSDStats(query_row) self.mQueryMap[query.getKey()] = query def analyze(self): for query in self.mQueryMap.values(): query.analyze() def queryStart(self, query): if not query in self.mQueryMap: #query.analyze() self.mQueryMap[query] = query self.mQueryMap[query].queryStart() # Update elapsed time for this map self.mLastTime = query.mStartTime if self.mLastTime < self.mStartTime: self.mStartTime = self.mLastTime if self.mLastTime > self.mFinalTime: self.mFinalTime = self.mLastTime self.mQueryStartCount += 1 def queryResponse(self, query): if not query in self.mQueryMap: self.queryStart(query) elapsed = query.mResponseTime - query.mStartTime self.mQueryMap[query].queryResponse(elapsed) self.mLastTime = query.mResponseTime if self.mLastTime > self.mFinalTime: self.mFinalTime = self.mLastTime self.mQueryResponseCount += 1 def getElapsedTime(self): return self.mFinalTime - self.mStartTime def getQPS(self): return self.mQueryStartCount / self.getElapsedTime() def correctOutliers(self): for query in self.mQueryMap.values(): query.correctOutliers() def getSortedKeys(self, sort_by = "total_time"): "Gets a list of keys sorted by sort type" self.correctOutliers() items = self.mQueryMap.items() backitems = None if sort_by == "total_time": backitems = [[v[1].mTotalTimeCorrected, v[0]] for v in items] elif sort_by == "count": backitems = [[v[1].mNumQueriesCorrected, v[0]] for v in items] elif sort_by == "avg_time": backitems = [[v[1].getAvgTimeCorrected(), v[0]] for v in items] else: # Fallback, sort by total time backitems = [[v[1].mTotalTimeCorrected, v[0]] for v in items] backitems.sort() backitems.reverse() # Get the keys out of the items sorted = [] for pair in backitems: sorted.append(pair[1]) return sorted def getSortedStats(self, sort_by = "total_time", num_stats = 0): "Gets a list of the top queries according to sort type" sorted_keys = self.getSortedKeys(sort_by) if num_stats == 0: l = len(sorted_keys) else: l = min(num_stats, len(sorted_keys)) stats = [] for i in range(0, l): stats.append(self.mQueryMap[sorted_keys[i]]) return stats def dumpStatus(self, sort_type = "total_time", elapsed = None): # Dump status according to total time if not elapsed: elapsed = self.getElapsedTime() sorted_stats = self.getSortedStats(sort_type) for query in sorted_stats: print query.dumpLine(elapsed, 60) def dumpLLSD(self, filename): # Analyze queries to generate metadata self.analyze() # Dump an LLSD document representing the entire object out = [] # First, dump all the metadata into the first block info_map = {} info_map['description'] = self.mDescription info_map['start_time'] = self.mStartTime info_map['last_time'] = self.mLastTime info_map['query_start_count'] = self.mQueryStartCount info_map['query_response_count'] = self.mQueryResponseCount out.append(info_map) # Dump all of the query info into the second block sorted_stats = self.getSortedStats("total_time") query_list = [] for query in sorted_stats: query_list.append(query.as_map()) out.append(query_list) f = open(filename, "w") f.write(str(llsd.LLSD(out))) f.close() def dumpTiming(self, filename): cur_time = time.time() f = open(filename, "w") f.write(dump_query_stat_header() + "\n") # Sort the queries sorted_stats = self.getSortedStats("total_time") for query in sorted_stats: f.write(query.dumpLine(cur_time - self.mStartTime)) f.write("\n") f.close() def dumpCountsLLSD(self, filename): "Dump the query statistics as an LLSD doc, for later merging with the query_info doc" out = [] # Put the metadata into 
a map info_map = {} info_map['description'] = self.mDescription info_map['start_time'] = self.mStartTime info_map['last_time'] = self.mLastTime info_map['query_start_count'] = self.mQueryStartCount info_map['query_response_count'] = self.mQueryResponseCount out.append(info_map) sorted_stats = self.getSortedStats("total_time") query_list = [] for query in sorted_stats: query_row = {} # We only want to dump identifying info and stats, not metadata query_row['host_clean'] = query.mData['host_clean'] # Convert the queries to utf-8 to make sure it doesn't break XML try: u = unicode(query.mData['query_clean']) query_row['query_clean'] = u.encode('utf-8') except: query_row['query_clean'] = 'NON-UTF8' try: u = unicode(query.mData['query']) query_row['query'] = u.encode('utf-8') except: query_row['query'] = 'NON-UTF8' query_row['count'] = query.mNumQueriesCorrected query_row['total_time'] = query.mTotalTimeCorrected query_row['avg_time'] = query.getAvgTimeCorrected() query_list.append(query_row) out.append(query_list) f = open(filename, "w") f.write(str(llsd.LLSD(out))) f.close() class LLBinnedQueryStats: "Keeps track of a fixed number of N minute bins of query stats" def __init__(self): self.mHourBins = {} # This will be keyed by unixtime seconds, eventually self.mMinuteBins = {} self.mLastUpdateHour = 0 self.mLastUpdateMinute = 0 def dumpTiming(self, path): # Dump hour bins for (key, value) in self.mHourBins.items(): value.dumpTiming("%s/hour-%s-query_timing.txt" % (path, key)) # Dump minute bins for (key, value) in self.mMinuteBins.items(): value.dumpTiming("%s/minute-%s-query_timing.txt" % (path, key)) def dumpCountsLLSD(self, path): # Dump hour bins for (key, value) in self.mHourBins.items(): value.dumpCountsLLSD("%s/hour-%s-query_counts.llsd" % (path, key)) # Dump minute bins for (key, value) in self.mMinuteBins.items(): value.dumpCountsLLSD("%s/minute-%s-query_counts.llsd" % (path, key)) def dumpLLSD(self, path): # Dump hour bins for (key, value) in self.mHourBins.items(): value.dumpLLSD("%s/hour-%s-query_dump.llsd" % (path, key)) # Dump minute bins for (key, value) in self.mMinuteBins.items(): value.dumpLLSD("%s/minute-%s-query_dump.llsd" % (path, key)) def flushOldBins(self, time_secs): for minute_bin_str in self.mMinuteBins.keys(): bin_secs = time.mktime(time.strptime(minute_bin_str, "%Y-%m-%d-%H-%M")) if (time_secs - bin_secs) > 3*3600: del self.mMinuteBins[minute_bin_str] def queryStart(self, query): "Update associated bin for the time specified, creating if necessary" # Hour and minute bins t = time.localtime(query.mStartTime) hour_bin_str = time.strftime("%Y-%m-%d-%H", t) minute_bin_str = time.strftime("%Y-%m-%d-%H-%M", t) hour = t[3] minute = t[4] # FIXME: These start times are a bit inaccurate, but should be fine under heavy query load. 
if not hour_bin_str in self.mHourBins: self.mHourBins[hour_bin_str] = LLQueryStatMap(hour_bin_str, query.mStartTime) if not minute_bin_str in self.mMinuteBins: self.mMinuteBins[minute_bin_str] = LLQueryStatMap(minute_bin_str, query.mStartTime) self.mHourBins[hour_bin_str].queryStart(query) self.mMinuteBins[minute_bin_str].queryStart(query) if hour != self.mLastUpdateHour: self.mLastUpdateHour = hour # If the hour changes, dump and clean out old bins self.flushOldBins(query.mStartTime) def queryResponse(self, query): "Update associated bin for the time specified, creating if necessary" # Hour and minute bins t = time.localtime(query.mStartTime) hour_bin_str = time.strftime("%Y-%m-%d-%H", t) minute_bin_str = time.strftime("%Y-%m-%d-%H-%M", t) hour = t[3] minute = t[4] # FIXME: These start times are a bit inaccurate, but should be fine under heavy query load. if not hour_bin_str in self.mHourBins: self.mHourBins[hour_bin_str] = LLQueryStatMap(hour_bin_str, query.mStartTime) if not minute_bin_str in self.mMinuteBins: self.mMinuteBins[minute_bin_str] = LLQueryStatMap(hour_bin_str, query.mStartTime) self.mHourBins[hour_bin_str].queryResponse(query) self.mMinuteBins[minute_bin_str].queryResponse(query) # MySQL protocol sniffer, using tcpdump, ncap packet parsing and mysql internals # http://forge.mysql.com/wiki/MySQL_Internals_ClientServer_Protocol class LLQueryStream: "Process a raw tcpdump stream (in raw libpcap format)" def __init__(self, in_file): self.mInFile = in_file self.mStartTime = time.time() # # A list of all outstanding "connections", and what they're doing. # This is necessary in order to get script timing and other information. # self.mConnStatus = {} self.mConnKeys = [] self.mConnCleanupIndex = 0 # # Parse/skip past the libpcap global header # #guint32 magic_number; /* magic number */ #guint16 version_major; /* major version number */ #guint16 version_minor; /* minor version number */ #gint32 thiszone; /* GMT to local correction */ #guint32 sigfigs; /* accuracy of timestamps */ #guint32 snaplen; /* max length of captured packets, in octets */ #guint32 network; /* data link type */ # Skip past the libpcap global header format = 'IHHiIII' size = struct.calcsize(format) header_bin = self.mInFile.read(size) res = struct.unpack(format, header_bin) def createConnection(self, client_ip_port, pkt_time): # Track the connection, create a new one or return existing if not client_ip_port in self.mConnStatus: self.mConnStatus[client_ip_port] = LLConnStatus(client_ip_port, pkt_time) # Track a new key that we need to garbage collect self.mConnKeys.append(client_ip_port) conn = self.mConnStatus[client_ip_port] return conn def closeConnection(self, ip_port): if ip_port in self.mConnStatus: del self.mConnStatus[ip_port] def cleanupConnection(self,cur_time): # Cleanup some number of stale connections. 
CONNECTION_EXPIRY=900.0 if self.mConnCleanupIndex >= len(self.mConnKeys): self.mConnCleanupIndex = 0 # Skip if no keys if len(self.mConnKeys) == 0: return key = self.mConnKeys[self.mConnCleanupIndex] if key in self.mConnStatus: # Clean up if it's too old if self.mConnStatus[key].mLastUpdate < (cur_time - CONNECTION_EXPIRY): del self.mConnStatus[key] #print "Cleaning up old key:", key #print "num conns:", len(self.mConnStatus) #print "num keys", len(self.mConnKeys) else: # Clean up if the connection is already removed del self.mConnKeys[self.mConnCleanupIndex] self.mConnCleanupIndex += 1 def getNextEvent(self): # Get the next event out of the packet stream td_format = 'IIII' ip_format = '!BBHHHBBHII' tcp_format = '!HHIIBBHHH' while 1: # # Parse out an individual packet from the tcpdump stream # # Match the packet header # Pull a record (packet) off of the wire # Packet header # guint32 ts_sec; /* timestamp seconds */ # guint32 ts_usec; /* timestamp microseconds */ # guint32 incl_len; /* number of octets of packet saved in file */ # guint32 orig_len; /* actual length of packet */ ph_bin = self.mInFile.read(16) res = struct.unpack(td_format, ph_bin) ts_sec = res[0] ts_usec = res[1] pkt_time = ts_sec + (ts_usec/1000000.0) incl_len = res[2] orig_len = res[3] # Packet data (incl_len bytes) raw_data = self.mInFile.read(incl_len) # Parse out the MAC header # Don't bother, we don't care - 14 byte header mac_offset = 14 # Parse out the IP header (min 20 bytes) # 4 bits - version # 4 bits - header length in 32 bit words # 1 byte - type of service # 2 bytes - total length # 2 bytes - fragment identification # 3 bits - flags # 13 bits - fragment offset # 1 byte - TTL # 1 byte - Protocol (should be 6) # 2 bytes - header checksum # 4 bytes - source IP # 4 bytes - dest IP ip_header = struct.unpack(ip_format, raw_data[mac_offset:mac_offset + 20]) # Assume all packets are TCP #if ip_header[6] != 6: # print "Not TCP!" # continue src_ip_bin = ip_header[8] src_ip = lookup_ip_string(src_ip_bin) #src_ip = "%d.%d.%d.%d" % ((src_ip_bin & 0xff000000L) >> 24, # (src_ip_bin & 0x00ff0000L) >> 16, # (src_ip_bin & 0x0000ff00L) >> 8, # src_ip_bin & 0x000000ffL) dst_ip_bin = ip_header[9] dst_ip = lookup_ip_string(dst_ip_bin) #dst_ip = "%d.%d.%d.%d" % ((dst_ip_bin & 0xff000000L) >> 24, # (dst_ip_bin & 0x00ff0000L) >> 16, # (dst_ip_bin & 0x0000ff00L) >> 8, # dst_ip_bin & 0x000000ffL) ip_size = (ip_header[0] & 0x0f) * 4 # Parse out the TCP packet header # 2 bytes - src_prt # 2 bytes - dst_port # 4 bytes - sequence number # 4 bytes - ack number # 4 bits - data offset (size in 32 bit words of header # 6 bits - reserved # 6 bits - control bits # 2 bytes - window # 2 bytes - checksum # 2 bytes - urgent pointer tcp_offset = mac_offset + ip_size tcp_header = struct.unpack(tcp_format, raw_data[tcp_offset:tcp_offset+20]) tcp_size = ((tcp_header[4] & 0xf0) >> 4) * 4 src_port = tcp_header[0] dst_port = tcp_header[1] # 3 bytes - packet length # 1 byte - packet number # 1 byte - command # <n bytes> - args pkt_offset = tcp_offset + tcp_size if len(raw_data) == pkt_offset: continue # Clearly not a mysql packet if it's less than 5 bytes of data if len(raw_data) - pkt_offset < 5: continue src_ip_port = "%s:%d" % (src_ip, src_port) dst_ip_port = "%s:%d" % (dst_ip, dst_port) if src_port == 3306: # # We are processing traffic from mysql server -> client # This primarily is used to time how long it takes for use # to start receiving data to the client from the server. 
#
                mysql_arr = array.array('B', raw_data[pkt_offset])
                result_type = ord(raw_data[pkt_offset])

                # Get or create connection
                conn = self.createConnection(dst_ip_port, pkt_time)

                # Update the status of this connection, including query times on
                # connections
                if conn.updateResponse(pkt_time, result_type):
                    # Event: Initial query response
                    return "QueryResponse", conn.mLastQuery
                continue
            if dst_port == 3306:
                #
                # Processing a packet from the client to the server
                #

                # HACK! This is an easy place to put this where we can get packet time that only happens once or so per event.
                # Garbage collect connections
                self.cleanupConnection(pkt_time)

                # Pull out packet length from the header
                mysql_arr = array.array('B', raw_data[pkt_offset:pkt_offset+5])
                pkt_len = mysql_arr[0] + (long(mysql_arr[1]) << 8) + (long(mysql_arr[2]) << 16)
                pkt_number = mysql_arr[3]

                # Find the connection associated with this packet
                # Get or create connection
                conn = self.createConnection(src_ip_port, pkt_time)

                #if conn.mLastMysqlPacketNumber != (pkt_number - 1):
                #    print "Prev:", conn.mLastMysqlPacketNumber, "Cur:", pkt_number
                conn.mLastMysqlPacketNumber = pkt_number

                cmd = mysql_arr[4]

                # If we're not a command, do stuff
                if cmd > 0x1c:
                    # Unfortunately, we can't trivially tell the difference between
                    # various non-command packets
                    # Assume that these are all AuthResponses for now.
                    conn.updateNonCommand(pkt_time, raw_data[pkt_offset:])
                    if "QuerySent" == conn.mCurState:
                        return ("QueryStart", conn.mLastQuery)
                    continue

                query = None
                if cmd == 1:
                    # Event: Quitting a connection
                    conn.quit(src_ip, src_port, pkt_time)
                    # This connection is closing, get rid of it
                    self.closeConnection(src_ip_port)
                    return ("Quit", conn.mLastQuery)
                elif cmd == 3:
                    # Event: Starting a query
                    conn.queryStart(src_ip, src_port, pkt_time, raw_data, pkt_len, pkt_offset + 5)

                    # Only return a QueryStart if we have the whole query
                    if "QuerySent" == conn.mCurState:
                        return ("QueryStart", conn.mLastQuery)
                else:
                    pass


IP_PORT_RE = re.compile("(\S+):(\d+)")
EVENT_RE = re.compile("(\S+)\t(\S+):(\d+)\t(\S+)\t(\S+)")
SECTION_RE = re.compile("\*{38}")


class LLLogQueryStream:
    "Process a query stream dump (as generated by LLQueryStream) back into query events"
    def __init__(self, lineiter):
        self.mLineIter = lineiter
        self.mStartTime = None
        #
        # A list of all outstanding "connections", and what they're doing.
        # This is necessary in order to get script timing and other information.
        #
        self.mConnStatus = {}

    def createConnection(self, client_ip_port, pkt_time):
        # Track the connection, create a new one or return existing
        if not client_ip_port in self.mConnStatus:
            self.mConnStatus[client_ip_port] = LLConnStatus(client_ip_port, pkt_time)
        return self.mConnStatus[client_ip_port]

    def closeConnection(self, ip_port):
        if ip_port in self.mConnStatus:
            del self.mConnStatus[ip_port]

    def getNextEvent(self):
        # Get the next event out of the file
        cur_event = None
        event_time = None
        event_type = None
        ip = None
        port = None
        ip_port = None
        cur_state = 'Metadata'
        for line in self.mLineIter:
            if line == '':
                return (None, None)
            if cur_state == 'Metadata':
                # We're looking for an event.  Actually we better find one.
                m = EVENT_RE.match(line)
                if not m:
                    #raise "Missing event on line: %s" % line
                    continue
                else:
                    event_time = float(m.group(1))
                    ip = m.group(2)
                    port = int(m.group(3))
                    ip_port = m.group(2)+":"+m.group(3)
                    clean_host = m.group(4)
                    event_type = m.group(5)
                    query_str = ''
                    cur_state = 'Query'
            elif cur_state == 'Query':
                if not SECTION_RE.match(line):
                    query_str += line
                else:
                    # We're done
                    # Generate the event to return
                    # Track the connection if we don't know about it yet.
                    conn = self.createConnection(ip_port, event_time)
                    if event_type == 'QueryStart':
                        conn.queryStartProcessed(ip, port, event_time, query_str)
                        return ("QueryStart", conn.mLastQuery)
                    elif event_type == 'QueryResponse':
                        # Update the status of this connection, including query times on
                        # connections
                        # Hack: Result type defaults to zero
                        if conn.updateResponse(event_time, 0):
                            # Event: Initial query response
                            return ("QueryResponse", conn.mLastQuery)
                        else:
                            # Skip responses which we don't have the start for
                            cur_state = 'Metadata'
                    elif event_type == 'Quit':
                        # Event: Quitting a connection
                        conn.quit(ip, port, event_time)
                        # This connection is closing, get rid of it
                        self.closeConnection(ip_port)
                        return ("Quit", conn.mLastQuery)
                    else:
                        raise ValueError("Unknown event type %s" % event_type)
        return (None, None)


def start_dump(host, port):
    # Start up tcpdump pushing data into netcat on the sql server
    interface = "eth0"
    SRC_DUMP_CMD = "ssh root@%s '/usr/sbin/tcpdump -p -n -s 0 -w - -i %s dst port 3306 or src port 3306 | nc %s %d'" \
        % (host, interface, socket.getfqdn(), port)
    os.popen2(SRC_DUMP_CMD, "r")


def remote_mysql_stream(host):
    # Create a server socket, then have tcpdump dump stuff to it.
    serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    bound = False
    port = 9999
    while not bound:
        try:
            serversocket.bind((socket.gethostname(), port))
            bound = True
        except socket.error:
            print port, " already bound, trying again"
            port += 1
    print "Bound port %d" % port
    serversocket.listen(1)

    # Fork off the dumper, start the server on the main connection
    pid = os.fork()
    if not pid:
        # Child process which gets data from the database
        time.sleep(1.0)
        print "Starting dump!"
        start_dump(host, port)
        print "Exiting dump!"
        sys.exit(0)

    print "Starting server"
    (clientsocket, address) = serversocket.accept()
    print "Accepted connection", address

    # Start listening to the data stream
    return clientsocket.makefile("rb")


#
# Utility stuff for query cleaner
#
# This is a Python port of (part of) the fingerprint() function from
# the mk-query-digest script in Maatkit, added by Yoz, with various additions/tweaks

hex_wildcard = r"[0-9a-f]"
word = hex_wildcard + r"{4}-"
long_word = hex_wildcard + r"{8}-"
very_long_word = hex_wildcard + r"{12}"
UUID_REGEX_STRING = long_word + word + word + word + very_long_word

hex_re = re.compile("^[\da-f]+$",re.I)
uuid_re = re.compile("^"+UUID_REGEX_STRING+"$",re.I)

def string_replace(match):
    "Called by string-matching regexp in replacers"
    if uuid_re.match(match.group(1)):
        return "*uuid*"
    return "*string*"

# list of (match,replacement) tuples used by clean_query()
replacers = [
    # Disabling comment removal because we may put useful inspection info in there
    #(re.compile(r'(?:--|#)[^\'"\r\n]*(?=[\r\n]|\Z)',re.I),""), # one-line comments
    #(re.compile(r"/\*[^!].*?\*/",re.I|re.M|re.S),""), # But not /*!version */
    (re.compile(r"\\\\"),""), # remove backslash pairs that may confuse the next line
    (re.compile(r"\\[\"']"),""), # remove escaped quotes
    (re.compile(r'"([^"]*)"',re.I),string_replace), # quoted strings
    (re.compile(r"'([^']*)'",re.I),string_replace), # quoted strings
    # this next one may need more work, due to "UPDATE ... 
SET money = money-23" # the next two are significantly different from the maatkit original code (re.compile(r"(?<![\w\)\d])(\s*)\-\d+(\.\d+)?",re.I),"*num*"), # negative reals (re.compile(r"(?<![\w])\d+(\.\d+)?",re.I),"*num*"), # positive reals # mk-query-digest has s/[xb.+-]\?/?/g; as "clean up leftovers" here, whatever that means - I've left it out (re.compile(r"^\s+",re.I),""), # chop off leading whitespace (re.compile(r"\s+$",re.I|re.M|re.S),""), # kill trailing whitespace # reduce IN and VALUES lists (look for previously-cleaned placeholders) (re.compile(r"\b(in|values)(?:[\s,]*\(([\s\,]*\*(num|string|uuid)\*)*[\s,]*\))+", re.I|re.X),"\\1(*values*)"), # collapse IN and VALUES lists # This next one collapses chains of UNIONed functionally-identical queries, # but it's only really useful if you're regularly seeing more than 2 queries # in a chain. We don't seem to have any like that, so I'm disabling this. #(re.compile(r"\b(select\s.*?)(?:(\sunion(?:\sall)?)\s\1)+",re.I),"\\1 -- repeat\\2 --"), # collapse UNION # remove "OFFSET *num*" when following a LIMIT (re.compile(r"\blimit \*num\*(?:, ?\*num\*| offset \*num\*)?",re.I),"LIMIT *num*") ] prepare_re = re.compile('PREPARE.*', re.IGNORECASE) deallocate_re = re.compile('DEALLOCATE\s+PREPARE.*', re.IGNORECASE) execute_re = re.compile('EXECUTE.*', re.IGNORECASE) mdb_re = re.compile('MDB2_STATEMENT\S+') def clean_query(query, num_words): "Generalizes a query by removing all unique information" # Strip carriage returns query = query.replace("\n", " ") # Screw it, if it's a prepared statement or an execute, generalize the statement name if prepare_re.match(query): query = mdb_re.sub('*statement*', query) return query if execute_re.match(query): query = mdb_re.sub('*statement*', query) if deallocate_re.match(query): query = "DEALLOCATE PREPARE" return query # Loop through the replacers and perform each one for (replacer, subst) in replacers: # try block is here because, apparently, string_re may throw an exception # TODO: investigate the above try: query = replacer.sub(subst, query) except: pass # After we do the cleanup, then we get rid of extra whitespace words = query.split(None) query = " ".join(words) return query def test_clean_query(query): "A debug version of the query cleaner which prints steps as it goes" # Strip carriage returns query = query.replace("\n", " ") # Screw it, if it's a prepared statement or an execute, generalize the statement name if prepare_re.match(query): query = mdb_re.sub('*statement*', query) return query if execute_re.match(query): query = mdb_re.sub('*statement*', query) if deallocate_re.match(query): query = "DEALLOCATE PREPARE" return query # Loop through the replacers and perform each one for (replacer, subst) in replacers: try: if replacer.search(query) == None: print replacer.pattern," : No match" else: query = replacer.sub(subst, query) print replacer.pattern," : ",query except: pass # After we do the cleanup, then we get rid of extra whitespace words = query.split(None) query = " ".join(words) return query # # Hostname cache - basically, caches the "linden" host type for a particular IP address # or hostname # sim_re = re.compile(".*sim\d+.*") web_re = re.compile("int\.web\d+.*") iweb_re = re.compile("int\.iweb\d+.*") webds_re = re.compile(".*web-ds\d+.*") webster_re = re.compile(".*webster\d+.*") bankds_re = re.compile(".*bank-ds\d+.*") xmlrpc_re = re.compile(".*xmlrpc\d+.*") login_re = re.compile(".*login\d+.*") data_re = re.compile(".*data\..*") #xmlrpc_re = 
re.compile("(?:int\.omgiwanna.*)|(?:int\.pony.*)") ip_re = re.compile("\d+\.\d+\.\d+\.\d+") ll_re = re.compile("(.*)\.lindenlab\.com") host_type_cache = {} def get_host_type(host): "Returns the genericized linden host type from an IP address or hostname" # if host in host_type_cache: # return host_type_cache[host] named_host = str(host) if ip_re.match(host): # Look up the hostname try: named_host = str(socket.gethostbyaddr(host)[0]) except: pass # Figure out generic host type host_type = named_host if sim_re.match(named_host): host_type = "sim" elif login_re.match(named_host): host_type = "login" elif webster_re.match(named_host): host_type = "webster" elif bankds_re.match(named_host): host_type = "bank-ds" elif web_re.match(named_host): host_type = "web" elif iweb_re.match(named_host): host_type = "iweb" elif webds_re.match(named_host): host_type = "web-ds" elif data_re.match(named_host): host_type = "data" elif xmlrpc_re.match(named_host): host_type = "xmlrpc" m = ll_re.match(host_type) if m: host_type = m.group(1) host_type_cache[host] = host_type return (host_type, named_host) def LLLogIter(filenames): "An iterator that iterates line by line over a series of files, even if they're compressed." for f in filenames: curr = open_log_file(f) for line in curr: yield line def open_log_file(filename): # Open the logfile (even if it's compressed) if re.compile(".+\.gz").match(filename): # gzipped file, return a gzipped file opject return gzip.open(filename,"r") else: return open(filename, "r")
mit
-3,236,365,569,771,306,500
37.127694
126
0.570226
false
neeraj-kumar/nkpylib
nkmturk.py
1
5946
#!/usr/bin/env python """Mechanical Turk-related utilities, written by Neeraj Kumar. Licensed under the 3-clause BSD License: Copyright (c) 2013, Neeraj Kumar (neerajkumar.org) All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL NEERAJ KUMAR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import os, sys, time from pprint import pprint, pformat OUTHTML_FRAME = '''<html> <head><title>Rendered mturk template</title></head> <body> %s </body> </html>''' def readcsv(fname): """Reads the CSV file given and returns a list of dicts""" import csv reader = csv.DictReader(open(fname)) ret = [row for row in reader] return ret def renderhtml(tmpl, data, rowspec=None): """Renders html from the given template and data (list of dicts). The rowspec should be an expression involving i and r, which are the row index, and a random float, resp. This will be eval'ed and only if true will the row be output. An empty or None rowspec outputs all rows. 
""" from random import random import re # convert template to a python-style template var = re.compile(r'\${(.*?)}') matches = var.split(tmpl) s = '' for i, m in enumerate(matches): if i%2 == 0: s += m else: s += '%%(%s)s' % (m) # go through data rows = [] for i, row in enumerate(data): r = random() if rowspec and not eval(rowspec, locals()): continue rows.append(s % row) out = OUTHTML_FRAME % ('\n'.join(rows)) return out def demultiplex(row, nperhit): """Demultiplexes this dict and returns a list of dicts.""" import re end = re.compile(r'_\d+$') # de-multiplex data ret = [] for i in range(nperhit): # copy all data d = dict(**row) for k, v in sorted(d.items()): # find input and output fields and delete them initially if not k.startswith('Input.') and not k.startswith('Answer.'): continue del d[k] # rename to simplified keys k = k.replace('Input.','').replace('Answer.','') if end.search(k): # if it's the current one, we want to add it back in if k.endswith('_%d' % i): k = k.rsplit('_', 1)[0] else: continue # remove multiplexed keys # add field back in d[k] = v ret.append(d) return ret def renderout(tmplfname, data, groupby, nperhit): """Renders mturk output""" import web, web.template from nkutils import partitionByFunc from nkwebutils import NKStor, mystorify # de-multiplex and simplify data data = sum([demultiplex(row, nperhit) for row in data], []) # group by common key grouped, _ = partitionByFunc(data, lambda d: d[groupby]) results = [] Cls = NKStor # build up list of results for gkey, g in sorted(grouped.items()): # build up a list of common keys for this result group r = Cls(g[0]) for el in g: for k, v in r.items(): if el[k] != v: del r[k] # now create each individual sub-output r['outputs'] = [Cls(el) for el in g] results.append(r) #pprint(results) # render results renfunc = web.template.frender(tmplfname) s = renfunc(results) return s if __name__ == '__main__': from pprint import pprint TASKS = 'renderhit renderout'.split(' ') if len(sys.argv) < 2: print 'Usage: python %s <%s> [<args> ...]' % (sys.argv[0], '|'.join(TASKS)) sys.exit() task = sys.argv[1] assert task in TASKS if task == 'renderhit': if len(sys.argv) < 4: print 'Usage: python %s renderhit <template> <data csv> [<rowspec>]' % (sys.argv[0]) print " rowspec is an expression involving 'i' (index) and/or 'r' (random float) which is eval'ed" sys.exit() tmpl = open(sys.argv[2]).read() data = readcsv(sys.argv[3]) try: rowspec = sys.argv[4] except Exception: rowspec = None html = renderhtml(tmpl, data, rowspec) print html elif task == 'renderout': if len(sys.argv) < 5: print 'Usage: python %s renderout <template> <data csv> <groupby> <nperhit>' % (sys.argv[0]) sys.exit() tmplfname = sys.argv[2] data = readcsv(sys.argv[3]) groupby = sys.argv[4] nperhit = int(sys.argv[5]) html = renderout(tmplfname, data, groupby, nperhit) print html
bsd-3-clause
2,662,317,386,595,247,600
35.478528
111
0.623276
false
coolkang/hsbsite
settings.py
1
12906
from __future__ import absolute_import, unicode_literals ###################### # MEZZANINE SETTINGS # ###################### # The following settings are already defined with default values in # the ``defaults.py`` module within each of Mezzanine's apps, but are # common enough to be put here, commented out, for convenient # overriding. Please consult the settings documentation for a full list # of settings Mezzanine implements: # http://mezzanine.jupo.org/docs/configuration.html#default-settings # Controls the ordering and grouping of the admin menu. # # ADMIN_MENU_ORDER = ( # ("Content", ("pages.Page", "blog.BlogPost", # "generic.ThreadedComment", ("Media Library", "fb_browse"),)), # ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")), # ("Users", ("auth.User", "auth.Group",)), # ) # A three item sequence, each containing a sequence of template tags # used to render the admin dashboard. # # DASHBOARD_TAGS = ( # ("blog_tags.quick_blog", "mezzanine_tags.app_list"), # ("comment_tags.recent_comments",), # ("mezzanine_tags.recent_actions",), # ) # A sequence of templates used by the ``page_menu`` template tag. Each # item in the sequence is a three item sequence, containing a unique ID # for the template, a label for the template, and the template path. # These templates are then available for selection when editing which # menus a page should appear in. Note that if a menu template is used # that doesn't appear in this setting, all pages will appear in it. # PAGE_MENU_TEMPLATES = ( # (1, "Top navigation bar", "pages/menus/dropdown.html"), # (2, "Left-hand tree", "pages/menus/tree.html"), # (3, "Footer", "pages/menus/footer.html"), # ) # A sequence of fields that will be injected into Mezzanine's (or any # library's) models. Each item in the sequence is a four item sequence. # The first two items are the dotted path to the model and its field # name to be added, and the dotted path to the field class to use for # the field. The third and fourth items are a sequence of positional # args and a dictionary of keyword args, to use when creating the # field instance. When specifying the field class, the path # ``django.models.db.`` can be omitted for regular Django model fields. # # EXTRA_MODEL_FIELDS = ( # ( # # Dotted path to field. # "mezzanine.blog.models.BlogPost.image", # # Dotted path to field class. # "somelib.fields.ImageField", # # Positional args for field class. # ("Image",), # # Keyword args for field class. # {"blank": True, "upload_to": "blog"}, # ), # # Example of adding a field to *all* of Mezzanine's content types: # ( # "mezzanine.pages.models.Page.another_field", # "IntegerField", # 'django.db.models.' is implied if path is omitted. # ("Another name",), # {"blank": True, "default": 1}, # ), # ) # Setting to turn on featured images for blog posts. Defaults to False. # # BLOG_USE_FEATURED_IMAGE = True # If True, the south application will be automatically added to the # INSTALLED_APPS setting. USE_SOUTH = True ######################## # MAIN DJANGO SETTINGS # ######################## # People who get code error notifications. # In the format (('Full Name', '[email protected]'), # ('Full Name', '[email protected]')) ADMINS = ( # ('Your Name', '[email protected]'), ) MANAGERS = ADMINS # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['localhost',] # Local time zone for this installation. 
Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = None # If you set this to True, Django will use timezone-aware datetimes. USE_TZ = True # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = "en" # Supported languages _ = lambda s: s LANGUAGES = ( ('en', _('English')), ) # A boolean that turns on/off debug mode. When set to ``True``, stack traces # are displayed for error pages. Should always be set to ``False`` in # production. Best set to ``True`` in local_settings.py DEBUG = False # Whether a user's session cookie expires when the Web browser is closed. SESSION_EXPIRE_AT_BROWSER_CLOSE = True SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = False # Tuple of IP addresses, as strings, that: # * See debug comments, when DEBUG is true # * Receive x-headers INTERNAL_IPS = ("127.0.0.1",) # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( "django.template.loaders.filesystem.Loader", "django.template.loaders.app_directories.Loader", ) AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( "django.contrib.staticfiles.finders.FileSystemFinder", "django.contrib.staticfiles.finders.AppDirectoriesFinder", # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # The numeric mode to set newly-uploaded files to. The value should be # a mode you'd pass directly to os.chmod. FILE_UPLOAD_PERMISSIONS = 0o644 ############# # DATABASES # ############# DATABASES = { "default": { # Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle". "ENGINE": "django.db.backends.", # DB name or path to database file if using sqlite3. "NAME": "", # Not used with sqlite3. "USER": "", # Not used with sqlite3. "PASSWORD": "", # Set to empty string for localhost. Not used with sqlite3. "HOST": "", # Set to empty string for default. Not used with sqlite3. "PORT": "", } } ######### # PATHS # ######### import os # Full filesystem path to the project. PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) # Name of the directory for the project. PROJECT_DIRNAME = PROJECT_ROOT.split(os.sep)[-1] # Every cache key will get prefixed with this value - here we set it to # the name of the directory the project is in to try and use something # project specific. CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_DIRNAME # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = "/static/" # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/")) # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. 
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = STATIC_URL + "media/" # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/")) # Package/module name to import the root urlpatterns from for the project. ROOT_URLCONF = "%s.urls" % PROJECT_DIRNAME # Put strings here, like "/home/html/django_templates" # or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. TEMPLATE_DIRS = (os.path.join(PROJECT_ROOT, "templates"),) ################ # APPLICATIONS # ################ INSTALLED_APPS = ( "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.redirects", "django.contrib.sessions", "django.contrib.sites", "django.contrib.sitemaps", "django.contrib.staticfiles", "moderna_app", # This is a template I am using. "mezzanine.boot", "mezzanine.conf", "mezzanine.core", "mezzanine.generic", "mezzanine.blog", "mezzanine.forms", "mezzanine.pages", "mezzanine.galleries", #"mezzanine.twitter", #"mezzanine.accounts", #"mezzanine.mobile", ) # List of processors used by RequestContext to populate the context. # Each one should be a callable that takes the request object as its # only parameter and returns a dictionary to add to the context. TEMPLATE_CONTEXT_PROCESSORS = ( "django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", "django.core.context_processors.debug", "django.core.context_processors.i18n", "django.core.context_processors.static", "django.core.context_processors.media", "django.core.context_processors.request", "django.core.context_processors.tz", "mezzanine.conf.context_processors.settings", "mezzanine.pages.context_processors.page", ) # List of middleware classes to use. Order is important; in the request phase, # these middleware classes will be applied in the order given, and in the # response phase the middleware will be applied in reverse order. MIDDLEWARE_CLASSES = ( "mezzanine.core.middleware.UpdateCacheMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.locale.LocaleMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "mezzanine.core.request.CurrentRequestMiddleware", "mezzanine.core.middleware.RedirectFallbackMiddleware", "mezzanine.core.middleware.TemplateForDeviceMiddleware", "mezzanine.core.middleware.TemplateForHostMiddleware", "mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware", "mezzanine.core.middleware.SitePermissionMiddleware", # Uncomment the following if using any of the SSL settings: # "mezzanine.core.middleware.SSLRedirectMiddleware", "mezzanine.pages.middleware.PageMiddleware", "mezzanine.core.middleware.FetchFromCacheMiddleware", ) # Store these package names here as they may change in the future since # at the moment we are using custom forks of them. PACKAGE_NAME_FILEBROWSER = "filebrowser_safe" PACKAGE_NAME_GRAPPELLI = "grappelli_safe" ######################### # OPTIONAL APPLICATIONS # ######################### # These will be added to ``INSTALLED_APPS``, only if available. 
OPTIONAL_APPS = ( "debug_toolbar", "django_extensions", "compressor", PACKAGE_NAME_FILEBROWSER, PACKAGE_NAME_GRAPPELLI, ) ################### # DEPLOY SETTINGS # ################### # These settings are used by the default fabfile.py provided. # Check fabfile.py for defaults. # FABRIC = { # "SSH_USER": "", # SSH username for host deploying to # "HOSTS": ALLOWED_HOSTS[:1], # List of hosts to deploy to (eg, first host) # "DOMAINS": ALLOWED_HOSTS, # Domains for public site # "REPO_URL": "ssh://[email protected]/user/project", # Project's repo URL # "VIRTUALENV_HOME": "", # Absolute remote path for virtualenvs # "PROJECT_NAME": "", # Unique identifier for project # "REQUIREMENTS_PATH": "requirements.txt", # Project's pip requirements # "GUNICORN_PORT": 8000, # Port gunicorn will listen on # "LOCALE": "en_US.UTF-8", # Should end with ".UTF-8" # "DB_PASS": "", # Live database password # "ADMIN_PASS": "", # Live admin user password # "SECRET_KEY": SECRET_KEY, # "NEVERCACHE_KEY": NEVERCACHE_KEY, # } #################### # HSBSITE SETTINGS # #################### SITE_TITLE = 'hbanner' ################## # LOCAL SETTINGS # ################## # Allow any settings to be defined in local_settings.py which should be # ignored in your version control system allowing for settings to be # defined per machine. try: from local_settings import * except ImportError as e: if "local_settings" not in str(e): raise e #################### # DYNAMIC SETTINGS # #################### # set_dynamic_settings() will rewrite globals based on what has been # defined so far, in order to provide some better defaults where # applicable. We also allow this settings module to be imported # without Mezzanine installed, as the case may be when using the # fabfile, where setting the dynamic settings below isn't strictly # required. try: from mezzanine.utils.conf import set_dynamic_settings except ImportError: pass else: set_dynamic_settings(globals())
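
# Example local_settings.py (illustrative; every value below is a placeholder).
# The import hook in the "LOCAL SETTINGS" section above lets a file like this,
# kept out of version control, override anything per machine:
#
# DEBUG = True
# ALLOWED_HOSTS = ['127.0.0.1', 'localhost']
# SECRET_KEY = 'replace-me'
# NEVERCACHE_KEY = 'replace-me-too'
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.sqlite3',
#         'NAME': 'dev.db',
#     }
# }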
apache-2.0
5,025,741,811,072,452,000
33.600536
79
0.685573
false
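
A minimal sketch of the local_settings.py module that the try/except import block in the record above expects; every value here is a hypothetical placeholder for illustration, not taken from the dumped project:

# local_settings.py -- hypothetical per-machine overrides. The settings
# module above imports this file last, so anything defined here wins.
DEBUG = True  # placeholder: enable only on development machines

# Placeholder database override; engine and name are assumptions.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": "dev.db",
    }
}

Because the import is wrapped in try/except ImportError, the file can be absent entirely (e.g., in version control) without breaking the settings module.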
unicefuganda/uSurvey
survey/views/indicators.py
1
15675
import json

import plotly.offline as opy
import plotly.graph_objs as go
from django.utils.safestring import mark_safe
from django.contrib import messages
from django.contrib.auth.decorators import permission_required, login_required
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from django.shortcuts import render, get_object_or_404
from django.core.urlresolvers import reverse
from survey.models import Location
from survey.forms.indicator import IndicatorForm,\
    IndicatorVariableForm, IndicatorFormulaeForm
from survey.forms.filters import IndicatorFilterForm
from survey.models import Indicator
from survey.models import IndicatorVariable
from survey.models import IndicatorVariableCriteria
from survey.models import Survey
from survey.forms.enumeration_area import LocationsFilterForm

INDICATOR_DOES_NOT_EXIST_MSG = "The indicator requested does not exist."


@login_required
@permission_required('auth.can_view_batches')
def new(request):
    """Creates a new indicator. The HTML works with ajax to create
    variables on the same screen via popups."""
    indicator_form = IndicatorForm()
    if request.method == 'POST':
        indicator_form = IndicatorForm(data=request.POST)
        if indicator_form.is_valid():
            indicator_form.save()
            messages.success(request, "Indicator successfully created.")
            return HttpResponseRedirect(reverse('list_indicator_page'))
        messages.error(request, "Indicator was not created.")
    request.breadcrumbs([
        ('Indicators', reverse('list_indicator_page')),
    ])
    return render(request, 'indicator/new.html',
                  {'indicator_form': indicator_form,
                   'title': 'Add Indicator',
                   'button_label': 'Create',
                   'cancel_url': reverse('list_indicator_page'),
                   'action': '/indicators/new/',
                   'variable_form': IndicatorVariableForm(None)})


@login_required
def edit(request, indicator_id):
    indicator = Indicator.objects.get(id=indicator_id)
    indicator_form = IndicatorForm(instance=indicator)
    if request.method == 'POST':
        indicator_form = IndicatorForm(data=request.POST, instance=indicator)
        if indicator_form.is_valid():
            indicator_form.save()
            messages.success(request, "Indicator successfully edited.")
            return HttpResponseRedirect("/indicators/")
        messages.error(request, "Indicator was not successfully edited.")
    request.breadcrumbs([
        ('Indicators', reverse('list_indicator_page')),
    ])
    context = {
        'indicator_form': indicator_form,
        'title': 'Edit Indicator',
        'button_label': 'Save',
        'cancel_url': reverse('list_indicator_page'),
        'variable_form': IndicatorVariableForm(None)}
    return render(request, 'indicator/new.html', context)


def _process_form(indicator_filter_form, indicators):
    # The question set filter takes precedence over the survey filter.
    if indicator_filter_form.is_valid():
        survey_id = indicator_filter_form.cleaned_data['survey']
        question_set_id = indicator_filter_form.cleaned_data['question_set']
        if question_set_id.isdigit():
            indicators = indicators.filter(question_set__id=question_set_id)
        elif survey_id.isdigit():
            qsets = Survey.objects.get(id=survey_id).qsets.values_list(
                'id', flat=True)
            indicators = indicators.filter(question_set__id__in=qsets)
    return indicators


@login_required
@permission_required('auth.can_view_batches')
def index(request):
    indicators = Indicator.objects.all()
    data = request.GET or request.POST
    indicator_filter_form = IndicatorFilterForm(data=data)
    indicators = _process_form(indicator_filter_form, indicators)
    return render(request, 'indicator/index.html',
                  {'indicators': indicators,
                   'indicator_filter_form': indicator_filter_form})


@login_required
@permission_required('auth.can_view_batches')
def delete(request, indicator_id):
    indicator = Indicator.objects.get(id=indicator_id)
    indicator.variables.all().delete()
    indicator.delete()
    messages.success(request, 'Indicator successfully deleted.')
    return HttpResponseRedirect('/indicators/')


def validate_formulae(request):
    request_data = request.GET if request.method == 'GET' else request.POST
    return JsonResponse(
        {'valid': IndicatorFormulaeForm(data=request_data).is_valid()})


@login_required
@permission_required('auth.can_view_household_groups')
def add_indicator_variable(request, indicator_id):
    indicator = Indicator.get(pk=indicator_id)
    request.breadcrumbs([
        ('Indicators', reverse('list_indicator_page')),
        ('Variable List',
         reverse('view_indicator_variables', args=(indicator_id, ))),
    ])
    return _add_variable(request, indicator=indicator)


def _add_variable(request, indicator=None):
    form_action = reverse('add_variable')
    parameter_questions = []
    if indicator:
        form_action = reverse("add_indicator_variable", args=(indicator.id, ))
        parameter_questions = indicator.eqset.all_questions
    variable_form = IndicatorVariableForm(indicator)
    if request.method == 'POST':
        variable_form = IndicatorVariableForm(indicator, data=request.POST)
        if variable_form.is_valid():
            variable = variable_form.save()
            if request.is_ajax() is False:
                messages.success(request, 'Variable successfully saved.')
                return HttpResponseRedirect(
                    reverse('edit_indicator_variable', args=(variable.pk, )))
    context = {'variable_form': variable_form,
               'indicator': indicator,
               'title': "Manage Indicator Criteria",
               'button_label': 'Save',
               'id': 'add_group_form',
               "v_form_action": form_action,
               'cancel_url': reverse('list_indicator_page'),
               'parameter_questions': parameter_questions,
               'condition_title': "Conditions"}
    if request.is_ajax():
        context['cancel_url'] = None
        return render(request, 'indicator/indicator_form.html', context)
    return render(request, 'indicator/indicator_variable.html', context)


def add_variable(request):
    return _add_variable(request)


def ajax_edit_indicator_variable(request):
    data = request.GET or request.POST
    if request.is_ajax():
        variable_id = data.get('id')
        return edit_indicator_variable(request, variable_id)


@login_required
@permission_required('auth.can_view_household_groups')
def edit_indicator_variable(request, variable_id):
    variable = IndicatorVariable.get(id=variable_id)
    variable_form = IndicatorVariableForm(variable.indicator, instance=variable)
    parameter_questions = []
    if variable.indicator:
        parameter_questions = variable.indicator.eqset.all_questions
    if request.method == 'POST':
        variable_form = IndicatorVariableForm(
            variable.indicator, instance=variable, data=request.POST)
        if variable_form.is_valid():
            variable_form.save()
            if request.is_ajax() is False:
                messages.success(request, 'Variable successfully saved.')
                return HttpResponseRedirect(
                    reverse('edit_indicator_variable', args=(variable.pk, )))
    context = {
        'variable_form': variable_form,
        'indicator': variable.indicator,
        'title': "Manage Indicator Criteria",
        'button_label': 'Save',
        'id': 'add_group_form',
        "v_form_action": reverse("edit_indicator_variable",
                                 args=(variable_id, )),
        'cancel_url': reverse('list_indicator_page'),
        'parameter_questions': parameter_questions,
        'conditions': variable.criteria.all(),
        'condition_title': "Conditions"}
    if request.is_ajax():
        context['cancel_url'] = None
        return render(request, 'indicator/indicator_form.html', context)
    breadcrumbs = [
        ('Indicators', reverse('list_indicator_page')),
    ]
    if variable.indicator:
        breadcrumbs.append(
            ('Variable List',
             reverse('view_indicator_variables',
                     args=(variable.indicator.pk, ))))
    request.breadcrumbs(breadcrumbs)
    return render(request, 'indicator/indicator_variable.html', context)


@login_required
@permission_required('auth.can_view_household_groups')
def delete_indicator_variable(request, variable_id):
    get_object_or_404(IndicatorVariable, id=variable_id).delete()
    if request.is_ajax():
        return add_variable(request)
    messages.info(request, 'Variable removed successfully')
    return HttpResponseRedirect(reverse('list_indicator_page'))


@login_required
@permission_required('auth.can_view_household_groups')
def ajax_delete_indicator_variable(request):
    if request.is_ajax():
        variable_id = request.GET.get('id')
        return delete_indicator_variable(request, variable_id)


@login_required
@permission_required('auth.can_view_household_groups')
def delete_indicator_criteria(request, indicator_criteria_id):
    criterion = get_object_or_404(
        IndicatorVariableCriteria, id=indicator_criteria_id)
    variable = criterion.variable
    criterion.delete()
    if request.is_ajax() is False:
        messages.info(request, 'condition removed successfully')
    return HttpResponseRedirect(
        reverse('edit_indicator_variable', args=(variable.pk, )))


def view_indicator_variables(request, indicator_id):
    indicator = get_object_or_404(Indicator, id=indicator_id)
    request.breadcrumbs([
        ('Indicators', reverse('list_indicator_page')),
    ])
    context = {'indicator': indicator, 'variables': indicator.variables.all()}
    return render(request, 'indicator/indicator_variable_list.html', context)


@login_required
def variables(request):
    # return questions before last question
    if request.GET.get('id', None):
        indicator = Indicator.get(pk=request.GET.get('id', None))
        response = list(indicator.variables.values_list('name', flat=True))
    else:
        var_ids = request.GET.getlist('var_id[]')
        response = list(
            IndicatorVariable.objects.filter(
                id__in=var_ids).values_list('name', flat=True))
    return JsonResponse(response, safe=False)


@login_required
@permission_required('auth.can_view_batches')
def indicator_formula(request, indicator_id):
    try:
        indicator = Indicator.get(id=indicator_id)
    except Indicator.DoesNotExist:
        messages.error(request, INDICATOR_DOES_NOT_EXIST_MSG)
        return HttpResponseRedirect(reverse('list_indicator_page'))
    if request.method == 'POST':
        formulae_form = IndicatorFormulaeForm(instance=indicator,
                                              data=request.POST)
        if formulae_form.is_valid():
            formulae_form.save()
            messages.info(request, 'Formulae has been saved!')
            return HttpResponseRedirect(reverse('list_indicator_page'))
    else:
        formulae_form = IndicatorFormulaeForm(instance=indicator)
    request.breadcrumbs([
        ('Indicator List', reverse('list_indicator_page')),
    ])
    context = {
        'indicator_form': formulae_form,
        'title': 'Indicator Formulae',
        'button_label': 'Save',
        'indicator': indicator,
        'cancel_url': reverse('list_indicator_page')}
    return render(request, 'indicator/formulae.html', context)


def _retrieve_data_frame(request, indicator_id):
    selected_location = Location.objects.get(parent__isnull=True)
    params = request.GET or request.POST
    locations_filter = LocationsFilterForm(data=params)
    first_level_location_analyzed = Location.objects.filter(
        type__name__iexact="country")[0]
    indicator = Indicator.objects.get(id=indicator_id)
    last_selected_loc = locations_filter.last_location_selected
    if last_selected_loc:
        selected_location = last_selected_loc
    report_locations = selected_location.get_children().order_by('name')
    context = {'request': request,
               'indicator': indicator,
               'locations_filter': locations_filter,
               'selected_location': selected_location,
               'report_locations': report_locations}
    return context, indicator.get_data(
        selected_location, report_level=selected_location.level + 1)


@permission_required('auth.can_view_batches')
def simple_indicator(request, indicator_id):
    request.breadcrumbs([
        ('Indicator List', reverse('list_indicator_page')),
    ])
    context, reports_df = _retrieve_data_frame(request, indicator_id)
    indicator = context['indicator']
    # hence set the location where the report is based. i.e the child current
    # selected location.
    context['report'] = mark_safe(
        reports_df.to_html(
            na_rep='-',
            classes='table table-striped table-bordered dataTable '
                    'table-hover table-sort'))
    variable_names = indicator.active_variables()
    report_locations = context['report_locations']

    def make_hover_text(row):
        return '<br />'.join(['%s: %d' % (name, row[name])
                              for name in variable_names
                              if str(row[name]).isdigit()])
    reports_df['hover-text'] = reports_df.apply(make_hover_text, axis=1)
    if report_locations:
        trace1 = go.Bar(x=reports_df.index,
                        y=reports_df[indicator.REPORT_FIELD_NAME],
                        x0=0, y0=0,
                        name=indicator.name,
                        text=reports_df['hover-text'])
        data = go.Data([trace1])
        margin = go.Margin(pad=15)
        layout = go.Layout(
            title=indicator.name,
            xaxis={'title': report_locations[0].type.name},
            yaxis={'title': 'Values per %s' % report_locations[0].type.name},
            margin=margin,
            annotations=[
                dict(
                    x=xi,
                    y=yi,
                    text=str(yi),
                    xanchor='center',
                    yanchor='bottom',
                    showarrow=False,
                ) for xi, yi in zip(
                    reports_df.index,
                    reports_df[indicator.REPORT_FIELD_NAME])]
        )
        figure = go.Figure(data=data, layout=layout)
        graph_div = opy.plot(
            figure, auto_open=False, output_type='div', show_link=False)
        context['graph'] = mark_safe(graph_div)
    return render(request, 'indicator/simple_indicator.html', context)


@login_required
@permission_required('auth.can_view_batches')
def download_indicator_analysis(request, indicator_id):
    context, reports_df = _retrieve_data_frame(request, indicator_id)
    last_selected_loc = context['selected_location']
    indicator = context['indicator']
    file_name = '%s%s' % ('%s-%s-' % (
        last_selected_loc.type.name,
        last_selected_loc.name) if last_selected_loc else '',
        indicator.name)
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="%s.csv"' % file_name
    reports_df.to_csv(
        response, date_format='%Y-%m-%d %H:%M:%S', encoding='utf-8')
    # exclude interview id
    return response
bsd-3-clause
-3,426,124,051,870,418,000
37.703704
99
0.637321
false
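
A self-contained sketch of the offline plotly pattern that simple_indicator in the record above relies on (plotly 2.x/3.x API); the district names and values are made up for illustration:

import plotly.offline as opy
import plotly.graph_objs as go

# Hypothetical per-location values standing in for the indicator data frame.
locations = ['Gulu', 'Mbale', 'Wakiso']
values = [12, 30, 7]

trace = go.Bar(x=locations, y=values, name='Sample indicator')
layout = go.Layout(title='Sample indicator', xaxis={'title': 'District'})
figure = go.Figure(data=[trace], layout=layout)

# output_type='div' returns an HTML fragment instead of writing a file,
# which is how the view above fills context['graph'] for its template.
graph_div = opy.plot(figure, auto_open=False, output_type='div',
                     show_link=False)
print(graph_div[:60])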
dpgaspar/Flask-AppBuilder
examples/quickimages/config.py
1
1704
import os

basedir = os.path.abspath(os.path.dirname(__file__))

CSRF_ENABLED = True
SECRET_KEY = "\2\1thisismyscretkey\1\2\e\y\y\h"

OPENID_PROVIDERS = [
    {"name": "Google", "url": "https://www.google.com/accounts/o8/id"},
    {"name": "Yahoo", "url": "https://me.yahoo.com"},
    {"name": "AOL", "url": "http://openid.aol.com/<username>"},
    {"name": "Flickr", "url": "http://www.flickr.com/<username>"},
    {"name": "MyOpenID", "url": "https://www.myopenid.com"},
]

SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(basedir, "app.db")
# SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'

BABEL_DEFAULT_LOCALE = "en"
BABEL_DEFAULT_FOLDER = "translations"
LANGUAGES = {
    "en": {"flag": "gb", "name": "English"},
    "pt": {"flag": "pt", "name": "Portuguese"},
    "es": {"flag": "es", "name": "Spanish"},
    "de": {"flag": "de", "name": "German"},
    "zh": {"flag": "cn", "name": "Chinese"},
    "ru": {"flag": "ru", "name": "Russian"},
}

# ------------------------------
# GLOBALS FOR GENERAL APP's
# ------------------------------
UPLOAD_FOLDER = basedir + "/app/static/uploads/"
IMG_UPLOAD_FOLDER = basedir + "/app/static/uploads/"
IMG_UPLOAD_URL = "/static/uploads/"
IMG_SIZE = (150, 150, True)
AUTH_TYPE = 1
AUTH_ROLE_ADMIN = "Admin"
AUTH_ROLE_PUBLIC = "Public"
APP_NAME = "F.A.B. Example"
APP_THEME = ""  # default
# APP_THEME = "cerulean.css"  # COOL
# APP_THEME = "amelia.css"
# APP_THEME = "cosmo.css"
# APP_THEME = "cyborg.css"  # COOL
# APP_THEME = "flatly.css"
# APP_THEME = "journal.css"
# APP_THEME = "readable.css"
# APP_THEME = "simplex.css"
# APP_THEME = "slate.css"  # COOL
# APP_THEME = "spacelab.css"  # NICE
# APP_THEME = "united.css"
bsd-3-clause
6,128,436,733,377,320,000
31.150943
72
0.572183
false
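
A sketch of how a Flask app would typically consume a module like the config above; the filename-based lookup is the conventional Flask pattern, assumed here rather than shown in the record:

from flask import Flask

app = Flask(__name__)
# Pull every upper-case name (SECRET_KEY, SQLALCHEMY_DATABASE_URI,
# APP_THEME, ...) from the config module above into app.config.
app.config.from_object('config')

print(app.config['APP_NAME'])  # -> "F.A.B. Example"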
ClearCorp-dev/odoo-costa-rica
l10n_cr_hr_payroll/hr_employee.py
1
2087
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Addons modules by CLEARCORP S.A.
#    Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import models, fields, api


class hr_employee(models.Model):
    _inherit = 'hr.employee'

    def _check_report_number_child(self, cr, uid, ids, context=None):
        for employee in self.browse(cr, uid, ids, context=context):
            if employee.report_number_child < 0:
                return False
        return True

    @api.onchange('marital')
    def _onchange_marital(self):
        self.report_spouse = False

    marital = fields.Selection(
        [('single', 'Single'), ('married', 'Married'),
         ('widower', 'Widower'), ('divorced', 'Divorced')],
        string='Marital')
    report_spouse = fields.Boolean(
        'Report Spouse',
        help="If this employee reports his spouse for rent payment")
    report_number_child = fields.Integer(
        'Number of children to report',
        help="Number of children to report for rent payment")

    _defaults = {
        'report_number_child': 0,
    }

    _constraints = [
        (_check_report_number_child,
         'Error! The number of children to report must be greater '
         'or equal to zero.',
         ['report_number_child']),
    ]
agpl-3.0
1,186,865,629,129,506,300
40.76
147
0.624341
false
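
For comparison, a sketch of the same non-negative check written with the newer @api.constrains decorator instead of the legacy _constraints list used in the record above; this is an assumed alternative for Odoo 8's new API, not code from the module itself:

from openerp import models, api
from openerp.exceptions import ValidationError


class hr_employee_constrains_sketch(models.Model):
    _inherit = 'hr.employee'

    @api.constrains('report_number_child')
    def _check_report_number_child_new_api(self):
        # New-API constraints raise instead of returning False.
        for employee in self:
            if employee.report_number_child < 0:
                raise ValidationError(
                    'The number of children to report must be greater '
                    'or equal to zero.')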
dahiro/shotgun-replica
shotgun_replica/python/tests/shotgun_replica_tests/sync/local_to_shotgun/test_entities_field_change.py
1
4097
'''
Created on Nov 15, 2012

@author: bach
'''

import unittest
import tests_elefant

from shotgun_replica import factories, entities
from tests_elefant import commanda
from shotgun_replica.sync import local_to_shotgun, shotgun_to_local
from shotgun_replica.utilities import entityNaming, debug


class Test( unittest.TestCase ):

    local2shotgun = None
    testassetlibrary = None
    task = None
    testasset = None
    linkedAsset = None

    def setUp( self ):
        self.local2shotgun = local_to_shotgun.LocalDBEventSpooler()
        self.shotgun2local = shotgun_to_local.EventSpooler()
        self.testassetlibrary = factories.getObject(
            entities.AssetLibrary().getType(),
            remote_id = commanda.TEST_ASSETLIBRARY_ID )
        self.task = factories.getObject( "Task",
                                         remote_id = tests_elefant.testTaskID )
        self.testasset = tests_elefant.createTestAsset( self.testassetlibrary )
        debug.debug( self.testasset.getLocalID() )
        self.linkedAsset = tests_elefant.createTestAsset( self.testassetlibrary )
        debug.debug( self.linkedAsset.getLocalID() )

    def tearDown( self ):
        self.testasset.delete()
        self.linkedAsset.delete()
        self.assertTrue( self.local2shotgun.connectAndRun(),
                         "synch not successful" )
        self.assertTrue( self.shotgun2local.connectAndRun(),
                         "synch not successful" )

    def testLinkedAsset( self ):
        self.testasset.assets = [ self.linkedAsset ]
        self.testasset.save()

        # get connection objects from source
        connObj = factories.getConnectionObj( baseObj = self.testasset,
                                              linkedObj = self.linkedAsset,
                                              attribute = "assets" )
        self.assertNotEqual( connObj, None )

        # TODO: synch and check if not two connObj
        # self.assertTrue( self.local2shotgun.connectAndRun(),
        #                  "synch not successful" )
        connObj = factories.getConnectionObj( baseObj = self.testasset,
                                              linkedObj = self.linkedAsset,
                                              attribute = "assets" )
        self.assertNotEqual( type( connObj ), list,
                             "multiple connection objects after synch" )

        # get attribute of reverse field
        reverseAttrName = entityNaming.getReverseAttributeName( "Asset",
                                                                "assets" )
        linkedAsset = factories.getObject(
            "Asset", local_id = self.linkedAsset.getLocalID() )
        retLinks = linkedAsset.getField( reverseAttrName )
        self.assertTrue( retLinks != None and self.testasset in retLinks )

        # checking sync from shotgun to local
        self.assertTrue( self.shotgun2local.connectAndRun(),
                         "synch not successful" )
        connObj = factories.getConnectionObj( baseObj = self.testasset,
                                              linkedObj = self.linkedAsset,
                                              attribute = "assets" )
        self.assertNotEqual( type( connObj ), list,
                             "multiple connection objects after synch" )

        # remove connection
        self.testasset.assets = [ ]
        self.testasset.save()
        connObj = factories.getConnectionObj( baseObj = self.testasset,
                                              linkedObj = self.linkedAsset,
                                              attribute = "assets" )
        self.assertEqual( connObj, None )
        linkedAsset = factories.getObject(
            "Asset", local_id = self.linkedAsset.getLocalID() )
        retLinks = linkedAsset.getField( reverseAttrName )
        self.assertEqual( retLinks, [] )

        self.assertTrue( self.local2shotgun.connectAndRun(),
                         "synch not successful" )
        connObj = factories.getConnectionObj( baseObj = self.testasset,
                                              linkedObj = self.linkedAsset,
                                              attribute = "assets" )
        self.assertEqual( connObj, None )


if __name__ == "__main__":
    # import sys;sys.argv = ['', 'Test.testLinkedAsset']
    unittest.main()
bsd-3-clause
-1,195,573,157,826,296,600
39.97
96
0.604833
false
eckardm/archivematica
src/MCPClient/lib/clientScripts/archivematicaMoveSIP.py
1
2124
#!/usr/bin/env python2

# This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.

# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <[email protected]>

import os
import shutil
import sys

import django
django.setup()
# dashboard
from main.models import SIP

# archivematicaCommon
from custom_handlers import get_script_logger
from fileOperations import renameAsSudo


def updateDB(dst, sip_uuid):
    SIP.objects.filter(uuid=sip_uuid).update(currentpath=dst)


def moveSIP(src, dst, sipUUID, sharedDirectoryPath):
    # Prepare paths
    if src.endswith("/"):
        src = src[:-1]

    dest = dst.replace(sharedDirectoryPath, "%sharedPath%", 1)
    if dest.endswith("/"):
        dest = os.path.join(dest, os.path.basename(src))
    if dest.endswith("/."):
        dest = os.path.join(dest[:-1], os.path.basename(src))

    updateDB(dest + "/", sipUUID)

    # If destination already exists, delete it with warning
    dest_path = os.path.join(dst, os.path.basename(src))
    if os.path.exists(dest_path):
        print >>sys.stderr, dest_path, 'exists, deleting'
        shutil.rmtree(dest_path)

    renameAsSudo(src, dst)


if __name__ == '__main__':
    logger = get_script_logger("archivematica.mcp.client.moveSIP")

    src = sys.argv[1]
    dst = sys.argv[2]
    sipUUID = sys.argv[3]
    sharedDirectoryPath = sys.argv[4]
    moveSIP(src, dst, sipUUID, sharedDirectoryPath)
agpl-3.0
5,462,322,742,759,084,000
31.181818
77
0.713277
false
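
A hypothetical driver for the client script above, showing how the four arguments fit together; the paths and UUID are placeholders, and it assumes a configured Archivematica environment since importing the module runs django.setup():

from archivematicaMoveSIP import moveSIP

shared = "/var/archivematica/sharedDirectory/"
moveSIP(
    src=shared + "watchedDirectories/workFlowDecisions/mySIP",
    dst=shared + "currentlyProcessing/",
    sipUUID="11111111-2222-3333-4444-555555555555",
    sharedDirectoryPath=shared,
)
# Because dst ends with "/", the basename of src is appended, so the
# database row for the SIP is updated to point at
# "%sharedPath%currentlyProcessing/mySIP/" before the move happens.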
moonso/loqusdb
loqusdb/plugins/load.py
1
1221
class BaseLoadMixin(object):

    def load_case(self, case_obj, variants):
        """Load a case and all of its variants to the database.

        Args:
            case_obj (dict): A case dictionary
            variants (Iterable(dict)): An iterable with variant dictionaries
        """
        raise NotImplementedError

    def get_variant(self, variant):
        """Return a variant from the database

        Args:
            variant (dict): A variant dictionary

        Returns:
            variant (dict): A variant dictionary
        """
        raise NotImplementedError

    def delete_variant(self, variant):
        """Remove variant from database

        This means that we take down the observations variable with one.
        If 'observations' == 1 we remove the variant. If variant was
        homozygote we decrease 'homozygote' with one.

        Args:
            variant (dict): A variant dictionary
        """
        raise NotImplementedError

    def add_bulk(self, variants):
        """Add a bulk of variants to the database

        Args:
            variants (Iterable(dict)): An iterable with variant dictionaries
        """
        raise NotImplementedError
mit
5,326,020,432,095,458,000
28.095238
83
0.553645
false
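
A minimal in-memory sketch of a plugin implementing the mixin above, following the observation-count semantics described in the delete_variant docstring; the '_id' key, the dict store, and the import path are assumptions for illustration:

# Assumed import path, based on the record's file location.
from loqusdb.plugins.load import BaseLoadMixin


class MemoryAdapter(BaseLoadMixin):
    """Toy adapter keeping variants in a dict keyed on an assumed '_id'."""

    def __init__(self):
        self._db = {}

    def load_case(self, case_obj, variants):
        # A real adapter would also persist case_obj; this sketch only
        # loads the variants.
        self.add_bulk(variants)

    def get_variant(self, variant):
        return self._db.get(variant['_id'])

    def add_bulk(self, variants):
        for variant in variants:
            existing = self._db.setdefault(
                variant['_id'],
                {'_id': variant['_id'], 'observations': 0, 'homozygote': 0})
            existing['observations'] += 1
            existing['homozygote'] += variant.get('homozygote', 0)

    def delete_variant(self, variant):
        existing = self._db.get(variant['_id'])
        if existing is None:
            return
        if existing['observations'] == 1:
            # Last observation: remove the variant entirely.
            self._db.pop(variant['_id'])
            return
        existing['observations'] -= 1
        if variant.get('homozygote'):
            existing['homozygote'] -= 1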