repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---
stringlengths 5-92 | stringlengths 4-232 | stringclasses 19 values | stringlengths 4-7 | stringlengths 721-1.04M | stringclasses 15 values | int64 -9,223,277,421,539,062,000 to 9,223,102,107B | float64 6.51-99.9 | int64 15-997 | float64 0.25-0.97 | bool 1 class
ramineni/my_congress | congress/tests/datasources/fakes.py | 1 | 6195 |
# Copyright (c) 2014 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import mock
class NovaFakeClient(mock.MagicMock):
# TODO(rajdeepd): Replace Fake with mocks directly in test_neutron_driver
def __init__(self, *args, **kwargs):
super(NovaFakeClient, self).__init__(*args, **kwargs)
self.servers = mock.MagicMock()
self.servers.list.return_value = self.get_server_list()
self.flavors = mock.MagicMock()
self.flavors.list.return_value = self.get_flavor_list()
self.hosts = mock.MagicMock()
self.hosts.list.return_value = self.get_host_list()
self.services = mock.MagicMock()
self.services.list.return_value = self.get_service_list()
self.availability_zones = mock.MagicMock()
self.availability_zones.list.return_value = self.get_zone_list()
def get_mock_server(self, id, name, host_id, status, tenant_id, user_id,
flavor, image, zone=None, host_name=None):
server = mock.MagicMock()
server.id = id
server.hostId = host_id
server.tenant_id = tenant_id
server.user_id = user_id
server.status = status
server.name = name
server.image = image
server.flavor = flavor
if zone is not None:
setattr(server, 'OS-EXT-AZ:availability_zone', zone)
else:
# This ensures that the magic mock raises an AttributeError
delattr(server, 'OS-EXT-AZ:availability_zone')
if host_name is not None:
setattr(server, 'OS-EXT-SRV-ATTR:hypervisor_hostname',
host_name)
else:
# This ensures that the magic mock raises an AttributeError
delattr(server, 'OS-EXT-SRV-ATTR:hypervisor_hostname')
return server
def get_server_list(self):
server_one = (
self.get_mock_server(1234, 'sample-server',
"e4d909c290d0fb1ca068ffaddf22cbd0",
'BUILD',
'50e14867-7c64-4ec9-be8d-ed2470ca1d24',
'33ea0494-2bdf-4382-a445-9068997430b9',
{"id": 1}, {"id": 2}, 'default', 'host1'))
server_two = (
self.get_mock_server(5678, 'sample-server2',
"9e107d9d372bb6826bd81d3542a419d6",
'ACTIVE',
'50e14867-7c64-4ec9-be8d-ed2470ca1d24',
'33ea0494-2bdf-4382-a445-9068997430b9',
{"id": 1}, {"id": 2}))
server_three = (
self.get_mock_server(9012, 'sample-server3',
"9e107d9d372bb6826bd81d3542a419d6",
'ACTIVE',
'50e14867-7c64-4ec9-be8d-ed2470ca1d24',
'33ea0494-2bdf-4382-a445-9068997430b9',
{"id": 1}, {"id": 2}, 'foo', 'host2'))
return [server_one, server_two, server_three]
def get_flavor(self, id, name, vcpus, ram, disk, ephemeral, rxtx_factor):
f = mock.MagicMock()
f.id = id
f.name = name
f.vcpus = vcpus
f.ram = ram
f.disk = disk
f.ephemeral = ephemeral
f.rxtx_factor = rxtx_factor
return f
def get_flavor_list(self):
flavor_one = self.get_flavor(1, "256 MB Server", 1, 256, 10, 10, 1.0)
flavor_two = self.get_flavor(2, "512 MB Server", 2, 512, 20, 20, 1.0)
flavor_three = self.get_flavor(3, "128 MB Server", 4, 128, 0, 0, 3.0)
flavor_four = self.get_flavor(4, "1024 MB Server", 3, 1024, 10, 10,
2.0)
return [flavor_one, flavor_two, flavor_three, flavor_four]
def get_host(self, host_name, service, zone):
h = mock.MagicMock()
h.host_name = host_name
h.service = service
h.zone = zone
return h
def get_host_list(self):
h_one = self.get_host('host1', 'nova-compute', 'nova1')
h_two = self.get_host('host2', 'nova-cert', 'nova1')
return [h_one, h_two]
def get_service(self, id, binary, host, zone, status, state,
updated_at, disabled_reason):
s = mock.MagicMock()
s.id = id
s.binary = binary
s.host = host
s.zone = zone
s.status = status
s.state = state
s.updated_at = updated_at
s.disabled_reason = disabled_reason
return s
def get_service_list(self):
service_one = self.get_service(1, 'nova-compute', 'nova',
'nova1', 'enabled', 'up',
'2015-07-28T08:28:37.000000', None)
service_two = self.get_service(2, 'nova-schedule', 'nova',
'nova1', 'disabled', 'up',
'2015-07-28T08:28:38.000000',
'daily maintenance')
return [service_one, service_two]
def get_availability_zone(self, name, state):
zone = mock.MagicMock()
zone.zoneName = name
zone.zoneState = state
return zone
def get_zone_list(self):
zone_one = self.get_availability_zone('AZ1', 'available')
zone_two = self.get_availability_zone('AZ2', 'not available')
return [zone_one, zone_two]
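# Illustrative usage sketch (not part of the original test module):
# NovaFakeClient mimics a novaclient handle, so datasource drivers can be
# exercised without a live Nova service.
if __name__ == '__main__':
    client = NovaFakeClient()
    assert len(client.servers.list()) == 3    # fixtures from get_server_list()
    assert len(client.flavors.list()) == 4    # fixtures from get_flavor_list()
    assert client.availability_zones.list()[0].zoneName == 'AZ1'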
| apache-2.0 | 7,589,944,185,511,851,000 | 37.962264 | 78 | 0.54318 | false |
xbmcmegapack/plugin.video.megapack.dev | resources/lib/menus/home_languages_northern_sami.py | 1 | 1123 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine ([email protected])
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
class Languages_Northern_sami():
'''Class that manages this specific menu context.'''
def open(self, plugin, menu):
menu.add_xplugins(plugin.get_xplugins(dictionaries=["Channels",
"Events", "Live", "Movies", "Sports", "TVShows"],
            languages=["Northern Sami"]))
| gpl-3.0 | -7,259,454,216,034,070,000 | 37.689655 | 76 | 0.696699 | false |
caperren/Archives | OSU Robotics Club/Mars Rover 2016-2017/common/soil-sensor-test.py | 1 | 2721 |
#!/usr/bin/env python
import sys
import math
import struct
import serial
import signal
import os
import time
SerialPath="/dev/ttyUSB0"
class SoilSensor():
def __init__(self, path):
self.path = path
self.__tty = serial.Serial(port=self.path,
baudrate=9600,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS,
timeout=0.2)
def set_mode_rx(self):
"""Set the transceiver to receive mode."""
#Set DTR for receive mode, clear for transmit
self.__tty.setDTR(True)
def set_mode_tx(self):
"""Set the transceiver to transmit mode."""
self.__tty.setDTR(False)
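    # Hardware note (the transceiver details here are an assumption): the
    # sensor appears to sit behind a half-duplex serial transceiver whose
    # direction is driven by DTR -- cleared for transmit, set for receive.
    # The command methods below therefore switch to TX, write, pause for
    # the line to settle, then switch back to RX before reading the reply.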
def send_command(self, addr_str, command_str):
"""Send a command to the soil sensor."""
self.set_mode_tx()
time.sleep(.04)
self.__tty.write(addr_str + command_str + "\r")
self.__tty.flush()
time.sleep(.05)
self.__tty.write("\n")
time.sleep(.005)
self.set_mode_rx()
reply = self.__tty.read(size=10000000)
return reply
def set_data(self, addr_str, command_str, data_str):
"""Set data in the soil sensor."""
self.set_mode_tx()
time.sleep(.04)
self.__tty.write(addr_str + command_str + "=" + data_str + "\r\n")
self.__tty.flush()
time.sleep(.05)
self.__tty.write("\n")
time.sleep(.005)
self.set_mode_rx()
reply = self.__tty.read(size=10000000)
return reply
def get_data(self, addr_str, command_str):
"""Get data from the sensor, returning the data.
command_str is the two-character string."""
self.set_mode_tx()
time.sleep(.04)
self.__tty.write(addr_str + command_str + "=?" + "\r")
self.__tty.flush()
time.sleep(.05)
self.__tty.write("\n")
time.sleep(.005)
self.set_mode_rx()
reply = self.__tty.read(size=10000000)
return reply
    def get_measurement(self, addr_str):
        """Take and return a soil measurement."""
        addr = addr_str
        self.send_command(addr, "TR")         # trigger a new reading
        time.sleep(1)
        data = self.send_command(addr, "T3")  # fetch reading set 3
        print data
        data = data[3:]
        data = data.split(",")
        print "Raw Values (reading set 3):", data
        TempC = float(data[0])
        Moisture = float(data[2])
        Cond = float(data[4])
        PermR = float(data[6])
        PermI = float(data[8])
        Salinity = Cond * 6.4
        return {"TempC": TempC, "Moisture": Moisture, "Salinity": Salinity}
def main():
s = SoilSensor(SerialPath)
addr = s.get_data("///", "SN")[0:3]
s.set_data(addr, "PE", "1")
time.sleep(1)
while True:
print s.get_measurement(addr)
print ""
time.sleep(10)
if __name__ == "__main__":
    main()
| gpl-3.0 | 7,409,158,072,236,962,000 | 25.23 | 68 | 0.586182 | false |
YzPaul3/h2o-3 | h2o-py/h2o/connection.py | 1 | 29700 |
"""
An H2OConnection represents the latest active handle to a cloud. No more than a single
H2OConnection object will be active at any one time.
"""
from __future__ import print_function
from __future__ import absolute_import
import requests
import math
import tempfile
import os
import re
import sys
import time
import subprocess
import atexit
import warnings
import site
from .display import H2ODisplay
from .h2o_logging import _is_logging, _log_rest
from .two_dim_table import H2OTwoDimTable
from .utils.shared_utils import quote
from six import iteritems, PY3
from string import ascii_lowercase, digits
from random import choice
warnings.simplefilter('always', UserWarning)
try:
warnings.simplefilter('ignore', requests.packages.urllib3.exceptions.InsecureRequestWarning)
except:
pass
__H2OCONN__ = None # the single active connection to H2O cloud
__H2O_REST_API_VERSION__ = 3 # const for the version of the rest api
class H2OConnection(object):
"""
H2OConnection is a class that represents a connection to the H2O cluster.
It is specified by an IP address and a port number.
Objects of type H2OConnection are not instantiated directly!
This class contains static methods for performing the common REST methods
GET, POST, and DELETE.
"""
__ENCODING__ = "utf-8"
__ENCODING_ERROR__ = "replace"
def __init__(self, ip, port, start_h2o, enable_assertions, license, nthreads, max_mem_size, min_mem_size, ice_root,
strict_version_check, proxy, https, insecure, username, password, max_mem_size_GB, min_mem_size_GB, proxies, size):
"""
Instantiate the package handle to the H2O cluster.
:param ip: An IP address, default is "localhost"
:param port: A port, default is 54321
:param start_h2o: A boolean dictating whether this module should start the H2O jvm. An attempt is made anyways if _connect fails.
:param enable_assertions: If start_h2o, pass `-ea` as a VM option.
:param license: If not None, is a path to a license file.
:param nthreads: Number of threads in the thread pool. This relates very closely to the number of CPUs used.
-1 means use all CPUs on the host. A positive integer specifies the number of CPUs directly. This value is only used when Python starts H2O.
:param max_mem_size: Maximum heap size (jvm option Xmx) in gigabytes.
:param min_mem_size: Minimum heap size (jvm option Xms) in gigabytes.
:param ice_root: A temporary directory (default location is determined by tempfile.mkdtemp()) to hold H2O log files.
:param strict_version_check: Setting this to False is unsupported and should only be done when advised by technical support.
:param proxy: A dictionary with keys 'ftp', 'http', 'https' and values that correspond to a proxy path.
:param https: Set this to True to use https instead of http.
:param insecure: Set this to True to disable SSL certificate checking.
:param username: Username to login with.
:param password: Password to login with.
:param max_mem_size_GB: DEPRECATED. Use max_mem_size.
:param min_mem_size_GB: DEPRECATED. Use min_mem_size.
:param proxies: DEPRECATED. Use proxy.
:param size: DEPRECATED.
:return: None
"""
port = as_int(port)
    if not (isinstance(port, int) and 0 <= port <= sys.maxsize): raise ValueError("Port out of range, " + str(port))
if https != insecure: raise ValueError("`https` and `insecure` must both be True to enable HTTPS")
#Deprecated params
if max_mem_size_GB is not None:
warnings.warn("`max_mem_size_GB` is deprecated. Use `max_mem_size` instead.", category=DeprecationWarning)
max_mem_size = max_mem_size_GB
if min_mem_size_GB is not None:
warnings.warn("`min_mem_size_GB` is deprecated. Use `min_mem_size` instead.", category=DeprecationWarning)
min_mem_size = min_mem_size_GB
if proxies is not None:
warnings.warn("`proxies` is deprecated. Use `proxy` instead.", category=DeprecationWarning)
proxy = proxies
if size is not None:
warnings.warn("`size` is deprecated.", category=DeprecationWarning)
global __H2OCONN__
self._cld = None
self._ip = ip
self._port = port
self._proxy = proxy
self._https = https
self._insecure = insecure
self._username = username
self._password = password
self._session_id = None
self._rest_version = __H2O_REST_API_VERSION__
self._child = getattr(__H2OCONN__, "_child") if hasattr(__H2OCONN__, "_child") else None
__H2OCONN__ = self
#Give user warning if proxy environment variable is found. PUBDEV-2504
for name, value in os.environ.items():
if name.lower()[-6:] == '_proxy' and value:
warnings.warn("Proxy environment variable `" + name + "` with value `" + value + "` found. This may interfere with your H2O Connection.")
jarpaths = H2OConnection.jar_paths()
if os.path.exists(jarpaths[0]): jar_path = jarpaths[0]
elif os.path.exists(jarpaths[1]): jar_path = jarpaths[1]
elif os.path.exists(jarpaths[2]): jar_path = jarpaths[2]
elif os.path.exists(jarpaths[3]): jar_path = jarpaths[3]
elif os.path.exists(jarpaths[4]): jar_path = jarpaths[4]
else: jar_path = jarpaths[5]
try:
cld = self._connect()
except:
# try to start local jar or re-raise previous exception
if not start_h2o: raise ValueError("Cannot connect to H2O server. Please check that H2O is running at {}".format(H2OConnection.make_url("")))
print()
print()
print("No instance found at ip and port: " + ip + ":" + str(port) + ". Trying to start local jar...")
print()
print()
path_to_jar = os.path.exists(jar_path)
if path_to_jar:
if not ice_root:
ice_root = tempfile.mkdtemp()
cld = self._start_local_h2o_jar(max_mem_size, min_mem_size, enable_assertions, license, ice_root, jar_path, nthreads)
else:
print("No jar file found. Could not start local instance.")
print("Jar Paths searched: ")
for jp in jarpaths:
print("\t" + jp)
print()
raise
__H2OCONN__._cld = cld
if strict_version_check and os.environ.get('H2O_DISABLE_STRICT_VERSION_CHECK') is None:
ver_h2o = cld['version']
from .__init__ import __version__
ver_pkg = "UNKNOWN" if __version__ == "SUBST_PROJECT_VERSION" else __version__
if ver_h2o != ver_pkg:
try:
branch_name_h2o = cld['branch_name']
except KeyError:
branch_name_h2o = None
else:
branch_name_h2o = cld['branch_name']
try:
build_number_h2o = cld['build_number']
except KeyError:
build_number_h2o = None
else:
build_number_h2o = cld['build_number']
if build_number_h2o is None:
raise EnvironmentError("Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
"Upgrade H2O and h2o-Python to latest stable version - "
"http://h2o-release.s3.amazonaws.com/h2o/latest_stable.html"
"".format(ver_h2o, str(ver_pkg)))
elif build_number_h2o == 'unknown':
raise EnvironmentError("Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
"Upgrade H2O and h2o-Python to latest stable version - "
"http://h2o-release.s3.amazonaws.com/h2o/latest_stable.html"
"".format(ver_h2o, str(ver_pkg)))
elif build_number_h2o == '99999':
raise EnvironmentError("Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
"This is a developer build, please contact your developer."
"".format(ver_h2o, str(ver_pkg)))
else:
raise EnvironmentError("Version mismatch. H2O is version {0}, but the h2o-python package is version {1}. "
"Install the matching h2o-Python version from - "
"http://h2o-release.s3.amazonaws.com/h2o/{2}/{3}/index.html."
"".format(ver_h2o, str(ver_pkg),branch_name_h2o, build_number_h2o))
self._session_id = H2OConnection.get_json(url_suffix="InitID")["session_key"]
H2OConnection._cluster_info()
@staticmethod
def default():
H2OConnection.__ENCODING__ = "utf-8"
H2OConnection.__ENCODING_ERROR__ = "replace"
@staticmethod
def jar_paths():
sys_prefix1 = sys_prefix2 = sys.prefix
if sys_prefix1.startswith('/Library'): sys_prefix2 = '/System'+sys_prefix1
elif sys_prefix1.startswith('/System'): sys_prefix2 = sys_prefix1.split('/System')[1]
return [os.path.join(sys_prefix1, "h2o_jar", "h2o.jar"),
os.path.join(os.path.sep,"usr","local","h2o_jar","h2o.jar"),
os.path.join(sys_prefix1, "local", "h2o_jar", "h2o.jar"),
os.path.join(site.USER_BASE, "h2o_jar", "h2o.jar"),
os.path.join(sys_prefix2, "h2o_jar", "h2o.jar"),
os.path.join(sys_prefix2, "h2o_jar", "h2o.jar"),
]
@staticmethod
def _cluster_info():
global __H2OCONN__
cld = __H2OCONN__._cld
ncpus = sum([n['num_cpus'] for n in cld['nodes']])
allowed_cpus = sum([n['cpus_allowed'] for n in cld['nodes']])
mfree = sum([n['free_mem'] for n in cld['nodes']])
cluster_health = all([n['healthy'] for n in cld['nodes']])
ip = "127.0.0.1" if __H2OCONN__._ip=="localhost" else __H2OCONN__._ip
cluster_info = [
["H2O cluster uptime: ", get_human_readable_time(cld["cloud_uptime_millis"])],
["H2O cluster version: ", cld["version"]],
["H2O cluster name: ", cld["cloud_name"]],
["H2O cluster total nodes: ", cld["cloud_size"]],
["H2O cluster total free memory: ", get_human_readable_size(mfree)],
["H2O cluster total cores: ", str(ncpus)],
["H2O cluster allowed cores: ", str(allowed_cpus)],
["H2O cluster healthy: ", str(cluster_health)],
["H2O Connection ip: ", ip],
["H2O Connection port: ", __H2OCONN__._port],
["H2O Connection proxy: ", __H2OCONN__._proxy],
["Python Version: ", sys.version.split()[0]],
]
__H2OCONN__._cld = H2OConnection.get_json(url_suffix="Cloud") # update the cached version of cld
H2ODisplay(cluster_info)
def _connect(self, size=1, max_retries=5, print_dots=False):
"""
Does not actually "connect", instead simply tests that the cluster can be reached,
is of a certain size, and is taking basic status commands.
:param size: The number of H2O instances in the cloud.
:return: The JSON response from a "stable" cluster.
"""
retries = 0
while True:
retries += 1
if print_dots:
self._print_dots(retries)
try:
cld = H2OConnection.get_json(url_suffix="Cloud")
if not cld['cloud_healthy']:
raise ValueError("Cluster reports unhealthy status", cld)
if cld['cloud_size'] >= size and cld['consensus']:
if print_dots: print(" Connection successful!")
return cld
except EnvironmentError:
pass
# Cloud too small or voting in progress; sleep; try again
time.sleep(0.1)
if retries > max_retries:
raise EnvironmentError("Max retries exceeded. Could not establish link to the H2O cloud @ " + str(self._ip) + ":" + str(self._port))
def _print_dots(self, retries):
sys.stdout.write("\rStarting H2O JVM and connecting: {}".format("." * retries))
sys.stdout.flush()
def _start_local_h2o_jar(self, mmax, mmin, ea, license, ice, jar_path, nthreads):
command = H2OConnection._check_java()
if license:
if not os.path.exists(license):
raise ValueError("License file not found (" + license + ")")
if not ice:
raise ValueError("`ice_root` must be specified")
stdout = open(H2OConnection._tmp_file("stdout"), 'w')
stderr = open(H2OConnection._tmp_file("stderr"), 'w')
print("Using ice_root: " + ice)
print()
jver = subprocess.check_output([command, "-version"], stderr=subprocess.STDOUT)
if PY3: jver = str(jver, H2OConnection.__ENCODING__)
print()
print("Java Version: " + jver)
print()
if "GNU libgcj" in jver:
raise ValueError("Sorry, GNU Java is not supported for H2O.\n"+
"Please download the latest Java SE JDK 7 from the following URL:\n"+
"http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html")
if "Client VM" in jver:
print("WARNING: ")
print("You have a 32-bit version of Java. H2O works best with 64-bit Java.")
print("Please download the latest Java SE JDK 7 from the following URL:")
print("http://www.oracle.com/technetwork/java/javase/downloads/jdk7-downloads-1880260.html")
print()
vm_opts = []
if mmin: vm_opts += ["-Xms{}g".format(mmin)]
if mmax: vm_opts += ["-Xmx{}g".format(mmax)]
if ea: vm_opts += ["-ea"]
h2o_opts = ["-verbose:gc",
"-XX:+PrintGCDetails",
"-XX:+PrintGCTimeStamps",
"-jar", jar_path,
"-name", "H2O_started_from_python_"
+ re.sub("[^A-Za-z0-9]", "_",
(os.getenv("USERNAME") if sys.platform == "win32" else os.getenv("USER")) or "unknownUser")
+ "_" + "".join([choice(ascii_lowercase) for _ in range(3)] + [choice(digits) for _ in range(3)]),
"-ip", "127.0.0.1",
"-port", "54321",
"-ice_root", ice,
]
if nthreads > 0: h2o_opts += ["-nthreads", str(nthreads)]
if license: h2o_opts += ["-license", license]
cmd = [command] + vm_opts + h2o_opts
cwd = os.path.abspath(os.getcwd())
if sys.platform == "win32":
self._child = subprocess.Popen(args=cmd,stdout=stdout,stderr=stderr,cwd=cwd,creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
else:
self._child = subprocess.Popen(args=cmd, stdout=stdout, stderr=stderr, cwd=cwd, preexec_fn=os.setsid)
cld = self._connect(1, 30, True)
return cld
@staticmethod
def _check_java():
# *WARNING* some over-engineering follows... :{
# is java in PATH?
if H2OConnection._pwhich("java"):
return H2OConnection._pwhich("java")
# check if JAVA_HOME is set (for windoz)
if os.getenv("JAVA_HOME"):
return os.path.join(os.getenv("JAVA_HOME"), "bin", "java.exe")
# check /Program Files/ and /Program Files (x86)/ if os is windoz
if sys.platform == "win32":
program_folder = os.path.join("C:", "{}", "Java")
program_folders = [program_folder.format("Program Files"),
program_folder.format("Program Files (x86)")]
# check both possible program files...
for folder in program_folders:
# hunt down the jdk directory
        possible_jdk_dir = [d for d in os.listdir(folder) if 'jdk' in d] if os.path.isdir(folder) else []
# if got a non-empty list of jdk directory candidates
if len(possible_jdk_dir) != 0:
# loop over and check if the java.exe exists
for jdk in possible_jdk_dir:
path = os.path.join(folder, jdk, "bin", "java.exe")
if os.path.exists(path):
return path
# check for JRE and warn
for folder in program_folders:
path = os.path.join(folder, "jre7", "bin", "java.exe")
if os.path.exists(path):
raise ValueError("Found JRE at " + path + "; but H2O requires the JDK to run.")
else:
raise ValueError("Cannot find Java. Please install the latest JDK from\n"
+"http://www.oracle.com/technetwork/java/javase/downloads/index.html" )
@staticmethod
def _pwhich(e):
"""
POSIX style which
"""
ok = os.X_OK
if e:
if os.access(e, ok):
return e
for path in os.getenv('PATH').split(os.pathsep):
full_path = os.path.join(path, e)
if os.access(full_path, ok):
return full_path
return None
@staticmethod
def _tmp_file(type):
usr = re.sub("[^A-Za-z0-9]", "_", (os.getenv("USERNAME") if sys.platform == "win32" else os.getenv("USER")) or "unknownUser")
if type == "stdout":
path = os.path.join(tempfile.mkdtemp(), "h2o_{}_started_from_python.out".format(usr))
print("JVM stdout: " + path)
return path
if type == "stderr":
path = os.path.join(tempfile.mkdtemp(), "h2o_{}_started_from_python.err".format(usr))
print("JVM stderr: " + path)
return path
if type == "pid":
return os.path.join(tempfile.mkdtemp(), "h2o_{}_started_from_python.pid".format(usr))
raise ValueError("Unkown type in H2OConnection._tmp_file call: " + type)
@staticmethod
def _shutdown(conn, prompt):
"""
Shut down the specified instance. All data will be lost.
This method checks if H2O is running at the specified IP address and port, and if it is, shuts down that H2O
instance.
:param conn: An H2OConnection object containing the IP address and port of the server running H2O.
:param prompt: A logical value indicating whether to prompt the user before shutting down the H2O server.
:return: None
"""
global __H2OCONN__
if conn is None: raise ValueError("There is no H2O instance running.")
try:
if not conn.cluster_is_up(conn): raise ValueError("There is no H2O instance running at ip: {0} and port: "
"{1}".format(conn.ip(), conn.port()))
except:
#H2O is already shutdown on the java side
ip = conn.ip()
port = conn.port()
__H2OCONN__= None
raise ValueError("The H2O instance running at {0}:{1} has already been shutdown.".format(ip, port))
if not isinstance(prompt, bool): raise ValueError("`prompt` must be TRUE or FALSE")
if prompt:
question = "Are you sure you want to shutdown the H2O instance running at {0}:{1} (Y/N)? ".format(conn.ip(), conn.port())
response = input(question) if PY3 else raw_input(question)
else: response = "Y"
if response == "Y" or response == "y":
conn.post(url_suffix="Shutdown")
__H2OCONN__ = None #so that the "Did you run `h2o.init()`" ValueError is triggered
@staticmethod
def rest_version(): return __H2OCONN__._rest_version
@staticmethod
def session_id(): return __H2OCONN__._session_id
@staticmethod
def port(): return __H2OCONN__._port
@staticmethod
def ip(): return __H2OCONN__._ip
@staticmethod
def https(): return __H2OCONN__._https
@staticmethod
def username(): return __H2OCONN__._username
@staticmethod
def password(): return __H2OCONN__._password
@staticmethod
def insecure(): return __H2OCONN__._insecure
@staticmethod
def current_connection(): return __H2OCONN__
@staticmethod
def check_conn():
if not __H2OCONN__:
raise EnvironmentError("No active connection to an H2O cluster. Try calling `h2o.init()`")
return __H2OCONN__
@staticmethod
def cluster_is_up(conn):
"""
Determine if an H2O cluster is up or not
:param conn: An H2OConnection object containing the IP address and port of the server running H2O.
:return: TRUE if the cluster is up; FALSE otherwise
"""
if not isinstance(conn, H2OConnection): raise ValueError("`conn` must be an H2OConnection object")
rv = conn.current_connection()._attempt_rest(url=("https" if conn.https() else "http") +"://{0}:{1}/".format(conn.ip(), conn.port()), method="GET",
post_body="", file_upload_info="")
if rv.status_code == 401: warnings.warn("401 Unauthorized Access. Did you forget to provide a username and password?")
return rv.status_code == 200 or rv.status_code == 301
"""
Below is the REST implementation layer:
_attempt_rest -- GET, POST, DELETE
_do_raw_rest
get
post
get_json
post_json
All methods are static and rely on an active __H2OCONN__ object.
"""
@staticmethod
def make_url(url_suffix, _rest_version=None):
scheme = "https" if H2OConnection.https() else "http"
_rest_version = _rest_version or H2OConnection.rest_version()
return "{}://{}:{}/{}/{}".format(scheme,H2OConnection.ip(),H2OConnection.port(),_rest_version,url_suffix)
@staticmethod
def get(url_suffix, **kwargs):
if __H2OCONN__ is None:
raise ValueError("No h2o connection. Did you run `h2o.init()` ?")
return __H2OCONN__._do_raw_rest(url_suffix, "GET", None, **kwargs)
@staticmethod
def post(url_suffix, file_upload_info=None, **kwargs):
if __H2OCONN__ is None:
raise ValueError("No h2o connection. Did you run `h2o.init()` ?")
return __H2OCONN__._do_raw_rest(url_suffix, "POST", file_upload_info, **kwargs)
@staticmethod
def delete(url_suffix, **kwargs):
if __H2OCONN__ is None:
raise ValueError("No h2o connection. Did you run `h2o.init()` ?")
return __H2OCONN__._do_raw_rest(url_suffix, "DELETE", None, **kwargs)
@staticmethod
def get_json(url_suffix, **kwargs):
if __H2OCONN__ is None:
raise ValueError("No h2o connection. Did you run `h2o.init()` ?")
return __H2OCONN__._rest_json(url_suffix, "GET", None, **kwargs)
@staticmethod
def post_json(url_suffix, file_upload_info=None, **kwargs):
if __H2OCONN__ is None:
raise ValueError("No h2o connection. Did you run `h2o.init()` ?")
return __H2OCONN__._rest_json(url_suffix, "POST", file_upload_info, **kwargs)
def _rest_json(self, url_suffix, method, file_upload_info, **kwargs):
raw_txt = self._do_raw_rest(url_suffix, method, file_upload_info, **kwargs)
return self._process_tables(raw_txt.json())
# Massage arguments into place, call _attempt_rest
def _do_raw_rest(self, url_suffix, method, file_upload_info, **kwargs):
if not url_suffix:
raise ValueError("No url suffix supplied.")
# allow override of REST version, currently used for Rapids which is /99
if '_rest_version' in kwargs:
_rest_version = kwargs['_rest_version']
del kwargs['_rest_version']
else:
_rest_version = self._rest_version
url = H2OConnection.make_url(url_suffix,_rest_version)
query_string = ""
for k,v in iteritems(kwargs):
if v is None: continue #don't send args set to None so backend defaults take precedence
if isinstance(v, list):
x = '['
for l in v:
if isinstance(l,list):
x += '['
x += ','.join([str(e) if PY3 else str(e).encode(H2OConnection.__ENCODING__, errors=H2OConnection.__ENCODING_ERROR__) for e in l])
x += ']'
else:
x += str(l) if PY3 else str(l).encode(H2OConnection.__ENCODING__, errors=H2OConnection.__ENCODING_ERROR__)
x += ','
x = x[:-1]
x += ']'
else:
x = str(v) if PY3 else str(v).encode(H2OConnection.__ENCODING__, errors=H2OConnection.__ENCODING_ERROR__)
query_string += k+"="+quote(x)+"&"
query_string = query_string[:-1] # Remove trailing extra &
post_body = ""
if not file_upload_info:
if method == "POST":
post_body = query_string
elif query_string != '':
url = "{}?{}".format(url, query_string)
else:
if not method == "POST":
raise ValueError("Received file upload info and expected method to be POST. Got: " + str(method))
if query_string != '':
url = "{}?{}".format(url, query_string)
if _is_logging():
_log_rest("------------------------------------------------------------\n")
_log_rest("\n")
_log_rest("Time: {0}\n".format(time.strftime('Y-%m-%d %H:%M:%OS3')))
_log_rest("\n")
_log_rest("{0} {1}\n".format(method, url))
_log_rest("postBody: {0}\n".format(post_body))
global _rest_ctr; _rest_ctr = _rest_ctr+1
begin_time_seconds = time.time()
http_result = self._attempt_rest(url, method, post_body, file_upload_info)
end_time_seconds = time.time()
elapsed_time_seconds = end_time_seconds - begin_time_seconds
elapsed_time_millis = elapsed_time_seconds * 1000
if not http_result.ok:
detailed_error_msgs = []
try:
result = http_result.json()
if 'messages' in result.keys():
detailed_error_msgs = '\n'.join([m['message'] for m in result['messages'] if m['message_type'] in ['ERRR']])
elif 'exception_msg' in result.keys():
detailed_error_msgs = result['exception_msg']
except ValueError:
pass
raise EnvironmentError(("h2o-py got an unexpected HTTP status code:\n {} {} (method = {}; url = {}). \n"+ \
"detailed error messages: {}")
.format(http_result.status_code,http_result.reason,method,url,detailed_error_msgs))
if _is_logging():
_log_rest("\n")
_log_rest("httpStatusCode: {0}\n".format(http_result.status_code))
_log_rest("httpStatusMessage: {0}\n".format(http_result.reason))
_log_rest("millis: {0}\n".format(elapsed_time_millis))
_log_rest("\n")
_log_rest("{0}\n".format(http_result.json()))
_log_rest("\n")
return http_result
# Low level request call
def _attempt_rest(self, url, method, post_body, file_upload_info):
auth = (self._username, self._password)
verify = not self._insecure
headers = {'User-Agent': 'H2O Python client/'+sys.version.replace('\n','')}
try:
if method == "GET":
return requests.get(url, headers=headers, proxies=self._proxy, auth=auth, verify=verify)
elif file_upload_info:
files = {file_upload_info["file"] : open(file_upload_info["file"], "rb")}
return requests.post(url, files=files, headers=headers, proxies=self._proxy, auth=auth, verify=verify)
elif method == "POST":
headers["Content-Type"] = "application/x-www-form-urlencoded"
return requests.post(url, data=post_body, headers=headers, proxies=self._proxy, auth=auth, verify=verify)
elif method == "DELETE":
return requests.delete(url, headers=headers, proxies=self._proxy, auth=auth, verify=verify)
else:
raise ValueError("Unknown HTTP method " + method)
except requests.ConnectionError as e:
raise EnvironmentError("h2o-py encountered an unexpected HTTP error:\n {}".format(e))
# TODO:
# @staticmethod
# def _process_matrices(x=None):
# if x:
# if isinstance(x, "dict"):
#
# return x
@staticmethod
def _process_tables(x=None):
if x:
if isinstance(x, dict):
has_meta = "__meta" in x
has_schema_type = has_meta and "schema_type" in x["__meta"]
have_table = has_schema_type and x["__meta"]["schema_type"] == "TwoDimTable"
if have_table:
col_formats = [c["format"] for c in x["columns"]]
table_header = x["name"]
table_descr = x["description"]
col_types = [c["type"] for c in x["columns"]]
col_headers = [c["name"] for c in x["columns"]]
row_headers = ["" for i in range(len(col_headers))]
cell_values = x["data"]
tbl = H2OTwoDimTable(row_header=row_headers, col_header=col_headers,
col_types=col_types, table_header=table_header,
raw_cell_values=cell_values,
col_formats=col_formats,table_description=table_descr)
x = tbl
else:
for k in x:
x[k] = H2OConnection._process_tables(x[k])
if isinstance(x, list):
for it in range(len(x)):
x[it] = H2OConnection._process_tables(x[it])
return x
global _rest_ctr
_rest_ctr = 0
@staticmethod
def rest_ctr(): global _rest_ctr; return _rest_ctr
# On exit, close the session to allow H2O to cleanup any temps
def end_session():
try:
H2OConnection.delete(url_suffix="InitID")
print("Sucessfully closed the H2O Session.")
except:
pass
def get_human_readable_size(num):
exp_str = [(0, 'B'), (10, 'KB'), (20, 'MB'), (30, 'GB'), (40, 'TB'), (50, 'PB'), ]
i = 0
rounded_val = 0
while i + 1 < len(exp_str) and num >= (2 ** exp_str[i + 1][0]):
i += 1
rounded_val = round(float(num) / 2 ** exp_str[i][0], 2)
return '%s %s' % (rounded_val, exp_str[i][1])
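# Worked example: get_human_readable_size(3 * 2 ** 30) climbs the exponent
# table to the 'GB' entry and returns '3.0 GB'.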
def get_human_readable_time(epochTimeMillis):
days = epochTimeMillis/(24*60*60*1000.0)
hours = (days-math.floor(days))*24
minutes = (hours-math.floor(hours))*60
seconds = (minutes-math.floor(minutes))*60
milliseconds = (seconds-math.floor(seconds))*1000
duration_vec = [int(math.floor(t)) for t in [days,hours,minutes,seconds,milliseconds]]
names_duration_vec = ["days","hours","minutes","seconds","milliseconds"]
duration_dict = dict(zip(names_duration_vec, duration_vec))
readable_time = ""
for name in names_duration_vec:
if duration_dict[name] > 0:
readable_time += str(duration_dict[name]) + " " + name + " "
return readable_time
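# Worked example: get_human_readable_time(86400000) is exactly one day, so
# every smaller unit floors to zero and the function returns "1 days "
# (zero-valued units are skipped; the trailing space comes from the
# concatenation).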
def is_int(possible_int):
try:
int(possible_int)
return True
except ValueError:
return False
def as_int(the_int):
if not is_int(the_int):
raise ValueError("Not a valid int value: " + str(the_int))
return int(the_int)
def _kill_jvm_fork():
global __H2OCONN__
if __H2OCONN__ is not None:
if __H2OCONN__._child:
__H2OCONN__._child.kill()
print("Successfully stopped H2O JVM started by the h2o python module.")
atexit.register(_kill_jvm_fork)
atexit.register(end_session)
| apache-2.0 | -7,047,899,726,068,955,000 | 39.189445 | 151 | 0.615623 | false |
ebigelow/LOTlib | LOTlib/Inference/Samplers/AdaptiveParallelTempering.py | 1 | 3107 |
from scipy import interpolate
from ParallelTempering import ParallelTemperingSampler
class AdaptiveParallelTemperingSampler(ParallelTemperingSampler):
"""
Adaptive setting of the temperatures via
Katzgraber, H. G., Trebst, S., Huse, D. A., & Troyer, M. (2006). Feedback-optimized parallel tempering monte carlo. Journal of Statistical Mechanics: Theory and Experiment, 2006, P03018
"""
def __init__(self, make_h0, data, adapt_at=[50000, 100000, 200000, 300000, 500000, 1000000], **kwargs):
ParallelTemperingSampler.__init__(self, make_h0, data, **kwargs)
self.adapt_at = adapt_at
def adapt_temperatures(self, epsilon=0.001):
"""
    Adapt our temperatures, given self.nup and self.ndown.
This follows ComputeAdaptedTemperatures in https://github.com/stuhlmueller/mcnets/blob/master/mcnets/tempering.py
:return:
"""
hist = self.get_hist()
linear_hist = [x/float(self.nchains-1) for x in reversed(range(self.nchains))]
monotonic_hist = [x*float(1.-epsilon) + y*epsilon for x, y in zip(hist, linear_hist)]
# print "Linear:", linear_hist
# print "Monotonic:", monotonic_hist
# Hmm force monotonic to have 0,1?
monotonic_hist[0], monotonic_hist[-1] = 1.0, 0.0
f = interpolate.interp1d(list(reversed(monotonic_hist)), list(reversed(self.temperatures)))
newt = [self.temperatures[0]]
for i in reversed(range(2, self.nchains)):
# print i, float(i-1) / (self.nchains-1), frac(float(i-1) / (self.nchains-1))
newt.append(f([float(i-1.) / (self.nchains-1)])[0])
# keep the old temps
newt.append(self.temperatures[-1])
self.temperatures = newt
print "# Adapting temperatures to ", self.temperatures
print "# Acceptance ratio:", self.acceptance_ratio()
# And set each temperature chain
for c, t in zip(self.chains, self.temperatures):
            c.likelihood_temperature = t
def next(self):
ret = ParallelTemperingSampler.next(self)
if self.nsamples in self.adapt_at: ## TODO: Maybe make this faster?
self.adapt_temperatures()
return ret
if __name__ == "__main__":
from LOTlib import break_ctrlc
from LOTlib.Examples.Number2015.Model import generate_data, make_h0
data = generate_data(300)
from LOTlib.MCMCSummary.Z import Z
from LOTlib.MCMCSummary.TopN import TopN
z = Z(unique=True)
tn = TopN(N=10)
from LOTlib.Miscellaneous import logrange
sampler = AdaptiveParallelTemperingSampler(make_h0, data, steps=1000000, \
yield_only_t0=False, whichtemperature='acceptance_temperature', \
temperatures=logrange(1.0, 10.0, 10))
for h in break_ctrlc(tn(z(sampler))):
# print sampler.chain_idx, h.posterior_score, h
pass
for x in tn.get_all(sorted=True):
print x.posterior_score, x
print z
print sampler.nup, sampler.ndown
    print sampler.get_hist()
| gpl-3.0 | 4,413,269,540,533,469,700 | 32.419355 | 189 | 0.632121 | false |
dnsbob/pynet_testz | test_telnet.py | 1 | 1310 |
#!/usr/bin/env python
# test_telnet.py
import telnetlib
import time
import socket
import sys
TELNET_PORT=23
TELNET_TIMEOUT=6
def telnet_connect(ip_addr, TELNET_PORT, TELNET_TIMEOUT):
try:
return telnetlib.Telnet(ip_addr, TELNET_PORT, TELNET_TIMEOUT)
except socket.timeout:
sys.exit("Connection timed out")
def login(remote_conn, username, password):
output=remote_conn.read_until("sername:", TELNET_TIMEOUT)
remote_conn.write(username + '\n')
output += remote_conn.read_until("ssword:", TELNET_TIMEOUT)
remote_conn.write(password + '\n')
return output
def send_command(remote_conn, cmd):
cmd=cmd.rstrip() # remove trailing linefeed if any
remote_conn.write(cmd + '\n')
time.sleep(1)
return remote_conn.read_very_eager()
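# Note: read_very_eager() returns only bytes already buffered, so the
# one-second sleep above gives the device time to respond; for long
# command output, read_until() on the router prompt would be more robust.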
def main():
ip_addr='184.105.247.70'
username='pyclass'
password='88newclass'
remote_conn=telnet_connect(ip_addr, TELNET_PORT, TELNET_TIMEOUT)
output=login(remote_conn, username, password)
print output
time.sleep(1)
output=remote_conn.read_very_eager()
print output
output=send_command(remote_conn, 'terminal length 0')
print output
output=send_command(remote_conn, 'show version')
print output
    remote_conn.close()
if __name__ == '__main__':
main()
| apache-2.0 | 3,303,492,153,968,261,600 | 22.818182 | 69 | 0.676336 | false |
ebonyclock/vizdoom_cig2017 | f1/F1_track1/tensorpack/train/trainer.py | 1 | 9557 |
# -*- coding: UTF-8 -*-
# File: trainer.py
# Author: Yuxin Wu <[email protected]>
import tensorflow as tf
import threading
import time
from six.moves import zip
from .base import Trainer
from ..dataflow.common import RepeatedData
from ..models import TowerContext
from ..utils import *
from ..tfutils import *
from ..tfutils.summary import summary_moving_average, add_moving_summary
from ..tfutils.modelutils import describe_model
from ..predict import OnlinePredictor, build_multi_tower_prediction_graph
__all__ = ['SimpleTrainer', 'QueueInputTrainer']
class PredictorFactory(object):
""" Make predictors for a trainer"""
def __init__(self, sess, model, towers):
"""
:param towers: list of gpu relative id
"""
self.sess = sess
self.model = model
self.towers = towers
self.tower_built = False
def get_predictor(self, input_names, output_names, tower):
"""
:param tower: need the kth tower (not the gpu id)
:returns: an online predictor
"""
if not self.tower_built:
self._build_predict_tower()
tower = self.towers[tower % len(self.towers)]
raw_input_vars = get_vars_by_names(input_names)
output_names = ['towerp{}/'.format(tower) + n for n in output_names]
output_vars = get_vars_by_names(output_names)
return OnlinePredictor(self.sess, raw_input_vars, output_vars)
def _build_predict_tower(self):
# build_predict_tower might get called anywhere, but 'towerp' should be the outermost name scope
tf.get_variable_scope().reuse_variables()
with tf.name_scope(None), \
freeze_collection(SUMMARY_BACKUP_KEYS):
build_multi_tower_prediction_graph(
self.model, self.towers)
self.tower_built = True
class SimpleTrainer(Trainer):
def run_step(self):
data = next(self.data_producer)
feed = dict(zip(self.input_vars, data))
self.sess.run([self.train_op], feed_dict=feed) # faster since train_op return None
def train(self):
model = self.model
self.input_vars = model.get_input_vars()
with TowerContext(''):
model.build_graph(self.input_vars)
cost_var = model.get_cost() # TODO assert scalar
add_moving_summary(cost_var)
grads = self.config.optimizer.compute_gradients(cost_var)
grads = self.process_grads(grads)
avg_maintain_op = summary_moving_average()
self.train_op = tf.group(
self.config.optimizer.apply_gradients(grads, get_global_step_var()),
avg_maintain_op)
self.init_session_and_coord()
describe_model()
# create an infinte data producer
self.config.dataset.reset_state()
self.data_producer = RepeatedData(self.config.dataset, -1).get_data()
self.main_loop()
def _trigger_epoch(self):
if self.summary_op is not None:
data = next(self.data_producer)
feed = dict(zip(self.input_vars, data))
summary_str = self.summary_op.eval(feed_dict=feed)
self._process_summary(summary_str)
def get_predict_func(self, input_names, output_names):
if not hasattr(self, 'predictor_factory'):
self.predictor_factory = PredictorFactory(self.sess, self.model, [0])
return self.predictor_factory.get_predictor(input_names, output_names, 0)
class EnqueueThread(threading.Thread):
def __init__(self, trainer):
super(EnqueueThread, self).__init__()
self.sess = trainer.sess
self.coord = trainer.coord
self.dataflow = RepeatedData(trainer.config.dataset, -1)
self.input_vars = trainer.input_vars
self.queue = trainer.input_queue
self.op = self.queue.enqueue(self.input_vars)
self.close_op = self.queue.close(cancel_pending_enqueues=True)
self.size_op = self.queue.size()
self.daemon = True
def run(self):
self.dataflow.reset_state()
with self.sess.as_default():
try:
while True:
for dp in self.dataflow.get_data():
#import IPython;
#IPython.embed(config=IPython.terminal.ipapp.load_default_config())
if self.coord.should_stop():
return
feed = dict(zip(self.input_vars, dp))
#print 'TFQ:', self.sess.run([self.op, self.size_op], feed_dict=feed)[1]
self.op.run(feed_dict=feed)
except tf.errors.CancelledError as e:
pass
except Exception:
logger.exception("Exception in EnqueueThread:")
finally:
try:
self.sess.run(self.close_op)
except RuntimeError: # session already closed
pass
self.coord.request_stop()
logger.info("Enqueue Thread Exited.")
class QueueInputTrainer(Trainer):
""" Single GPU Trainer, takes input from a queue"""
def __init__(self, config, input_queue=None, predict_tower=None):
"""
:param config: a `TrainConfig` instance
:param input_queue: a `tf.QueueBase` instance to be used to buffer datapoints.
Defaults to a FIFO queue of size 100.
:param predict_tower: list of gpu relative idx to run prediction. default to be [0].
Use -1 for cpu.
"""
super(QueueInputTrainer, self).__init__(config)
self.input_vars = self.model.get_input_vars()
# use a smaller queue size for now, to avoid https://github.com/tensorflow/tensorflow/issues/2942
if input_queue is None:
self.input_queue = tf.FIFOQueue(
50, [x.dtype for x in self.input_vars], name='input_queue')
else:
self.input_queue = input_queue
# by default, use the first training gpu for prediction
self.predict_tower = predict_tower or [0]
self.dequed_inputs = None
def _get_model_inputs(self):
""" Dequeue a datapoint from input_queue and return"""
ret = self.input_queue.dequeue(name='input_deque')
if isinstance(ret, tf.Tensor): # only one input
ret = [ret]
assert len(ret) == len(self.input_vars)
for qv, v in zip(ret, self.input_vars):
qv.set_shape(v.get_shape())
return ret
def _single_tower_grad(self):
""" Get grad and cost for single-tower"""
self.dequed_inputs = model_inputs = self._get_model_inputs()
# test the overhead of queue
#with tf.device('/gpu:0'):
#self.dequed_inputs = [tf.Variable(tf.random_normal([128,224,224,3],
#dtype=tf.float32), trainable=False),
#tf.Variable(tf.ones([128], dtype=tf.int32), trainable=False)]
with TowerContext(''):
self.model.build_graph(self.dequed_inputs)
cost_var = self.model.get_cost()
grads = self.config.optimizer.compute_gradients(
cost_var, gate_gradients=0) # GATE_NONE
add_moving_summary(cost_var)
return grads
def _build_enque_thread(self):
""" create a thread that keeps filling the queue """
self.input_th = EnqueueThread(self)
self._extra_threads_procs.append(self.input_th)
def train(self):
assert len(self.config.tower) == 1, \
"QueueInputTrainer doesn't support multigpu! Use Sync/AsyncMultiGPUTrainer instead."
self.init_session_and_coord()
self._build_enque_thread()
grads = self._single_tower_grad()
grads = self.process_grads(grads)
describe_model()
self.train_op = tf.group(
self.config.optimizer.apply_gradients(grads, get_global_step_var()),
summary_moving_average(), name='train_op')
# skip training
#self.train_op = tf.group(*self.dequed_inputs)
self.main_loop()
def run_step(self):
""" Simply run self.train_op"""
self.sess.run(self.train_op)
#run_metadata = tf.RunMetadata()
#self.sess.run([self.train_op],
#options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),
#run_metadata=run_metadata
#)
#from tensorflow.python.client import timeline
#trace = timeline.Timeline(step_stats=run_metadata.step_stats)
#trace_file = open('timeline.ctf.json', 'w')
#trace_file.write(trace.generate_chrome_trace_format())
#import sys; sys.exit()
def _trigger_epoch(self):
# need to run summary_op every epoch
# note that summary_op will take a data from the queue
if self.summary_op is not None:
summary_str = self.summary_op.eval()
self._process_summary(summary_str)
def get_predict_func(self, input_names, output_names, tower=0):
"""
:param tower: return the kth predict_func
:returns: an `OnlinePredictor`
"""
if not hasattr(self, 'predictor_factory'):
self.predictor_factory = PredictorFactory(
self.sess, self.model, self.predict_tower)
return self.predictor_factory.get_predictor(input_names, output_names, tower)
def get_predict_funcs(self, input_names, output_names, n):
return [self.get_predict_func(input_names, output_names, k) for k in range(n)]
| mit | -8,208,971,393,597,252,000 | 37.692308 | 105 | 0.601444 | false |
saullocastro/pyNastran | pyNastran/bdf/bdf_interface/write_mesh.py | 1 | 45439 |
# coding: utf-8
"""
This file defines:
- WriteMesh
"""
from __future__ import (nested_scopes, generators, division, absolute_import,
print_function, unicode_literals)
import sys
import io
from codecs import open
from six import string_types, iteritems, itervalues, PY2, StringIO
#from pyNastran.utils import is_file_obj
from pyNastran.bdf.utils import print_filename
from pyNastran.utils.gui_io import save_file_dialog
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.field_writer_16 import print_card_16
from pyNastran.bdf.bdf_interface.attributes import BDFAttributes
class WriteMesh(BDFAttributes):
"""
Defines methods for writing cards
Major methods:
- model.write_bdf(...)
- model.echo_bdf(...)
- model.auto_reject_bdf(...)
"""
def __init__(self):
"""creates methods for writing cards"""
BDFAttributes.__init__(self)
self._auto_reject = True
self.cards_to_read = set([])
#def echo_bdf(self, infile_name):
#"""
#This method removes all comment lines from the bdf
#A write method is stil required.
#.. todo:: maybe add the write method
#.. code-block:: python
#model = BDF()
#model.echo_bdf(bdf_filename)
#"""
#self.deprecated('self.echo_bdf()', 'removed...', '0.8')
#self.cards_to_read = set([])
#return self.read_bdf(infile_name)
#def auto_reject_bdf(self, infile_name):
#"""
#This method parses supported cards, but does not group them into
#nodes, elements, properties, etc.
#.. todo:: maybe add the write method
#"""
#self.deprecated('self.auto_reject_bdf()', 'removed...', '0.8')
#self._auto_reject = True
#return self.read_bdf(infile_name)
def get_encoding(self, encoding=None):
if encoding is not None:
pass
else:
encoding = self._encoding
if encoding is None:
encoding = sys.getdefaultencoding()
return encoding
def _output_helper(self, out_filename, interspersed, size, is_double):
"""
Performs type checking on the write_bdf inputs
"""
if out_filename is None:
wildcard_wx = "Nastran BDF (*.bdf; *.dat; *.nas; *.pch)|" \
"*.bdf;*.dat;*.nas;*.pch|" \
"All files (*.*)|*.*"
wildcard_qt = "Nastran BDF (*.bdf *.dat *.nas *.pch);;All files (*)"
title = 'Save BDF/DAT/PCH'
out_filename = save_file_dialog(title, wildcard_wx, wildcard_qt)
assert out_filename is not None, out_filename
if PY2:
#if not is_file_obj(out_filename):
if not (hasattr(out_filename, 'read') and hasattr(out_filename, 'write')
) or isinstance(out_filename, (file, StringIO)):
return out_filename
elif not isinstance(out_filename, string_types):
msg = 'out_filename=%r must be a string; type=%s' % (
out_filename, type(out_filename))
raise TypeError(msg)
else:
if not(hasattr(out_filename, 'read') and hasattr(out_filename, 'write')
) or isinstance(out_filename, io.IOBase):
return out_filename
elif not isinstance(out_filename, string_types):
msg = 'out_filename=%r must be a string; type=%s' % (
out_filename, type(out_filename))
raise TypeError(msg)
if size == 8:
assert is_double is False, 'is_double=%r' % is_double
elif size == 16:
assert is_double in [True, False], 'is_double=%r' % is_double
else:
assert size in [8, 16], size
assert isinstance(interspersed, bool)
fname = print_filename(out_filename, self._relpath)
self.log.debug("***writing %s" % fname)
return out_filename
def write_caero_model(self, caero_bdf_filename='caero.bdf'):
"""write the CAERO cards as CQUAD4s that can be visualized"""
bdf_file = open(caero_bdf_filename, 'w')
bdf_file.write('CEND\n')
bdf_file.write('BEGIN BULK\n')
bdf_file.write('$ punch=True\n')
i = 1
mid = 1
bdf_file.write('MAT1,%s,3.0E7,,0.3\n' % mid)
for aesurf_id, aesurf in iteritems(self.aesurf):
cid = aesurf.cid1
bdf_file.write('PSHELL,%s,%s,0.1\n' % (aesurf_id, aesurf_id))
#print(cid)
#ax, ay, az = cid.i
#bx, by, bz = cid.j
#cx, cy, cz = cid.k
#bdf_file.write('CORD2R,%s,,%s,%s,%s,%s,%s,%s\n' % (cid, ax, ay, az, bx, by, bz))
#bdf_file.write(',%s,%s,%s\n' % (cx, cy, cz))
#print(cid)
bdf_file.write(str(cid))
#aesurf.elements
for eid, caero in sorted(iteritems(self.caeros)):
assert eid != 1, 'CAERO eid=1 is reserved for non-flaps'
scaero = str(caero).rstrip().split('\n')
bdf_file.write('$ ' + '\n$ '.join(scaero) + '\n')
points, elements = caero.panel_points_elements()
npoints = points.shape[0]
#nelements = elements.shape[0]
for ipoint, point in enumerate(points):
x, y, z = point
bdf_file.write('GRID,%s,,%s,%s,%s\n' % (i + ipoint, x, y, z))
pid = eid
mid = eid
#if 0:
#bdf_file.write('PSHELL,%s,%s,0.1\n' % (pid, mid))
#bdf_file.write('MAT1,%s,3.0E7,,0.3\n' % mid)
#else:
bdf_file.write('PSHELL,%s,%s,0.1\n' % (1, 1))
bdf_file.write('MAT1,%s,3.0E7,,0.3\n' % 1)
j = 0
for elem in elements + i:
p1, p2, p3, p4 = elem
eid2 = j + eid
pidi = None
for aesurf_id, aesurf in iteritems(self.aesurf):
aelist_id = aesurf.AELIST_id1()
aelist = self.aelists[aelist_id]
if eid2 in aelist.elements:
pidi = aesurf_id
break
if pidi is None:
#pidi = pid
pidi = 1
bdf_file.write('CQUAD4,%s,%s,%s,%s,%s,%s\n' % (j + eid, pidi, p1, p2, p3, p4))
j += 1
i += npoints
#break
#j += nelements
bdf_file.write('ENDDATA\n')
def write_bdf(self, out_filename=None, encoding=None,
size=8, is_double=False,
interspersed=False, enddata=None, close=True):
"""
Writes the BDF.
Parameters
----------
out_filename : varies; default=None
str - the name to call the output bdf
file - a file object
StringIO() - a StringIO object
None - pops a dialog
encoding : str; default=None -> system specified encoding
the unicode encoding
latin1, and utf8 are generally good options
size : int; {8, 16}
the field size
is_double : bool; default=False
False : small field
True : large field
interspersed : bool; default=True
Writes a bdf with properties & elements
interspersed like how Patran writes the bdf. This takes
slightly longer than if interspersed=False, but makes it
much easier to compare to a Patran-formatted bdf and is
more clear.
enddata : bool; default=None
bool - enable/disable writing ENDDATA
None - depends on input BDF
close : bool; default=True
should the output file be closed
"""
#self.write_caero_model()
out_filename = self._output_helper(out_filename,
interspersed, size, is_double)
self.log.debug('---starting BDF.write_bdf of %s---' % out_filename)
encoding = self.get_encoding(encoding)
#assert encoding.lower() in ['ascii', 'latin1', 'utf8'], encoding
if hasattr(out_filename, 'read') and hasattr(out_filename, 'write'):
bdf_file = out_filename
else:
            bdf_file = open(out_filename, 'w', encoding=encoding)
self._write_header(bdf_file, encoding)
self._write_params(bdf_file, size, is_double)
self._write_nodes(bdf_file, size, is_double)
if interspersed:
self._write_elements_properties(bdf_file, size, is_double)
else:
self._write_elements(bdf_file, size, is_double)
self._write_properties(bdf_file, size, is_double)
self._write_materials(bdf_file, size, is_double)
self._write_masses(bdf_file, size, is_double)
self._write_common(bdf_file, size, is_double)
if (enddata is None and 'ENDDATA' in self.card_count) or enddata:
bdf_file.write('ENDDATA\n')
if close:
bdf_file.close()
def _write_header(self, bdf_file, encoding):
"""
Writes the executive and case control decks.
"""
if self.punch is None:
# writing a mesh without using read_bdf
if self.executive_control_lines or self.case_control_deck:
self.punch = False
else:
self.punch = True
if self.nastran_format:
bdf_file.write('$pyNastran: version=%s\n' % self.nastran_format)
bdf_file.write('$pyNastran: punch=%s\n' % self.punch)
bdf_file.write('$pyNastran: encoding=%s\n' % encoding)
bdf_file.write('$pyNastran: nnodes=%s\n' % len(self.nodes))
bdf_file.write('$pyNastran: nelements=%s\n' % len(self.elements))
if not self.punch:
self._write_executive_control_deck(bdf_file)
self._write_case_control_deck(bdf_file)
def _write_executive_control_deck(self, bdf_file):
"""
Writes the executive control deck.
"""
if self.executive_control_lines:
msg = '$EXECUTIVE CONTROL DECK\n'
if self.sol == 600:
new_sol = 'SOL 600,%s' % self.sol_method
else:
new_sol = 'SOL %s' % self.sol
if self.sol_iline is not None:
self.executive_control_lines[self.sol_iline] = new_sol
for line in self.executive_control_lines:
msg += line + '\n'
bdf_file.write(msg)
def _write_case_control_deck(self, bdf_file):
"""
Writes the Case Control Deck.
"""
if self.case_control_deck:
msg = '$CASE CONTROL DECK\n'
msg += str(self.case_control_deck)
assert 'BEGIN BULK' in msg, msg
bdf_file.write(''.join(msg))
def _write_elements(self, bdf_file, size=8, is_double=False):
"""
Writes the elements in a sorted order
"""
if self.elements:
bdf_file.write('$ELEMENTS\n')
if self.is_long_ids:
for (eid, element) in sorted(iteritems(self.elements)):
bdf_file.write(element.write_card_16(is_double))
else:
for (eid, element) in sorted(iteritems(self.elements)):
try:
bdf_file.write(element.write_card(size, is_double))
except:
                        print('failed printing element... '
                              'type=%s eid=%s' % (element.type, eid))
raise
def _write_elements_properties(self, bdf_file, size=8, is_double=False):
"""
Writes the elements and properties in and interspersed order
"""
missing_properties = []
if self.properties:
bdf_file.write('$ELEMENTS_WITH_PROPERTIES\n')
eids_written = []
pids = sorted(self.properties.keys())
pid_eids = self.get_element_ids_dict_with_pids(pids)
msg = []
#failed_element_types = set([])
for (pid, eids) in sorted(iteritems(pid_eids)):
prop = self.properties[pid]
if eids:
msg.append(prop.write_card(size, is_double))
eids.sort()
for eid in eids:
element = self.Element(eid)
try:
msg.append(element.write_card(size, is_double))
except:
                        print('failed printing element... type=%r eid=%s'
                              % (element.type, eid))
raise
eids_written += eids
else:
missing_properties.append(prop.write_card(size, is_double))
bdf_file.write(''.join(msg))
eids_missing = set(self.elements.keys()).difference(set(eids_written))
if eids_missing:
msg = ['$ELEMENTS_WITH_NO_PROPERTIES '
'(PID=0 and unanalyzed properties)\n']
for eid in sorted(eids_missing):
element = self.Element(eid, msg='')
try:
msg.append(element.write_card(size, is_double))
except:
                        print('failed printing element... '
                              'type=%s eid=%s' % (element.type, eid))
raise
bdf_file.write(''.join(msg))
if missing_properties or self.pdampt or self.pbusht or self.pelast:
msg = ['$UNASSOCIATED_PROPERTIES\n']
for card in sorted(itervalues(self.pbusht)):
msg.append(card.write_card(size, is_double))
for card in sorted(itervalues(self.pdampt)):
msg.append(card.write_card(size, is_double))
for card in sorted(itervalues(self.pelast)):
msg.append(card.write_card(size, is_double))
for card in missing_properties:
# this is a string...
#print("missing_property = ", card
msg.append(card)
bdf_file.write(''.join(msg))
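    # Output order produced above: each property card is followed by the
    # element cards that reference it; elements whose PID resolves to no
    # written property are collected under $ELEMENTS_WITH_NO_PROPERTIES, and
    # leftover property-type cards go under $UNASSOCIATED_PROPERTIES.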
def _write_aero(self, bdf_file, size=8, is_double=False):
"""Writes the aero cards"""
if self.caeros or self.paeros or self.monitor_points or self.splines:
msg = ['$AERO\n']
for (unused_id, caero) in sorted(iteritems(self.caeros)):
msg.append(caero.write_card(size, is_double))
for (unused_id, paero) in sorted(iteritems(self.paeros)):
msg.append(paero.write_card(size, is_double))
for (unused_id, spline) in sorted(iteritems(self.splines)):
msg.append(spline.write_card(size, is_double))
for monitor_point in self.monitor_points:
msg.append(monitor_point.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_aero_control(self, bdf_file, size=8, is_double=False):
"""Writes the aero control surface cards"""
if(self.aecomps or self.aefacts or self.aeparams or self.aelinks or
self.aelists or self.aestats or self.aesurf or self.aesurfs):
msg = ['$AERO CONTROL SURFACES\n']
for (unused_id, aelinks) in sorted(iteritems(self.aelinks)):
for aelink in aelinks:
msg.append(aelink.write_card(size, is_double))
for (unused_id, aecomp) in sorted(iteritems(self.aecomps)):
msg.append(aecomp.write_card(size, is_double))
for (unused_id, aeparam) in sorted(iteritems(self.aeparams)):
msg.append(aeparam.write_card(size, is_double))
for (unused_id, aestat) in sorted(iteritems(self.aestats)):
msg.append(aestat.write_card(size, is_double))
for (unused_id, aelist) in sorted(iteritems(self.aelists)):
msg.append(aelist.write_card(size, is_double))
for (unused_id, aesurf) in sorted(iteritems(self.aesurf)):
msg.append(aesurf.write_card(size, is_double))
for (unused_id, aesurfs) in sorted(iteritems(self.aesurfs)):
msg.append(aesurfs.write_card(size, is_double))
for (unused_id, aefact) in sorted(iteritems(self.aefacts)):
msg.append(aefact.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_static_aero(self, bdf_file, size=8, is_double=False):
"""Writes the static aero cards"""
if self.aeros or self.trims or self.divergs:
msg = ['$STATIC AERO\n']
# static aero
if self.aeros:
msg.append(self.aeros.write_card(size, is_double))
for (unused_id, trim) in sorted(iteritems(self.trims)):
msg.append(trim.write_card(size, is_double))
for (unused_id, diverg) in sorted(iteritems(self.divergs)):
msg.append(diverg.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _find_aero_location(self):
"""Determines where the AERO card should be written"""
write_aero_in_flutter = False
write_aero_in_gust = False
if self.aero:
if self.flfacts or self.flutters or self.mkaeros:
write_aero_in_flutter = True
elif self.gusts:
write_aero_in_gust = True
else:
# an AERO card exists, but no FLUTTER, FLFACT, MKAEROx or GUST card
write_aero_in_flutter = True
return write_aero_in_flutter, write_aero_in_gust
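    # Decision summary for _find_aero_location() (follows the branches above):
    #   AERO + FLUTTER/FLFACT/MKAEROx cards -> AERO is written in the $FLUTTER block
    #   AERO + GUST cards only              -> AERO is written in the $GUST block
    #   AERO with neither                   -> AERO is written in the $FLUTTER block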
def _write_flutter(self, bdf_file, size=8, is_double=False, write_aero_in_flutter=True):
"""Writes the flutter cards"""
if (write_aero_in_flutter and self.aero) or self.flfacts or self.flutters or self.mkaeros:
msg = ['$FLUTTER\n']
if write_aero_in_flutter:
msg.append(self.aero.write_card(size, is_double))
for (unused_id, flutter) in sorted(iteritems(self.flutters)):
msg.append(flutter.write_card(size, is_double))
for (unused_id, flfact) in sorted(iteritems(self.flfacts)):
msg.append(flfact.write_card(size, is_double))
for mkaero in self.mkaeros:
msg.append(mkaero.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_gust(self, bdf_file, size=8, is_double=False, write_aero_in_gust=True):
"""Writes the gust cards"""
if (write_aero_in_gust and self.aero) or self.gusts:
msg = ['$GUST\n']
if write_aero_in_gust:
                # self.aero holds a single AERO card (matching its use in
                # _write_flutter above), so write it directly
                msg.append(self.aero.write_card(size, is_double))
for (unused_id, gust) in sorted(iteritems(self.gusts)):
msg.append(gust.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_common(self, bdf_file, size=8, is_double=False):
"""
Write the common outputs so none get missed...
Parameters
----------
bdf_file : file
the file object
size : int (default=8)
the field width
is_double : bool (default=False)
is this double precision
"""
self._write_rigid_elements(bdf_file, size, is_double)
self._write_dmigs(bdf_file, size, is_double)
self._write_loads(bdf_file, size, is_double)
self._write_dynamic(bdf_file, size, is_double)
self._write_aero(bdf_file, size, is_double)
self._write_aero_control(bdf_file, size, is_double)
self._write_static_aero(bdf_file, size, is_double)
write_aero_in_flutter, write_aero_in_gust = self._find_aero_location()
self._write_flutter(bdf_file, size, is_double, write_aero_in_flutter)
self._write_gust(bdf_file, size, is_double, write_aero_in_gust)
self._write_thermal(bdf_file, size, is_double)
self._write_thermal_materials(bdf_file, size, is_double)
self._write_constraints(bdf_file, size, is_double)
self._write_optimization(bdf_file, size, is_double)
self._write_tables(bdf_file, size, is_double)
self._write_sets(bdf_file, size, is_double)
self._write_superelements(bdf_file, size, is_double)
self._write_contact(bdf_file, size, is_double)
self._write_rejects(bdf_file, size, is_double)
self._write_coords(bdf_file, size, is_double)
def _write_constraints(self, bdf_file, size=8, is_double=False):
"""Writes the constraint cards sorted by ID"""
if self.suport or self.suport1:
msg = ['$CONSTRAINTS\n']
for suport in self.suport:
msg.append(suport.write_card(size, is_double))
for suport_id, suport in sorted(iteritems(self.suport1)):
msg.append(suport.write_card(size, is_double))
bdf_file.write(''.join(msg))
if self.spcs or self.spcadds:
#msg = ['$SPCs\n']
#str_spc = str(self.spcObject) # old
#if str_spc:
#msg.append(str_spc)
#else:
msg = ['$SPCs\n']
for (unused_id, spcadd) in sorted(iteritems(self.spcadds)):
msg.append(str(spcadd))
for (unused_id, spcs) in sorted(iteritems(self.spcs)):
for spc in spcs:
msg.append(str(spc))
bdf_file.write(''.join(msg))
if self.mpcs or self.mpcadds:
msg = ['$MPCs\n']
for (unused_id, mpcadd) in sorted(iteritems(self.mpcadds)):
msg.append(str(mpcadd))
for (unused_id, mpcs) in sorted(iteritems(self.mpcs)):
for mpc in mpcs:
msg.append(mpc.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_contact(self, bdf_file, size=8, is_double=False):
"""Writes the contact cards sorted by ID"""
is_contact = (self.bcrparas or self.bctadds or self.bctparas
or self.bctsets or self.bsurf or self.bsurfs)
if is_contact:
msg = ['$CONTACT\n']
for (unused_id, bcrpara) in sorted(iteritems(self.bcrparas)):
msg.append(bcrpara.write_card(size, is_double))
for (unused_id, bctadds) in sorted(iteritems(self.bctadds)):
msg.append(bctadds.write_card(size, is_double))
for (unused_id, bctpara) in sorted(iteritems(self.bctparas)):
msg.append(bctpara.write_card(size, is_double))
for (unused_id, bctset) in sorted(iteritems(self.bctsets)):
msg.append(bctset.write_card(size, is_double))
for (unused_id, bsurfi) in sorted(iteritems(self.bsurf)):
msg.append(bsurfi.write_card(size, is_double))
for (unused_id, bsurfsi) in sorted(iteritems(self.bsurfs)):
msg.append(bsurfsi.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_coords(self, bdf_file, size=8, is_double=False):
"""Writes the coordinate cards in a sorted order"""
msg = []
if len(self.coords) > 1:
msg.append('$COORDS\n')
for (unused_id, coord) in sorted(iteritems(self.coords)):
if unused_id != 0:
msg.append(coord.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_dmigs(self, bdf_file, size=8, is_double=False):
"""
Writes the DMIG cards
Parameters
----------
size : int
large field (16) or small field (8)
Returns
-------
msg : str
string representation of the DMIGs
"""
msg = []
for (unused_name, dmig) in sorted(iteritems(self.dmigs)):
msg.append(dmig.write_card(size, is_double))
for (unused_name, dmi) in sorted(iteritems(self.dmis)):
msg.append(dmi.write_card(size, is_double))
for (unused_name, dmij) in sorted(iteritems(self.dmijs)):
msg.append(dmij.write_card(size, is_double))
for (unused_name, dmiji) in sorted(iteritems(self.dmijis)):
msg.append(dmiji.write_card(size, is_double))
for (unused_name, dmik) in sorted(iteritems(self.dmiks)):
msg.append(dmik.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_dynamic(self, bdf_file, size=8, is_double=False):
"""Writes the dynamic cards sorted by ID"""
is_dynamic = (self.dareas or self.dphases or self.nlparms or self.frequencies or
self.methods or self.cMethods or self.tsteps or self.tstepnls or
self.transfer_functions or self.delays or self.rotors)
if is_dynamic:
msg = ['$DYNAMIC\n']
for (unused_id, method) in sorted(iteritems(self.methods)):
msg.append(method.write_card(size, is_double))
for (unused_id, cmethod) in sorted(iteritems(self.cMethods)):
msg.append(cmethod.write_card(size, is_double))
for (unused_id, darea) in sorted(iteritems(self.dareas)):
msg.append(darea.write_card(size, is_double))
for (unused_id, dphase) in sorted(iteritems(self.dphases)):
msg.append(dphase.write_card(size, is_double))
for (unused_id, nlparm) in sorted(iteritems(self.nlparms)):
msg.append(nlparm.write_card(size, is_double))
for (unused_id, nlpci) in sorted(iteritems(self.nlpcis)):
msg.append(nlpci.write_card(size, is_double))
for (unused_id, tstep) in sorted(iteritems(self.tsteps)):
msg.append(tstep.write_card(size, is_double))
for (unused_id, tstepnl) in sorted(iteritems(self.tstepnls)):
msg.append(tstepnl.write_card(size, is_double))
for (unused_id, freq) in sorted(iteritems(self.frequencies)):
msg.append(freq.write_card(size, is_double))
for (unused_id, delay) in sorted(iteritems(self.delays)):
msg.append(delay.write_card(size, is_double))
for (unused_id, rotor) in sorted(iteritems(self.rotors)):
msg.append(rotor.write_card(size, is_double))
for (unused_id, tfs) in sorted(iteritems(self.transfer_functions)):
for tf in tfs:
msg.append(tf.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_loads(self, bdf_file, size=8, is_double=False):
"""Writes the load cards sorted by ID"""
if self.loads or self.tempds:
msg = ['$LOADS\n']
for (key, loadcase) in sorted(iteritems(self.loads)):
for load in loadcase:
try:
msg.append(load.write_card(size, is_double))
except:
print('failed printing load...type=%s key=%r'
% (load.type, key))
raise
for key, tempd in sorted(iteritems(self.tempds)):
msg.append(tempd.write_card(size, is_double))
bdf_file.write(''.join(msg))
if self.dloads or self.dload_entries:
msg = ['$DLOADS\n']
for (key, loadcase) in sorted(iteritems(self.dloads)):
for load in loadcase:
try:
msg.append(load.write_card(size, is_double))
except:
print('failed printing load...type=%s key=%r'
% (load.type, key))
raise
for (key, loadcase) in sorted(iteritems(self.dload_entries)):
for load in loadcase:
try:
msg.append(load.write_card(size, is_double))
except:
print('failed printing load...type=%s key=%r'
% (load.type, key))
raise
bdf_file.write(''.join(msg))
def _write_masses(self, bdf_file, size=8, is_double=False):
"""Writes the mass cards sorted by ID"""
if self.properties_mass:
bdf_file.write('$PROPERTIES_MASS\n')
for (pid, mass) in sorted(iteritems(self.properties_mass)):
try:
bdf_file.write(mass.write_card(size, is_double))
except:
print('failed printing mass property...'
'type=%s eid=%s' % (mass.type, pid))
raise
if self.masses:
bdf_file.write('$MASSES\n')
for (eid, mass) in sorted(iteritems(self.masses)):
try:
bdf_file.write(mass.write_card(size, is_double))
except:
print('failed printing masses...'
'type=%s eid=%s' % (mass.type, eid))
raise
def _write_materials(self, bdf_file, size=8, is_double=False):
"""Writes the materials in a sorted order"""
is_materials = (self.materials or self.hyperelastic_materials or self.creep_materials or
self.MATS1 or self.MATS3 or self.MATS8 or self.MATT1 or
self.MATT2 or self.MATT3 or self.MATT4 or self.MATT5 or
self.MATT8 or self.MATT9)
if is_materials:
msg = ['$MATERIALS\n']
for (unused_mid, material) in sorted(iteritems(self.materials)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.hyperelastic_materials)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.creep_materials)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATS1)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATS3)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATS8)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATT1)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATT2)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATT3)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATT4)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATT5)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATT8)):
msg.append(material.write_card(size, is_double))
for (unused_mid, material) in sorted(iteritems(self.MATT9)):
msg.append(material.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_nodes(self, bdf_file, size=8, is_double=False):
"""
Writes the NODE-type cards
"""
if self.spoints:
msg = []
msg.append('$SPOINTS\n')
msg.append(self.spoints.write_card(size, is_double))
bdf_file.write(''.join(msg))
if self.epoints:
msg = []
msg.append('$EPOINTS\n')
msg.append(self.epoints.write_card(size, is_double))
bdf_file.write(''.join(msg))
if self.points:
msg = []
msg.append('$POINTS\n')
for point_id, point in sorted(iteritems(self.points)):
msg.append(point.write_card(size, is_double))
bdf_file.write(''.join(msg))
if self.nodes:
msg = []
msg.append('$NODES\n')
if self.grdset:
msg.append(self.grdset.print_card(size))
if self.is_long_ids:
for (unused_nid, node) in sorted(iteritems(self.nodes)):
msg.append(node.write_card_16(is_double))
else:
for (unused_nid, node) in sorted(iteritems(self.nodes)):
msg.append(node.write_card(size, is_double))
bdf_file.write(''.join(msg))
#if 0: # not finished
#self._write_nodes_associated(bdf_file, size, is_double)
#def _write_nodes_associated(self, bdf_file, size=8, is_double=False):
#"""
#Writes the NODE-type in associated and unassociated groups.
#.. warning:: Sometimes crashes, probably on invalid BDFs.
#"""
#msg = []
#associated_nodes = set([])
#for (eid, element) in iteritems(self.elements):
#associated_nodes = associated_nodes.union(set(element.node_ids))
#all_nodes = set(self.nodes.keys())
#unassociated_nodes = list(all_nodes.difference(associated_nodes))
##missing_nodes = all_nodes.difference(
## TODO: this really shouldn't be a list...???
#associated_nodes = list(associated_nodes)
#if associated_nodes:
#msg += ['$ASSOCIATED NODES\n']
#if self.grdset:
#msg.append(self.grdset.write_card(size, is_double))
## TODO: this really shouldn't be a dictionary...???
#for key, node in sorted(iteritems(associated_nodes)):
#msg.append(node.write_card(size, is_double))
#if unassociated_nodes:
#msg.append('$UNASSOCIATED NODES\n')
#if self.grdset and not associated_nodes:
#msg.append(self.grdset.write_card(size, is_double))
#for key, node in sorted(iteritems(unassociated_nodes)):
#if key in self.nodes:
#msg.append(node.write_card(size, is_double))
#else:
#msg.append('$ Missing NodeID=%s' % key)
#bdf_file.write(''.join(msg))
def _write_optimization(self, bdf_file, size=8, is_double=False):
"""Writes the optimization cards sorted by ID"""
is_optimization = (self.dconadds or self.dconstrs or self.desvars or self.ddvals or
self.dresps or
self.dvprels or self.dvmrels or self.dvcrels or self.doptprm or
self.dlinks or self.dequations or self.dtable is not None or
self.dvgrids)
if is_optimization:
msg = ['$OPTIMIZATION\n']
for (unused_id, dconadd) in sorted(iteritems(self.dconadds)):
msg.append(dconadd.write_card(size, is_double))
for (unused_id, dconstrs) in sorted(iteritems(self.dconstrs)):
for dconstr in dconstrs:
msg.append(dconstr.write_card(size, is_double))
for (unused_id, desvar) in sorted(iteritems(self.desvars)):
msg.append(desvar.write_card(size, is_double))
for (unused_id, ddval) in sorted(iteritems(self.ddvals)):
msg.append(ddval.write_card(size, is_double))
for (unused_id, dlink) in sorted(iteritems(self.dlinks)):
msg.append(dlink.write_card(size, is_double))
for (unused_id, dresp) in sorted(iteritems(self.dresps)):
msg.append(dresp.write_card(size, is_double))
for (unused_id, dvcrel) in sorted(iteritems(self.dvcrels)):
msg.append(dvcrel.write_card(size, is_double))
for (unused_id, dvmrel) in sorted(iteritems(self.dvmrels)):
msg.append(dvmrel.write_card(size, is_double))
for (unused_id, dvprel) in sorted(iteritems(self.dvprels)):
msg.append(dvprel.write_card(size, is_double))
for (unused_id, dvgrids) in sorted(iteritems(self.dvgrids)):
for dvgrid in dvgrids:
msg.append(dvgrid.write_card(size, is_double))
for (unused_id, equation) in sorted(iteritems(self.dequations)):
msg.append(str(equation))
if self.dtable is not None:
msg.append(self.dtable.write_card(size, is_double))
if self.doptprm is not None:
msg.append(self.doptprm.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_params(self, bdf_file, size=8, is_double=False):
"""
Writes the PARAM cards
"""
if self.params:
msg = ['$PARAMS\n']
if self.is_long_ids:
for (unused_key, param) in sorted(iteritems(self.params)):
msg.append(param.write_card(16, is_double))
else:
for (unused_key, param) in sorted(iteritems(self.params)):
msg.append(param.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_properties(self, bdf_file, size=8, is_double=False):
"""Writes the properties in a sorted order"""
if self.properties:
msg = ['$PROPERTIES\n']
prop_groups = (self.properties, self.pelast, self.pdampt, self.pbusht)
if self.is_long_ids:
for prop_group in prop_groups:
for unused_pid, prop in sorted(iteritems(prop_group)):
msg.append(prop.write_card_16(is_double))
#except:
#print('failed printing property type=%s' % prop.type)
#raise
else:
for prop_group in prop_groups:
for unused_pid, prop in sorted(iteritems(prop_group)):
msg.append(prop.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_rejects(self, bdf_file, size=8, is_double=False):
"""
Writes the rejected (processed) cards and the rejected unprocessed
cardlines
"""
if size == 8:
print_func = print_card_8
else:
print_func = print_card_16
msg = []
if self.reject_cards:
msg.append('$REJECTS\n')
for reject_card in self.reject_cards:
try:
msg.append(print_func(reject_card))
except RuntimeError:
for field in reject_card:
if field is not None and '=' in field:
raise SyntaxError('cannot reject equal signed '
'cards\ncard=%s\n' % reject_card)
raise
        if self.reject_lines:
msg.append('$REJECT_LINES\n')
for reject_lines in self.reject_lines:
if isinstance(reject_lines, (list, tuple)):
for reject in reject_lines:
reject2 = reject.rstrip()
if reject2:
msg.append('%s\n' % reject2)
elif isinstance(reject_lines, string_types):
reject2 = reject_lines.rstrip()
if reject2:
msg.append('%s\n' % reject2)
else:
raise TypeError(reject_lines)
bdf_file.write(''.join(msg))
def _write_rigid_elements(self, bdf_file, size=8, is_double=False):
"""Writes the rigid elements in a sorted order"""
if self.rigid_elements:
bdf_file.write('$RIGID ELEMENTS\n')
if self.is_long_ids:
for (eid, element) in sorted(iteritems(self.rigid_elements)):
try:
bdf_file.write(element.write_card_16(is_double))
except:
print('failed printing element...'
'type=%s eid=%s' % (element.type, eid))
raise
else:
for (eid, element) in sorted(iteritems(self.rigid_elements)):
try:
bdf_file.write(element.write_card(size, is_double))
except:
print('failed printing element...'
'type=%s eid=%s' % (element.type, eid))
raise
if self.plotels:
bdf_file.write('$PLOT ELEMENTS\n')
for (eid, element) in sorted(iteritems(self.plotels)):
bdf_file.write(element.write_card(size, is_double))
def _write_sets(self, bdf_file, size=8, is_double=False):
"""Writes the SETx cards sorted by ID"""
is_sets = (self.sets or self.asets or self.bsets or self.csets or self.qsets
or self.usets)
if is_sets:
msg = ['$SETS\n']
for (unused_id, set_obj) in sorted(iteritems(self.sets)): # dict
msg.append(set_obj.write_card(size, is_double))
for set_obj in self.asets: # list
msg.append(set_obj.write_card(size, is_double))
for set_obj in self.bsets: # list
msg.append(set_obj.write_card(size, is_double))
for set_obj in self.csets: # list
msg.append(set_obj.write_card(size, is_double))
for set_obj in self.qsets: # list
msg.append(set_obj.write_card(size, is_double))
for name, usets in sorted(iteritems(self.usets)): # dict
for set_obj in usets: # list
msg.append(set_obj.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_superelements(self, bdf_file, size=8, is_double=False):
"""Writes the SETx cards sorted by ID"""
is_sets = (self.se_sets or self.se_bsets or self.se_csets or self.se_qsets
or self.se_usets)
if is_sets:
msg = ['$SUPERELEMENTS\n']
for set_obj in self.se_bsets: # list
msg.append(set_obj.write_card(size, is_double))
for set_obj in self.se_csets: # list
msg.append(set_obj.write_card(size, is_double))
for set_obj in self.se_qsets: # list
msg.append(set_obj.write_card(size, is_double))
for (set_id, set_obj) in sorted(iteritems(self.se_sets)): # dict
msg.append(set_obj.write_card(size, is_double))
for name, usets in sorted(iteritems(self.se_usets)): # dict
for set_obj in usets: # list
msg.append(set_obj.write_card(size, is_double))
for suport in self.se_suport: # list
msg.append(suport.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_tables(self, bdf_file, size=8, is_double=False):
"""Writes the TABLEx cards sorted by ID"""
if self.tables or self.tables_sdamping:
msg = ['$TABLES\n']
for (unused_id, table) in sorted(iteritems(self.tables)):
msg.append(table.write_card(size, is_double))
for (unused_id, table) in sorted(iteritems(self.tables_sdamping)):
msg.append(table.write_card(size, is_double))
bdf_file.write(''.join(msg))
if self.random_tables:
msg = ['$RANDOM TABLES\n']
for (unused_id, table) in sorted(iteritems(self.random_tables)):
msg.append(table.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_thermal(self, bdf_file, size=8, is_double=False):
"""Writes the thermal cards"""
# PHBDY
if self.phbdys or self.convection_properties or self.bcs:
# self.thermalProperties or
msg = ['$THERMAL\n']
for (unused_key, phbdy) in sorted(iteritems(self.phbdys)):
msg.append(phbdy.write_card(size, is_double))
#for unused_key, prop in sorted(iteritems(self.thermalProperties)):
# msg.append(str(prop))
for (unused_key, prop) in sorted(iteritems(self.convection_properties)):
msg.append(prop.write_card(size, is_double))
# BCs
for (unused_key, bcs) in sorted(iteritems(self.bcs)):
for boundary_condition in bcs: # list
msg.append(boundary_condition.write_card(size, is_double))
bdf_file.write(''.join(msg))
def _write_thermal_materials(self, bdf_file, size=8, is_double=False):
"""Writes the thermal materials in a sorted order"""
if self.thermal_materials:
msg = ['$THERMAL MATERIALS\n']
for (unused_mid, material) in sorted(iteritems(self.thermal_materials)):
msg.append(material.write_card(size, is_double))
bdf_file.write(''.join(msg))
| lgpl-3.0 | 4,066,461,600,381,241,000 | 43.201362 | 98 | 0.54286 | false |
ingolemo/python-lenses | lenses/__init__.py | 1 | 1059 | """A python module for manipulating deeply nested data structures
without mutating them.
A simple overview for this module is available in the readme or
at [http://github.com/ingolemo/python-lenses]. More detailed
information for each object is available in the relevant
docstrings. `help(lenses.UnboundLens)` is particularly useful.
The entry point to this library is the `lens` object:
>>> from lenses import lens
>>> lens
UnboundLens(TrivialIso())
You can also obtain a bound lens with the `bind` function.
>>> from lenses import bind
>>> bind([1, 2, 3])
BoundLens([1, 2, 3], TrivialIso())
"""
from typing import TypeVar
from . import optics
from . import ui
# included so you can run pydoc lenses.UnboundLens
from .ui import UnboundLens
S = TypeVar("S")
def bind(state: S) -> ui.BoundLens[S, S, S, S]:
"Returns a simple BoundLens object bound to `state`."
return ui.BoundLens(state, optics.TrivialIso())
lens = ui.UnboundLens(optics.TrivialIso()) # type: ui.UnboundLens
__all__ = ["lens", "bind", "optics"]
| gpl-3.0 | 6,049,408,742,977,415,000 | 25.475 | 66 | 0.707271 | false |
SUSE/kiwi | test/unit/xml_state_test.py | 1 | 39351 | import logging
from collections import namedtuple
from mock import (
patch, Mock
)
from pytest import (
raises, fixture
)
from kiwi.xml_state import XMLState
from kiwi.xml_description import XMLDescription
from kiwi.exceptions import (
KiwiTypeNotFound,
KiwiDistributionNameError,
KiwiProfileNotFound
)
class TestXMLState:
@fixture(autouse=True)
def inject_fixtures(self, caplog):
self._caplog = caplog
@patch('platform.machine')
def setup(self, mock_machine):
mock_machine.return_value = 'x86_64'
self.description = XMLDescription(
'../data/example_config.xml'
)
self.state = XMLState(
self.description.load()
)
boot_description = XMLDescription(
'../data/isoboot/example-distribution/config.xml'
)
self.boot_state = XMLState(
boot_description.load()
)
no_image_packages_description = XMLDescription(
'../data/example_no_image_packages_config.xml'
)
self.no_image_packages_boot_state = XMLState(
no_image_packages_description.load()
)
self.bootloader = Mock()
self.bootloader.get_name.return_value = 'some-loader'
self.bootloader.get_timeout.return_value = 'some-timeout'
self.bootloader.get_timeout_style.return_value = 'some-style'
self.bootloader.get_targettype.return_value = 'some-target'
self.bootloader.get_console.return_value = 'some-console'
self.bootloader.get_serial_line.return_value = 'some-serial'
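        # this mock stands in for a parsed <bootloader> section; the tests
        # for copy_bootloader_section and the get_build_type_bootloader_*
        # helpers patch it into the XML data instead of loading a real one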
def test_get_description_section(self):
description = self.state.get_description_section()
assert description.author == 'Marcus'
assert description.contact == '[email protected]'
assert description.specification == \
'Testing various configuration states'
@patch('platform.machine')
def test_get_preferences_by_architecture(self, mock_machine):
mock_machine.return_value = 'aarch64'
state = XMLState(
self.description.load()
)
preferences = state.get_preferences_sections()
assert len(preferences) == 3
assert preferences[2].get_arch() == 'aarch64'
assert state.get_build_type_name() == 'iso'
def test_build_type_primary_selected(self):
assert self.state.get_build_type_name() == 'oem'
def test_build_type_first_selected(self):
self.state.xml_data.get_preferences()[2].get_type()[0].set_primary(
False
)
assert self.state.get_build_type_name() == 'oem'
@patch('kiwi.xml_state.XMLState.get_preferences_sections')
def test_get_rpm_excludedocs_without_entry(self, mock_preferences):
mock_preferences.return_value = []
assert self.state.get_rpm_excludedocs() is False
def test_get_rpm_excludedocs(self):
assert self.state.get_rpm_excludedocs() is True
@patch('kiwi.xml_state.XMLState.get_preferences_sections')
def test_get_rpm_check_signatures_without_entry(self, mock_preferences):
mock_preferences.return_value = []
assert self.state.get_rpm_check_signatures() is False
def test_get_rpm_check_signatures(self):
assert self.state.get_rpm_check_signatures() is True
def test_get_package_manager(self):
assert self.state.get_package_manager() == 'zypper'
@patch('kiwi.xml_state.XMLState.get_preferences_sections')
def test_get_default_package_manager(self, mock_preferences):
mock_preferences.return_value = []
assert self.state.get_package_manager() == 'dnf'
def test_get_image_version(self):
assert self.state.get_image_version() == '1.13.2'
def test_get_bootstrap_packages(self):
assert self.state.get_bootstrap_packages() == [
'filesystem', 'zypper'
]
assert self.state.get_bootstrap_packages(plus_packages=['vim']) == [
'filesystem', 'vim', 'zypper'
]
assert self.no_image_packages_boot_state.get_bootstrap_packages() == [
'patterns-openSUSE-base'
]
def test_get_system_packages(self):
assert self.state.get_system_packages() == [
'gfxboot-branding-openSUSE',
'grub2-branding-openSUSE',
'ifplugd',
'iputils',
'kernel-default',
'openssh',
'plymouth-branding-openSUSE',
'vim'
]
@patch('platform.machine')
def test_get_system_packages_some_arch(self, mock_machine):
mock_machine.return_value = 's390'
state = XMLState(
self.description.load()
)
assert state.get_system_packages() == [
'foo',
'gfxboot-branding-openSUSE',
'grub2-branding-openSUSE',
'ifplugd',
'iputils',
'kernel-default',
'openssh',
'plymouth-branding-openSUSE',
'vim'
]
def test_get_system_collections(self):
assert self.state.get_system_collections() == [
'base'
]
def test_get_system_products(self):
assert self.state.get_system_products() == [
'openSUSE'
]
def test_get_system_archives(self):
assert self.state.get_system_archives() == [
'/absolute/path/to/image.tgz'
]
def test_get_system_ignore_packages(self):
assert self.state.get_system_ignore_packages() == [
'bar', 'baz', 'foo'
]
self.state.host_architecture = 'aarch64'
assert self.state.get_system_ignore_packages() == [
'baz', 'foo'
]
self.state.host_architecture = 's390'
assert self.state.get_system_ignore_packages() == [
'baz'
]
def test_get_system_collection_type(self):
assert self.state.get_system_collection_type() == 'plusRecommended'
def test_get_bootstrap_collections(self):
assert self.state.get_bootstrap_collections() == [
'bootstrap-collection'
]
def test_get_bootstrap_products(self):
assert self.state.get_bootstrap_products() == ['kiwi']
def test_get_bootstrap_archives(self):
assert self.state.get_bootstrap_archives() == ['bootstrap.tgz']
def test_get_bootstrap_collection_type(self):
assert self.state.get_bootstrap_collection_type() == 'onlyRequired'
def test_set_repository(self):
self.state.set_repository('repo', 'type', 'alias', 1, True, False)
assert self.state.xml_data.get_repository()[0].get_source().get_path() \
== 'repo'
assert self.state.xml_data.get_repository()[0].get_type() == 'type'
assert self.state.xml_data.get_repository()[0].get_alias() == 'alias'
assert self.state.xml_data.get_repository()[0].get_priority() == 1
assert self.state.xml_data.get_repository()[0] \
.get_imageinclude() is True
assert self.state.xml_data.get_repository()[0] \
.get_package_gpgcheck() is False
def test_add_repository(self):
self.state.add_repository('repo', 'type', 'alias', 1, True)
assert self.state.xml_data.get_repository()[3].get_source().get_path() \
== 'repo'
assert self.state.xml_data.get_repository()[3].get_type() == 'type'
assert self.state.xml_data.get_repository()[3].get_alias() == 'alias'
assert self.state.xml_data.get_repository()[3].get_priority() == 1
assert self.state.xml_data.get_repository()[3] \
.get_imageinclude() is True
def test_add_repository_with_empty_values(self):
self.state.add_repository('repo', 'type', '', '', True)
assert self.state.xml_data.get_repository()[3].get_source().get_path() \
== 'repo'
assert self.state.xml_data.get_repository()[3].get_type() == 'type'
assert self.state.xml_data.get_repository()[3].get_alias() == ''
assert self.state.xml_data.get_repository()[3].get_priority() is None
assert self.state.xml_data.get_repository()[3] \
.get_imageinclude() is True
def test_get_to_become_deleted_packages(self):
assert self.state.get_to_become_deleted_packages() == [
'kernel-debug'
]
def test_get_build_type_vagrant_config_section(self):
vagrant_config = self.state.get_build_type_vagrant_config_section()
assert vagrant_config.get_provider() == 'libvirt'
assert self.boot_state.get_build_type_vagrant_config_section() is None
def test_virtualbox_guest_additions_vagrant_config_section(self):
assert not self.state.get_vagrant_config_virtualbox_guest_additions()
def test_virtualbox_guest_additions_vagrant_config_section_missing(self):
self.state. \
get_build_type_vagrant_config_section() \
.virtualbox_guest_additions_present = True
assert self.state.get_vagrant_config_virtualbox_guest_additions()
def test_get_build_type_system_disk_section(self):
assert self.state.get_build_type_system_disk_section().get_name() == \
'mydisk'
def test_get_build_type_vmdisk_section(self):
assert self.state.get_build_type_vmdisk_section().get_id() == 0
assert self.boot_state.get_build_type_vmdisk_section() is None
def test_get_build_type_vmnic_entries(self):
assert self.state.get_build_type_vmnic_entries()[0].get_interface() \
== ''
assert self.boot_state.get_build_type_vmnic_entries() == []
def test_get_build_type_vmdvd_section(self):
assert self.state.get_build_type_vmdvd_section().get_id() == 0
assert self.boot_state.get_build_type_vmdvd_section() is None
def test_get_volume_management(self):
assert self.state.get_volume_management() == 'lvm'
def test_get_volume_management_none(self):
assert self.boot_state.get_volume_management() is None
def test_get_volume_management_btrfs(self):
description = XMLDescription('../data/example_btrfs_config.xml')
xml_data = description.load()
state = XMLState(xml_data)
assert state.get_volume_management() == 'btrfs'
def test_get_volume_management_lvm_prefer(self):
description = XMLDescription('../data/example_lvm_preferred_config.xml')
xml_data = description.load()
state = XMLState(xml_data)
assert state.get_volume_management() == 'lvm'
def test_get_volume_management_lvm_default(self):
description = XMLDescription('../data/example_lvm_default_config.xml')
xml_data = description.load()
state = XMLState(xml_data)
assert state.get_volume_management() == 'lvm'
def test_build_type_explicitly_selected(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['vmxFlavour'], 'oem')
assert state.get_build_type_name() == 'oem'
def test_build_type_not_found(self):
xml_data = self.description.load()
with raises(KiwiTypeNotFound):
XMLState(xml_data, ['vmxFlavour'], 'foo')
def test_build_type_not_found_no_default_type(self):
description = XMLDescription('../data/example_no_default_type.xml')
xml_data = description.load()
with raises(KiwiTypeNotFound):
XMLState(xml_data, ['minimal'])
def test_profile_not_found(self):
xml_data = self.description.load()
with raises(KiwiProfileNotFound):
XMLState(xml_data, ['foo'])
def test_profile_requires(self):
xml_data = self.description.load()
xml_state = XMLState(xml_data, ['composedProfile'])
assert xml_state.profiles == [
'composedProfile', 'vmxSimpleFlavour', 'xenDomUFlavour'
]
def test_get_volumes_custom_root_volume_name(self):
description = XMLDescription(
'../data/example_lvm_custom_rootvol_config.xml'
)
xml_data = description.load()
state = XMLState(xml_data)
volume_type = namedtuple(
'volume_type', [
'name',
'size',
'realpath',
'mountpoint',
'fullsize',
'label',
'attributes',
'is_root_volume'
]
)
assert state.get_volumes() == [
volume_type(
name='myroot', size='freespace:500',
realpath='/',
mountpoint=None, fullsize=False,
label=None,
attributes=[],
is_root_volume=True
)
]
def test_get_volumes(self):
description = XMLDescription('../data/example_lvm_default_config.xml')
xml_data = description.load()
state = XMLState(xml_data)
volume_type = namedtuple(
'volume_type', [
'name',
'size',
'realpath',
'mountpoint',
'fullsize',
'label',
'attributes',
'is_root_volume'
]
)
assert state.get_volumes() == [
volume_type(
name='usr_lib', size='size:1024',
realpath='usr/lib',
mountpoint='usr/lib',
fullsize=False,
label='library',
attributes=[],
is_root_volume=False
),
volume_type(
name='LVRoot', size='freespace:500',
realpath='/',
mountpoint=None, fullsize=False,
label=None,
attributes=[],
is_root_volume=True
),
volume_type(
name='etc_volume', size='freespace:30',
realpath='etc',
mountpoint='etc', fullsize=False,
label=None,
attributes=['no-copy-on-write'],
is_root_volume=False
),
volume_type(
name='bin_volume', size=None,
realpath='/usr/bin',
mountpoint='/usr/bin', fullsize=True,
label=None,
attributes=[],
is_root_volume=False
),
volume_type(
name='LVSwap', size='size:128',
realpath='swap',
mountpoint=None, fullsize=False,
label='SWAP',
attributes=[],
is_root_volume=False
)
]
def test_get_volumes_no_explicit_root_setup(self):
description = XMLDescription('../data/example_lvm_no_root_config.xml')
xml_data = description.load()
state = XMLState(xml_data)
volume_type = namedtuple(
'volume_type', [
'name',
'size',
'realpath',
'mountpoint',
'fullsize',
'label',
'attributes',
'is_root_volume'
]
)
assert state.get_volumes() == [
volume_type(
name='LVRoot', size=None, realpath='/',
mountpoint=None, fullsize=True,
label=None,
attributes=[],
is_root_volume=True
),
volume_type(
name='LVSwap', size='size:128',
realpath='swap',
mountpoint=None, fullsize=False,
label='SWAP',
attributes=[],
is_root_volume=False
)
]
def test_get_volumes_no_explicit_root_setup_other_fullsize_volume(self):
description = XMLDescription(
'../data/example_lvm_no_root_full_usr_config.xml'
)
xml_data = description.load()
state = XMLState(xml_data)
volume_type = namedtuple(
'volume_type', [
'name',
'size',
'realpath',
'mountpoint',
'fullsize',
'label',
'attributes',
'is_root_volume'
]
)
assert state.get_volumes() == [
volume_type(
name='usr', size=None, realpath='usr',
mountpoint='usr', fullsize=True,
label=None,
attributes=[],
is_root_volume=False
),
volume_type(
name='LVRoot', size='freespace:30', realpath='/',
mountpoint=None, fullsize=False,
label=None,
attributes=[],
is_root_volume=True
),
volume_type(
name='LVSwap', size='size:128',
realpath='swap',
mountpoint=None, fullsize=False,
label='SWAP',
attributes=[],
is_root_volume=False
)
]
@patch('kiwi.xml_state.XMLState.get_build_type_system_disk_section')
def test_get_empty_volumes(self, mock_system_disk):
mock_system_disk.return_value = None
assert self.state.get_volumes() == []
def test_get_strip_files_to_delete(self):
assert self.state.get_strip_files_to_delete() == ['del-a', 'del-b']
def test_get_strip_tools_to_keep(self):
assert self.state.get_strip_tools_to_keep() == ['tool-a', 'tool-b']
def test_get_strip_libraries_to_keep(self):
assert self.state.get_strip_libraries_to_keep() == ['lib-a', 'lib-b']
def test_get_build_type_machine_section(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['vmxSimpleFlavour'], 'oem')
assert state.get_build_type_machine_section().get_guestOS() == 'suse'
def test_get_drivers_list(self):
assert self.state.get_drivers_list() == \
['crypto/*', 'drivers/acpi/*', 'bar']
def test_get_build_type_oemconfig_section(self):
xml_data = self.description.load()
state = XMLState(xml_data, None, 'oem')
assert state.get_build_type_oemconfig_section().get_oem_swap()[0] is \
True
def test_get_oemconfig_oem_resize(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['vmxFlavour'], 'oem')
assert state.get_oemconfig_oem_resize() is True
description = XMLDescription(
'../data/example_multiple_users_config.xml'
)
xml_data = description.load()
state = XMLState(xml_data)
assert state.get_oemconfig_oem_resize() is False
def test_get_oemconfig_oem_multipath_scan(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['vmxFlavour'], 'oem')
assert state.get_oemconfig_oem_multipath_scan() is False
description = XMLDescription(
'../data/example_disk_config.xml'
)
xml_data = description.load()
state = XMLState(xml_data)
assert state.get_oemconfig_oem_multipath_scan() is False
def test_get_oemconfig_swap_mbytes(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['containerFlavour'], 'docker')
assert state.get_oemconfig_swap_mbytes() is None
state = XMLState(xml_data, ['vmxFlavour'], 'oem')
assert state.get_oemconfig_swap_mbytes() == 42
def test_get_oemconfig_swap_name(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['containerFlavour'], 'docker')
assert state.get_oemconfig_swap_name() == 'LVSwap'
state = XMLState(xml_data, ['vmxFlavour'], 'oem')
assert state.get_oemconfig_swap_name() == 'swap'
def test_get_oemconfig_swap_mbytes_default(self):
description = XMLDescription(
'../data/example_btrfs_config.xml'
)
xml_data = description.load()
state = XMLState(xml_data)
assert state.get_oemconfig_swap_mbytes() == 128
def test_get_users_sections(self):
assert self.state.get_users_sections()[0].get_user()[0].get_name() == \
'root'
def test_get_users(self):
description = XMLDescription(
'../data/example_multiple_users_config.xml'
)
xml_data = description.load()
state = XMLState(xml_data)
users = state.get_users()
assert len(users) == 3
assert any(u.get_name() == 'root' for u in users)
assert any(u.get_name() == 'tux' for u in users)
assert any(u.get_name() == 'kiwi' for u in users)
def test_get_user_groups(self):
description = XMLDescription(
'../data/example_multiple_users_config.xml'
)
xml_data = description.load()
state = XMLState(xml_data)
assert len(state.get_user_groups('root')) == 0
assert len(state.get_user_groups('tux')) == 1
assert any(grp == 'users' for grp in state.get_user_groups('tux'))
assert len(state.get_user_groups('kiwi')) == 3
assert any(grp == 'users' for grp in state.get_user_groups('kiwi'))
assert any(grp == 'kiwi' for grp in state.get_user_groups('kiwi'))
assert any(grp == 'admin' for grp in state.get_user_groups('kiwi'))
def test_copy_displayname(self):
self.state.copy_displayname(self.boot_state)
assert self.boot_state.xml_data.get_displayname() == 'Bob'
def test_copy_drivers_sections(self):
self.state.copy_drivers_sections(self.boot_state)
assert 'bar' in self.boot_state.get_drivers_list()
def test_copy_systemdisk_section(self):
self.state.copy_systemdisk_section(self.boot_state)
systemdisk = self.boot_state.get_build_type_system_disk_section()
assert systemdisk.get_name() == 'mydisk'
@patch('kiwi.xml_parse.type_.get_bootloader')
def test_copy_bootloader_section(self, mock_bootloader):
mock_bootloader.return_value = [self.bootloader]
self.state.copy_bootloader_section(self.boot_state)
assert self.boot_state.get_build_type_bootloader_section() == \
self.bootloader
def test_copy_strip_sections(self):
self.state.copy_strip_sections(self.boot_state)
assert 'del-a' in self.boot_state.get_strip_files_to_delete()
def test_copy_machine_section(self):
self.state.copy_machine_section(self.boot_state)
machine = self.boot_state.get_build_type_machine_section()
assert machine.get_memory() == 512
def test_copy_oemconfig_section(self):
self.state.copy_oemconfig_section(self.boot_state)
oemconfig = self.boot_state.get_build_type_oemconfig_section()
assert oemconfig.get_oem_systemsize()[0] == 2048
def test_copy_repository_sections(self):
self.state.copy_repository_sections(self.boot_state, True)
repository = self.boot_state.get_repository_sections()[0]
assert repository.get_source().get_path() == 'iso:///image/CDs/dvd.iso'
def test_copy_preferences_subsections(self):
self.state.copy_preferences_subsections(
['bootsplash_theme'], self.boot_state
)
preferences = self.boot_state.get_preferences_sections()[0]
assert preferences.get_bootsplash_theme()[0] == 'openSUSE'
def test_copy_build_type_attributes(self):
self.state.copy_build_type_attributes(
['firmware'], self.boot_state
)
assert self.boot_state.build_type.get_firmware() == 'efi'
def test_copy_bootincluded_packages_with_no_image_packages(self):
self.state.copy_bootincluded_packages(self.boot_state)
bootstrap_packages = self.boot_state.get_bootstrap_packages()
assert 'plymouth-branding-openSUSE' in bootstrap_packages
assert 'grub2-branding-openSUSE' in bootstrap_packages
assert 'gfxboot-branding-openSUSE' in bootstrap_packages
to_delete_packages = self.boot_state.get_to_become_deleted_packages()
assert 'gfxboot-branding-openSUSE' not in to_delete_packages
def test_copy_bootincluded_packages_with_image_packages(self):
boot_description = XMLDescription(
'../data/isoboot/example-distribution/config.xml'
)
boot_state = XMLState(boot_description.load(), ['std'])
self.state.copy_bootincluded_packages(boot_state)
image_packages = boot_state.get_system_packages()
assert 'plymouth-branding-openSUSE' in image_packages
assert 'grub2-branding-openSUSE' in image_packages
assert 'gfxboot-branding-openSUSE' in image_packages
to_delete_packages = boot_state.get_to_become_deleted_packages()
assert 'gfxboot-branding-openSUSE' not in to_delete_packages
def test_copy_bootincluded_archives(self):
self.state.copy_bootincluded_archives(self.boot_state)
bootstrap_archives = self.boot_state.get_bootstrap_archives()
assert '/absolute/path/to/image.tgz' in bootstrap_archives
def test_copy_bootdelete_packages(self):
self.state.copy_bootdelete_packages(self.boot_state)
to_delete_packages = self.boot_state.get_to_become_deleted_packages()
assert 'vim' in to_delete_packages
def test_copy_bootdelete_packages_no_delete_section_in_boot_descr(self):
boot_description = XMLDescription(
'../data/isoboot/example-distribution-no-delete-section/config.xml'
)
boot_state = XMLState(
boot_description.load()
)
self.state.copy_bootdelete_packages(boot_state)
to_delete_packages = boot_state.get_to_become_deleted_packages()
assert 'vim' in to_delete_packages
def test_build_type_size(self):
result = self.state.get_build_type_size()
assert result.mbytes == 1024
assert result.additive
def test_build_type_size_with_unpartitioned(self):
state = XMLState(self.description.load(), ['vmxSimpleFlavour'], 'oem')
result = state.get_build_type_size()
assert result.mbytes == 3072
assert not result.additive
result = state.get_build_type_size(include_unpartitioned=True)
assert result.mbytes == 4096
assert not result.additive
def test_get_build_type_unpartitioned_bytes(self):
assert self.state.get_build_type_unpartitioned_bytes() == 0
state = XMLState(self.description.load(), ['vmxSimpleFlavour'], 'oem')
assert state.get_build_type_unpartitioned_bytes() == 1073741824
state = XMLState(self.description.load(), ['vmxFlavour'], 'oem')
assert state.get_build_type_unpartitioned_bytes() == 0
state = XMLState(self.description.load(), ['ec2Flavour'], 'oem')
assert state.get_build_type_unpartitioned_bytes() == 0
def test_get_volume_group_name(self):
assert self.state.get_volume_group_name() == 'mydisk'
def test_get_volume_group_name_default(self):
assert self.boot_state.get_volume_group_name() == 'systemVG'
def test_get_distribution_name_from_boot_attribute(self):
assert self.state.get_distribution_name_from_boot_attribute() == \
'distribution'
def test_get_fs_mount_option_list(self):
assert self.state.get_fs_mount_option_list() == ['async']
def test_get_fs_create_option_list(self):
assert self.state.get_fs_create_option_list() == ['-O', '^has_journal']
@patch('kiwi.xml_parse.type_.get_boot')
def test_get_distribution_name_from_boot_attribute_no_boot(self, mock_boot):
mock_boot.return_value = None
with raises(KiwiDistributionNameError):
self.state.get_distribution_name_from_boot_attribute()
@patch('kiwi.xml_parse.type_.get_boot')
def test_get_distribution_name_from_boot_attribute_invalid_boot(
self, mock_boot
):
mock_boot.return_value = 'invalid'
with raises(KiwiDistributionNameError):
self.state.get_distribution_name_from_boot_attribute()
def test_delete_repository_sections(self):
self.state.delete_repository_sections()
assert self.state.get_repository_sections() == []
def test_delete_repository_sections_used_for_build(self):
self.state.delete_repository_sections_used_for_build()
assert self.state.get_repository_sections()[0].get_imageonly()
def test_get_build_type_vmconfig_entries(self):
assert self.state.get_build_type_vmconfig_entries() == []
def test_get_build_type_vmconfig_entries_for_simple_disk(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['vmxSimpleFlavour'], 'oem')
assert state.get_build_type_vmconfig_entries() == [
'numvcpus = "4"', 'cpuid.coresPerSocket = "2"'
]
def test_get_build_type_vmconfig_entries_no_machine_section(self):
description = XMLDescription('../data/example_disk_config.xml')
xml_data = description.load()
state = XMLState(xml_data)
assert state.get_build_type_vmconfig_entries() == []
def test_get_build_type_docker_containerconfig_section(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['containerFlavour'], 'docker')
containerconfig = state.get_build_type_containerconfig_section()
assert containerconfig.get_name() == \
'container_name'
assert containerconfig.get_maintainer() == \
'tux'
assert containerconfig.get_workingdir() == \
'/root'
def test_set_container_tag(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['containerFlavour'], 'docker')
state.set_container_config_tag('new_tag')
config = state.get_container_config()
assert config['container_tag'] == 'new_tag'
def test_add_container_label(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['containerFlavour'], 'docker')
state.add_container_config_label('somelabel', 'overwrittenvalue')
state.add_container_config_label('new_label', 'new value')
config = state.get_container_config()
assert config['labels'] == {
'somelabel': 'overwrittenvalue',
'someotherlabel': 'anotherlabelvalue',
'new_label': 'new value'
}
    def test_add_container_label_without_containerconfig(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['xenDom0Flavour'], 'docker')
state.add_container_config_label('somelabel', 'newlabelvalue')
config = state.get_container_config()
assert config['labels'] == {
'somelabel': 'newlabelvalue'
}
def test_add_container_label_no_container_image_type(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['vmxFlavour'], 'oem')
state.add_container_config_label('somelabel', 'newlabelvalue')
with self._caplog.at_level(logging.WARNING):
config = state.get_container_config()
assert config == {
'history': {'author': 'Marcus <[email protected]>'},
'maintainer': 'Marcus <[email protected]>'
}
def test_set_container_tag_not_applied(self):
with self._caplog.at_level(logging.WARNING):
self.state.set_container_config_tag('new_tag')
def test_get_container_config(self):
expected_config = {
'labels': {
'somelabel': 'labelvalue',
'someotherlabel': 'anotherlabelvalue'
},
'maintainer': 'tux',
'entry_subcommand': ['ls', '-l'],
'container_name': 'container_name',
'container_tag': 'container_tag',
'additional_tags': ['current', 'foobar'],
'workingdir': '/root',
'environment': {
'PATH': '/bin:/usr/bin:/home/user/bin',
'SOMEVAR': 'somevalue'
},
'user': 'root',
'volumes': ['/tmp', '/var/log'],
'entry_command': ['/bin/bash', '-x'],
'expose_ports': ['80', '8080'],
'history': {
'author': 'history author',
'comment': 'This is a comment',
'created_by': 'created by text',
'application_id': '123',
'package_version': '2003.12.0.0',
'launcher': 'app'
}
}
xml_data = self.description.load()
state = XMLState(xml_data, ['containerFlavour'], 'docker')
assert state.get_container_config() == expected_config
def test_get_container_config_clear_commands(self):
expected_config = {
'maintainer': 'tux',
'entry_subcommand': [],
'container_name': 'container_name',
'container_tag': 'container_tag',
'workingdir': '/root',
'user': 'root',
'entry_command': [],
'history': {'author': 'Marcus <[email protected]>'}
}
xml_data = self.description.load()
state = XMLState(xml_data, ['derivedContainer'], 'docker')
assert state.get_container_config() == expected_config
def test_get_spare_part(self):
assert self.state.get_build_type_spare_part_size() == 200
assert self.state.get_build_type_spare_part_fs_attributes() == [
'no-copy-on-write'
]
def test_get_build_type_format_options(self):
assert self.state.get_build_type_format_options() == {
'super': 'man',
'force_size': None
}
def test_get_derived_from_image_uri(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['derivedContainer'], 'docker')
assert state.get_derived_from_image_uri().uri == \
'obs://project/repo/image#mytag'
def test_set_derived_from_image_uri(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['derivedContainer'], 'docker')
state.set_derived_from_image_uri('file:///new_uri')
assert state.get_derived_from_image_uri().translate() == '/new_uri'
def test_set_derived_from_image_uri_not_applied(self):
with self._caplog.at_level(logging.WARNING):
self.state.set_derived_from_image_uri('file:///new_uri')
def test_is_xen_server(self):
assert self.state.is_xen_server() is True
def test_is_xen_guest_by_machine_setup(self):
assert self.state.is_xen_guest() is True
def test_is_xen_guest_no_xen_guest_setup(self):
assert self.boot_state.is_xen_guest() is False
@patch('platform.machine')
def test_is_xen_guest_by_firmware_setup(self, mock_platform_machine):
mock_platform_machine.return_value = 'x86_64'
xml_data = self.description.load()
state = XMLState(xml_data, ['ec2Flavour'], 'oem')
assert state.is_xen_guest() is True
@patch('platform.machine')
def test_is_xen_guest_by_architecture(self, mock_platform_machine):
mock_platform_machine.return_value = 'unsupported'
xml_data = self.description.load()
state = XMLState(xml_data, ['ec2Flavour'], 'oem')
assert state.is_xen_guest() is False
def test_get_initrd_system(self):
xml_data = self.description.load()
state = XMLState(xml_data, ['vmxFlavour'], 'oem')
assert state.get_initrd_system() == 'dracut'
state = XMLState(xml_data, ['vmxSimpleFlavour'], 'iso')
assert state.get_initrd_system() == 'dracut'
state = XMLState(xml_data, ['containerFlavour'], 'docker')
assert state.get_initrd_system() is None
state = XMLState(xml_data, [], 'oem')
assert state.get_initrd_system() == 'dracut'
def test_get_rpm_locale_filtering(self):
assert self.state.get_rpm_locale_filtering() is True
assert self.boot_state.get_rpm_locale_filtering() is False
def test_get_locale(self):
assert self.state.get_locale() == ['en_US', 'de_DE']
assert self.boot_state.get_locale() is None
def test_get_rpm_locale(self):
assert self.state.get_rpm_locale() == [
'POSIX', 'C', 'C.UTF-8', 'en_US', 'de_DE'
]
assert self.boot_state.get_rpm_locale() is None
def test_set_root_partition_uuid(self):
assert self.state.get_root_partition_uuid() is None
self.state.set_root_partition_uuid('some-id')
assert self.state.get_root_partition_uuid() == 'some-id'
def test_set_root_filesystem_uuid(self):
assert self.state.get_root_filesystem_uuid() is None
self.state.set_root_filesystem_uuid('some-id')
assert self.state.get_root_filesystem_uuid() == 'some-id'
@patch('kiwi.xml_parse.type_.get_bootloader')
def test_get_build_type_bootloader_name(self, mock_bootloader):
mock_bootloader.return_value = [None]
assert self.state.get_build_type_bootloader_name() == 'grub2'
mock_bootloader.return_value = [self.bootloader]
assert self.state.get_build_type_bootloader_name() == 'some-loader'
@patch('kiwi.xml_parse.type_.get_bootloader')
def test_get_build_type_bootloader_console(self, mock_bootloader):
mock_bootloader.return_value = [self.bootloader]
assert self.state.get_build_type_bootloader_console() == \
'some-console'
@patch('kiwi.xml_parse.type_.get_bootloader')
def test_get_build_type_bootloader_serial_line_setup(self, mock_bootloader):
mock_bootloader.return_value = [self.bootloader]
assert self.state.get_build_type_bootloader_serial_line_setup() == \
'some-serial'
mock_bootloader.return_value = [None]
assert self.state.get_build_type_bootloader_serial_line_setup() \
is None
@patch('kiwi.xml_parse.type_.get_bootloader')
def test_get_build_type_bootloader_timeout(self, mock_bootloader):
mock_bootloader.return_value = [self.bootloader]
assert self.state.get_build_type_bootloader_timeout() == \
'some-timeout'
@patch('kiwi.xml_parse.type_.get_bootloader')
def test_get_build_type_bootloader_timeout_style(self, mock_bootloader):
mock_bootloader.return_value = [self.bootloader]
assert self.state.get_build_type_bootloader_timeout_style() == \
'some-style'
mock_bootloader.return_value = [None]
assert self.state.get_build_type_bootloader_timeout_style() \
is None
@patch('kiwi.xml_parse.type_.get_bootloader')
def test_get_build_type_bootloader_targettype(self, mock_bootloader):
mock_bootloader.return_value = [self.bootloader]
assert self.state.get_build_type_bootloader_targettype() == \
'some-target'
    def test_get_installmedia_initrd_modules(self):
        assert self.state.get_installmedia_initrd_modules('add') == \
            ['network-legacy']
        assert self.state.get_installmedia_initrd_modules('set') == []
        assert self.state.get_installmedia_initrd_modules('omit') == []
        xml_data = self.description.load()
        state = XMLState(xml_data, ['vmxSimpleFlavour'], 'oem')
        assert state.get_installmedia_initrd_modules('add') == []
| gpl-3.0 | -1,623,195,503,704,412,200 | 38.628399 | 80 | 0.597139 | false |
geotagx/geotagx-pybossa-archive | test/test_sched.py | 1 | 17119 | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
import json
import random
from helper import sched
from base import model, Fixtures, db, redis_flushall
import pybossa
class TestSched(sched.Helper):
def setUp(self):
super(TestSched, self).setUp()
Fixtures.create()
self.endpoints = ['app', 'task', 'taskrun']
# Tests
def test_anonymous_01_newtask(self):
""" Test SCHED newtask returns a Task for the Anonymous User"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
assert data['info'], data
def test_anonymous_02_gets_different_tasks(self):
""" Test SCHED newtask returns N different Tasks for the Anonymous User"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
assigned_tasks = []
# Get a Task until scheduler returns None
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
while data.get('info') is not None:
# Check that we have received a Task
assert data.get('info'), data
# Save the assigned task
assigned_tasks.append(data)
# Submit an Answer for the assigned task
tr = model.TaskRun(app_id=data['app_id'], task_id=data['id'],
user_ip="127.0.0.1",
info={'answer': 'Yes'})
db.session.add(tr)
db.session.commit()
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
        # Check if we received the same number of tasks as the available ones
tasks = db.session.query(model.Task).filter_by(app_id=1).all()
assert len(assigned_tasks) == len(tasks), len(assigned_tasks)
# Check if all the assigned Task.id are equal to the available ones
tasks = db.session.query(model.Task).filter_by(app_id=1).all()
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
def test_anonymous_03_respects_limit_tasks(self):
""" Test SCHED newtask respects the limit of 30 TaskRuns per Task"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
assigned_tasks = []
# Get Task until scheduler returns None
for i in range(10):
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
while data.get('info') is not None:
# Check that we received a Task
assert data.get('info'), data
# Save the assigned task
assigned_tasks.append(data)
# Submit an Answer for the assigned task
tr = model.TaskRun(app_id=data['app_id'], task_id=data['id'],
user_ip="127.0.0." + str(i),
info={'answer': 'Yes'})
db.session.add(tr)
db.session.commit()
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
        # Check if there are 10 TaskRuns per Task
tasks = db.session.query(model.Task).filter_by(app_id=1).all()
for t in tasks:
assert len(t.task_runs) == 10, len(t.task_runs)
# Check that all the answers are from different IPs
err_msg = "There are two or more Answers from same IP"
for t in tasks:
for tr in t.task_runs:
assert self.is_unique(tr.user_ip, t.task_runs), err_msg
def test_user_01_newtask(self):
""" Test SCHED newtask returns a Task for John Doe User"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
# Register
self.register()
self.signin()
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
assert data['info'], data
self.signout()
def test_user_02_gets_different_tasks(self):
""" Test SCHED newtask returns N different Tasks for John Doe User"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
# Register
self.register()
self.signin()
assigned_tasks = []
# Get Task until scheduler returns None
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
while data.get('info') is not None:
# Check that we received a Task
assert data.get('info'), data
# Save the assigned task
assigned_tasks.append(data)
# Submit an Answer for the assigned task
tr = dict(app_id=data['app_id'], task_id=data['id'],
info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
        # Check if we received the same number of tasks as the available ones
tasks = db.session.query(model.Task).filter_by(app_id=1).all()
assert len(assigned_tasks) == len(tasks), assigned_tasks
# Check if all the assigned Task.id are equal to the available ones
tasks = db.session.query(model.Task).filter_by(app_id=1).all()
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
def test_user_03_respects_limit_tasks(self):
""" Test SCHED newtask respects the limit of 30 TaskRuns per Task"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
assigned_tasks = []
# We need one extra loop to allow the scheduler to mark a task as completed
for i in range(11):
self.register(fullname=self.user.username + str(i),
username=self.user.username + str(i),
password=self.user.username + str(i))
print "Number of users %s" % len(db.session.query(model.User).all())
print "Giving answers as User: %s" % self.user.username + str(i)
self.signin()
# Get Task until scheduler returns None
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
while data.get('info') is not None:
# Check that we received a Task
assert data.get('info'), data
# Save the assigned task
assigned_tasks.append(data)
# Submit an Answer for the assigned task
tr = dict(app_id=data['app_id'], task_id=data['id'],
info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
self.signout()
        # Check if there are 10 TaskRuns per Task
tasks = db.session.query(model.Task).filter_by(app_id=1).all()
for t in tasks:
print len(t.task_runs)
assert len(t.task_runs) == 10, t.task_runs
        # Check that all the answers are from different Users
err_msg = "There are two or more Answers from same User"
for t in tasks:
for tr in t.task_runs:
assert self.is_unique(tr.user_id, t.task_runs), err_msg
# Check that task.state is updated to completed
for t in tasks:
assert t.state == "completed", t.state
def test_tasks_for_user_ip_id(self):
""" Test SCHED newtask to see if sends the same ammount of Task to
user_id and user_ip
"""
redis_flushall()
# Del Fixture Task
self.del_task_runs()
assigned_tasks = []
for i in range(10):
signin = False
            if random.random() >= 0.5:
signin = True
self.register(fullname=self.user.username + str(i),
username=self.user.username + str(i),
password=self.user.username + str(i))
if signin:
print "Giving answers as User: %s" % self.user.username + str(i)
else:
print "Giving answers as User IP: 127.0.0.%s" % str(i)
if signin:
self.signin()
# Get Task until scheduler returns None
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
while data.get('info') is not None:
# Check that we received a Task
assert data.get('info'), data
# Save the assigned task
assigned_tasks.append(data)
# Submit an Answer for the assigned task
if signin:
tr = dict(app_id=data['app_id'], task_id=data['id'],
info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
else:
tr = model.TaskRun(app_id=data['app_id'], task_id=data['id'],
user_ip="127.0.0." + str(i),
info={'answer': 'Yes'})
db.session.add(tr)
db.session.commit()
res = self.app.get('api/app/1/newtask')
data = json.loads(res.data)
if signin:
self.signout()
        # Check if there are 10 TaskRuns per Task
tasks = db.session.query(model.Task).filter_by(app_id=1).all()
for t in tasks:
print len(t.task_runs)
assert len(t.task_runs) == 10, t.task_runs
# Check that all the answers are from different IPs and IDs
err_msg1 = "There are two or more Answers from same User ID"
err_msg2 = "There are two or more Answers from same User IP"
for t in tasks:
for tr in t.task_runs:
if tr.user_id:
assert self.is_unique(tr.user_id, t.task_runs), err_msg1
else:
assert self.is_unique(tr.user_ip, t.task_runs), err_msg2
def test_task_preloading(self):
"""Test TASK Pre-loading works"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
# Register
self.register()
self.signin()
assigned_tasks = []
# Get Task until scheduler returns None
res = self.app.get('api/app/1/newtask')
task1 = json.loads(res.data)
# Check that we received a Task
assert task1.get('info'), task1
# Pre-load the next task for the user
res = self.app.get('api/app/1/newtask?offset=1')
task2 = json.loads(res.data)
# Check that we received a Task
assert task2.get('info'), task2
# Check that both tasks are different
assert task1.get('id') != task2.get('id'), "Tasks should be different"
## Save the assigned task
assigned_tasks.append(task1)
assigned_tasks.append(task2)
# Submit an Answer for the assigned and pre-loaded task
for t in assigned_tasks:
tr = dict(app_id=t['app_id'], task_id=t['id'], info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
# Get two tasks again
res = self.app.get('api/app/1/newtask')
task3 = json.loads(res.data)
# Check that we received a Task
assert task3.get('info'), task1
# Pre-load the next task for the user
res = self.app.get('api/app/1/newtask?offset=1')
task4 = json.loads(res.data)
# Check that we received a Task
assert task4.get('info'), task2
# Check that both tasks are different
assert task3.get('id') != task4.get('id'), "Tasks should be different"
assert task1.get('id') != task3.get('id'), "Tasks should be different"
assert task2.get('id') != task4.get('id'), "Tasks should be different"
# Check that a big offset returns None
res = self.app.get('api/app/1/newtask?offset=11')
print json.loads(res.data)
assert json.loads(res.data) == {}, res.data
def test_task_priority(self):
"""Test SCHED respects priority_0 field"""
redis_flushall()
# Del previous TaskRuns
self.del_task_runs()
# Register
self.register()
self.signin()
# By default, tasks without priority should be ordered by task.id (FIFO)
tasks = db.session.query(model.Task).filter_by(app_id=1).order_by('id').all()
res = self.app.get('api/app/1/newtask')
task1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert task1.get('id') == tasks[0].id, err_msg
# Now let's change the priority to a random task
import random
t = random.choice(tasks)
# Increase priority to maximum
t.priority_0 = 1
db.session.add(t)
db.session.commit()
# Request again a new task
res = self.app.get('api/app/1/newtask')
task1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert task1.get('id') == t.id, err_msg
err_msg = "Task.priority_0 should be the 1"
assert task1.get('priority_0') == 1, err_msg
class TestGetBreadthFirst:
@classmethod
def teardown_class(cls):
model.rebuild_db()
def tearDown(self):
db.session.remove()
redis_flushall()
def del_task_runs(self, app_id=1):
"""Deletes all TaskRuns for a given app_id"""
db.session.query(model.TaskRun).filter_by(app_id=1).delete()
db.session.commit()
db.session.remove()
def test_get_default_task_anonymous(self):
self._test_get_breadth_first_task()
def test_get_breadth_first_task_user(self):
user = Fixtures.create_users()[0]
self._test_get_breadth_first_task(user)
def _test_get_breadth_first_task(self, user=None):
self.del_task_runs()
if user:
short_name = 'xyzuser'
else:
short_name = 'xyznouser'
app = model.App(short_name=short_name)
task = model.Task(app=app, state='0', info={})
task2 = model.Task(app=app, state='0', info={})
db.session.add(app)
db.session.add(task)
db.session.add(task2)
db.session.commit()
taskid = task.id
appid = app.id
# give task2 a bunch of runs
for idx in range(2):
self._add_task_run(task2)
# now check we get task without task runs
out = pybossa.sched.get_breadth_first_task(appid)
assert out.id == taskid, out
# now check that offset works
out1 = pybossa.sched.get_breadth_first_task(appid)
out2 = pybossa.sched.get_breadth_first_task(appid, offset=1)
assert out1.id != out2.id, out
# asking for a bigger offset (max 10)
out2 = pybossa.sched.get_breadth_first_task(appid, offset=11)
assert out2 is None, out
self._add_task_run(task)
out = pybossa.sched.get_breadth_first_task(appid)
assert out.id == taskid, out
# now add 2 more taskruns. We now have 3 and 2 task runs per task
self._add_task_run(task)
self._add_task_run(task)
out = pybossa.sched.get_breadth_first_task(appid)
assert out.id == task2.id, out
def _add_task_run(self, task, user=None):
tr = model.TaskRun(task=task, user=user)
db.session.add(tr)
db.session.commit()
| agpl-3.0 | 1,114,941,357,677,361,800 | 37.042222 | 85 | 0.562299 | false |
ironsmile/tank4eta | pathfinding/core/node.py | 1 | 1197 | # -*- coding: utf-8 -*-
class Node(object):
"""
basic node, saves X and Y coordinates on some grid and determine if
it is walkable.
"""
def __init__(self, x=0, y=0, walkable=True, see_through=True):
# Coordinates
self.x = x
self.y = y
# Whether this node can be walked through.
self.walkable = walkable
# Whether this node is visible for line of sight calculations
self.see_through = see_through
# values used in the finder
# cost from this node to the goal
self.h = 0.0
# cost from the start node to this node
self.g = 0.0
# distance from start to this point (f = g + h )
self.f = 0.0
self.opened = 0
self.closed = False
# used for backtracking to the start point
self.parent = None
# used for recurion tracking of IDA*
self.retain_count = 0
# used for IDA* and Jump-Point-Search
self.tested = False
def __lt__(self, other):
"""
nodes are sorted by f value (see a_star.py)
:param other: compare Node
:return:
"""
return self.f < other.f
| mit | -6,834,358,347,663,112,000 | 24.468085 | 71 | 0.552214 | false |
speric/simplehttp | pysimplehttp/src/file_to_simplequeue.py | 1 | 6682 | import tornado.ioloop
import tornado.httpclient
import os
import functools
import gzip
import logging
import urllib
try:
import ujson as json
except ImportError:
import json
class FileToSimplequeue(object):
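    """Stream newline-delimited messages from an (optionally gzipped) input file
    into one or more simplequeue endpoints, topping each queue up to
    max_queue_depth while respecting max_concurrent in-flight requests and the
    optional JSON key=value require/exclude filters.
    """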
http = tornado.httpclient.AsyncHTTPClient(max_simultaneous_connections=50, max_clients=50)
def __init__(self, input_file, max_concurrent, max_queue_depth, simplequeue_urls,
check_simplequeue_interval, stats_interval, filter_require=None, filter_exclude=None, io_loop=None):
assert isinstance(simplequeue_urls, (list, tuple))
assert isinstance(max_queue_depth, int)
assert isinstance(max_concurrent, int)
assert isinstance(check_simplequeue_interval, int)
assert isinstance(stats_interval, int)
assert isinstance(filter_require, (None.__class__, list, tuple))
assert isinstance(filter_exclude, (None.__class__, list, tuple))
for entry in simplequeue_urls:
assert entry.startswith("http://") or entry.startswith("https://"), "simplequeue url %s is not valid" % entry
self.simplequeue_urls = simplequeue_urls
self.input = self.open_file(input_file)
self.concurrent = 0
self.finished = False
self.fill_check = False
self.max_queue_depth = max_queue_depth
self.max_concurrent = max_concurrent
self.check_simplequeue_interval = check_simplequeue_interval
self.pending = dict([[simplequeue, 0] for simplequeue in simplequeue_urls])
self.stats_interval = stats_interval
self.filter_require = dict([data.split('=', 1) for data in (filter_require or [])])
for key, value in self.filter_require.items():
logging.info("requiring json key=%s value=%s" % (key, value) )
self.filter_exclude = dict([data.split('=', 1) for data in (filter_exclude or [])])
for key, value in self.filter_exclude.items():
logging.info("excluding json key=%s value=%s" % (key, value) )
self.stats_reset()
self.io_loop = io_loop or tornado.ioloop.IOLoop.instance()
def stats_incr(self, successful=True, filtered=False):
if filtered:
self.filtered += 1
return
if successful:
self.success += 1
else:
self.failed += 1
def stats_reset(self):
self.success = 0
self.failed = 0
self.filtered = 0
def print_and_reset_stats(self):
logging.warning('success: %5d failed: %5d filtered: %5d concurrent: %2d' % (self.success, self.failed, self.filtered, self.concurrent))
self.stats_reset()
def start(self):
self.stats_timer = tornado.ioloop.PeriodicCallback(self.print_and_reset_stats, self.stats_interval * 1000)
self.stats_timer.start()
self.check_timer = tornado.ioloop.PeriodicCallback(self.check_simplequeue_depth, self.check_simplequeue_interval * 1000)
self.check_timer.start()
self.check_simplequeue_depth() # seed the loop
self.io_loop.start()
def open_file(self, filename):
assert os.path.exists(filename), "%r is not accessible" % filename
if filename.endswith('.gz'):
return gzip.open(filename, 'rb')
else:
return open(filename, 'rb')
def check_simplequeue_depth(self):
"""query the simplequeue and fill it based on where it's dept should be"""
if self.finished:
return self.finish()
for simplequeue in self.simplequeue_urls:
self.http.fetch(simplequeue + '/stats?format=json',
callback=functools.partial(self.finish_check_simplequeue_depth, simplequeue=simplequeue))
def finish_check_simplequeue_depth(self, response, simplequeue):
if response.code != 200:
logging.error('failed checking simplequeue depth %s/stats?format=json' % simplequeue)
self.continue_fill()
return
stats = json.loads(response.body)
entries_needed = self.max_queue_depth - stats['depth']
entries_needed = max(0, entries_needed)
logging.info('%s needs %d entries' % (simplequeue, entries_needed))
self.pending[simplequeue] = entries_needed
self.continue_fill()
def continue_fill(self):
if not self.fill_check:
self.fill_check = True
self.io_loop.add_callback(self.fill_as_needed)
def fill_as_needed(self):
"""
        Fill the simplequeues as needed, based on how many more entries each should receive and the current concurrency.
"""
self.fill_check = False
if self.finished:
return self.finish()
available_concurrency = self.max_concurrent - self.concurrent
for simplequeue in self.pending.keys():
while available_concurrency and self.pending[simplequeue] > 0:
if self.fill_one(simplequeue):
available_concurrency -= 1
self.pending[simplequeue] -= 1
def fill_one(self, endpoint):
"""read one line from `self.input` and send it to a simplequeue"""
data = self.input.readline()
if not data:
if not self.finished:
logging.info('at end of input stream')
self.finish()
return True
if self.filter_require or self.filter_exclude:
try:
msg = json.loads(data)
except Exception:
logging.error('failed json.loads(%r)' % data)
self.stats_incr(successful=False)
return False
for key, value in self.filter_require.items():
if msg.get(key) != value:
self.stats_incr(filtered=True)
return False
for key, value in self.filter_exclude.items():
if msg.get(key) == value:
self.stats_incr(filtered=True)
return False
self.concurrent += 1
url = endpoint + '/put?' + urllib.urlencode(dict(data=data))
self.http.fetch(url, self.finish_fill_one)
return True
def finish_fill_one(self, response):
self.concurrent -= 1
if response.code != 200:
logging.info(response)
self.failed += 1
else:
self.success += 1
# continue loop
if self.max_concurrent > self.concurrent:
self.continue_fill()
def finish(self):
self.finished = True
if self.concurrent == 0:
logging.info('stopping ioloop')
self.io_loop.stop()
| mit | 1,708,702,651,177,100,500 | 39.993865 | 143 | 0.601766 | false |
bitcraft/pyglet | pyglet/media/avbin.py | 1 | 20165 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
"""Use avbin to decode audio and video media.
"""
import struct
import ctypes
import threading
import time
import pyglet
import pyglet.lib
from pyglet import gl
from pyglet.gl import gl_info
from pyglet import image
from pyglet.compat import asbytes, asbytes_filename
from pyglet.media import (MediaFormatException, StreamingSource, VideoFormat,
AudioFormat, AudioData, MediaEvent, WorkerThread,
SourceInfo)
if pyglet.compat_platform.startswith('win') and struct.calcsize('P') == 8:
av = 'avbin64'
else:
av = 'avbin'
av = pyglet.lib.load_library(av)
AVBIN_RESULT_ERROR = -1
AVBIN_RESULT_OK = 0
AVbinResult = ctypes.c_int
AVBIN_STREAM_TYPE_UNKNOWN = 0
AVBIN_STREAM_TYPE_VIDEO = 1
AVBIN_STREAM_TYPE_AUDIO = 2
AVbinStreamType = ctypes.c_int
AVBIN_SAMPLE_FORMAT_U8 = 0
AVBIN_SAMPLE_FORMAT_S16 = 1
AVBIN_SAMPLE_FORMAT_S24 = 2
AVBIN_SAMPLE_FORMAT_S32 = 3
AVBIN_SAMPLE_FORMAT_FLOAT = 4
AVbinSampleFormat = ctypes.c_int
AVBIN_LOG_QUIET = -8
AVBIN_LOG_PANIC = 0
AVBIN_LOG_FATAL = 8
AVBIN_LOG_ERROR = 16
AVBIN_LOG_WARNING = 24
AVBIN_LOG_INFO = 32
AVBIN_LOG_VERBOSE = 40
AVBIN_LOG_DEBUG = 48
AVbinLogLevel = ctypes.c_int
AVbinFileP = ctypes.c_void_p
AVbinStreamP = ctypes.c_void_p
Timestamp = ctypes.c_int64
class AVbinFileInfo(ctypes.Structure):
_fields_ = [
('structure_size', ctypes.c_size_t),
('n_streams', ctypes.c_int),
('start_time', Timestamp),
('duration', Timestamp),
('title', ctypes.c_char * 512),
('author', ctypes.c_char * 512),
('copyright', ctypes.c_char * 512),
('comment', ctypes.c_char * 512),
('album', ctypes.c_char * 512),
('year', ctypes.c_int),
('track', ctypes.c_int),
('genre', ctypes.c_char * 32),
]
class _AVbinStreamInfoVideo8(ctypes.Structure):
_fields_ = [
('width', ctypes.c_uint),
('height', ctypes.c_uint),
('sample_aspect_num', ctypes.c_uint),
('sample_aspect_den', ctypes.c_uint),
('frame_rate_num', ctypes.c_uint),
('frame_rate_den', ctypes.c_uint),
]
class _AVbinStreamInfoAudio8(ctypes.Structure):
_fields_ = [
('sample_format', ctypes.c_int),
('sample_rate', ctypes.c_uint),
('sample_bits', ctypes.c_uint),
('channels', ctypes.c_uint),
]
class _AVbinStreamInfoUnion8(ctypes.Union):
_fields_ = [
('video', _AVbinStreamInfoVideo8),
('audio', _AVbinStreamInfoAudio8),
]
class AVbinStreamInfo8(ctypes.Structure):
_fields_ = [
('structure_size', ctypes.c_size_t),
('type', ctypes.c_int),
('u', _AVbinStreamInfoUnion8)
]
class AVbinPacket(ctypes.Structure):
_fields_ = [
('structure_size', ctypes.c_size_t),
('timestamp', Timestamp),
('stream_index', ctypes.c_int),
('data', ctypes.POINTER(ctypes.c_uint8)),
('size', ctypes.c_size_t),
]
AVbinLogCallback = ctypes.CFUNCTYPE(None, ctypes.c_char_p, ctypes.c_int,
ctypes.c_char_p)
av.avbin_get_version.restype = ctypes.c_int
av.avbin_get_ffmpeg_revision.restype = ctypes.c_int
av.avbin_get_audio_buffer_size.restype = ctypes.c_size_t
av.avbin_have_feature.restype = ctypes.c_int
av.avbin_have_feature.argtypes = [ctypes.c_char_p]
av.avbin_init.restype = AVbinResult
av.avbin_set_log_level.restype = AVbinResult
av.avbin_set_log_level.argtypes = [AVbinLogLevel]
av.avbin_set_log_callback.argtypes = [AVbinLogCallback]
av.avbin_open_filename.restype = AVbinFileP
av.avbin_open_filename.argtypes = [ctypes.c_char_p]
av.avbin_close_file.argtypes = [AVbinFileP]
av.avbin_seek_file.argtypes = [AVbinFileP, Timestamp]
av.avbin_file_info.argtypes = [AVbinFileP, ctypes.POINTER(AVbinFileInfo)]
av.avbin_stream_info.argtypes = [AVbinFileP, ctypes.c_int,
ctypes.POINTER(AVbinStreamInfo8)]
av.avbin_open_stream.restype = ctypes.c_void_p
av.avbin_open_stream.argtypes = [AVbinFileP, ctypes.c_int]
av.avbin_close_stream.argtypes = [AVbinStreamP]
av.avbin_read.argtypes = [AVbinFileP, ctypes.POINTER(AVbinPacket)]
av.avbin_read.restype = AVbinResult
av.avbin_decode_audio.restype = ctypes.c_int
av.avbin_decode_audio.argtypes = [AVbinStreamP,
ctypes.c_void_p, ctypes.c_size_t,
ctypes.c_void_p, ctypes.POINTER(ctypes.c_int)]
av.avbin_decode_video.restype = ctypes.c_int
av.avbin_decode_video.argtypes = [AVbinStreamP,
ctypes.c_void_p, ctypes.c_size_t,
ctypes.c_void_p]
if True:
# TODO: lock all avbin calls. not clear from ffmpeg documentation if this
    # is necessary. leaving it on while debugging to rule out the possibility
# of a problem.
def synchronize(func, lock):
def f(*args):
lock.acquire()
result = func(*args)
lock.release()
return result
return f
avbin_lock = threading.Lock()
for name in dir(av):
if name.startswith('avbin_'):
setattr(av, name, synchronize(getattr(av, name), avbin_lock))
def get_version():
return av.avbin_get_version()
class AVbinException(MediaFormatException):
pass
def timestamp_from_avbin(timestamp):
return float(timestamp) / 1000000
def timestamp_to_avbin(timestamp):
return int(timestamp * 1000000)
class VideoPacket:
_next_id = 0
def __init__(self, packet):
self.timestamp = timestamp_from_avbin(packet.timestamp)
self.data = (ctypes.c_uint8 * packet.size)()
self.size = packet.size
ctypes.memmove(self.data, packet.data, self.size)
# Decoded image. 0 == not decoded yet; None == Error or discarded
self.image = 0
self.id = self._next_id
self.__class__._next_id += 1
class AVbinSource(StreamingSource):
def __init__(self, filename, file=None):
if file is not None:
raise NotImplementedError(
'Loading from file stream is not supported')
self._file = av.avbin_open_filename(asbytes_filename(filename))
if not self._file:
raise AVbinException('Could not open "%s"' % filename)
self._video_stream = None
self._video_stream_index = -1
self._audio_stream = None
self._audio_stream_index = -1
self._audio_packet_size = 0
file_info = AVbinFileInfo()
file_info.structure_size = ctypes.sizeof(file_info)
av.avbin_file_info(self._file, ctypes.byref(file_info))
self._duration = timestamp_from_avbin(file_info.duration)
self.info = SourceInfo()
self.info.title = file_info.title
self.info.author = file_info.author
self.info.copyright = file_info.copyright
self.info.comment = file_info.comment
self.info.album = file_info.album
self.info.year = file_info.year
self.info.track = file_info.track
self.info.genre = file_info.genre
# Pick the first video and audio streams found, ignore others.
for i in range(file_info.n_streams):
info = AVbinStreamInfo8()
info.structure_size = ctypes.sizeof(info)
av.avbin_stream_info(self._file, i, info)
if (info.type == AVBIN_STREAM_TYPE_VIDEO and
not self._video_stream):
stream = av.avbin_open_stream(self._file, i)
if not stream:
continue
self.video_format = VideoFormat(
width=info.u.video.width,
height=info.u.video.height)
if info.u.video.sample_aspect_num != 0:
self.video_format.sample_aspect = (
float(info.u.video.sample_aspect_num) /
info.u.video.sample_aspect_den)
if _have_frame_rate:
self.video_format.frame_rate = (
float(info.u.video.frame_rate_num) /
info.u.video.frame_rate_den)
self._video_stream = stream
self._video_stream_index = i
elif (info.type == AVBIN_STREAM_TYPE_AUDIO and
info.u.audio.sample_bits in (8, 16) and
info.u.audio.channels in (1, 2) and
not self._audio_stream):
stream = av.avbin_open_stream(self._file, i)
if not stream:
continue
self.audio_format = AudioFormat(
channels=info.u.audio.channels,
sample_size=info.u.audio.sample_bits,
sample_rate=info.u.audio.sample_rate)
self._audio_stream = stream
self._audio_stream_index = i
self._packet = AVbinPacket()
self._packet.structure_size = ctypes.sizeof(self._packet)
self._packet.stream_index = -1
self._events = list()
# Timestamp of last video packet added to decoder queue.
self._video_timestamp = 0
self._buffered_audio_data = list()
if self.audio_format:
self._audio_buffer = \
(ctypes.c_uint8 * av.avbin_get_audio_buffer_size())()
if self.video_format:
self._video_packets = list()
self._decode_thread = WorkerThread()
self._decode_thread.start()
self._condition = threading.Condition()
def __del__(self):
if _debug:
print('del avbin source')
try:
if self._video_stream:
av.avbin_close_stream(self._video_stream)
if self._audio_stream:
av.avbin_close_stream(self._audio_stream)
av.avbin_close_file(self._file)
except:
pass
    # TODO: call this / add to source api
def delete(self):
if self.video_format:
self._decode_thread.stop()
def seek(self, timestamp):
if _debug:
print('AVbin seek', timestamp)
av.avbin_seek_file(self._file, timestamp_to_avbin(timestamp))
self._audio_packet_size = 0
del self._events[:]
del self._buffered_audio_data[:]
if self.video_format:
self._video_timestamp = 0
self._condition.acquire()
for packet in self._video_packets:
packet.image = None
self._condition.notify()
self._condition.release()
del self._video_packets[:]
self._decode_thread.clear_jobs()
def _get_packet(self):
"""Read a packet into self._packet.
Returns True if OK, False if no more packets are in stream.
"""
return av.avbin_read(self._file, self._packet) == AVBIN_RESULT_OK
def _process_packet(self):
"""Returns (packet_type, packet)
where packet_type = 'video' or 'audio'; and packet is VideoPacket or
AudioData. In either case, packet is buffered or queued for decoding;
no further action is necessary.
Returns (None, None) if packet was neither type.
"""
if self._packet.stream_index == self._video_stream_index:
if self._packet.timestamp < 0:
                # TODO: AVbin needs a hack to decode timestamps for B frames in
# some containers (OGG?). See
# http://www.dranger.com/ffmpeg/tutorial05.html
# For now we just drop these frames.
return None, None
video_packet = VideoPacket(self._packet)
if _debug:
print('Created and queued frame %d (%f)' %
(video_packet.id, video_packet.timestamp))
self._video_timestamp = max(self._video_timestamp,
video_packet.timestamp)
self._video_packets.append(video_packet)
self._decode_thread.put_job(
lambda: self._decode_video_packet(video_packet))
return 'video', video_packet
elif self._packet.stream_index == self._audio_stream_index:
audio_data = self._decode_audio_packet()
if audio_data:
if _debug:
print('Got an audio packet at', audio_data.timestamp)
self._buffered_audio_data.append(audio_data)
return 'audio', audio_data
return None, None
def get_audio_data(self, bytes_):
try:
audio_data = self._buffered_audio_data.pop(0)
audio_data_timeend = audio_data.timestamp + audio_data.duration
except IndexError:
audio_data = None
audio_data_timeend = self._video_timestamp + 1
if _debug:
print('get_audio_data')
have_video_work = False
# Keep reading packets until we have an audio packet and all the
# associated video packets have been enqueued on the decoder thread.
while not audio_data or (
self._video_stream and self._video_timestamp < audio_data_timeend):
if not self._get_packet():
break
packet_type, packet = self._process_packet()
if packet_type == 'video':
have_video_work = True
elif not audio_data and packet_type == 'audio':
audio_data = self._buffered_audio_data.pop(0)
if _debug:
print(
'Got requested audio packet at', audio_data.timestamp)
audio_data_timeend = audio_data.timestamp + audio_data.duration
if have_video_work:
# Give decoder thread a chance to run before we return this audio
# data.
time.sleep(0)
if not audio_data:
if _debug:
print('get_audio_data returning None')
return None
while self._events and self._events[0].timestamp <= audio_data_timeend:
event = self._events.pop(0)
if event.timestamp >= audio_data.timestamp:
event.timestamp -= audio_data.timestamp
audio_data.events.append(event)
if _debug:
print('get_audio_data returning ts %f with events' %
audio_data.timestamp, audio_data.events)
print('remaining events are', self._events)
return audio_data
def _decode_audio_packet(self):
packet = self._packet
size_out = ctypes.c_int(len(self._audio_buffer))
while True:
audio_packet_ptr = ctypes.cast(packet.data, ctypes.c_void_p)
audio_packet_size = packet.size
used = av.avbin_decode_audio(self._audio_stream,
audio_packet_ptr, audio_packet_size,
self._audio_buffer, size_out)
if used < 0:
self._audio_packet_size = 0
break
audio_packet_ptr.value += used
audio_packet_size -= used
if size_out.value <= 0:
continue
            # TODO: how did this ever work? replaced with an explicit copy below
            # buffer = ctypes.string_at(self._audio_buffer, size_out)
            # TODO: the explicit copy duplicates the data; the old code never
            # crashed, so maybe something is being missed here
buffer = ctypes.create_string_buffer(size_out.value)
ctypes.memmove(buffer, self._audio_buffer, len(buffer))
buffer = buffer.raw
duration = float(len(buffer)) / self.audio_format.bytes_per_second
self._audio_packet_timestamp = \
timestamp = timestamp_from_avbin(packet.timestamp)
return AudioData(buffer, len(buffer), timestamp, duration, list())
def _decode_video_packet(self, packet):
width = self.video_format.width
height = self.video_format.height
pitch = width * 3
buffer = (ctypes.c_uint8 * (pitch * height))()
result = av.avbin_decode_video(self._video_stream,
packet.data, packet.size,
buffer)
if result < 0:
image_data = None
else:
image_data = image.ImageData(width, height, 'RGB', buffer, pitch)
packet.image = image_data
# Notify get_next_video_frame() that another one is ready.
self._condition.acquire()
self._condition.notify()
self._condition.release()
def _ensure_video_packets(self):
"""Process packets until a video packet has been queued (and begun
decoding). Return False if EOS.
"""
if not self._video_packets:
if _debug:
print('No video packets...')
# Read ahead until we have another video packet
self._get_packet()
packet_type, _ = self._process_packet()
while packet_type and packet_type != 'video':
self._get_packet()
packet_type, _ = self._process_packet()
if not packet_type:
return False
if _debug:
print('Queued packet', _)
return True
def get_next_video_timestamp(self):
if not self.video_format:
return
if self._ensure_video_packets():
if _debug:
print(
'Next video timestamp is', self._video_packets[0].timestamp)
return self._video_packets[0].timestamp
def get_next_video_frame(self):
if not self.video_format:
return
if self._ensure_video_packets():
packet = self._video_packets.pop(0)
if _debug:
print('Waiting for', packet)
# Block until decoding is complete
self._condition.acquire()
while packet.image == 0:
self._condition.wait()
self._condition.release()
if _debug:
print('Returning', packet)
return packet.image
av.avbin_init()
if pyglet.options['debug_media']:
_debug = True
av.avbin_set_log_level(AVBIN_LOG_DEBUG)
else:
_debug = False
av.avbin_set_log_level(AVBIN_LOG_QUIET)
_have_frame_rate = av.avbin_have_feature(asbytes('frame_rate'))
| bsd-3-clause | -6,378,432,242,768,156,000 | 33.235993 | 83 | 0.583635 | false |
anetasie/sherpa | sherpa/astro/xspec/utils.py | 3 | 5433 | #
# Copyright (C) 2017, 2018, 2019 Smithsonian Astrophysical Observatory
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from distutils.version import LooseVersion
from . import _xspec
__all__ = ['ModelMeta', 'include_if', 'version_at_least']
XSPEC_VERSION = LooseVersion(_xspec.get_xsversion())
class ModelMeta(type):
"""
Metaclass for xspec models. The __function__ member in xspec model classes is seamlessly
transformed from a string representing the low level function in the sherpa xspec extension
into a proper call, taking into account error cases (e.g. the function cannot be found in the
xspec extension at runtime).
"""
NOT_COMPILED_FUNCTION_MESSAGE = "Calling an xspec function that was not compiled"
def __init__(cls, *args, **kwargs):
if hasattr(cls, '__function__'):
try:
cls._calc = getattr(_xspec, cls.__function__)
except AttributeError:
# Error handling: the model meets the condition expressed in the decorator
# but the low level function is not included in the xspec extension
cls._calc = ModelMeta._not_compiled
# The `__function__` member signals that `cls` is a model that needs the `_calc` method
# to be generated.
# If the class does not have the `__function__` member, the we assume the class provides
# a `_calc` method itself, or it does not need it to begin with. This is the case for
# some classes extending `XSModel` but that are base classes themselves,
# like `XSAdditiveModel`, or they have a more complex `_calc` implementation, like `XSTableModel`.
# In principle there is room for mistakes, i.e. a proper model class might be defined without
# the `__function__` member. Tests should make sure this is not the case. `test_xspec_models`
# is indeed such a test, because it calls all models making sure they are usable. A model without
# the `_calc_ method or the `__function__` member would fail the test.
# The alternative would be to include more logic to handle the error cases, but that would require
# more tests, making this choice impractical.
super(ModelMeta, cls).__init__(*args, **kwargs)
@staticmethod
def _not_compiled(*args, **kwargs):
raise AttributeError(ModelMeta.NOT_COMPILED_FUNCTION_MESSAGE)
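# For illustration (hypothetical model name): a subclass declaring
# __function__ = 'C_apec' gets cls._calc bound to _xspec.C_apec at class
# creation, or to ModelMeta._not_compiled if that symbol is absent.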
def equal_or_greater_than(version_string):
"""
Utility function that compares a version string with the version of the current xspec instance.
    For better or worse the current xspec instance is not cached across calls. It probably could be,
    but it just seems safer not to, and any overhead is incurred at model initialization only.
The comparison is made in terms of the `distutils.version.LooseVersion` class.
:param version_string: the version against which to compare the current xspec version
:return: `True` if the version of xspec is equal or greater than the argument, `False` otherwise
"""
return XSPEC_VERSION >= LooseVersion(version_string)
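# e.g. if the runtime XSPEC is 12.10.1, equal_or_greater_than('12.9.0') is True
# while equal_or_greater_than('12.11.0') is False.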
class include_if():
"""
Generic decorator for including xspec models conditionally. It takes a boolean condition as an argument.
If the boolean condition is not met, then the model is not included, and its function is replaced with a
dummy function that throws an exception.
If the model is disabled, then its class's `version_enabled` attribute is set to `False`.
"""
DISABLED_MODEL_MESSAGE = "Model {} is disabled because of an unmet condition"
def __init__(self, condition):
self.condition = condition
def __call__(self, model_class):
if not self.condition:
model_class.version_enabled = False
model_class._calc = self._disabled(self.get_message(model_class))
return model_class
def get_message(self, model_class):
return self.DISABLED_MODEL_MESSAGE.format(model_class.__name__)
@staticmethod
def _disabled(message):
def wrapped(*args, **kwargs):
raise AttributeError(message)
return wrapped
class version_at_least(include_if):
"""
Decorator which takes a version string as an argument and enables a model only if
the xspec version detected at runtime is equal or greater than the one provided to the decorator.
"""
DISABLED_MODEL_MESSAGE = "Model {} is disabled because XSPEC version >= {} is required"
def __init__(self, version_string):
include_if.__init__(self, equal_or_greater_than(version_string))
self.version_string = version_string
def get_message(self, model_class):
return self.DISABLED_MODEL_MESSAGE.format(model_class.__name__, self.version_string)
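# Illustrative sketch (not part of this module): gating a model on XSPEC 12.9.1,
# assuming an XSAdditiveModel base and a compiled 'C_newmodel' entry point:
#
#   @version_at_least("12.9.1")
#   class XSnewmodel(XSAdditiveModel, metaclass=ModelMeta):
#       __function__ = 'C_newmodel'
#
# With an older runtime XSPEC, XSnewmodel.version_enabled is False and calling
# the model raises AttributeError with the message above.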
| gpl-3.0 | 8,876,311,769,032,798,000 | 42.814516 | 108 | 0.692987 | false |
KorolevskyMax/TestFrameworkTemplate | pages/base_page.py | 1 | 1954 | from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.common.exceptions import NoSuchElementException
from webium import BasePage as WebiumBasePage, Find
class BasePage(WebiumBasePage):
url_path = None
a_tag = "//a[contains(.,'{link_text}')]"
login_link = Find(by=By.XPATH, value=a_tag.format(link_text='Sign in'))
logout_btn = Find(by=By.XPATH, value="//button[contains(.,'Sign out')]")
account_options_btn = Find(by=By.XPATH, value=a_tag.replace('.', '@aria-label').format(link_text='View profile and more'))
loader_xpath = "//div[@id='prestatus']"
def clear_send_keys(self, element_name, kwargs):
value = kwargs.get(element_name)
element = getattr(self, element_name)
element.clear()
element.send_keys(value)
def hover(self, element):
hov = ActionChains(self._driver).move_to_element(element)
hov.perform()
self.wait_for_loading()
self.wait_for_loader_disappear()
def get_login_status(self):
try:
self.account_options_btn.click()
            return 'logged in' if self.logout_btn.is_displayed() else 'logged out'
except NoSuchElementException:
return 'logged out'
def wait_for_loading(self, seconds=180):
wait = WebDriverWait(self._driver, seconds)
wait.until(lambda x: self._driver.execute_script('return jQuery.active == 0') is True)
def replace_bad_elements(self, css_locator):
self._driver.execute_script("$('{}').remove()".format(css_locator))
def is_loader_displayed(self, *args):
return self._driver.find_element_by_xpath(self.loader_xpath).is_displayed()
def wait_for_loader_disappear(self):
WebDriverWait(self._driver, timeout=500).until_not(
self.is_loader_displayed, "Timeout waiting for loader disappear")
| mit | 8,327,454,099,560,124,000 | 38.877551 | 126 | 0.669396 | false |
eltoncarr/tubular | tubular/scripts/retrieve_base_ami.py | 1 | 2964 | #! /usr/bin/env python3
"""
Command-line script used to retrieve the last base AMI ID used for an environment/deployment/play.
"""
# pylint: disable=invalid-name
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from os import path
import io
import sys
import logging
import traceback
import click
import yaml
# Add top-level module path to sys.path before importing tubular code.
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
from tubular import ec2 # pylint: disable=wrong-import-position
logging.basicConfig(level=logging.INFO)
@click.command()
@click.option(
'--environment', '-e',
help='Environment for AMI, e.g. prod, stage',
)
@click.option(
'--deployment', '-d',
help='Deployment for AMI e.g. edx, edge',
)
@click.option(
'--play', '-p',
help='Play for AMI, e.g. edxapp, insights, discovery',
)
@click.option(
'--override',
help='Override AMI id to use',
)
@click.option(
'--out_file',
help='Output file for the AMI information yaml.',
default=None
)
def retrieve_base_ami(environment, deployment, play, override, out_file):
"""
Method used to retrieve the last base AMI ID used for an environment/deployment/play.
"""
has_edp = environment is not None or deployment is not None or play is not None
if has_edp and override is not None:
logging.error("--environment, --deployment and --play are mutually exclusive with --override.")
sys.exit(1)
if not has_edp and override is None:
logging.error("Either --environment, --deployment and --play or --override are required.")
sys.exit(1)
try:
if override:
ami_id = override
else:
ami_id = ec2.active_ami_for_edp(environment, deployment, play)
ami_info = {
# This is passed directly to an ansible script that expects a base_ami_id variable
'base_ami_id': ami_id,
# This matches the key produced by the create_ami.yml ansible play to make
# generating release pages easier.
'ami_id': ami_id,
}
ami_info.update(ec2.tags_for_ami(ami_id))
logging.info("Found active AMI ID for {env}-{dep}-{play}: {ami_id}".format(
env=environment, dep=deployment, play=play, ami_id=ami_id
))
if out_file:
with io.open(out_file, 'w') as stream:
yaml.safe_dump(ami_info, stream, default_flow_style=False, explicit_start=True)
else:
print(yaml.safe_dump(ami_info, default_flow_style=False, explicit_start=True))
except Exception as err: # pylint: disable=broad-except
traceback.print_exc()
click.secho('Error finding base AMI ID.\nMessage: {}'.format(err), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
retrieve_base_ami() # pylint: disable=no-value-for-parameter
| agpl-3.0 | 7,992,221,647,808,419,000 | 30.2 | 103 | 0.643387 | false |
tim-janik/tobin | LogParser.py | 1 | 3770 | # Licensed GNU Affero GPL v3 or later: http://www.gnu.org/licenses/agpl.html
import sys, calendar, re, heapq, tempfile
_month_dict = { 'Jan' : 1, 'Feb' : 2, 'Mar' : 3, 'Apr' : 4, 'May' : 5, 'Jun' : 6,
'Jul' : 7, 'Aug' : 8, 'Sep' : 9, 'Oct' : 10, 'Nov' : 11, 'Dec' : 12 }
def _parse_logtime (string):
# e.g. string = '07/Aug/2013:21:14:18 +0200'
tup = (int (string[7:11]), _month_dict[string[3:6]], int (string[:2]),
int (string[12:14]), int (string[15:17]), int (string[18:20]))
tzone = int (string[22:24]) * 3600 + int (string[24:26]) * 60 # TZ offset in seconds
seconds = calendar.timegm (tup) # this is faster than using strptime
if string[21] == '+':
seconds -= tzone
else:
seconds += tzone
return seconds # unix time stamp in UTC
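# e.g. _parse_logtime ('07/Aug/2013:21:14:18 +0200') == 1375902858,
# i.e. 2013-08-07 19:14:18 UTC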
def _timestamp_from_logline (line):
b1 = line.find ('[')
b2 = line.find (']', b1)
return _parse_logtime (line[b1+1:b2]) if b2 - b1 == 27 else -1
def _log_file_sorter (logfile):
sorted_files, lines = [], []
for line in logfile:
line = '%08x|' % _timestamp_from_logline (line) + line
lines.append (line)
if len (lines) >= 1000000:
lines.sort()
f = tempfile.TemporaryFile()
f.writelines (lines)
f.seek (0)
sorted_files.append (f)
del lines[:]
if lines:
lines.sort()
f = tempfile.TemporaryFile()
f.writelines (lines)
f.seek (0)
sorted_files.append (f)
return sorted_files
def log_file_sort_pool (filelist):
sorted_files = []
for ff in filelist:
sorted_files += _log_file_sorter (open (ff))
return sorted_files
def log_file_parse_pool (sorted_files):
s = r'\s+' # separator
ip = r'([0-9.abcdef:ABCDEF]{7,39})' # ip4/ip6 addresses
#idt = r'([\w\d/.$+-]+)' # unquoted identifier (too strict for some corrupted user names)
idt = r'([^\s]+)' # space separated string
num = r'([0-9]{1,9})' # integer
xnum = r'(-|[0-9]{1,9})' # maybe integer
dt = r'\[\d\d/\w\w\w/\d{4}:\d\d:\d\d:\d\d\s[+-]\d{4}\]' # [dd/MMM/yyyy:hh:mm:ss +-zone]
#qx = r'"((?:[^"\\]|\\.)*)"' # quoted text (slow), allows escaped quotes
qx = r'"([^"\\]*(?:[^"\\]|\\.)*)"' # fast quoted text, unconditionalize/speed up the common case
logpattern = re.compile (ip + s + idt + s + idt + s + dt + s + qx + s + num + s + xnum + '(?:' + s + qx + s + qx + ')?')
urlpattern = re.compile (r'([A-Z]+)\s(.*)\s(HTTP[0-9./]*)$')
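  # An illustrative combined-log line this pattern matches (made-up values):
  # 1.2.3.4 - frank [07/Aug/2013:21:14:18 +0200] "GET /a.html?x=1 HTTP/1.1" 200 2326 "http://ref/" "UA/1.0"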
for line in heapq.merge (*sorted_files):
# extract timestamp from line in sorted pool
timestamp, line = int (line[:8], 16), line[9:]
# parse common log format
m = logpattern.match (line)
u = urlpattern.match (m.group (3 + 1)) if m else None
if not m or not u:
print >>sys.stderr, '%s: malformed input: %s' % (sys.argv[0], line.rstrip())
continue
hit = m.groups()
time_stamp_usec = 1000000 * timestamp
http_status = int (hit[4]) # http_status
tx_bytes = 0 if hit[5] == '-' else int (hit[5]) # tx_bytes
referrer = '' if hit[6] == '-' else hit[6] # referrer
uagent = '' if hit[7] == '-' else hit[7] # uagent
# split request URL
method = u.group (1)
url = u.group (2)
protocol = u.group (3)
qpos = url.find ('?')
resource, query = (url[:qpos], url[qpos:]) if qpos >= 0 else (url, '')
# yield result
yield (hit[0], hit[1], hit[2], time_stamp_usec, method, resource, query, protocol, http_status, tx_bytes, referrer, uagent)
| agpl-3.0 | 5,767,667,281,494,441,000 | 44.421687 | 128 | 0.513793 | false |
InUrSys/PescArt2.0 | src/Reports/Relatorio_SaidasPorProvinica.py | 1 | 1633 | '''
Created on 01/02/2018
@author: chernomirdinmacuvele
'''
import ReportAPI
from ui_Relatorio_SaidasPorProvincia import Ui_Form
import FuncSQL
from PyQt5.Qt import QPlainTextEdit, QComboBox
class Relatorio_SaidasPorProvincia(ReportAPI.JasperReports, Ui_Form):
def __init__(self, parent=None, dbcon=None):
super(Relatorio_SaidasPorProvincia, self).__init__(parent)
self.setupUi(self)
self.dbcon = dbcon
self.relatorio = 'Saidas_Distrito'
self.setForm()
def setForm(self):
self.LEFormato.setText(self.getFormat())
self.getInfoReport()
self.setProvincias()
self.PBGerar.clicked.connect(self.generateReport)
def getInfoReport(self):
quer = "SELECT nome, descricao FROM public.prc_relatorios where nome = '{nome}'".format(nome = self.relatorio)
bok, valOut = FuncSQL.anySelectScript(scpt= quer)
if bok:
self.LENome.setText(str(valOut[0]))
self.PTEDescricao.setPlainText(str(valOut[1]))
def setProvincias(self):
quer = "select distinct provincia from view_saidas_provincias"
lstOut = []
bok, valOut = FuncSQL.multLineSelect(scpt=quer)
if bok:
for val in valOut:
lstOut.append(val[0])
self.CBProvincia.addItems(lstOut)
def generateReport(self):
file = self.LENome.text()
formato = self.LEFormato.text().lower()
provincia = [self.CBProvincia.currentText()]
self.getTemplateFile(file=file, format=formato, parametro=provincia) | gpl-3.0 | 6,905,557,344,856,947,000 | 31.68 | 118 | 0.63319 | false |
voanna/Deep-Features-or-Not | src/extract_features_no_finetune_temperature.py | 1 | 1123 | #!/usr/bin/env python
from __future__ import print_function
from extractCaffeActivations import features
import argparse
import HONHelpers as hon
import itertools
import os
import glob
layers = [
'pool1',
'pool2',
'pool3',
'pool4',
'pool5',
'fc6',
'fc7',
]
parser = argparse.ArgumentParser()
parser.add_argument("job_id", help="indexes the job of extracting features", type=int)
args = parser.parse_args()
job_config_list = [pair for pair in itertools.product(hon.webcams, ['train', 'test'])]
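# e.g. assuming hon.webcams == ['cam0', 'cam1'], this yields
# [('cam0', 'train'), ('cam0', 'test'), ('cam1', 'train'), ('cam1', 'test')]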
# grid engine jobs start with 1
job_id = args.job_id - 1
job_config = job_config_list[job_id]
webcam, split = job_config
print(webcam, split)
finetune_root = os.path.join(hon.experiment_root, 'finetune-temperature', 'no-finetune-features')
img_fnames = sorted(glob.glob(os.path.join(hon.hon_data_root, webcam, 'imgs_align', '*' + split + '*.png')))
deploy = hon.VGG16_deploy_path
weights = hon.VGG16_caffemodel_path
layer = 'fc7'
save_directory = os.path.join(finetune_root, webcam)
_ = features(deploy, weights, img_fnames, layer, save_directory, layers, mean_npy = None)
| mit | -2,117,628,950,225,300,000 | 23.413043 | 108 | 0.693678 | false |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Bio/motifs/meme.py | 1 | 11510 | # Copyright 2008 by Bartek Wilczynski
# Adapted from Bio.MEME.Parser by Jason A. Hackney. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
from __future__ import print_function
from Bio.Alphabet import IUPAC
from Bio import Seq
from Bio import motifs
def read(handle):
"""Parses the text output of the MEME program into a meme.Record object.
Example:
>>> from Bio.motifs import meme
>>> with open("meme.output.txt") as f:
... record = meme.read(f)
>>> for motif in record:
... for instance in motif.instances:
... print(instance.motif_name, instance.sequence_name, instance.strand, instance.pvalue)
"""
record = Record()
__read_version(record, handle)
__read_datafile(record, handle)
__read_alphabet(record, handle)
__read_sequences(record, handle)
__read_command(record, handle)
for line in handle:
if line.startswith('MOTIF 1'):
break
else:
raise ValueError('Unexpected end of stream')
alphabet = record.alphabet
revcomp = 'revcomp' in record.command
while True:
motif_number, length, num_occurrences, evalue = __read_motif_statistics(line)
name = __read_motif_name(handle)
instances = __read_motif_sequences(handle, name, alphabet, length, revcomp)
motif = Motif(alphabet, instances)
motif.length = length
motif.num_occurrences = num_occurrences
motif.evalue = evalue
motif.name = name
record.append(motif)
assert len(record)==motif_number
__skip_unused_lines(handle)
try:
line = next(handle)
except StopIteration:
raise ValueError('Unexpected end of stream: Expected to find new motif, or the summary of motifs')
if line.startswith("SUMMARY OF MOTIFS"):
break
if not line.startswith('MOTIF'):
raise ValueError("Line does not start with 'MOTIF':\n%s" % line)
return record
class Motif(motifs.Motif):
"""A subclass of Motif used in parsing MEME (and MAST) output.
This subclass defines functions and data specific to MEME motifs.
This includes the motif name, the evalue for a motif, and its number
of occurrences.
"""
def __init__(self, alphabet=None, instances=None):
motifs.Motif.__init__(self, alphabet, instances)
self.evalue = 0.0
self.num_occurrences = 0
self.name = None
class Instance(Seq.Seq):
"""A class describing the instances of a MEME motif, and the data thereof.
"""
def __init__(self, *args, **kwds):
Seq.Seq.__init__(self, *args, **kwds)
self.sequence_name = ""
self.start = 0
self.pvalue = 1.0
self.strand = 0
self.length = 0
self.motif_name = ""
class Record(list):
"""A class for holding the results of a MEME run.
A meme.Record is an object that holds the results from running
MEME. It implements no methods of its own.
The meme.Record class inherits from list, so you can access individual
motifs in the record by their index. Alternatively, you can find a motif
by its name:
>>> from Bio import motifs
>>> with open("meme.output.txt") as f:
... record = motifs.parse(f, 'MEME')
>>> motif = record[0]
>>> print(motif.name)
Motif 1
>>> motif = record['Motif 1']
>>> print(motif.name)
Motif 1
"""
def __init__(self):
"""__init__ (self)"""
self.version = ""
self.datafile = ""
self.command = ""
self.alphabet = None
self.sequences = []
def __getitem__(self, key):
if isinstance(key, str):
for motif in self:
if motif.name == key:
return motif
else:
return list.__getitem__(self, key)
# Everything below is private
def __read_version(record, handle):
for line in handle:
if line.startswith('MEME version'):
break
else:
raise ValueError("Improper input file. File should contain a line starting MEME version.")
line = line.strip()
ls = line.split()
record.version = ls[2]
def __read_datafile(record, handle):
for line in handle:
if line.startswith('TRAINING SET'):
break
else:
raise ValueError("Unexpected end of stream: 'TRAINING SET' not found.")
try:
line = next(handle)
except StopIteration:
raise ValueError("Unexpected end of stream: Expected to find line starting with '****'")
if not line.startswith('****'):
raise ValueError("Line does not start with '****':\n%s" % line)
try:
line = next(handle)
except StopIteration:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'DATAFILE'")
if not line.startswith('DATAFILE'):
raise ValueError("Line does not start with 'DATAFILE':\n%s" % line)
line = line.strip()
line = line.replace('DATAFILE= ', '')
record.datafile = line
def __read_alphabet(record, handle):
try:
line = next(handle)
except StopIteration:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'ALPHABET'")
if not line.startswith('ALPHABET'):
raise ValueError("Line does not start with 'ALPHABET':\n%s" % line)
line = line.strip()
line = line.replace('ALPHABET= ', '')
if line == 'ACGT':
al = IUPAC.unambiguous_dna
else:
al = IUPAC.protein
record.alphabet = al
def __read_sequences(record, handle):
try:
line = next(handle)
except StopIteration:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'Sequence name'")
if not line.startswith('Sequence name'):
raise ValueError("Line does not start with 'Sequence name':\n%s" % line)
try:
line = next(handle)
except StopIteration:
raise ValueError("Unexpected end of stream: Expected to find line starting with '----'")
if not line.startswith('----'):
raise ValueError("Line does not start with '----':\n%s" % line)
for line in handle:
if line.startswith('***'):
break
line = line.strip()
ls = line.split()
record.sequences.append(ls[0])
if len(ls) == 6:
record.sequences.append(ls[3])
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with '***'")
def __read_command(record, handle):
for line in handle:
if line.startswith('command:'):
break
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'command'")
line = line.strip()
line = line.replace('command: ', '')
record.command = line
def __read_motif_statistics(line):
    # Depending on the version of MEME, this line looks either like
# MOTIF 1 width = 19 sites = 3 llr = 43 E-value = 6.9e-002
# or like
# MOTIF 1 MEME width = 19 sites = 3 llr = 43 E-value = 6.9e-002
words = line.split()
assert words[0]=='MOTIF'
motif_number = int(words[1])
if words[2]=='MEME':
key_values = words[3:]
else:
key_values = words[2:]
keys = key_values[::3]
equal_signs = key_values[1::3]
values = key_values[2::3]
assert keys==['width', 'sites', 'llr', 'E-value']
for equal_sign in equal_signs:
assert equal_sign=='='
length = int(values[0])
num_occurrences = int(values[1])
evalue = float(values[3])
return motif_number, length, num_occurrences, evalue
def __read_motif_name(handle):
for line in handle:
if 'sorted by position p-value' in line:
break
else:
raise ValueError('Unexpected end of stream: Failed to find motif name')
line = line.strip()
words = line.split()
name = " ".join(words[0:2])
return name
def __read_motif_sequences(handle, motif_name, alphabet, length, revcomp):
try:
line = next(handle)
except StopIteration:
raise ValueError('Unexpected end of stream: Failed to find motif sequences')
if not line.startswith('---'):
raise ValueError("Line does not start with '---':\n%s" % line)
try:
line = next(handle)
except StopIteration:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'Sequence name'")
if not line.startswith('Sequence name'):
raise ValueError("Line does not start with 'Sequence name':\n%s" % line)
try:
line = next(handle)
except StopIteration:
raise ValueError('Unexpected end of stream: Failed to find motif sequences')
if not line.startswith('---'):
raise ValueError("Line does not start with '---':\n%s" % line)
instances = []
for line in handle:
if line.startswith('---'):
break
line = line.strip()
words = line.split()
if revcomp:
strand = words.pop(1)
else:
strand = '+'
sequence = words[4]
assert len(sequence) == length
instance = Instance(sequence, alphabet)
instance.motif_name = motif_name
instance.sequence_name = words[0]
instance.start = int(words[1])
instance.pvalue = float(words[2])
instance.strand = strand
instance.length = length
instances.append(instance)
else:
raise ValueError('Unexpected end of stream')
return motifs.Instances(instances, alphabet)
def __skip_unused_lines(handle):
for line in handle:
if line.startswith('log-odds matrix'):
break
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'log-odds matrix'")
for line in handle:
if line.startswith('---'):
break
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with '---'")
for line in handle:
if line.startswith('letter-probability matrix'):
break
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'letter-probability matrix'")
for line in handle:
if line.startswith('---'):
break
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with '---'")
for line in handle:
if line.startswith('Time'):
break
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with 'Time'")
try:
line = next(handle)
except StopIteration:
raise ValueError('Unexpected end of stream: Expected to find blank line')
if line.strip():
raise ValueError("Expected blank line, but got:\n%s" % line)
try:
line = next(handle)
except StopIteration:
raise ValueError("Unexpected end of stream: Expected to find line starting with '***'")
if not line.startswith('***'):
raise ValueError("Line does not start with '***':\n%s" % line)
for line in handle:
if line.strip():
break
else:
raise ValueError("Unexpected end of stream: Expected to find line starting with '***'")
if not line.startswith('***'):
raise ValueError("Line does not start with '***':\n%s" % line)
| apache-2.0 | 5,834,909,790,318,722,000 | 32.753666 | 117 | 0.611295 | false |
CaliOpen/CaliOpen | src/backend/components/py.pi/caliopen_pi/tests/test_spam.py | 1 | 1631 | """Test spam privacy feature extraction."""
import unittest
import os
from caliopen_storage.config import Configuration
if 'CALIOPEN_BASEDIR' in os.environ:
conf_file = '{}/src/backend/configs/caliopen.yaml.template'. \
format(os.environ['CALIOPEN_BASEDIR'])
else:
conf_file = '../../../../../configs/caliopen.yaml.template'
Configuration.load(conf_file, 'global')
from mailbox import Message
from caliopen_pi.features.helpers.spam import SpamScorer
def load_mail(filename):
"""Read email from fixtures of an user."""
    # XXX tofix: provide fixtures in a more convenient way, to avoid this
    # dirty hack with relative paths
dir_path = os.path.dirname(os.path.realpath(__file__))
path = '{}/fixtures'.format(dir_path)
with open('{}/{}'.format(path, filename)) as f:
data = f.read()
return Message(data)
class TestSpamScorer(unittest.TestCase):
"""Test spam scorer."""
def test_spam1(self):
mail = load_mail('spam1.eml')
scorer = SpamScorer(mail)
self.assertFalse(scorer.is_spam)
self.assertEqual(scorer.method, 'score')
self.assertEqual(scorer.score, 0.0)
def test_spam2(self):
mail = load_mail('spam2.eml')
scorer = SpamScorer(mail)
self.assertTrue(scorer.is_spam)
self.assertEqual(scorer.method, 'status')
self.assertEqual(scorer.score, 51.0)
def test_spam3(self):
mail = load_mail('spam3.eml')
scorer = SpamScorer(mail)
self.assertTrue(scorer.is_spam)
self.assertEqual(scorer.method, 'status')
self.assertEqual(scorer.score, 97.0)
| gpl-3.0 | -8,242,803,574,089,239,000 | 29.773585 | 66 | 0.652361 | false |
cmancone/mygrations | mygrations/formats/mysql/file_reader/database.py | 1 | 3315 | import os
import glob
from .reader import reader as sql_reader
from mygrations.formats.mysql.definitions.database import database as database_definition
class database(database_definition):
def __init__(self, strings):
""" Constructor. Accepts a string or list of strings with different possible contents
Strings can be one of the following:
================== ====================
Type Value
================== ====================
string SQL to parse
string A filename to read and to parse as SQL
string A directory name to search for .sql files, parsing each one
list A list of strings, with each element corresponding to any of the above
================== ====================
:param strings: A string or list of strings corresponding to one of the allowed input types
:type strings: string|list
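
        Example (a sketch; the SQL string and path names are hypothetical)::

            db = database("CREATE TABLE users (id INT);")
            db = database(["schema.sql", "migrations/"])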
"""
self._warnings = []
self._errors = []
self._tables = {}
self._rows = []
if isinstance(strings, str):
strings = [strings]
for string in strings:
self.process(string)
self.store_rows_with_tables()
def process(self, string):
""" Processes a string.
Strings can be either SQL to parse, the location of an SQL file, or a directory containing SQL files
:param string: A string containing one of the above
:type string: string
"""
if os.path.isdir(string):
self._process_directory(string)
elif os.path.isfile(string):
self._read(string)
        else:
            # not a file or directory on disk, so treat the string as raw SQL
            self._read(string)
def _process_directory(self, directory):
""" Processes a directory.
Finds all SQL files in the directory and calls `_read()` on them,
which results in the file being parsed and its tables/rows added to the
record of database tables/rows.
        :param directory: The directory to search for .sql files
        :type directory: string
"""
if directory[-1] != os.sep:
directory += os.sep
for filename in glob.glob('%s*.sql' % directory):
self._read(filename)
def _read(self, contents):
""" Processes a file or string of SQL.
Creates a reader object (which accepts files or a string of SQL)
to parse its input and stores the tables/rows in the database
object.
:param contents: A string containing a filename or SQL
:type contents: string
"""
try:
reader = sql_reader()
reader.parse(contents)
except ValueError as e:
print("Error in file %s: %s" % (contents, e))
# pull in all errors and warnings
self._errors.extend(reader.errors)
self._warnings.extend(reader.warnings)
# keep rows and tables separate while we are reading
for (table_name, table) in reader.tables.items():
if table.name in self._tables:
                self._errors.append('Found two definitions for table %s' % table.name)
self._tables[table.name] = table
for (table_name, rows) in reader.rows.items():
self._rows.extend(rows)
| mit | -7,812,337,197,021,024,000 | 32.15 | 108 | 0.569231 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-dns/azure/mgmt/dns/models/srv_record.py | 1 | 1365 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SrvRecord(Model):
"""An SRV record.
:param priority: The priority value for this SRV record.
:type priority: int
:param weight: The weight value for this SRV record.
:type weight: int
:param port: The port value for this SRV record.
:type port: int
:param target: The target domain name for this SRV record.
:type target: str
"""
_attribute_map = {
'priority': {'key': 'priority', 'type': 'int'},
'weight': {'key': 'weight', 'type': 'int'},
'port': {'key': 'port', 'type': 'int'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(self, priority=None, weight=None, port=None, target=None):
super(SrvRecord, self).__init__()
self.priority = priority
self.weight = weight
self.port = port
self.target = target
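
# Example usage (values are illustrative):
#   record = SrvRecord(priority=10, weight=5, port=5060, target="sip.example.com")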
| mit | 6,262,782,456,267,802,000 | 33.125 | 76 | 0.562637 | false |
dodobas/osm-export-tool2 | jobs/presets.py | 1 | 11550 | # -*- coding: utf-8 -*-
import logging
from collections import OrderedDict
from StringIO import StringIO
from lxml import etree
logger = logging.getLogger(__name__)
class PresetParser():
types = {
'node': 'point',
'way': 'line',
'area': 'polygon',
'closedway': 'polygon',
'relation': 'polygon'
}
namespaces = {'ns': 'http://josm.openstreetmap.de/tagging-preset-1.0'}
def __init__(self, preset=None, *args, **kwargs):
self.preset = preset
self.tags = []
def parse(self,):
"""
Reads in the JOSM Preset.
Picks out all <item> elements.
For each <item>, gets the 'type' attribute and maps the
geometry type to the <item>'s 'key' element (tag name).
Ignores <item>'s with no 'type' attribute.
"""
        with open(self.preset) as f:
            xml = f.read()
tree = etree.parse(StringIO(xml))
items = tree.xpath('//ns:item', namespaces=self.namespaces)
for item in items:
self.process_item_and_children(item)
# tags = OrderedDict(sorted(self.tags.items()))
return self.tags
def process_item_and_children(self, item, geometrytype=None):
geometrytypes = None
if item.get('type'):
item_type = item.get('type')
geometrytypes = self.get_geometrytype(item_type)
keys = item.xpath('./ns:key', namespaces=self.namespaces)
groups = []
for group in item.iterancestors(tag='{http://josm.openstreetmap.de/tagging-preset-1.0}group'):
groups.append(group.get('name'))
if len(keys) > 0 and geometrytypes:
key = keys[0].get('key')
value = keys[0].get('value')
tag = {}
tag['name'] = item.get('name')
tag['key'] = key
tag['value'] = value
geom_types = []
for geomtype in geometrytypes:
geom_types.append(geomtype)
tag['geom_types'] = list(set(geom_types))
tag['groups'] = list(reversed(groups))
self.tags.append(tag)
for child in list(item):
self.process_item_and_children(child)
def get_geometrytype(self, item_type):
geometrytypes = []
osmtypes = item_type.split(',')
for osmtype in osmtypes:
geometrytypes.append(self.types[osmtype])
return geometrytypes
def build_hdm_preset_dict(self, ):
hdm = {}
xml = StringIO(open(self.preset).read())
tree = etree.parse(xml)
groups = tree.xpath('./ns:group', namespaces=self.namespaces)
for group in groups:
name = group.get('name')
group_dict = {}
hdm[name] = group_dict
self._parse_group(group, group_dict)
return OrderedDict(sorted(hdm.items()))
def _parse_group(self, group, group_dict):
items = group.xpath('./ns:item', namespaces=self.namespaces)
for item in items:
item_dict = {}
name = item.get('name')
types = item.get('type') # get the type attr on the item element
            if types is None:
                continue  # skip items with no geom type
geom_types = self.get_geometrytype(types)
keys = item.xpath('./ns:key', namespaces=self.namespaces)
if not len(keys) > 0:
continue
key = keys[0]
item_dict['displayName'] = name
item_dict['tag'] = '{0}:{1}'.format(key.get('key'), key.get('value'))
item_dict['geom'] = geom_types
group_dict[name] = OrderedDict(sorted(item_dict.items()))
groups = group.xpath('./ns:group', namespaces=self.namespaces)
for sub_group in groups:
sub_group_dict = {}
name = sub_group.get('name')
group_dict[name] = sub_group_dict
self._parse_group(sub_group, sub_group_dict)
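
# Example use of PresetParser (a sketch; the preset path is hypothetical):
#
#   parser = PresetParser(preset='/path/to/presets.xml')
#   tags = parser.parse()
#   # -> list of dicts with 'name', 'key', 'value', 'geom_types' and 'groups'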
class UnfilteredPresetParser():
types = {
'node': 'point',
'way': 'line',
'area': 'polygon',
'closedway': 'polygon',
'relation': 'polygon'
}
supported_elements = ['key', 'text', 'combo', 'multiselect', 'check']
namespaces = {'ns': 'http://josm.openstreetmap.de/tagging-preset-1.0'}
def __init__(self, preset=None, *args, **kwargs):
self.preset = preset
self.tags = []
self.keys = []
def parse(self,):
"""
Reads in the JOSM Preset.
Picks out all <item> elements.
For each <item>, gets the 'type' attribute and maps the
geometry type to the <item>'s 'key' attribute (tag name).
Ignores <item>'s with no 'type' attribute.
"""
        with open(self.preset) as f:
            xml = f.read()
tree = etree.parse(StringIO(xml))
items = tree.xpath('//ns:item', namespaces=self.namespaces)
for item in items:
self.process_item_and_children(item)
# tags = OrderedDict(sorted(self.tags.items()))
return self.tags
def process_item_and_children(self, item, geometrytype=None):
geometrytypes = None
if item.get('type'):
item_type = item.get('type')
geometrytypes = self.get_geometrytype(item_type)
elements = item.xpath('./ns:*', namespaces=self.namespaces)
groups = []
for group in item.iterancestors(tag='{http://josm.openstreetmap.de/tagging-preset-1.0}group'):
groups.append(group.get('name'))
if len(elements) > 0 and geometrytypes:
for element in elements:
name = element.xpath('local-name()')
if name in self.supported_elements:
key = element.get('key')
if key in self.keys:
continue # skip key if already parsed
tag = {}
tag['name'] = item.get('name')
tag['key'] = key
tag['value'] = '' # select all not-null values
geom_types = []
for geomtype in geometrytypes:
geom_types.append(geomtype)
tag['geom_types'] = list(set(geom_types))
tag['groups'] = list(reversed(groups))
self.tags.append(tag)
self.keys.append(key)
"""
if len(elements) > 0 and geometrytypes:
for key_ele in elements:
key = key_ele.get('key')
value = key_ele.get('value')
tag = {}
tag['name'] = item.get('name')
tag['key'] = key
tag['value'] = value
geom_types = []
for geomtype in geometrytypes:
geom_types.append(geomtype)
tag['geom_types'] = list(set(geom_types))
tag['groups'] = list(reversed(groups))
self.tags.append(tag)
"""
"""
if keys[0].get('key'):
# get kv pair
key = keys[0].get('key')
value = keys[0].get('value')
tag = {}
tag['name'] = item.get('name')
tag['key'] = key
tag['value'] = value
geom_types = []
for geomtype in geometrytypes:
geom_types.append(geomtype)
tag['geom_types'] = list(set(geom_types))
tag['groups'] = list(reversed(groups))
self.tags.append(tag)
"""
for child in list(item):
self.process_item_and_children(child)
def get_geometrytype(self, item_type):
geometrytypes = []
osmtypes = item_type.split(',')
for osmtype in osmtypes:
geometrytypes.append(self.types[osmtype])
return geometrytypes
def build_hdm_preset_dict(self, ):
hdm = {}
xml = StringIO(open(self.preset).read())
tree = etree.parse(xml)
groups = tree.xpath('./ns:group', namespaces=self.namespaces)
for group in groups:
name = group.get('name')
group_dict = {}
hdm[name] = group_dict
self._parse_group(group, group_dict)
return OrderedDict(sorted(hdm.items()))
def _parse_group(self, group, group_dict):
items = group.xpath('./ns:item', namespaces=self.namespaces)
for item in items:
item_dict = {}
name = item.get('name')
types = item.get('type') # get the type attr on the item element
            if types is None:
                continue  # skip items with no geom type
geom_types = self.get_geometrytype(types)
keys = item.xpath('./ns:key', namespaces=self.namespaces)
if not len(keys) > 0:
continue
key = keys[0]
item_dict['displayName'] = name
item_dict['tag'] = '{0}:{1}'.format(key.get('key'), key.get('value'))
item_dict['geom'] = geom_types
group_dict[name] = OrderedDict(sorted(item_dict.items()))
groups = group.xpath('./ns:group', namespaces=self.namespaces)
for sub_group in groups:
sub_group_dict = {}
name = sub_group.get('name')
group_dict[name] = sub_group_dict
self._parse_group(sub_group, sub_group_dict)
class TagParser():
namespaces = {'ns': 'http://josm.openstreetmap.de/tagging-preset-1.0'}
nsmap = {None: 'http://josm.openstreetmap.de/tagging-preset-1.0'}
types = {
'point': 'node',
'line': 'way',
'polygon': 'area,closedway,relation',
}
def __init__(self, tags=None, *args, **kwargs):
self.tags = tags
def parse_tags(self, ):
root = etree.Element('presets', nsmap=self.nsmap)
doc = etree.ElementTree(root)
for tag in self.tags:
groups = self._add_groups(root, tag)
xml = etree.tostring(doc, xml_declaration=True, encoding='UTF-8', pretty_print=True)
return xml
    def _add_groups(self, parent, tag):
        # NB: this walks tag.groups destructively, popping one level per
        # recursion as the matching <group> element is created or found
        for group in tag.groups:
# check if element exists if not create it
found_groups = parent.xpath('group[@name="' + group + '"]', namespaces=self.namespaces)
if len(found_groups) == 0:
grp = etree.SubElement(parent, 'group', name=group)
tag.groups.pop(0)
if len(tag.groups) == 0:
geom_types = self._get_types(tag.geom_types)
item = etree.SubElement(grp, 'item', name=tag.name, type=geom_types)
etree.SubElement(item, 'key', key=tag.key, value=tag.value)
self._add_groups(grp, tag)
else:
tag.groups.pop(0)
if len(tag.groups) == 0:
geom_types = self._get_types(tag.geom_types)
item = etree.SubElement(found_groups[0], 'item', name=tag.name, type=geom_types)
etree.SubElement(item, 'key', key=tag.key, value=tag.value)
self._add_groups(found_groups[0], tag)
def _get_types(self, geom_types):
types = []
for geom_type in geom_types:
gtype = self.types.get(geom_type)
if gtype is not None:
types.append(self.types[geom_type])
return ','.join(types)
| bsd-3-clause | 1,925,800,538,163,020,300 | 36.5 | 102 | 0.524848 | false |
lukasjuhrich/sipa | sipa/blueprints/generic.py | 1 | 10785 | # -*- coding: utf-8 -*-
import logging
import os
from flask import render_template, request, redirect, \
url_for, flash, session, abort, current_app, jsonify
from flask.blueprints import Blueprint
from flask_babel import gettext, format_date
from flask_login import current_user, login_user, logout_user, \
login_required
from sqlalchemy.exc import DatabaseError
from ldap3.core.exceptions import LDAPCommunicationError
from sipa.forms import flash_formerrors, LoginForm, AnonymousContactForm, \
OfficialContactForm
from sipa.mail import send_official_contact_mail, send_contact_mail
from sipa.model import backends
from sipa.units import dynamic_unit, format_money
from sipa.utils import get_user_name, redirect_url
from sipa.model.exceptions import UserNotFound, InvalidCredentials
from sipa.utils.git_utils import get_repo_active_branch, get_latest_commits
logger = logging.getLogger(__name__)
bp_generic = Blueprint('generic', __name__)
@bp_generic.before_app_request
def log_request():
if 'sentry' in current_app.extensions:
current_app.extensions['sentry'].client.extra_context({
'current_user': get_user_name(current_user),
'ip_user': get_user_name(backends.user_from_ip(request.remote_addr))
})
logging.getLogger(__name__ + '.http').debug(
'Incoming request: %s %s', request.method, request.path,
extra={'tags': {'user': get_user_name(current_user),
'ip': request.remote_addr}}
)
@bp_generic.app_errorhandler(401)
@bp_generic.app_errorhandler(403)
@bp_generic.app_errorhandler(404)
def error_handler_redirection(e):
"""Handles errors by flashing an according message
:param e: The error
:return: A flask response with the according HTTP error code
"""
if e.code == 401:
message = gettext("Bitte melde Dich an, um die Seite zu sehen.")
elif e.code == 403:
message = gettext("Diese Funktion wird in deinem Wohnheim "
"nicht unterstützt.")
elif e.code == 404:
message = gettext("Das von Dir angeforderte Dokument gibt es nicht.")
else:
message = gettext("Es ist ein Fehler aufgetreten!")
return render_template(
'error.html',
errorcode=e.code,
message=message
), e.code
@bp_generic.app_errorhandler(DatabaseError)
def exceptionhandler_sql(ex):
"""Handles global Database errors like:
Server down, Lock wait timeout exceeded, …
"""
flash(gettext("Es gab einen Fehler bei der Datenbankabfrage. "
"Bitte probiere es in ein paar Minuten noch mal."),
"error")
logger.critical('DatabaseError caught',
extra={'data': {'exception_args': ex.args}},
exc_info=True)
return redirect(url_for('generic.index'))
@bp_generic.app_errorhandler(LDAPCommunicationError)
def exceptionhandler_ldap(ex):
"""Handles global LDAPCommunicationError exceptions.
The session must be reset, because if the user is logged in and
the server fails during his session, it would cause a redirect
loop. This also resets the language choice, btw.
The alternative would be a try-except catch block in load_user,
but login also needs a handler.
"""
session.clear()
flash(gettext("Verbindung zum LDAP-Server "
"konnte nicht hergestellt werden!"),
'error')
logger.critical(
'Unable to connect to LDAP server',
extra={'data': {'exception_args': ex.args}},
exc_info=True,
)
return redirect(url_for('generic.index'))
@bp_generic.app_errorhandler(ConnectionError)
def exceptionhandler_gerok(ex):
"""Handles ConnectionErrors
Session is cleared to avoid redirect loops, as above.
"""
flash(gettext("Es gab einen internen Fehler. "
"Bitte probiere es in ein paar Minuten noch mal."))
session.clear()
return redirect(url_for('generic.index'))
@bp_generic.route('/index.php')
@bp_generic.route('/')
def index():
return redirect(url_for('news.show'))
@bp_generic.route("/login", methods=['GET', 'POST'])
def login():
"""Login page for users
"""
form = LoginForm()
if form.validate_on_submit():
dormitory = backends.get_dormitory(form.dormitory.data)
username = form.username.data
password = form.password.data
remember = form.remember.data
User = dormitory.datasource.user_class
valid_suffix = "@{}".format(dormitory.datasource.mail_server)
if username.endswith(valid_suffix):
username = username[:-len(valid_suffix)]
try:
user = User.authenticate(username, password)
except InvalidCredentials as e:
cause = "username" if isinstance(e, UserNotFound) else "password"
logger.info("Authentication failed: Wrong %s", cause, extra={
'tags': {'user': username, 'rate_critical': True}
})
flash(gettext("Anmeldedaten fehlerhaft!"), "error")
else:
if isinstance(user, User):
session['dormitory'] = dormitory.name
login_user(user, remember=remember)
logger.info('Authentication successful',
extra={'tags': {'user': username}})
flash(gettext("Anmeldung erfolgreich!"), "success")
elif form.is_submitted():
flash_formerrors(form)
if current_user.is_authenticated:
return redirect(url_for('usersuite.index'))
return render_template('login.html', form=form,
unsupported=backends.premature_dormitories)
@bp_generic.route("/logout")
@login_required
def logout():
logger.info("Logging out",
extra={'tags': {'user': current_user.uid}})
logout_user()
flash(gettext("Abmeldung erfolgreich!"), 'success')
return redirect(url_for('.index'))
bp_generic.add_app_template_filter(dynamic_unit, name='unit')
@bp_generic.app_template_filter('traffic_color')
def traffic_color(amount, daily_credit):
return ("" if amount < daily_credit
else "bg-warning" if amount < 2 * daily_credit
else "bg-danger")
@bp_generic.app_template_filter('gib')
def to_gigabytes(number):
"""Convert a number from KiB to GiB
This is used mainly for the gauge, everything else uses the dynamic
`unit` function.
"""
return number / 1024 ** 2
@bp_generic.app_template_filter('date')
def jinja_format_date(date):
return format_date(date)
bp_generic.add_app_template_filter(format_money, name='money')
@bp_generic.route("/usertraffic")
def usertraffic():
"""Show a user's traffic on a static site just as in the usersuite.
If a user is logged but the ip corresponds to another user, a hint
is flashed and the traffic of the `ip_user` is displayed.
"""
ip_user = backends.user_from_ip(request.remote_addr)
chosen_user = None
if current_user.is_authenticated:
chosen_user = current_user
if not current_user.has_connection and not ip_user.is_authenticated:
flash(gettext("Aufgrund deines Nutzerstatus kannst Du "
"keine Trafficdaten einsehen."), "info")
return redirect(url_for('generic.index'))
if ip_user.is_authenticated:
chosen_user = ip_user
if current_user.is_authenticated:
if current_user != ip_user:
flash(gettext("Ein anderer Nutzer als der für diesen "
"Anschluss Eingetragene ist angemeldet!"),
'warning')
flash(gettext("Hier werden die Trafficdaten "
"dieses Anschlusses angezeigt."), "info")
if chosen_user:
user_id = chosen_user.id.value if chosen_user.id.supported else None
return render_template("usertraffic.html",
user_id=user_id,
traffic_user=chosen_user)
abort(401)
@bp_generic.route('/usertraffic/json')
def traffic_api():
user = (current_user if current_user.is_authenticated
else backends.user_from_ip(request.remote_addr))
if not user.is_authenticated:
return jsonify(version=0)
traffic_history = ({
'in': x['input'],
'out': x['output'],
} for x in reversed(user.traffic_history))
trafficdata = {
'quota': user.credit,
# `next` gets the first entry (“today”)
'traffic': next(traffic_history),
'history': list(traffic_history),
}
return jsonify(version=2, **trafficdata)
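
# The JSON produced above has this shape (numbers are illustrative):
#   {"version": 2, "quota": ..., "traffic": {"in": ..., "out": ...},
#    "history": [{"in": ..., "out": ...}, ...]}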
@bp_generic.route('/contact', methods=['GET', 'POST'])
def contact():
form = AnonymousContactForm()
if form.validate_on_submit():
success = send_contact_mail(
sender=form.email.data,
subject=form.subject.data,
name=form.name.data,
message=form.message.data,
dormitory_name=form.dormitory.data,
)
if success:
flash(gettext("Nachricht wurde versandt."), "success")
else:
flash(gettext("Es gab einen Fehler beim Versenden der Nachricht."),
'error')
return redirect(url_for('.index'))
elif form.is_submitted():
flash_formerrors(form)
elif current_user.is_authenticated:
flash(gettext("Sicher, dass Du das anonyme Formular "
"benutzen möchtest? Dies ist nur erforderlich, wenn Du "
"Administratoren eines anderen Wohnheims "
"kontaktieren willst."), 'info')
return render_template('anonymous_contact.html', form=form)
@bp_generic.route('/contact_official', methods=['GET', 'POST'])
def contact_official():
form = OfficialContactForm()
if form.validate_on_submit():
success = send_official_contact_mail(
sender=form.email.data,
subject=form.subject.data,
name=form.name.data,
message=form.message.data,
)
if success:
flash(gettext("Nachricht wurde versandt."), "success")
else:
flash(gettext("Es gab einen Fehler beim Versenden der Nachricht."),
'error')
return redirect(url_for('.index'))
elif form.is_submitted():
flash_formerrors(form)
return render_template(
'official_contact.html',
form=form
)
@bp_generic.route('/version')
def version():
""" Display version information from local repo """
sipa_dir = os.getcwd()
return render_template(
'version.html',
active_branch=get_repo_active_branch(sipa_dir),
commits=get_latest_commits(sipa_dir, 20),
)
| mit | 2,989,018,427,990,838,000 | 31.753799 | 80 | 0.627691 | false |
brunobraga/termsaver | termsaverlib/plugins/exampleplugin/constants.py | 1 | 2886 | ###############################################################################
#
# file: constants.py
#
# Purpose: refer to module documentation for details
#
# Note: This file is part of Termsaver-Example plugin, and should not be
# used or executed separately.
#
###############################################################################
#
# Copyright 2012 Termsaver
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
###############################################################################
"""
Holds constant values used throughout termsaver-exampleplugin plugin.
"""
#
# Termsaver modules
#
from termsaverlib.constants import PropertyClass
class Plugin(PropertyClass):
"""
    Holds application-related properties used by the termsaver-exampleplugin
    plugin screens. Refer to each of the available properties for detailed
    documentation.
"""
VERSION = "0.1"
"""
    Defines the version of the termsaver-exampleplugin plugin. This is accessed
    during the install process and included in help and usage messages.
    Refer to the CHANGELOG file for a complete history of this project.
"""
NAME = 'termsaver-exampleplugin'
"""
Defines the termsaver-exampleplugin plugin, usually the plugin package name.
"""
TITLE = 'TermSaver Example Plugin'
"""
Defines the termsaver-exampleplugin plugin's official name as it should appear
in documentation.
"""
DESCRIPTION = 'A set of screens for showing an example termsaver plugin.'
"""
Defines the main description of the termsaver-exampleplugin plugin.
"""
URL = 'http://www.termsaver.info/plugins'
"""
Defines the termsaver-exampleplugin plugin official website address.
"""
SOURCE_URL = 'http://github.com/brunobraga/termsaver'
"""
Defines the termsaver-exampleplugin plugin official source-code control site,
hosted on GitHub.
"""
AUTHORS = ['Bruno Braga <[email protected]>']
"""
Defines a list of all authors contributing to the termsaver-exampleplugin plugin.
"""
class Settings(PropertyClass):
"""
Holds configuration settings used by termsaver-exampleplugin plugin. Refer to each
of the available properties for detailed documentation.
Follow the formatting:
SETTING_NAME = VALUE
\"\"\"
document it!
\"\"\"
"""
pass
| apache-2.0 | 6,038,243,330,889,457,000 | 28.44898 | 86 | 0.646223 | false |
bl4ckh0l3z/droidtrail | droidtrail/trails/androguard/arscrestableconfig.py | 1 | 2720 | # This file is part of DroidTrail.
#
# bl4ckh0l3 <bl4ckh0l3z at gmail.com>
#
# DroidTrail is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# DroidTrail is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DroidTrail. If not, see <http://www.gnu.org/licenses/>.
#
# **********************************************************************
# NOTE: This file is part of Androguard;
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# It is a modified and sanitized version for DroidTrail,
# created by bl4ckh0l3 <bl4ckh0l3z at gmail.com>.
# **********************************************************************
#
__author__ = 'desnos'
__license__ = 'GPL v2'
__maintainer__ = 'bl4ckh0l3'
__email__ = '[email protected]'
import logging
from struct import unpack
class ARSCResTableConfig:
def __init__(self, buff):
self.start = buff.get_idx()
self.size = unpack('<i', buff.read(4))[0]
self.imsi = unpack('<i', buff.read(4))[0]
self.locale = unpack('<i', buff.read(4))[0]
self.screenType = unpack('<i', buff.read(4))[0]
self.input = unpack('<i', buff.read(4))[0]
self.screenSize = unpack('<i', buff.read(4))[0]
self.version = unpack('<i', buff.read(4))[0]
self.screenConfig = 0
self.screenSizeDp = 0
if self.size >= 32:
self.screenConfig = unpack('<i', buff.read(4))[0]
if self.size >= 36:
self.screenSizeDp = unpack('<i', buff.read(4))[0]
self.exceedingSize = self.size - 36
if self.exceedingSize > 0:
logging.warning("too much bytes !")
self.padding = buff.read(self.exceedingSize)
#print "ARSCResTableConfig", hex(self.start), hex(self.size), hex(self.imsi), hex(self.locale), repr(self.get_language()), repr(self.get_country()), hex(self.screenType), hex(self.input), hex(self.screenSize), hex(self.version), hex(self.screenConfig), hex(self.screenSizeDp)
    def get_language(self):
        # the language code is packed into the low 16 bits of 'locale',
        # one ASCII character per byte (low byte first)
        x = self.locale & 0x0000ffff
        return chr(x & 0x00ff) + chr((x & 0xff00) >> 8)

    def get_country(self):
        # the country code is packed into the high 16 bits of 'locale'
        x = (self.locale & 0xffff0000) >> 16
        return chr(x & 0x00ff) + chr((x & 0xff00) >> 8)
| gpl-2.0 | -5,226,607,345,617,743,000 | 38.434783 | 283 | 0.60625 | false |
reynoldpj/sysadmrepo | mariadb/dump_stat_info.py | 1 | 3676 | #!/usr/bin/python
import MySQLdb
import json
from ConfigParser import ConfigParser
LIMIT = 7
user_and_client_stat_columns = ('TOTAL_CONNECTIONS', 'CONCURRENT_CONNECTIONS', 'CONNECTED_TIME', 'BUSY_TIME', 'CPU_TIME', 'BYTES_RECEIVED', 'BYTES_SENT', 'BINLOG_BYTES_WRITTEN', 'ROWS_READ', 'ROWS_SENT', 'ROWS_DELETED', 'ROWS_INSERTED', 'ROWS_UPDATED', 'SELECT_COMMANDS', 'UPDATE_COMMANDS', 'OTHER_COMMANDS', 'COMMIT_TRANSACTIONS', 'ROLLBACK_TRANSACTIONS', 'DENIED_CONNECTIONS', 'LOST_CONNECTIONS', 'ACCESS_DENIED', 'EMPTY_QUERIES')
# data holding dicts
data_user_stat = {}
data_client_stat = {}
data_index_stat = {}
data_table_stat = {}
try:
# Configuration parsers
cfg = ConfigParser()
cfg.read('/root/.my.cnf')
# Connect to mysql db and get cursor info
db = MySQLdb.connect(host = cfg.get(section='client',option='host'), db = 'INFORMATION_SCHEMA', user = cfg.get(section='client',option='user'), passwd = cfg.get(section='client',option ='password'))
cur = db.cursor()
#gather USER_STATISTICS and CLIENT_STATISTICS info
for col in user_and_client_stat_columns:
cur.execute("SELECT USER,%s FROM USER_STATISTICS ORDER BY %s DESC LIMIT %d" % (col, col, LIMIT))
data_user_stat[col] = cur.fetchall()
cur.execute("SELECT CLIENT,%s FROM CLIENT_STATISTICS ORDER BY %s DESC LIMIT %d" % (col, col, LIMIT))
data_client_stat[col] = cur.fetchall()
# gather INDEX_STATISTICS
cur.execute("select TABLE_SCHEMA, TABLE_NAME, INDEX_NAME, ROWS_READ from INDEX_STATISTICS order by ROWS_READ desc limit %d" % LIMIT)
data_index_stat['ROWS_READ'] = cur.fetchall()
# gather TABLE_STATISTICS
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED from TABLE_STATISTICS order by ROWS_CHANGED desc limit %d" % LIMIT)
data_table_stat['ROWS_CHANGED'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_READ from TABLE_STATISTICS order by ROWS_READ desc limit %d" % LIMIT)
data_table_stat['ROWS_READ'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED_X_INDEXES from TABLE_STATISTICS order by ROWS_CHANGED_X_INDEXES desc limit %d" % LIMIT)
data_table_stat['ROWS_CHANGED_X_INDEXES'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_READ from TABLE_STATISTICS where TABLE_NAME like '%s' order by ROWS_READ desc limit %d" % ("%comments%",LIMIT))
data_table_stat['ROWS_READ_comments'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED from TABLE_STATISTICS where TABLE_NAME REGEXP 'gast|guest|gasten|gjeste|gbook|gaest' order by ROWS_CHANGED desc limit %d" % LIMIT)
data_table_stat['ROWS_CHANGED_guestbook'] = cur.fetchall()
    querystring = {
        'ROWS_CHANGED_comments': '%comments%',
        'ROWS_CHANGED_phpbbuser': 'phpbb%user%',
        'ROWS_CHANGED_phpbbloginattempt': 'phpbb%login%attempt%',
        'ROWS_CHANGED_phpbbpost': 'phpbb%post%',
        'ROWS_CHANGED_wpcomments': '%wp%comments%',
        'ROWS_CHANGED_wpposts': '%wp%posts%',
        'ROWS_CHANGED_wpusers': '%wp%users%',
        'ROWS_CHANGED_users': 'users%',
        'ROWS_CHANGED_session': '%session%',
        'ROWS_CHANGED_friend': '%friend%'
    }
for key in querystring.keys():
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED from TABLE_STATISTICS where TABLE_NAME like '%s' order by ROWS_CHANGED desc limit %d" % (querystring[key], LIMIT))
data_table_stat[key] = cur.fetchall()
    print json.dumps({'USER_STATISTICS': data_user_stat, 'CLIENT_STATISTICS': data_client_stat, 'INDEX_STATISTICS': data_index_stat, 'TABLE_STATISTICS': data_table_stat})
except Exception, e:
    print str(e)
finally:
    # close db connection, guarding against a failed connect() in which case
    # 'cur' and 'db' were never bound
    try:
        cur.close()
        db.close()
    except NameError:
        pass
| gpl-2.0 | 2,162,547,570,938,539,800 | 53.058824 | 432 | 0.701034 | false |
lmyrefelt/CouchPotatoServer | couchpotato/core/notifications/plex/main.py | 1 | 2834 | from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from urllib2 import URLError
from xml.dom import minidom
import traceback
log = CPLog(__name__)
class Plex(Notification):
def __init__(self):
super(Plex, self).__init__()
addEvent('renamer.after', self.addToLibrary)
def addToLibrary(self, message = None, group = {}):
if self.isDisabled(): return
        log.info('Updating Plex library')
hosts = [cleanHost(x.strip() + ':32400') for x in self.conf('host').split(",")]
for host in hosts:
source_type = ['movie']
base_url = '%slibrary/sections' % host
refresh_url = '%s/%%s/refresh' % base_url
try:
sections_xml = self.urlopen(base_url)
xml_sections = minidom.parseString(sections_xml)
sections = xml_sections.getElementsByTagName('Directory')
for s in sections:
if s.getAttribute('type') in source_type:
url = refresh_url % s.getAttribute('key')
                        self.urlopen(url)
except:
log.error('Plex library update failed for %s, Media Server not running: %s', (host, traceback.format_exc(1)))
return False
return True
def notify(self, message = '', data = {}, listener = None):
hosts = [x.strip() + ':3000' for x in self.conf('host').split(",")]
successful = 0
for host in hosts:
if self.send({'command': 'ExecBuiltIn', 'parameter': 'Notification(CouchPotato, %s)' % message}, host):
successful += 1
return successful == len(hosts)
def send(self, command, host):
url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, tryUrlencode(command))
headers = {}
try:
self.urlopen(url, headers = headers, show_error = False)
except URLError:
log.error("Couldn't sent command to Plex, probably just running Media Server")
return False
except:
log.error("Couldn't sent command to Plex: %s", traceback.format_exc())
return False
log.info('Plex notification to %s successful.', host)
return True
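
    # The assembled request is roughly (host and message are illustrative):
    #   http://192.168.1.10:3000/xbmcCmds/xbmcHttp/?command=ExecBuiltIn&parameter=Notification(CouchPotato,%20hello)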
def test(self):
test_type = self.testNotifyName()
log.info('Sending test to %s', test_type)
success = self.notify(
message = self.test_message,
data = {},
listener = 'test'
)
success2 = self.addToLibrary()
return {
'success': success or success2
}
| gpl-3.0 | -4,482,584,000,044,181,500 | 30.488889 | 125 | 0.575865 | false |
mission-peace/interview | python/dynamic/coin_change_num_ways.py | 1 | 2081 | """
Problem Statement
=================
Given a total and coins of certain denominations, find the number of ways the
total can be formed from the coins, assuming an infinite supply of each coin.
Analysis
--------
* Runtime : O(num_of_coins * total)
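* Recurrence, with T[i][j] = number of ways to form total j from the first
  i+1 coins:
      T[i][j] = T[i-1][j] + T[i][j - coins[i]]   if j >= coins[i]
      T[i][j] = T[i-1][j]                        otherwise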
Video
-----
* https://youtu.be/_fgjrs570YE
Reference
---------
* http://www.geeksforgeeks.org/dynamic-programming-set-7-coin-change/
"""
def coin_changing_num_ways(coins, total):
cols = total + 1 # 1 for value 0 in total
rows = len(coins)
T = [[1 if col == 0 else 0 for col in range(cols)] for _ in range(rows)]
    for i in range(rows):
        for j in range(1, cols):
            if i == 0:
                # first row: only coins[0] is available
                T[i][j] = T[i][j - coins[i]] if j >= coins[i] else 0
            elif j < coins[i]:
                T[i][j] = T[i - 1][j]
            else:
                T[i][j] = T[i - 1][j] + T[i][j - coins[i]]
return T[rows - 1][cols - 1]
def coin_changing_num_ways2(coins, total):
cols = total + 1
num_coins = len(coins)
# Using 1-D Array instead of 2-D Array. Approach is same as coin_changing_num_ways.
T = [1 if col == 0 else 0 for col in range(cols)]
for i in range(num_coins):
for col in range(1, cols):
if col >= coins[i]:
T[col] += T[col - coins[i]]
return T[cols - 1]
def print_coin_changes_recursive(coins, total, results_stack, pos):
    if total == 0:
        # found a complete combination; print it and backtrack
        for coin in results_stack:
            print "%d " % coin,
        print
        return
for idx in range(pos, len(coins)):
if total >= coins[idx]:
results_stack.append(coins[idx])
print_coin_changes_recursive(coins, total - coins[idx], results_stack, idx)
results_stack.pop() # Remove last inserted coin from stack to use new coin with different index.
def print_coin_changes(coins, total):
print_coin_changes_recursive(coins, total, list(), 0)
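
# For coins [1, 2, 3] and total 5 this prints the five combinations:
# 1 1 1 1 1, 1 1 1 2, 1 1 3, 1 2 2, 2 3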
if __name__ == '__main__':
coins = [1, 2, 3]
total = 5
expected = 5
assert expected == coin_changing_num_ways(coins, total)
assert expected == coin_changing_num_ways2(coins, total)
print_coin_changes(coins, total)
| apache-2.0 | -4,793,228,212,112,853,000 | 26.025974 | 117 | 0.575204 | false |
JackGavin13/octoprint-test-not-finished | src/octoprint/plugins/pluginmanager/__init__.py | 1 | 39848 | # coding=utf-8
from __future__ import absolute_import, division, print_function
__author__ = "Gina Häußge <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
from past.builtins import basestring
import octoprint.plugin
import octoprint.plugin.core
from octoprint.settings import valid_boolean_trues
from octoprint.server.util.flask import restricted_access, with_revalidation_checking, check_etag
from octoprint.server import admin_permission, VERSION
from octoprint.util.pip import LocalPipCaller, UnknownPip
from flask import jsonify, make_response
from flask.ext.babel import gettext
from collections import OrderedDict
import logging
import sarge
import sys
import requests
import re
import os
import pkg_resources
import copy
import dateutil.parser
import time
import threading
class PluginManagerPlugin(octoprint.plugin.SimpleApiPlugin,
octoprint.plugin.TemplatePlugin,
octoprint.plugin.AssetPlugin,
octoprint.plugin.SettingsPlugin,
octoprint.plugin.StartupPlugin,
octoprint.plugin.BlueprintPlugin,
octoprint.plugin.EventHandlerPlugin):
ARCHIVE_EXTENSIONS = (".zip", ".tar.gz", ".tgz", ".tar")
OPERATING_SYSTEMS = dict(windows=["win32"],
linux=lambda x: x.startswith("linux"),
macos=["darwin"],
freebsd=lambda x: x.startswith("freebsd"))
PIP_INAPPLICABLE_ARGUMENTS = dict(uninstall=["--user"])
RECONNECT_HOOKS = ["octoprint.comm.protocol.*",]
def __init__(self):
self._pending_enable = set()
self._pending_disable = set()
self._pending_install = set()
self._pending_uninstall = set()
self._pip_caller = None
self._repository_available = False
self._repository_plugins = []
self._repository_cache_path = None
self._repository_cache_ttl = 0
self._notices = dict()
self._notices_available = False
self._notices_cache_path = None
self._notices_cache_ttl = 0
self._console_logger = None
def initialize(self):
self._console_logger = logging.getLogger("octoprint.plugins.pluginmanager.console")
self._repository_cache_path = os.path.join(self.get_plugin_data_folder(), "plugins.json")
self._repository_cache_ttl = self._settings.get_int(["repository_ttl"]) * 60
self._notices_cache_path = os.path.join(self.get_plugin_data_folder(), "notices.json")
self._notices_cache_ttl = self._settings.get_int(["notices_ttl"]) * 60
self._pip_caller = LocalPipCaller(force_user=self._settings.get_boolean(["pip_force_user"]))
self._pip_caller.on_log_call = self._log_call
self._pip_caller.on_log_stdout = self._log_stdout
self._pip_caller.on_log_stderr = self._log_stderr
##~~ Body size hook
def increase_upload_bodysize(self, current_max_body_sizes, *args, **kwargs):
# set a maximum body size of 50 MB for plugin archive uploads
return [("POST", r"/upload_archive", 50 * 1024 * 1024)]
##~~ StartupPlugin
def on_after_startup(self):
from octoprint.logging.handlers import CleaningTimedRotatingFileHandler
console_logging_handler = CleaningTimedRotatingFileHandler(self._settings.get_plugin_logfile_path(postfix="console"), when="D", backupCount=3)
console_logging_handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
console_logging_handler.setLevel(logging.DEBUG)
self._console_logger.addHandler(console_logging_handler)
self._console_logger.setLevel(logging.DEBUG)
self._console_logger.propagate = False
# decouple repository fetching from server startup
self._fetch_all_data(async=True)
##~~ SettingsPlugin
def get_settings_defaults(self):
return dict(
repository="http://plugins.octoprint.org/plugins.json",
repository_ttl=24*60,
notices="http://plugins.octoprint.org/notices.json",
notices_ttl=6*60,
pip_args=None,
pip_force_user=False,
dependency_links=False,
hidden=[]
)
def on_settings_save(self, data):
octoprint.plugin.SettingsPlugin.on_settings_save(self, data)
self._repository_cache_ttl = self._settings.get_int(["repository_ttl"]) * 60
self._notices_cache_ttl = self._settings.get_int(["notices_ttl"]) * 60
self._pip_caller.force_user = self._settings.get_boolean(["pip_force_user"])
##~~ AssetPlugin
def get_assets(self):
return dict(
js=["js/pluginmanager.js"],
css=["css/pluginmanager.css"],
less=["less/pluginmanager.less"]
)
##~~ TemplatePlugin
def get_template_configs(self):
return [
dict(type="settings", name=gettext("Plugin Manager"), template="pluginmanager_settings.jinja2", custom_bindings=True),
dict(type="about", name="Plugin Licenses", template="pluginmanager_about.jinja2")
]
def get_template_vars(self):
plugins = sorted(self._get_plugins(), key=lambda x: x["name"].lower())
return dict(
all=plugins,
thirdparty=filter(lambda p: not p["bundled"], plugins),
archive_extensions=self.__class__.ARCHIVE_EXTENSIONS
)
def get_template_types(self, template_sorting, template_rules, *args, **kwargs):
return [
("about_thirdparty", dict(), dict(template=lambda x: x + "_about_thirdparty.jinja2"))
]
##~~ BlueprintPlugin
@octoprint.plugin.BlueprintPlugin.route("/upload_archive", methods=["POST"])
@restricted_access
@admin_permission.require(403)
def upload_archive(self):
import flask
input_name = "file"
input_upload_path = input_name + "." + self._settings.global_get(["server", "uploads", "pathSuffix"])
input_upload_name = input_name + "." + self._settings.global_get(["server", "uploads", "nameSuffix"])
if input_upload_path not in flask.request.values or input_upload_name not in flask.request.values:
return flask.make_response("No file included", 400)
upload_path = flask.request.values[input_upload_path]
upload_name = flask.request.values[input_upload_name]
exts = filter(lambda x: upload_name.lower().endswith(x), self.__class__.ARCHIVE_EXTENSIONS)
if not len(exts):
return flask.make_response("File doesn't have a valid extension for a plugin archive", 400)
ext = exts[0]
import tempfile
import shutil
import os
archive = tempfile.NamedTemporaryFile(delete=False, suffix="{ext}".format(**locals()))
try:
archive.close()
shutil.copy(upload_path, archive.name)
return self.command_install(path=archive.name, force="force" in flask.request.values and flask.request.values["force"] in valid_boolean_trues)
finally:
try:
os.remove(archive.name)
except Exception as e:
self._logger.warn("Could not remove temporary file {path} again: {message}".format(path=archive.name, message=str(e)))
##~~ EventHandlerPlugin
def on_event(self, event, payload):
from octoprint.events import Events
if event != Events.CONNECTIVITY_CHANGED or not payload or not payload.get("new", False):
return
self._fetch_all_data(async=True)
##~~ SimpleApiPlugin
def get_api_commands(self):
return {
"install": ["url"],
"uninstall": ["plugin"],
"enable": ["plugin"],
"disable": ["plugin"],
"refresh_repository": []
}
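
    # A typical request against these commands, as routed by OctoPrint's
    # SimpleApiPlugin machinery (the archive URL is illustrative):
    #   POST /api/plugin/pluginmanager
    #   {"command": "install", "url": "https://example.com/OctoPrint-SomePlugin.zip"}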
def on_api_get(self, request):
if not admin_permission.can():
return make_response("Insufficient rights", 403)
from octoprint.server import safe_mode
refresh_repository = request.values.get("refresh_repository", "false") in valid_boolean_trues
if refresh_repository:
self._repository_available = self._refresh_repository()
refresh_notices = request.values.get("refresh_notices", "false") in valid_boolean_trues
if refresh_notices:
self._notices_available = self._refresh_notices()
def view():
return jsonify(plugins=self._get_plugins(),
repository=dict(
available=self._repository_available,
plugins=self._repository_plugins
),
os=self._get_os(),
octoprint=self._get_octoprint_version_string(),
pip=dict(
available=self._pip_caller.available,
version=self._pip_caller.version_string,
install_dir=self._pip_caller.install_dir,
use_user=self._pip_caller.use_user,
virtual_env=self._pip_caller.virtual_env,
additional_args=self._settings.get(["pip_args"]),
python=sys.executable
),
safe_mode=safe_mode,
online=self._connectivity_checker.online)
def etag():
import hashlib
hash = hashlib.sha1()
hash.update(repr(self._get_plugins()))
hash.update(str(self._repository_available))
hash.update(repr(self._repository_plugins))
hash.update(str(self._notices_available))
hash.update(repr(self._notices))
hash.update(repr(safe_mode))
hash.update(repr(self._connectivity_checker.online))
return hash.hexdigest()
def condition():
return check_etag(etag())
return with_revalidation_checking(etag_factory=lambda *args, **kwargs: etag(),
condition=lambda *args, **kwargs: condition(),
unless=lambda: refresh_repository or refresh_notices)(view)()
def on_api_command(self, command, data):
if not admin_permission.can():
return make_response("Insufficient rights", 403)
if self._printer.is_printing() or self._printer.is_paused():
# do not update while a print job is running
return make_response("Printer is currently printing or paused", 409)
if command == "install":
url = data["url"]
plugin_name = data["plugin"] if "plugin" in data else None
return self.command_install(url=url,
force="force" in data and data["force"] in valid_boolean_trues,
dependency_links="dependency_links" in data
and data["dependency_links"] in valid_boolean_trues,
reinstall=plugin_name)
elif command == "uninstall":
plugin_name = data["plugin"]
if not plugin_name in self._plugin_manager.plugins:
return make_response("Unknown plugin: %s" % plugin_name, 404)
plugin = self._plugin_manager.plugins[plugin_name]
return self.command_uninstall(plugin)
elif command == "enable" or command == "disable":
plugin_name = data["plugin"]
if not plugin_name in self._plugin_manager.plugins:
return make_response("Unknown plugin: %s" % plugin_name, 404)
plugin = self._plugin_manager.plugins[plugin_name]
return self.command_toggle(plugin, command)
def command_install(self, url=None, path=None, force=False, reinstall=None, dependency_links=False):
if url is not None:
source = url
source_type = "url"
already_installed_check = lambda line: url in line
elif path is not None:
path = os.path.abspath(path)
path_url = "file://" + path
if os.sep != "/":
# windows gets special handling
path = path.replace(os.sep, "/").lower()
path_url = "file:///" + path
source = path
source_type = "path"
already_installed_check = lambda line: path_url in line.lower() # lower case in case of windows
else:
raise ValueError("Either URL or path must be provided")
self._logger.info("Installing plugin from {}".format(source))
pip_args = ["install", sarge.shell_quote(source)]
if dependency_links or self._settings.get_boolean(["dependency_links"]):
pip_args.append("--process-dependency-links")
all_plugins_before = self._plugin_manager.find_plugins(existing=dict())
already_installed_string = "Requirement already satisfied (use --upgrade to upgrade)"
success_string = "Successfully installed"
failure_string = "Could not install"
try:
returncode, stdout, stderr = self._call_pip(pip_args)
# pip's output for a package that is already installed looks something like any of these:
#
# Requirement already satisfied (use --upgrade to upgrade): OctoPrint-Plugin==1.0 from \
# https://example.com/foobar.zip in <lib>
# Requirement already satisfied (use --upgrade to upgrade): OctoPrint-Plugin in <lib>
# Requirement already satisfied (use --upgrade to upgrade): OctoPrint-Plugin==1.0 from \
# file:///tmp/foobar.zip in <lib>
# Requirement already satisfied (use --upgrade to upgrade): OctoPrint-Plugin==1.0 from \
# file:///C:/Temp/foobar.zip in <lib>
#
# If we detect any of these matching what we just tried to install, we'll need to trigger a second
# install with reinstall flags.
if not force and any(map(lambda x: x.strip().startswith(already_installed_string) and already_installed_check(x),
stdout)):
self._logger.info("Plugin to be installed from {} was already installed, forcing a reinstall".format(source))
self._log_message("Looks like the plugin was already installed. Forcing a reinstall.")
force = True
except:
self._logger.exception("Could not install plugin from %s" % url)
return make_response("Could not install plugin from URL, see the log for more details", 500)
else:
if force:
# We don't use --upgrade here because that will also happily update all our dependencies - we'd rather
# do that in a controlled manner
pip_args += ["--ignore-installed", "--force-reinstall", "--no-deps"]
try:
returncode, stdout, stderr = self._call_pip(pip_args)
except:
self._logger.exception("Could not install plugin from {}".format(source))
return make_response("Could not install plugin from source {}, see the log for more details"
.format(source), 500)
try:
result_line = filter(lambda x: x.startswith(success_string) or x.startswith(failure_string),
stdout)[-1]
except IndexError:
self._logger.error("Installing the plugin from {} failed, could not parse output from pip. "
"See plugin_pluginmanager_console.log for generated output".format(source))
result = dict(result=False,
source=source,
source_type=source_type,
reason="Could not parse output from pip, see plugin_pluginmanager_console.log "
"for generated output")
self._send_result_notification("install", result)
return jsonify(result)
# The final output of a pip install command looks something like this:
#
# Successfully installed OctoPrint-Plugin-1.0 Dependency-One-0.1 Dependency-Two-9.3
#
# or this:
#
# Successfully installed OctoPrint-Plugin Dependency-One Dependency-Two
# Cleaning up...
#
# So we'll need to fetch the "Successfully installed" line, strip the "Successfully" part, then split
# by whitespace and strip to get all installed packages.
#
# We then need to iterate over all known plugins and see if either the package name or the package name plus
# version number matches one of our installed packages. If it does, that's our installed plugin.
#
# Known issue: This might return the wrong plugin if more than one plugin was installed through this
# command (e.g. due to pulling in another plugin as dependency). It should be safe for now though to
# consider this a rare corner case. Once it becomes a real problem we'll just extend the plugin manager
# so that it can report on more than one installed plugin.
result_line = result_line.strip()
if not result_line.startswith(success_string):
self._logger.error("Installing the plugin from {} failed, pip did not report successful installation"
.format(source))
result = dict(result=False,
source=source,
source_type=source_type,
reason="Pip did not report successful installation")
self._send_result_notification("install", result)
return jsonify(result)
installed = map(lambda x: x.strip(), result_line[len(success_string):].split(" "))
all_plugins_after = self._plugin_manager.find_plugins(existing=dict(), ignore_uninstalled=False)
new_plugin = self._find_installed_plugin(installed, plugins=all_plugins_after)
if new_plugin is None:
self._logger.warn("The plugin was installed successfully, but couldn't be found afterwards to "
"initialize properly during runtime. Please restart OctoPrint.")
result = dict(result=True,
source=source,
source_type=source_type,
needs_restart=True,
needs_refresh=True,
needs_reconnect=True,
was_reinstalled=False,
plugin="unknown")
self._send_result_notification("install", result)
return jsonify(result)
self._plugin_manager.reload_plugins()
needs_restart = self._plugin_manager.is_restart_needing_plugin(new_plugin) \
or new_plugin.key in all_plugins_before \
or reinstall is not None
needs_refresh = new_plugin.implementation \
and isinstance(new_plugin.implementation, octoprint.plugin.ReloadNeedingPlugin)
needs_reconnect = self._plugin_manager.has_any_of_hooks(new_plugin, self._reconnect_hooks) and self._printer.is_operational()
is_reinstall = self._plugin_manager.is_plugin_marked(new_plugin.key, "uninstalled")
self._plugin_manager.mark_plugin(new_plugin.key,
uninstalled=False,
installed=not is_reinstall and needs_restart)
self._plugin_manager.log_all_plugins()
self._logger.info("The plugin was installed successfully: {}, version {}".format(new_plugin.name, new_plugin.version))
result = dict(result=True,
source=source,
source_type=source_type,
needs_restart=needs_restart,
needs_refresh=needs_refresh,
needs_reconnect=needs_reconnect,
was_reinstalled=new_plugin.key in all_plugins_before or reinstall is not None,
plugin=self._to_external_plugin(new_plugin))
self._send_result_notification("install", result)
return jsonify(result)
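# A minimal, self-contained sketch of the "Successfully installed" parsing
# described in the comment block inside command_install above. The function
# name and the sample output in the usage note are illustrative assumptions,
# not part of the plugin manager's API.
def parse_pip_success_line(stdout_lines, success_string="Successfully installed"):
    # keep only the lines reporting success and take the last one, since
    # pip may emit more output after it
    candidates = [line.strip() for line in stdout_lines
                  if line.strip().startswith(success_string)]
    if not candidates:
        return []  # pip did not report a successful installation
    # strip the prefix, then split on whitespace to get the package tokens
    return candidates[-1][len(success_string):].split()
# Usage: parse_pip_success_line(["Collecting foo",
#     "Successfully installed OctoPrint-Plugin-1.0 Dependency-One-0.1"])
# returns ['OctoPrint-Plugin-1.0', 'Dependency-One-0.1']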
def command_uninstall(self, plugin):
if plugin.key == "pluginmanager":
return make_response("Can't uninstall Plugin Manager", 403)
if not plugin.managable:
return make_response("Plugin is not managable and hence cannot be uninstalled", 403)
if plugin.bundled:
return make_response("Bundled plugins cannot be uninstalled", 403)
if plugin.origin is None:
self._logger.warn(u"Trying to uninstall plugin {plugin} but origin is unknown".format(**locals()))
return make_response("Could not uninstall plugin, its origin is unknown")
if plugin.origin.type == "entry_point":
# plugin is installed through entry point, need to use pip to uninstall it
origin = plugin.origin[3]
if origin is None:
origin = plugin.origin[2]
pip_args = ["uninstall", "--yes", origin]
try:
self._call_pip(pip_args)
except:
self._logger.exception(u"Could not uninstall plugin via pip")
return make_response("Could not uninstall plugin via pip, see the log for more details", 500)
elif plugin.origin.type == "folder":
import os
import shutil
full_path = os.path.realpath(plugin.location)
if os.path.isdir(full_path):
# plugin is installed via a plugin folder, need to use rmtree to get rid of it
self._log_stdout(u"Deleting plugin from {folder}".format(folder=plugin.location))
shutil.rmtree(full_path)
elif os.path.isfile(full_path):
self._log_stdout(u"Deleting plugin from {file}".format(file=plugin.location))
os.remove(full_path)
if full_path.endswith(".py"):
pyc_file = "{full_path}c".format(**locals())
if os.path.isfile(pyc_file):
os.remove(pyc_file)
else:
self._logger.warn(u"Trying to uninstall plugin {plugin} but origin is unknown ({plugin.origin.type})".format(**locals()))
return make_response("Could not uninstall plugin, its origin is unknown")
needs_restart = self._plugin_manager.is_restart_needing_plugin(plugin)
needs_refresh = plugin.implementation and isinstance(plugin.implementation, octoprint.plugin.ReloadNeedingPlugin)
needs_reconnect = self._plugin_manager.has_any_of_hooks(plugin, self._reconnect_hooks) and self._printer.is_operational()
was_pending_install = self._plugin_manager.is_plugin_marked(plugin.key, "installed")
self._plugin_manager.mark_plugin(plugin.key,
uninstalled=not was_pending_install and needs_restart,
installed=False)
if not needs_restart:
try:
self._plugin_manager.disable_plugin(plugin.key, plugin=plugin)
except octoprint.plugin.core.PluginLifecycleException as e:
self._logger.exception(u"Problem disabling plugin {name}".format(name=plugin.key))
result = dict(result=False, uninstalled=True, disabled=False, unloaded=False, reason=e.reason)
self._send_result_notification("uninstall", result)
return jsonify(result)
try:
self._plugin_manager.unload_plugin(plugin.key)
except octoprint.plugin.core.PluginLifecycleException as e:
self._logger.exception(u"Problem unloading plugin {name}".format(name=plugin.key))
result = dict(result=False, uninstalled=True, disabled=True, unloaded=False, reason=e.reason)
self._send_result_notification("uninstall", result)
return jsonify(result)
self._plugin_manager.reload_plugins()
result = dict(result=True,
needs_restart=needs_restart,
needs_refresh=needs_refresh,
needs_reconnect=needs_reconnect,
plugin=self._to_external_plugin(plugin))
self._send_result_notification("uninstall", result)
return jsonify(result)
def command_toggle(self, plugin, command):
if plugin.key == "pluginmanager":
return make_response("Can't enable/disable Plugin Manager", 400)
needs_restart = self._plugin_manager.is_restart_needing_plugin(plugin)
needs_refresh = plugin.implementation and isinstance(plugin.implementation, octoprint.plugin.ReloadNeedingPlugin)
needs_reconnect = self._plugin_manager.has_any_of_hooks(plugin, self._reconnect_hooks) and self._printer.is_operational()
pending = ((command == "disable" and plugin.key in self._pending_enable) or (command == "enable" and plugin.key in self._pending_disable))
safe_mode_victim = getattr(plugin, "safe_mode_victim", False)
needs_restart_api = (needs_restart or safe_mode_victim) and not pending
needs_refresh_api = needs_refresh and not pending
needs_reconnect_api = needs_reconnect and not pending
try:
if command == "disable":
self._mark_plugin_disabled(plugin, needs_restart=needs_restart)
elif command == "enable":
self._mark_plugin_enabled(plugin, needs_restart=needs_restart)
except octoprint.plugin.core.PluginLifecycleException as e:
self._logger.exception(u"Problem toggling enabled state of {name}: {reason}".format(name=plugin.key, reason=e.reason))
result = dict(result=False, reason=e.reason)
except octoprint.plugin.core.PluginNeedsRestart:
result = dict(result=True,
needs_restart=True,
needs_refresh=True,
needs_reconnect=True,
plugin=self._to_external_plugin(plugin))
else:
result = dict(result=True,
needs_restart=needs_restart_api,
needs_refresh=needs_refresh_api,
needs_reconnect=needs_reconnect_api,
plugin=self._to_external_plugin(plugin))
self._send_result_notification(command, result)
return jsonify(result)
def _find_installed_plugin(self, packages, plugins=None):
if plugins is None:
plugins = self._plugin_manager.find_plugins(existing=dict(), ignore_uninstalled=False)
for key, plugin in plugins.items():
if plugin.origin is None or plugin.origin.type != "entry_point":
continue
package_name = plugin.origin.package_name
package_version = plugin.origin.package_version
versioned_package = "{package_name}-{package_version}".format(**locals())
if package_name in packages or versioned_package in packages:
# exact match, we are done here
return plugin
else:
# it might still be a version that got stripped by python's package resources, e.g. 1.4.5a0 => 1.4.5a
found = False
for inst in packages:
if inst.startswith(versioned_package):
found = True
break
if found:
return plugin
return None
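# Illustrative standalone version of the matching done in
# _find_installed_plugin above: a plugin counts as "just installed" if its
# package name or its name-version combination appears among the packages
# pip reported, with a prefix fallback because package resources may have
# normalized the version (e.g. 1.4.5a0 -> 1.4.5a). All names are demo values.
def matches_installed(package_name, package_version, installed_packages):
    versioned = "{0}-{1}".format(package_name, package_version)
    if package_name in installed_packages or versioned in installed_packages:
        return True  # exact match
    # the metadata-derived name-version may be a prefix of what pip reported
    return any(inst.startswith(versioned) for inst in installed_packages)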
def _send_result_notification(self, action, result):
notification = dict(type="result", action=action)
notification.update(result)
self._plugin_manager.send_plugin_message(self._identifier, notification)
def _call_pip(self, args):
if self._pip_caller is None or not self._pip_caller.available:
raise RuntimeError(u"No pip available, can't operate".format(**locals()))
if "--process-dependency-links" in args:
self._log_message(u"Installation needs to process external dependencies, that might make it take a bit longer than usual depending on the pip version")
additional_args = self._settings.get(["pip_args"])
if additional_args is not None:
inapplicable_arguments = self.__class__.PIP_INAPPLICABLE_ARGUMENTS.get(args[0], list())
for inapplicable_argument in inapplicable_arguments:
additional_args = re.sub("(^|\s)" + re.escape(inapplicable_argument) + "\\b", "", additional_args)
if additional_args:
args.append(additional_args)
return self._pip_caller.execute(*args)
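# Standalone sketch of the argument scrubbing in _call_pip above:
# inapplicable flags are cut out of the user-configured extra pip arguments
# with a word-boundary regex. The sample flags below are made up for the demo.
import re

def scrub_pip_args(additional_args, inapplicable=("--user", "--force-reinstall")):
    for flag in inapplicable:
        additional_args = re.sub(r"(^|\s)" + re.escape(flag) + r"\b", "",
                                 additional_args)
    return additional_args.strip()
# scrub_pip_args("--user --no-cache-dir") returns "--no-cache-dir"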
def _log_message(self, *lines):
self._log(lines, prefix=u"*", stream="message")
def _log_call(self, *lines):
self._log(lines, prefix=u" ", stream="call")
def _log_stdout(self, *lines):
self._log(lines, prefix=u">", stream="stdout")
def _log_stderr(self, *lines):
self._log(lines, prefix=u"!", stream="stderr")
def _log(self, lines, prefix=None, stream=None, strip=True):
if strip:
lines = map(lambda x: x.strip(), lines)
self._plugin_manager.send_plugin_message(self._identifier, dict(type="loglines", loglines=[dict(line=line, stream=stream) for line in lines]))
for line in lines:
self._console_logger.debug(u"{prefix} {line}".format(**locals()))
def _mark_plugin_enabled(self, plugin, needs_restart=False):
disabled_list = list(self._settings.global_get(["plugins", "_disabled"]))
if plugin.key in disabled_list:
disabled_list.remove(plugin.key)
self._settings.global_set(["plugins", "_disabled"], disabled_list)
self._settings.save(force=True)
if not needs_restart and not getattr(plugin, "safe_mode_victim", False):
self._plugin_manager.enable_plugin(plugin.key)
else:
if plugin.key in self._pending_disable:
self._pending_disable.remove(plugin.key)
elif (not plugin.enabled and not getattr(plugin, "safe_mode_enabled", False)) and plugin.key not in self._pending_enable:
self._pending_enable.add(plugin.key)
def _mark_plugin_disabled(self, plugin, needs_restart=False):
disabled_list = list(self._settings.global_get(["plugins", "_disabled"]))
if not plugin.key in disabled_list:
disabled_list.append(plugin.key)
self._settings.global_set(["plugins", "_disabled"], disabled_list)
self._settings.save(force=True)
if not needs_restart and not getattr(plugin, "safe_mode_victim", False):
self._plugin_manager.disable_plugin(plugin.key)
else:
if plugin.key in self._pending_enable:
self._pending_enable.remove(plugin.key)
elif (plugin.enabled or getattr(plugin, "safe_mode_enabled", False)) and plugin.key not in self._pending_disable:
self._pending_disable.add(plugin.key)
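# Distilled form of the pending-state bookkeeping shared by the two
# _mark_plugin_* methods above: a disable request for a plugin that is only
# queued for enabling simply cancels that queue entry, otherwise the plugin
# is queued for the next restart. Plain sets stand in for the plugin
# manager's state here.
def queue_disable(key, currently_enabled, pending_enable, pending_disable):
    if key in pending_enable:
        pending_enable.discard(key)  # cancel the opposite pending request
    elif currently_enabled and key not in pending_disable:
        pending_disable.add(key)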
def _fetch_all_data(self, async=False):
def run():
self._repository_available = self._fetch_repository_from_disk()
self._notices_available = self._fetch_notices_from_disk()
if async:
thread = threading.Thread(target=run)
thread.daemon = True
thread.start()
else:
run()
def _fetch_repository_from_disk(self):
repo_data = None
if os.path.isfile(self._repository_cache_path):
import time
mtime = os.path.getmtime(self._repository_cache_path)
if mtime + self._repository_cache_ttl >= time.time() > mtime:
try:
import json
with open(self._repository_cache_path) as f:
repo_data = json.load(f)
self._logger.info("Loaded plugin repository data from disk, was still valid")
except:
self._logger.exception("Error while loading repository data from {}".format(self._repository_cache_path))
return self._refresh_repository(repo_data=repo_data)
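# Minimal sketch of the cache policy used by _fetch_repository_from_disk and
# _fetch_notices_from_disk: a file on disk is only trusted while its mtime
# lies in the past and is younger than the TTL. Path and TTL are caller
# supplied here; the helper name is an illustrative assumption.
import json
import os
import time

def load_cached_json(path, ttl_seconds):
    if not os.path.isfile(path):
        return None
    mtime = os.path.getmtime(path)
    # valid window: written in the past and not older than the TTL
    if not (mtime < time.time() <= mtime + ttl_seconds):
        return None
    try:
        with open(path) as f:
            return json.load(f)
    except (IOError, ValueError):
        return None  # treat unreadable or corrupt caches as missing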
def _fetch_repository_from_url(self):
if not self._connectivity_checker.online:
self._logger.info("Looks like we are offline, can't fetch repository from network")
return None
repository_url = self._settings.get(["repository"])
try:
r = requests.get(repository_url, timeout=30)
r.raise_for_status()
self._logger.info("Loaded plugin repository data from {}".format(repository_url))
except Exception as e:
self._logger.exception("Could not fetch plugins from repository at {repository_url}: {message}".format(repository_url=repository_url, message=str(e)))
return None
repo_data = r.json()
try:
import json
with octoprint.util.atomic_write(self._repository_cache_path, "wb") as f:
json.dump(repo_data, f)
except Exception as e:
self._logger.exception("Error while saving repository data to {}: {}".format(self._repository_cache_path, str(e)))
return repo_data
def _refresh_repository(self, repo_data=None):
if repo_data is None:
repo_data = self._fetch_repository_from_url()
if repo_data is None:
return False
current_os = self._get_os()
octoprint_version = self._get_octoprint_version(base=True)
def map_repository_entry(entry):
result = copy.deepcopy(entry)
if not "follow_dependency_links" in result:
result["follow_dependency_links"] = False
result["is_compatible"] = dict(
octoprint=True,
os=True
)
if "compatibility" in entry:
if "octoprint" in entry["compatibility"] and entry["compatibility"]["octoprint"] is not None and isinstance(entry["compatibility"]["octoprint"], (list, tuple)) and len(entry["compatibility"]["octoprint"]):
result["is_compatible"]["octoprint"] = self._is_octoprint_compatible(octoprint_version, entry["compatibility"]["octoprint"])
if "os" in entry["compatibility"] and entry["compatibility"]["os"] is not None and isinstance(entry["compatibility"]["os"], (list, tuple)) and len(entry["compatibility"]["os"]):
result["is_compatible"]["os"] = self._is_os_compatible(current_os, entry["compatibility"]["os"])
return result
self._repository_plugins = map(map_repository_entry, repo_data)
return True
def _fetch_notices_from_disk(self):
notice_data = None
if os.path.isfile(self._notices_cache_path):
import time
mtime = os.path.getmtime(self._notices_cache_path)
if mtime + self._notices_cache_ttl >= time.time() > mtime:
try:
import json
with open(self._notices_cache_path) as f:
notice_data = json.load(f)
self._logger.info("Loaded notice data from disk, was still valid")
except:
self._logger.exception("Error while loading notices from {}".format(self._notices_cache_path))
return self._refresh_notices(notice_data=notice_data)
def _fetch_notices_from_url(self):
if not self._connectivity_checker.online:
self._logger.info("Looks like we are offline, can't fetch notices from network")
return None
notices_url = self._settings.get(["notices"])
try:
r = requests.get(notices_url, timeout=30)
r.raise_for_status()
self._logger.info("Loaded plugin notices data from {}".format(notices_url))
except Exception as e:
self._logger.exception("Could not fetch notices from {notices_url}: {message}".format(notices_url=notices_url, message=str(e)))
return None
notice_data = r.json()
try:
import json
with octoprint.util.atomic_write(self._notices_cache_path, "wb") as f:
json.dump(notice_data, f)
except Exception as e:
self._logger.exception("Error while saving notices to {}: {}".format(self._notices_cache_path, str(e)))
return notice_data
def _refresh_notices(self, notice_data=None):
if notice_data is None:
notice_data = self._fetch_notices_from_url()
if notice_data is None:
return False
notices = dict()
for notice in notice_data:
if not "plugin" in notice or not "text" in notice or not "date" in notice:
continue
key = notice["plugin"]
try:
parsed_date = dateutil.parser.parse(notice["date"])
notice["timestamp"] = parsed_date.timetuple()
except Exception as e:
self._logger.warn("Error while parsing date {!r} for plugin notice "
"of plugin {}, ignoring notice: {}".format(notice["date"], key, str(e)))
continue
if not key in notices:
notices[key] = []
notices[key].append(notice)
self._notices = notices
return True
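# Compact sketch of the grouping performed in _refresh_notices above:
# entries missing required fields or carrying unparseable dates are skipped,
# the rest are grouped by plugin key. dateutil is assumed to be available,
# as it is in this plugin.
import dateutil.parser

def group_notices(notice_data):
    notices = dict()
    for notice in notice_data:
        if not all(k in notice for k in ("plugin", "text", "date")):
            continue
        try:
            notice["timestamp"] = dateutil.parser.parse(notice["date"]).timetuple()
        except (ValueError, OverflowError):
            continue  # ignore notices whose date cannot be parsed
        notices.setdefault(notice["plugin"], []).append(notice)
    return notices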
def _is_octoprint_compatible(self, octoprint_version, compatibility_entries):
"""
Tests if the current ``octoprint_version`` is compatible with any of the provided ``compatibility_entries``.
"""
for octo_compat in compatibility_entries:
try:
if not any(octo_compat.startswith(c) for c in ("<", "<=", "!=", "==", ">=", ">", "~=", "===")):
octo_compat = ">={}".format(octo_compat)
s = next(pkg_resources.parse_requirements("OctoPrint" + octo_compat))
if octoprint_version in s:
break
except:
self._logger.exception("Something is wrong with this compatibility string for OctoPrint: {}".format(octo_compat))
else:
return False
return True
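# Self-contained sketch of the single-entry check inside the loop above:
# a bare version such as "1.3.0" is treated as ">=1.3.0" and then tested
# via pkg_resources. The distribution name "OctoPrint" mirrors the code
# above; the sample versions are demo values.
import pkg_resources

def octoprint_matches(version_string, compat_entry):
    if not any(compat_entry.startswith(op)
               for op in ("<", "<=", "!=", "==", ">=", ">", "~=", "===")):
        compat_entry = ">={0}".format(compat_entry)
    requirement = next(pkg_resources.parse_requirements("OctoPrint" + compat_entry))
    return pkg_resources.parse_version(version_string) in requirement
# octoprint_matches("1.3.2", "1.3.0") returns True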
@staticmethod
def _is_os_compatible(current_os, compatibility_entries):
"""
Tests if ``current_os`` or ``sys.platform`` is blacklisted or whitelisted in ``compatibility_entries``.
"""
if len(compatibility_entries) == 0:
# shortcut - no compatibility info means we are compatible
return True
negative_entries = map(lambda x: x[1:], filter(lambda x: x.startswith("!"), compatibility_entries))
positive_entries = filter(lambda x: not x.startswith("!"), compatibility_entries)
negative_match = False
if negative_entries:
# check if we are blacklisted
negative_match = current_os in negative_entries or any(map(lambda x: sys.platform.startswith(x), negative_entries))
positive_match = True
if positive_entries:
# check if we are whitelisted
positive_match = current_os in positive_entries or any(map(lambda x: sys.platform.startswith(x), positive_entries))
return positive_match and not negative_match
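# Runnable mini version of _is_os_compatible above: entries prefixed with
# "!" blacklist a platform, all other entries whitelist one. The entry
# values follow the same convention as the method, i.e. mapped OS names or
# sys.platform prefixes; the inputs in the usage note are illustrative.
import sys

def os_compatible(current_os, entries):
    if not entries:
        return True  # no compatibility info means compatible
    negative = [e[1:] for e in entries if e.startswith("!")]
    positive = [e for e in entries if not e.startswith("!")]
    negative_match = bool(negative) and (current_os in negative
        or any(sys.platform.startswith(e) for e in negative))
    positive_match = not positive or current_os in positive \
        or any(sys.platform.startswith(e) for e in positive)
    return positive_match and not negative_match
# os_compatible("linux", ["!windows"]) returns True on a Linux system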
@classmethod
def _get_os(cls):
for identifier, platforms in cls.OPERATING_SYSTEMS.items():
if (callable(platforms) and platforms(sys.platform)) or (isinstance(platforms, list) and sys.platform in platforms):
return identifier
else:
return "unmapped"
def _get_octoprint_version_string(self):
return VERSION
def _get_octoprint_version(self, base=False):
octoprint_version_string = self._get_octoprint_version_string()
if "-" in octoprint_version_string:
octoprint_version_string = octoprint_version_string[:octoprint_version_string.find("-")]
octoprint_version = pkg_resources.parse_version(octoprint_version_string)
# A leading v is common in github release tags and old setuptools doesn't remove it. While OctoPrint's
# versions should never contain such a prefix, we'll make sure things behave the same
# regardless of the setuptools version anyhow.
if octoprint_version and isinstance(octoprint_version, tuple) and octoprint_version[0].lower() == "*v":
octoprint_version = octoprint_version[1:]
if base:
if isinstance(octoprint_version, tuple):
# old setuptools
base_version = []
for part in octoprint_version:
if part.startswith("*"):
break
base_version.append(part)
base_version.append("*final")
octoprint_version = tuple(base_version)
else:
# new setuptools
octoprint_version = pkg_resources.parse_version(octoprint_version.base_version)
return octoprint_version
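# Short sketch of the normalization above, for modern setuptools only: strip
# a local suffix after "-", drop a stray leading "v" as found in github
# release tags, then reduce to the release part via base_version. The
# sample version in the usage note is illustrative.
import pkg_resources

def base_octoprint_version(version_string):
    if "-" in version_string:
        version_string = version_string[:version_string.find("-")]
    if version_string.lower().startswith("v"):
        version_string = version_string[1:]
    return pkg_resources.parse_version(
        pkg_resources.parse_version(version_string).base_version)
# base_octoprint_version("v1.3.0rc1-dev") yields the version 1.3.0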
@property
def _reconnect_hooks(self):
reconnect_hooks = self.__class__.RECONNECT_HOOKS
reconnect_hook_provider_hooks = self._plugin_manager.get_hooks("octoprint.plugin.pluginmanager.reconnect_hooks")
for name, hook in reconnect_hook_provider_hooks.items():
try:
result = hook()
if isinstance(result, (list, tuple)):
reconnect_hooks.extend(filter(lambda x: isinstance(x, basestring), result))
except:
self._logger.exception("Error while retrieving additional hooks for which a "
"reconnect is required from plugin {name}".format(**locals()))
return reconnect_hooks
def _get_plugins(self):
plugins = self._plugin_manager.plugins
hidden = self._settings.get(["hidden"])
result = []
for key, plugin in plugins.items():
if key in hidden:
continue
result.append(self._to_external_plugin(plugin))
return result
def _to_external_plugin(self, plugin):
return dict(
key=plugin.key,
name=plugin.name,
description=plugin.description,
disabling_discouraged=gettext(plugin.disabling_discouraged) if plugin.disabling_discouraged else False,
author=plugin.author,
version=plugin.version,
url=plugin.url,
license=plugin.license,
bundled=plugin.bundled,
managable=plugin.managable,
enabled=plugin.enabled,
safe_mode_victim=getattr(plugin, "safe_mode_victim", False),
safe_mode_enabled=getattr(plugin, "safe_mode_enabled", False),
pending_enable=(not plugin.enabled and not getattr(plugin, "safe_mode_enabled", False) and plugin.key in self._pending_enable),
pending_disable=((plugin.enabled or getattr(plugin, "safe_mode_enabled", False)) and plugin.key in self._pending_disable),
pending_install=(self._plugin_manager.is_plugin_marked(plugin.key, "installed")),
pending_uninstall=(self._plugin_manager.is_plugin_marked(plugin.key, "uninstalled")),
origin=plugin.origin.type,
notifications=self._get_notifications(plugin)
)
def _get_notifications(self, plugin):
key = plugin.key
if not plugin.enabled:
return
if key not in self._notices:
return
octoprint_version = self._get_octoprint_version(base=True)
plugin_notifications = self._notices.get(key, [])
def filter_relevant(notification):
return "text" in notification and "date" in notification and \
("versions" not in notification or plugin.version in notification["versions"]) and \
("octoversions" not in notification or self._is_octoprint_compatible(octoprint_version, notification["octoversions"]))
def map_notification(notification):
return self._to_external_notification(key, notification)
return filter(lambda x: x is not None,
map(map_notification,
filter(filter_relevant,
plugin_notifications)))
def _to_external_notification(self, key, notification):
return dict(key=key,
date=time.mktime(notification["timestamp"]),
text=notification["text"],
link=notification.get("link"),
versions=notification.get("versions", []),
important=notification.get("important", False))
__plugin_name__ = "Plugin Manager"
__plugin_author__ = "Gina Häußge"
__plugin_url__ = "http://docs.octoprint.org/en/master/bundledplugins/pluginmanager.html"
__plugin_description__ = "Allows installing and managing OctoPrint plugins"
__plugin_license__ = "AGPLv3"
def __plugin_load__():
global __plugin_implementation__
__plugin_implementation__ = PluginManagerPlugin()
global __plugin_hooks__
__plugin_hooks__ = {
"octoprint.server.http.bodysize": __plugin_implementation__.increase_upload_bodysize,
"octoprint.ui.web.templatetypes": __plugin_implementation__.get_template_types
}
| agpl-3.0 | -7,321,240,752,470,460,000 | 38.024486 | 209 | 0.68723 | false |
lovetox/gajim | src/config.py | 1 | 183423 | # -*- coding:utf-8 -*-
## src/config.py
##
## Copyright (C) 2003-2005 Vincent Hanquez <tab AT snarc.org>
## Copyright (C) 2003-2014 Yann Leboulanger <asterix AT lagaule.org>
## Copyright (C) 2005 Alex Podaras <bigpod AT gmail.com>
## Stéphan Kochen <stephan AT kochen.nl>
## Copyright (C) 2005-2006 Dimitur Kirov <dkirov AT gmail.com>
## Nikos Kouremenos <kourem AT gmail.com>
## Copyright (C) 2006 Junglecow J <junglecow AT gmail.com>
## Copyright (C) 2006-2007 Travis Shirk <travis AT pobox.com>
## Stefan Bethge <stefan AT lanpartei.de>
## Copyright (C) 2006-2008 Jean-Marie Traissard <jim AT lapin.org>
## Copyright (C) 2007 James Newton <redshodan AT gmail.com>
## Julien Pivotto <roidelapluie AT gmail.com>
## Copyright (C) 2007-2008 Stephan Erb <steve-e AT h3c.de>
## Copyright (C) 2008 Jonathan Schleifer <js-gajim AT webkeks.org>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import Pango
from gi.repository import GObject
from gi.repository import GLib
import os
import common.config
import common.sleepy
from common.i18n import Q_
import gtkgui_helpers
import dialogs
import cell_renderer_image
import message_control
from chat_control_base import ChatControlBase
import dataforms_widget
import profile_window
try:
import gtkspell
HAS_GTK_SPELL = True
except (ImportError, ValueError):
HAS_GTK_SPELL = False
from common import helpers
from common import gajim
from common import connection
from common import passwords
from common.zeroconf import connection_zeroconf
from common import dataforms
from common import gpg
from common import ged
try:
from common.multimedia_helpers import AudioInputManager, AudioOutputManager
from common.multimedia_helpers import VideoInputManager, VideoOutputManager
HAS_GST = True
except (ImportError, ValueError):
HAS_GST = False
from common.exceptions import GajimGeneralException
from common.connection_handlers_events import InformationEvent
#---------- PreferencesWindow class -------------#
class PreferencesWindow:
"""
Class for Preferences window
"""
def on_preferences_window_destroy(self, widget):
"""
Close window
"""
del gajim.interface.instances['preferences']
def on_close_button_clicked(self, widget):
self.window.destroy()
def __init__(self):
"""
Initialize Preferences window
"""
self.xml = gtkgui_helpers.get_gtk_builder('preferences_window.ui')
self.window = self.xml.get_object('preferences_window')
self.window.set_transient_for(gajim.interface.roster.window)
self.notebook = self.xml.get_object('preferences_notebook')
self.one_window_type_combobox = self.xml.get_object(
'one_window_type_combobox')
self.iconset_combobox = self.xml.get_object('iconset_combobox')
self.notify_on_signin_checkbutton = self.xml.get_object(
'notify_on_signin_checkbutton')
self.notify_on_signout_checkbutton = self.xml.get_object(
'notify_on_signout_checkbutton')
self.auto_popup_away_checkbutton = self.xml.get_object(
'auto_popup_away_checkbutton')
self.auto_popup_chat_opened_checkbutton = self.xml.get_object(
'auto_popup_chat_opened_checkbutton')
self.sound_dnd_checkbutton = self.xml.get_object('sound_dnd_checkbutton')
self.auto_away_checkbutton = self.xml.get_object('auto_away_checkbutton')
self.auto_away_time_spinbutton = self.xml.get_object(
'auto_away_time_spinbutton')
self.auto_away_message_entry = self.xml.get_object(
'auto_away_message_entry')
self.auto_xa_checkbutton = self.xml.get_object('auto_xa_checkbutton')
self.auto_xa_time_spinbutton = self.xml.get_object(
'auto_xa_time_spinbutton')
self.auto_xa_message_entry = self.xml.get_object('auto_xa_message_entry')
### General tab ###
# Display avatars in roster
st = gajim.config.get('show_avatars_in_roster')
self.xml.get_object('show_avatars_in_roster_checkbutton'). \
set_active(st)
# Display status msg under contact name in roster
st = gajim.config.get('show_status_msgs_in_roster')
self.xml.get_object('show_status_msgs_in_roster_checkbutton'). \
set_active(st)
# Display PEP in roster
st1 = gajim.config.get('show_mood_in_roster')
st2 = gajim.config.get('show_activity_in_roster')
st3 = gajim.config.get('show_tunes_in_roster')
st4 = gajim.config.get('show_location_in_roster')
w = self.xml.get_object('show_pep_in_roster_checkbutton')
if st1 == st2 == st3 == st4:
w.set_active(st1)
else:
w.set_inconsistent(True)
# Sort contacts by show
st = gajim.config.get('sort_by_show_in_roster')
self.xml.get_object('sort_by_show_in_roster_checkbutton').set_active(st)
st = gajim.config.get('sort_by_show_in_muc')
self.xml.get_object('sort_by_show_in_muc_checkbutton').set_active(st)
# emoticons
emoticons_combobox = self.xml.get_object('emoticons_combobox')
emoticons_list = os.listdir(os.path.join(gajim.DATA_DIR, 'emoticons'))
# user themes
if os.path.isdir(gajim.MY_EMOTS_PATH):
emoticons_list += os.listdir(gajim.MY_EMOTS_PATH)
emoticons_list.sort()
renderer_text = Gtk.CellRendererText()
emoticons_combobox.pack_start(renderer_text, True)
emoticons_combobox.add_attribute(renderer_text, 'text', 0)
model = Gtk.ListStore(str)
emoticons_combobox.set_model(model)
l = [_('Disabled')]
for dir_ in emoticons_list:
if not os.path.isdir(os.path.join(gajim.DATA_DIR, 'emoticons', dir_)) \
and not os.path.isdir(os.path.join(gajim.MY_EMOTS_PATH, dir_)) :
continue
if dir_ != '.svn':
l.append(dir_)
for i in range(len(l)):
model.append([l[i]])
if gajim.config.get('emoticons_theme') == l[i]:
emoticons_combobox.set_active(i)
if not gajim.config.get('emoticons_theme'):
emoticons_combobox.set_active(0)
# Set default for single window type
choices = common.config.opt_one_window_types
type_ = gajim.config.get('one_message_window')
if type_ in choices:
self.one_window_type_combobox.set_active(choices.index(type_))
else:
self.one_window_type_combobox.set_active(0)
# Show roster on startup
show_roster_combobox = self.xml.get_object('show_roster_on_startup')
choices = common.config.opt_show_roster_on_startup
type_ = gajim.config.get('show_roster_on_startup')
if type_ in choices:
show_roster_combobox.set_active(choices.index(type_))
else:
show_roster_combobox.set_active(0)
# Compact View
st = gajim.config.get('compact_view')
self.xml.get_object('compact_view_checkbutton').set_active(st)
# Ignore XHTML
st = gajim.config.get('ignore_incoming_xhtml')
self.xml.get_object('xhtml_checkbutton').set_active(st)
# use speller
if HAS_GTK_SPELL:
st = gajim.config.get('use_speller')
self.xml.get_object('speller_checkbutton').set_active(st)
else:
self.xml.get_object('speller_checkbutton').set_sensitive(False)
# XEP-0184 positive ack
st = gajim.config.get('positive_184_ack')
self.xml.get_object('positive_184_ack_checkbutton').set_active(st)
# Show avatar in tabs
st = gajim.config.get('show_avatar_in_tabs')
self.xml.get_object('show_avatar_in_tabs_checkbutton').set_active(st)
### Style tab ###
# Themes
theme_combobox = self.xml.get_object('theme_combobox')
cell = Gtk.CellRendererText()
theme_combobox.pack_start(cell, True)
theme_combobox.add_attribute(cell, 'text', 0)
self.update_theme_list()
# iconset
iconsets_list = os.listdir(os.path.join(gajim.DATA_DIR, 'iconsets'))
if os.path.isdir(gajim.MY_ICONSETS_PATH):
iconsets_list += os.listdir(gajim.MY_ICONSETS_PATH)
# new model, image in 0, string in 1
model = Gtk.ListStore(Gtk.Image, str)
renderer_image = cell_renderer_image.CellRendererImage(0, 0)
renderer_text = Gtk.CellRendererText()
renderer_text.set_property('xpad', 5)
self.iconset_combobox.pack_start(renderer_image, False)
self.iconset_combobox.pack_start(renderer_text, True)
self.iconset_combobox.add_attribute(renderer_text, 'text', 1)
self.iconset_combobox.add_attribute(renderer_image, 'image', 0)
self.iconset_combobox.set_model(model)
l = []
for dir_ in iconsets_list:
if not os.path.isdir(os.path.join(gajim.DATA_DIR, 'iconsets', dir_)) \
and not os.path.isdir(os.path.join(gajim.MY_ICONSETS_PATH, dir_)):
continue
if dir_ != '.svn' and dir_ != 'transports':
l.append(dir_)
if len(l) == 0:
l.append(' ')
for i in range(len(l)):
preview = Gtk.Image()
files = []
files.append(os.path.join(helpers.get_iconset_path(l[i]), '16x16',
'online.png'))
files.append(os.path.join(helpers.get_iconset_path(l[i]), '16x16',
'online.gif'))
for file_ in files:
if os.path.exists(file_):
preview.set_from_file(file_)
model.append([preview, l[i]])
if gajim.config.get('iconset') == l[i]:
self.iconset_combobox.set_active(i)
# Use transports iconsets
st = gajim.config.get('use_transports_iconsets')
self.xml.get_object('transports_iconsets_checkbutton').set_active(st)
# Color widgets
self.draw_color_widgets()
# Font for messages
font = gajim.config.get('conversation_font')
# try to set default font for the current desktop env
fontbutton = self.xml.get_object('conversation_fontbutton')
if font == '':
fontbutton.set_sensitive(False)
self.xml.get_object('default_chat_font').set_active(True)
else:
fontbutton.set_font_name(font)
### Personal Events tab ###
# outgoing send chat state notifications
st = gajim.config.get('outgoing_chat_state_notifications')
combo = self.xml.get_object('outgoing_chat_states_combobox')
if st == 'all':
combo.set_active(0)
elif st == 'composing_only':
combo.set_active(1)
else: # disabled
combo.set_active(2)
# displayed send chat state notifications
st = gajim.config.get('displayed_chat_state_notifications')
combo = self.xml.get_object('displayed_chat_states_combobox')
if st == 'all':
combo.set_active(0)
elif st == 'composing_only':
combo.set_active(1)
else: # disabled
combo.set_active(2)
### Notifications tab ###
# On new event
on_event_combobox = self.xml.get_object('on_event_combobox')
if gajim.config.get('autopopup'):
on_event_combobox.set_active(0)
elif gajim.config.get('notify_on_new_message'):
on_event_combobox.set_active(1)
else:
on_event_combobox.set_active(2)
# notify on online statuses
st = gajim.config.get('notify_on_signin')
self.notify_on_signin_checkbutton.set_active(st)
# notify on offline statuses
st = gajim.config.get('notify_on_signout')
self.notify_on_signout_checkbutton.set_active(st)
# autopopupaway
st = gajim.config.get('autopopupaway')
self.auto_popup_away_checkbutton.set_active(st)
# autopopup_chat_opened
st = gajim.config.get('autopopup_chat_opened')
self.auto_popup_chat_opened_checkbutton.set_active(st)
# sounddnd
st = gajim.config.get('sounddnd')
self.sound_dnd_checkbutton.set_active(st)
# Systray
systray_combobox = self.xml.get_object('systray_combobox')
if gajim.config.get('trayicon') == 'never':
systray_combobox.set_active(0)
elif gajim.config.get('trayicon') == 'on_event':
systray_combobox.set_active(1)
else:
systray_combobox.set_active(2)
# sounds
if gajim.config.get('sounds_on'):
self.xml.get_object('play_sounds_checkbutton').set_active(True)
else:
self.xml.get_object('manage_sounds_button').set_sensitive(False)
# Notify user of new gmail e-mail messages,
# make checkbox sensitive if user has a gtalk account
frame_gmail = self.xml.get_object('frame_gmail')
notify_gmail_checkbutton = self.xml.get_object('notify_gmail_checkbutton')
notify_gmail_extra_checkbutton = self.xml.get_object(
'notify_gmail_extra_checkbutton')
for account in gajim.config.get_per('accounts'):
jid = gajim.get_jid_from_account(account)
if gajim.get_server_from_jid(jid) in gajim.gmail_domains:
frame_gmail.set_sensitive(True)
st = gajim.config.get('notify_on_new_gmail_email')
notify_gmail_checkbutton.set_active(st)
st = gajim.config.get('notify_on_new_gmail_email_extra')
notify_gmail_extra_checkbutton.set_active(st)
break
#### Status tab ###
# Autoaway
st = gajim.config.get('autoaway')
self.auto_away_checkbutton.set_active(st)
# Autoawaytime
st = gajim.config.get('autoawaytime')
self.auto_away_time_spinbutton.set_value(st)
self.auto_away_time_spinbutton.set_sensitive(gajim.config.get('autoaway'))
# autoaway message
st = gajim.config.get('autoaway_message')
self.auto_away_message_entry.set_text(st)
self.auto_away_message_entry.set_sensitive(gajim.config.get('autoaway'))
# Autoxa
st = gajim.config.get('autoxa')
self.auto_xa_checkbutton.set_active(st)
# Autoxatime
st = gajim.config.get('autoxatime')
self.auto_xa_time_spinbutton.set_value(st)
self.auto_xa_time_spinbutton.set_sensitive(gajim.config.get('autoxa'))
# autoxa message
st = gajim.config.get('autoxa_message')
self.auto_xa_message_entry.set_text(st)
self.auto_xa_message_entry.set_sensitive(gajim.config.get('autoxa'))
from common import sleepy
if not sleepy.SUPPORTED:
self.xml.get_object('autoaway_table').set_sensitive(False)
# ask_status when online / offline
st = gajim.config.get('ask_online_status')
self.xml.get_object('prompt_online_status_message_checkbutton').\
set_active(st)
st = gajim.config.get('ask_offline_status')
self.xml.get_object('prompt_offline_status_message_checkbutton').\
set_active(st)
# Default Status messages
self.default_msg_tree = self.xml.get_object('default_msg_treeview')
self.fill_default_msg_treeview()
# Status messages
self.msg_tree = self.xml.get_object('msg_treeview')
renderer = Gtk.CellRendererText()
renderer.connect('edited', self.on_msg_cell_edited)
renderer.set_property('editable', True)
col = Gtk.TreeViewColumn('name', renderer, text=0)
self.msg_tree.append_column(col)
self.fill_msg_treeview()
buf = self.xml.get_object('msg_textview').get_buffer()
buf.connect('end-user-action', self.on_msg_textview_changed)
### Audio / Video tab ###
def create_av_combobox(opt_name, device_dict, config_name=None,
key=None):
combobox = self.xml.get_object(opt_name + '_combobox')
cell = Gtk.CellRendererText()
cell.set_property('ellipsize', Pango.EllipsizeMode.END)
cell.set_property('ellipsize-set', True)
combobox.pack_start(cell, True)
combobox.add_attribute(cell, 'text', 0)
model = Gtk.ListStore(str, str)
combobox.set_model(model)
if config_name:
config = gajim.config.get(config_name)
else:
config = gajim.config.get(opt_name + '_device')
for index, (name, value) in enumerate(sorted(device_dict.items(),
key=key)):
model.append((name, value))
if config == value:
combobox.set_active(index)
if HAS_GST:
create_av_combobox('audio_input', AudioInputManager().get_devices())
create_av_combobox('audio_output', AudioOutputManager().get_devices(
))
create_av_combobox('video_input', VideoInputManager().get_devices())
create_av_combobox('video_output', VideoOutputManager().get_devices(
))
create_av_combobox('video_framerate', {_('Default'): '',
'15fps': '15/1', '10fps': '10/1', '5fps': '5/1',
'2.5fps': '5/2'}, 'video_framerate', key=lambda x: -1 if \
not x[1] else float(x[0][:-3]))
create_av_combobox('video_size', {_('Default'): '',
'800x600': '800x600', '640x480': '640x480',
'320x240': '320x240'}, 'video_size', key=lambda x: -1 if \
not x[1] else int(x[0][:3]))
st = gajim.config.get('video_see_self')
self.xml.get_object('video_see_self_checkbutton').set_active(st)
else:
for opt_name in ('audio_input', 'audio_output', 'video_input',
'video_output', 'video_framerate', 'video_size'):
combobox = self.xml.get_object(opt_name + '_combobox')
combobox.set_sensitive(False)
# STUN
cb = self.xml.get_object('stun_checkbutton')
st = gajim.config.get('use_stun_server')
cb.set_active(st)
entry = self.xml.get_object('stun_server_entry')
entry.set_text(gajim.config.get('stun_server'))
if not st:
entry.set_sensitive(False)
### Advanced tab ###
# open links with
if os.name == 'nt':
applications_frame = self.xml.get_object('applications_frame')
applications_frame.set_no_show_all(True)
applications_frame.hide()
else:
self.applications_combobox = self.xml.get_object(
'applications_combobox')
self.xml.get_object('custom_apps_frame').hide()
self.xml.get_object('custom_apps_frame').set_no_show_all(True)
if gajim.config.get('autodetect_browser_mailer'):
self.applications_combobox.set_active(0)
else:
self.applications_combobox.set_active(1)
self.xml.get_object('custom_apps_frame').show()
self.xml.get_object('custom_browser_entry').set_text(
gajim.config.get('custombrowser'))
self.xml.get_object('custom_mail_client_entry').set_text(
gajim.config.get('custommailapp'))
self.xml.get_object('custom_file_manager_entry').set_text(
gajim.config.get('custom_file_manager'))
# log status changes of contacts
st = gajim.config.get('log_contact_status_changes')
self.xml.get_object('log_show_changes_checkbutton').set_active(st)
# log encrypted chat sessions
w = self.xml.get_object('log_encrypted_chats_checkbutton')
st = self.get_per_account_option('log_encrypted_sessions')
if st == 'mixed':
w.set_inconsistent(True)
else:
w.set_active(st)
# send os info
w = self.xml.get_object('send_os_info_checkbutton')
st = self.get_per_account_option('send_os_info')
if st == 'mixed':
w.set_inconsistent(True)
else:
w.set_active(st)
# send absolute time info
w = self.xml.get_object('send_time_info_checkbutton')
st = self.get_per_account_option('send_time_info')
if st == 'mixed':
w.set_inconsistent(True)
else:
w.set_active(st)
# send idle time
w = self.xml.get_object('send_idle_time_checkbutton')
st = self.get_per_account_option('send_idle_time')
if st == 'mixed':
w.set_inconsistent(True)
else:
w.set_active(st)
self.update_proxy_list()
# Ignore messages from unknown contacts
w = self.xml.get_object('ignore_events_from_unknown_contacts_checkbutton')
st = self.get_per_account_option('ignore_unknown_contacts')
if st == 'mixed':
w.set_inconsistent(True)
else:
w.set_active(st)
self.xml.connect_signals(self)
self.msg_tree.get_model().connect('row-changed',
self.on_msg_treemodel_row_changed)
self.msg_tree.get_model().connect('row-deleted',
self.on_msg_treemodel_row_deleted)
self.default_msg_tree.get_model().connect('row-changed',
self.on_default_msg_treemodel_row_changed)
self.theme_preferences = None
self.sounds_preferences = None
self.notebook.set_current_page(0)
self.xml.get_object('close_button').grab_focus()
self.window.show_all()
gtkgui_helpers.possibly_move_window_in_current_desktop(self.window)
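# The constructor above wires up several combo boxes by hand; this is the
# bare pattern it repeats, shown standalone for clarity. The function and
# its arguments are placeholders, not objects from the .ui file.
def build_text_combobox(choices, active_value):
    model = Gtk.ListStore(str)
    combobox = Gtk.ComboBox.new_with_model(model)
    cell = Gtk.CellRendererText()
    combobox.pack_start(cell, True)
    combobox.add_attribute(cell, 'text', 0)
    for i, choice in enumerate(choices):
        model.append([choice])
        if choice == active_value:
            combobox.set_active(i)
    return combobox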
def on_preferences_notebook_switch_page(self, widget, page, page_num):
GLib.idle_add(self.xml.get_object('close_button').grab_focus)
def on_preferences_window_key_press_event(self, widget, event):
if event.keyval == Gdk.KEY_Escape:
self.window.hide()
def get_per_account_option(self, opt):
"""
Return the value of the option opt if it is the same in all accounts,
else return "mixed"
"""
if len(gajim.connections) == 0:
# a non-existent key returns the default value
return gajim.config.get_per('accounts', '__default__', opt)
val = None
for account in gajim.connections:
v = gajim.config.get_per('accounts', account, opt)
if val is None:
val = v
elif val != v:
return 'mixed'
return val
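# Generic form of the aggregation above: one boolean per account collapses
# to True, False or 'mixed', which the preferences UI then renders as an
# inconsistent checkbutton. The input mapping is an illustrative stand-in
# for gajim's per-account config.
def aggregate_option(per_account_values, default=False):
    values = set(per_account_values.values())
    if not values:
        return default  # no accounts configured, fall back to the default
    return values.pop() if len(values) == 1 else 'mixed'
# aggregate_option({'acc1': True, 'acc2': False}) returns 'mixed'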
def on_checkbutton_toggled(self, widget, config_name,
change_sensitivity_widgets=None):
gajim.config.set(config_name, widget.get_active())
if change_sensitivity_widgets:
for w in change_sensitivity_widgets:
w.set_sensitive(widget.get_active())
def on_per_account_checkbutton_toggled(self, widget, config_name,
change_sensitivity_widgets=None):
for account in gajim.connections:
gajim.config.set_per('accounts', account, config_name,
widget.get_active())
if change_sensitivity_widgets:
for w in change_sensitivity_widgets:
w.set_sensitive(widget.get_active())
def _get_all_controls(self):
for ctrl in gajim.interface.msg_win_mgr.get_controls():
yield ctrl
for account in gajim.connections:
for ctrl in gajim.interface.minimized_controls[account].values():
yield ctrl
def _get_all_muc_controls(self):
for ctrl in gajim.interface.msg_win_mgr.get_controls(
message_control.TYPE_GC):
yield ctrl
for account in gajim.connections:
for ctrl in gajim.interface.minimized_controls[account].values():
yield ctrl
def on_sort_by_show_in_roster_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'sort_by_show_in_roster')
gajim.interface.roster.setup_and_draw_roster()
def on_sort_by_show_in_muc_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'sort_by_show_in_muc')
# Redraw groupchats
for ctrl in self._get_all_muc_controls():
ctrl.draw_roster()
def on_show_avatars_in_roster_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'show_avatars_in_roster')
gajim.interface.roster.setup_and_draw_roster()
# Redraw groupchats (in an ugly way)
for ctrl in self._get_all_muc_controls():
ctrl.draw_roster()
def on_show_status_msgs_in_roster_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'show_status_msgs_in_roster')
gajim.interface.roster.setup_and_draw_roster()
for ctrl in self._get_all_muc_controls():
ctrl.update_ui()
def on_show_pep_in_roster_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'show_mood_in_roster')
self.on_checkbutton_toggled(widget, 'show_activity_in_roster')
self.on_checkbutton_toggled(widget, 'show_tunes_in_roster')
self.on_checkbutton_toggled(widget, 'show_location_in_roster')
gajim.interface.roster.setup_and_draw_roster()
def on_emoticons_combobox_changed(self, widget):
active = widget.get_active()
model = widget.get_model()
emot_theme = model[active][0]
if emot_theme == _('Disabled'):
gajim.config.set('emoticons_theme', '')
else:
gajim.config.set('emoticons_theme', emot_theme)
gajim.interface.init_emoticons(need_reload = True)
gajim.interface.make_regexps()
self.toggle_emoticons()
def toggle_emoticons(self):
"""
Update emoticons state in opened chat windows
"""
for ctrl in self._get_all_controls():
ctrl.toggle_emoticons()
def on_one_window_type_combo_changed(self, widget):
active = widget.get_active()
config_type = common.config.opt_one_window_types[active]
gajim.config.set('one_message_window', config_type)
gajim.interface.msg_win_mgr.reconfig()
def on_show_roster_on_startup_changed(self, widget):
active = widget.get_active()
config_type = common.config.opt_show_roster_on_startup[active]
gajim.config.set('show_roster_on_startup', config_type)
def on_compact_view_checkbutton_toggled(self, widget):
active = widget.get_active()
for ctrl in self._get_all_controls():
ctrl.chat_buttons_set_visible(active)
gajim.config.set('compact_view', active)
def on_xhtml_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'ignore_incoming_xhtml')
helpers.update_optional_features()
def apply_speller(self):
for ctrl in self._get_all_controls():
if isinstance(ctrl, ChatControlBase):
try:
spell_obj = gtkspell.get_from_text_view(ctrl.msg_textview)
except (TypeError, RuntimeError, OSError):
spell_obj = None
if not spell_obj:
ctrl.set_speller()
def remove_speller(self):
for ctrl in self._get_all_controls():
if isinstance(ctrl, ChatControlBase):
try:
spell_obj = gtkspell.get_from_text_view(ctrl.msg_textview)
except (TypeError, RuntimeError):
spell_obj = None
if spell_obj:
spell_obj.detach()
def on_speller_checkbutton_toggled(self, widget):
active = widget.get_active()
gajim.config.set('use_speller', active)
if active:
lang = gajim.config.get('speller_language')
if not lang:
lang = gajim.LANG
tv = Gtk.TextView()
try:
gtkspell.Spell(tv, lang)
except (TypeError, RuntimeError, OSError):
dialogs.ErrorDialog(
_('Dictionary for lang %s not available') % lang,
_('You have to install %s dictionary to use spellchecking, or '
'choose another language by setting the speller_language option.'
) % lang)
gajim.config.set('use_speller', False)
widget.set_active(False)
else:
gajim.config.set('speller_language', lang)
self.apply_speller()
else:
self.remove_speller()
def on_positive_184_ack_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'positive_184_ack')
def on_show_avatar_in_tabs_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'show_avatar_in_tabs')
def on_theme_combobox_changed(self, widget):
model = widget.get_model()
active = widget.get_active()
config_theme = model[active][0].replace(' ', '_')
gajim.config.set('roster_theme', config_theme)
# begin repainting themed widgets throughout
gajim.interface.roster.repaint_themed_widgets()
gajim.interface.roster.change_roster_style(None)
def update_theme_list(self):
theme_combobox = self.xml.get_object('theme_combobox')
model = Gtk.ListStore(str)
theme_combobox.set_model(model)
i = 0
for config_theme in gajim.config.get_per('themes'):
theme = config_theme.replace('_', ' ')
model.append([theme])
if gajim.config.get('roster_theme') == config_theme:
theme_combobox.set_active(i)
i += 1
def on_manage_theme_button_clicked(self, widget):
if self.theme_preferences is None:
self.theme_preferences = dialogs.GajimThemesWindow()
else:
self.theme_preferences.window.present()
self.theme_preferences.select_active_theme()
def on_iconset_combobox_changed(self, widget):
model = widget.get_model()
active = widget.get_active()
icon_string = model[active][1]
gajim.config.set('iconset', icon_string)
gtkgui_helpers.reload_jabber_state_images()
def on_transports_iconsets_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'use_transports_iconsets')
gtkgui_helpers.reload_jabber_state_images()
def on_outgoing_chat_states_combobox_changed(self, widget):
active = widget.get_active()
old_value = gajim.config.get('outgoing_chat_state_notifications')
if active == 0: # all
gajim.config.set('outgoing_chat_state_notifications', 'all')
elif active == 1: # only composing
gajim.config.set('outgoing_chat_state_notifications', 'composing_only')
else: # disabled
gajim.config.set('outgoing_chat_state_notifications', 'disabled')
new_value = gajim.config.get('outgoing_chat_state_notifications')
if 'disabled' in (old_value, new_value):
# we changed from disabled to sth else or vice versa
helpers.update_optional_features()
def on_displayed_chat_states_combobox_changed(self, widget):
active = widget.get_active()
if active == 0: # all
gajim.config.set('displayed_chat_state_notifications', 'all')
elif active == 1: # only composing
gajim.config.set('displayed_chat_state_notifications',
'composing_only')
else: # disabled
gajim.config.set('displayed_chat_state_notifications', 'disabled')
def on_ignore_events_from_unknown_contacts_checkbutton_toggled(self, widget):
widget.set_inconsistent(False)
self.on_per_account_checkbutton_toggled(widget, 'ignore_unknown_contacts')
def on_on_event_combobox_changed(self, widget):
active = widget.get_active()
if active == 0:
gajim.config.set('autopopup', True)
gajim.config.set('notify_on_new_message', False)
elif active == 1:
gajim.config.set('autopopup', False)
gajim.config.set('notify_on_new_message', True)
else:
gajim.config.set('autopopup', False)
gajim.config.set('notify_on_new_message', False)
def on_notify_on_signin_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'notify_on_signin')
def on_notify_on_signout_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'notify_on_signout')
def on_auto_popup_away_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'autopopupaway')
def on_auto_popup_chat_opened_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'autopopup_chat_opened')
def on_sound_dnd_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'sounddnd')
def on_systray_combobox_changed(self, widget):
active = widget.get_active()
if active == 0:
gajim.config.set('trayicon', 'never')
gajim.interface.systray_enabled = False
gajim.interface.systray.hide_icon()
elif active == 1:
gajim.config.set('trayicon', 'on_event')
gajim.interface.systray_enabled = True
gajim.interface.systray.show_icon()
else:
gajim.config.set('trayicon', 'always')
gajim.interface.systray_enabled = True
gajim.interface.systray.show_icon()
def on_play_sounds_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'sounds_on',
[self.xml.get_object('manage_sounds_button')])
def on_manage_sounds_button_clicked(self, widget):
if self.sounds_preferences is None:
self.sounds_preferences = ManageSoundsWindow()
else:
self.sounds_preferences.window.present()
def update_text_tags(self):
"""
Update color tags in opened chat windows
"""
for ctrl in self._get_all_controls():
ctrl.update_tags()
def on_preference_widget_color_set(self, widget, text):
color = widget.get_color()
color_string = gtkgui_helpers.make_color_string(color)
gajim.config.set(text, color_string)
self.update_text_tags()
def on_preference_widget_font_set(self, widget, text):
if widget:
font = widget.get_font_name()
else:
font = ''
gajim.config.set(text, font)
self.update_text_font()
def update_text_font(self):
"""
Update text font in opened chat windows
"""
for ctrl in self._get_all_controls():
ctrl.update_font()
def on_incoming_nick_colorbutton_color_set(self, widget):
self.on_preference_widget_color_set(widget, 'inmsgcolor')
def on_outgoing_nick_colorbutton_color_set(self, widget):
self.on_preference_widget_color_set(widget, 'outmsgcolor')
def on_incoming_msg_colorbutton_color_set(self, widget):
self.on_preference_widget_color_set(widget, 'inmsgtxtcolor')
def on_outgoing_msg_colorbutton_color_set(self, widget):
self.on_preference_widget_color_set(widget, 'outmsgtxtcolor')
def on_url_msg_colorbutton_color_set(self, widget):
self.on_preference_widget_color_set(widget, 'urlmsgcolor')
def on_status_msg_colorbutton_color_set(self, widget):
self.on_preference_widget_color_set(widget, 'statusmsgcolor')
def on_muc_highlight_colorbutton_color_set(self, widget):
self.on_preference_widget_color_set(widget, 'markedmsgcolor')
def on_conversation_fontbutton_font_set(self, widget):
self.on_preference_widget_font_set(widget, 'conversation_font')
def on_default_chat_font_toggled(self, widget):
font_widget = self.xml.get_object('conversation_fontbutton')
if widget.get_active():
font_widget.set_sensitive(False)
font_widget = None
else:
font_widget.set_sensitive(True)
self.on_preference_widget_font_set(font_widget, 'conversation_font')
def draw_color_widgets(self):
col_to_widget = {'inmsgcolor': 'incoming_nick_colorbutton',
'outmsgcolor': 'outgoing_nick_colorbutton',
'inmsgtxtcolor': ['incoming_msg_colorbutton',
'incoming_msg_checkbutton'],
'outmsgtxtcolor': ['outgoing_msg_colorbutton',
'outgoing_msg_checkbutton'],
'statusmsgcolor': 'status_msg_colorbutton',
'urlmsgcolor': 'url_msg_colorbutton',
'markedmsgcolor': 'muc_highlight_colorbutton'}
for c in col_to_widget:
col = gajim.config.get(c)
if col:
if isinstance(col_to_widget[c], list):
rgba = Gdk.RGBA()
rgba.parse(col)
self.xml.get_object(col_to_widget[c][0]).set_rgba(rgba)
self.xml.get_object(col_to_widget[c][0]).set_sensitive(True)
self.xml.get_object(col_to_widget[c][1]).set_active(True)
else:
rgba = Gdk.RGBA()
rgba.parse(col)
self.xml.get_object(col_to_widget[c]).set_rgba(rgba)
else:
rgba = Gdk.RGBA()
rgba.parse('#000000')
if isinstance(col_to_widget[c], list):
self.xml.get_object(col_to_widget[c][0]).set_rgba(rgba)
self.xml.get_object(col_to_widget[c][0]).set_sensitive(False)
self.xml.get_object(col_to_widget[c][1]).set_active(False)
else:
self.xml.get_object(col_to_widget[c]).set_rgba(rgba)
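# Helper-sized sketch of the color handling above: a stored "#rrggbb"
# string is parsed into a Gdk.RGBA, falling back to black when the option
# is unset. The helper name is a placeholder.
def rgba_from_config(color_string):
    rgba = Gdk.RGBA()
    rgba.parse(color_string if color_string else '#000000')
    return rgba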
def on_reset_colors_button_clicked(self, widget):
col_to_widget = {'inmsgcolor': 'incoming_nick_colorbutton',
'outmsgcolor': 'outgoing_nick_colorbutton',
'inmsgtxtcolor': 'incoming_msg_colorbutton',
'outmsgtxtcolor': 'outgoing_msg_colorbutton',
'statusmsgcolor': 'status_msg_colorbutton',
'urlmsgcolor': 'url_msg_colorbutton',
'markedmsgcolor': 'muc_highlight_colorbutton'}
for c in col_to_widget:
gajim.config.set(c, gajim.interface.default_colors[c])
self.draw_color_widgets()
self.update_text_tags()
def _set_color(self, state, widget_name, option):
"""
Set color value in prefs and update the UI
"""
if state:
color = self.xml.get_object(widget_name).get_color()
color_string = gtkgui_helpers.make_color_string(color)
else:
color_string = ''
gajim.config.set(option, color_string)
def on_incoming_msg_checkbutton_toggled(self, widget):
state = widget.get_active()
self.xml.get_object('incoming_msg_colorbutton').set_sensitive(state)
self._set_color(state, 'incoming_msg_colorbutton', 'inmsgtxtcolor')
def on_outgoing_msg_checkbutton_toggled(self, widget):
state = widget.get_active()
self.xml.get_object('outgoing_msg_colorbutton').set_sensitive(state)
self._set_color(state, 'outgoing_msg_colorbutton', 'outmsgtxtcolor')
def on_auto_away_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'autoaway',
[self.auto_away_time_spinbutton, self.auto_away_message_entry])
def on_auto_away_time_spinbutton_value_changed(self, widget):
aat = widget.get_value_as_int()
gajim.config.set('autoawaytime', aat)
gajim.interface.sleeper = common.sleepy.Sleepy(
gajim.config.get('autoawaytime') * 60,
gajim.config.get('autoxatime') * 60)
def on_auto_away_message_entry_changed(self, widget):
gajim.config.set('autoaway_message', widget.get_text())
def on_auto_xa_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'autoxa',
[self.auto_xa_time_spinbutton, self.auto_xa_message_entry])
def on_auto_xa_time_spinbutton_value_changed(self, widget):
axt = widget.get_value_as_int()
gajim.config.set('autoxatime', axt)
gajim.interface.sleeper = common.sleepy.Sleepy(
gajim.config.get('autoawaytime') * 60,
gajim.config.get('autoxatime') * 60)
def on_auto_xa_message_entry_changed(self, widget):
gajim.config.set('autoxa_message', widget.get_text())
def on_prompt_online_status_message_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'ask_online_status')
def on_prompt_offline_status_message_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'ask_offline_status')
def fill_default_msg_treeview(self):
model = self.default_msg_tree.get_model()
model.clear()
status = []
for status_ in gajim.config.get_per('defaultstatusmsg'):
status.append(status_)
status.sort()
for status_ in status:
msg = gajim.config.get_per('defaultstatusmsg', status_, 'message')
msg = helpers.from_one_line(msg)
enabled = gajim.config.get_per('defaultstatusmsg', status_, 'enabled')
iter_ = model.append()
uf_show = helpers.get_uf_show(status_)
model.set(iter_, 0, status_, 1, uf_show, 2, msg, 3, enabled)
def on_default_msg_cell_edited(self, cell, row, new_text):
model = self.default_msg_tree.get_model()
iter_ = model.get_iter_from_string(row)
model.set_value(iter_, 2, new_text)
def default_msg_toggled_cb(self, cell, path):
model = self.default_msg_tree.get_model()
model[path][3] = not model[path][3]
def on_default_msg_treemodel_row_changed(self, model, path, iter_):
status = model[iter_][0]
message = model[iter_][2]
message = helpers.to_one_line(message)
gajim.config.set_per('defaultstatusmsg', status, 'enabled',
model[iter_][3])
gajim.config.set_per('defaultstatusmsg', status, 'message', message)
def on_default_status_expander_activate(self, expander):
eventbox = self.xml.get_object('default_status_eventbox')
vbox = self.xml.get_object('status_vbox')
vbox.set_child_packing(eventbox, not expander.get_expanded(), True, 0,
Gtk.PackType.START)
def save_status_messages(self, model):
for msg in gajim.config.get_per('statusmsg'):
gajim.config.del_per('statusmsg', msg)
iter_ = model.get_iter_first()
while iter_:
val = model[iter_][0]
if model[iter_][1]: # we have a preset message
if not val: # no title, use message text for title
val = model[iter_][1]
gajim.config.add_per('statusmsg', val)
msg = helpers.to_one_line(model[iter_][1])
gajim.config.set_per('statusmsg', val, 'message', msg)
i = 2
# store mood / activity
for subname in ('activity', 'subactivity', 'activity_text',
'mood', 'mood_text'):
val2 = model[iter_][i]
if not val2:
val2 = ''
gajim.config.set_per('statusmsg', val, subname, val2)
i += 1
iter_ = model.iter_next(iter_)
def on_msg_treemodel_row_changed(self, model, path, iter_):
self.save_status_messages(model)
def on_msg_treemodel_row_deleted(self, model, path):
self.save_status_messages(model)
def on_av_combobox_changed(self, combobox, config_name):
model = combobox.get_model()
active = combobox.get_active()
device = model[active][1]
gajim.config.set(config_name, device)
def on_audio_input_combobox_changed(self, widget):
self.on_av_combobox_changed(widget, 'audio_input_device')
def on_audio_output_combobox_changed(self, widget):
self.on_av_combobox_changed(widget, 'audio_output_device')
def on_video_input_combobox_changed(self, widget):
self.on_av_combobox_changed(widget, 'video_input_device')
def on_video_output_combobox_changed(self, widget):
self.on_av_combobox_changed(widget, 'video_output_device')
def on_video_framerate_combobox_changed(self, widget):
self.on_av_combobox_changed(widget, 'video_framerate')
def on_video_size_combobox_changed(self, widget):
self.on_av_combobox_changed(widget, 'video_size')
def on_video_see_self_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'video_see_self')
def on_stun_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'use_stun_server',
[self.xml.get_object('stun_server_entry')])
def stun_server_entry_changed(self, widget):
gajim.config.set('stun_server', widget.get_text())
def on_applications_combobox_changed(self, widget):
if widget.get_active() == 0:
gajim.config.set('autodetect_browser_mailer', True)
self.xml.get_object('custom_apps_frame').hide()
elif widget.get_active() == 1:
gajim.config.set('autodetect_browser_mailer', False)
self.xml.get_object('custom_apps_frame').show()
def on_custom_browser_entry_changed(self, widget):
gajim.config.set('custombrowser', widget.get_text())
def on_custom_mail_client_entry_changed(self, widget):
gajim.config.set('custommailapp', widget.get_text())
def on_custom_file_manager_entry_changed(self, widget):
gajim.config.set('custom_file_manager', widget.get_text())
def on_log_show_changes_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'log_contact_status_changes')
def on_log_encrypted_chats_checkbutton_toggled(self, widget):
widget.set_inconsistent(False)
self.on_per_account_checkbutton_toggled(widget, 'log_encrypted_sessions')
def on_send_os_info_checkbutton_toggled(self, widget):
widget.set_inconsistent(False)
self.on_per_account_checkbutton_toggled(widget, 'send_os_info')
def on_send_time_info_checkbutton_toggled(self, widget):
widget.set_inconsistent(False)
self.on_per_account_checkbutton_toggled(widget, 'send_time_info')
def on_send_idle_time_checkbutton_toggled(self, widget):
widget.set_inconsistent(False)
self.on_per_account_checkbutton_toggled(widget, 'send_idle_time')
def on_notify_gmail_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'notify_on_new_gmail_email')
def on_notify_gmail_extra_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'notify_on_new_gmail_email_extra')
def fill_msg_treeview(self):
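        # Repopulate the preset status message list; '_last_*' entries are
        # internal (last message used per status) and are never shown here.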
self.xml.get_object('delete_msg_button').set_sensitive(False)
model = self.msg_tree.get_model()
model.clear()
preset_status = []
for msg_name in gajim.config.get_per('statusmsg'):
if msg_name.startswith('_last_'):
continue
preset_status.append(msg_name)
preset_status.sort()
for msg_name in preset_status:
msg_text = gajim.config.get_per('statusmsg', msg_name, 'message')
msg_text = helpers.from_one_line(msg_text)
activity = gajim.config.get_per('statusmsg', msg_name, 'activity')
subactivity = gajim.config.get_per('statusmsg', msg_name,
'subactivity')
activity_text = gajim.config.get_per('statusmsg', msg_name,
'activity_text')
mood = gajim.config.get_per('statusmsg', msg_name, 'mood')
mood_text = gajim.config.get_per('statusmsg', msg_name, 'mood_text')
iter_ = model.append()
model.set(iter_, 0, msg_name, 1, msg_text, 2, activity, 3,
subactivity, 4, activity_text, 5, mood, 6, mood_text)
def on_msg_cell_edited(self, cell, row, new_text):
model = self.msg_tree.get_model()
iter_ = model.get_iter_from_string(row)
model.set_value(iter_, 0, new_text)
def on_msg_treeview_cursor_changed(self, widget, data = None):
sel = self.msg_tree.get_selection()
if not sel:
return
(model, iter_) = sel.get_selected()
if not iter_:
return
self.xml.get_object('delete_msg_button').set_sensitive(True)
buf = self.xml.get_object('msg_textview').get_buffer()
msg = model[iter_][1]
buf.set_text(msg)
def on_new_msg_button_clicked(self, widget, data = None):
model = self.msg_tree.get_model()
iter_ = model.append()
model.set(iter_, 0, _('status message title'), 1,
_('status message text'))
self.msg_tree.set_cursor(model.get_path(iter_))
def on_delete_msg_button_clicked(self, widget, data = None):
sel = self.msg_tree.get_selection()
if not sel:
return
(model, iter_) = sel.get_selected()
if not iter_:
return
buf = self.xml.get_object('msg_textview').get_buffer()
model.remove(iter_)
buf.set_text('')
self.xml.get_object('delete_msg_button').set_sensitive(False)
def on_msg_textview_changed(self, widget, data = None):
sel = self.msg_tree.get_selection()
if not sel:
return
(model, iter_) = sel.get_selected()
if not iter_:
return
buf = self.xml.get_object('msg_textview').get_buffer()
first_iter, end_iter = buf.get_bounds()
model.set_value(iter_, 1, buf.get_text(first_iter, end_iter, True))
def on_msg_treeview_key_press_event(self, widget, event):
if event.keyval == Gdk.KEY_Delete:
self.on_delete_msg_button_clicked(widget)
def on_proxies_combobox_changed(self, widget):
active = widget.get_active()
proxy = widget.get_model()[active][0]
if proxy == _('None'):
proxy = ''
gajim.config.set('global_proxy', proxy)
def on_manage_proxies_button_clicked(self, widget):
if 'manage_proxies' in gajim.interface.instances:
gajim.interface.instances['manage_proxies'].window.present()
else:
gajim.interface.instances['manage_proxies'] = ManageProxiesWindow(
self.window)
def update_proxy_list(self):
our_proxy = gajim.config.get('global_proxy')
if not our_proxy:
our_proxy = _('None')
proxy_combobox = self.xml.get_object('proxies_combobox')
model = proxy_combobox.get_model()
model.clear()
        proxies = gajim.config.get_per('proxies')
        proxies.insert(0, _('None'))
        for i, proxy in enumerate(proxies):
            model.append([proxy])
            if our_proxy == proxy:
                proxy_combobox.set_active(i)
def on_open_advanced_editor_button_clicked(self, widget, data = None):
if 'advanced_config' in gajim.interface.instances:
gajim.interface.instances['advanced_config'].window.present()
else:
gajim.interface.instances['advanced_config'] = \
dialogs.AdvancedConfigurationWindow()
#---------- ManageProxiesWindow class -------------#
class ManageProxiesWindow:
def __init__(self, transient_for=None):
self.xml = gtkgui_helpers.get_gtk_builder('manage_proxies_window.ui')
self.window = self.xml.get_object('manage_proxies_window')
self.window.set_transient_for(transient_for)
self.proxies_treeview = self.xml.get_object('proxies_treeview')
self.proxyname_entry = self.xml.get_object('proxyname_entry')
self.proxytype_combobox = self.xml.get_object('proxytype_combobox')
self.init_list()
self.block_signal = False
self.xml.connect_signals(self)
self.window.show_all()
# hide the BOSH fields by default
        self.show_bosh_fields(False)
def show_bosh_fields(self, show=True):
if show:
self.xml.get_object('boshuri_entry').show()
self.xml.get_object('boshuri_label').show()
self.xml.get_object('boshuseproxy_checkbutton').show()
else:
cb = self.xml.get_object('boshuseproxy_checkbutton')
cb.hide()
cb.set_active(True)
self.on_boshuseproxy_checkbutton_toggled(cb)
self.xml.get_object('boshuri_entry').hide()
self.xml.get_object('boshuri_label').hide()
def fill_proxies_treeview(self):
model = self.proxies_treeview.get_model()
model.clear()
iter_ = model.append()
model.set(iter_, 0, _('None'))
for p in gajim.config.get_per('proxies'):
iter_ = model.append()
model.set(iter_, 0, p)
def init_list(self):
self.xml.get_object('remove_proxy_button').set_sensitive(False)
self.proxytype_combobox.set_sensitive(False)
self.xml.get_object('proxy_table').set_sensitive(False)
model = Gtk.ListStore(str)
self.proxies_treeview.set_model(model)
col = Gtk.TreeViewColumn('Proxies')
self.proxies_treeview.append_column(col)
renderer = Gtk.CellRendererText()
col.pack_start(renderer, True)
col.add_attribute(renderer, 'text', 0)
self.fill_proxies_treeview()
self.xml.get_object('proxytype_combobox').set_active(0)
def on_manage_proxies_window_destroy(self, widget):
if 'accounts' in gajim.interface.instances:
gajim.interface.instances['accounts'].\
update_proxy_list()
del gajim.interface.instances['manage_proxies']
def on_add_proxy_button_clicked(self, widget):
model = self.proxies_treeview.get_model()
proxies = gajim.config.get_per('proxies')
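        # find the first unused 'proxyN' name for the new entry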
i = 1
while ('proxy' + str(i)) in proxies:
i += 1
iter_ = model.append()
model.set(iter_, 0, 'proxy' + str(i))
gajim.config.add_per('proxies', 'proxy' + str(i))
self.proxies_treeview.set_cursor(model.get_path(iter_))
def on_remove_proxy_button_clicked(self, widget):
sel = self.proxies_treeview.get_selection()
if not sel:
return
(model, iter_) = sel.get_selected()
if not iter_:
return
proxy = model[iter_][0]
model.remove(iter_)
gajim.config.del_per('proxies', proxy)
self.xml.get_object('remove_proxy_button').set_sensitive(False)
self.block_signal = True
self.on_proxies_treeview_cursor_changed(self.proxies_treeview)
self.block_signal = False
def on_close_button_clicked(self, widget):
self.window.destroy()
def on_useauth_checkbutton_toggled(self, widget):
if self.block_signal:
return
act = widget.get_active()
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'useauth', act)
self.xml.get_object('proxyuser_entry').set_sensitive(act)
self.xml.get_object('proxypass_entry').set_sensitive(act)
def on_boshuseproxy_checkbutton_toggled(self, widget):
if self.block_signal:
return
act = widget.get_active()
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'bosh_useproxy', act)
self.xml.get_object('proxyhost_entry').set_sensitive(act)
self.xml.get_object('proxyport_entry').set_sensitive(act)
def on_proxies_treeview_cursor_changed(self, widget):
        # FIXME: check if off proxy settings are correct (see
        # http://trac.gajim.org/changeset/1921#file2 line 1221)
proxyhost_entry = self.xml.get_object('proxyhost_entry')
proxyport_entry = self.xml.get_object('proxyport_entry')
proxyuser_entry = self.xml.get_object('proxyuser_entry')
proxypass_entry = self.xml.get_object('proxypass_entry')
boshuri_entry = self.xml.get_object('boshuri_entry')
useauth_checkbutton = self.xml.get_object('useauth_checkbutton')
boshuseproxy_checkbutton = self.xml.get_object('boshuseproxy_checkbutton')
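        # block the change handlers while the entries are refilled for the
        # newly selected proxy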
self.block_signal = True
proxyhost_entry.set_text('')
proxyport_entry.set_text('')
proxyuser_entry.set_text('')
proxypass_entry.set_text('')
boshuri_entry.set_text('')
#boshuseproxy_checkbutton.set_active(False)
#self.on_boshuseproxy_checkbutton_toggled(boshuseproxy_checkbutton)
#useauth_checkbutton.set_active(False)
#self.on_useauth_checkbutton_toggled(useauth_checkbutton)
sel = widget.get_selection()
if sel:
(model, iter_) = sel.get_selected()
else:
iter_ = None
if not iter_:
self.xml.get_object('proxyname_entry').set_text('')
self.xml.get_object('proxytype_combobox').set_sensitive(False)
self.xml.get_object('proxy_table').set_sensitive(False)
self.block_signal = False
return
proxy = model[iter_][0]
self.xml.get_object('proxyname_entry').set_text(proxy)
if proxy == _('None'): # special proxy None
self.show_bosh_fields(False)
self.proxyname_entry.set_editable(False)
self.xml.get_object('remove_proxy_button').set_sensitive(False)
self.xml.get_object('proxytype_combobox').set_sensitive(False)
self.xml.get_object('proxy_table').set_sensitive(False)
else:
proxytype = gajim.config.get_per('proxies', proxy, 'type')
            self.show_bosh_fields(proxytype == 'bosh')
self.proxyname_entry.set_editable(True)
self.xml.get_object('remove_proxy_button').set_sensitive(True)
self.xml.get_object('proxytype_combobox').set_sensitive(True)
self.xml.get_object('proxy_table').set_sensitive(True)
proxyhost_entry.set_text(gajim.config.get_per('proxies', proxy,
'host'))
proxyport_entry.set_text(str(gajim.config.get_per('proxies',
proxy, 'port')))
proxyuser_entry.set_text(gajim.config.get_per('proxies', proxy,
'user'))
proxypass_entry.set_text(gajim.config.get_per('proxies', proxy,
'pass'))
boshuri_entry.set_text(gajim.config.get_per('proxies', proxy,
'bosh_uri'))
types = ['http', 'socks5', 'bosh']
self.proxytype_combobox.set_active(types.index(proxytype))
boshuseproxy_checkbutton.set_active(
gajim.config.get_per('proxies', proxy, 'bosh_useproxy'))
useauth_checkbutton.set_active(
gajim.config.get_per('proxies', proxy, 'useauth'))
self.block_signal = False
def on_proxies_treeview_key_press_event(self, widget, event):
if event.keyval == Gdk.KEY_Delete:
self.on_remove_proxy_button_clicked(widget)
def on_proxyname_entry_changed(self, widget):
if self.block_signal:
return
sel = self.proxies_treeview.get_selection()
if not sel:
return
(model, iter_) = sel.get_selected()
if not iter_:
return
old_name = model.get_value(iter_, 0)
new_name = widget.get_text()
if new_name == '':
return
if new_name == old_name:
return
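        # rename by copying every option over to the new name, then dropping
        # the old entry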
config = gajim.config.get_per('proxies', old_name)
gajim.config.del_per('proxies', old_name)
gajim.config.add_per('proxies', new_name)
for option in config:
gajim.config.set_per('proxies', new_name, option, config[option])
model.set_value(iter_, 0, new_name)
def on_proxytype_combobox_changed(self, widget):
if self.block_signal:
return
types = ['http', 'socks5', 'bosh']
type_ = self.proxytype_combobox.get_active()
        self.show_bosh_fields(types[type_] == 'bosh')
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'type', types[type_])
def on_proxyhost_entry_changed(self, widget):
if self.block_signal:
return
value = widget.get_text()
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'host', value)
def on_proxyport_entry_changed(self, widget):
if self.block_signal:
return
value = widget.get_text()
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'port', value)
def on_proxyuser_entry_changed(self, widget):
if self.block_signal:
return
value = widget.get_text()
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'user', value)
def on_boshuri_entry_changed(self, widget):
if self.block_signal:
return
value = widget.get_text()
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'bosh_uri', value)
def on_proxypass_entry_changed(self, widget):
if self.block_signal:
return
value = widget.get_text()
proxy = self.proxyname_entry.get_text()
gajim.config.set_per('proxies', proxy, 'pass', value)
#---------- AccountsWindow class -------------#
class AccountsWindow:
"""
Class for accounts window: list of accounts
"""
def on_accounts_window_destroy(self, widget):
del gajim.interface.instances['accounts']
def on_close_button_clicked(self, widget):
self.check_resend_relog()
self.window.destroy()
def __init__(self):
self.xml = gtkgui_helpers.get_gtk_builder('accounts_window.ui')
self.window = self.xml.get_object('accounts_window')
self.window.set_transient_for(gajim.interface.roster.window)
self.accounts_treeview = self.xml.get_object('accounts_treeview')
self.remove_button = self.xml.get_object('remove_button')
self.rename_button = self.xml.get_object('rename_button')
path_to_kbd_input_img = gtkgui_helpers.get_icon_path('gajim-kbd_input')
img = self.xml.get_object('rename_image')
img.set_from_file(path_to_kbd_input_img)
self.notebook = self.xml.get_object('notebook')
# Name
model = Gtk.ListStore(str)
self.accounts_treeview.set_model(model)
# column
renderer = Gtk.CellRendererText()
col = Gtk.TreeViewColumn()
col.set_title(_('Name'))
col.pack_start(renderer, False)
col.add_attribute(renderer, 'text', 0)
self.accounts_treeview.insert_column(col, -1)
self.current_account = None
# When we fill info, we don't want to handle the changed signals
self.ignore_events = False
self.need_relogin = False
self.resend_presence = False
self.update_proxy_list()
self.xml.connect_signals(self)
self.init_accounts()
self.window.show_all()
# Merge accounts
st = gajim.config.get('mergeaccounts')
checkbutton = self.xml.get_object('merge_checkbutton')
checkbutton.set_active(st)
# prevent roster redraws by connecting the signal after button state is
# set
checkbutton.connect('toggled', self.on_merge_checkbutton_toggled)
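        # probe for the avahi bindings; link-local (zeroconf) accounts depend
        # on them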
self.avahi_available = True
try:
import avahi
except ImportError:
self.avahi_available = False
self.xml.get_object('close_button').grab_focus()
def on_accounts_window_key_press_event(self, widget, event):
if event.keyval == Gdk.KEY_Escape:
self.check_resend_relog()
self.window.destroy()
def select_account(self, account):
model = self.accounts_treeview.get_model()
iter_ = model.get_iter_first()
while iter_:
acct = model[iter_][0]
if account == acct:
self.accounts_treeview.set_cursor(model.get_path(iter_))
return
iter_ = model.iter_next(iter_)
def init_accounts(self):
"""
Initialize listStore with existing accounts
"""
self.remove_button.set_sensitive(False)
self.rename_button.set_sensitive(False)
self.current_account = None
model = self.accounts_treeview.get_model()
model.clear()
list_ = gajim.config.get_per('accounts')
list_.sort()
for account in list_:
iter_ = model.append()
model.set(iter_, 0, account)
self.selection = self.accounts_treeview.get_selection()
self.selection.select_iter(model.get_iter_first())
def resend(self, account):
        if account not in gajim.connections:
return
show = gajim.SHOW_LIST[gajim.connections[account].connected]
status = gajim.connections[account].status
gajim.connections[account].change_status(show, status)
def check_resend_relog(self):
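        # Zeroconf accounts only need their published details refreshed;
        # regular connected accounts are asked to relogin (or presence is
        # resent) so that the changes take effect.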
if self.need_relogin and self.current_account == \
gajim.ZEROCONF_ACC_NAME:
if gajim.ZEROCONF_ACC_NAME in gajim.connections:
gajim.connections[gajim.ZEROCONF_ACC_NAME].update_details()
return
elif self.need_relogin and self.current_account and \
gajim.connections[self.current_account].connected > 0:
def login(account, show_before, status_before):
"""
Login with previous status
"""
# first make sure connection is really closed,
# 0.5 may not be enough
gajim.connections[account].disconnect(True)
gajim.interface.roster.send_status(account, show_before,
status_before)
def relog(account):
self.dialog.destroy()
show_before = gajim.SHOW_LIST[gajim.connections[account].\
connected]
status_before = gajim.connections[account].status
gajim.interface.roster.send_status(account, 'offline',
_('Be right back.'))
GLib.timeout_add(500, login, account, show_before,
status_before)
def on_yes(checked, account):
relog(account)
def on_no(account):
if self.resend_presence:
self.resend(account)
if self.current_account in gajim.connections:
self.dialog = dialogs.YesNoDialog(_('Relogin now?'),
_('If you want all the changes to apply instantly, '
'you must relogin.'), on_response_yes=(on_yes,
self.current_account), on_response_no=(on_no,
self.current_account))
elif self.resend_presence:
self.resend(self.current_account)
self.need_relogin = False
self.resend_presence = False
def on_accounts_treeview_cursor_changed(self, widget):
"""
Activate modify buttons when a row is selected, update accounts info
"""
sel = self.accounts_treeview.get_selection()
if sel:
(model, iter_) = sel.get_selected()
if iter_:
account = model[iter_][0]
else:
account = None
else:
iter_ = account = None
if self.current_account and self.current_account == account:
            # We're coming back to our current account, no need to update
# widgets
return
        # Save config for the previous account if needed, because the
        # focus_out event is called after the changed event
if self.current_account and self.window.get_focus():
focused_widget = self.window.get_focus()
focused_widget_name = focused_widget.get_name()
if focused_widget_name in ('jid_entry1', 'resource_entry1',
'custom_port_entry', 'cert_entry1'):
if focused_widget_name == 'jid_entry1':
func = self.on_jid_entry1_focus_out_event
elif focused_widget_name == 'resource_entry1':
func = self.on_resource_entry1_focus_out_event
elif focused_widget_name == 'custom_port_entry':
func = self.on_custom_port_entry_focus_out_event
elif focused_widget_name == 'cert_entry1':
func = self.on_cert_entry1_focus_out_event
if func(focused_widget, None):
                    # Error detected in entry, don't change the account;
                    # put the cursor back on the previous row
self.select_account(self.current_account)
return True
self.window.set_focus(widget)
self.check_resend_relog()
if account:
self.remove_button.set_sensitive(True)
self.rename_button.set_sensitive(True)
else:
self.remove_button.set_sensitive(False)
self.rename_button.set_sensitive(False)
if iter_:
self.current_account = account
if account == gajim.ZEROCONF_ACC_NAME:
self.remove_button.set_sensitive(False)
self.init_account()
self.update_proxy_list()
def on_browse_for_client_cert_button_clicked(self, widget, data=None):
def on_ok(widget, path_to_clientcert_file):
self.dialog.destroy()
if not path_to_clientcert_file:
return
self.xml.get_object('cert_entry1').set_text(path_to_clientcert_file)
gajim.config.set_per('accounts', self.current_account,
'client_cert', path_to_clientcert_file)
def on_cancel(widget):
self.dialog.destroy()
path_to_clientcert_file = self.xml.get_object('cert_entry1').get_text()
self.dialog = dialogs.ClientCertChooserDialog(path_to_clientcert_file,
on_ok, on_cancel)
def update_proxy_list(self):
if self.current_account:
our_proxy = gajim.config.get_per('accounts', self.current_account,
'proxy')
else:
our_proxy = ''
if not our_proxy:
our_proxy = _('None')
proxy_combobox = self.xml.get_object('proxies_combobox1')
model = Gtk.ListStore(str)
proxy_combobox.set_model(model)
        proxies = gajim.config.get_per('proxies')
        proxies.insert(0, _('None'))
        for i, proxy in enumerate(proxies):
            model.append([proxy])
            if our_proxy == proxy:
                proxy_combobox.set_active(i)
def init_account(self):
if not self.current_account:
self.notebook.set_current_page(0)
return
if gajim.config.get_per('accounts', self.current_account,
'is_zeroconf'):
self.ignore_events = True
self.init_zeroconf_account()
self.ignore_events = False
self.notebook.set_current_page(2)
return
self.ignore_events = True
self.init_normal_account()
self.ignore_events = False
self.notebook.set_current_page(1)
def init_zeroconf_account(self):
active = gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME,
'active')
self.xml.get_object('enable_zeroconf_checkbutton2').set_active(active)
if not gajim.HAVE_ZEROCONF:
self.xml.get_object('enable_zeroconf_checkbutton2').set_sensitive(
False)
self.xml.get_object('zeroconf_notebook').set_sensitive(active)
# General tab
st = gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME,
'autoconnect')
self.xml.get_object('autoconnect_checkbutton2').set_active(st)
list_no_log_for = gajim.config.get_per('accounts',
gajim.ZEROCONF_ACC_NAME, 'no_log_for').split()
        if gajim.ZEROCONF_ACC_NAME in list_no_log_for:
            self.xml.get_object('log_history_checkbutton2').set_active(False)
        else:
            self.xml.get_object('log_history_checkbutton2').set_active(True)
st = gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME,
'sync_with_global_status')
self.xml.get_object('sync_with_global_status_checkbutton2').set_active(
st)
st = gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME,
'use_custom_host')
self.xml.get_object('custom_port_checkbutton2').set_active(st)
self.xml.get_object('custom_port_entry2').set_sensitive(st)
st = gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME,
'custom_port')
if not st:
gajim.config.set_per('accounts', gajim.ZEROCONF_ACC_NAME,
'custom_port', '5298')
st = '5298'
self.xml.get_object('custom_port_entry2').set_text(str(st))
# Personal tab
gpg_key_label = self.xml.get_object('gpg_key_label2')
if gajim.ZEROCONF_ACC_NAME in gajim.connections and \
gajim.connections[gajim.ZEROCONF_ACC_NAME].gpg:
self.xml.get_object('gpg_choose_button2').set_sensitive(True)
self.init_account_gpg()
else:
gpg_key_label.set_text(_('OpenPGP is not usable on this computer'))
self.xml.get_object('gpg_choose_button2').set_sensitive(False)
for opt in ('first_name', 'last_name', 'jabber_id', 'email'):
st = gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME,
'zeroconf_' + opt)
self.xml.get_object(opt + '_entry2').set_text(st)
def init_account_gpg(self):
account = self.current_account
keyid = gajim.config.get_per('accounts', account, 'keyid')
keyname = gajim.config.get_per('accounts', account, 'keyname')
use_gpg_agent = gajim.config.get('use_gpg_agent')
if account == gajim.ZEROCONF_ACC_NAME:
widget_name_add = '2'
else:
widget_name_add = '1'
gpg_key_label = self.xml.get_object('gpg_key_label' + widget_name_add)
gpg_name_label = self.xml.get_object('gpg_name_label' + widget_name_add)
use_gpg_agent_checkbutton = self.xml.get_object(
'use_gpg_agent_checkbutton' + widget_name_add)
if not keyid:
use_gpg_agent_checkbutton.set_sensitive(False)
gpg_key_label.set_text(_('No key selected'))
gpg_name_label.set_text('')
return
gpg_key_label.set_text(keyid)
gpg_name_label.set_text(keyname)
use_gpg_agent_checkbutton.set_sensitive(True)
use_gpg_agent_checkbutton.set_active(use_gpg_agent)
def draw_normal_jid(self):
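        # Fill the enable checkbutton and the JID/password widgets; anonymous
        # accounts only show a server name and never save a password.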
account = self.current_account
self.ignore_events = True
active = gajim.config.get_per('accounts', account, 'active')
self.xml.get_object('enable_checkbutton1').set_active(active)
self.xml.get_object('normal_notebook1').set_sensitive(active)
if gajim.config.get_per('accounts', account, 'anonymous_auth'):
self.xml.get_object('anonymous_checkbutton1').set_active(True)
self.xml.get_object('jid_label1').set_text(_('Server:'))
save_password = self.xml.get_object('save_password_checkbutton1')
save_password.set_active(False)
save_password.set_sensitive(False)
password_entry = self.xml.get_object('password_entry1')
password_entry.set_text('')
password_entry.set_sensitive(False)
jid = gajim.config.get_per('accounts', account, 'hostname')
else:
self.xml.get_object('anonymous_checkbutton1').set_active(False)
self.xml.get_object('jid_label1').set_text(_('Jabber ID:'))
savepass = gajim.config.get_per('accounts', account, 'savepass')
save_password = self.xml.get_object('save_password_checkbutton1')
save_password.set_sensitive(True)
save_password.set_active(savepass)
password_entry = self.xml.get_object('password_entry1')
if savepass:
passstr = passwords.get_password(account) or ''
password_entry.set_sensitive(True)
else:
passstr = ''
password_entry.set_sensitive(False)
password_entry.set_text(passstr)
jid = gajim.config.get_per('accounts', account, 'name') \
+ '@' + gajim.config.get_per('accounts', account, 'hostname')
self.xml.get_object('jid_entry1').set_text(jid)
self.ignore_events = False
def init_normal_account(self):
account = self.current_account
# Account tab
self.draw_normal_jid()
self.xml.get_object('resource_entry1').set_text(gajim.config.get_per(
'accounts', account, 'resource'))
client_cert = gajim.config.get_per('accounts', account, 'client_cert')
self.xml.get_object('cert_entry1').set_text(client_cert)
client_cert_encrypted = gajim.config.get_per('accounts', account,
'client_cert_encrypted')
self.xml.get_object('client_cert_encrypted_checkbutton1').\
set_active(client_cert_encrypted)
self.xml.get_object('adjust_priority_with_status_checkbutton1').\
set_active(gajim.config.get_per('accounts', account,
'adjust_priority_with_status'))
spinbutton = self.xml.get_object('priority_spinbutton1')
if gajim.config.get('enable_negative_priority'):
spinbutton.set_range(-128, 127)
else:
spinbutton.set_range(0, 127)
spinbutton.set_value(gajim.config.get_per('accounts', account,
'priority'))
# Connection tab
use_env_http_proxy = gajim.config.get_per('accounts', account,
'use_env_http_proxy')
self.xml.get_object('use_env_http_proxy_checkbutton1').set_active(
use_env_http_proxy)
self.xml.get_object('proxy_hbox1').set_sensitive(not use_env_http_proxy)
warn_when_insecure_ssl = gajim.config.get_per('accounts', account,
'warn_when_insecure_ssl_connection')
self.xml.get_object('warn_when_insecure_connection_checkbutton1').\
set_active(warn_when_insecure_ssl)
self.xml.get_object('send_keepalive_checkbutton1').set_active(
gajim.config.get_per('accounts', account, 'keep_alives_enabled'))
use_custom_host = gajim.config.get_per('accounts', account,
'use_custom_host')
self.xml.get_object('custom_host_port_checkbutton1').set_active(
use_custom_host)
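        # fall back to the account hostname / port 5222 when no custom host or
        # port has been stored yet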
custom_host = gajim.config.get_per('accounts', account, 'custom_host')
if not custom_host:
custom_host = gajim.config.get_per('accounts', account, 'hostname')
gajim.config.set_per('accounts', account, 'custom_host',
custom_host)
self.xml.get_object('custom_host_entry1').set_text(custom_host)
custom_port = gajim.config.get_per('accounts', account, 'custom_port')
if not custom_port:
custom_port = 5222
gajim.config.set_per('accounts', account, 'custom_port',
custom_port)
self.xml.get_object('custom_port_entry1').set_text(str(custom_port))
# Personal tab
gpg_key_label = self.xml.get_object('gpg_key_label1')
if gajim.HAVE_GPG:
self.xml.get_object('gpg_choose_button1').set_sensitive(True)
self.init_account_gpg()
else:
gpg_key_label.set_text(_('OpenPGP is not usable on this computer'))
self.xml.get_object('gpg_choose_button1').set_sensitive(False)
# General tab
self.xml.get_object('autoconnect_checkbutton1').set_active(
gajim.config.get_per('accounts', account, 'autoconnect'))
self.xml.get_object('autoreconnect_checkbutton1').set_active(gajim.
config.get_per('accounts', account, 'autoreconnect'))
list_no_log_for = gajim.config.get_per('accounts', account,
'no_log_for').split()
if account in list_no_log_for:
self.xml.get_object('log_history_checkbutton1').set_active(False)
else:
self.xml.get_object('log_history_checkbutton1').set_active(True)
self.xml.get_object('sync_logs_with_server_checkbutton1').set_active(
gajim.config.get_per('accounts', account, 'sync_logs_with_server'))
self.xml.get_object('sync_with_global_status_checkbutton1').set_active(
gajim.config.get_per('accounts', account,
'sync_with_global_status'))
self.xml.get_object('carbons_checkbutton1').set_active(
gajim.config.get_per('accounts', account, 'enable_message_carbons'))
self.xml.get_object('use_ft_proxies_checkbutton1').set_active(
gajim.config.get_per('accounts', account, 'use_ft_proxies'))
def on_add_button_clicked(self, widget):
"""
When add button is clicked: open an account information window
"""
if 'account_creation_wizard' in gajim.interface.instances:
gajim.interface.instances['account_creation_wizard'].window.present()
else:
gajim.interface.instances['account_creation_wizard'] = \
AccountCreationWizardWindow()
def on_remove_button_clicked(self, widget):
"""
When delete button is clicked: Remove an account from the listStore and
from the config file
"""
if not self.current_account:
return
account = self.current_account
if len(gajim.events.get_events(account)):
dialogs.ErrorDialog(_('Unread events'),
_('Read all pending events before removing this account.'),
transient_for=self.window)
return
if gajim.config.get_per('accounts', account, 'is_zeroconf'):
# Should never happen as button is insensitive
return
        # Detect if we have opened windows for this account
        win_opened = False
if gajim.interface.msg_win_mgr.get_controls(acct=account):
win_opened = True
elif account in gajim.interface.instances:
for key in gajim.interface.instances[account]:
if gajim.interface.instances[account][key] and key != \
'remove_account':
win_opened = True
break
def remove(account):
if account in gajim.interface.instances and \
'remove_account' in gajim.interface.instances[account]:
gajim.interface.instances[account]['remove_account'].window.\
present()
else:
                if account not in gajim.interface.instances:
gajim.interface.instances[account] = {}
gajim.interface.instances[account]['remove_account'] = \
RemoveAccountWindow(account)
if win_opened:
dialogs.ConfirmationDialog(
_('You have opened chat in account %s') % account,
_('All chat and groupchat windows will be closed. Do you want to '
'continue?'),
on_response_ok = (remove, account))
else:
remove(account)
def on_rename_button_clicked(self, widget):
if not self.current_account:
return
active = gajim.config.get_per('accounts', self.current_account,
'active') and self.current_account in gajim.connections
if active and gajim.connections[self.current_account].connected != 0:
dialogs.ErrorDialog(
_('You are currently connected to the server'),
_('To change the account name, you must be disconnected.'),
transient_for=self.window)
return
if len(gajim.events.get_events(self.current_account)):
dialogs.ErrorDialog(_('Unread events'),
_('To change the account name, you must read all pending '
'events.'), transient_for=self.window)
return
# Get the new name
def on_renamed(new_name, old_name):
if new_name in gajim.connections:
dialogs.ErrorDialog(_('Account Name Already Used'),
_('This name is already used by another of your accounts. '
'Please choose another name.'), transient_for=self.window)
return
            if new_name == '':
dialogs.ErrorDialog(_('Invalid account name'),
_('Account name cannot be empty.'),
transient_for=self.window)
return
            if ' ' in new_name:
dialogs.ErrorDialog(_('Invalid account name'),
_('Account name cannot contain spaces.'),
transient_for=self.window)
return
if active:
# update variables
gajim.interface.instances[new_name] = gajim.interface.instances[
old_name]
gajim.interface.minimized_controls[new_name] = \
gajim.interface.minimized_controls[old_name]
gajim.nicks[new_name] = gajim.nicks[old_name]
gajim.block_signed_in_notifications[new_name] = \
gajim.block_signed_in_notifications[old_name]
gajim.groups[new_name] = gajim.groups[old_name]
gajim.gc_connected[new_name] = gajim.gc_connected[old_name]
gajim.automatic_rooms[new_name] = gajim.automatic_rooms[
old_name]
gajim.newly_added[new_name] = gajim.newly_added[old_name]
gajim.to_be_removed[new_name] = gajim.to_be_removed[old_name]
gajim.sleeper_state[new_name] = gajim.sleeper_state[old_name]
gajim.encrypted_chats[new_name] = gajim.encrypted_chats[
old_name]
gajim.last_message_time[new_name] = \
gajim.last_message_time[old_name]
gajim.status_before_autoaway[new_name] = \
gajim.status_before_autoaway[old_name]
gajim.transport_avatar[new_name] = gajim.transport_avatar[old_name]
gajim.gajim_optional_features[new_name] = \
gajim.gajim_optional_features[old_name]
gajim.caps_hash[new_name] = gajim.caps_hash[old_name]
gajim.contacts.change_account_name(old_name, new_name)
gajim.events.change_account_name(old_name, new_name)
# change account variable for chat / gc controls
gajim.interface.msg_win_mgr.change_account_name(old_name, new_name)
                # update the account variable in opened windows
for kind in ('infos', 'disco', 'gc_config', 'search',
'online_dialog', 'sub_request'):
for j in gajim.interface.instances[new_name][kind]:
gajim.interface.instances[new_name][kind][j].account = \
new_name
                # the ServiceCache object keeps the old account property
if hasattr(gajim.connections[old_name], 'services_cache'):
gajim.connections[old_name].services_cache.account = \
new_name
del gajim.interface.instances[old_name]
del gajim.interface.minimized_controls[old_name]
del gajim.nicks[old_name]
del gajim.block_signed_in_notifications[old_name]
del gajim.groups[old_name]
del gajim.gc_connected[old_name]
del gajim.automatic_rooms[old_name]
del gajim.newly_added[old_name]
del gajim.to_be_removed[old_name]
del gajim.sleeper_state[old_name]
del gajim.encrypted_chats[old_name]
del gajim.last_message_time[old_name]
del gajim.status_before_autoaway[old_name]
del gajim.transport_avatar[old_name]
del gajim.gajim_optional_features[old_name]
del gajim.caps_hash[old_name]
gajim.connections[old_name].name = new_name
gajim.connections[old_name].pep_change_account_name(new_name)
gajim.connections[old_name].caps_change_account_name(new_name)
gajim.connections[new_name] = gajim.connections[old_name]
del gajim.connections[old_name]
gajim.config.add_per('accounts', new_name)
old_config = gajim.config.get_per('accounts', old_name)
for opt in old_config:
gajim.config.set_per('accounts', new_name, opt, old_config[opt])
gajim.config.del_per('accounts', old_name)
if self.current_account == old_name:
self.current_account = new_name
if old_name == gajim.ZEROCONF_ACC_NAME:
gajim.ZEROCONF_ACC_NAME = new_name
# refresh roster
gajim.interface.roster.setup_and_draw_roster()
self.init_accounts()
self.select_account(new_name)
title = _('Rename Account')
message = _('Enter a new name for account %s') % self.current_account
old_text = self.current_account
dialogs.InputDialog(title, message, old_text, is_modal=False,
ok_handler=(on_renamed, self.current_account),
transient_for=self.window)
def option_changed(self, option, value):
return gajim.config.get_per('accounts', self.current_account, option) \
!= value
def on_jid_entry1_focus_out_event(self, widget, event):
if self.ignore_events:
return
jid = widget.get_text()
        # check that the JID conforms to the RFC and stringprep it
try:
jid = helpers.parse_jid(jid)
except helpers.InvalidFormat as s:
if not widget.is_focus():
pritext = _('Invalid Jabber ID')
dialogs.ErrorDialog(pritext, str(s), transient_for=self.window)
GLib.idle_add(lambda: widget.grab_focus())
return True
jid_splited = jid.split('@', 1)
if len(jid_splited) != 2 and not gajim.config.get_per('accounts',
self.current_account, 'anonymous_auth'):
if not widget.is_focus():
pritext = _('Invalid Jabber ID')
sectext = \
_('A Jabber ID must be in the form "user@servername".')
dialogs.ErrorDialog(pritext, sectext, transient_for=self.window)
GLib.idle_add(lambda: widget.grab_focus())
return True
if gajim.config.get_per('accounts', self.current_account,
'anonymous_auth'):
gajim.config.set_per('accounts', self.current_account, 'hostname',
jid_splited[0])
if self.option_changed('hostname', jid_splited[0]):
self.need_relogin = True
else:
if self.option_changed('name', jid_splited[0]) or \
self.option_changed('hostname', jid_splited[1]):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account, 'name',
jid_splited[0])
gajim.config.set_per('accounts', self.current_account, 'hostname',
jid_splited[1])
def on_cert_entry1_focus_out_event(self, widget, event):
if self.ignore_events:
return
client_cert = widget.get_text()
if self.option_changed('client_cert', client_cert):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account, 'client_cert',
client_cert)
def on_anonymous_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
active = widget.get_active()
gajim.config.set_per('accounts', self.current_account, 'anonymous_auth',
active)
self.draw_normal_jid()
def on_password_entry1_changed(self, widget):
if self.ignore_events:
return
passwords.save_password(self.current_account, widget.get_text())
def on_save_password_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
active = widget.get_active()
password_entry = self.xml.get_object('password_entry1')
password_entry.set_sensitive(active)
gajim.config.set_per('accounts', self.current_account, 'savepass',
active)
if active:
password = password_entry.get_text()
passwords.save_password(self.current_account, password)
else:
passwords.save_password(self.current_account, '')
def on_resource_entry1_focus_out_event(self, widget, event):
if self.ignore_events:
return
resource = self.xml.get_object('resource_entry1').get_text()
try:
resource = helpers.parse_resource(resource)
except helpers.InvalidFormat as s:
if not widget.is_focus():
pritext = _('Invalid Jabber ID')
dialogs.ErrorDialog(pritext, str(s), transient_for=self.window)
GLib.idle_add(lambda: widget.grab_focus())
return True
if self.option_changed('resource', resource):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account, 'resource',
resource)
def on_adjust_priority_with_status_checkbutton1_toggled(self, widget):
self.xml.get_object('priority_spinbutton1').set_sensitive(
not widget.get_active())
self.on_checkbutton_toggled(widget, 'adjust_priority_with_status',
account = self.current_account)
def on_priority_spinbutton1_value_changed(self, widget):
prio = widget.get_value_as_int()
if self.option_changed('priority', prio):
self.resend_presence = True
gajim.config.set_per('accounts', self.current_account, 'priority', prio)
def on_synchronise_contacts_button1_clicked(self, widget):
try:
dialogs.SynchroniseSelectAccountDialog(self.current_account)
except GajimGeneralException:
            # If we showed an ErrorDialog, there will be no dialog instance
return
def on_change_password_button1_clicked(self, widget):
def on_changed(new_password):
if new_password is not None:
gajim.connections[self.current_account].change_password(
new_password)
if self.xml.get_object('save_password_checkbutton1').\
get_active():
self.xml.get_object('password_entry1').set_text(
new_password)
try:
dialogs.ChangePasswordDialog(self.current_account, on_changed,
self.window)
except GajimGeneralException:
            # if we showed an ErrorDialog, there will be no dialog instance
return
def on_client_cert_encrypted_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'client_cert_encrypted',
account=self.current_account)
def on_autoconnect_checkbutton_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'autoconnect',
account=self.current_account)
def on_autoreconnect_checkbutton_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'autoreconnect',
account=self.current_account)
def on_log_history_checkbutton_toggled(self, widget):
if self.ignore_events:
return
list_no_log_for = gajim.config.get_per('accounts', self.current_account,
'no_log_for').split()
if self.current_account in list_no_log_for:
list_no_log_for.remove(self.current_account)
if not widget.get_active():
list_no_log_for.append(self.current_account)
gajim.config.set_per('accounts', self.current_account, 'no_log_for',
' '.join(list_no_log_for))
def on_sync_logs_with_server_checkbutton_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'sync_logs_with_server',
account=self.current_account)
def on_sync_with_global_status_checkbutton_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'sync_with_global_status',
account=self.current_account)
gajim.interface.roster.update_status_combobox()
def on_carbons_checkbutton_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'enable_message_carbons',
account=self.current_account)
def on_use_ft_proxies_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'use_ft_proxies',
account=self.current_account)
def on_use_env_http_proxy_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'use_env_http_proxy',
account=self.current_account)
hbox = self.xml.get_object('proxy_hbox1')
hbox.set_sensitive(not widget.get_active())
def on_proxies_combobox1_changed(self, widget):
active = widget.get_active()
proxy = widget.get_model()[active][0]
if proxy == _('None'):
proxy = ''
if self.option_changed('proxy', proxy):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account, 'proxy', proxy)
def on_manage_proxies_button1_clicked(self, widget):
if 'manage_proxies' in gajim.interface.instances:
gajim.interface.instances['manage_proxies'].window.present()
else:
gajim.interface.instances['manage_proxies'] = ManageProxiesWindow(
self.window)
def on_warn_when_insecure_connection_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'warn_when_insecure_ssl_connection',
account=self.current_account)
def on_send_keepalive_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
self.on_checkbutton_toggled(widget, 'keep_alives_enabled',
account=self.current_account)
gajim.config.set_per('accounts', self.current_account,
'ping_alives_enabled', widget.get_active())
def on_custom_host_port_checkbutton1_toggled(self, widget):
if self.option_changed('use_custom_host', widget.get_active()):
self.need_relogin = True
self.on_checkbutton_toggled(widget, 'use_custom_host',
account=self.current_account)
active = widget.get_active()
self.xml.get_object('custom_host_port_hbox1').set_sensitive(active)
def on_custom_host_entry1_changed(self, widget):
if self.ignore_events:
return
host = widget.get_text()
if self.option_changed('custom_host', host):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account, 'custom_host',
host)
def on_custom_port_entry_focus_out_event(self, widget, event):
if self.ignore_events:
return
custom_port = widget.get_text()
try:
custom_port = int(custom_port)
        except ValueError:
if not widget.is_focus():
dialogs.ErrorDialog(_('Invalid entry'),
_('Custom port must be a port number.'),
transient_for=self.window)
GLib.idle_add(lambda: widget.grab_focus())
return True
if self.option_changed('custom_port', custom_port):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account, 'custom_port',
custom_port)
def on_gpg_choose_button_clicked(self, widget, data = None):
if self.current_account in gajim.connections and \
gajim.connections[self.current_account].gpg:
secret_keys = gajim.connections[self.current_account].\
ask_gpg_secrete_keys()
        else:
            # self.current_account is None and/or gajim.connections is {}
if gajim.HAVE_GPG:
secret_keys = gpg.GnuPG().get_secret_keys()
else:
                secret_keys = {}
        if not secret_keys:
            dialogs.ErrorDialog(_('Failed to get secret keys'),
                _('There is no OpenPGP secret key available.'),
                transient_for=self.window)
            # keep a dict so the 'None' entry below can still be added
            secret_keys = {}
        secret_keys[_('None')] = _('None')
def on_key_selected(keyID):
if keyID is None:
return
if self.current_account == gajim.ZEROCONF_ACC_NAME:
                widget_name_ext = '2'
            else:
                widget_name_ext = '1'
            gpg_key_label = self.xml.get_object('gpg_key_label' + \
                widget_name_ext)
            gpg_name_label = self.xml.get_object('gpg_name_label' + \
                widget_name_ext)
            use_gpg_agent_checkbutton = self.xml.get_object(
                'use_gpg_agent_checkbutton' + widget_name_ext)
if keyID[0] == _('None'):
gpg_key_label.set_text(_('No key selected'))
gpg_name_label.set_text('')
use_gpg_agent_checkbutton.set_sensitive(False)
if self.option_changed('keyid', ''):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account,
'keyname', '')
gajim.config.set_per('accounts', self.current_account, 'keyid',
'')
else:
gpg_key_label.set_text(keyID[0])
gpg_name_label.set_text(keyID[1])
use_gpg_agent_checkbutton.set_sensitive(True)
if self.option_changed('keyid', keyID[0]):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account,
'keyname', keyID[1])
gajim.config.set_per('accounts', self.current_account, 'keyid',
keyID[0])
dialogs.ChooseGPGKeyDialog(_('OpenPGP Key Selection'),
_('Choose your OpenPGP key'), secret_keys, on_key_selected,
transient_for=self.window)
def on_use_gpg_agent_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'use_gpg_agent')
def on_edit_details_button1_clicked(self, widget):
if self.current_account not in gajim.interface.instances:
            dialogs.ErrorDialog(_('No such account available'),
_('You must create your account before editing your personal '
'information.'), transient_for=self.window)
return
# show error dialog if account is newly created (not in gajim.connections)
if self.current_account not in gajim.connections or \
gajim.connections[self.current_account].connected < 2:
dialogs.ErrorDialog(_('You are not connected to the server'),
_('Without a connection, you can not edit your personal '
'information.'), transient_for=self.window)
return
if not gajim.connections[self.current_account].vcard_supported:
dialogs.ErrorDialog(_("Your server doesn't support vCard"),
_("Your server can't save your personal information."),
transient_for=self.window)
return
jid = gajim.get_jid_from_account(self.current_account)
if 'profile' not in gajim.interface.instances[self.current_account]:
gajim.interface.instances[self.current_account]['profile'] = \
profile_window.ProfileWindow(self.current_account, transient_for=self.window)
gajim.connections[self.current_account].request_vcard(jid)
def on_checkbutton_toggled(self, widget, config_name,
change_sensitivity_widgets = None, account = None):
if account:
gajim.config.set_per('accounts', account, config_name,
widget.get_active())
else:
gajim.config.set(config_name, widget.get_active())
if change_sensitivity_widgets:
for w in change_sensitivity_widgets:
w.set_sensitive(widget.get_active())
def on_merge_checkbutton_toggled(self, widget):
self.on_checkbutton_toggled(widget, 'mergeaccounts')
if len(gajim.connections) >= 2: # Do not merge accounts if only one active
gajim.interface.roster.regroup = gajim.config.get('mergeaccounts')
else:
gajim.interface.roster.regroup = False
gajim.interface.roster.setup_and_draw_roster()
def _disable_account(self, account):
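        # Close every window/control of the account and drop all of its
        # per-account runtime state.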
gajim.interface.roster.close_all(account)
if account == gajim.ZEROCONF_ACC_NAME:
gajim.connections[account].disable_account()
gajim.connections[account].cleanup()
del gajim.connections[account]
del gajim.interface.instances[account]
del gajim.interface.minimized_controls[account]
del gajim.nicks[account]
del gajim.block_signed_in_notifications[account]
del gajim.groups[account]
gajim.contacts.remove_account(account)
del gajim.gc_connected[account]
del gajim.automatic_rooms[account]
del gajim.to_be_removed[account]
del gajim.newly_added[account]
del gajim.sleeper_state[account]
del gajim.encrypted_chats[account]
del gajim.last_message_time[account]
del gajim.status_before_autoaway[account]
del gajim.transport_avatar[account]
del gajim.gajim_optional_features[account]
del gajim.caps_hash[account]
if len(gajim.connections) >= 2:
# Do not merge accounts if only one exists
gajim.interface.roster.regroup = gajim.config.get('mergeaccounts')
else:
gajim.interface.roster.regroup = False
gajim.interface.roster.setup_and_draw_roster()
gajim.interface.roster.set_actions_menu_needs_rebuild()
def _enable_account(self, account):
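        # Create the connection object (the link-local backend for the
        # zeroconf account, a normal XMPP connection otherwise) and recreate
        # all per-account runtime state.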
if account == gajim.ZEROCONF_ACC_NAME:
gajim.connections[account] = connection_zeroconf.ConnectionZeroconf(
account)
if gajim.connections[account].gpg:
self.xml.get_object('gpg_choose_button2').set_sensitive(True)
else:
gajim.connections[account] = common.connection.Connection(account)
if gajim.connections[account].gpg:
self.xml.get_object('gpg_choose_button1').set_sensitive(True)
self.init_account_gpg()
# update variables
gajim.interface.instances[account] = {'infos': {},
'disco': {}, 'gc_config': {}, 'search': {}, 'online_dialog': {},
'sub_request': {}}
gajim.interface.minimized_controls[account] = {}
gajim.connections[account].connected = 0
gajim.groups[account] = {}
gajim.contacts.add_account(account)
gajim.gc_connected[account] = {}
gajim.automatic_rooms[account] = {}
gajim.newly_added[account] = []
gajim.to_be_removed[account] = []
if account == gajim.ZEROCONF_ACC_NAME:
gajim.nicks[account] = gajim.ZEROCONF_ACC_NAME
else:
gajim.nicks[account] = gajim.config.get_per('accounts', account,
'name')
gajim.block_signed_in_notifications[account] = True
gajim.sleeper_state[account] = 'off'
gajim.encrypted_chats[account] = []
gajim.last_message_time[account] = {}
gajim.status_before_autoaway[account] = ''
gajim.transport_avatar[account] = {}
gajim.gajim_optional_features[account] = []
gajim.caps_hash[account] = ''
helpers.update_optional_features(account)
# refresh roster
if len(gajim.connections) >= 2:
# Do not merge accounts if only one exists
gajim.interface.roster.regroup = gajim.config.get('mergeaccounts')
else:
gajim.interface.roster.regroup = False
gajim.interface.roster.setup_and_draw_roster()
gajim.interface.roster.set_actions_menu_needs_rebuild()
def on_enable_zeroconf_checkbutton2_toggled(self, widget):
        # don't do anything if there is an account with the local name but it
        # is a normal account
if self.ignore_events:
return
if self.current_account in gajim.connections and \
gajim.connections[self.current_account].connected > 0:
self.ignore_events = True
self.xml.get_object('enable_zeroconf_checkbutton2').set_active(True)
self.ignore_events = False
dialogs.ErrorDialog(
_('You are currently connected to the server'),
_('To disable the account, you must be disconnected.'),
transient_for=self.window)
return
if gajim.ZEROCONF_ACC_NAME in gajim.connections and not \
gajim.connections[gajim.ZEROCONF_ACC_NAME].is_zeroconf:
gajim.nec.push_incoming_event(InformationEvent(None,
conn=gajim.connections[gajim.ZEROCONF_ACC_NAME],
level='error', pri_txt=_('Account Local already exists.'),
sec_txt=_('Please rename or remove it before enabling '
'link-local messaging.')))
return
if gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME, 'active') \
and not widget.get_active():
self.xml.get_object('zeroconf_notebook').set_sensitive(False)
# disable
self._disable_account(gajim.ZEROCONF_ACC_NAME)
elif not gajim.config.get_per('accounts', gajim.ZEROCONF_ACC_NAME,
'active') and widget.get_active():
self.xml.get_object('zeroconf_notebook').set_sensitive(True)
# enable (will create new account if not present)
self._enable_account(gajim.ZEROCONF_ACC_NAME)
self.on_checkbutton_toggled(widget, 'active',
account=gajim.ZEROCONF_ACC_NAME)
def on_enable_checkbutton1_toggled(self, widget):
if self.ignore_events:
return
if self.current_account in gajim.connections and \
gajim.connections[self.current_account].connected > 0:
# connecting or connected
self.ignore_events = True
self.xml.get_object('enable_checkbutton1').set_active(True)
self.ignore_events = False
dialogs.ErrorDialog(
_('You are currently connected to the server'),
_('To disable the account, you must be disconnected.'),
transient_for=self.window)
return
# add/remove account in roster and all variables
if widget.get_active():
# enable
self._enable_account(self.current_account)
else:
# disable
self._disable_account(self.current_account)
self.on_checkbutton_toggled(widget, 'active',
account=self.current_account, change_sensitivity_widgets=[
self.xml.get_object('normal_notebook1')])
def on_custom_port_checkbutton2_toggled(self, widget):
self.xml.get_object('custom_port_entry2').set_sensitive(
widget.get_active())
self.on_checkbutton_toggled(widget, 'use_custom_host',
account=self.current_account)
if not widget.get_active():
self.xml.get_object('custom_port_entry2').set_text('5298')
def on_first_name_entry2_changed(self, widget):
if self.ignore_events:
return
name = widget.get_text()
if self.option_changed('zeroconf_first_name', name):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account,
'zeroconf_first_name', name)
def on_last_name_entry2_changed(self, widget):
if self.ignore_events:
return
name = widget.get_text()
if self.option_changed('zeroconf_last_name', name):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account,
'zeroconf_last_name', name)
def on_jabber_id_entry2_changed(self, widget):
if self.ignore_events:
return
id_ = widget.get_text()
if self.option_changed('zeroconf_jabber_id', id_):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account,
'zeroconf_jabber_id', id_)
def on_email_entry2_changed(self, widget):
if self.ignore_events:
return
email = widget.get_text()
if self.option_changed('zeroconf_email', email):
self.need_relogin = True
gajim.config.set_per('accounts', self.current_account,
'zeroconf_email', email)
class FakeDataForm(Gtk.Table, object):
"""
    Class for forms that are in XML format (<entry1>value1</entry1>),
    rendered as a table built from an infos dict ({entry1: value1})
"""
def __init__(self, infos, selectable=False):
GObject.GObject.__init__(self)
self.infos = infos
self.selectable = selectable
self.entries = {}
self._draw_table()
def _draw_table(self):
"""
Draw the table
"""
nbrow = 0
if 'instructions' in self.infos:
nbrow = 1
            self.resize(rows=nbrow, columns=2)
label = Gtk.Label(label=self.infos['instructions'])
if self.selectable:
label.set_selectable(True)
self.attach(label, 0, 2, 0, 1, 0, 0, 0, 0)
for name in self.infos.keys():
if name in ('key', 'instructions', 'x', 'registered'):
continue
if not name:
continue
            nbrow += 1
            self.resize(rows=nbrow, columns=2)
label = Gtk.Label(label=name.capitalize() + ':')
self.attach(label, 0, 1, nbrow - 1, nbrow, 0, 0, 0, 0)
entry = Gtk.Entry()
entry.set_activates_default(True)
if self.infos[name]:
entry.set_text(self.infos[name])
if name == 'password':
entry.set_visibility(False)
self.attach(entry, 1, 2, nbrow - 1, nbrow, 0, 0, 0, 0)
self.entries[name] = entry
if nbrow == 1:
entry.grab_focus()
def get_infos(self):
for name in self.entries.keys():
self.infos[name] = self.entries[name].get_text()
return self.infos
class ServiceRegistrationWindow:
"""
    Class for the Service registration window, which appears when we want to
    register with a service. If is_form is True we use dataforms_widget,
    otherwise we fall back to a FakeDataForm
"""
def __init__(self, service, infos, account, is_form):
self.service = service
self.account = account
self.is_form = is_form
self.xml = gtkgui_helpers.get_gtk_builder('service_registration_window.ui')
self.window = self.xml.get_object('service_registration_window')
self.window.set_transient_for(gajim.interface.roster.window)
if self.is_form:
dataform = dataforms.ExtendForm(node = infos)
self.data_form_widget = dataforms_widget.DataFormWidget(dataform)
if self.data_form_widget.title:
self.window.set_title('%s - Gajim' % self.data_form_widget.title)
grid = self.xml.get_object('grid')
grid.attach(self.data_form_widget, 0, 0, 2, 1)
else:
if 'registered' in infos:
self.window.set_title(_('Edit %s') % service)
else:
self.window.set_title(_('Register to %s') % service)
self.data_form_widget = FakeDataForm(infos)
grid = self.xml.get_object('grid')
grid.attach(self.data_form_widget, 0, 0, 2, 1)
self.xml.connect_signals(self)
self.window.show_all()
def on_cancel_button_clicked(self, widget):
self.window.destroy()
def on_ok_button_clicked(self, widget):
# send registration info to the core
if self.is_form:
form = self.data_form_widget.data_form
gajim.connections[self.account].register_agent(self.service,
form, True) # True is for is_form
else:
infos = self.data_form_widget.get_infos()
if 'instructions' in infos:
del infos['instructions']
if 'registered' in infos:
del infos['registered']
gajim.connections[self.account].register_agent(self.service, infos)
self.window.destroy()
class GroupchatConfigWindow:
def __init__(self, account, room_jid, form=None):
self.account = account
self.room_jid = room_jid
self.form = form
self.remove_button = {}
self.affiliation_treeview = {}
        self.start_users_dict = {}  # affiliation lists as they were at the start
self.affiliation_labels = {'outcast': _('Ban List'),
'member': _('Member List'), 'owner': _('Owner List'),
'admin':_('Administrator List')}
self.xml = gtkgui_helpers.get_gtk_builder('data_form_window.ui',
'data_form_window')
self.window = self.xml.get_object('data_form_window')
self.window.set_transient_for(gajim.interface.roster.window)
if self.form:
config_vbox = self.xml.get_object('config_vbox')
self.data_form_widget = dataforms_widget.DataFormWidget(self.form)
            # hide the scrollbar of this data_form_widget; we already have one
            # in this window
sw = self.data_form_widget.xml.get_object(
'single_form_scrolledwindow')
sw.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.NEVER)
if self.form.title:
self.xml.get_object('title_label').set_text(self.form.title)
else:
self.xml.get_object('title_hseparator').set_no_show_all(True)
self.xml.get_object('title_hseparator').hide()
self.data_form_widget.show()
config_vbox.pack_start(self.data_form_widget, True, True, 0)
else:
self.xml.get_object('title_label').set_no_show_all(True)
self.xml.get_object('title_label').hide()
self.xml.get_object('title_hseparator').set_no_show_all(True)
self.xml.get_object('title_hseparator').hide()
self.xml.get_object('config_hseparator').set_no_show_all(True)
self.xml.get_object('config_hseparator').hide()
# Draw the edit affiliation list things
add_on_vbox = self.xml.get_object('add_on_vbox')
for affiliation in self.affiliation_labels.keys():
self.start_users_dict[affiliation] = {}
hbox = Gtk.HBox(spacing=5)
add_on_vbox.pack_start(hbox, False, True, 0)
label = Gtk.Label(label=self.affiliation_labels[affiliation])
hbox.pack_start(label, False, True, 0)
bb = Gtk.HButtonBox()
bb.set_layout(Gtk.ButtonBoxStyle.END)
bb.set_spacing(5)
hbox.pack_start(bb, True, True, 0)
add_button = Gtk.Button(stock=Gtk.STOCK_ADD)
add_button.connect('clicked', self.on_add_button_clicked,
affiliation)
bb.pack_start(add_button, True, True, 0)
self.remove_button[affiliation] = Gtk.Button(stock=Gtk.STOCK_REMOVE)
self.remove_button[affiliation].set_sensitive(False)
self.remove_button[affiliation].connect('clicked',
self.on_remove_button_clicked, affiliation)
bb.pack_start(self.remove_button[affiliation], True, True, 0)
# jid, reason, nick, role
liststore = Gtk.ListStore(str, str, str, str)
self.affiliation_treeview[affiliation] = Gtk.TreeView(liststore)
self.affiliation_treeview[affiliation].get_selection().set_mode(
Gtk.SelectionMode.MULTIPLE)
self.affiliation_treeview[affiliation].connect('cursor-changed',
self.on_affiliation_treeview_cursor_changed, affiliation)
renderer = Gtk.CellRendererText()
col = Gtk.TreeViewColumn(_('JID'), renderer)
col.add_attribute(renderer, 'text', 0)
col.set_resizable(True)
col.set_sort_column_id(0)
self.affiliation_treeview[affiliation].append_column(col)
if affiliation == 'outcast':
renderer = Gtk.CellRendererText()
renderer.set_property('editable', True)
renderer.connect('edited', self.on_cell_edited)
col = Gtk.TreeViewColumn(_('Reason'), renderer)
col.add_attribute(renderer, 'text', 1)
col.set_resizable(True)
col.set_sort_column_id(1)
self.affiliation_treeview[affiliation].append_column(col)
elif affiliation == 'member':
renderer = Gtk.CellRendererText()
col = Gtk.TreeViewColumn(_('Nick'), renderer)
col.add_attribute(renderer, 'text', 2)
col.set_resizable(True)
col.set_sort_column_id(2)
self.affiliation_treeview[affiliation].append_column(col)
renderer = Gtk.CellRendererText()
col = Gtk.TreeViewColumn(_('Role'), renderer)
col.add_attribute(renderer, 'text', 3)
col.set_resizable(True)
col.set_sort_column_id(3)
self.affiliation_treeview[affiliation].append_column(col)
sw = Gtk.ScrolledWindow()
sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.NEVER)
sw.add(self.affiliation_treeview[affiliation])
add_on_vbox.pack_start(sw, True, True, 0)
gajim.connections[self.account].get_affiliation_list(self.room_jid,
affiliation)
self.xml.connect_signals(self)
self.window.show_all()
def on_cancel_button_clicked(self, widget):
self.window.destroy()
def on_cell_edited(self, cell, path, new_text):
model = self.affiliation_treeview['outcast'].get_model()
iter_ = model.get_iter(path)
model[iter_][1] = new_text
def on_add_button_clicked(self, widget, affiliation):
if affiliation == 'outcast':
title = _('Banning...')
#You can move '\n' before user@domain if that line is TOO BIG
prompt = _('<b>Whom do you want to ban?</b>\n\n')
elif affiliation == 'member':
title = _('Adding Member...')
prompt = _('<b>Whom do you want to make a member?</b>\n\n')
elif affiliation == 'owner':
title = _('Adding Owner...')
prompt = _('<b>Whom do you want to make an owner?</b>\n\n')
else:
title = _('Adding Administrator...')
prompt = _('<b>Whom do you want to make an administrator?</b>\n\n')
prompt += _('Can be one of the following:\n'
'1. user@domain/resource (only that resource matches).\n'
'2. user@domain (any resource matches).\n'
'3. domain/resource (only that resource matches).\n'
'4. domain (the domain itself matches, as does any user@domain,\n'
'domain/resource, or address containing a subdomain).')
def on_ok(jid):
if not jid:
return
model = self.affiliation_treeview[affiliation].get_model()
model.append((jid, '', '', ''))
dialogs.InputDialog(title, prompt, ok_handler=on_ok)
def on_remove_button_clicked(self, widget, affiliation):
selection = self.affiliation_treeview[affiliation].get_selection()
model, paths = selection.get_selected_rows()
row_refs = []
for path in paths:
row_refs.append(Gtk.TreeRowReference.new(model, path))
for row_ref in row_refs:
path = row_ref.get_path()
iter_ = model.get_iter(path)
model.remove(iter_)
self.remove_button[affiliation].set_sensitive(False)
def on_affiliation_treeview_cursor_changed(self, widget, affiliation):
self.remove_button[affiliation].set_sensitive(True)
def affiliation_list_received(self, users_dict):
"""
Fill the affiliation treeview
"""
for jid in users_dict:
affiliation = users_dict[jid]['affiliation']
if affiliation not in self.affiliation_labels.keys():
# Unknown affiliation or 'none' affiliation, do not show it
continue
self.start_users_dict[affiliation][jid] = users_dict[jid]
tv = self.affiliation_treeview[affiliation]
model = tv.get_model()
reason = users_dict[jid].get('reason', '')
nick = users_dict[jid].get('nick', '')
role = users_dict[jid].get('role', '')
model.append((jid, reason, nick, role))
def on_data_form_window_destroy(self, widget):
del gajim.interface.instances[self.account]['gc_config'][self.room_jid]
def on_ok_button_clicked(self, widget):
if self.form:
form = self.data_form_widget.data_form
gajim.connections[self.account].send_gc_config(self.room_jid, form)
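        # Diff the treeview contents against the affiliation lists received
        # when the window opened, so only actual changes are sent back.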
for affiliation in self.affiliation_labels.keys():
users_dict = {}
actual_jid_list = []
model = self.affiliation_treeview[affiliation].get_model()
iter_ = model.get_iter_first()
# add new jid
while iter_:
jid = model[iter_][0]
actual_jid_list.append(jid)
if jid not in self.start_users_dict[affiliation] or \
(affiliation == 'outcast' and 'reason' in self.start_users_dict[
affiliation][jid] and self.start_users_dict[affiliation][jid]\
['reason'] != model[iter_][1]):
users_dict[jid] = {'affiliation': affiliation}
if affiliation == 'outcast':
users_dict[jid]['reason'] = model[iter_][1]
iter_ = model.iter_next(iter_)
# remove removed one
for jid in self.start_users_dict[affiliation]:
if jid not in actual_jid_list:
users_dict[jid] = {'affiliation': 'none'}
if users_dict:
gajim.connections[self.account].send_gc_affiliation_list(
self.room_jid, users_dict)
self.window.destroy()
#---------- RemoveAccountWindow class -------------#
class RemoveAccountWindow:
"""
Ask for removing from gajim only or from gajim and server too and do
removing of the account given
"""
def on_remove_account_window_destroy(self, widget):
if self.account in gajim.interface.instances:
del gajim.interface.instances[self.account]['remove_account']
def on_cancel_button_clicked(self, widget):
self.window.destroy()
def __init__(self, account):
self.account = account
xml = gtkgui_helpers.get_gtk_builder('remove_account_window.ui')
self.window = xml.get_object('remove_account_window')
self.window.set_transient_for(gajim.interface.roster.window)
self.remove_and_unregister_radiobutton = xml.get_object(
'remove_and_unregister_radiobutton')
self.window.set_title(_('Removing %s account') % self.account)
xml.connect_signals(self)
self.window.show_all()
def on_remove_button_clicked(self, widget):
def remove():
if self.account in gajim.connections and \
gajim.connections[self.account].connected and \
not self.remove_and_unregister_radiobutton.get_active():
# change status to offline only if we will not remove this JID from
# server
gajim.connections[self.account].change_status('offline', 'offline')
if self.remove_and_unregister_radiobutton.get_active():
                if self.account not in gajim.connections:
dialogs.ErrorDialog(
_('Account is disabled'),
_('To unregister from a server, account must be '
'enabled.'))
return
if not gajim.connections[self.account].password:
def on_ok(passphrase, checked):
if passphrase == -1:
                            # don't remove the account; the password dialog was cancelled
return
gajim.connections[self.account].password = passphrase
gajim.connections[self.account].unregister_account(
self._on_remove_success)
dialogs.PassphraseDialog(
_('Password Required'),
_('Enter your password for account %s') % self.account,
_('Save password'), ok_handler=on_ok)
return
gajim.connections[self.account].unregister_account(
self._on_remove_success)
else:
self._on_remove_success(True)
if self.account in gajim.connections and \
gajim.connections[self.account].connected:
dialogs.ConfirmationDialog(
_('Account "%s" is connected to the server') % self.account,
_('If you remove it, the connection will be lost.'),
on_response_ok=remove)
else:
remove()
def on_remove_responce_ok(self, is_checked):
if is_checked[0]:
self._on_remove_success(True)
def _on_remove_success(self, res):
        # the unregistration action failed, so we don't remove the account.
        # The error message is sent by connect_and_auth()
if not res:
dialogs.ConfirmationDialogDoubleRadio(
_('Connection to server %s failed') % self.account,
_('What would you like to do?'),
_('Remove only from Gajim'),
_('Don\'t remove anything. I\'ll try again later'),
on_response_ok=self.on_remove_responce_ok, is_modal=False)
return
# Close all opened windows
gajim.interface.roster.close_all(self.account, force=True)
if self.account in gajim.connections:
gajim.connections[self.account].disconnect(on_purpose=True)
gajim.connections[self.account].cleanup()
del gajim.connections[self.account]
gajim.logger.remove_roster(gajim.get_jid_from_account(self.account))
gajim.config.del_per('accounts', self.account)
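        # Drop all per-account runtime state kept in module-level dicts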
del gajim.interface.instances[self.account]
if self.account in gajim.nicks:
del gajim.interface.minimized_controls[self.account]
del gajim.nicks[self.account]
del gajim.block_signed_in_notifications[self.account]
del gajim.groups[self.account]
gajim.contacts.remove_account(self.account)
del gajim.gc_connected[self.account]
del gajim.automatic_rooms[self.account]
del gajim.to_be_removed[self.account]
del gajim.newly_added[self.account]
del gajim.sleeper_state[self.account]
del gajim.encrypted_chats[self.account]
del gajim.last_message_time[self.account]
del gajim.status_before_autoaway[self.account]
del gajim.transport_avatar[self.account]
del gajim.gajim_optional_features[self.account]
del gajim.caps_hash[self.account]
if len(gajim.connections) >= 2: # Do not merge accounts if only one exists
gajim.interface.roster.regroup = gajim.config.get('mergeaccounts')
else:
gajim.interface.roster.regroup = False
gajim.interface.roster.setup_and_draw_roster()
gajim.interface.roster.set_actions_menu_needs_rebuild()
if 'accounts' in gajim.interface.instances:
gajim.interface.instances['accounts'].init_accounts()
gajim.interface.instances['accounts'].init_account()
self.window.destroy()
#---------- ManageBookmarksWindow class -------------#
class ManageBookmarksWindow:
def __init__(self):
self.xml = gtkgui_helpers.get_gtk_builder('manage_bookmarks_window.ui')
self.window = self.xml.get_object('manage_bookmarks_window')
self.window.set_transient_for(gajim.interface.roster.window)
self.ignore_events = False
        # Account-JID, RoomName, Room-JID, Autojoin, Minimize, Password, Nick,
        # Show_Status
self.treestore = Gtk.TreeStore(str, str, str, bool, bool, str, str, str)
self.treestore.set_sort_column_id(1, Gtk.SortType.ASCENDING)
# Store bookmarks in treeview.
for account in gajim.connections:
if gajim.connections[account].connected <= 1:
continue
if gajim.connections[account].is_zeroconf:
continue
if not gajim.connections[account].private_storage_supported:
continue
iter_ = self.treestore.append(None, [None, account, None, None,
None, None, None, None])
for bookmark in gajim.connections[account].bookmarks:
if not bookmark['name']:
# No name was given for this bookmark.
# Use the first part of JID instead...
name = bookmark['jid'].split("@")[0]
bookmark['name'] = name
                # convert '1', '0', 'true', 'false' (or other) to True/False
autojoin = helpers.from_xs_boolean_to_python_boolean(
bookmark['autojoin'])
minimize = helpers.from_xs_boolean_to_python_boolean(
bookmark['minimize'])
print_status = bookmark.get('print_status', '')
if print_status not in ('', 'all', 'in_and_out', 'none'):
print_status = ''
self.treestore.append(iter_, [
account,
bookmark['name'],
bookmark['jid'],
autojoin,
minimize,
bookmark['password'],
bookmark['nick'],
print_status ])
self.print_status_combobox = self.xml.get_object('print_status_combobox')
model = Gtk.ListStore(str, str)
self.option_list = {'': _('Default'), 'all': Q_('?print_status:All'),
'in_and_out': _('Enter and leave only'),
'none': Q_('?print_status:None')}
opts = sorted(self.option_list.keys())
for opt in opts:
model.append([self.option_list[opt], opt])
self.print_status_combobox.set_model(model)
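        # index 1 of the sorted option keys is 'all'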
self.print_status_combobox.set_active(1)
self.view = self.xml.get_object('bookmarks_treeview')
self.view.set_model(self.treestore)
self.view.expand_all()
renderer = Gtk.CellRendererText()
column = Gtk.TreeViewColumn('Bookmarks', renderer, text=1)
self.view.append_column(column)
self.selection = self.view.get_selection()
self.selection.connect('changed', self.bookmark_selected)
#Prepare input fields
self.title_entry = self.xml.get_object('title_entry')
self.title_entry.connect('changed', self.on_title_entry_changed)
self.nick_entry = self.xml.get_object('nick_entry')
self.nick_entry.connect('changed', self.on_nick_entry_changed)
self.server_entry = self.xml.get_object('server_entry')
self.server_entry.connect('changed', self.on_server_entry_changed)
self.room_entry = self.xml.get_object('room_entry')
self.room_entry_changed_id = self.room_entry.connect('changed',
self.on_room_entry_changed)
self.pass_entry = self.xml.get_object('pass_entry')
self.pass_entry.connect('changed', self.on_pass_entry_changed)
self.autojoin_checkbutton = self.xml.get_object('autojoin_checkbutton')
self.minimize_checkbutton = self.xml.get_object('minimize_checkbutton')
self.xml.connect_signals(self)
self.window.show_all()
# select root iter
self.selection.select_iter(self.treestore.get_iter_first())
def on_add_bookmark_button_clicked(self, widget):
"""
Add a new bookmark
"""
# Get the account that is currently used
# (the parent of the currently selected item)
(model, iter_) = self.selection.get_selected()
if not iter_: # Nothing selected, do nothing
return
parent = model.iter_parent(iter_)
if parent:
# We got a bookmark selected, so we add_to the parent
add_to = parent
else:
# No parent, so we got an account -> add to this.
add_to = iter_
account = model[add_to][1]
nick = gajim.nicks[account]
iter_ = self.treestore.append(add_to, [account, _('New Group Chat'),
'@', False, False, '', nick, 'in_and_out'])
self.view.expand_row(model.get_path(add_to), True)
self.view.set_cursor(model.get_path(iter_))
def on_remove_bookmark_button_clicked(self, widget):
"""
Remove selected bookmark
"""
(model, iter_) = self.selection.get_selected()
if not iter_: # Nothing selected
return
if not model.iter_parent(iter_):
# Don't remove account iters
return
self.ignore_events = True
model.remove(iter_)
self.selection.unselect_all()
self.clear_fields()
self.ignore_events = False
def check_valid_bookmark(self):
"""
        Check if all necessary fields are entered correctly
"""
(model, iter_) = self.selection.get_selected()
if not model.iter_parent(iter_):
#Account data can't be changed
return
if self.server_entry.get_text() == '' or \
self.room_entry.get_text() == '':
dialogs.ErrorDialog(_('This bookmark has invalid data'),
_('Please be sure to fill out server and room fields or remove this'
' bookmark.'))
return False
return True
def on_ok_button_clicked(self, widget):
"""
Parse the treestore data into our new bookmarks array, then send the new
bookmarks to the server.
"""
(model, iter_) = self.selection.get_selected()
if iter_ and model.iter_parent(iter_):
#bookmark selected, check it
if not self.check_valid_bookmark():
return
for account in self.treestore:
acct = account[1]
gajim.connections[acct].bookmarks = []
for bm in account.iterchildren():
# Convert True/False/None to '1' or '0'
autojoin = str(int(bm[3]))
minimize = str(int(bm[4]))
name = bm[1]
jid = bm[2]
pw = bm[5]
nick = bm[6]
# create the bookmark-dict
bmdict = { 'name': name, 'jid': jid, 'autojoin': autojoin,
'minimize': minimize, 'password': pw, 'nick': nick,
'print_status': bm[7]}
gajim.connections[acct].bookmarks.append(bmdict)
gajim.connections[acct].store_bookmarks()
gajim.interface.roster.set_actions_menu_needs_rebuild()
self.window.destroy()
def on_cancel_button_clicked(self, widget):
self.window.destroy()
def bookmark_selected(self, selection):
"""
        Fill the bookmark's data into the fields.
"""
(model, iter_) = selection.get_selected()
if not iter_:
# After removing the last bookmark for one account
# this will be None, so we will just:
return
widgets = [ self.title_entry, self.nick_entry, self.room_entry,
self.server_entry, self.pass_entry, self.autojoin_checkbutton,
self.minimize_checkbutton, self.print_status_combobox]
if model.iter_parent(iter_):
# make the fields sensitive
for field in widgets:
field.set_sensitive(True)
else:
# Top-level has no data (it's the account fields)
# clear fields & make them insensitive
self.clear_fields()
for field in widgets:
field.set_sensitive(False)
return
        # Fill in the data for child rows
self.title_entry.set_text(model[iter_][1])
room_jid = model[iter_][2]
(room, server) = room_jid.split('@')
self.room_entry.handler_block(self.room_entry_changed_id)
self.room_entry.set_text(room)
self.room_entry.handler_unblock(self.room_entry_changed_id)
self.server_entry.set_text(server)
self.autojoin_checkbutton.set_active(model[iter_][3])
self.minimize_checkbutton.set_active(model[iter_][4])
# sensitive only if auto join is checked
self.minimize_checkbutton.set_sensitive(model[iter_][3])
        password = model[iter_][5]
if password:
self.pass_entry.set_text(password)
else:
self.pass_entry.set_text('')
nick = model[iter_][6]
if nick:
self.nick_entry.set_text(nick)
else:
self.nick_entry.set_text('')
print_status = model[iter_][7]
opts = sorted(self.option_list.keys())
self.print_status_combobox.set_active(opts.index(print_status))
def on_title_entry_changed(self, widget):
if self.ignore_events:
return
(model, iter_) = self.selection.get_selected()
if iter_: # After removing a bookmark, we got nothing selected
if model.iter_parent(iter_):
# Don't clear the title field for account nodes
model[iter_][1] = self.title_entry.get_text()
def on_nick_entry_changed(self, widget):
if self.ignore_events:
return
(model, iter_) = self.selection.get_selected()
if iter_:
nick = self.nick_entry.get_text()
try:
nick = helpers.parse_resource(nick)
except helpers.InvalidFormat:
dialogs.ErrorDialog(_('Invalid nickname'),
_('Character not allowed'), transient_for=self.window)
self.nick_entry.set_text(model[iter_][6])
return True
model[iter_][6] = nick
def on_server_entry_changed(self, widget):
if self.ignore_events:
return
(model, iter_) = self.selection.get_selected()
if not iter_:
return
server = widget.get_text()
if not server:
return
if '@' in server:
dialogs.ErrorDialog(_('Invalid server'),
_('Character not allowed'), transient_for=self.window)
widget.set_text(server.replace('@', ''))
room = self.room_entry.get_text().strip()
if not room:
return
room_jid = room + '@' + server.strip()
try:
room_jid = helpers.parse_jid(room_jid)
except helpers.InvalidFormat as e:
dialogs.ErrorDialog(_('Invalid server'),
_('Character not allowed'), transient_for=self.window)
self.server_entry.set_text(model[iter_][2].split('@')[1])
return True
model[iter_][2] = room_jid
def on_room_entry_changed(self, widget):
if self.ignore_events:
return
(model, iter_) = self.selection.get_selected()
if not iter_:
return
room = widget.get_text()
if not room:
return
if '@' in room:
room, server = room.split('@', 1)
widget.set_text(room)
if server:
self.server_entry.set_text(server)
self.server_entry.grab_focus()
server = self.server_entry.get_text().strip()
if not server:
return
room_jid = room.strip() + '@' + server
try:
room_jid = helpers.parse_jid(room_jid)
except helpers.InvalidFormat:
dialogs.ErrorDialog(_('Invalid room'),
_('Character not allowed'), transient_for=self.window)
return True
model[iter_][2] = room_jid
def on_pass_entry_changed(self, widget):
if self.ignore_events:
return
(model, iter_) = self.selection.get_selected()
if iter_:
model[iter_][5] = self.pass_entry.get_text()
def on_autojoin_checkbutton_toggled(self, widget):
if self.ignore_events:
return
(model, iter_) = self.selection.get_selected()
if iter_:
model[iter_][3] = self.autojoin_checkbutton.get_active()
self.minimize_checkbutton.set_sensitive(model[iter_][3])
def on_minimize_checkbutton_toggled(self, widget):
if self.ignore_events:
return
(model, iter_) = self.selection.get_selected()
if iter_:
model[iter_][4] = self.minimize_checkbutton.get_active()
def on_print_status_combobox_changed(self, widget):
if self.ignore_events:
return
active = widget.get_active()
model = widget.get_model()
print_status = model[active][1]
(model2, iter_) = self.selection.get_selected()
if iter_:
model2[iter_][7] = print_status
def clear_fields(self):
widgets = [ self.title_entry, self.nick_entry, self.room_entry,
self.server_entry, self.pass_entry ]
for field in widgets:
field.set_text('')
self.autojoin_checkbutton.set_active(False)
self.minimize_checkbutton.set_active(False)
self.print_status_combobox.set_active(1)
class AccountCreationWizardWindow:
def __init__(self):
self.xml = gtkgui_helpers.get_gtk_builder(
'account_creation_wizard_window.ui')
self.window = self.xml.get_object('account_creation_wizard_window')
self.window.set_transient_for(gajim.interface.roster.window)
# Connect events from comboboxtext_entry
server_comboboxtext = self.xml.get_object('server_comboboxtext')
entry = self.xml.get_object('server_comboboxtext_entry')
entry.connect('key_press_event',
self.on_server_comboboxentry_key_press_event, server_comboboxtext)
server_comboboxtext1 = self.xml.get_object('server_comboboxtext1')
self.update_proxy_list()
# parse servers.xml
servers_xml = os.path.join(gajim.DATA_DIR, 'other', 'servers.xml')
servers = gtkgui_helpers.parse_server_xml(servers_xml)
servers_model = self.xml.get_object('server_liststore')
for server in servers:
servers_model.append((server,))
server_comboboxtext.set_model(servers_model)
server_comboboxtext1.set_model(servers_model)
# Generic widgets
self.notebook = self.xml.get_object('notebook')
self.cancel_button = self.xml.get_object('cancel_button')
self.back_button = self.xml.get_object('back_button')
self.forward_button = self.xml.get_object('forward_button')
self.finish_button = self.xml.get_object('finish_button')
self.advanced_button = self.xml.get_object('advanced_button')
self.finish_label = self.xml.get_object('finish_label')
self.go_online_checkbutton = self.xml.get_object(
'go_online_checkbutton')
self.show_vcard_checkbutton = self.xml.get_object(
'show_vcard_checkbutton')
self.progressbar = self.xml.get_object('progressbar')
# some vars
self.update_progressbar_timeout_id = None
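        # Notebook pages: 0 = account type choice, 1 = existing account
        # details, 2 = server selection, 3 = SSL warning, 4 = registration
        # form, 5 = progress, 6 = finish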
self.notebook.set_current_page(0)
self.xml.connect_signals(self)
self.window.show_all()
gajim.ged.register_event_handler('new-account-connected', ged.GUI1,
self._nec_new_acc_connected)
gajim.ged.register_event_handler('new-account-not-connected', ged.GUI1,
self._nec_new_acc_not_connected)
gajim.ged.register_event_handler('account-created', ged.GUI1,
self._nec_acc_is_ok)
gajim.ged.register_event_handler('account-not-created', ged.GUI1,
self._nec_acc_is_not_ok)
def on_wizard_window_destroy(self, widget):
page = self.notebook.get_current_page()
if page in (4, 5) and self.account in gajim.connections:
# connection instance is saved in gajim.connections and we canceled
# the addition of the account
del gajim.connections[self.account]
if self.account in gajim.config.get_per('accounts'):
gajim.config.del_per('accounts', self.account)
gajim.ged.remove_event_handler('new-account-connected', ged.GUI1,
self._nec_new_acc_connected)
gajim.ged.remove_event_handler('new-account-not-connected', ged.GUI1,
self._nec_new_acc_not_connected)
gajim.ged.remove_event_handler('account-created', ged.GUI1,
self._nec_acc_is_ok)
gajim.ged.remove_event_handler('account-not-created', ged.GUI1,
self._nec_acc_is_not_ok)
del gajim.interface.instances['account_creation_wizard']
def on_register_server_features_button_clicked(self, widget):
helpers.launch_browser_mailer('url',
'http://www.jabber.org/network/oldnetwork.shtml')
def on_save_password_checkbutton_toggled(self, widget):
self.xml.get_object('password_entry').grab_focus()
def on_cancel_button_clicked(self, widget):
self.window.destroy()
def on_back_button_clicked(self, widget):
cur_page = self.notebook.get_current_page()
self.forward_button.set_sensitive(True)
if cur_page in (1, 2):
self.notebook.set_current_page(0)
self.back_button.set_sensitive(False)
elif cur_page == 3:
self.xml.get_object('form_vbox').remove(self.data_form_widget)
self.notebook.set_current_page(2) # show server page
elif cur_page == 4:
if self.account in gajim.connections:
del gajim.connections[self.account]
self.notebook.set_current_page(2)
self.xml.get_object('form_vbox').remove(self.data_form_widget)
elif cur_page == 6: # finish page
self.forward_button.show()
if self.modify:
self.notebook.set_current_page(1) # Go to parameters page
else:
self.notebook.set_current_page(2) # Go to server page
def on_anonymous_checkbutton1_toggled(self, widget):
active = widget.get_active()
self.xml.get_object('username_entry').set_sensitive(not active)
self.xml.get_object('password_entry').set_sensitive(not active)
self.xml.get_object('save_password_checkbutton').set_sensitive(
not active)
def show_finish_page(self):
self.cancel_button.hide()
self.back_button.hide()
self.forward_button.hide()
if self.modify:
finish_text = '<big><b>%s</b></big>\n\n%s' % (
_('Account has been added successfully'),
_('You can set advanced account options by pressing the '
'Advanced button, or later by choosing the Accounts menu item '
'under the Edit menu from the main window.'))
else:
finish_text = '<big><b>%s</b></big>\n\n%s' % (
_('Your new account has been created successfully'),
_('You can set advanced account options by pressing the '
'Advanced button, or later by choosing the Accounts menu item '
'under the Edit menu from the main window.'))
self.finish_label.set_markup(finish_text)
self.finish_button.show()
self.finish_button.set_property('has-default', True)
self.advanced_button.show()
self.go_online_checkbutton.show()
img = self.xml.get_object('finish_image')
if self.modify:
img.set_from_stock(Gtk.STOCK_APPLY, Gtk.IconSize.DIALOG)
else:
path_to_file = gtkgui_helpers.get_icon_path('gajim', 48)
img.set_from_file(path_to_file)
self.show_vcard_checkbutton.set_active(not self.modify)
self.notebook.set_current_page(6) # show finish page
def on_forward_button_clicked(self, widget):
cur_page = self.notebook.get_current_page()
if cur_page == 0:
widget = self.xml.get_object('use_existing_account_radiobutton')
if widget.get_active():
self.modify = True
self.notebook.set_current_page(1)
else:
self.modify = False
self.notebook.set_current_page(2)
self.back_button.set_sensitive(True)
return
elif cur_page == 1:
# We are adding an existing account
anonymous = self.xml.get_object('anonymous_checkbutton1').\
get_active()
username = self.xml.get_object('username_entry').get_text().strip()
if not username and not anonymous:
pritext = _('Invalid username')
sectext = _(
'You must provide a username to configure this account.')
dialogs.ErrorDialog(pritext, sectext)
return
server = self.xml.get_object('server_comboboxtext_entry').\
get_text().strip()
savepass = self.xml.get_object('save_password_checkbutton').\
get_active()
password = self.xml.get_object('password_entry').get_text()
if anonymous:
jid = ''
else:
                jid = username + '@' + server
# check if jid is conform to RFC and stringprep it
try:
jid = helpers.parse_jid(jid)
except helpers.InvalidFormat as s:
pritext = _('Invalid Jabber ID')
dialogs.ErrorDialog(pritext, str(s))
return
self.account = server
i = 1
while self.account in gajim.connections:
self.account = server + str(i)
i += 1
username, server = gajim.get_name_and_server_from_jid(jid)
if self.xml.get_object('anonymous_checkbutton1').get_active():
self.save_account('', server, False, '', anonymous=True)
else:
self.save_account(username, server, savepass, password)
self.show_finish_page()
elif cur_page == 2:
# We are creating a new account
server = self.xml.get_object('server_comboboxtext_entry1').\
get_text()
if not server:
dialogs.ErrorDialog(_('Invalid server'),
_('Please provide a server on which you want to register.'))
return
self.account = server
i = 1
while self.account in gajim.connections:
self.account = server + str(i)
i += 1
config = self.get_config('', server, '', '')
# Get advanced options
proxies_combobox = self.xml.get_object('proxies_combobox')
active = proxies_combobox.get_active()
proxy = proxies_combobox.get_model()[active][0]
if proxy == _('None'):
proxy = ''
config['proxy'] = proxy
config['use_custom_host'] = self.xml.get_object(
'custom_host_port_checkbutton').get_active()
custom_port = self.xml.get_object('custom_port_entry').get_text()
try:
custom_port = int(custom_port)
except Exception:
dialogs.ErrorDialog(_('Invalid entry'),
_('Custom port must be a port number.'))
return
config['custom_port'] = custom_port
config['custom_host'] = self.xml.get_object(
'custom_host_entry').get_text()
if self.xml.get_object('anonymous_checkbutton2').get_active():
self.modify = True
self.save_account('', server, False, '', anonymous=True)
self.show_finish_page()
else:
self.notebook.set_current_page(5) # show creating page
self.back_button.hide()
self.forward_button.hide()
self.update_progressbar_timeout_id = GLib.timeout_add(100,
self.update_progressbar)
                # Get the registration form from the server
con = connection.Connection(self.account)
gajim.connections[self.account] = con
con.new_account(self.account, config)
elif cur_page == 3:
checked = self.xml.get_object('ssl_checkbutton').get_active()
if checked:
hostname = gajim.connections[self.account].new_account_info[
'hostname']
                # Check if the cert is already in the file
                certs = ''
                if os.path.isfile(gajim.MY_CACERTS):
                    with open(gajim.MY_CACERTS) as f:
                        certs = f.read()
                if self.ssl_cert in certs:
                    dialogs.ErrorDialog(_('Certificate Already in File'),
                        _('This certificate is already in file %s, so it\'s '
                        'not added again.') % gajim.MY_CACERTS)
                else:
                    with open(gajim.MY_CACERTS, 'a') as f:
                        f.write(hostname + '\n')
                        f.write(self.ssl_cert + '\n\n')
gajim.connections[self.account].new_account_info[
'ssl_fingerprint_sha1'] = self.ssl_fingerprint_sha1
gajim.connections[self.account].new_account_info[
'ssl_fingerprint_sha256'] = self.ssl_fingerprint_sha256
            self.notebook.set_current_page(4) # show form page
elif cur_page == 4:
if self.is_form:
form = self.data_form_widget.data_form
else:
form = self.data_form_widget.get_infos()
gajim.connections[self.account].send_new_account_infos(form,
self.is_form)
self.xml.get_object('form_vbox').remove(self.data_form_widget)
            self.xml.get_object('progressbar_label').set_markup(_(
                '<b>Account is being created</b>\n\nPlease wait...'))
self.notebook.set_current_page(5) # show creating page
self.back_button.hide()
self.forward_button.hide()
self.update_progressbar_timeout_id = GLib.timeout_add(100,
self.update_progressbar)
def update_proxy_list(self):
proxies_combobox = self.xml.get_object('proxies_combobox')
model = Gtk.ListStore(str)
proxies_combobox.set_model(model)
        proxies = gajim.config.get_per('proxies')
        proxies.insert(0, _('None'))
        for proxy in proxies:
            model.append([proxy])
proxies_combobox.set_active(0)
def on_manage_proxies_button_clicked(self, widget):
if 'manage_proxies' in gajim.interface.instances:
gajim.interface.instances['manage_proxies'].window.present()
else:
gajim.interface.instances['manage_proxies'] = \
ManageProxiesWindow()
def on_custom_host_port_checkbutton_toggled(self, widget):
self.xml.get_object('custom_host_hbox').set_sensitive(widget.\
get_active())
def update_progressbar(self):
self.progressbar.pulse()
return True # loop forever
def _nec_new_acc_connected(self, obj):
"""
        Connection to server succeeded, present the form to the user
"""
# We receive events from all accounts from GED
if obj.conn.name != self.account:
return
if self.update_progressbar_timeout_id is not None:
GLib.source_remove(self.update_progressbar_timeout_id)
self.back_button.show()
self.forward_button.show()
self.is_form = obj.is_form
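        # If the server returned no fillable registration fields, keep the
        # Forward button disabled on the form page.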
empty_config = True
if obj.is_form:
dataform = dataforms.ExtendForm(node=obj.config)
self.data_form_widget = dataforms_widget.DataFormWidget()
self.data_form_widget.selectable = True
self.data_form_widget.set_data_form(dataform)
empty_config = False
else:
self.data_form_widget = FakeDataForm(obj.config, selectable=True)
for field in obj.config:
if field in ('key', 'instructions', 'x', 'registered'):
continue
empty_config = False
break
self.data_form_widget.show_all()
self.xml.get_object('form_vbox').pack_start(self.data_form_widget, True, True, 0)
if empty_config:
self.forward_button.set_sensitive(False)
self.notebook.set_current_page(4) # show form page
return
self.ssl_fingerprint_sha1 = obj.ssl_fingerprint_sha1
self.ssl_fingerprint_sha256 = obj.ssl_fingerprint_sha256
self.ssl_cert = obj.ssl_cert
if obj.ssl_msg:
            # An SSL warning occurred, show it
hostname = gajim.connections[self.account].new_account_info[
'hostname']
self.xml.get_object('ssl_label').set_markup(_(
'<b>Security Warning</b>'
'\n\nThe authenticity of the %(hostname)s SSL certificate could'
' be invalid.\nSSL Error: %(error)s\n'
'Do you still want to connect to this server?') % {
'hostname': hostname, 'error': obj.ssl_msg})
if obj.errnum in (18, 27):
text = _('Add this certificate to the list of trusted '
'certificates.\nSHA1 fingerprint of the certificate:\n%s'
'\nSHA256 fingerprint of the certificate:\n%s') \
% (obj.ssl_fingerprint_sha1, obj.ssl_fingerprint_sha256)
self.xml.get_object('ssl_checkbutton').set_label(text)
else:
self.xml.get_object('ssl_checkbutton').set_no_show_all(True)
self.xml.get_object('ssl_checkbutton').hide()
self.notebook.set_current_page(3) # show SSL page
else:
self.notebook.set_current_page(4) # show form page
def _nec_new_acc_not_connected(self, obj):
"""
Account creation failed: connection to server failed
"""
# We receive events from all accounts from GED
if obj.conn.name != self.account:
return
if self.account not in gajim.connections:
return
if self.update_progressbar_timeout_id is not None:
GLib.source_remove(self.update_progressbar_timeout_id)
del gajim.connections[self.account]
if self.account in gajim.config.get_per('accounts'):
gajim.config.del_per('accounts', self.account)
self.back_button.show()
self.cancel_button.show()
self.go_online_checkbutton.hide()
self.show_vcard_checkbutton.hide()
img = self.xml.get_object('finish_image')
img.set_from_stock(Gtk.STOCK_DIALOG_ERROR, Gtk.IconSize.DIALOG)
finish_text = '<big><b>%s</b></big>\n\n%s' % (
_('An error occurred during account creation'), obj.reason)
self.finish_label.set_markup(finish_text)
self.notebook.set_current_page(6) # show finish page
def _nec_acc_is_ok(self, obj):
"""
Account creation succeeded
"""
# We receive events from all accounts from GED
if obj.conn.name != self.account:
return
self.create_vars(obj.account_info)
self.show_finish_page()
if self.update_progressbar_timeout_id is not None:
GLib.source_remove(self.update_progressbar_timeout_id)
def _nec_acc_is_not_ok(self, obj):
"""
Account creation failed
"""
# We receive events from all accounts from GED
if obj.conn.name != self.account:
return
self.back_button.show()
self.cancel_button.show()
self.go_online_checkbutton.hide()
self.show_vcard_checkbutton.hide()
del gajim.connections[self.account]
if self.account in gajim.config.get_per('accounts'):
gajim.config.del_per('accounts', self.account)
img = self.xml.get_object('finish_image')
img.set_from_stock(Gtk.STOCK_DIALOG_ERROR, Gtk.IconSize.DIALOG)
finish_text = '<big><b>%s</b></big>\n\n%s' % (_(
'An error occurred during account creation'), obj.reason)
self.finish_label.set_markup(finish_text)
self.notebook.set_current_page(6) # show finish page
if self.update_progressbar_timeout_id is not None:
GLib.source_remove(self.update_progressbar_timeout_id)
def on_advanced_button_clicked(self, widget):
if 'accounts' in gajim.interface.instances:
gajim.interface.instances['accounts'].window.present()
else:
gajim.interface.instances['accounts'] = AccountsWindow()
gajim.interface.instances['accounts'].select_account(self.account)
self.window.destroy()
def on_finish_button_clicked(self, widget):
go_online = self.xml.get_object('go_online_checkbutton').get_active()
show_vcard = self.xml.get_object('show_vcard_checkbutton').get_active()
self.window.destroy()
if show_vcard:
gajim.interface.show_vcard_when_connect.append(self.account)
if go_online:
gajim.interface.roster.send_status(self.account, 'online', '')
def on_username_entry_key_press_event(self, widget, event):
# Check for pressed @ and jump to combobox if found
if event.keyval == Gdk.KEY_at:
entry = self.xml.get_object('server_comboboxtext_entry')
entry.grab_focus()
entry.set_position(-1)
return True
def on_server_comboboxentry_key_press_event(self, widget, event, combobox):
# If backspace is pressed in empty field, return to the nick entry field
backspace = event.keyval == Gdk.KEY_BackSpace
empty = len(combobox.get_active_text()) == 0
if backspace and empty and self.modify:
username_entry = self.xml.get_object('username_entry')
username_entry.grab_focus()
username_entry.set_position(-1)
return True
def get_config(self, login, server, savepass, password, anonymous=False):
config = {}
config['name'] = login
config['hostname'] = server
config['savepass'] = savepass
config['password'] = password
config['resource'] = 'Gajim'
config['anonymous_auth'] = anonymous
config['priority'] = 5
config['autoconnect'] = True
config['no_log_for'] = ''
config['sync_with_global_status'] = True
config['proxy'] = ''
config['usessl'] = False
config['use_custom_host'] = False
config['custom_port'] = 0
config['custom_host'] = ''
config['keyname'] = ''
config['keyid'] = ''
return config
def save_account(self, login, server, savepass, password, anonymous=False):
if self.account in gajim.connections:
dialogs.ErrorDialog(_('Account name is in use'),
_('You already have an account using this name.'))
return
con = connection.Connection(self.account)
con.password = password
config = self.get_config(login, server, savepass, password, anonymous)
if not self.modify:
con.new_account(self.account, config)
return
gajim.connections[self.account] = con
self.create_vars(config)
def create_vars(self, config):
gajim.config.add_per('accounts', self.account)
if not config['savepass']:
config['password'] = ''
for opt in config:
gajim.config.set_per('accounts', self.account, opt, config[opt])
# update variables
gajim.interface.instances[self.account] = {'infos': {}, 'disco': {},
'gc_config': {}, 'search': {}, 'online_dialog': {},
'sub_request': {}}
gajim.interface.minimized_controls[self.account] = {}
gajim.connections[self.account].connected = 0
gajim.connections[self.account].keepalives = gajim.config.get_per(
'accounts', self.account, 'keep_alive_every_foo_secs')
gajim.groups[self.account] = {}
gajim.contacts.add_account(self.account)
gajim.gc_connected[self.account] = {}
gajim.automatic_rooms[self.account] = {}
gajim.newly_added[self.account] = []
gajim.to_be_removed[self.account] = []
gajim.nicks[self.account] = config['name']
gajim.block_signed_in_notifications[self.account] = True
gajim.sleeper_state[self.account] = 'off'
gajim.encrypted_chats[self.account] = []
gajim.last_message_time[self.account] = {}
gajim.status_before_autoaway[self.account] = ''
gajim.transport_avatar[self.account] = {}
gajim.gajim_optional_features[self.account] = []
gajim.caps_hash[self.account] = ''
helpers.update_optional_features(self.account)
# refresh accounts window
if 'accounts' in gajim.interface.instances:
gajim.interface.instances['accounts'].init_accounts()
# refresh roster
if len(gajim.connections) >= 2:
# Do not merge accounts if only one exists
gajim.interface.roster.regroup = gajim.config.get('mergeaccounts')
else:
gajim.interface.roster.regroup = False
gajim.interface.roster.setup_and_draw_roster()
gajim.interface.roster.set_actions_menu_needs_rebuild()
class ManagePEPServicesWindow:
def __init__(self, account):
self.xml = gtkgui_helpers.get_gtk_builder('manage_pep_services_window.ui')
self.window = self.xml.get_object('manage_pep_services_window')
self.window.set_transient_for(gajim.interface.roster.window)
self.xml.get_object('configure_button').set_sensitive(False)
self.xml.get_object('delete_button').set_sensitive(False)
self.xml.connect_signals(self)
self.account = account
self.init_services()
self.xml.get_object('services_treeview').get_selection().connect(
'changed', self.on_services_selection_changed)
gajim.ged.register_event_handler('pep-config-received', ged.GUI1,
self._nec_pep_config_received)
gajim.ged.register_event_handler('agent-items-received', ged.GUI1,
self._nec_agent_items_received)
self.window.show_all()
def on_manage_pep_services_window_destroy(self, widget):
'''close window'''
del gajim.interface.instances[self.account]['pep_services']
gajim.ged.remove_event_handler('pep-config-received', ged.GUI1,
self._nec_pep_config_received)
gajim.ged.remove_event_handler('agent-items-received', ged.GUI1,
self._nec_agent_items_received)
def on_close_button_clicked(self, widget):
self.window.destroy()
def on_services_selection_changed(self, sel):
self.xml.get_object('configure_button').set_sensitive(True)
self.xml.get_object('delete_button').set_sensitive(True)
def init_services(self):
self.treeview = self.xml.get_object('services_treeview')
        # one column: the service node
self.treestore = Gtk.ListStore(str)
self.treeview.set_model(self.treestore)
col = Gtk.TreeViewColumn('Service')
self.treeview.append_column(col)
cellrenderer_text = Gtk.CellRendererText()
        col.pack_start(cellrenderer_text, True)
col.add_attribute(cellrenderer_text, 'text', 0)
our_jid = gajim.get_jid_from_account(self.account)
gajim.connections[self.account].discoverItems(our_jid)
def _nec_agent_items_received(self, obj):
our_jid = gajim.get_jid_from_account(self.account)
for item in obj.items:
if 'jid' in item and item['jid'] == our_jid and 'node' in item:
self.treestore.append([item['node']])
def node_removed(self, jid, node):
if jid != gajim.get_jid_from_account(self.account):
return
model = self.treeview.get_model()
iter_ = model.get_iter_first()
while iter_:
if model[iter_][0] == node:
model.remove(iter_)
break
iter_ = model.iter_next(iter_)
def node_not_removed(self, jid, node, msg):
if jid != gajim.get_jid_from_account(self.account):
return
dialogs.WarningDialog(_('PEP node was not removed'),
_('PEP node %(node)s was not removed: %(message)s') % {'node': node,
'message': msg})
def on_delete_button_clicked(self, widget):
selection = self.treeview.get_selection()
if not selection:
return
model, iter_ = selection.get_selected()
node = model[iter_][0]
our_jid = gajim.get_jid_from_account(self.account)
gajim.connections[self.account].send_pb_delete(our_jid, node,
on_ok=self.node_removed, on_fail=self.node_not_removed)
def on_configure_button_clicked(self, widget):
selection = self.treeview.get_selection()
if not selection:
return
model, iter_ = selection.get_selected()
node = model[iter_][0]
our_jid = gajim.get_jid_from_account(self.account)
gajim.connections[self.account].request_pb_configuration(our_jid, node)
def _nec_pep_config_received(self, obj):
def on_ok(form, node):
form.type_ = 'submit'
our_jid = gajim.get_jid_from_account(self.account)
gajim.connections[self.account].send_pb_configure(our_jid, node, form)
window = dialogs.DataFormWindow(obj.form, (on_ok, obj.node))
title = _('Configure %s') % obj.node
window.set_title(title)
window.show_all()
class ManageSoundsWindow:
def __init__(self):
self.xml = gtkgui_helpers.get_gtk_builder('manage_sounds_window.ui')
self.window = self.xml.get_object('manage_sounds_window')
self.window.set_transient_for(
gajim.interface.instances['preferences'].window)
# sounds treeview
self.sound_tree = self.xml.get_object('sounds_treeview')
# active, event ui name, path to sound file, event_config_name
model = Gtk.ListStore(bool, str, str, str)
self.sound_tree.set_model(model)
col = Gtk.TreeViewColumn(_('Active'))
self.sound_tree.append_column(col)
renderer = Gtk.CellRendererToggle()
renderer.set_property('activatable', True)
renderer.connect('toggled', self.sound_toggled_cb)
col.pack_start(renderer, True)
col.add_attribute(renderer, 'active', 0)
col = Gtk.TreeViewColumn(_('Event'))
self.sound_tree.append_column(col)
renderer = Gtk.CellRendererText()
col.pack_start(renderer, True)
col.add_attribute(renderer, 'text', 1)
self.fill_sound_treeview()
self.xml.connect_signals(self)
self.sound_tree.get_model().connect('row-changed',
self.on_sounds_treemodel_row_changed)
self.window.show_all()
def on_sounds_treemodel_row_changed(self, model, path, iter_):
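        # Persist any change made in the treeview straight to the config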
sound_event = model[iter_][3]
gajim.config.set_per('soundevents', sound_event, 'enabled',
bool(model[path][0]))
gajim.config.set_per('soundevents', sound_event, 'path',
model[iter_][2])
def sound_toggled_cb(self, cell, path):
model = self.sound_tree.get_model()
model[path][0] = not model[path][0]
def fill_sound_treeview(self):
model = self.sound_tree.get_model()
model.clear()
model.set_sort_column_id(1, Gtk.SortType.ASCENDING)
        # NOTE: sounds_dict MUST have all items of
        # sounds = gajim.config.get_per('soundevents') as keys
sounds_dict = {
'attention_received': _('Attention Message Received'),
'first_message_received': _('First Message Received'),
'next_message_received_focused': _('Next Message Received Focused'),
'next_message_received_unfocused':
_('Next Message Received Unfocused'),
'contact_connected': _('Contact Connected'),
'contact_disconnected': _('Contact Disconnected'),
'message_sent': _('Message Sent'),
'muc_message_highlight': _('Group Chat Message Highlight'),
'muc_message_received': _('Group Chat Message Received'),
'gmail_received': _('GMail Email Received')
}
for sound_event_config_name, sound_ui_name in sounds_dict.items():
enabled = gajim.config.get_per('soundevents',
sound_event_config_name, 'enabled')
path = gajim.config.get_per('soundevents',
sound_event_config_name, 'path')
model.append((enabled, sound_ui_name, path, sound_event_config_name))
def on_treeview_sounds_cursor_changed(self, widget, data = None):
sounds_entry = self.xml.get_object('sounds_entry')
sel = self.sound_tree.get_selection()
if not sel:
sounds_entry.set_text('')
return
(model, iter_) = sel.get_selected()
if not iter_:
sounds_entry.set_text('')
return
path_to_snd_file = model[iter_][2]
sounds_entry.set_text(path_to_snd_file)
def on_browse_for_sounds_button_clicked(self, widget, data = None):
sel = self.sound_tree.get_selection()
if not sel:
return
(model, iter_) = sel.get_selected()
if not iter_:
return
def on_ok(widget, path_to_snd_file):
self.dialog.destroy()
model, iter_ = self.sound_tree.get_selection().get_selected()
if not path_to_snd_file:
model[iter_][2] = ''
self.xml.get_object('sounds_entry').set_text('')
model[iter_][0] = False
return
directory = os.path.dirname(path_to_snd_file)
gajim.config.set('last_sounds_dir', directory)
path_to_snd_file = helpers.strip_soundfile_path(path_to_snd_file)
self.xml.get_object('sounds_entry').set_text(path_to_snd_file)
model[iter_][2] = path_to_snd_file # set new path to sounds_model
model[iter_][0] = True # set the sound to enabled
def on_cancel(widget):
self.dialog.destroy()
path_to_snd_file = model[iter_][2]
self.dialog = dialogs.SoundChooserDialog(path_to_snd_file, on_ok,
on_cancel, transient_for=self.window)
def on_sounds_entry_changed(self, widget):
path_to_snd_file = widget.get_text()
model, iter_ = self.sound_tree.get_selection().get_selected()
model[iter_][2] = path_to_snd_file # set new path to sounds_model
def on_play_button_clicked(self, widget):
sel = self.sound_tree.get_selection()
if not sel:
return
model, iter_ = sel.get_selected()
if not iter_:
return
snd_event_config_name = model[iter_][3]
helpers.play_sound(snd_event_config_name)
def on_close_button_clicked(self, widget):
self.window.hide()
def on_manage_sounds_window_delete_event(self, widget, event):
self.window.hide()
return True # do NOT destroy the window
| gpl-3.0 | -294,304,490,076,439,400 | 41.31188 | 95 | 0.590218 | false |
opendatakosovo/data-centar | import-budzet.py | 1 | 4132 | import argparse
from importer.rashodi_manager import RashodiDataImporter
from importer.prihodi_manager import PrihodiDataImporter
rashodi_importer = RashodiDataImporter()
prihodi_importer = PrihodiDataImporter()
def main_importer(data, municipalities):
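    # `data` is a comma-separated list of data sources ("prihodi" and/or
    # "rashodi"); `municipalities` is a comma-separated list of municipality
    # names, or "all" to import every supported municipality.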
mun_list = municipalities.split(",")
data_source = data.split(",")
for mun in mun_list:
if mun in ["all", "prijepolje"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_prijepolje("prijepolje", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_prijepolje()
if mun in ["all", "vranje"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_vranje("vranje", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_vranje()
if mun in ["all", "loznica"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_loznica("loznitsa", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_loznica()
if mun in ["all", "sombor"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_sombor("sombor", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_sombor()
if mun in ["all", "valjevo"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_valjevo("valjevo", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_valjevo()
if mun in ["all", "indjija"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_indjija("indjija", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_indjija()
if mun in ["all", "cacak"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_cacak("chachak", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_cacak()
if mun in ["all", "kraljevo"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_krajlevo("kraljevo", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_krajlevo()
if mun in ["all", "zvezdara"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_zvezdara("zvezdara", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_zvezdara()
if mun in ["all", "novi_beograd"]:
for data in data_source:
if data == "prihodi":
prihodi_importer.data_importer_of_municipality_novi_beograd("novi-beograd", "prihodi")
elif data == "rashodi":
rashodi_importer.data_importer_of_municipality_novi_beograd()
if __name__ == '__main__':
# Initialize arguments
parser = argparse.ArgumentParser()
parser.add_argument("--municipalities", help="The data source we want to import for municipality")
parser.add_argument("--data", help="The data source we want to import")
args = parser.parse_args()
# Read the arguments and run the function
municipalities_sr = args.municipalities
data_sr = args.data
main_importer(data_sr, municipalities_sr) | gpl-2.0 | 7,470,330,547,350,416,000 | 42.505263 | 106 | 0.571152 | false |
IdeaSolutionsOnline/ERP4R | core/objs/linha_entrega.py | 1 | 4855 | # !/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
ERP+
"""
__author__ = 'António Anacleto'
__credits__ = []
__version__ = "1.0"
__maintainer__ = "António Anacleto"
__status__ = "Development"
__model_name__ = 'linha_entrega.LinhaEntrega'
import auth, base_models
from orm import *
from form import *
try:
    from my_produto import Produto
except ImportError:
    from produto import Produto
try:
    from my_unidade import Unidade
except ImportError:
    from unidade import Unidade
class LinhaEntrega(Model, View):
def __init__(self, **kargs):
Model.__init__(self, **kargs)
self.__name__ = 'linha_entrega'
self.__title__ = 'Linhas de Entrega'
self.__model_name__ = __model_name__
self.__list_edit_mode__ = 'inline'
self.__get_options__ = ['produto']
self.entrega = parent_field(view_order=1, name='Entrega', args='style:visibility="hidden"', model_name='entrega.Entrega', nolabel=True, onlist=False, column='numero')
self.ean = string_field(view_order=2, name='EAN', size=45, onchange='ean_onchange')
self.produto = choice_field(view_order=3, name='Produto', args='required tabIndex="-1"', size=60, onchange='produto_onchange', model='produto', column='nome', options="model.get_opts('Produto', '_sellable()')")
self.quantidade = decimal_field(view_order=4, name='Quantidade', size=20, sum=True, onchange='valores_onchange', default=to_decimal(1))
self.unidade = combo_field(view_order=5, name='Unidade', args='required tabIndex="-1"', size=40, onchange='produto_onchange', model='unidade', column='nome', options="model.get_opts('Unidade','()')")
self.valor_unitario = currency_field(view_order=6, name='Valor Unitário', args='tabIndex="-1"', size=20, sum=True, onchange='valores_onchange', default=to_decimal(1))
self.desconto = percent_field(view_order=7, name='Desconto', args='tabIndex="-1"', size=20, onchange='valores_onchange')
self.iva = percent_field(view_order=8, name='IVA', args='readonly="readonly" tabIndex="-1"', size=20, nolabel=True, search=False)
self.valor_total = currency_field(view_order=9, name='Valor Total', args='readonly="readonly" tabIndex="-1"', size=20, sum=True, default=to_decimal(1))
def get_opts(self, model, tipo):
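        # Builds and evaluates e.g. "Produto().get_options_sellable()";
        # `tipo` must carry the parentheses, e.g. '()' or '_sellable()'.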
return eval(model + '().get_options' + tipo)
def ean_onchange(self, record):
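        # Look up the product by its EAN/reference code; if found, fill in
        # unit, unit price (from the session terminal's price list) and
        # total, otherwise clear the row.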
result = record.copy()
product = Produto(where='referencia = {ean}'.format(ean=record['ean'])).get()
if len(product) != 0:
product = product[0]
for key in ['quantidade', 'valor_unitario', 'valor_total']:
result[key] = to_decimal(result[key])
if result[key] <= to_decimal(0):
result[key] = to_decimal(1)
unidade = record['unidade']
if not record['unidade']:
unidade = product['unidade_medida_venda']
terminal = get_terminal(bottle.request.session['terminal'])
result['valor_unitario'] = to_decimal(Produto().get_sale_price(product['id'], terminal, result['quantidade'], unidade))
result['valor_total'] = to_decimal(result['quantidade']) * to_decimal(result['valor_unitario'])
result['iva'] = to_decimal(product['iva'])
result['unidade'] = unidade
result['produto'] = product['id']
else:
result = {}
return result
def valores_onchange(self, record):
result = record.copy()
for key in ['quantidade', 'valor_unitario', 'valor_total']:
result[key] = to_decimal(result[key])
if result[key] <= to_decimal(0):
result[key] = to_decimal(1)
result['valor_total'] = to_decimal(result['quantidade']) * to_decimal(result['valor_unitario'])
return result
def produto_onchange(self, record):
result = record.copy()
product = Produto().get(key=record['produto'])
if len(product) != 0:
product = product[0]
for key in ['quantidade', 'valor_unitario', 'valor_total']:
                result[key] = to_decimal(result[key])
if result[key] <= to_decimal(0):
result[key] = to_decimal(1)
unidade = record['unidade']
if not record['unidade']:
unidade = product['unidade_medida_venda']
terminal = get_terminal(bottle.request.session['terminal'])
result['valor_unitario'] = to_decimal(Produto().get_sale_price(product['id'], terminal, result['quantidade'], unidade))
result['valor_total'] = result['quantidade'] * result['valor_unitario']
result['iva'] = to_decimal(product['iva'])
result['ean'] = product['referencia']
result['unidade'] = unidade
else:
            result = {}
return result
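# Editor's note (illustrative, framework behavior assumed): the *_onchange
# handlers above receive the current record dict from the form layer and return
# the updated record, or {} when no matching product is found.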
| mit | -3,195,471,058,506,067,000 | 48.510204 | 218 | 0.599959 | false |
endlessm/chromium-browser | third_party/chromite/utils/attrs_freezer_unittest.py | 1 | 2645 | # -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test the attrs_freezer module."""
from __future__ import print_function
import six
from chromite.lib import cros_test_lib
from chromite.utils import attrs_freezer
class FrozenAttributesTest(cros_test_lib.TestCase):
"""Tests FrozenAttributesMixin functionality."""
class DummyClass(object):
"""Any class that does not override __setattr__."""
class SetattrClass(object):
"""Class that does override __setattr__."""
SETATTR_OFFSET = 10
def __setattr__(self, attr, value):
"""Adjust value here to later confirm that this code ran."""
object.__setattr__(self, attr, self.SETATTR_OFFSET + value)
def _TestBasics(self, cls):
# pylint: disable=attribute-defined-outside-init
def _Expected(val):
return getattr(cls, 'SETATTR_OFFSET', 0) + val
obj = cls()
obj.a = 1
obj.b = 2
self.assertEqual(_Expected(1), obj.a)
self.assertEqual(_Expected(2), obj.b)
obj.Freeze()
self.assertRaises(attrs_freezer.Error, setattr, obj, 'a', 3)
self.assertEqual(_Expected(1), obj.a)
self.assertRaises(attrs_freezer.Error, setattr, obj, 'c', 3)
self.assertFalse(hasattr(obj, 'c'))
def testFrozenByMetaclass(self):
"""Test attribute freezing with FrozenAttributesClass."""
@six.add_metaclass(attrs_freezer.Class)
class DummyByMeta(self.DummyClass):
"""Class that freezes DummyClass using metaclass construct."""
self._TestBasics(DummyByMeta)
@six.add_metaclass(attrs_freezer.Class)
class SetattrByMeta(self.SetattrClass):
"""Class that freezes SetattrClass using metaclass construct."""
self._TestBasics(SetattrByMeta)
def testFrozenByMixinFirst(self):
"""Test attribute freezing with Mixin first in hierarchy."""
class Dummy(attrs_freezer.Mixin, self.DummyClass):
"""Class that freezes DummyClass using mixin construct."""
self._TestBasics(Dummy)
class Setattr(attrs_freezer.Mixin, self.SetattrClass):
"""Class that freezes SetattrClass using mixin construct."""
self._TestBasics(Setattr)
def testFrozenByMixinLast(self):
"""Test attribute freezing with Mixin last in hierarchy."""
class Dummy(self.DummyClass, attrs_freezer.Mixin):
"""Class that freezes DummyClass using mixin construct."""
self._TestBasics(Dummy)
class Setattr(self.SetattrClass, attrs_freezer.Mixin):
"""Class that freezes SetattrClass using mixin construct."""
self._TestBasics(Setattr)
| bsd-3-clause | -4,560,031,592,534,890,000 | 30.86747 | 72 | 0.699811 | false |
ArcherSys/ArcherSys | Lib/lzma.py | 1 | 58253 | """Interface to the liblzma compression library.
This module provides a class for reading and writing compressed files,
classes for incremental (de)compression, and convenience functions for
one-shot (de)compression.
These classes and functions support both the XZ and legacy LZMA
container formats, as well as raw compressed data streams.
"""
__all__ = [
"CHECK_NONE", "CHECK_CRC32", "CHECK_CRC64", "CHECK_SHA256",
"CHECK_ID_MAX", "CHECK_UNKNOWN",
"FILTER_LZMA1", "FILTER_LZMA2", "FILTER_DELTA", "FILTER_X86", "FILTER_IA64",
"FILTER_ARM", "FILTER_ARMTHUMB", "FILTER_POWERPC", "FILTER_SPARC",
"FORMAT_AUTO", "FORMAT_XZ", "FORMAT_ALONE", "FORMAT_RAW",
"MF_HC3", "MF_HC4", "MF_BT2", "MF_BT3", "MF_BT4",
"MODE_FAST", "MODE_NORMAL", "PRESET_DEFAULT", "PRESET_EXTREME",
"LZMACompressor", "LZMADecompressor", "LZMAFile", "LZMAError",
"open", "compress", "decompress", "is_check_supported",
]
import builtins
import io
from _lzma import *
from _lzma import _encode_filter_properties, _decode_filter_properties
_MODE_CLOSED = 0
_MODE_READ = 1
_MODE_READ_EOF = 2
_MODE_WRITE = 3
_BUFFER_SIZE = 8192
class LZMAFile(io.BufferedIOBase):
"""A file object providing transparent LZMA (de)compression.
An LZMAFile can act as a wrapper for an existing file object, or
refer directly to a named file on disk.
Note that LZMAFile provides a *binary* file interface - data read
is returned as bytes, and data to be written must be given as bytes.
"""
def __init__(self, filename=None, mode="r", *,
format=None, check=-1, preset=None, filters=None):
"""Open an LZMA-compressed file in binary mode.
filename can be either an actual file name (given as a str or
bytes object), in which case the named file is opened, or it can
be an existing file object to read from or write to.
mode can be "r" for reading (default), "w" for (over)writing,
"x" for creating exclusively, or "a" for appending. These can
equivalently be given as "rb", "wb", "xb" and "ab" respectively.
format specifies the container format to use for the file.
If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the
default is FORMAT_XZ.
check specifies the integrity check to use. This argument can
only be used when opening a file for writing. For FORMAT_XZ,
the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not
support integrity checks - for these formats, check must be
omitted, or be CHECK_NONE.
When opening a file for reading, the *preset* argument is not
meaningful, and should be omitted. The *filters* argument should
also be omitted, except when format is FORMAT_RAW (in which case
it is required).
When opening a file for writing, the settings used by the
compressor can be specified either as a preset compression
level (with the *preset* argument), or in detail as a custom
filter chain (with the *filters* argument). For FORMAT_XZ and
FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset
level. For FORMAT_RAW, the caller must always specify a filter
chain; the raw compressor does not support preset compression
levels.
preset (if provided) should be an integer in the range 0-9,
optionally OR-ed with the constant PRESET_EXTREME.
filters (if provided) should be a sequence of dicts. Each dict
should have an entry for "id" indicating ID of the filter, plus
additional entries for options to the filter.
"""
self._fp = None
self._closefp = False
self._mode = _MODE_CLOSED
self._pos = 0
self._size = -1
if mode in ("r", "rb"):
if check != -1:
raise ValueError("Cannot specify an integrity check "
"when opening a file for reading")
if preset is not None:
raise ValueError("Cannot specify a preset compression "
"level when opening a file for reading")
if format is None:
format = FORMAT_AUTO
mode_code = _MODE_READ
# Save the args to pass to the LZMADecompressor initializer.
# If the file contains multiple compressed streams, each
# stream will need a separate decompressor object.
self._init_args = {"format":format, "filters":filters}
self._decompressor = LZMADecompressor(**self._init_args)
self._buffer = b""
self._buffer_offset = 0
elif mode in ("w", "wb", "a", "ab", "x", "xb"):
if format is None:
format = FORMAT_XZ
mode_code = _MODE_WRITE
self._compressor = LZMACompressor(format=format, check=check,
preset=preset, filters=filters)
else:
raise ValueError("Invalid mode: {!r}".format(mode))
if isinstance(filename, (str, bytes)):
if "b" not in mode:
mode += "b"
self._fp = builtins.open(filename, mode)
self._closefp = True
self._mode = mode_code
elif hasattr(filename, "read") or hasattr(filename, "write"):
self._fp = filename
self._mode = mode_code
else:
raise TypeError("filename must be a str or bytes object, or a file")
def close(self):
"""Flush and close the file.
May be called more than once without error. Once the file is
closed, any other operation on it will raise a ValueError.
"""
if self._mode == _MODE_CLOSED:
return
try:
if self._mode in (_MODE_READ, _MODE_READ_EOF):
self._decompressor = None
self._buffer = b""
elif self._mode == _MODE_WRITE:
self._fp.write(self._compressor.flush())
self._compressor = None
finally:
try:
if self._closefp:
self._fp.close()
finally:
self._fp = None
self._closefp = False
self._mode = _MODE_CLOSED
@property
def closed(self):
"""True if this file is closed."""
return self._mode == _MODE_CLOSED
def fileno(self):
"""Return the file descriptor for the underlying file."""
self._check_not_closed()
return self._fp.fileno()
def seekable(self):
"""Return whether the file supports seeking."""
return self.readable() and self._fp.seekable()
def readable(self):
"""Return whether the file was opened for reading."""
self._check_not_closed()
return self._mode in (_MODE_READ, _MODE_READ_EOF)
def writable(self):
"""Return whether the file was opened for writing."""
self._check_not_closed()
return self._mode == _MODE_WRITE
# Mode-checking helper functions.
def _check_not_closed(self):
if self.closed:
raise ValueError("I/O operation on closed file")
def _check_can_read(self):
if self._mode not in (_MODE_READ, _MODE_READ_EOF):
self._check_not_closed()
raise io.UnsupportedOperation("File not open for reading")
def _check_can_write(self):
if self._mode != _MODE_WRITE:
self._check_not_closed()
raise io.UnsupportedOperation("File not open for writing")
def _check_can_seek(self):
if self._mode not in (_MODE_READ, _MODE_READ_EOF):
self._check_not_closed()
raise io.UnsupportedOperation("Seeking is only supported "
"on files open for reading")
if not self._fp.seekable():
raise io.UnsupportedOperation("The underlying file object "
"does not support seeking")
# Fill the readahead buffer if it is empty. Returns False on EOF.
def _fill_buffer(self):
if self._mode == _MODE_READ_EOF:
return False
# Depending on the input data, our call to the decompressor may not
# return any data. In this case, try again after reading another block.
while self._buffer_offset == len(self._buffer):
rawblock = (self._decompressor.unused_data or
self._fp.read(_BUFFER_SIZE))
if not rawblock:
if self._decompressor.eof:
self._mode = _MODE_READ_EOF
self._size = self._pos
return False
else:
raise EOFError("Compressed file ended before the "
"end-of-stream marker was reached")
if self._decompressor.eof:
# Continue to next stream.
self._decompressor = LZMADecompressor(**self._init_args)
try:
self._buffer = self._decompressor.decompress(rawblock)
except LZMAError:
# Trailing data isn't a valid compressed stream; ignore it.
self._mode = _MODE_READ_EOF
self._size = self._pos
return False
else:
self._buffer = self._decompressor.decompress(rawblock)
self._buffer_offset = 0
return True
# Read data until EOF.
# If return_data is false, consume the data without returning it.
def _read_all(self, return_data=True):
# The loop assumes that _buffer_offset is 0. Ensure that this is true.
self._buffer = self._buffer[self._buffer_offset:]
self._buffer_offset = 0
blocks = []
while self._fill_buffer():
if return_data:
blocks.append(self._buffer)
self._pos += len(self._buffer)
self._buffer = b""
if return_data:
return b"".join(blocks)
# Read a block of up to n bytes.
# If return_data is false, consume the data without returning it.
def _read_block(self, n, return_data=True):
# If we have enough data buffered, return immediately.
end = self._buffer_offset + n
if end <= len(self._buffer):
data = self._buffer[self._buffer_offset : end]
self._buffer_offset = end
self._pos += len(data)
return data if return_data else None
# The loop assumes that _buffer_offset is 0. Ensure that this is true.
self._buffer = self._buffer[self._buffer_offset:]
self._buffer_offset = 0
blocks = []
while n > 0 and self._fill_buffer():
if n < len(self._buffer):
data = self._buffer[:n]
self._buffer_offset = n
else:
data = self._buffer
self._buffer = b""
if return_data:
blocks.append(data)
self._pos += len(data)
n -= len(data)
if return_data:
return b"".join(blocks)
def peek(self, size=-1):
"""Return buffered data without advancing the file position.
Always returns at least one byte of data, unless at EOF.
The exact number of bytes returned is unspecified.
"""
self._check_can_read()
if not self._fill_buffer():
return b""
return self._buffer[self._buffer_offset:]
def read(self, size=-1):
"""Read up to size uncompressed bytes from the file.
If size is negative or omitted, read until EOF is reached.
Returns b"" if the file is already at EOF.
"""
self._check_can_read()
if size == 0:
return b""
elif size < 0:
return self._read_all()
else:
return self._read_block(size)
def read1(self, size=-1):
"""Read up to size uncompressed bytes, while trying to avoid
making multiple reads from the underlying stream.
Returns b"" if the file is at EOF.
"""
# Usually, read1() calls _fp.read() at most once. However, sometimes
# this does not give enough data for the decompressor to make progress.
# In this case we make multiple reads, to avoid returning b"".
self._check_can_read()
if (size == 0 or
# Only call _fill_buffer() if the buffer is actually empty.
# This gives a significant speedup if *size* is small.
(self._buffer_offset == len(self._buffer) and not self._fill_buffer())):
return b""
if size > 0:
data = self._buffer[self._buffer_offset :
self._buffer_offset + size]
self._buffer_offset += len(data)
else:
data = self._buffer[self._buffer_offset:]
self._buffer = b""
self._buffer_offset = 0
self._pos += len(data)
return data
def readline(self, size=-1):
"""Read a line of uncompressed bytes from the file.
The terminating newline (if present) is retained. If size is
non-negative, no more than size bytes will be read (in which
case the line may be incomplete). Returns b'' if already at EOF.
"""
self._check_can_read()
# Shortcut for the common case - the whole line is in the buffer.
if size < 0:
end = self._buffer.find(b"\n", self._buffer_offset) + 1
if end > 0:
line = self._buffer[self._buffer_offset : end]
self._buffer_offset = end
self._pos += len(line)
return line
return io.BufferedIOBase.readline(self, size)
def write(self, data):
"""Write a bytes object to the file.
Returns the number of uncompressed bytes written, which is
always len(data). Note that due to buffering, the file on disk
may not reflect the data written until close() is called.
"""
self._check_can_write()
compressed = self._compressor.compress(data)
self._fp.write(compressed)
self._pos += len(data)
return len(data)
# Rewind the file to the beginning of the data stream.
def _rewind(self):
self._fp.seek(0, 0)
self._mode = _MODE_READ
self._pos = 0
self._decompressor = LZMADecompressor(**self._init_args)
self._buffer = b""
self._buffer_offset = 0
def seek(self, offset, whence=0):
"""Change the file position.
The new position is specified by offset, relative to the
position indicated by whence. Possible values for whence are:
0: start of stream (default): offset must not be negative
1: current stream position
2: end of stream; offset must not be positive
Returns the new file position.
        Note that seeking is emulated, so depending on the parameters,
        this operation may be extremely slow.
"""
self._check_can_seek()
# Recalculate offset as an absolute file position.
if whence == 0:
pass
elif whence == 1:
offset = self._pos + offset
elif whence == 2:
# Seeking relative to EOF - we need to know the file's size.
if self._size < 0:
self._read_all(return_data=False)
offset = self._size + offset
else:
raise ValueError("Invalid value for whence: {}".format(whence))
# Make it so that offset is the number of bytes to skip forward.
if offset < self._pos:
self._rewind()
else:
offset -= self._pos
# Read and discard data until we reach the desired position.
self._read_block(offset, return_data=False)
return self._pos
def tell(self):
"""Return the current file position."""
self._check_not_closed()
return self._pos
def open(filename, mode="rb", *,
format=None, check=-1, preset=None, filters=None,
encoding=None, errors=None, newline=None):
"""Open an LZMA-compressed file in binary or text mode.
filename can be either an actual file name (given as a str or bytes
object), in which case the named file is opened, or it can be an
existing file object to read from or write to.
The mode argument can be "r", "rb" (default), "w", "wb", "x", "xb",
"a", or "ab" for binary mode, or "rt", "wt", "xt", or "at" for text
mode.
The format, check, preset and filters arguments specify the
compression settings, as for LZMACompressor, LZMADecompressor and
LZMAFile.
For binary mode, this function is equivalent to the LZMAFile
constructor: LZMAFile(filename, mode, ...). In this case, the
encoding, errors and newline arguments must not be provided.
    For text mode, an LZMAFile object is created, and wrapped in an
io.TextIOWrapper instance with the specified encoding, error
handling behavior, and line ending(s).
"""
if "t" in mode:
if "b" in mode:
raise ValueError("Invalid mode: %r" % (mode,))
else:
if encoding is not None:
raise ValueError("Argument 'encoding' not supported in binary mode")
if errors is not None:
raise ValueError("Argument 'errors' not supported in binary mode")
if newline is not None:
raise ValueError("Argument 'newline' not supported in binary mode")
lz_mode = mode.replace("t", "")
binary_file = LZMAFile(filename, lz_mode, format=format, check=check,
preset=preset, filters=filters)
if "t" in mode:
return io.TextIOWrapper(binary_file, encoding, errors, newline)
else:
return binary_file
def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None):
"""Compress a block of data.
Refer to LZMACompressor's docstring for a description of the
optional arguments *format*, *check*, *preset* and *filters*.
For incremental compression, use an LZMACompressor instead.
"""
comp = LZMACompressor(format, check, preset, filters)
return comp.compress(data) + comp.flush()
def decompress(data, format=FORMAT_AUTO, memlimit=None, filters=None):
"""Decompress a block of data.
    Refer to LZMADecompressor's docstring for a description of the
    optional arguments *format*, *memlimit* and *filters*.
For incremental decompression, use an LZMADecompressor instead.
"""
results = []
while True:
decomp = LZMADecompressor(format, memlimit, filters)
try:
res = decomp.decompress(data)
except LZMAError:
if results:
break # Leftover data is not a valid LZMA/XZ stream; ignore it.
else:
raise # Error on the first iteration; bail out.
results.append(res)
if not decomp.eof:
raise LZMAError("Compressed data ended before the "
"end-of-stream marker was reached")
data = decomp.unused_data
if not data:
break
return b"".join(results)
| mit | -5,395,204,604,067,377,000 | 36.851202 | 84 | 0.584073 | false |
fracpete/change-of-guard | palace/src/palace/palace/config.py | 1 | 1441 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# config.py
# Copyright (C) 2017 Fracpete (fracpete at gmail dot com)
import yaml
def load_config(config=None):
"""
Loads the configuration.
    :param config: the configuration file to load, uses the default if None
:type config: str
:return: yaml dictionary
:rtype: dict
"""
    if config is None:
        config = 'palace/config.yaml'
    with open(config) as f:
        config = yaml.safe_load(f)
    return config
def get_monitors(config):
"""
    Returns a dictionary of available monitors, i.e., all the enabled ones.
:param config: the configuration to use
:type config: dict
:return: dictionary of monitors
:rtype: dict
"""
result = {}
for monitor in config['monitors']:
result[monitor] = config['monitors'][monitor]
return result
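# Illustrative usage (editor's addition): assumes the default config file
# palace/config.yaml exists and is shaped like
#   monitors:
#     cpu: {enabled: true}
if __name__ == '__main__':
    cfg = load_config()
    print(get_monitors(cfg))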
| gpl-3.0 | -7,539,678,615,406,612,000 | 29.659574 | 71 | 0.693963 | false |
primoz-k/parilis | config/wsgi.py | 1 | 1723 | """
WSGI config for parilis project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
# application = get_wsgi_application()
if os.environ.get("DJANGO_SETTINGS_MODULE") == "config.settings.production":
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
application = get_wsgi_application()
application = Sentry(application)
else:
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-3-clause | -8,953,276,399,421,357,000 | 41.02439 | 79 | 0.785258 | false |
gloryofrobots/opparse | parsers/ipy/lexicon.py | 1 | 6194 | from opparse.lexicon import Lexicon, token, keyword
class IpyLexicon(Lexicon):
# TOKEN TYPES
TT_INT = "TT_INT"
TT_STR = "TT_STR"
TT_NAME = "TT_NAME"
TT_FUN = "TT_FUN"
TT_FOR = "TT_FOR"
TT_WHILE = "TT_WHILE"
TT_BREAK = "TT_BREAK"
TT_CONTINUE = "TT_CONTINUE"
TT_CLASS = "TT_CLASS"
TT_DEF = "TT_DEF"
TT_IF = "TT_IF"
TT_ELIF = "TT_ELIF"
TT_ELSE = "TT_ELSE"
TT_IN = "TT_IN"
TT_AS = "TT_AS"
TT_IS = "TT_IS"
TT_IS_NOT = "TT_IS_NOT"
TT_NOT_IN = "TT_NOT_IN"
TT_AND = "TT_AND"
    TT_NOT = "TT_NOT"
TT_OR = "TT_OR"
TT_TRUE = "TT_TRUE"
TT_FALSE = "TT_FALSE"
TT_NONE = "TT_NONE"
TT_TRY = "TT_TRY"
TT_RAISE = "TT_RAISE"
TT_YIELD = "TT_YIELD"
TT_RETURN = "TT_RETURN"
TT_EXCEPT = "TT_EXCEPT"
TT_FINALLY = "TT_FINALLY"
TT_END = "TT_END"
TT_END_EXPR = "TT_END_EXPR"
TT_INDENT = "TT_INDENT"
TT_NEWLINE = "TT_NEWLINE"
TT_LAMBDA = "TT_LAMBDA"
TT_LCURLY = "TT_LCURLY"
TT_RCURLY = "TT_RCURLY"
TT_COMMA = "TT_COMMA"
TT_ASSIGN = "TT_ASSIGN"
TT_PLUS_ASSIGN = "TT_PLUS_ASSIGN"
TT_MINUS_ASSIGN = "TT_MINUS_ASSIGN"
TT_LPAREN = "TT_LPAREN"
TT_RPAREN = "TT_RPAREN"
TT_LSQUARE = "TT_LSQUARE"
TT_RSQUARE = "TT_RSQUARE"
TT_DOT = "TT_DOT"
TT_COLON = "TT_COLON"
TT_GT = "TT_GT"
TT_GE = "TT_GE"
TT_LE = "TT_LE"
TT_LT = "TT_LT"
TT_EQ = "TT_EQ"
TT_NE = "TT_NE"
TT_PLUS = "TT_PLUS"
TT_MINUS = "TT_MINUS"
TT_SLASH = "TT_SLASH"
TT_STAR = "TT_STAR"
TT_DOUBLE_STAR = "TT_DOUBLE_STAR"
TT_PERCENTS = "TT_PERCENTS"
TT_TILDE = "TT_TILDE"
TT_CARET = "TT_CARET"
TT_PIPE = "TT_PIPE"
TT_SHL = "TT_SHL"
TT_SHR = "TT_SHR"
TT_AMP = "TT_AMP"
TT_IMPORT = "TT_IMPORT"
TT_FROM = "TT_FROM"
# NODE_TYPES
NT_TRUE = "NT_TRUE"
NT_FALSE = "NT_FALSE"
NT_NONE = "NT_NONE"
NT_INT = "NT_INT"
NT_STR = "NT_STR"
NT_MULTI_STR = "NT_MULTI_STR"
NT_NAME = "NT_NAME"
NT_DICT = "NT_DICT"
NT_LIST = "NT_LIST"
NT_TUPLE = "NT_TUPLE"
NT_FUN = "NT_FUN"
NT_IF = "NT_IF"
NT_TRY = "NT_TRY"
NT_FOR = "NT_FOR"
NT_WHILE = "NT_WHILE"
NT_CONTINUE = "NT_CONTINUE"
NT_BREAK = "NT_BREAK"
NT_RAISE = "NT_RAISE"
NT_ASSIGN = "NT_ASSIGN"
NT_PLUS_ASSIGN = "NT_PLUS_ASSIGN"
NT_MINUS_ASSIGN = "NT_MINUS_ASSIGN"
NT_CALL = "NT_CALL"
NT_DOT = "NT_DOT"
NT_COMMA = "NT_COMMA"
NT_AS = "NT_AS"
NT_AND = "NT_AND"
NT_OR = "NT_OR"
NT_NOT = "NT_NOT"
NT_GT = "NT_GT"
NT_GE = "NT_GE"
NT_LE = "NT_LE"
NT_LT = "NT_LT"
NT_EQ = "NT_EQ"
NT_NE = "NT_NE"
NT_IN = "NT_IN"
NT_IS = "NT_IS"
NT_IS_NOT = "NT_IS_NOT"
NT_NOT_IN = "NT_NOT_IN"
NT_ADD = "NT_ADD"
NT_SUB = "NT_SUB"
NT_DIV = "NT_DIV"
NT_MUL = "NT_MUL"
NT_POW = "NT_POW"
NT_MOD = "NT_MOD"
NT_BXOR = "NT_BXOR"
NT_BNOT = "NT_BNOT"
NT_BOR = "NT_BOR"
NT_BAND = "NT_BAND"
NT_BSHL = "NT_BSHL"
NT_BSHR = "NT_BSHR"
NT_NEGATE = "NT_NEGATE"
NT_VARGS = "NT_VARGS"
NT_KVARGS = "NT_KVARGS"
NT_CLASS = "NT_CLASS"
NT_IMPORT = "NT_IMPORT"
NT_IMPORT_FROM = "NT_IMPORT_FROM"
NT_IMPORT_ALL = "NT_IMPORT_ALL"
RULES = [
(token('\n'), TT_NEWLINE),
(token(' '), -1),
(token('#[^\n]*'), -1),
(token('is[\s]+not'), TT_IS_NOT),
(token('not[\s]+in'), TT_NOT_IN),
(keyword('if'), TT_IF),
(keyword('elif'), TT_ELIF),
(keyword('else'), TT_ELSE),
(keyword('end'), TT_END),
(keyword('is'), TT_IS),
(keyword('and'), TT_AND),
(keyword('or'), TT_OR),
(keyword('not'), TT_NOT),
(keyword('True'), TT_TRUE),
(keyword('False'), TT_FALSE),
(keyword('None'), TT_NONE),
(keyword('raise'), TT_RAISE),
(keyword('return'), TT_RETURN),
(keyword('yield'), TT_YIELD),
(keyword('try'), TT_TRY),
(keyword('except'), TT_EXCEPT),
(keyword('finally'), TT_FINALLY),
(keyword('lambda'), TT_LAMBDA),
(keyword('fun'), TT_FUN),
(keyword('def'), TT_DEF),
(keyword('class'), TT_CLASS),
(keyword('while'), TT_WHILE),
(keyword('for'), TT_FOR),
(keyword('in'), TT_IN),
(keyword('break'), TT_BREAK),
(keyword('continue'), TT_CONTINUE),
(keyword('import'), TT_IMPORT),
(keyword('from'), TT_FROM),
(keyword('as'), TT_AS),
(token("[0-9]+"), TT_INT),
(token('"([^\\\"]+|\\.)*"'), TT_STR),
(token('[a-zA-Z_][0-9a-zA-Z_]*'), TT_NAME),
(token('\;'), TT_END_EXPR),
(token('\{'), TT_LCURLY),
(token('\}'), TT_RCURLY),
(token('\,'), TT_COMMA),
(token('\('), TT_LPAREN),
(token('\)'), TT_RPAREN),
(token('\['), TT_LSQUARE),
(token('\]'), TT_RSQUARE),
(token('\.'), TT_DOT),
(token(':'), TT_COLON),
(token('>>'), TT_SHR),
(token('<<'), TT_SHL),
(token('\^'), TT_CARET),
(token('\&'), TT_AMP),
(token('\~'), TT_TILDE),
(token('\|'), TT_PIPE),
(token('\+='), TT_PLUS_ASSIGN),
(token('\-='), TT_MINUS_ASSIGN),
(token('\*\*'), TT_DOUBLE_STAR),
    # Multi-character operators must be listed before their single-character
    # prefixes, because the first matching rule wins.
    (token('=='), TT_EQ),
    (token('>='), TT_GE),
    (token('>'), TT_GT),
    (token('<='), TT_LE),
    (token('<'), TT_LT),
    (token('!='), TT_NE),
(token('\+'), TT_PLUS),
(token('\-'), TT_MINUS),
(token('\*'), TT_STAR),
(token('\/'), TT_SLASH),
(token('\%'), TT_PERCENTS),
(token('='), TT_ASSIGN),
]
TERM_BLOCK = [TT_END]
TERM_EXP = [TT_END_EXPR]
TERM_CONDITION = [TT_COLON]
TERM_FOR_CONDITION = [TT_IN]
TERM_IF_BODY = [TT_ELSE, TT_ELIF] + TERM_BLOCK
TERM_TRY = [TT_EXCEPT]
TERM_EXCEPT = [TT_FINALLY, TT_EXCEPT] + TERM_BLOCK
TERM_FUN_SIGNATURE = [TT_COLON]
TERM_FROM_IMPORTED = [TT_IMPORT]
LEVELS_IF = [TT_ELSE, TT_ELIF]
LEVELS_TRY = [TT_EXCEPT, TT_FINALLY]
LEVELS_FOR = [TT_ELSE]
ASSIGNMENT_TOKENS = [TT_ASSIGN, TT_PLUS_ASSIGN, TT_MINUS_ASSIGN] | gpl-3.0 | -1,211,293,951,085,114,400 | 24.285714 | 68 | 0.481757 | false |
mucow24/roboviva | roboviva/latex.py | 1 | 7662 | # Roboviva - Better cue sheets for everyone
# Copyright (C) 2015 Mike Kocurek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import cue
import re
def _makeClimb(climb_type):
'''Very simple utility method -- provides a common way to specify climb types'''
return r"$\underset{\textbf{" + climb_type + r"}}{\text{\large \Mountain}}$"
def _instructionToLatex(instruction, modifier):
'''Maps a cue.Instruction the latex that should be used to render it'''
if instruction == cue.Instruction.CAT_1:
return _makeClimb("1")
elif instruction == cue.Instruction.CAT_2:
return _makeClimb("2")
elif instruction == cue.Instruction.CAT_3:
return _makeClimb("3")
elif instruction == cue.Instruction.CAT_4:
return _makeClimb("4")
elif instruction == cue.Instruction.CAT_5:
return _makeClimb("5")
elif instruction == cue.Instruction.CAT_HC:
return _makeClimb("HC")
elif instruction == cue.Instruction.SUMMIT:
return _makeClimb("End")
elif instruction == cue.Instruction.DANGER:
return r"\Large \danger "
elif instruction == cue.Instruction.FIRST_AID:
return r"\raisebox{-0.15em}{\Plus} "
else:
# all others can be rendered as-is, in bold:
return r"\textbf{" + _escape(modifier) + _escape(instruction) + "}"
def _escape(text):
r''' Escapes &, #, and other characters in 'text' so they don't break the
latex render.'''
ret = re.sub(r'\\([^\\]?)', r'\\textbackslash \1', text)
ret = ret.replace("_", r"\textunderscore ")
ret = ret.replace("$", "\$")
ret = ret.replace("#", "\#")
ret = ret.replace("&", "\&")
ret = ret.replace("|", r'$|$')
ret = ret.replace("<", r'$<$')
ret = ret.replace(">", r'$\Rightarrow$')
ret = ret.replace("%", r'\%')
ret = ret.replace('{', r'\{')
ret = ret.replace('}', r'\}')
return ret
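# Illustrative example with a hypothetical cue string:
#   _escape("Food & water #2 | 50%")
# returns "Food \& water \#2 $|$ 50\%", safe to embed in a LaTeX table cell.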
def _format(text):
  r'''Looks for markdown-style *emphasis* and **strong emphasis** in the text,
  turning it into \emph and \textbf, accordingly.'''
# Step 0: Escape any whitespace-delimited *'s and **'s:
text = re.sub(ur'\s\*\s', ur' \* ', text)
text = re.sub(ur'\s\*\*\s', ur' \*\* ', text)
# Do this in two passes. Each pass will replace **...** with \textbf{...},
# and *...* with \emph{...}, where "..." DOES NOT CONTAIN ANY NESTED **...**
# or *...* PATTERNS. We should do this to fixed point, but if people are
# seriously doing this:
# **Foo *bar **baz *foobar******
# Screw 'em :)
Num_Passes = 2
for p in xrange(Num_Passes):
text = re.sub(ur'(\*\*)(?!\s)((\\.|[^\\\*])*?[^\s\\])\1',
ur'\\textbf{\2}',
text)
text = re.sub(ur'\*(?!\s)((\\.|[^\\\*])*?[^\s\\*])\*',
ur'\emph{\1}',
text)
# Finally, un-escape any escaped *'s:
text = re.sub(ur'\\(\*|_)', ur'\1', text)
return text
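# Illustrative example with hypothetical cue text:
#   _format("Turn left at **Main St** then *caution* gravel")
# returns "Turn left at \textbf{Main St} then \emph{caution} gravel".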
def _entryColor(entry):
'''Figures out what color, if any, this entry should have. Returns a color
string, if appropriate, or 'None' if this entry doesn't need to be
colored.'''
# Figure out row color:
color = None
if entry.color == cue.Color.YELLOW:
color = ur'{yellow}'
elif entry.color == cue.Color.GRAY:
color = ur'[gray]{0.8}'
return color
def _entryToLatex(entry):
'''Converts a cue.Entry into a latex supertabular row string'''
color_str = ""
note_str = ""
for_str = ""
color = _entryColor(entry)
# Escape all user-provided strings:
esc_note = _escape(entry.note)
esc_description = _escape(entry.description)
if color:
color_str = ur'\rowcolor%s' % color
if entry.note:
# If the user left the description empty, but added a note, treat the note
    # as if it were the description. Otherwise, append the note as an actual
# note after the description.
if esc_description.strip() == "":
note_str = esc_note
else:
note_str = ur' \newline \textit{%s}' % esc_note
if entry.for_distance:
for_str = "%5.1f" % entry.for_distance
instruction_str = _instructionToLatex(entry.instruction, entry.modifier)
note_str = _format(note_str)
description_str = _format(esc_description)
return r"%s %s & %5.1f & %s%s & %s \\ \hline" % (color_str,
instruction_str,
entry.absolute_distance,
description_str,
note_str,
for_str)
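# Rough illustration (all field values hypothetical): an uncolored entry with
# absolute_distance=12.3, description "Main St", an empty note, and
# for_distance=5.0 renders as a row like
#   \textbf{<instruction>} & 12.3 & Main St & 5.0 \\ \hline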
def makeLatex(route):
''' Makes a full latex document from a cue.Route object
route - a Cue.Route object, fully initialized.
Returns the Latex output generated from 'route', as a string.
'''
  ents = route.entries
ret = _makeHeader(route)
for ent in ents:
ret = ret + _entryToLatex(ent) + "\n"
ret = ret + LatexFooter
return ret
def _makeHeader(route):
'''
Generates the beginning of a Latex document, meaning everything from \documentclass to the beginning of the supertable.
route: a cue.Route object to use when filling in the header
'''
route_id = route.id
route_name = route.name
elevation_gain_ft = route.elevation_gain_ft
total_distance_mi = route.length_mi
header = unicode(r'''
\documentclass[11pt]{article}
\usepackage[left=0.20in,right=0.20in,top=0.7in,bottom=0.25in]{geometry}
\geometry{letterpaper}
\usepackage{colortbl}
\usepackage{supertabular}
\usepackage{amsmath}
\usepackage{helvet}
\usepackage{fourier}
\usepackage{bbding}
\usepackage[alpine]{ifsym}
\usepackage{fancyhdr}
\usepackage{lastpage}
\pagestyle{fancy}
\fancyhf{}''')
# Fill in left, right headers.
lhead = None
rhead = r"\emph{Route \#%d}" % route_id
# We stick the total distance + climb after the route title if it exists,
# otherwise we put it after the route #:
if elevation_gain_ft:
route_stats_esc = _escape("%.1f mi / %d ft" % (total_distance_mi, elevation_gain_ft))
else:
    route_stats_esc = _escape("%.1f mi" % total_distance_mi)
if route_name:
lhead = r"\emph{%s (%s)}" % (_escape(route_name), route_stats_esc)
else:
# Stick stats after the right header:
rhead += r" \emph{(%s)}" % route_stats_esc
if lhead:
header += unicode(r'''
\lhead{\small %s}''' % lhead)
if rhead:
header += unicode(r'''
\rhead{\small %s}''' % rhead)
header += unicode(r'''
\fancyfoot[C]{\footnotesize{\emph{Page~\thepage~of~\pageref{LastPage}}}}
\setlength{\footskip}{0.0in}
\setlength{\headsep}{0.2in}
\renewcommand{\familydefault}{\sfdefault}
\begin{document}
\renewcommand{\arraystretch}{1.15}
\twocolumn
\tablehead{
\hline
\rowcolor[gray]{0}
\textbf{\textcolor{white}{Go}} &
\textbf{\textcolor{white}{At}} &
\textbf{\textcolor{white}{On}} &
\textbf{\textcolor{white}{For}} \\
\hline
}
\tabletail{\hline}
\tablelasttail{\hline}
\begin{center}
\begin{supertabular}{|c|p{0.30in}|p{2.25in}|l|}
\hline
''')
return header
LatexFooter = unicode(r'''
\end{supertabular}
\end{center}
\end{document}
''')
| agpl-3.0 | -8,903,511,516,372,346,000 | 30.792531 | 121 | 0.623466 | false |
timvideos/HDMI2USB-mode-switch | versioneer.py | 1 | 68611 |
# Version: 0.18
"""The Versioneer - like a rocketeer, but for versions.
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
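A minimal illustrative `setup.cfg` section (the paths and prefixes below are
placeholders for your own project layout, not defaults):
    [versioneer]
    VCS = git
    style = pep440
    versionfile_source = src/myproject/_version.py
    versionfile_build = myproject/_version.py
    tag_prefix = ""
    parentdir_prefix = myproject-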
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.
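The rewriting is driven by a `.gitattributes` entry of the form
`SRC/_version.py export-subst` (with your configured `versionfile_source`
path), which the installer creates for you.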
To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and `setup.py
sdist` to replace `_version.py` with a small static file that contains just
the generated version data.
## Installation
See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default "pep440" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
commit date in ISO 8601 format. This will be None if the date is not
available.
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of "unknown".
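As an illustration, a dirty checkout two commits past the "0.11" tag might
yield (all values here are examples, not fixed outputs):
    {'version': '0.11+2.g1076c97.dirty',
     'full-revisionid': '1076c978a8d3cfc70f408fe5974aa6c092c949ac',
     'dirty': True,
     'error': None,
     'date': '2017-05-12T10:02:33-0400'}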
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, "pep440", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional "local
version" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
that this commit is two revisions ("+2") beyond the "0.11" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. "0.11".
Other styles are available. See [details.md](details.md) in the Versioneer
source tree for descriptions.
## Debugging
Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).
## Known Limitations
Some situations are known to cause problems for Versioneer. This section
details the most significant ones. More can be found on the Github
[issues page](https://github.com/warner/python-versioneer/issues).
### Subprojects
Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
two common reasons why `setup.py` might not be in the root:
* Source trees which contain multiple subprojects, such as
[Buildbot](https://github.com/buildbot/buildbot), which contains both
"master" and "slave" subprojects, each with their own `setup.py`,
`setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
provide bindings to Python (and perhaps other languages) in subdirectories.
Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).
`pip install --editable .` should work correctly. `setup.py install` might
work too.
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
this issue. The discussion in
[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
pip to let Versioneer work correctly.
Versioneer-0.16 and earlier only looked for a `.git` directory next to the
`setup.cfg`, so subprojects were completely unsupported with those releases.
### Editable installs with setuptools <= 18.5
`setup.py develop` and `pip install --editable .` allow you to install a
project into a virtualenv once, then continue editing the source code (and
test) without re-installing after every change.
"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
convenient way to specify executable scripts that should be installed along
with the python package.
These both work as expected when using modern setuptools. When using
setuptools-18.5 or earlier, however, certain operations will cause
`pkg_resources.DistributionNotFound` errors when running the entrypoint
script, which must be resolved by re-installing the package. This happens
when the initial install is done with one version, then the egg_info data is
regenerated while a different version is checked out. Many setup.py commands
cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
a different virtualenv), so this can be surprising.
[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.
### Unicode version strings
While Versioneer works (and is continually tested) with both Python 2 and
Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
Newer releases probably generate unicode version strings on py2. It's not
clear that this is wrong, but it may be surprising for applications when they
write these strings to a network connection or include them in bytes-oriented
APIs like cryptographic checksums.
[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
this question.
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
Specifically, both are released under the Creative Commons "Public Domain
Dedication" license (CC0-1.0), as described in
https://creativecommons.org/publicdomain/zero/1.0/ .
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_root():
"""Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py .
"""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
# allow 'python path/to/setup.py COMMAND'
root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
err = ("Versioneer was unable to run the project root directory. "
"Versioneer requires setup.py to be executed from "
"its immediate directory (like 'python setup.py COMMAND'), "
"or in a way that lets it use sys.argv[0] to find the root "
"(like 'python path/to/setup.py COMMAND').")
raise VersioneerBadRootError(err)
try:
# Certain runtime workflows (setup.py install/develop in a setuptools
# tree) execute all dependencies in a single python process, so
# "versioneer" may be imported multiple times, and python's shared
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
me = os.path.realpath(os.path.abspath(__file__))
me_dir = os.path.normcase(os.path.splitext(me)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
if me_dir != vsr_dir:
print("Warning: build in %s is using versioneer.py from %s"
% (os.path.dirname(me), versioneer_py))
except NameError:
pass
return root
def get_config_from_root(root):
"""Read the project setup.cfg file to determine Versioneer config."""
# This might raise EnvironmentError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg")
parser = configparser.SafeConfigParser()
with open(setup_cfg, "r") as f:
parser.readfp(f)
VCS = parser.get("versioneer", "VCS") # mandatory
def get(parser, name):
if parser.has_option("versioneer", name):
return parser.get("versioneer", name)
return None
cfg = VersioneerConfig()
cfg.VCS = VCS
cfg.style = get(parser, "style") or ""
cfg.versionfile_source = get(parser, "versionfile_source")
cfg.versionfile_build = get(parser, "versionfile_build")
cfg.tag_prefix = get(parser, "tag_prefix")
if cfg.tag_prefix in ("''", '""'):
cfg.tag_prefix = ""
cfg.parentdir_prefix = get(parser, "parentdir_prefix")
cfg.verbose = get(parser, "verbose")
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
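# Illustrative usage (assuming a git checkout in the current directory):
#   out, rc = run_command(["git"], ["rev-parse", "HEAD"], cwd=".")
# yields the full commit id in 'out' with rc == 0, or (None, None) if no
# "git" executable could be found.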
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search('=\\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search('=\\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search('=\\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search('\\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%%s*" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search('^(.+)-(\\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
"""Git-specific installation logic for Versioneer.
For Git, this means creating/changing .gitattributes to mark _version.py
for export-subst keyword substitution.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
files = [manifest_in, versionfile_source]
if ipy:
files.append(ipy)
try:
me = __file__
if me.endswith(".pyc") or me.endswith(".pyo"):
me = os.path.splitext(me)[0] + ".py"
versioneer_file = os.path.relpath(me)
except NameError:
versioneer_file = "versioneer.py"
files.append(versioneer_file)
present = False
try:
f = open(".gitattributes", "r")
for line in f.readlines():
if line.strip().startswith(versionfile_source):
if "export-subst" in line.strip().split()[1:]:
present = True
f.close()
except EnvironmentError:
pass
if not present:
f = open(".gitattributes", "a+")
f.write("%s export-subst\n" % versionfile_source)
f.close()
files.append(".gitattributes")
run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
"""Try to determine the version from _version.py if present."""
try:
with open(filename) as f:
contents = f.read()
except EnvironmentError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
if not mo:
mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
if not mo:
raise NotThisMethod("no version_json in _version.py")
return json.loads(mo.group(1))
def write_to_version_file(filename, versions):
"""Write the given version number to the given _version.py file."""
os.unlink(filename)
contents = json.dumps(versions, sort_keys=True,
indent=1, separators=(",", ": "))
with open(filename, "w") as f:
f.write(SHORT_VERSION_PY % contents)
print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
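# Illustrative: for pieces like {"closest-tag": "1.0", "distance": 2,
# "short": "abc1234", "dirty": True}, render_pep440 returns
# "1.0+2.gabc1234.dirty".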
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
class VersioneerBadRootError(Exception):
"""The project root directory is unknown or missing key files."""
def get_versions(verbose=False):
"""Get the project version from whatever source is available.
    Returns a dict with the keys "version", "full-revisionid", "dirty",
    "error", and "date".
"""
if "versioneer" in sys.modules:
# see the discussion in cmdclass.py:get_cmdclass()
del sys.modules["versioneer"]
root = get_root()
cfg = get_config_from_root(root)
assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
handlers = HANDLERS.get(cfg.VCS)
assert handlers, "unrecognized VCS '%s'" % cfg.VCS
verbose = verbose or cfg.verbose
assert cfg.versionfile_source is not None, \
"please set versioneer.versionfile_source"
assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
versionfile_abs = os.path.join(root, cfg.versionfile_source)
# extract version from first of: _version.py, VCS command (e.g. 'git
# describe'), parentdir. This is meant to work for developers using a
# source checkout, for users of a tarball created by 'setup.py sdist',
# and for users of a tarball/zipball created by 'git archive' or github's
# download-from-tag feature or the equivalent in other VCSes.
get_keywords_f = handlers.get("get_keywords")
from_keywords_f = handlers.get("keywords")
if get_keywords_f and from_keywords_f:
try:
keywords = get_keywords_f(versionfile_abs)
ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
if verbose:
print("got version from expanded keyword %s" % ver)
return ver
except NotThisMethod:
pass
try:
ver = versions_from_file(versionfile_abs)
if verbose:
print("got version from file %s %s" % (versionfile_abs, ver))
return ver
except NotThisMethod:
pass
from_vcs_f = handlers.get("pieces_from_vcs")
if from_vcs_f:
try:
pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
ver = render(pieces, cfg.style)
if verbose:
print("got version from VCS %s" % ver)
return ver
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
if verbose:
print("got version from parentdir %s" % ver)
return ver
except NotThisMethod:
pass
if verbose:
print("unable to compute version")
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None, "error": "unable to compute version",
"date": None}
def get_version():
"""Get the short version string for this project."""
return get_versions()["version"]
def get_cmdclass():
"""Get the custom setuptools/distutils subclasses used by Versioneer."""
if "versioneer" in sys.modules:
del sys.modules["versioneer"]
# this fixes the "python setup.py develop" case (also 'install' and
# 'easy_install .'), in which subdependencies of the main project are
# built (using setup.py bdist_egg) in the same python process. Assume
# a main project A and a dependency B, which use different versions
# of Versioneer. A's setup.py imports A's Versioneer, leaving it in
# sys.modules by the time B's setup.py is executed, causing B to run
# with the wrong versioneer. Setuptools wraps the sub-dep builds in a
    # sandbox that restores sys.modules to its pre-build state, so the
# parent is protected against the child's "import versioneer". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
# Also see https://github.com/warner/python-versioneer/issues/52
cmds = {}
# we add "version" to both distutils and setuptools
from distutils.core import Command
class cmd_version(Command):
description = "report generated version string"
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
vers = get_versions(verbose=True)
print("Version: %s" % vers["version"])
print(" full-revisionid: %s" % vers.get("full-revisionid"))
print(" dirty: %s" % vers.get("dirty"))
print(" date: %s" % vers.get("date"))
if vers["error"]:
print(" error: %s" % vers["error"])
cmds["version"] = cmd_version
# we override "build_py" in both distutils and setuptools
#
# most invocation pathways end up running build_py:
# distutils/build -> build_py
# distutils/install -> distutils/build ->..
# setuptools/bdist_wheel -> distutils/install ->..
# setuptools/bdist_egg -> distutils/install_lib -> build_py
# setuptools/install -> bdist_egg ->..
# setuptools/develop -> ?
# pip install:
# copies source tree to a tempdir before running egg_info/etc
# if .git isn't copied too, 'git describe' will fail
# then does setup.py bdist_wheel, or sometimes setup.py install
# setup.py egg_info -> ?
# we override different "build_py" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.build_py import build_py as _build_py
else:
from distutils.command.build_py import build_py as _build_py
class cmd_build_py(_build_py):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
target_versionfile = os.path.join(self.build_lib,
cfg.versionfile_build)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
cmds["build_py"] = cmd_build_py
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
# nczeczulin reports that py2exe won't like the pep440-style string
# as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
# setup(console=[{
# "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
# "product_version": versioneer.get_version(),
# ...
class cmd_build_exe(_build_exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_build_exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
cmds["build_exe"] = cmd_build_exe
del cmds["build_py"]
if 'py2exe' in sys.modules: # py2exe enabled?
try:
from py2exe.distutils_buildexe import py2exe as _py2exe # py3
except ImportError:
from py2exe.build_exe import py2exe as _py2exe # py2
class cmd_py2exe(_py2exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_py2exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
cmds["py2exe"] = cmd_py2exe
# we override different "sdist" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.sdist import sdist as _sdist
else:
from distutils.command.sdist import sdist as _sdist
class cmd_sdist(_sdist):
def run(self):
versions = get_versions()
self._versioneer_generated_versions = versions
# unless we update this, the command will keep using the old
# version
self.distribution.metadata.version = versions["version"]
return _sdist.run(self)
def make_release_tree(self, base_dir, files):
root = get_root()
cfg = get_config_from_root(root)
_sdist.make_release_tree(self, base_dir, files)
# now locate _version.py in the new base_dir directory
# (remembering that it may be a hardlink) and replace it with an
# updated value
target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile,
self._versioneer_generated_versions)
cmds["sdist"] = cmd_sdist
return cmds
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
"""Main VCS-independent setup function for installing Versioneer."""
root = get_root()
try:
cfg = get_config_from_root(root)
except (EnvironmentError, configparser.NoSectionError,
configparser.NoOptionError) as e:
if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg",
file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
f.write(SAMPLE_CONFIG)
print(CONFIG_ERROR, file=sys.stderr)
return 1
print(" creating %s" % cfg.versionfile_source)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG % {"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
"__init__.py")
if os.path.exists(ipy):
try:
with open(ipy, "r") as f:
old = f.read()
except EnvironmentError:
old = ""
if INIT_PY_SNIPPET not in old:
print(" appending to %s" % ipy)
with open(ipy, "a") as f:
f.write(INIT_PY_SNIPPET)
else:
print(" %s unmodified" % ipy)
else:
print(" %s doesn't exist, ok" % ipy)
ipy = None
# Make sure both the top-level "versioneer.py" and versionfile_source
# (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
# they'll be copied into source distributions. Pip won't be able to
# install the package without this.
manifest_in = os.path.join(root, "MANIFEST.in")
simple_includes = set()
try:
with open(manifest_in, "r") as f:
for line in f:
if line.startswith("include "):
for include in line.split()[1:]:
simple_includes.add(include)
except EnvironmentError:
pass
# That doesn't cover everything MANIFEST.in can do
# (http://docs.python.org/2/distutils/sourcedist.html#commands), so
# it might give some false negatives. Appending redundant 'include'
# lines is safe, though.
if "versioneer.py" not in simple_includes:
print(" appending 'versioneer.py' to MANIFEST.in")
with open(manifest_in, "a") as f:
f.write("include versioneer.py\n")
else:
print(" 'versioneer.py' already in MANIFEST.in")
if cfg.versionfile_source not in simple_includes:
print(" appending versionfile_source ('%s') to MANIFEST.in" %
cfg.versionfile_source)
with open(manifest_in, "a") as f:
f.write("include %s\n" % cfg.versionfile_source)
else:
print(" versionfile_source already in MANIFEST.in")
# Make VCS-specific changes. For git, this means creating/changing
# .gitattributes to mark _version.py for export-subst keyword
# substitution.
do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
return 0
def scan_setup_py():
"""Validate the contents of setup.py against Versioneer's expectations."""
found = set()
setters = False
errors = 0
with open("setup.py", "r") as f:
for line in f.readlines():
if "import versioneer" in line:
found.add("import")
if "versioneer.get_cmdclass()" in line:
found.add("cmdclass")
if "versioneer.get_version()" in line:
found.add("get_version")
if "versioneer.VCS" in line:
setters = True
if "versioneer.versionfile_source" in line:
setters = True
if len(found) != 3:
print("")
print("Your setup.py appears to be missing some important items")
print("(but I might be wrong). Please make sure it has something")
print("roughly like the following:")
print("")
print(" import versioneer")
print(" setup( version=versioneer.get_version(),")
print(" cmdclass=versioneer.get_cmdclass(), ...)")
print("")
errors += 1
if setters:
print("You should remove lines like 'versioneer.VCS = ' and")
print("'versioneer.versionfile_source = ' . This configuration")
print("now lives in setup.cfg, and should be removed from setup.py")
print("")
errors += 1
return errors
if __name__ == "__main__":
cmd = sys.argv[1]
if cmd == "setup":
errors = do_setup()
errors += scan_setup_py()
if errors:
sys.exit(1)
| apache-2.0 | -6,073,901,523,147,702,000 | 36.65697 | 79 | 0.613575 | false |
chrinide/PyFV | pyfv/portfolio/mean_variance.py | 1 | 2360 | # -*- coding: utf-8 -*-
'''
@author: Hung-Hsin Chen
@mail: [email protected]
Markowitz mean variance model
'''
from __future__ import division
from coopr.pyomo import *
from datetime import date
import numpy as np
import pandas as pd
import os
import time
from coopr.opt import SolverFactory
def MeanVariance(symbols, risk_ret, money=1e6, risk_weight=1, solver="cplex"):
'''
    @risk_ret, shape: M*T
minimize risk_weight * risk - (1-risk_weight) * mean
'''
t = time.time()
sigma = np.cov(risk_ret)
mu = risk_ret.mean(axis=1)
model = ConcreteModel()
#Set
model.symbols = range(len(symbols))
#decision variables
model.W = Var(model.symbols, within=NonNegativeReals)
#constraint
def CapitalConstraint_rule(model):
allocation = sum(model.W[idx] for idx in model.symbols)
return allocation == money
    model.CapitalConstraint = Constraint(rule=CapitalConstraint_rule)
#objective
def minRiskObjective_rule(model):
profit = sum(model.W[idx]*mu[idx] for idx in model.symbols)
risk = 0
for idx in model.symbols:
for jdx in model.symbols:
risk += model.W[idx] * model.W[jdx] * sigma[idx, jdx]
return 1./2 * risk_weight * risk - (1. - risk_weight) * profit
    model.minRiskObjective = Objective(rule=minRiskObjective_rule, sense=minimize)
# Create a solver
opt = SolverFactory(solver)
if solver =="cplex":
opt.options["threads"] = 4
instance = model.create()
results = opt.solve(instance)
instance.load(results)
obj = results.Solution.Objective.__default_objective__['value']
display(instance)
print "MeanVariance elapsed %.3f secs"%(time.time()-t)
def testMeanVariance():
FileDir = os.path.abspath(os.path.curdir)
PklBasicFeaturesDir = os.path.join(FileDir, '..', 'pkl', 'BasicFeatures')
symbols = ['2330', '2317', '6505']
n_period = 100
ROIs = np.empty((len(symbols), n_period))
for idx, symbol in enumerate(symbols):
df = pd.read_pickle(os.path.join(PklBasicFeaturesDir, '%s.pkl'%symbol))
roi = df['adjROI'][:n_period]
ROIs[idx] = roi
MeanVariance(symbols, ROIs, money=1e6, risk_weight=1, solver="cplex")
if __name__ == '__main__':
testMeanVariance() | gpl-2.0 | -3,923,712,547,247,650,300 | 26.137931 | 79 | 0.618644 | false |
datapythonista/pandas | pandas/tests/tools/test_to_numeric.py | 1 | 22742 | import decimal
import numpy as np
from numpy import iinfo
import pytest
import pandas as pd
from pandas import (
DataFrame,
Index,
Series,
to_numeric,
)
import pandas._testing as tm
@pytest.fixture(params=[None, "ignore", "raise", "coerce"])
def errors(request):
return request.param
@pytest.fixture(params=[True, False])
def signed(request):
return request.param
@pytest.fixture(params=[lambda x: x, str], ids=["identity", "str"])
def transform(request):
return request.param
@pytest.fixture(params=[47393996303418497800, 100000000000000000000])
def large_val(request):
return request.param
@pytest.fixture(params=[True, False])
def multiple_elts(request):
return request.param
@pytest.fixture(
params=[
(lambda x: Index(x, name="idx"), tm.assert_index_equal),
(lambda x: Series(x, name="ser"), tm.assert_series_equal),
(lambda x: np.array(Index(x).values), tm.assert_numpy_array_equal),
]
)
def transform_assert_equal(request):
return request.param
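# Note: the fixture above runs each dependent test three ways -- against an
# Index, a Series and a raw ndarray -- pairing each constructor with the
# matching pandas assertion helper.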
@pytest.mark.parametrize(
"input_kwargs,result_kwargs",
[
({}, {"dtype": np.int64}),
({"errors": "coerce", "downcast": "integer"}, {"dtype": np.int8}),
],
)
def test_empty(input_kwargs, result_kwargs):
# see gh-16302
ser = Series([], dtype=object)
result = to_numeric(ser, **input_kwargs)
expected = Series([], **result_kwargs)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("last_val", ["7", 7])
def test_series(last_val):
ser = Series(["1", "-3.14", last_val])
result = to_numeric(ser)
expected = Series([1, -3.14, 7])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"data",
[
[1, 3, 4, 5],
[1.0, 3.0, 4.0, 5.0],
# Bool is regarded as numeric.
[True, False, True, True],
],
)
def test_series_numeric(data):
ser = Series(data, index=list("ABCD"), name="EFG")
result = to_numeric(ser)
tm.assert_series_equal(result, ser)
@pytest.mark.parametrize(
"data,msg",
[
([1, -3.14, "apple"], 'Unable to parse string "apple" at position 2'),
(
["orange", 1, -3.14, "apple"],
'Unable to parse string "orange" at position 0',
),
],
)
def test_error(data, msg):
ser = Series(data)
with pytest.raises(ValueError, match=msg):
to_numeric(ser, errors="raise")
@pytest.mark.parametrize(
"errors,exp_data", [("ignore", [1, -3.14, "apple"]), ("coerce", [1, -3.14, np.nan])]
)
def test_ignore_error(errors, exp_data):
ser = Series([1, -3.14, "apple"])
result = to_numeric(ser, errors=errors)
expected = Series(exp_data)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"errors,exp",
[
("raise", 'Unable to parse string "apple" at position 2'),
("ignore", [True, False, "apple"]),
# Coerces to float.
("coerce", [1.0, 0.0, np.nan]),
],
)
def test_bool_handling(errors, exp):
ser = Series([True, False, "apple"])
if isinstance(exp, str):
with pytest.raises(ValueError, match=exp):
to_numeric(ser, errors=errors)
else:
result = to_numeric(ser, errors=errors)
expected = Series(exp)
tm.assert_series_equal(result, expected)
def test_list():
ser = ["1", "-3.14", "7"]
res = to_numeric(ser)
expected = np.array([1, -3.14, 7])
tm.assert_numpy_array_equal(res, expected)
@pytest.mark.parametrize(
"data,arr_kwargs",
[
([1, 3, 4, 5], {"dtype": np.int64}),
([1.0, 3.0, 4.0, 5.0], {}),
# Boolean is regarded as numeric.
([True, False, True, True], {}),
],
)
def test_list_numeric(data, arr_kwargs):
result = to_numeric(data)
expected = np.array(data, **arr_kwargs)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("kwargs", [{"dtype": "O"}, {}])
def test_numeric(kwargs):
data = [1, -3.14, 7]
ser = Series(data, **kwargs)
result = to_numeric(ser)
expected = Series(data)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"columns",
[
# One column.
"a",
# Multiple columns.
["a", "b"],
],
)
def test_numeric_df_columns(columns):
# see gh-14827
df = DataFrame(
{
"a": [1.2, decimal.Decimal(3.14), decimal.Decimal("infinity"), "0.1"],
"b": [1.0, 2.0, 3.0, 4.0],
}
)
expected = DataFrame({"a": [1.2, 3.14, np.inf, 0.1], "b": [1.0, 2.0, 3.0, 4.0]})
df_copy = df.copy()
df_copy[columns] = df_copy[columns].apply(to_numeric)
tm.assert_frame_equal(df_copy, expected)
@pytest.mark.parametrize(
"data,exp_data",
[
(
[[decimal.Decimal(3.14), 1.0], decimal.Decimal(1.6), 0.1],
[[3.14, 1.0], 1.6, 0.1],
),
([np.array([decimal.Decimal(3.14), 1.0]), 0.1], [[3.14, 1.0], 0.1]),
],
)
def test_numeric_embedded_arr_likes(data, exp_data):
# Test to_numeric with embedded lists and arrays
df = DataFrame({"a": data})
df["a"] = df["a"].apply(to_numeric)
expected = DataFrame({"a": exp_data})
tm.assert_frame_equal(df, expected)
def test_all_nan():
ser = Series(["a", "b", "c"])
result = to_numeric(ser, errors="coerce")
expected = Series([np.nan, np.nan, np.nan])
tm.assert_series_equal(result, expected)
def test_type_check(errors):
# see gh-11776
df = DataFrame({"a": [1, -3.14, 7], "b": ["4", "5", "6"]})
kwargs = {"errors": errors} if errors is not None else {}
with pytest.raises(TypeError, match="1-d array"):
to_numeric(df, **kwargs)
@pytest.mark.parametrize("val", [1, 1.1, 20001])
def test_scalar(val, signed, transform):
val = -val if signed else val
assert to_numeric(transform(val)) == float(val)
def test_really_large_scalar(large_val, signed, transform, errors):
# see gh-24910
kwargs = {"errors": errors} if errors is not None else {}
val = -large_val if signed else large_val
val = transform(val)
val_is_string = isinstance(val, str)
if val_is_string and errors in (None, "raise"):
msg = "Integer out of range. at position 0"
with pytest.raises(ValueError, match=msg):
to_numeric(val, **kwargs)
else:
expected = float(val) if (errors == "coerce" and val_is_string) else val
tm.assert_almost_equal(to_numeric(val, **kwargs), expected)
def test_really_large_in_arr(large_val, signed, transform, multiple_elts, errors):
# see gh-24910
kwargs = {"errors": errors} if errors is not None else {}
val = -large_val if signed else large_val
val = transform(val)
extra_elt = "string"
arr = [val] + multiple_elts * [extra_elt]
val_is_string = isinstance(val, str)
coercing = errors == "coerce"
if errors in (None, "raise") and (val_is_string or multiple_elts):
if val_is_string:
msg = "Integer out of range. at position 0"
else:
msg = 'Unable to parse string "string" at position 1'
with pytest.raises(ValueError, match=msg):
to_numeric(arr, **kwargs)
else:
result = to_numeric(arr, **kwargs)
exp_val = float(val) if (coercing and val_is_string) else val
expected = [exp_val]
if multiple_elts:
if coercing:
expected.append(np.nan)
exp_dtype = float
else:
expected.append(extra_elt)
exp_dtype = object
else:
exp_dtype = float if isinstance(exp_val, (int, float)) else object
tm.assert_almost_equal(result, np.array(expected, dtype=exp_dtype))
def test_really_large_in_arr_consistent(large_val, signed, multiple_elts, errors):
# see gh-24910
#
# Even if we discover that we have to hold float, does not mean
# we should be lenient on subsequent elements that fail to be integer.
kwargs = {"errors": errors} if errors is not None else {}
arr = [str(-large_val if signed else large_val)]
if multiple_elts:
arr.insert(0, large_val)
if errors in (None, "raise"):
index = int(multiple_elts)
msg = f"Integer out of range. at position {index}"
with pytest.raises(ValueError, match=msg):
to_numeric(arr, **kwargs)
else:
result = to_numeric(arr, **kwargs)
if errors == "coerce":
expected = [float(i) for i in arr]
exp_dtype = float
else:
expected = arr
exp_dtype = object
tm.assert_almost_equal(result, np.array(expected, dtype=exp_dtype))
@pytest.mark.parametrize(
"errors,checker",
[
("raise", 'Unable to parse string "fail" at position 0'),
("ignore", lambda x: x == "fail"),
("coerce", lambda x: np.isnan(x)),
],
)
def test_scalar_fail(errors, checker):
scalar = "fail"
if isinstance(checker, str):
with pytest.raises(ValueError, match=checker):
to_numeric(scalar, errors=errors)
else:
assert checker(to_numeric(scalar, errors=errors))
@pytest.mark.parametrize("data", [[1, 2, 3], [1.0, np.nan, 3, np.nan]])
def test_numeric_dtypes(data, transform_assert_equal):
transform, assert_equal = transform_assert_equal
data = transform(data)
result = to_numeric(data)
assert_equal(result, data)
@pytest.mark.parametrize(
"data,exp",
[
(["1", "2", "3"], np.array([1, 2, 3], dtype="int64")),
(["1.5", "2.7", "3.4"], np.array([1.5, 2.7, 3.4])),
],
)
def test_str(data, exp, transform_assert_equal):
transform, assert_equal = transform_assert_equal
result = to_numeric(transform(data))
expected = transform(exp)
assert_equal(result, expected)
def test_datetime_like(tz_naive_fixture, transform_assert_equal):
transform, assert_equal = transform_assert_equal
idx = pd.date_range("20130101", periods=3, tz=tz_naive_fixture)
result = to_numeric(transform(idx))
expected = transform(idx.asi8)
assert_equal(result, expected)
def test_timedelta(transform_assert_equal):
transform, assert_equal = transform_assert_equal
idx = pd.timedelta_range("1 days", periods=3, freq="D")
result = to_numeric(transform(idx))
expected = transform(idx.asi8)
assert_equal(result, expected)
def test_period(transform_assert_equal):
transform, assert_equal = transform_assert_equal
idx = pd.period_range("2011-01", periods=3, freq="M", name="")
inp = transform(idx)
if isinstance(inp, Index):
result = to_numeric(inp)
expected = transform(idx.asi8)
assert_equal(result, expected)
else:
# TODO: PeriodDtype, so support it in to_numeric.
pytest.skip("Missing PeriodDtype support in to_numeric")
@pytest.mark.parametrize(
"errors,expected",
[
("raise", "Invalid object type at position 0"),
("ignore", Series([[10.0, 2], 1.0, "apple"])),
("coerce", Series([np.nan, 1.0, np.nan])),
],
)
def test_non_hashable(errors, expected):
# see gh-13324
ser = Series([[10.0, 2], 1.0, "apple"])
if isinstance(expected, str):
with pytest.raises(TypeError, match=expected):
to_numeric(ser, errors=errors)
else:
result = to_numeric(ser, errors=errors)
tm.assert_series_equal(result, expected)
def test_downcast_invalid_cast():
# see gh-13352
data = ["1", 2, 3]
invalid_downcast = "unsigned-integer"
msg = "invalid downcasting method provided"
with pytest.raises(ValueError, match=msg):
to_numeric(data, downcast=invalid_downcast)
def test_errors_invalid_value():
# see gh-26466
data = ["1", 2, 3]
invalid_error_value = "invalid"
msg = "invalid error value specified"
with pytest.raises(ValueError, match=msg):
to_numeric(data, errors=invalid_error_value)
@pytest.mark.parametrize(
"data",
[
["1", 2, 3],
[1, 2, 3],
np.array(["1970-01-02", "1970-01-03", "1970-01-04"], dtype="datetime64[D]"),
],
)
@pytest.mark.parametrize(
"kwargs,exp_dtype",
[
# Basic function tests.
({}, np.int64),
({"downcast": None}, np.int64),
        # Support for dtypes below np.float32 is few and far between.
({"downcast": "float"}, np.dtype(np.float32).char),
# Basic dtype support.
({"downcast": "unsigned"}, np.dtype(np.typecodes["UnsignedInteger"][0])),
],
)
def test_downcast_basic(data, kwargs, exp_dtype):
# see gh-13352
result = to_numeric(data, **kwargs)
expected = np.array([1, 2, 3], dtype=exp_dtype)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("signed_downcast", ["integer", "signed"])
@pytest.mark.parametrize(
"data",
[
["1", 2, 3],
[1, 2, 3],
np.array(["1970-01-02", "1970-01-03", "1970-01-04"], dtype="datetime64[D]"),
],
)
def test_signed_downcast(data, signed_downcast):
# see gh-13352
smallest_int_dtype = np.dtype(np.typecodes["Integer"][0])
expected = np.array([1, 2, 3], dtype=smallest_int_dtype)
res = to_numeric(data, downcast=signed_downcast)
tm.assert_numpy_array_equal(res, expected)
def test_ignore_downcast_invalid_data():
# If we can't successfully cast the given
# data to a numeric dtype, do not bother
# with the downcast parameter.
data = ["foo", 2, 3]
expected = np.array(data, dtype=object)
res = to_numeric(data, errors="ignore", downcast="unsigned")
tm.assert_numpy_array_equal(res, expected)
def test_ignore_downcast_neg_to_unsigned():
# Cannot cast to an unsigned integer
# because we have a negative number.
data = ["-1", 2, 3]
expected = np.array([-1, 2, 3], dtype=np.int64)
res = to_numeric(data, downcast="unsigned")
tm.assert_numpy_array_equal(res, expected)
@pytest.mark.parametrize("downcast", ["integer", "signed", "unsigned"])
@pytest.mark.parametrize(
"data,expected",
[
(["1.1", 2, 3], np.array([1.1, 2, 3], dtype=np.float64)),
(
[10000.0, 20000, 3000, 40000.36, 50000, 50000.00],
np.array(
[10000.0, 20000, 3000, 40000.36, 50000, 50000.00], dtype=np.float64
),
),
],
)
def test_ignore_downcast_cannot_convert_float(data, expected, downcast):
# Cannot cast to an integer (signed or unsigned)
# because we have a float number.
res = to_numeric(data, downcast=downcast)
tm.assert_numpy_array_equal(res, expected)
@pytest.mark.parametrize(
"downcast,expected_dtype",
[("integer", np.int16), ("signed", np.int16), ("unsigned", np.uint16)],
)
def test_downcast_not8bit(downcast, expected_dtype):
# the smallest integer dtype need not be np.(u)int8
data = ["256", 257, 258]
expected = np.array([256, 257, 258], dtype=expected_dtype)
res = to_numeric(data, downcast=downcast)
tm.assert_numpy_array_equal(res, expected)
@pytest.mark.parametrize(
"dtype,downcast,min_max",
[
("int8", "integer", [iinfo(np.int8).min, iinfo(np.int8).max]),
("int16", "integer", [iinfo(np.int16).min, iinfo(np.int16).max]),
("int32", "integer", [iinfo(np.int32).min, iinfo(np.int32).max]),
("int64", "integer", [iinfo(np.int64).min, iinfo(np.int64).max]),
("uint8", "unsigned", [iinfo(np.uint8).min, iinfo(np.uint8).max]),
("uint16", "unsigned", [iinfo(np.uint16).min, iinfo(np.uint16).max]),
("uint32", "unsigned", [iinfo(np.uint32).min, iinfo(np.uint32).max]),
("uint64", "unsigned", [iinfo(np.uint64).min, iinfo(np.uint64).max]),
("int16", "integer", [iinfo(np.int8).min, iinfo(np.int8).max + 1]),
("int32", "integer", [iinfo(np.int16).min, iinfo(np.int16).max + 1]),
("int64", "integer", [iinfo(np.int32).min, iinfo(np.int32).max + 1]),
("int16", "integer", [iinfo(np.int8).min - 1, iinfo(np.int16).max]),
("int32", "integer", [iinfo(np.int16).min - 1, iinfo(np.int32).max]),
("int64", "integer", [iinfo(np.int32).min - 1, iinfo(np.int64).max]),
("uint16", "unsigned", [iinfo(np.uint8).min, iinfo(np.uint8).max + 1]),
("uint32", "unsigned", [iinfo(np.uint16).min, iinfo(np.uint16).max + 1]),
("uint64", "unsigned", [iinfo(np.uint32).min, iinfo(np.uint32).max + 1]),
],
)
def test_downcast_limits(dtype, downcast, min_max):
# see gh-14404: test the limits of each downcast.
series = to_numeric(Series(min_max), downcast=downcast)
assert series.dtype == dtype
@pytest.mark.parametrize(
"ser,expected",
[
(
Series([0, 9223372036854775808]),
Series([0, 9223372036854775808], dtype=np.uint64),
)
],
)
def test_downcast_uint64(ser, expected):
# see gh-14422:
    # BUG: to_numeric doesn't work with uint64 numbers
result = to_numeric(ser, downcast="unsigned")
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"data,exp_data",
[
(
[200, 300, "", "NaN", 30000000000000000000],
[200, 300, np.nan, np.nan, 30000000000000000000],
),
(
["12345678901234567890", "1234567890", "ITEM"],
[12345678901234567890, 1234567890, np.nan],
),
],
)
def test_coerce_uint64_conflict(data, exp_data):
# see gh-17007 and gh-17125
#
# Still returns float despite the uint64-nan conflict,
# which would normally force the casting to object.
result = to_numeric(Series(data), errors="coerce")
expected = Series(exp_data, dtype=float)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"errors,exp",
[
("ignore", Series(["12345678901234567890", "1234567890", "ITEM"])),
("raise", "Unable to parse string"),
],
)
def test_non_coerce_uint64_conflict(errors, exp):
# see gh-17007 and gh-17125
#
# For completeness.
ser = Series(["12345678901234567890", "1234567890", "ITEM"])
if isinstance(exp, str):
with pytest.raises(ValueError, match=exp):
to_numeric(ser, errors=errors)
else:
result = to_numeric(ser, errors=errors)
tm.assert_series_equal(result, ser)
@pytest.mark.parametrize("dc1", ["integer", "float", "unsigned"])
@pytest.mark.parametrize("dc2", ["integer", "float", "unsigned"])
def test_downcast_empty(dc1, dc2):
# GH32493
tm.assert_numpy_array_equal(
to_numeric([], downcast=dc1),
to_numeric([], downcast=dc2),
check_dtype=False,
)
def test_failure_to_convert_uint64_string_to_NaN():
# GH 32394
result = to_numeric("uint64", errors="coerce")
assert np.isnan(result)
ser = Series([32, 64, np.nan])
result = to_numeric(Series(["32", "64", "uint64"]), errors="coerce")
tm.assert_series_equal(result, ser)
@pytest.mark.parametrize(
"strrep",
[
"243.164",
"245.968",
"249.585",
"259.745",
"265.742",
"272.567",
"279.196",
"280.366",
"275.034",
"271.351",
"272.889",
"270.627",
"280.828",
"290.383",
"308.153",
"319.945",
"336.0",
"344.09",
"351.385",
"356.178",
"359.82",
"361.03",
"367.701",
"380.812",
"387.98",
"391.749",
"391.171",
"385.97",
"385.345",
"386.121",
"390.996",
"399.734",
"413.073",
"421.532",
"430.221",
"437.092",
"439.746",
"446.01",
"451.191",
"460.463",
"469.779",
"472.025",
"479.49",
"474.864",
"467.54",
"471.978",
],
)
def test_precision_float_conversion(strrep):
# GH 31364
result = to_numeric(strrep)
assert result == float(strrep)
@pytest.mark.parametrize(
"values, expected",
[
(["1", "2", None], Series([1, 2, np.nan])),
(["1", "2", "3"], Series([1, 2, 3])),
(["1", "2", 3], Series([1, 2, 3])),
(["1", "2", 3.5], Series([1, 2, 3.5])),
(["1", None, 3.5], Series([1, np.nan, 3.5])),
(["1", "2", "3.5"], Series([1, 2, 3.5])),
],
)
def test_to_numeric_from_nullable_string(values, nullable_string_dtype, expected):
# https://github.com/pandas-dev/pandas/issues/37262
s = Series(values, dtype=nullable_string_dtype)
result = to_numeric(s)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"data, input_dtype, downcast, expected_dtype",
(
([1, 1], "Int64", "integer", "Int8"),
([1.0, pd.NA], "Float64", "integer", "Int8"),
([1.0, 1.1], "Float64", "integer", "Float64"),
([1, pd.NA], "Int64", "integer", "Int8"),
([450, 300], "Int64", "integer", "Int16"),
([1, 1], "Float64", "integer", "Int8"),
([np.iinfo(np.int64).max - 1, 1], "Int64", "integer", "Int64"),
([1, 1], "Int64", "signed", "Int8"),
([1.0, 1.0], "Float32", "signed", "Int8"),
([1.0, 1.1], "Float64", "signed", "Float64"),
([1, pd.NA], "Int64", "signed", "Int8"),
([450, -300], "Int64", "signed", "Int16"),
pytest.param(
[np.iinfo(np.uint64).max - 1, 1],
"UInt64",
"signed",
"UInt64",
marks=pytest.mark.xfail(reason="GH38798"),
),
([1, 1], "Int64", "unsigned", "UInt8"),
([1.0, 1.0], "Float32", "unsigned", "UInt8"),
([1.0, 1.1], "Float64", "unsigned", "Float64"),
([1, pd.NA], "Int64", "unsigned", "UInt8"),
([450, -300], "Int64", "unsigned", "Int64"),
([-1, -1], "Int32", "unsigned", "Int32"),
([1, 1], "Float64", "float", "Float32"),
([1, 1.1], "Float64", "float", "Float32"),
),
)
def test_downcast_nullable_numeric(data, input_dtype, downcast, expected_dtype):
arr = pd.array(data, dtype=input_dtype)
result = to_numeric(arr, downcast=downcast)
expected = pd.array(data, dtype=expected_dtype)
tm.assert_extension_array_equal(result, expected)
def test_downcast_nullable_mask_is_copied():
# GH38974
arr = pd.array([1, 2, pd.NA], dtype="Int64")
result = to_numeric(arr, downcast="integer")
expected = pd.array([1, 2, pd.NA], dtype="Int8")
tm.assert_extension_array_equal(result, expected)
arr[1] = pd.NA # should not modify result
tm.assert_extension_array_equal(result, expected)
def test_to_numeric_scientific_notation():
# GH 15898
result = to_numeric("1.7e+308")
expected = np.float64(1.7e308)
assert result == expected
| bsd-3-clause | -5,607,707,761,760,515,000 | 27.823828 | 88 | 0.58038 | false |
pjryan126/solid-start-careers | store/api/zillow/venv/lib/python2.7/site-packages/pandas/sandbox/qtpandas.py | 1 | 4363 | '''
Easy integration of DataFrame into pyqt framework
@author: Jev Kuznetsov
'''
# flake8: noqa
# GH9615
import warnings
warnings.warn("The pandas.sandbox.qtpandas module is deprecated and will be "
"removed in a future version. We refer users to the external package "
"here: https://github.com/datalyze-solutions/pandas-qt")
try:
from PyQt4.QtCore import QAbstractTableModel, Qt, QVariant, QModelIndex
from PyQt4.QtGui import (
QApplication, QDialog, QVBoxLayout, QTableView, QWidget)
except ImportError:
from PySide.QtCore import QAbstractTableModel, Qt, QModelIndex
from PySide.QtGui import (
QApplication, QDialog, QVBoxLayout, QTableView, QWidget)
QVariant = lambda value=None: value
import numpy as np
from pandas import DataFrame, Index
class DataFrameModel(QAbstractTableModel):
''' data model for a DataFrame class '''
def __init__(self):
super(DataFrameModel, self).__init__()
self.df = DataFrame()
def setDataFrame(self, dataFrame):
self.df = dataFrame
def signalUpdate(self):
''' tell viewers to update their data (this is full update, not
efficient)'''
self.layoutChanged.emit()
#------------- table display functions -----------------
def headerData(self, section, orientation, role=Qt.DisplayRole):
if role != Qt.DisplayRole:
return QVariant()
if orientation == Qt.Horizontal:
try:
return self.df.columns.tolist()[section]
except (IndexError, ):
return QVariant()
elif orientation == Qt.Vertical:
try:
# return self.df.index.tolist()
return self.df.index.tolist()[section]
except (IndexError, ):
return QVariant()
def data(self, index, role=Qt.DisplayRole):
if role != Qt.DisplayRole:
return QVariant()
if not index.isValid():
return QVariant()
return QVariant(str(self.df.ix[index.row(), index.column()]))
def flags(self, index):
flags = super(DataFrameModel, self).flags(index)
flags |= Qt.ItemIsEditable
return flags
def setData(self, index, value, role):
row = self.df.index[index.row()]
col = self.df.columns[index.column()]
if hasattr(value, 'toPyObject'):
# PyQt4 gets a QVariant
value = value.toPyObject()
else:
# PySide gets an unicode
dtype = self.df[col].dtype
if dtype != object:
value = None if value == '' else dtype.type(value)
self.df.set_value(row, col, value)
return True
def rowCount(self, index=QModelIndex()):
return self.df.shape[0]
def columnCount(self, index=QModelIndex()):
return self.df.shape[1]
class DataFrameWidget(QWidget):
''' a simple widget for using DataFrames in a gui '''
def __init__(self, dataFrame, parent=None):
super(DataFrameWidget, self).__init__(parent)
self.dataModel = DataFrameModel()
self.dataTable = QTableView()
self.dataTable.setModel(self.dataModel)
layout = QVBoxLayout()
layout.addWidget(self.dataTable)
self.setLayout(layout)
# Set DataFrame
self.setDataFrame(dataFrame)
def setDataFrame(self, dataFrame):
self.dataModel.setDataFrame(dataFrame)
self.dataModel.signalUpdate()
self.dataTable.resizeColumnsToContents()
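    # Typical embedding (illustrative): construct the widget once, then call
    # setDataFrame(new_df) whenever the data changes; the model emits
    # layoutChanged so the attached view refreshes automatically.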
#-----------------stand alone test code
def testDf():
''' creates test dataframe '''
data = {'int': [1, 2, 3], 'float': [1.5, 2.5, 3.5],
'string': ['a', 'b', 'c'], 'nan': [np.nan, np.nan, np.nan]}
return DataFrame(data, index=Index(['AAA', 'BBB', 'CCC']),
columns=['int', 'float', 'string', 'nan'])
class Form(QDialog):
def __init__(self, parent=None):
super(Form, self).__init__(parent)
df = testDf() # make up some data
widget = DataFrameWidget(df)
        widget.dataTable.resizeColumnsToContents()
layout = QVBoxLayout()
layout.addWidget(widget)
self.setLayout(layout)
if __name__ == '__main__':
import sys
import numpy as np
app = QApplication(sys.argv)
form = Form()
form.show()
app.exec_()
| gpl-2.0 | -1,997,314,913,183,287,800 | 29.089655 | 84 | 0.595691 | false |
ianunruh/hvac | tests/unit_tests/v1/test_aws_iam_methods.py | 1 | 2158 | import json
from base64 import b64decode
from datetime import datetime
from unittest import TestCase
import mock
from hvac import Client
class TestAwsIamMethods(TestCase):
"""Unit tests providing coverage for AWS (EC2) auth backend-related methods/routes."""
@mock.patch('hvac.aws_utils.datetime')
@mock.patch('hvac.v1.Client.login')
def test_auth_aws_iam(self, login_mock, datetime_mock):
datetime_mock.utcnow.return_value = datetime(2015, 8, 30, 12, 36, 0)
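        # Freezing utcnow pins the AWS SigV4 signing time, which makes the
        # Authorization header asserted below deterministic and testable
        # byte-for-byte.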
client = Client()
client.auth_aws_iam('AKIDEXAMPLE', 'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY')
login_mock.assert_called()
args, kwargs = login_mock.call_args
actual_params = kwargs['json']
actual_iam_http_request_method = actual_params['iam_http_request_method']
self.assertEqual('POST', actual_iam_http_request_method)
actual_iam_request_url = b64decode(actual_params['iam_request_url']).decode('utf-8')
self.assertEqual('https://sts.amazonaws.com/', actual_iam_request_url)
expected_auth_header_parts = [
'Credential=AKIDEXAMPLE/20150830/us-east-1/sts/aws4_request',
'SignedHeaders=content-length;content-type;host;x-amz-date',
'Signature=0268ea4a725deae1116f5228d6b177fb047f9f3a9e1c5fd4baa0dc1fbb0d1a99',
]
expected_iam_request_headers = {
'Authorization': ['{0} {1}'.format('AWS4-HMAC-SHA256', ', '.join(expected_auth_header_parts))],
'Content-Length': ['43'],
'Content-Type': ['application/x-www-form-urlencoded; charset=utf-8'],
'Host': ['sts.amazonaws.com'],
'X-Amz-Date': ['20150830T123600Z'],
}
actual_iam_request_headers = json.loads(b64decode(actual_params['iam_request_headers']))
self.assertEqual(expected_iam_request_headers, actual_iam_request_headers)
actual_iam_request_body = b64decode(actual_params['iam_request_body']).decode('utf-8')
self.assertEqual('Action=GetCallerIdentity&Version=2011-06-15', actual_iam_request_body)
actual_role = actual_params['role']
self.assertEqual('', actual_role)
| apache-2.0 | -8,182,685,032,218,114,000 | 42.16 | 107 | 0.666821 | false |
radiasoft/radtrack | radtrack/ui/rbcbt.py | 1 | 5681 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'radtrack/ui/rbcbt.ui'
#
# Created: Thu Jun 16 05:40:41 2016
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_RBCBT(object):
def setupUi(self, RBCBT):
RBCBT.setObjectName(_fromUtf8("RBCBT"))
RBCBT.resize(644, 938)
self.verticalLayout_4 = QtGui.QVBoxLayout(RBCBT)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.label = QtGui.QLabel(RBCBT)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout_4.addWidget(self.label)
self.elementButtonLayout = QtGui.QHBoxLayout()
self.elementButtonLayout.setObjectName(_fromUtf8("elementButtonLayout"))
self.verticalLayout_4.addLayout(self.elementButtonLayout)
self.splitter = QtGui.QSplitter(RBCBT)
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setChildrenCollapsible(False)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.layoutWidget = QtGui.QWidget(self.splitter)
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.elementListLayout = QtGui.QVBoxLayout(self.layoutWidget)
self.elementListLayout.setMargin(0)
self.elementListLayout.setObjectName(_fromUtf8("elementListLayout"))
self.elementListLabel = QtGui.QLabel(self.layoutWidget)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.elementListLabel.setFont(font)
self.elementListLabel.setObjectName(_fromUtf8("elementListLabel"))
self.elementListLayout.addWidget(self.elementListLabel)
self.treeWidget = dtreeWidget(self.layoutWidget)
self.treeWidget.setObjectName(_fromUtf8("treeWidget"))
self.elementListLayout.addWidget(self.treeWidget)
self.layoutWidget1 = QtGui.QWidget(self.splitter)
self.layoutWidget1.setObjectName(_fromUtf8("layoutWidget1"))
self.beamlineEditorLayout = QtGui.QVBoxLayout(self.layoutWidget1)
self.beamlineEditorLayout.setMargin(0)
self.beamlineEditorLayout.setObjectName(_fromUtf8("beamlineEditorLayout"))
self.beamlineEditorLabel = QtGui.QLabel(self.layoutWidget1)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.beamlineEditorLabel.setFont(font)
self.beamlineEditorLabel.setObjectName(_fromUtf8("beamlineEditorLabel"))
self.beamlineEditorLayout.addWidget(self.beamlineEditorLabel)
self.beamlineEditorLayout_2 = QtGui.QHBoxLayout()
self.beamlineEditorLayout_2.setObjectName(_fromUtf8("beamlineEditorLayout_2"))
self.workingBeamline = dlistWidget(self.layoutWidget1)
self.workingBeamline.setObjectName(_fromUtf8("workingBeamline"))
self.beamlineEditorLayout_2.addWidget(self.workingBeamline)
self.saveBeamlineButton = QtGui.QPushButton(self.layoutWidget1)
self.saveBeamlineButton.setObjectName(_fromUtf8("saveBeamlineButton"))
self.beamlineEditorLayout_2.addWidget(self.saveBeamlineButton)
self.clearBeamlineButton = QtGui.QPushButton(self.layoutWidget1)
self.clearBeamlineButton.setObjectName(_fromUtf8("clearBeamlineButton"))
self.beamlineEditorLayout_2.addWidget(self.clearBeamlineButton)
self.beamlineEditorLayout.addLayout(self.beamlineEditorLayout_2)
self.layoutWidget2 = QtGui.QWidget(self.splitter)
self.layoutWidget2.setObjectName(_fromUtf8("layoutWidget2"))
self.graphicsLayout = QtGui.QVBoxLayout(self.layoutWidget2)
self.graphicsLayout.setMargin(0)
self.graphicsLayout.setObjectName(_fromUtf8("graphicsLayout"))
self.graphicsLabel = QtGui.QLabel(self.layoutWidget2)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.graphicsLabel.setFont(font)
self.graphicsLabel.setObjectName(_fromUtf8("graphicsLabel"))
self.graphicsLayout.addWidget(self.graphicsLabel)
self.graphicsView = beamGraphicsWindow(self.layoutWidget2)
self.graphicsView.setObjectName(_fromUtf8("graphicsView"))
self.graphicsLayout.addWidget(self.graphicsView)
self.verticalLayout_4.addWidget(self.splitter)
self.retranslateUi(RBCBT)
QtCore.QMetaObject.connectSlotsByName(RBCBT)
def retranslateUi(self, RBCBT):
RBCBT.setWindowTitle(_translate("RBCBT", "Widget", None))
self.label.setText(_translate("RBCBT", "New Beamline Elements", None))
self.elementListLabel.setText(_translate("RBCBT", "Beamline Element List", None))
self.beamlineEditorLabel.setText(_translate("RBCBT", "Beamline Editor - Drag elements here to create beamlines", None))
self.saveBeamlineButton.setText(_translate("RBCBT", "Save Beamline", None))
self.clearBeamlineButton.setText(_translate("RBCBT", "Clear Beamline", None))
self.graphicsLabel.setText(_translate("RBCBT", "Graphical Preview", None))
from cbt import beamGraphicsWindow, dlistWidget, dtreeWidget
| apache-2.0 | -5,649,579,195,361,632,000 | 48.833333 | 127 | 0.71924 | false |
gplib/gplib | gplib/apps/extauth/ldapauth.py | 1 | 16002 | # -*- coding: utf-8 -*-
# Este archivo es parte de GPLib - http://gplib.org/
#
# GPlib es software libre desarrollado en la Facultad de Filosofía y Letras de
# la Universidad de Buenos Aires y liberado bajo los términos de la licencia
# GPLIB FILO www.gplib.org/licencia bajo los términos de GPL de GNU. Usted
# puede redistribuirlo y/o modificarlo bajo los términos de la licencia GPLIB
# FILO de GNU General Public License como esta publicado en la Free Software
# Foundation, tanto en la versión 3 de la licencia, o cualquiera de las
# versiones futuras Gplib es distribuido con el objetivo de que sea útil, pero
# SIN NINGUNA GARANTÍA DE FUNCIONAMIENTO; ni siquiera la garantía implícita de
# que sirva para un propósito particular. Cuando implemente este sistema
# sugerimos el registro en www.gplib.org/registro, con el fin de fomentar una
# comunidad de usuarios de GPLib. Ver la GNU General Public License para más
# detalles.http://www.gnu.org/licenses/>
#
#
# Este arquivo é parte do GPLib http://gplib.org/
#
# GPLib é sofware livre desenviolvido na Faculdade de Filosofia e Letras da
# Universidade de Buenos Aires e liberado sob os termos da licença GPLib FILO
# www.gplib.org/licencia/ sob os termos de GPL de GNU. Você pode redistribuí-lo
# e/ou modificá-lo sob os termos da licença pública geral GNU como publicado na
# Free Software Foundation , tanto na versão 3 da licença ou quaisquer
# versões futuras. GPLib é distribuído com o objetivo de que seja útil, mas SEM
# QUALQUER GARANTIA DE PERFORMANCE; nem a garantia implícita de que servem a uma
# finalidade específica. Quando você implementar este sistema sugerimos o
# registro em www.gplib.org/registro/, a fim de promover uma comunidade de
# usuarios do GPLib. Veja a GNU General Public License para mais detalles.
# http://www.gnu.org/licenses/
#
#
# This file is part of GPLib - http://gplib.org/
#
# GPLib is free software developed by Facultad de Filosofia y Letras Universidad
# de Buenos Aires and distributed under the scope of GPLIB FILO
# www.gplib.org/license and the GPL Public License GNU. You can redistribute it
# and/or modify it under the terms of the GPLIB FILO GNU General Public License
# as published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# GPLib is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. After roll your own version of GPLIB you may register
# at www.gplib.org/register to buld a comunity of users and developers. See the
# GNU General Public License for more details.
from django.conf import settings
from django.contrib.auth.models import User
import ldap
import logging
from gplib.apps.couchauth.libs import create_couch_user
class LDAPBackend(object):
"""
Authenticate a user against LDAP.
Requires python-ldap to be installed.
Requires the following things to be in settings.py:
LDAP_DEBUG -- boolean
Uses logging module for debugging messages.
LDAP_SERVER_URI -- string, ldap uri.
default: 'ldap://localhost'
LDAP_SEARCHDN -- string of the LDAP dn to use for searching
default: 'dc=localhost'
LDAP_SCOPE -- one of: ldap.SCOPE_*, used for searching
see python-ldap docs for the search function
default = ldap.SCOPE_SUBTREE
LDAP_SEARCH_FILTER -- formated string, the filter to use for searching for a
user. Used as: filterstr = LDAP_SEARCH_FILTER % username
default = 'cn=%s'
LDAP_UPDATE_FIELDS -- boolean, do we sync the db with ldap on each auth
default = True
Required unless LDAP_FULL_NAME is set:
LDAP_FIRST_NAME -- string, LDAP attribute to get the given name from
LDAP_LAST_NAME -- string, LDAP attribute to get the last name from
Optional Settings:
LDAP_FULL_NAME -- string, LDAP attribute to get name from, splits on ' '
LDAP_GID -- string, LDAP attribute to get group name/number from
LDAP_SU_GIDS -- list of strings, group names/numbers that are superusers
LDAP_STAFF_GIDS -- list of strings, group names/numbers that are staff
LDAP_EMAIL -- string, LDAP attribute to get email from
LDAP_DEFAULT_EMAIL_SUFFIX -- string, appened to username if no email found
LDAP_OPTIONS -- hash, python-ldap global options and their values
{ldap.OPT_X_TLS_CACERTDIR: '/etc/ldap/ca/'}
LDAP_ACTIVE_FIELD -- list of strings, LDAP attribute to get active status
from
LDAP_ACTIVE -- list of strings, allowed for active from LDAP_ACTIVE_FIELD
You must pick a method for determining the DN of a user and set the needed
settings:
- You can set LDAP_BINDDN and LDAP_BIND_ATTRIBUTE like:
LDAP_BINDDN = 'ou=people,dc=example,dc=com'
LDAP_BIND_ATTRIBUTE = 'uid'
and the user DN would be:
'uid=%s,ou=people,dc=example,dc=com' % username
- Look for the DN on the directory, this is what will happen if you do
not define the LDAP_BINDDN setting. In that case you may need to
define LDAP_PREBINDDN and LDAP_PREBINDPW if your LDAP server does not
allow anonymous queries. The search will be performed with the
LDAP_SEARCH_FILTER setting.
- Override the _pre_bind() method, which receives the ldap object and
the username as it's parameters and should return the DN of the user.
By inheriting this class you can change:
- How the dn to bind with is produced by overriding _pre_bind()
- What type of user object to use by overriding: _get_user_by_name(),
_create_user_object(), and get_user()
"""
import ldap
from django.conf import settings
from django.contrib.auth.models import User
def __init__(self):
self.settings = {
'LDAP_SERVER_URI': 'ldap://localhost',
'LDAP_SEARCHDN': 'dc=localhost',
'LDAP_SCOPE': ldap.SCOPE_SUBTREE,
'LDAP_SEARCH_FILTER': 'cn=%s',
'LDAP_UPDATE_FIELDS': True,
'LDAP_PREBINDDN': None,
'LDAP_PREBINDPW': None,
'LDAP_BINDDN': None,
'LDAP_BIND_ATTRIBUTE': None,
'LDAP_FIRST_NAME': None,
'LDAP_LAST_NAME': None,
'LDAP_FULL_NAME': None,
'LDAP_GID': None,
'LDAP_SU_GIDS': None,
'LDAP_STAFF_GIDS': None,
'LDAP_ACTIVE_FIELD': None,
'LDAP_ACTIVE': None,
'LDAP_EMAIL': None,
'LDAP_DEFAULT_EMAIL_SUFFIX': None,
'LDAP_OPTIONS': None,
'LDAP_DEBUG': True,
}
# Load settings from settings.py, put them on self.settings
# overriding the defaults.
for var in self.settings.iterkeys():
if hasattr(settings, var):
self.settings[var] = settings.__getattr__(var)
def authenticate(self, username=None, password=None):
# Make sure we have a user and pass
        if not username or password is None:
            if self.settings['LDAP_DEBUG']:
logging.info('LDAPBackend.authenticate failed: username or password empty: %s %s' % (
username, password))
return None
if self.settings['LDAP_OPTIONS']:
for k in self.settings['LDAP_OPTIONS']:
                self.ldap.set_option(k, self.settings['LDAP_OPTIONS'][k])
l = self.ldap.initialize(self.settings['LDAP_SERVER_URI'])
try:
bind_string = self._pre_bind(l, username)
except:
return None
if not bind_string:
if self.settings['LDAP_DEBUG']:
logging.info('LDAPBackend.authenticate failed: _pre_bind return no bind_string (%s, %s)' % (
l, username))
return None
try:
# Try to bind as the provided user. We leave the bind until
# the end for other ldap.search_s call to work authenticated.
l.bind_s(bind_string, password)
except (self.ldap.INVALID_CREDENTIALS,
self.ldap.UNWILLING_TO_PERFORM), exc:
# Failed user/pass (or missing password)
if self.settings['LDAP_DEBUG']:
logging.info('LDAPBackend.authenticate failed: %s' % exc)
l.unbind_s()
return None
try:
user = self._get_user_by_name(username)
except User.DoesNotExist:
user = self._get_ldap_user(l, username)
if user is not None:
if self.settings['LDAP_UPDATE_FIELDS']:
self._update_user(l, user)
l.unbind_s()
if self.settings['LDAP_DEBUG']:
if user is None:
logging.info('LDAPBackend.authenticate failed: user is None')
else:
logging.info('LDAPBackend.authenticate ok: %s %s' % (user, user.__dict__))
create_couch_user(username, password)
return user
# Functions provided to override to customize to your LDAP configuration.
def _pre_bind(self, l, username):
"""
Function that returns the dn to bind against ldap with.
called as: self._pre_bind(ldapobject, username)
"""
if not self.settings['LDAP_BINDDN']:
# When the LDAP_BINDDN setting is blank we try to find the
# dn binding anonymously or using LDAP_PREBINDDN
if self.settings['LDAP_PREBINDDN']:
try:
l.simple_bind_s(self.settings['LDAP_PREBINDDN'],
self.settings['LDAP_PREBINDPW'])
except self.ldap.LDAPError, exc:
if self.settings['LDAP_DEBUG']:
logging.info('LDAPBackend _pre_bind: LDAPError : %s' % exc)
logging.info("LDAP_PREBINDDN: "+self.settings['LDAP_PREBINDDN']+" PW "+self.settings['LDAP_PREBINDPW'])
return None
# Now do the actual search
filter = self.settings['LDAP_SEARCH_FILTER'] % username
result = l.search_s(self.settings['LDAP_SEARCHDN'],
self.settings['LDAP_SCOPE'], filter, attrsonly=1)
if len(result) != 1:
if self.settings['LDAP_DEBUG']:
logging.info('LDAPBackend _pre_bind: not exactly one result: %s (%s %s %s)' % (
result, self.settings['LDAP_SEARCHDN'], self.settings['LDAP_SCOPE'], filter))
return None
return result[0][0]
else:
# LDAP_BINDDN is set so we use it as a template.
return "%s=%s,%s" % (self.settings['LDAP_BIND_ATTRIBUTE'], username,
self.settings['LDAP_BINDDN'])
def _get_user_by_name(self, username):
"""
Returns an object of contrib.auth.models.User that has a matching
username.
called as: self._get_user_by_name(username)
"""
return User.objects.get(username=username)
def _create_user_object(self, username, password):
"""
Creates and returns an object of contrib.auth.models.User.
called as: self._create_user_object(username, password)
"""
return User(username=username, password=password)
# Required for an authentication backend
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except:
return None
# End of functions to override
def _get_ldap_user(self, l, username):
"""
Helper method, makes a user object and call update_user to populate
"""
# Generate a random password string.
password = User.objects.make_random_password(10)
user = self._create_user_object(username, password)
return user
def _update_user(self, l, user):
"""
Helper method, populates a user object with various attributes from
LDAP.
"""
username = user.username
filter = self.settings['LDAP_SEARCH_FILTER'] % username
# Get results of search and make sure something was found.
# At this point this shouldn't fail.
hold = l.search_s(self.settings['LDAP_SEARCHDN'],
self.settings['LDAP_SCOPE'], filter)
if len(hold) < 1:
raise AssertionError('No results found with: %s' % (filter))
dn = hold[0][0]
attrs = hold[0][1]
firstn = self.settings['LDAP_FIRST_NAME'] or None
lastn = self.settings['LDAP_LAST_NAME'] or None
emailf = self.settings['LDAP_EMAIL'] or None
if firstn:
if firstn in attrs:
user.first_name = attrs[firstn][0]
else:
raise NameError('Missing attribute: %s in result for %s'
% (firstn, dn))
if lastn:
if lastn in attrs:
user.last_name = attrs[lastn][0]
else:
raise NameError('Missing attribute: %s in result for %s'
% (lastn, dn))
if not firstn and not lastn and self.settings['LDAP_FULL_NAME']:
fulln = self.settings['LDAP_FULL_NAME']
if fulln in attrs:
tmp = attrs[fulln][0]
user.first_name = tmp.split(' ')[0]
user.last_name = ' '.join(tmp.split(' ')[1:])
else:
raise NameError('Missing attribute: %s in result for %s'
% (fulln, dn))
if emailf and emailf in attrs:
user.email = attrs[emailf][0]
elif self.settings['LDAP_DEFAULT_EMAIL_SUFFIX']:
user.email = username + self.settings['LDAP_DEFAULT_EMAIL_SUFFIX']
# Check if we are mapping an ldap id to check if the user is staff or super
# Other wise the user is created but not give access
if ('LDAP_GID' in self.settings
and self.settings['LDAP_GID'] in attrs):
# Turn off access flags
user.is_superuser = False
user.is_staff = False
check_staff_flag = True
gids = set(attrs[self.settings['LDAP_GID']])
# Check to see if we are mapping any super users
            if self.settings['LDAP_SU_GIDS']:
su_gids = set(self.settings['LDAP_SU_GIDS'])
# If any of the su_gids exist in the gid_data then the user is super
if (len(gids-su_gids) < len(gids)):
user.is_superuser = True
user.is_staff = True
# No need to check if a staff user
check_staff_flag = False
# Check for staff user?
            if self.settings['LDAP_STAFF_GIDS'] and check_staff_flag:
# We are checking to see if the user is staff
staff_gids = set(self.settings['LDAP_STAFF_GIDS'])
if (len(gids-staff_gids) < len(gids)):
user.is_staff = True
# Check if we need to see if a user is active
        if self.settings['LDAP_ACTIVE_FIELD']:
            user.is_active = False
            if (self.settings['LDAP_ACTIVE_FIELD'] in attrs
                    and self.settings['LDAP_ACTIVE']):
                active_data = set(attrs[self.settings['LDAP_ACTIVE_FIELD']])
                active_flags = set(self.settings['LDAP_ACTIVE'])
# if any of the active flags exist in the active data then
# the user is active
if (len(active_data-active_flags) < len(active_data)):
user.is_active = True
else:
# LDAP_ACTIVE_FIELD not defined, all users are active
user.is_active = True
user.save()
| gpl-3.0 | 2,061,814,885,940,155,600 | 42.407609 | 127 | 0.605922 | false |
DimaWittmann/Regenschirm | regenschirm/settings.py | 1 | 2379 | """
Django settings for regenschirm project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'fvt7qozy)2mgo!^gxlln-sx#*-absdfoe0_gqtryvvs_lc6l#$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'regenschirm.urls'
WSGI_APPLICATION = 'regenschirm.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'database',
'USER': 'wittmann',
'PASSWORD': 'Dima-1993',
'HOST': 'localhost',
'PORT': '',
}
}
import dj_database_url
DATABASES['default'] = dj_database_url.config()
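# dj_database_url.config() parses the DATABASE_URL environment variable
# (the Heroku convention); when that variable is set, this line replaces
# the hard-coded PostgreSQL credentials defined above.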
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'), )
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
| gpl-2.0 | -3,644,200,480,265,471,000 | 24.042105 | 71 | 0.711223 | false |
nguyenkims/projecteuler-python | src/p85.py | 1 | 1049 |
def f(m,n):
 '''Return the number of rectangles that an m x n grid contains.'''
s=0
for a in range(1,m+1):
for b in range(1,n+1):
s+= (m-a+1)*(n-b+1)
return s
print f(1,1),f(2,4), f(3,3)
def g(m,n):
 '''The same as f(m,n), except g(m,n) is calculated recursively.'''
if m==0:
return 0
elif m == 1 :
return n * (n+1) /2
else:
return 2* g(m-1,n) - g(m-2,n) + n*(n+1)/2
print g(1,1), g(2,1), g(2,3), g(3,3)
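# Why the recurrence in g works: f(m,n) = m*(m+1)/2 * n*(n+1)/2 (a rectangle
# is a choice of two horizontal and two vertical grid lines), so the second
# difference of f in m is the constant n*(n+1)/2, which gives
# g(m,n) = 2*g(m-1,n) - g(m-2,n) + n*(n+1)/2.
assert f(3, 3) == g(3, 3)  # small sanity check mirroring the prints above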
limit = 2 * 10 **6
M=200
N=2000
L = {}  # L maps (m, n) to f(m, n)
def fillL():
for m in range(0,M):
for n in range(0,N):
if m==0:
L[(m,n)]=0
elif m == 1 :
L[(m,n)] = (n * (n+1)) /2
else:
L[(m,n)] = 2* L[(m-1,n)] - L[(m-2,n)] + n*(n+1)/2
fillL()
print 'L is filled'
# print L[(3,3)], L[(2,3)], L[(100,100)], L[(20,200)] , L[(672,854)]
def main() :
 minimum = 10 ** 6
for m in range(1,M):
for n in range(1, N):
if m*n + n*(n+1) + m*(m+1)> 3*limit:
pass
else:
t = L[(m,n)]
# t= g(m,n)
    if abs(t - limit) < minimum:
     minimum = abs(t - limit)
print m,n,t, m*n
main() | mit | -4,020,211,150,527,830,000 | 20.428571 | 68 | 0.481411 | false |
pnichols104/python-koans | python2/koans/about_control_statements.py | 1 | 2053 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutControlStatements(Koan):
def test_if_then_else_statements(self):
if True:
result = 'true value'
else:
result = 'false value'
self.assertEqual('true value', result)
def test_if_then_statements(self):
result = 'default value'
if True:
result = 'true value'
self.assertEqual('true value', result)
def test_while_statement(self):
i = 1
result = 1
while i <= 5:
result = result * i
i += 1
self.assertEqual(120, result)
def test_break_statement(self):
i = 1
result = 1
while True:
if i > 5:
break
result = result * i
i += 1
self.assertEqual(120, result)
def test_continue_statement(self):
i = 0
result = []
while i < 5:
i += 1
if (i % 2) == 0:
continue
result.append(i)
self.assertEqual([1, 3, 5], result)
def test_for_statement(self):
phrase = ["fish", "and", "chips"]
def cap(word):
return word.upper()
result = map(cap, phrase)
self.assertEqual(['FISH', 'AND', 'CHIPS'], result)
def test_for_statement_with_tuples(self):
round_table = [
("Lancelot", "Blue"),
("Galahad", "I don't know!"),
("Robin", "Blue! I mean Green!"),
("Arthur", "Is that an African Swallow or Amazonian Swallow?")
]
result = []
for knight, answer in round_table:
result.append("Contestant: '" + knight + \
"' Answer: '" + answer + "'")
text = "Contestant: '" + 'Robin' + "' Answer: '" + 'Blue! I mean Green!' + "'"
self.assertMatch(text, result[2])
self.assertNoMatch(text, result[0])
self.assertNoMatch(text, result[1])
self.assertNoMatch(text, result[3])
| mit | -911,688,356,711,184,600 | 25.662338 | 88 | 0.494398 | false |
MakarenaLabs/Orator-Google-App-Engine | orator/commands/migrations/base_command.py | 1 | 1798 | # -*- coding: utf-8 -*-
import os
from cleo import Command, InputOption, ListInput
from orator import DatabaseManager
class BaseCommand(Command):
def __init__(self, resolver=None):
self._resolver = resolver
super(BaseCommand, self).__init__()
def configure(self):
if not self._resolver:
self.add_option('config', 'c',
InputOption.VALUE_REQUIRED,
'The config file path')
def execute(self, i, o):
"""
Executes the command.
:type i: cleo.inputs.input.Input
:type o: cleo.outputs.output.Output
"""
if not self._resolver:
config = self._get_config(i)
self._resolver = DatabaseManager(config)
def call(self, name, options=None, o=None):
"""
Call another command.
:param name: The command name
:type name: str
:param options: The options
:type options: list or None
:param o: The output
:type o: cleo.outputs.output.Output
"""
if options is None:
options = []
command = self.get_application().find(name)
options = [('command', command.get_name())] + options
return command.run(ListInput(options), o)
def _get_migration_path(self):
return os.path.join(os.getcwd(), 'migrations')
def _get_config(self, i):
"""
Get the config.
:type i: cleo.inputs.input.Input
:rtype: dict
"""
variables = {}
if not i.get_option('config'):
raise Exception('The --config|-c option is missing.')
with open(i.get_option('config')) as fh:
exec(fh.read(), {}, variables)
return variables['DATABASES']
| mit | -466,278,202,544,454,000 | 23.972222 | 65 | 0.546719 | false |
mindbody/API-Examples | SDKs/Python/swagger_client/models/add_client_response.py | 1 | 3305 | # coding: utf-8
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.client import Client # noqa: F401,E501
class AddClientResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'client': 'Client'
}
attribute_map = {
'client': 'Client'
}
def __init__(self, client=None): # noqa: E501
"""AddClientResponse - a model defined in Swagger""" # noqa: E501
self._client = None
self.discriminator = None
if client is not None:
self.client = client
@property
def client(self):
"""Gets the client of this AddClientResponse. # noqa: E501
Contains information about the client. # noqa: E501
:return: The client of this AddClientResponse. # noqa: E501
:rtype: Client
"""
return self._client
@client.setter
def client(self, client):
"""Sets the client of this AddClientResponse.
Contains information about the client. # noqa: E501
:param client: The client of this AddClientResponse. # noqa: E501
:type: Client
"""
self._client = client
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AddClientResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AddClientResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| bsd-2-clause | 4,975,355,453,291,282,000 | 26.773109 | 119 | 0.554614 | false |
instantchow/home-assistant | homeassistant/components/rfxtrx.py | 1 | 2671 | """
Support for RFXtrx components.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/rfxtrx/
"""
import logging
from homeassistant.util import slugify
REQUIREMENTS = ['https://github.com/Danielhiversen/pyRFXtrx/' +
'archive/0.5.zip#pyRFXtrx==0.5']
DOMAIN = "rfxtrx"
ATTR_DEVICE = 'device'
ATTR_DEBUG = 'debug'
ATTR_STATE = 'state'
ATTR_NAME = 'name'
ATTR_PACKETID = 'packetid'
ATTR_FIREEVENT = 'fire_event'
ATTR_DATA_TYPE = 'data_type'
ATTR_DUMMY = "dummy"
EVENT_BUTTON_PRESSED = 'button_pressed'
RECEIVED_EVT_SUBSCRIBERS = []
RFX_DEVICES = {}
_LOGGER = logging.getLogger(__name__)
RFXOBJECT = None
def setup(hass, config):
"""Setup the RFXtrx component."""
    # Declare the event handler for received RFXtrx events.
def handle_receive(event):
"""Callback all subscribers for RFXtrx gateway."""
# Log RFXCOM event
if not event.device.id_string:
return
entity_id = slugify(event.device.id_string.lower())
packet_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (entity_id, packet_id)
_LOGGER.info("Receive RFXCOM event from %s => %s",
event.device, entity_name)
# Callback to HA registered components.
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
# Try to load the RFXtrx module.
import RFXtrx as rfxtrxmod
# Init the rfxtrx module.
global RFXOBJECT
if ATTR_DEVICE not in config[DOMAIN]:
_LOGGER.error(
"can not find device parameter in %s YAML configuration section",
DOMAIN
)
return False
device = config[DOMAIN][ATTR_DEVICE]
debug = config[DOMAIN].get(ATTR_DEBUG, False)
dummy_connection = config[DOMAIN].get(ATTR_DUMMY, False)
if dummy_connection:
RFXOBJECT =\
rfxtrxmod.Core(device, handle_receive, debug=debug,
transport_protocol=rfxtrxmod.DummyTransport2)
else:
RFXOBJECT = rfxtrxmod.Core(device, handle_receive, debug=debug)
return True
def get_rfx_object(packetid):
"""Return the RFXObject with the packetid."""
import RFXtrx as rfxtrxmod
binarypacket = bytearray.fromhex(packetid)
pkt = rfxtrxmod.lowlevel.parse(binarypacket)
if pkt is not None:
if isinstance(pkt, rfxtrxmod.lowlevel.SensorPacket):
obj = rfxtrxmod.SensorEvent(pkt)
elif isinstance(pkt, rfxtrxmod.lowlevel.Status):
obj = rfxtrxmod.StatusEvent(pkt)
else:
obj = rfxtrxmod.ControlEvent(pkt)
return obj
return None
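# Example usage (the packet id below is hypothetical):
#   event = get_rfx_object('0b1100cd0213c7f210010f51')
#   if event is not None:
#       print(event.device.id_string)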
| mit | -4,113,527,993,356,664,000 | 27.115789 | 77 | 0.643579 | false |
jabelone/Unearthed2017 | tensorflowshit/helloworld.py | 1 | 3595 | '''
HelloWorld example using TensorFlow library.
Author: Aymeric Damien
Project: https://github.com/aymericdamien/TensorFlow-Examples/
'''
from __future__ import print_function
import tensorflow as tf
import csv
import time
def getNetGraph(X, h1size):
with tf.name_scope('hidden'):
weights = tf.Variable(tf.random_normal([tf.size(X), h1size]), name='weights')
biases = tf.Variable(tf.zeros([h1size], tf.float32), name='biases')
        hidden1 = tf.nn.relu(tf.matmul(X, weights) + biases)
with tf.name_scope('output'):
weights = tf.Variable(tf.random_normal([h1size, 1]), name='weights')
# weights = tf.Print(weights, [weights])
bias = tf.Variable(0.00, tf.float32, name='bias')
output = tf.matmul(hidden1, weights) + bias
return output
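# Note: getNetGraph builds a single-hidden-layer fully connected network; X is
# assumed to be a [1, num_features] row vector so the matmuls above line up.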
def loss(X, target):
    # Absolute-error (L1) loss.
return tf.abs(X - target)
def pruneRow(row, columnIndexes, targetColIndex):
prunedRow = [0 if row[index] == 'NULL' else row[index] for index in columnIndexes]
return (prunedRow, row[targetColIndex])
featuresColNames = ['Casing Pressure',
'Gas Flow (Volume)',
'Motor Speed',
'Motor Torque',
'Pump Speed Actual',
'Tubing Flow Meter',
'Tubing Pressure',
'Water Flow Mag from Separator']
targetColName = 'Downhole Gauge Pressure'
with open('D:/unearthed/Bottom Hole Pressure and Fluid Level Challenge/Data/Well1B3mths.csv',
newline='') as csvFile:
csvReader = csv.reader(csvFile)
allColNames = next(csvReader)
featuresColIndexes = [allColNames.index(name) for name in featuresColNames]
targetColIndex = allColNames.index(targetColName)
print("feature column indexes", featuresColIndexes)
print("target column index", targetColIndex)
learning_rate = 0.00005
learning_iterations = 100
hiddenLayerSize = 8
# trainingSet = [pruneRow(next(csvReader), featuresColIndexes, targetColIndex)
# for i in range(100)]
trainX = [[1,2,3,4,5,6,7,8]]
target = [[30]]
tf.set_random_seed(time.time())
targetPlaceholder = tf.placeholder(tf.float32, shape=[1,1], name='phTarget')
inputPlaceholder = tf.placeholder(tf.float32, shape = [1,len(featuresColIndexes)], name='phIn')
netGraph = getNetGraph(inputPlaceholder, hiddenLayerSize)
lossVal = loss(netGraph, targetPlaceholder)
trainOp = tf.train.GradientDescentOptimizer(learning_rate).minimize(lossVal)
sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init, feed_dict={inputPlaceholder: trainX, targetPlaceholder: target})
testSet = [next(csvReader) for i in range(50)]
x = 0
for line in csvReader:
x = x + 1
if x > 5000: break
pruned = pruneRow(line, featuresColIndexes, targetColIndex)
# print("Train row " + str(i) + ":", pruned)
# for epoch in range(learning_iterations):
sess.run(trainOp, feed_dict={inputPlaceholder: [pruned[0]],
targetPlaceholder: [[pruned[1]]]})
# print(sess.run(lossVal, feed_dict={inputPlaceholder: [pruned[0]],
# targetPlaceholder: [[pruned[1]]]}))
for i in range(len(testSet)):
testRow = pruneRow(testSet[i], featuresColIndexes, targetColIndex)
print ("Test Row " + str(i) + ":", testRow[1])
print(sess.run(netGraph, feed_dict={inputPlaceholder: [testRow[0]]}))
sess.close()
| gpl-3.0 | 6,357,136,693,624,156,000 | 32.598131 | 99 | 0.625035 | false |
smattis/BET-1 | examples/FEniCS/BET_multiple_serial_models_script.py | 1 | 5328 | #! /usr/bin/env python
# Copyright (C) 2014-2019 The BET Development Team
r"""
This example requires the following external packages not shipped
with BET:
(1) An installation of FEniCS that can be run using the same
python as used for installing BET. See http://fenicsproject.org/
for more information.
(2) A copy of Launcher. See https://github.com/TACC/launcher for
more information. The user needs to set certain environment
variables inside of lbModel.py for this to run. See lbModel.py
for more information.
This example generates samples for a KL expansion associated with
a covariance defined by ``cov`` in computeSaveKL.py on an L-shaped mesh
that defines the permeability field for a Poisson equation solved in
myModel.py.
The quantities of interest (QoI) are defined as two spatial
averages of the solution to the PDE.
The user defines the dimension of the parameter space (corresponding
to the number of KL terms) and the number of samples in this space.
"""
import numpy as np
import bet.calculateP.simpleFunP as simpleFunP
import bet.calculateP.calculateP as calculateP
import bet.postProcess.plotP as plotP
import bet.postProcess.plotDomains as plotD
import bet.sample as samp
import bet.sampling.basicSampling as bsam
from lbModel import lb_model
from myModel import my_model
from Compute_Save_KL import computeSaveKL
# Interface BET to the model.
sampler = bsam.sampler(lb_model)
# Define the number of KL terms to use to represent permeability field
num_KL_terms = 2
# Compute and save the KL expansion -- can comment out after running once
computeSaveKL(num_KL_terms)
# Initialize input parameter sample set object
input_samples = samp.sample_set(num_KL_terms)
# Set parameter domain
KL_term_min = -3.0
KL_term_max = 3.0
input_samples.set_domain(np.repeat([[KL_term_min, KL_term_max]],
num_KL_terms,
axis=0))
'''
Suggested changes for user:
Try with and without random sampling.
If using regular sampling, try different numbers of samples
    per dimension (be careful if the dimension is not 2).
'''
# Generate samples on the parameter space
randomSampling = False
if randomSampling is True:
input_samples = sampler.random_sample_set(
'random', input_samples, num_samples=1E2)
else:
input_samples = sampler.regular_sample_set(
input_samples, num_samples_per_dim=[10, 10])
'''
A standard Monte Carlo (MC) assumption is that every Voronoi cell
has the same volume. If a regular grid of samples was used, then
the standard MC assumption is true.
'''
MC_assumption = True
# Estimate volumes of Voronoi cells associated with the parameter samples
if MC_assumption is False:
input_samples.estimate_volume(n_mc_points=1E5)
else:
input_samples.estimate_volume_mc()
# Create the discretization object using the input samples
my_discretization = sampler.compute_QoI_and_create_discretization(
input_samples, savefile='FEniCS_Example.txt.gz')
'''
Suggested changes for user:
Try different reference parameters.
'''
# Define the reference parameter
#param_ref = np.zeros((1,num_KL_terms))
param_ref = np.ones((1, num_KL_terms))
# Compute the reference QoI
Q_ref = my_model(param_ref)
# Create some plots of input and output discretizations
plotD.scatter_2D(input_samples, ref_sample=param_ref[0, :],
filename='FEniCS_ParameterSamples.eps')
if Q_ref.size == 2:
plotD.show_data_domain_2D(my_discretization, Q_ref=Q_ref[0, :],
file_extension="eps")
'''
Suggested changes for user:
Try different ways of discretizing the probability measure on D defined
as a uniform probability measure on a rectangle or interval depending
on choice of QoI_num in myModel.py.
'''
randomDataDiscretization = False
if randomDataDiscretization is False:
simpleFunP.regular_partition_uniform_distribution_rectangle_scaled(
data_set=my_discretization, Q_ref=Q_ref[0, :], rect_scale=0.1,
cells_per_dimension=3)
else:
simpleFunP.uniform_partition_uniform_distribution_rectangle_scaled(
data_set=my_discretization, Q_ref=Q_ref[0, :], rect_scale=0.1,
M=50, num_d_emulate=1E5)
# calculate probabilities
calculateP.prob(my_discretization)
########################################
# Post-process the results
########################################
# calculate 2d marginal probs
(bins, marginals2D) = plotP.calculate_2D_marginal_probs(input_samples,
nbins=20)
# smooth 2d marginals probs (optional)
marginals2D = plotP.smooth_marginals_2D(marginals2D, bins, sigma=0.5)
# plot 2d marginals probs
plotP.plot_2D_marginal_probs(marginals2D, bins, input_samples, filename="FEniCS",
lam_ref=param_ref[0, :], file_extension=".eps",
plot_surface=False)
# calculate 1d marginal probs
(bins, marginals1D) = plotP.calculate_1D_marginal_probs(input_samples,
nbins=20)
# smooth 1d marginal probs (optional)
marginals1D = plotP.smooth_marginals_1D(marginals1D, bins, sigma=0.5)
# plot 2d marginal probs
plotP.plot_1D_marginal_probs(marginals1D, bins, input_samples, filename="FEniCS",
lam_ref=param_ref[0, :], file_extension=".eps")
| gpl-3.0 | 435,122,958,647,670,100 | 34.052632 | 81 | 0.70458 | false |
fengbeihong/tempest_automate_ironic | tempest/services/baremetal/v1/json/baremetal_client.py | 1 | 15425 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.services.baremetal import base
from tempest.common import waiters
class BaremetalClientJSON(base.BaremetalClient):
"""
Base Tempest REST client for Ironic API v1.
"""
version = '1'
uri_prefix = 'v1'
@base.handle_errors
def list_nodes(self, **kwargs):
"""List all existing nodes."""
return self._list_request('nodes', **kwargs)
@base.handle_errors
def list_chassis(self):
"""List all existing chassis."""
return self._list_request('chassis')
@base.handle_errors
def list_chassis_nodes(self, chassis_uuid):
"""List all nodes associated with a chassis."""
return self._list_request('/chassis/%s/nodes' % chassis_uuid)
@base.handle_errors
def list_ports(self, **kwargs):
"""List all existing ports."""
return self._list_request('ports', **kwargs)
@base.handle_errors
def list_node_ports(self, uuid):
"""List all ports associated with the node."""
return self._list_request('/nodes/%s/ports' % uuid)
@base.handle_errors
def list_nodestates(self, uuid):
"""List all existing states."""
return self._list_request('/nodes/%s/states' % uuid)
@base.handle_errors
def list_ports_detail(self, **kwargs):
"""Details list all existing ports."""
return self._list_request('/ports/detail', **kwargs)
@base.handle_errors
def list_drivers(self):
"""List all existing drivers."""
return self._list_request('drivers')
@base.handle_errors
def show_node(self, uuid):
"""
Gets a specific node.
:param uuid: Unique identifier of the node in UUID format.
:return: Serialized node as a dictionary.
"""
return self._show_request('nodes', uuid)
@base.handle_errors
def show_node_by_instance_uuid(self, instance_uuid):
"""
Gets a node associated with given instance uuid.
:param uuid: Unique identifier of the node in UUID format.
:return: Serialized node as a dictionary.
"""
uri = '/nodes/detail?instance_uuid=%s' % instance_uuid
return self._show_request('nodes',
uuid=None,
uri=uri)
@base.handle_errors
def show_chassis(self, uuid):
"""
Gets a specific chassis.
:param uuid: Unique identifier of the chassis in UUID format.
:return: Serialized chassis as a dictionary.
"""
return self._show_request('chassis', uuid)
@base.handle_errors
def show_port(self, uuid):
"""
Gets a specific port.
:param uuid: Unique identifier of the port in UUID format.
:return: Serialized port as a dictionary.
"""
return self._show_request('ports', uuid)
@base.handle_errors
def show_port_by_address(self, address):
"""
Gets a specific port by address.
:param address: MAC address of the port.
:return: Serialized port as a dictionary.
"""
uri = '/ports/detail?address=%s' % address
return self._show_request('ports', uuid=None, uri=uri)
def show_driver(self, driver_name):
"""
Gets a specific driver.
:param driver_name: Name of driver.
:return: Serialized driver as a dictionary.
"""
return self._show_request('drivers', driver_name)
@base.handle_errors
def create_node(self, chassis_id=None, **kwargs):
"""
Create a baremetal node with the specified parameters.
:param cpu_arch: CPU architecture of the node. Default: x86_64.
:param cpus: Number of CPUs. Default: 8.
:param local_gb: Disk size. Default: 1024.
:param memory_mb: Available RAM. Default: 4096.
:param driver: Driver name. Default: "fake"
:return: A tuple with the server response and the created node.
"""
node = {'chassis_uuid': chassis_id,
'properties': {'cpu_arch': kwargs.get('cpu_arch', 'x86_64'),
'cpus': kwargs.get('cpus', 8),
'local_gb': kwargs.get('local_gb', 1024),
'memory_mb': kwargs.get('memory_mb', 4096)},
'driver': kwargs.get('driver', 'fake')}
return self._create_request('nodes', node)
@base.handle_errors
def create_chassis(self, **kwargs):
"""
Create a chassis with the specified parameters.
:param description: The description of the chassis.
Default: test-chassis
:return: A tuple with the server response and the created chassis.
"""
chassis = {'description': kwargs.get('description', 'test-chassis')}
return self._create_request('chassis', chassis)
@base.handle_errors
def create_port(self, node_id, **kwargs):
"""
Create a port with the specified parameters.
:param node_id: The ID of the node which owns the port.
:param address: MAC address of the port.
:param extra: Meta data of the port. Default: {'foo': 'bar'}.
:param uuid: UUID of the port.
:return: A tuple with the server response and the created port.
"""
port = {'extra': kwargs.get('extra', {'foo': 'bar'}),
'uuid': kwargs['uuid']}
if node_id is not None:
port['node_uuid'] = node_id
if kwargs['address'] is not None:
port['address'] = kwargs['address']
return self._create_request('ports', port)
@base.handle_errors
def delete_node(self, uuid):
"""
Deletes a node having the specified UUID.
:param uuid: The unique identifier of the node.
:return: A tuple with the server response and the response body.
"""
return self._delete_request('nodes', uuid)
@base.handle_errors
def delete_chassis(self, uuid):
"""
Deletes a chassis having the specified UUID.
:param uuid: The unique identifier of the chassis.
:return: A tuple with the server response and the response body.
"""
return self._delete_request('chassis', uuid)
@base.handle_errors
def delete_port(self, uuid):
"""
Deletes a port having the specified UUID.
:param uuid: The unique identifier of the port.
:return: A tuple with the server response and the response body.
"""
return self._delete_request('ports', uuid)
@base.handle_errors
def update_node(self, uuid, **kwargs):
"""
Update the specified node.
:param uuid: The unique identifier of the node.
:return: A tuple with the server response and the updated node.
"""
node_attributes = ('properties/cpu_arch',
'properties/cpus',
'properties/local_gb',
'properties/memory_mb',
'driver',
'instance_uuid')
patch = self._make_patch(node_attributes, **kwargs)
return self._patch_request('nodes', uuid, patch)
@base.handle_errors
def update_chassis(self, uuid, **kwargs):
"""
Update the specified chassis.
:param uuid: The unique identifier of the chassis.
:return: A tuple with the server response and the updated chassis.
"""
chassis_attributes = ('description',)
patch = self._make_patch(chassis_attributes, **kwargs)
return self._patch_request('chassis', uuid, patch)
@base.handle_errors
def update_port(self, uuid, patch):
"""
Update the specified port.
:param uuid: The unique identifier of the port.
:param patch: List of dicts representing json patches.
:return: A tuple with the server response and the updated port.
"""
return self._patch_request('ports', uuid, patch)
@base.handle_errors
def set_node_power_state(self, node_uuid, state):
"""
Set power state of the specified node.
:param node_uuid: The unique identifier of the node.
        :param state: desired state to set (on/off/reboot).
"""
target = {'target': state}
return self._put_request('nodes/%s/states/power' % node_uuid,
target)
@base.handle_errors
def validate_driver_interface(self, node_uuid):
"""
Get all driver interfaces of a specific node.
:param uuid: Unique identifier of the node in UUID format.
"""
uri = '{pref}/{res}/{uuid}/{postf}'.format(pref=self.uri_prefix,
res='nodes',
uuid=node_uuid,
postf='validate')
return self._show_request('nodes', node_uuid, uri=uri)
@base.handle_errors
def set_node_boot_device(self, node_uuid, boot_device, persistent=False):
"""
Set the boot device of the specified node.
:param node_uuid: The unique identifier of the node.
:param boot_device: The boot device name.
:param persistent: Boolean value. True if the boot device will
persist to all future boots, False if not.
Default: False.
"""
request = {'boot_device': boot_device, 'persistent': persistent}
resp, body = self._put_request('nodes/%s/management/boot_device' %
node_uuid, request)
self.expected_success(204, resp.status)
return body
@base.handle_errors
def get_node_boot_device(self, node_uuid):
"""
Get the current boot device of the specified node.
:param node_uuid: The unique identifier of the node.
"""
path = 'nodes/%s/management/boot_device' % node_uuid
resp, body = self._list_request(path)
self.expected_success(200, resp.status)
return body
@base.handle_errors
def get_node_supported_boot_devices(self, node_uuid):
"""
Get the supported boot devices of the specified node.
:param node_uuid: The unique identifier of the node.
"""
path = 'nodes/%s/management/boot_device/supported' % node_uuid
resp, body = self._list_request(path)
self.expected_success(200, resp.status)
return body
@base.handle_errors
def get_console(self, node_uuid):
"""
Get connection information about the console.
:param node_uuid: Unique identifier of the node in UUID format.
"""
resp, body = self._show_request('nodes/states/console', node_uuid)
self.expected_success(200, resp.status)
return resp, body
@base.handle_errors
def set_console_mode(self, node_uuid, enabled):
"""
Start and stop the node console.
:param node_uuid: Unique identifier of the node in UUID format.
:param enabled: Boolean value; whether to enable or disable the
console.
"""
enabled = {'enabled': enabled}
resp, body = self._put_request('nodes/%s/states/console' % node_uuid,
enabled)
self.expected_success(202, resp.status)
return resp, body
@base.handle_errors
def create_node_advanced(self, driver, properties, driver_info, **kwargs):
"""
Create a baremetal node with the specified parameters.
:param driver: Driver name. Default: "fake"
:param properties
:param driver_info
:return: A tuple with the server response and the created node.
"""
node = {
'driver': driver,
'properties': properties,
'driver_info': driver_info
}
extra = kwargs.get('extra', None)
if extra is not None:
node['extra'] = extra
return self._create_request('nodes', node)
@base.handle_errors
def create_port_advanced(self, node_id, **kwargs):
"""
Create a port with the specified parameters.
:param node_id: The ID of the node which owns the port.
:param address: MAC address of the port.
:param extra: Meta data of the port. Default: {'foo': 'bar'}.
:param uuid: UUID of the port.
:return: A tuple with the server response and the created port.
"""
port = {'extra': kwargs.get('extra', {'foo': 'bar'}),
'uuid': kwargs['uuid']}
if node_id is not None:
port['node_uuid'] = node_id
if kwargs['address'] is not None:
port['address'] = kwargs['address']
return self._create_request('ports', port)
@base.handle_errors
def update_node_advanced(self, uuid, **kwargs):
"""
Update the specified node.
:param uuid: The unique identifier of the node.
:return: A tuple with the server response and the updated node.
"""
node_attributes = ('properties/capabilities',
)
patch = self._make_patch(node_attributes, **kwargs)
return self._patch_request('nodes', uuid, patch)
@base.handle_errors
def set_node_provision_state(self, node_uuid, state, optional=None):
"""
        Set provision state of the specified node.
:param node_uuid: The unique identifier of the node.
:param state: desired state to set
(active/deleted/rebuild/inspect/provide/manage/abort).
:param optional: configdrive or clean steps
"""
resp, versions = self.get_api_description()
target = {'target': state}
if optional is not None:
target.update(optional)
headers = {
'X-OpenStack-Ironic-API-Version': versions['default_version']['version']
}
return self._put_request('nodes/%s/states/provision' % node_uuid,
target, headers, extra_headers=True)
@base.handle_errors
def set_node_raid_config(self, node_uuid, target_raid_config):
"""
Set raid configuration of the node
:param node_uuid: The unique identifier of the node.
:param target_raid_config: raid configuration.
"""
resp, versions = self.get_api_description()
headers = {
'X-OpenStack-Ironic-API-Version': versions['default_version']['version']
}
return self._put_request('nodes/%s/states/raid' % node_uuid,
target_raid_config, headers, extra_headers=True)
| apache-2.0 | -6,374,396,648,853,870,000 | 31.610994 | 84 | 0.580097 | false |
anchore/anchore-engine | tests/unit/anchore_engine/apis/test_oauth.py | 1 | 6264 | import pytest
import time
from anchore_engine.apis.oauth import merge_client_metadata
from anchore_engine.apis.oauth import (
setup_oauth_client,
OAuth2Client,
CLIENT_GRANT_KEY,
ANONYMOUS_CLIENT_ID,
)
@pytest.mark.parametrize(
"existing_metadata, meta_to_add, expected_output",
[
(
{"grant_types": []},
{"grant_types": ["password"]},
{"grant_types": ["password"]},
),
(
{"grant_types": ["password"]},
{"grant_types": ["password"]},
{"grant_types": ["password"]},
),
(
{"grant_types": ["password"]},
{"grant_types": []},
{"grant_types": ["password"]},
),
(
{"grant_types": ["password"]},
{"grant_types": ["password", "bearer"]},
{"grant_types": ["password", "bearer"]},
),
(
{"grant_types": ["password", "foobar"]},
{"grant_types": ["password", "bearer"]},
{"grant_types": ["password", "bearer", "foobar"]},
),
(
{},
{"grant_types": ["password"]},
{"grant_types": ["password"]},
),
(
{},
{"grant_types": []},
{"grant_types": []},
),
(
None,
{"grant_types": []},
{"grant_types": []},
),
(
None,
{"grant_types": ["password"]},
{"grant_types": ["password"]},
),
],
)
def test_merge_client_metadata(existing_metadata, meta_to_add, expected_output):
"""
Unit test for merging client metadata records for the OAuth2Client
:param existing_metadata:
:param meta_to_add:
:param expected_output:
:return:
"""
merged = merge_client_metadata(existing_metadata, meta_to_add)
check_metadata(merged, expected_output)
def check_metadata(candidate: dict, expected: dict):
for k, v in expected.items():
if type(v) == list:
assert sorted(candidate.get(k)) == sorted(v)
else:
assert (
candidate.get(k) == v
), "Key {} from candidate {} did not match expected {}".format(
k, candidate, v
)
def password_oauth2_client():
c = OAuth2Client()
c.client_id = ANONYMOUS_CLIENT_ID
c.user_id = None
c.client_secret = None
# These are no-ops effectively since the client isn't authenticated itself
c.client_id_issued_at = time.time() - 100
c.client_secret_expires_at = time.time() + 1000
c.set_client_metadata(
{
"token_endpoint_auth_method": "none", # This should be a function of the grant type input but all of our types are this currently
"client_name": ANONYMOUS_CLIENT_ID,
"grant_types": ["password"],
}
)
return c
def legacy_password_oauth2_client():
c = OAuth2Client()
c.client_id = ANONYMOUS_CLIENT_ID
c.user_id = None
c.client_secret = None
# These are no-ops effectively since the client isn't authenticated itself
c.client_id_issued_at = time.time() - 100
c.client_secret_expires_at = time.time() + 1000
c.set_client_metadata(
{
"grant_types": ["password"],
}
)
return c
def no_metadata_oauth2_client():
c = OAuth2Client()
c.client_id = ANONYMOUS_CLIENT_ID
c.user_id = None
c.client_secret = None
# These are no-ops effectively since the client isn't authenticated itself
c.client_id_issued_at = time.time() - 100
c.client_secret_expires_at = time.time() + 1000
return c
def empty_metadata_oauth2_client():
c = OAuth2Client()
c.client_id = ANONYMOUS_CLIENT_ID
c.user_id = None
c.client_secret = None
# These are no-ops effectively since the client isn't authenticated itself
c.client_id_issued_at = time.time() - 100
c.client_secret_expires_at = time.time() + 1000
c.set_client_metadata({})
return c
def authorization_oauth2_client():
c = OAuth2Client()
c.client_id = ANONYMOUS_CLIENT_ID
c.user_id = None
c.client_secret = None
c.client_id_issued_at = time.time() - 100
c.client_secret_expires_at = time.time() + 1000
c.set_client_metadata(
{
"token_endpoint_auth_method": "none", # This should be a function of the grant type input but all of our types are this currently
"client_name": ANONYMOUS_CLIENT_ID,
"grant_types": ["authorization"],
}
)
return c
def combined_oauth2_client():
c = OAuth2Client()
c.client_id = ANONYMOUS_CLIENT_ID
c.user_id = None
c.client_secret = None
c.client_id_issued_at = time.time() - 100
c.client_secret_expires_at = time.time() + 1000
c.set_client_metadata(
{
"token_endpoint_auth_method": "none", # This should be a function of the grant type input but all of our types are this currently
"client_name": ANONYMOUS_CLIENT_ID,
"grant_types": ["authorization", "password"],
}
)
return c
@pytest.mark.parametrize(
"found_client, add_client, expected_result",
[
(
password_oauth2_client(),
authorization_oauth2_client(),
combined_oauth2_client(),
),
(
legacy_password_oauth2_client(),
authorization_oauth2_client(),
combined_oauth2_client(),
),
(
no_metadata_oauth2_client(),
authorization_oauth2_client(),
authorization_oauth2_client(),
),
(
empty_metadata_oauth2_client(),
authorization_oauth2_client(),
authorization_oauth2_client(),
),
],
)
def test_setup_oauth_client(found_client, add_client, expected_result):
"""
:param found_client:
:param add_client:
:param expected_result:
:return:
"""
assert found_client.client_id == expected_result.client_id
result = setup_oauth_client(found_client, add_client)
assert result is not None
check_metadata(
result.client_metadata,
expected_result.client_metadata,
)
| apache-2.0 | -2,403,788,572,274,175,000 | 27.472727 | 142 | 0.554598 | false |
Sriee/epi | data_structures/backtracking/combination_sums.py | 1 | 5229 | def combination_sum(candidates, target):
"""
Leet code. Solution -> Accepted
Run Time: 100 ms. Not optimal but this gives a template for writing backtracking
problems
    Given an array without duplicates, find the combinations of candidates
    whose sum equals the target. Each element may be used an unlimited
    number of times.
Examples:
nums: [2, 3, 5] target = 8
Output should be
[
[2, 2, 2, 2],
[2, 3, 3]
[3, 5]
]
:param candidates: Given array
:param target: target sum
    :return: list of combinations whose sum equals the target sum
"""
res = []
def dfs(candidates, target, index, path):
if target == 0:
res.append(path.copy())
return
for i in range(index, len(candidates)):
if target - candidates[i] < 0:
continue
path.append(candidates[i])
dfs(candidates, target - candidates[i], i, path)
path.pop()
dfs(candidates, target, 0, [])
return res
def combination_sum2(candidates, target):
"""
Leet code. Solution -> Accepted
Run Time: 52 ms. Optimal solution
    Given an array that may contain duplicates, find the combinations of
    candidates whose sum equals the target. Each element may be used at most
    once, and each combination should appear only once in the output.
Examples:
nums: [10, 1, 2, 7, 6, 1, 5] target = 8
Output should be
[
[1, 7],
[1, 2, 5],
[2, 6],
[1, 1, 6]
]
:param candidates: Given array
:param target: target sum
    :return: list of combinations whose sum equals the target sum
"""
res = []
def dfs(target, idx, path):
if target == 0:
res.append(path)
return
for i in range(idx, len(candidates)):
if i > idx and candidates[i] == candidates[i - 1]:
continue
if target - candidates[i] < 0 or candidates[i] > target:
break
dfs(target - candidates[i], i + 1, path + [candidates[i]])
dfs(target, 0, [])
return res
def combination_sum3(k, n):
"""
Leet code. Solution -> Accepted
Run time: 36 ms Optimized. Optimal solution
Find all possible combinations of k numbers that add up to a number n, given that
only numbers from 1 to 9 can be used and each combination should be a unique set of numbers.
Examples:
k: 3 n: 7
Output should be
[
[1, 2, 4],
]
:param k: Length of combination
:param n: target sum
    :return: list of k-number combinations whose sum equals n
"""
res, candidates = [], [i for i in range(1, 10)]
def dfs(candidates, target, path):
if target == 0 and len(path) == k:
res.append(path)
return
for i in range(len(candidates)):
if target - candidates[i] >= 0 and len(path) + 1 <= k:
dfs(candidates[i + 1:], target - candidates[i], path + [candidates[i]])
else:
break
dfs(candidates, n, [])
return res
def combination_sum_4(nums, target):
"""
Leet Code. Time Limit Exceeded
Given an integer array with all positive numbers and no duplicates, find the number
of possible combinations that add up to a positive integer target.
Example:
nums: [1, 2, 3], target: 4
Output should be 7 and the combinations are
[1, 1, 1, 1]
[1, 1, 2]
[1, 2, 1]
[1, 3]
[2, 1, 1]
[2, 2]
[3, 1]
:param nums: Given array
:param target: target sum
:return: total number of combinations that can formed equal to sum
"""
output = 0
def combination_helper(nums, target):
nonlocal output
if target == 0:
output += 1
return
for i in range(len(nums)):
if target - nums[i] < 0:
continue
combination_helper(nums, target - nums[i])
combination_helper(nums, target)
return output
def combination_sum_4_optimized(nums, target):
"""
Leet Code. Solution -> Accepted
Given an integer array with all positive numbers and no duplicates, find the number
of possible combinations that add up to a positive integer target.
Example:
nums: [1, 2, 3], target: 4
Output should be 7 and the combinations are
[1, 1, 1, 1]
[1, 1, 2]
[1, 2, 1]
[1, 3]
[2, 1, 1]
[2, 2]
[3, 1]
:param nums: Given array
:param target: target sum
:return: total number of combinations that can formed equal to sum
"""
nums.sort()
mem = {}
def combination_helper(target):
if target in mem:
return mem[target]
count = 0
for i in nums:
if i > target:
break
elif i == target:
count += 1
break
else:
count += combination_helper(target - i)
mem[target] = count
return count
return combination_helper(target)
print(combination_sum_4_optimized([1, 2, 3], 4))
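# Quick sanity checks for the other variants; the expected values are taken
# from the docstring examples above:
assert combination_sum([2, 3, 5], 8) == [[2, 2, 2, 2], [2, 3, 3], [3, 5]]
assert combination_sum_4([1, 2, 3], 4) == 7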
| gpl-3.0 | 4,889,357,968,295,173,000 | 23.665094 | 96 | 0.544081 | false |
IIIT-Delhi/jobport | placement/jobport/urls.py | 1 | 4188 | # //=======================================================================
# // Copyright JobPort, IIIT Delhi 2015.
# // Distributed under the MIT License.
# // (See accompanying file LICENSE or copy at
# // http://opensource.org/licenses/MIT)
# //=======================================================================
# __author__ = 'naman'
from django.conf.urls import patterns, url
from jobport import views
handler404 = 'views.my_404_view'
urlpatterns = patterns('',
url(r'^$', views.home, name='home'),
url(r'^logout/$', views.logout, name='logout'),
url(r'^needlogin/$', views.needlogin, name='needlogin'),
url(r'^newuser/$', views.newuser, name='newuser'),
url(r'^openjob/$', views.openjob, name='openjob'),
url(r'^profile/$', views.profile, name='profile'),
url(r'^stats/$', views.stats, name='stats'),
url(r'^uploadcgpa/$', views.uploadcgpa, name='uploadcgpa'),
url(r'^students/(?P<studentid>.*)/edit/$',
views.admineditstudent, name='admineditstudent'),
url(r'^job/(?P<jobid>\d+)/$',
views.jobpage, name='jobpage'),
url(r'^job/(?P<jobid>\d+)/apply/$',
views.jobapply, name='jobapply'),
url(r'^job/(?P<jobid>\d+)/withdraw/$',
views.jobwithdraw, name='jobwithdraw'),
url(r'^job/(?P<jobid>\d+)/edit/$',
views.jobedit, name='jobedit'),
url(r'^job/(?P<jobid>\d+)/delete/$',
views.jobdelete, name='jobdelete'),
url(r'^job/(?P<jobid>\d+)/sendselectedemail/$', views.sendselectedemail,
name='sendselectedemail'),
url(r'^job/(?P<jobid>\d+)/applicants/$',
views.jobapplicants, name='jobapplicants'),
url(r'^job/(?P<jobid>\d+)/getresume/$',
views.getresumes, name='jobgetresumes'),
url(r'^job/(?P<jobid>\d+)/getcsv/$',
views.getjobcsv, name='jobgetcsvs'),
url(r'^job/(?P<jobid>\d+)/selections/$',
views.adminjobselected, name='adminjobselected'),
url(r'^myapplications/$', views.myapplications,
name='myapplications'),
url(r'^batches/$', views.viewbatches, name='viewbatches'),
url(r'^openbatch/$', views.batchcreate, name='openbatch'),
url(r'^batch/(?P<batchid>\d+)/$',
views.batchpage, name='batchpage'),
url(r'^batch/(?P<batchid>\d+)/delete/$',
views.batchdestroy, name='batchdestroy'),
url(r'^batch/(?P<batchid>\d+)/edit/$',
views.batchedit, name='batchedit'),
url(r'^batch/(?P<batchid>\d+)/getbatchlist/$',
views.getbatchlist, name='getbatchlist'),
url(r'^batch/(?P<batchid>\d+)/addstudentstobatch/$', views.uploadstudentsinbatch,
name='uploadstudentsinbatch'),
url(r'^batch/(?P<batchid>\d+)/getbatchresume/$',
views.getbatchresumes, name='getbatchresumes'),
url(r'^feedback/$', views.feedback, name='feedback'),
url(r'^extraStuff/$', views.blockedUnplacedlist,
name='blockedUnplacedlist'),
url(r'files/resume/(.+)', views.fileview, name='fileview'),
url(r'files/jobfiles/(.+)',
views.docfileview, name='docfileview'),
url(r'search/results/$', views.search, name='search'),
# url(r'material.min.js.map$',views.test,name='test'),
)
| mit | -534,863,692,878,665,340 | 57.985915 | 104 | 0.443649 | false |
bklang/GO2 | stats.py | 1 | 3669 | #
# stats class for Gig-o-Matic 2
#
# Aaron Oppenheimer
# 29 Jan 2014
#
from google.appengine.ext import ndb
from requestmodel import *
import webapp2_extras.appengine.auth.models
import webapp2
from debug import *
import assoc
import gig
import band
import member
import logging
import json
def stats_key(stats_name='stats_key'):
"""Constructs a Datastore key for a Stats entity with stats_name."""
return ndb.Key('stats', stats_name)
class BandStats(ndb.Model):
""" class to hold statistics """
band = ndb.KeyProperty()
date = ndb.DateProperty(auto_now_add=True)
number_members = ndb.IntegerProperty()
number_upcoming_gigs = ndb.IntegerProperty()
number_gigs_created_today = ndb.IntegerProperty()
def get_band_stats(the_band_key):
""" Return all the stats we have for a band """
stats_query = BandStats.query( BandStats.band==the_band_key).order(-BandStats.date)
the_stats = stats_query.fetch(limit=30)
return the_stats
def make_band_stats(the_band_key):
""" make a stats object for a band key and return it """
the_stats = BandStats(band=the_band_key)
all_member_keys = assoc.get_member_keys_of_band_key(the_band_key)
the_stats.number_members = len(all_member_keys)
logging.info("band {0} stats: {1} members".format(the_band_key.id(), the_stats.number_members))
all_gigs = gig.get_gigs_for_band_keys(the_band_key, keys_only=True)
the_stats.number_upcoming_gigs = len(all_gigs)
logging.info("band {0} stats: {1} upcoming gigs".format(the_band_key.id(), the_stats.number_upcoming_gigs))
today_gigs = gig.get_gigs_for_creation_date(the_band_key, the_stats.date)
the_stats.number_gigs_created_today = len(today_gigs)
the_stats.put()
def delete_band_stats(the_band_key):
""" delete all stats for a band """
stats_query = BandStats.query( BandStats.band==the_band_key)
the_stats = stats_query.fetch(keys_only=True)
ndb.delete_multi(the_stats)
#####
#
# Page Handlers
#
#####
class StatsPage(BaseHandler):
"""Page for showing stats"""
@user_required
def get(self):
self._make_page(the_user=self.user)
def _make_page(self,the_user):
the_member_keys = member.get_all_members(order=False, keys_only=True, verified_only=True)
the_bands = band.get_all_bands()
stats=[]
inactive_bands=[]
for a_band in the_bands:
is_band_active=False
a_stat = get_band_stats(a_band.key)
the_count_data=[]
for s in a_stat:
if s.number_upcoming_gigs > 0:
is_band_active = True
the_count_data.append([s.date.year, s.date.month-1, s.date.day, s.number_members, s.number_upcoming_gigs])
if is_band_active:
the_count_data_json=json.dumps(the_count_data)
stats.append([a_band, the_count_data_json])
else:
inactive_bands.append(a_band)
template_args = {
'the_stats' : stats,
'num_members' : len(the_member_keys),
'num_bands' : len(the_bands),
'num_active_bands' : len(the_bands) - len(inactive_bands),
'inactive_bands' : inactive_bands
}
self.render_template('stats.html', template_args)
##########
#
# auto generate stats
#
##########
class AutoGenerateStats(BaseHandler):
""" automatically generate statistics """
def get(self):
the_band_keys = band.get_all_bands(keys_only = True)
for band_key in the_band_keys:
make_band_stats(band_key) | gpl-3.0 | 2,893,236,756,040,592,400 | 29.330579 | 122 | 0.620878 | false |
ray-project/ray | rllib/env/wrappers/dm_control_wrapper.py | 1 | 7328 | """
DeepMind Control Suite Wrapper directly sourced from:
https://github.com/denisyarats/dmc2gym
MIT License
Copyright (c) 2020 Denis Yarats
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from gym import core, spaces
try:
from dm_env import specs
except ImportError:
specs = None
try:
# Suppress MuJoCo warning (dm_control uses absl logging).
import absl.logging
absl.logging.set_verbosity("error")
from dm_control import suite
except (ImportError, OSError):
suite = None
import numpy as np
def _spec_to_box(spec):
def extract_min_max(s):
assert s.dtype == np.float64 or s.dtype == np.float32
dim = np.int(np.prod(s.shape))
if type(s) == specs.Array:
bound = np.inf * np.ones(dim, dtype=np.float32)
return -bound, bound
elif type(s) == specs.BoundedArray:
zeros = np.zeros(dim, dtype=np.float32)
return s.minimum + zeros, s.maximum + zeros
mins, maxs = [], []
for s in spec:
mn, mx = extract_min_max(s)
mins.append(mn)
maxs.append(mx)
low = np.concatenate(mins, axis=0)
high = np.concatenate(maxs, axis=0)
assert low.shape == high.shape
return spaces.Box(low, high, dtype=np.float32)
def _flatten_obs(obs):
obs_pieces = []
for v in obs.values():
flat = np.array([v]) if np.isscalar(v) else v.ravel()
obs_pieces.append(flat)
return np.concatenate(obs_pieces, axis=0)
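# Example: _flatten_obs({'pos': np.zeros(3), 'vel': 1.0}) yields an array of
# shape (4,): scalar values are wrapped as length-1 arrays before concatenation.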
class DMCEnv(core.Env):
def __init__(self,
domain_name,
task_name,
task_kwargs=None,
visualize_reward=False,
from_pixels=False,
height=64,
width=64,
camera_id=0,
frame_skip=2,
environment_kwargs=None,
channels_first=True,
preprocess=True):
self._from_pixels = from_pixels
self._height = height
self._width = width
self._camera_id = camera_id
self._frame_skip = frame_skip
self._channels_first = channels_first
self.preprocess = preprocess
if specs is None:
raise RuntimeError((
"The `specs` module from `dm_env` was not imported. Make sure "
"`dm_env` is installed and visible in the current python "
"environment."))
if suite is None:
raise RuntimeError(
("The `suite` module from `dm_control` was not imported. Make "
"sure `dm_control` is installed and visible in the current "
"python enviornment."))
# create task
self._env = suite.load(
domain_name=domain_name,
task_name=task_name,
task_kwargs=task_kwargs,
visualize_reward=visualize_reward,
environment_kwargs=environment_kwargs)
# true and normalized action spaces
self._true_action_space = _spec_to_box([self._env.action_spec()])
self._norm_action_space = spaces.Box(
low=-1.0,
high=1.0,
shape=self._true_action_space.shape,
dtype=np.float32)
# create observation space
if from_pixels:
shape = [3, height,
width] if channels_first else [height, width, 3]
self._observation_space = spaces.Box(
low=0, high=255, shape=shape, dtype=np.uint8)
if preprocess:
self._observation_space = spaces.Box(
low=-0.5, high=0.5, shape=shape, dtype=np.float32)
else:
self._observation_space = _spec_to_box(
self._env.observation_spec().values())
self._state_space = _spec_to_box(self._env.observation_spec().values())
self.current_state = None
def __getattr__(self, name):
return getattr(self._env, name)
def _get_obs(self, time_step):
if self._from_pixels:
obs = self.render(
height=self._height,
width=self._width,
camera_id=self._camera_id)
if self._channels_first:
obs = obs.transpose(2, 0, 1).copy()
if self.preprocess:
obs = obs / 255.0 - 0.5
else:
obs = _flatten_obs(time_step.observation)
return obs
def _convert_action(self, action):
action = action.astype(np.float64)
true_delta = self._true_action_space.high - self._true_action_space.low
norm_delta = self._norm_action_space.high - self._norm_action_space.low
action = (action - self._norm_action_space.low) / norm_delta
action = action * true_delta + self._true_action_space.low
action = action.astype(np.float32)
return action
@property
def observation_space(self):
return self._observation_space
@property
def state_space(self):
return self._state_space
@property
def action_space(self):
return self._norm_action_space
def step(self, action):
assert self._norm_action_space.contains(action)
action = self._convert_action(action)
assert self._true_action_space.contains(action)
reward = 0
extra = {"internal_state": self._env.physics.get_state().copy()}
for _ in range(self._frame_skip):
time_step = self._env.step(action)
reward += time_step.reward or 0
done = time_step.last()
if done:
break
obs = self._get_obs(time_step)
self.current_state = _flatten_obs(time_step.observation)
extra["discount"] = time_step.discount
return obs, reward, done, extra
def reset(self):
time_step = self._env.reset()
self.current_state = _flatten_obs(time_step.observation)
obs = self._get_obs(time_step)
return obs
def render(self, mode="rgb_array", height=None, width=None, camera_id=0):
assert mode == "rgb_array", "only support for rgb_array mode"
height = height or self._height
width = width or self._width
camera_id = camera_id or self._camera_id
return self._env.physics.render(
height=height, width=width, camera_id=camera_id)
| apache-2.0 | -323,708,426,664,062,600 | 34.572816 | 79 | 0.600573 | false |
nathanbjenx/cairis | cairis/bin/gt2pc.py | 1 | 6864 | #!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import string
import argparse
import csv
__author__ = 'Shamal Faily'
def remspace(my_str):
if len(my_str) < 2: # returns ' ' unchanged
return my_str
if my_str[-1] == '\n':
if my_str[-2] == ' ':
return my_str[:-2] + '\n'
if my_str[-1] == ' ':
return my_str[:-1]
return my_str
def main(args=None):
parser = argparse.ArgumentParser(description='Computer Aided Integration of Requirements and Information Security - Grounded Theory to Persona Case converter')
parser.add_argument('modelFile',help='model file to create')
parser.add_argument('--context',dest='contextName',help='model context')
parser.add_argument('--originator',dest='originatorName',help='model originator')
parser.add_argument('--concepts',dest='conceptsFile',help='grounded theory model concepts')
parser.add_argument('--propositions',dest='propositionsFile',help='Propositions associated with grounded theory model quotations')
parser.add_argument('--characteristics',dest='characteristicsFile',help='Persona characteristics associated with grounded theory model associations')
parser.add_argument('--narratives',dest='narrativesFile',help='Persona narratives')
args = parser.parse_args()
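  # Expected CSV column layouts, as consumed below (field names are informal):
  #   concepts:        code, name, version, date, authors
  #   propositions:    id ("<code>-<n>"), name, description, contributor
  #   characteristics: relationship, characteristic, label, persona,
  #                    variable code, grounds ids, warrant ids, qualifier,
  #                    rebuttal ids
  #   narratives:      persona, variable code, narrative text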
xmlHdr = '<?xml version="1.0"?>\n<!DOCTYPE cairis_model PUBLIC "-//CAIRIS//DTD MODEL 1.0//EN" "http://cairis.org/dtd/cairis_model.dtd">\n\n<cairis_model>\n\n'
xmlHdr += '<cairis>\n <project_settings name="' + args.contextName + '">\n <contributors>\n <contributor first_name="None" surname="None" affiliation="' + args.originatorName + '" role="Scribe" />\n </contributors>\n </project_settings>\n <environment name="' + args.contextName + '" short_code="' + args.contextName + '">\n <definition>' + args.contextName + '</definition>\n <asset_values>\n <none>TBC</none>\n <low>TBC</low>\n <medium>TBC</medium>\n <high>TBC</high>\n </asset_values>\n </environment>\n</cairis>\n\n<riskanalysis>\n <role name="Undefined" type="Stakeholder" short_code="UNDEF">\n <description>Undefined</description>\n </role>\n</riskanalysis>\n\n<usability>\n'
xmlBuf = ''
conceptDict = {}
with open(args.conceptsFile,'r') as cFile:
cReader = csv.reader(cFile, delimiter = ',', quotechar='"')
for row in cReader:
edCode = row[0]
edName = row[1] + ' GT concept'
conceptDict[edCode] = edName
edVersion = row[2]
edDate = row[3]
edAuthors = row[4]
xmlBuf += '<external_document name=\"' + edName + '\" version=\"' + edVersion + '\" date=\"' + edDate + '\" authors=\"' + edAuthors + '\">\n <description>' + edName + '</description>\n</external_document>\n'
xmlBuf += '\n'
propDict = {}
with open(args.propositionsFile,'r') as pFile:
pReader = csv.reader(pFile, delimiter = ',', quotechar='"')
for row in pReader:
pId = row[0]
edCode,pNo = pId.split('-')
docName = conceptDict[edCode]
pName = row[1]
pDesc = row[2]
pContrib = row[3]
propDict[pId] = (pName,pDesc)
xmlBuf += '<document_reference name=\"' + pName + '\" contributor=\"' + pContrib + '\" document=\"' + docName + '\">\n <excerpt>' + pDesc + '</excerpt>\n</document_reference>\n'
xmlBuf += '\n'
xmlBuf += '\n'
bvDict = {}
bvDict['ACT'] = 'Activities'
bvDict['ATT'] = 'Attitudes'
bvDict['APT'] = 'Aptitudes'
bvDict['MOT'] = 'Motivations'
bvDict['SKI'] = 'Skills'
bvDict['INT'] = 'Intrinsic'
bvDict['CON'] = 'Contextual'
personaNames = set([])
pcf = open(args.characteristicsFile,"r")
for li in pcf.readlines():
li = string.strip(li)
pce = li.split(',')
gtr = pce[0]
pcName = pce[1]
labelName = pce[2]
pName = pce[3]
if pName == 'NONE':
continue
personaNames.add(pName)
bvName = bvDict[pce[4]]
gcList = pce[5].split(' ')
gList = []
for gc in gcList:
if gc != '':
gVal = propDict[gc]
gList.append((gVal[0],gVal[1],'document'))
wcList = pce[6].split(' ')
wList = []
for wc in wcList:
if wc != '':
wVal = propDict[wc]
wList.append((wVal[0],wVal[1],'document'))
modQual = pce[7]
rcList = pce[8].split(' ')
rList = []
for rc in rcList:
if rc != '':
rVal = propDict[rc]
rList.append((rVal[0],rVal[1],'document'))
xmlBuf += '<persona_characteristic persona=\"' + pName + '\" behavioural_variable=\"' + bvName + '\" modal_qualifier=\"' + modQual + '\" >\n <definition>' + pcName + '</definition>\n'
for g in gList:
xmlBuf += ' <grounds type=\"document\" reference=\"' + g[0] + '\" />\n'
for w in wList:
xmlBuf += ' <warrant type=\"document\" reference=\"' + w[0] + '\" />\n'
for r in rList:
xmlBuf += ' <rebuttal type=\"document\" reference=\"' + r[0] + '\" />\n'
xmlBuf += '</persona_characteristic>\n'
pcf.close()
pnDict = {}
with open(args.narrativesFile,'r') as nFile:
nReader = csv.reader(nFile, delimiter = ',', quotechar='"')
for row in nReader:
pnDict[(row[0],row[1])] = row[2]
pHdr = ''
for personaName in personaNames:
pHdr += '<persona name=\"' + personaName + '\" type=\"Primary\" assumption_persona=\"FALSE\" image=\"\" >\n <activities>' + pnDict[(personaName,'ACT')] + '</activities>\n <attitudes>' + pnDict[(personaName,'ATT')] + '</attitudes>\n <aptitudes>' + pnDict[(personaName,'APT')] + '</aptitudes>\n <motivations>' + pnDict[(personaName,'MOT')] + '</motivations>\n <skills>' + pnDict[(personaName,'SKI')] + '</skills>\n <intrinsic>' + pnDict[(personaName,'INT')] + '</intrinsic>\n <contextual>' + pnDict[(personaName,'CON')] + '</contextual>\n<persona_environment name=\"' + args.contextName + '\" is_direct="TRUE">\n <persona_role name="Undefined" />\n <narrative>Nothing stipulated</narrative>\n</persona_environment>\n</persona>\n\n'
xmlBuf = xmlHdr + '\n' + pHdr + '\n' + xmlBuf + '\n</usability>\n</cairis_model>'
xmlOut = open(args.modelFile,"w")
xmlOut.write(xmlBuf)
xmlOut.close()
if __name__ == '__main__':
main()
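
# Assumed input layouts, inferred from the parsing above (a sketch, not part
# of the original script):
#   concepts CSV:        code, name, version, date, authors
#   propositions CSV:    id ("<code>-<n>"), name, description, contributor
#   characteristics CSV: gt code, characteristic, label, persona, variable
#                        code, grounds, warrants, qualifier, rebuttals
#   narratives CSV:      persona, variable code (ACT/ATT/...), narrative text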
| apache-2.0 | -7,310,285,682,522,548,000 | 43.571429 | 741 | 0.626748 | false |
kajic/django-javascript-features | setup.py | 1 | 1024 | from setuptools import setup, find_packages
setup(
name="django-javascript-features",
version="0.1",
    description="Helps initializing javascript modules on the pages that need them",
long_description=open('README.rst').read(),
author='Robert Kajic',
author_email='[email protected]',
url='https://github.com/kajic/django-javascript-features',
download_url='https://github.com/kajic/django-javascript-features/django-javascript-features/downloads',
license="MIT",
packages=find_packages(),
include_package_data=True,
zip_safe=False, # because we're including media that Django needs
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| mit | -4,747,344,510,530,771,000 | 38.384615 | 108 | 0.666016 | false |
ping/instagram_private_api | instagram_web_api/http.py | 1 | 3461 | from io import BytesIO
import sys
import codecs
import mimetypes
import random
import string
from .compat import compat_cookiejar, compat_pickle
class ClientCookieJar(compat_cookiejar.CookieJar):
"""Custom CookieJar that can be pickled to/from strings
"""
def __init__(self, cookie_string=None, policy=None):
compat_cookiejar.CookieJar.__init__(self, policy)
if cookie_string:
if isinstance(cookie_string, bytes):
self._cookies = compat_pickle.loads(cookie_string)
else:
self._cookies = compat_pickle.loads(cookie_string.encode('utf-8'))
@property
def auth_expires(self):
try:
return min([
cookie.expires for cookie in self
if cookie.name in ('sessionid', 'ds_user_id', 'ds_user')
and cookie.expires])
except ValueError:
# empty sequence
pass
return None
@property
def expires_earliest(self):
"""For backward compatibility"""
return self.auth_expires
def dump(self):
return compat_pickle.dumps(self._cookies)
class MultipartFormDataEncoder(object):
"""
Modified from
http://stackoverflow.com/questions/1270518/python-standard-library-to-post-multipart-form-data-encoded-data
"""
def __init__(self, boundary=None):
self.boundary = boundary or \
''.join(random.choice(string.ascii_letters + string.digits + '_-') for _ in range(30))
self.content_type = 'multipart/form-data; boundary={}'.format(self.boundary)
@classmethod
def u(cls, s):
if sys.hexversion < 0x03000000 and isinstance(s, str):
s = s.decode('utf-8')
if sys.hexversion >= 0x03000000 and isinstance(s, bytes):
s = s.decode('utf-8')
return s
def iter(self, fields, files):
"""
:param fields: sequence of (name, value) elements for regular form fields
:param files: sequence of (name, filename, contenttype, filedata) elements for data to be uploaded as files
:return:
"""
encoder = codecs.getencoder('utf-8')
for (key, value) in fields:
key = self.u(key)
yield encoder('--{}\r\n'.format(self.boundary))
yield encoder(self.u('Content-Disposition: form-data; name="{}"\r\n').format(key))
yield encoder('\r\n')
if isinstance(value, (int, float)):
value = str(value)
yield encoder(self.u(value))
yield encoder('\r\n')
for (key, filename, contenttype, fd) in files:
key = self.u(key)
filename = self.u(filename)
yield encoder('--{}\r\n'.format(self.boundary))
yield encoder(self.u('Content-Disposition: form-data; name="{}"; filename="{}"\r\n').format(key, filename))
yield encoder('Content-Type: {}\r\n'.format(
contenttype or mimetypes.guess_type(filename)[0] or 'application/octet-stream'))
yield encoder('Content-Transfer-Encoding: binary\r\n')
yield encoder('\r\n')
yield (fd, len(fd))
yield encoder('\r\n')
yield encoder('--{}--\r\n'.format(self.boundary))
def encode(self, fields, files):
body = BytesIO()
for chunk, _ in self.iter(fields, files):
body.write(chunk)
return self.content_type, body.getvalue()
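

if __name__ == '__main__':
    # A minimal self-check (not part of the original module). Because of the
    # relative import above, run it in package context, e.g.:
    #   python -m instagram_web_api.http
    jar = ClientCookieJar()
    restored = ClientCookieJar(cookie_string=jar.dump())
    assert restored.auth_expires is None  # an empty jar has no auth cookies
    encoder = MultipartFormDataEncoder(boundary='test-boundary')
    content_type, body = encoder.encode(
        fields=[('caption', 'hello')],
        files=[('photo', 'photo.jpg', 'image/jpeg', b'\xff\xd8\xff')])
    print(content_type)
    print('%d bytes' % len(body))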
| mit | -6,506,087,738,307,260,000 | 35.819149 | 119 | 0.588847 | false |
wdm0006/twitter-pandas | examples/friendship_methods.py | 1 | 1117 | from twitterpandas import TwitterPandas
from examples.keys import TWITTER_OAUTH_SECRET, TWITTER_OAUTH_TOKEN, TWITTER_CONSUMER_SECRET, TWITTER_CONSUMER_KEY
__author__ = 'keyanvakil'
if __name__ == '__main__':
# create a twitter pandas client object
tp = TwitterPandas(
TWITTER_OAUTH_TOKEN,
TWITTER_OAUTH_SECRET,
TWITTER_CONSUMER_KEY,
TWITTER_CONSUMER_SECRET
)
user_id = tp.api_id
df = tp.followers_friendships(id_=user_id, rich=True, limit=5)
print(df.head())
print('\n')
print(df.info())
print('\n\n')
df = tp.friends_friendships(id_=user_id, rich=True, limit=5)
print(df.head())
print('\n')
print(df.info())
print('\n\n')
friend_id = df[0][0]
print("friend_id = " + str(friend_id))
print("Does user_id follow friend_id")
if tp.exists_friendship(source_id=user_id, target_id=friend_id):
print("True")
df = tp.show_friendship(source_id=user_id, target_id=friend_id)
print(df.head())
print('\n')
print(df.info())
print('\n\n')
else:
print("False")
| bsd-3-clause | -7,855,047,965,477,400,000 | 26.243902 | 114 | 0.602507 | false |
rhaschke/catkin_tools | tests/system/verbs/catkin_build/test_build.py | 1 | 14329 | from __future__ import print_function
import os
import re
import shutil
from ...workspace_factory import workspace_factory
from ....utils import in_temporary_directory
from ....utils import assert_cmd_success
from ....utils import assert_cmd_failure
from ....utils import assert_files_exist
from ....utils import catkin_success
from ....utils import catkin_failure
from ....utils import redirected_stdio
from ....workspace_assertions import assert_workspace_initialized
from ....workspace_assertions import assert_no_warnings
TEST_DIR = os.path.dirname(__file__)
RESOURCES_DIR = os.path.join(os.path.dirname(__file__), '..', '..', 'resources')
BUILD = ['build', '--no-notify', '--no-status']
CLEAN = ['clean', '--yes']
BUILD_TYPES = ['cmake', 'catkin']
def create_flat_workspace(wf, build_type, n_pkgs):
"""Create a bunch of packages with no interdependencies"""
for i in range(n_pkgs):
wf.create_package('pkg_{}'.format(i))
def create_chain_workspace(wf, build_type, n_pkgs):
"""Create a bunch of packages, each of which depends on one other in the
workspace except for the root."""
for i in range(n_pkgs):
wf.create_package(
'pkg_{}'.format(i),
depends=(['pkg_{}'.format(i - 1)] if i > 0 else []))
def create_tree_workspace(wf, build_type, n_pkg_layers, n_children=2):
"""Create a bunch of packages which form a balanced dependency tree"""
n_pkgs = pow(n_children, n_pkg_layers + 1) - 1
for i in range(n_pkgs):
wf.create_package(
'pkg_{}'.format(i),
build_type=build_type,
depends=(['pkg_{}'.format(int((i - 1) / n_children))] if i > 0 else []))
return n_pkgs
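
# With n_children=2 this yields a binary tree: pkg_0 is the root and each
# pkg_i (i > 0) depends on pkg_((i - 1) // 2), e.g. pkg_1 and pkg_2 depend
# on pkg_0, pkg_3 and pkg_4 on pkg_1, and so on.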
@in_temporary_directory
def test_build_no_src():
"""Calling catkin build without a source space should fail."""
assert catkin_failure(BUILD)
def test_build_auto_init_no_pkgs():
"""Test automatically initializing a workspace with no packages."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
wf.build()
assert catkin_success(BUILD)
assert_workspace_initialized('.')
assert_no_warnings(out)
def test_build_auto_init_with_pkg():
"""Test automatically initializing a workspace."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
wf.create_package('pkg_a')
wf.build()
assert catkin_success(BUILD)
assert_workspace_initialized('.')
assert_no_warnings(out)
def test_build_dry_run():
"""Test showing the build jobs without doing anything."""
with redirected_stdio() as (out, err):
for build_type in BUILD_TYPES:
with workspace_factory() as wf:
create_tree_workspace(wf, build_type, 3)
wf.build()
assert catkin_success(BUILD + ['--dry-run'])
assert not os.path.exists('build')
assert not os.path.exists('devel')
def test_build_all_isolate_install():
"""Test building dependent catkin packages with isolated installspace."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
n_pkgs = create_tree_workspace(wf, 'catkin', 2)
wf.create_package('pkg_dep', build_type='catkin',
build_depends=['pkg_{}'.format(n) for n in range(n_pkgs)])
wf.build()
assert catkin_success(['config', '--isolate-install', '--install'])
assert catkin_success(BUILD)
assert os.path.exists('install/pkg_dep')
assert_no_warnings(out)
def test_build_all_isolate_devel():
"""Test building dependent catkin packages with isolated develspace."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
n_pkgs = create_tree_workspace(wf, 'catkin', 2)
wf.create_package('pkg_dep', build_type='catkin',
build_depends=['pkg_{}'.format(n) for n in range(n_pkgs)])
wf.build()
assert catkin_success(['config', '--isolate-devel'])
assert catkin_success(BUILD)
assert os.path.exists('devel/pkg_dep')
assert not os.path.exists('install')
assert_no_warnings(out)
def test_build_all_merged():
"""Test building all packages in a merged workspace"""
pass # TODO: Implement test
def test_build_pkg():
"""Test building a package by name.
"""
with redirected_stdio() as (out, err):
for build_type in BUILD_TYPES:
with workspace_factory() as wf:
create_chain_workspace(wf, build_type, 4)
wf.build()
assert catkin_failure(BUILD + ['pkg_nil'])
assert catkin_success(BUILD + ['pkg_2'])
assert os.path.exists(os.path.join('build', 'pkg_0'))
assert os.path.exists(os.path.join('build', 'pkg_1'))
assert os.path.exists(os.path.join('build', 'pkg_2'))
assert not os.path.exists(os.path.join('build', 'pkg_3'))
def test_build_no_deps():
"""Test building a package by name without deps."""
with redirected_stdio() as (out, err):
for build_type in BUILD_TYPES:
with workspace_factory() as wf:
create_chain_workspace(wf, build_type, 3)
wf.build()
# --no-deps needs an argument
assert catkin_failure(BUILD + ['--no-deps'])
            # only pkg_2 should be built
assert catkin_success(BUILD + ['pkg_2', '--no-deps'])
assert os.path.exists(os.path.join('build', 'pkg_2'))
assert not os.path.exists(os.path.join('build', 'pkg_1'))
assert not os.path.exists(os.path.join('build', 'pkg_0'))
def test_build_start_with():
"""Test building all packages starting with a specific one."""
with redirected_stdio() as (out, err):
for build_type in BUILD_TYPES:
with workspace_factory() as wf:
create_chain_workspace(wf, build_type, 4)
wf.build()
# --start-with needs an argument
assert catkin_failure(BUILD + ['--start-with'])
# --start-with needs a valid package
assert catkin_failure(BUILD + ['--start-with', 'pkg_nil'])
# this should build all packages
assert catkin_success(BUILD + ['--start-with', 'pkg_0'])
for i in range(4):
assert os.path.exists(os.path.join('build', 'pkg_{}'.format(i)))
assert catkin_success(CLEAN)
# this should skip pkg_2's deps
assert catkin_success(BUILD + ['--start-with', 'pkg_2'])
assert not os.path.exists(os.path.join('build', 'pkg_0'))
assert not os.path.exists(os.path.join('build', 'pkg_1'))
assert os.path.exists(os.path.join('build', 'pkg_2'))
assert os.path.exists(os.path.join('build', 'pkg_3'))
assert catkin_success(CLEAN)
def test_unbuilt_linked():
"""Test building packages which have yet to be built"""
with redirected_stdio() as (out, err):
for build_type in BUILD_TYPES:
with workspace_factory() as wf:
create_chain_workspace(wf, build_type, 2)
wf.build()
            # only pkg_0 should be built
assert catkin_success(BUILD + ['pkg_0', '--no-deps'])
# the rest should be built, but pkg_0 shouldn't be rebuilt
assert os.path.exists(os.path.join('build', 'pkg_0'))
assert not os.path.exists(os.path.join('build', 'pkg_1'))
pkg_0_log_path = os.path.join('logs', 'pkg_0')
# build the unbuilt packages (rebuild deps)
pkg_0_log_files = os.listdir(pkg_0_log_path)
assert catkin_success(BUILD + ['--unbuilt'])
assert os.path.exists(os.path.join('build', 'pkg_0'))
assert os.path.exists(os.path.join('build', 'pkg_1'))
# make sure pkg_0 has been rebuilt
assert pkg_0_log_files != os.listdir(pkg_0_log_path)
# build the unbuilt packages (don't rebuild deps)
pkg_0_log_files = os.listdir(pkg_0_log_path)
assert catkin_success(['clean', 'pkg_1'])
assert catkin_success(BUILD + ['--unbuilt', '--no-deps'])
assert os.path.exists(os.path.join('build', 'pkg_0'))
assert os.path.exists(os.path.join('build', 'pkg_1'))
# make sure pkg_0 hasn't been rebuilt
assert pkg_0_log_files == os.listdir(pkg_0_log_path)
def test_unbuilt_isolated():
"""Test building unbuilt packages with an isolated develspace."""
pass # TODO: This should succeed, but isn't implemented for isolated develspaces
def test_unbuilt_merged():
"""Test building unbuilt packages with a merged develspace."""
    pass  # TODO: This should fail, but the check hasn't been tested
def test_continue_on_failure():
"""Test behavior when some packages fail to build."""
pass # TODO: Write test
def test_preclean():
"""Test pre-cleaning packages in a workspace."""
pass # TODO: Write test
def test_force_cmake():
"""Test forcing cmake to run on packages in a workspace."""
pass # TODO: Write test
def test_install():
"""Test building and installing catkin packages without DESTDIR"""
with redirected_stdio() as (out, err):
for build_type in BUILD_TYPES:
with workspace_factory() as wf:
create_chain_workspace(wf, build_type, 2)
wf.build()
assert catkin_success(['config', '--install'])
assert catkin_success(BUILD)
assert os.path.exists(os.path.join('install'))
def test_install_cmake():
"""Test building and installing cmake packages without DESTDIR."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
            print(os.getcwd())
wf.build()
shutil.copytree(
os.path.join(RESOURCES_DIR, 'cmake_pkgs'),
os.path.join('src/cmake_pkgs'))
assert catkin_success(['config', '--install'])
assert catkin_success(BUILD)
assert os.path.exists(os.path.join('install'))
def test_install_cmake_destdir():
"""Test building and installing cmake packages with DESTDIR."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
            print(os.getcwd())
wf.build()
shutil.copytree(
os.path.join(RESOURCES_DIR, 'cmake_pkgs'),
os.path.join('src/cmake_pkgs'))
tmpinstall_path = os.path.join(os.getcwd(), 'tmpinstall')
env = {'DESTDIR': tmpinstall_path}
assert catkin_success(['config', '--install', '--install-space', '/opt/foo'], env)
assert catkin_success(BUILD, env)
assert os.path.exists(tmpinstall_path)
assert not os.path.exists(os.path.join('install'))
def test_install_catkin_destdir():
"""Test building and installing catkin packages with DESTDIR."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
            print(os.getcwd())
wf.build()
shutil.copytree(
os.path.join(RESOURCES_DIR, 'catkin_pkgs', 'products_0'),
os.path.join('src', 'products_0'))
tmpinstall_path = os.path.join(os.getcwd(), 'tmpinstall')
env = {'DESTDIR': tmpinstall_path}
install_space = os.path.abspath(os.path.join('opt', 'foo'))
assert catkin_success(['config', '--install', '--install-space', install_space], env)
assert catkin_success(BUILD, env)
assert os.path.exists(tmpinstall_path)
assert not os.path.exists(os.path.join('install'))
# check for _CATKIN_SETUP_DIR
setup_sh_path = os.path.join(tmpinstall_path, install_space.lstrip(os.sep), 'setup.sh')
print(setup_sh_path)
assert os.path.exists(setup_sh_path)
setup_dir_correct = False
with open(setup_sh_path, "r") as setup_sh:
for line in setup_sh:
if re.search('_CATKIN_SETUP_DIR:={}'.format(install_space), line):
setup_dir_correct = True
break
assert setup_dir_correct is True
def test_pkg_with_unicode_names():
"""Test building a package with unicode file names."""
with redirected_stdio() as (out, err):
with workspace_factory() as wf:
            print(os.getcwd())
wf.build()
shutil.copytree(
os.path.join(RESOURCES_DIR, 'catkin_pkgs', 'products_unicode'),
os.path.join('src/cmake_pkgs'))
assert catkin_success(['config', '--link-devel'])
assert catkin_success(BUILD)
def test_glob_pattern_build():
"""Test building multiple packages given as glob pattern"""
with redirected_stdio() as (out, err):
for build_type in BUILD_TYPES:
with workspace_factory() as wf:
create_flat_workspace(wf, build_type, 11)
wf.build()
assert catkin_success(BUILD + ['pkg_1*'])
assert not os.path.exists(os.path.join('build', 'pkg_0'))
assert os.path.exists(os.path.join('build', 'pkg_1'))
assert os.path.exists(os.path.join('build', 'pkg_10'))
assert not os.path.exists(os.path.join('build', 'pkg_2'))
assert not os.path.exists(os.path.join('build', 'pkg_3'))
assert not os.path.exists(os.path.join('build', 'pkg_4'))
assert not os.path.exists(os.path.join('build', 'pkg_5'))
assert not os.path.exists(os.path.join('build', 'pkg_6'))
assert not os.path.exists(os.path.join('build', 'pkg_7'))
assert not os.path.exists(os.path.join('build', 'pkg_8'))
assert not os.path.exists(os.path.join('build', 'pkg_9'))
| apache-2.0 | 5,205,434,057,814,361,000 | 38.473829 | 99 | 0.574988 | false |
catalpainternational/OIPA | OIPA/iati/management/commands/organisation_name_updater.py | 1 | 1062 | from __future__ import print_function
from builtins import object
from django.core.management.base import BaseCommand
from iati.models import Organisation
from iati_synchroniser.models import Publisher
class Command(BaseCommand):
option_list = BaseCommand.option_list
counter = 0
def handle(self, *args, **options):
updater = OrganisationNameUpdater()
updater.update()
class OrganisationNameUpdater(object):
def update(self):
for o in Organisation.objects.filter(name=None):
try:
organisation_code = o.code
                if Publisher.objects.filter(org_id=organisation_code).exists():
current_publisher = Publisher.objects.get(org_id=organisation_code)
                    if o.abbreviation is None:
o.abbreviation = current_publisher.org_abbreviate
o.name = current_publisher.org_name
o.save()
except Exception as e:
print("error in update_organisation_names")
return True
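
# Hypothetical invocation (the management command name is taken from this
# file's name):
#   python manage.py organisation_name_updater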
| agpl-3.0 | 1,909,566,768,982,811,000 | 32.1875 | 87 | 0.629002 | false |
jpleger/django-analystnotes | analystnotes/migrations/0001_initial.py | 1 | 1830 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Command',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('cmd', models.CharField(max_length=2048, verbose_name=b'Command', db_index=True)),
('stdout', models.TextField(null=True, verbose_name=b'Standard Out', blank=True)),
('stderr', models.TextField(null=True, verbose_name=b'Standard Error', blank=True)),
('execute_time', models.DateTimeField(auto_now_add=True, verbose_name=b'Process Execute Time')),
('exitcode', models.IntegerField(verbose_name=b'Process Exit Code', db_index=True)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, verbose_name=b'Name of project', db_index=True)),
('slug', models.SlugField(unique=True, max_length=128, verbose_name=b'Slug Name')),
('created', models.DateTimeField(auto_now_add=True, verbose_name=b'Date project created', db_index=True)),
('owner', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='command',
name='project',
field=models.ForeignKey(to='analystnotes.Project'),
),
]
| bsd-3-clause | 8,535,255,927,093,436,000 | 43.634146 | 122 | 0.596721 | false |
xissy/titanium-mobile-sdk | module/iphone/templates/build.py | 1 | 6525 | #!/usr/bin/env python
#
# Appcelerator Titanium Module Packager
#
#
import os, subprocess, sys, glob, string
import zipfile
from datetime import date
cwd = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
os.chdir(cwd)
required_module_keys = ['name','version','moduleid','description','copyright','license','copyright','platform','minsdk']
module_defaults = {
'description':'My module',
'author': 'Your Name',
'license' : 'Specify your license',
'copyright' : 'Copyright (c) %s by Your Company' % str(date.today().year),
}
module_license_default = "TODO: place your license here and we'll include it in the module distribution"
def find_sdk(config):
sdk = config['TITANIUM_SDK']
return os.path.expandvars(os.path.expanduser(sdk))
def replace_vars(config,token):
idx = token.find('$(')
while idx != -1:
idx2 = token.find(')',idx+2)
if idx2 == -1: break
key = token[idx+2:idx2]
if not config.has_key(key): break
token = token.replace('$(%s)' % key, config[key])
idx = token.find('$(')
return token
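
# Worked example (a sketch): with config = {'TITANIUM_SDK': '/opt/titanium'},
# replace_vars(config, '$(TITANIUM_SDK)/iphone') returns '/opt/titanium/iphone';
# tokens whose keys are missing from config are left untouched.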
def read_ti_xcconfig():
contents = open(os.path.join(cwd,'titanium.xcconfig')).read()
config = {}
for line in contents.splitlines(False):
line = line.strip()
if line[0:2]=='//': continue
idx = line.find('=')
if idx > 0:
key = line[0:idx].strip()
value = line[idx+1:].strip()
config[key] = replace_vars(config,value)
return config
def generate_doc(config):
docdir = os.path.join(cwd,'documentation')
if not os.path.exists(docdir):
print "Couldn't find documentation file at: %s" % docdir
return None
try:
import markdown2 as markdown
except ImportError:
import markdown
documentation = []
for file in os.listdir(docdir):
if file in ignoreFiles or os.path.isdir(os.path.join(docdir, file)):
continue
md = open(os.path.join(docdir,file)).read()
html = markdown.markdown(md)
		documentation.append({file:html})
return documentation
def compile_js(manifest,config):
js_file = os.path.join(cwd,'assets','__MODULE_ID__.js')
if not os.path.exists(js_file): return
from compiler import Compiler
try:
import json
except:
import simplejson as json
path = os.path.basename(js_file)
compiler = Compiler(cwd, manifest['moduleid'], manifest['name'], 'commonjs')
method = compiler.compile_commonjs_file(path,js_file)
exports = open('metadata.json','w')
json.dump({'exports':compiler.exports }, exports)
exports.close()
method += '\treturn filterDataInRange([NSData dataWithBytesNoCopy:data length:sizeof(data) freeWhenDone:NO], ranges[0]);'
f = os.path.join(cwd,'Classes','___PROJECTNAMEASIDENTIFIER___ModuleAssets.m')
c = open(f).read()
templ_search = ' moduleAsset\n{\n'
idx = c.find(templ_search) + len(templ_search)
before = c[0:idx]
after = """
}
@end
"""
newc = before + method + after
if newc!=c:
x = open(f,'w')
x.write(newc)
x.close()
def die(msg):
print msg
sys.exit(1)
def warn(msg):
print "[WARN] %s" % msg
def validate_license():
c = open(os.path.join(cwd,'LICENSE')).read()
if c.find(module_license_default)!=-1:
warn('please update the LICENSE file with your license text before distributing')
def validate_manifest():
path = os.path.join(cwd,'manifest')
	if not os.path.exists(path): die("missing %s" % path)
	f = open(path)
manifest = {}
for line in f.readlines():
line = line.strip()
if line[0:1]=='#': continue
if line.find(':') < 0: continue
key,value = line.split(':')
manifest[key.strip()]=value.strip()
for key in required_module_keys:
if not manifest.has_key(key): die("missing required manifest key '%s'" % key)
if module_defaults.has_key(key):
defvalue = module_defaults[key]
curvalue = manifest[key]
if curvalue==defvalue: warn("please update the manifest key: '%s' to a non-default value" % key)
return manifest,path
ignoreFiles = ['.DS_Store','.gitignore','libTitanium.a','titanium.jar','README','__MODULE_ID__.js']
ignoreDirs = ['.DS_Store','.svn','.git','CVSROOT']
def zip_dir(zf,dir,basepath,ignore=[]):
for root, dirs, files in os.walk(dir):
for name in ignoreDirs:
if name in dirs:
dirs.remove(name) # don't visit ignored directories
for file in files:
if file in ignoreFiles: continue
e = os.path.splitext(file)
if len(e)==2 and e[1]=='.pyc':continue
from_ = os.path.join(root, file)
to_ = from_.replace(dir, basepath, 1)
zf.write(from_, to_)
def glob_libfiles():
files = []
for libfile in glob.glob('build/**/*.a'):
if libfile.find('Release-')!=-1:
files.append(libfile)
return files
def build_module(manifest,config):
from tools import ensure_dev_path
ensure_dev_path()
rc = os.system("xcodebuild -sdk iphoneos -configuration Release")
if rc != 0:
die("xcodebuild failed")
rc = os.system("xcodebuild -sdk iphonesimulator -configuration Release")
if rc != 0:
die("xcodebuild failed")
# build the merged library using lipo
moduleid = manifest['moduleid']
libpaths = ''
for libfile in glob_libfiles():
libpaths+='%s ' % libfile
os.system("lipo %s -create -output build/lib%s.a" %(libpaths,moduleid))
def package_module(manifest,mf,config):
name = manifest['name'].lower()
moduleid = manifest['moduleid'].lower()
version = manifest['version']
modulezip = '%s-iphone-%s.zip' % (moduleid,version)
if os.path.exists(modulezip): os.remove(modulezip)
zf = zipfile.ZipFile(modulezip, 'w', zipfile.ZIP_DEFLATED)
modulepath = 'modules/iphone/%s/%s' % (moduleid,version)
zf.write(mf,'%s/manifest' % modulepath)
libname = 'lib%s.a' % moduleid
zf.write('build/%s' % libname, '%s/%s' % (modulepath,libname))
docs = generate_doc(config)
if docs!=None:
for doc in docs:
for file, html in doc.iteritems():
filename = string.replace(file,'.md','.html')
zf.writestr('%s/documentation/%s'%(modulepath,filename),html)
for dn in ('assets','example','platform'):
if os.path.exists(dn):
zip_dir(zf,dn,'%s/%s' % (modulepath,dn),['README'])
zf.write('LICENSE','%s/LICENSE' % modulepath)
zf.write('module.xcconfig','%s/module.xcconfig' % modulepath)
exports_file = 'metadata.json'
if os.path.exists(exports_file):
zf.write(exports_file, '%s/%s' % (modulepath, exports_file))
zf.close()
if __name__ == '__main__':
manifest,mf = validate_manifest()
validate_license()
config = read_ti_xcconfig()
sdk = find_sdk(config)
sys.path.insert(0,os.path.join(sdk,'iphone'))
sys.path.append(os.path.join(sdk, "common"))
compile_js(manifest,config)
build_module(manifest,config)
package_module(manifest,mf,config)
sys.exit(0)
| apache-2.0 | 4,836,279,273,150,975,000 | 28.794521 | 122 | 0.677548 | false |
nens/threedi-qgis-plugin | tool_commands/control_structures/create_measuring_group_dialog.py | 1 | 14526 | from pathlib import Path
from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import QAbstractItemView
from qgis.PyQt.QtWidgets import QDialog
from qgis.PyQt.QtWidgets import QPushButton
from qgis.PyQt.QtWidgets import QTableWidget
from qgis.PyQt.QtWidgets import QTableWidgetItem
from qgis.PyQt.QtWidgets import QVBoxLayout
from qgis.PyQt.QtWidgets import QWidget
from ThreeDiToolbox.tool_commands.control_structures.main import ControlledStructures
from ThreeDiToolbox.utils.threedi_database import get_database_properties
from ThreeDiToolbox.utils.threedi_database import get_databases
import logging
logger = logging.getLogger(__name__)
ui_file = Path(__file__).parent / "create_measuring_group_dialog.ui"
assert ui_file.is_file()
FORM_CLASS, _ = uic.loadUiType(ui_file)
class CreateMeasuringGroupDialogWidget(QDialog, FORM_CLASS):
def __init__(
self,
parent=None,
command=None,
db_key=None,
measuring_group_id=None,
dockwidget_controlled_structures=None,
):
"""Constructor
# TODO: fix arguments documentation.
Args:
parent: Qt parent Widget
iface: QGiS interface
command: Command instance with a run_it method which will be called
on acceptance of the dialog
"""
super().__init__(parent)
# Show gui
self.setupUi(self)
self.measuring_group_id = measuring_group_id
self.command = command
self.dockwidget_controlled_structures = dockwidget_controlled_structures
self.db_key = db_key
self.databases = get_databases()
self.db = get_database_properties(self.db_key)
self.control_structure = ControlledStructures(
flavor=self.db["db_entry"]["db_type"]
)
self.setup_tablewidget()
self.update_ids()
self.connect_signals()
def on_accept(self):
"""Accept and run the Command.run_it method."""
self.save_measuring_group()
self.accept()
def on_reject(self):
"""Cancel"""
self.reject()
logger.debug("Reject")
def closeEvent(self, event):
"""
Close widget, called by Qt on close
:param event: QEvent, close event
"""
self.buttonbox.accepted.disconnect(self.on_accept)
self.buttonbox.rejected.disconnect(self.on_reject)
event.accept()
def setup_tablewidget(self):
tablewidget = self.tablewidget_measuring_point
tablewidget.setCellWidget(0, 0, self.combobox_input_measuring_point_table)
tablewidget.setCellWidget(0, 1, self.combobox_input_measuring_point_id)
tablewidget.setCellWidget(0, 3, self.pushbutton_input_measuring_point_new)
def update_ids(self):
"""Setup the id's for the measuring group and measuring points."""
# Set the id of the measuring group
self.label_measuring_group_id_info.setText(self.measuring_group_id)
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Set all id's of the measuring groups
self.combobox_measuring_group_load.clear()
list_of_measuring_group_ids = self.control_structure.get_attributes(
table_name="v2_control_measure_group", attribute_name="id"
)
self.combobox_measuring_group_load.addItems(list_of_measuring_group_ids)
# Set all id's of the connection nodes
self.combobox_input_measuring_point_id.clear()
list_of_connection_node_ids = self.control_structure.get_attributes(
table_name="v2_connection_nodes", attribute_name="id"
)
self.combobox_input_measuring_point_id.addItems(list_of_connection_node_ids)
def connect_signals(self):
"""Connect the signals."""
self.pushbutton_measuring_group_load.clicked.connect(self.load_measuring_group)
self.pushbutton_input_measuring_point_new.clicked.connect(
self.create_new_measuring_point
)
self.buttonbox.accepted.connect(self.on_accept)
self.buttonbox.rejected.connect(self.on_reject)
def create_new_measuring_point(self):
# Get the model
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Get last id of measure map or set to 0; set to +1
table_name = "v2_control_measure_map"
attribute_name = "MAX(id)"
try:
max_id_measure_map = int(
self.control_structure.get_attributes(table_name, attribute_name)[0]
)
except ValueError:
logger.exception("Error determining max id, using 0")
max_id_measure_map = 0
new_max_id_measure_map = max_id_measure_map + 1
# Populate the new row in the table
self.populate_measuring_point_row(new_max_id_measure_map)
def populate_measuring_point_row(self, id_measuring_point):
"""
Populate a row from the measuring point table.
Args:
(str) id_measuring_point: The id of the measuring point."""
tablewidget = self.tablewidget_measuring_point
# Always put the new row on top.
row_position = 1
tablewidget.insertRow(row_position)
# tablewidget.setItem(row_position, 0, measuring_point_id)
measuring_point_table_widget = QTableWidgetItem(
self.combobox_input_measuring_point_table.currentText()
)
tablewidget.setItem(row_position, 0, measuring_point_table_widget)
measuring_point_table_id_widget = QTableWidgetItem(
self.combobox_input_measuring_point_id.currentText()
)
tablewidget.setItem(row_position, 1, measuring_point_table_id_widget)
try:
measuring_point_weight = tablewidget.item(0, 2).text()
except AttributeError:
logger.exception(
"Error determining measuring point weight, using emty string"
)
measuring_point_weight = ""
tablewidget.setItem(row_position, 2, QTableWidgetItem(measuring_point_weight))
measuring_point_remove_widget = QPushButton("Remove")
measuring_point_remove_widget.clicked.connect(self.remove_measuring_point_row)
tablewidget.setCellWidget(row_position, 3, measuring_point_remove_widget)
def remove_measuring_point_row(self):
"""Remove a row from the measuring point table."""
tablewidget = self.tablewidget_measuring_point
row_number = tablewidget.currentRow()
# Remove measuring point from dockwidget
# Don't remove the first row.
BUTTON_ROW = 0
if row_number != BUTTON_ROW:
tablewidget.removeRow(row_number)
def load_measuring_group(self):
"""Load a measuring group in the tablewidget."""
        # Remove all current rows, besides the first.
tablewidget = self.tablewidget_measuring_point
row_count = tablewidget.rowCount()
for row in range(row_count - 1):
tablewidget.removeRow(1)
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Get all the measuring points from a certain measure group
table_name = "v2_control_measure_map"
attribute_name = "*"
where_clause = "measure_group_id={}".format(
self.combobox_measuring_group_load.currentText()
)
measure_groups = self.control_structure.get_features_with_where_clause(
table_name=table_name, attribute_name=attribute_name, where=where_clause
)
for measure_group in measure_groups:
row_position = self.tablewidget_measuring_point.rowCount()
self.tablewidget_measuring_point.insertRow(row_position)
self.tablewidget_measuring_point.setItem(
row_position, 0, QTableWidgetItem(str(measure_group[2]))
)
self.tablewidget_measuring_point.setItem(
row_position, 1, QTableWidgetItem(str(measure_group[3]))
)
self.tablewidget_measuring_point.setItem(
row_position, 2, QTableWidgetItem(str(measure_group[4]))
)
measuring_point_remove = QPushButton("Remove")
measuring_point_remove.clicked.connect(self.remove_measuring_point)
self.tablewidget_measuring_point.setCellWidget(
row_position, 3, measuring_point_remove
)
def remove_measuring_point(self):
"""Remove a certain measuring point from the tablewidget."""
tablewidget = self.tablewidget_measuring_point
row_number = tablewidget.currentRow()
tablewidget.removeRow(row_number)
def save_measuring_group(self):
"""Save the measuring group in the database."""
self.control_structure.start_sqalchemy_engine(self.db["db_settings"])
# Insert the measuring group in the v2_control_measure_group
table_name = "v2_control_measure_group"
attributes = {"id": self.measuring_group_id}
self.control_structure.insert_into_table(
table_name=table_name, attributes=attributes
)
# Create a tab in the tabwidget of the 'Measuring group' tab in
# the controlled structures dockwidget
self.add_measuring_group_tab_dockwidget()
table_name = "v2_control_measure_map"
BUTTON_ROW = 0
for row in range(self.tablewidget_measuring_point.rowCount()):
if row != BUTTON_ROW:
# Get the new measuring_point id
attribute_name = "MAX(id)"
try:
max_id_measure_point = int(
self.control_structure.get_attributes(
table_name, attribute_name
)[0]
)
except ValueError:
logger.exception("Error determining max measure point id, using 0")
max_id_measure_point = 0
new_measuring_point_id = max_id_measure_point + 1
measure_point_attributes = self.get_measuring_point_attributes(
row, new_measuring_point_id
)
# Save the measuring point in the v2_control_measure_map
self.control_structure.insert_into_table(
table_name, measure_point_attributes
)
# Setup new tab of "Measuring group" tab
self.setup_measuring_group_table_dockwidget(measure_point_attributes)
def add_measuring_group_tab_dockwidget(self):
"""
Create a tab for the measure group within the Measure group tab
in the dockwidget.
"""
tab = QWidget()
layout = QVBoxLayout(tab)
tab.setLayout(layout)
table_measuring_group = QTableWidget(tab)
table_measuring_group.setGeometry(10, 10, 741, 266)
table_measuring_group.insertColumn(0)
table_measuring_group.setHorizontalHeaderItem(0, QTableWidgetItem("table"))
table_measuring_group.insertColumn(1)
table_measuring_group.setHorizontalHeaderItem(1, QTableWidgetItem("table_id"))
table_measuring_group.insertColumn(2)
table_measuring_group.setHorizontalHeaderItem(2, QTableWidgetItem("weight"))
table_measuring_group.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.dockwidget_controlled_structures.table_measuring_group = (
table_measuring_group
)
# Add the tab to the left
self.dockwidget_controlled_structures.tab_measuring_group_view_2.insertTab(
0, tab, "Group: {}".format(str(self.label_measuring_group_id_info.text()))
)
def get_measuring_point_attributes(self, row_nr, new_measuring_point_id):
"""
Get the attributes of the measuring point from the table.
Args:
(int) row_nr: The row number of the tablewidget.
(int) new_measuring_point_id: The id of the new measuring point.
Returns:
(dict) attributes: A list containing the attributes
of the measuring point.
"""
measuring_point_table = self.tablewidget_measuring_point.item(row_nr, 0).text()
try:
measuring_point_table_id = self.tablewidget_measuring_point.item(
row_nr, 1
).text()
except AttributeError:
# TODO: I've seen this measuring_point_table_id try/except
# before. Can it be unified?
logger.exception(
"Error grabbing measuring point table id, using current text"
)
measuring_point_table_id = self.tablewidget_measuring_point.cellWidget(
row_nr, 1
).currentText()
try:
measuring_point_weight = self.tablewidget_measuring_point.item(
row_nr, 2
).text()
except AttributeError:
logger.exception(
"Error grabbing measuring point weight, using empty string"
)
measuring_point_weight = ""
attributes = {
"id": new_measuring_point_id,
"measure_group_id": self.measuring_group_id,
"object_type": measuring_point_table,
"object_id": measuring_point_table_id,
"weight": measuring_point_weight,
}
return attributes
def setup_measuring_group_table_dockwidget(self, measure_map_attributes):
"""
Setup a tab for the measure group in the Measure group tab
in the dockwidget.
Args:
(dict) measure_map_attributes: A dict containing the attributes
from the measuring point (from v2_control_measure_map).
"""
row_position = (
self.dockwidget_controlled_structures.table_measuring_group.rowCount()
)
self.dockwidget_controlled_structures.table_measuring_group.insertRow(
row_position
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 0, QTableWidgetItem("v2_connection_nodes")
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 1, QTableWidgetItem(measure_map_attributes["object_id"])
)
self.dockwidget_controlled_structures.table_measuring_group.setItem(
row_position, 2, QTableWidgetItem(measure_map_attributes["weight"])
)
| gpl-3.0 | -5,747,395,667,659,924,000 | 40.741379 | 87 | 0.629423 | false |
psychopy/versions | psychopy/visual/line.py | 1 | 2906 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Creates a Line between two points as a special case of a
:class:`~psychopy.visual.ShapeStim`
"""
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
from __future__ import absolute_import, division, print_function
import psychopy # so we can get the __path__
from psychopy import logging
import numpy
from psychopy.visual.shape import ShapeStim
from psychopy.tools.attributetools import attributeSetter, setAttribute
class Line(ShapeStim):
"""Creates a Line between two points.
(New in version 1.72.00)
"""
def __init__(self, win, start=(-.5, -.5), end=(.5, .5), **kwargs):
"""Line accepts all input parameters, that
:class:`~psychopy.visual.ShapeStim` accepts, except
for vertices, closeShape and fillColor.
:Notes:
The `contains` method always return False because a line is not a
proper (2D) polygon.
"""
# what local vars are defined (these are the init params) for use by
# __repr__
self._initParams = dir()
self._initParams.remove('self')
# kwargs isn't a parameter, but a list of params
self._initParams.remove('kwargs')
self._initParams.extend(kwargs)
self.__dict__['start'] = numpy.array(start)
self.__dict__['end'] = numpy.array(end)
self.__dict__['vertices'] = [start, end]
kwargs['closeShape'] = False # Make sure nobody messes around here
kwargs['vertices'] = self.vertices
kwargs['fillColor'] = None
super(Line, self).__init__(win, **kwargs)
@attributeSetter
def start(self, start):
"""tuple, list or 2x1 array.
Specifies the position of the start of the line.
:ref:`Operations <attrib-operations>` supported.
"""
self.__dict__['start'] = numpy.array(start)
self.setVertices([self.start, self.end], log=False)
def setStart(self, start, log=None):
"""Usually you can use 'stim.attribute = value' syntax instead,
but use this method if you need to suppress the log message.
"""
setAttribute(self, 'start', start, log)
@attributeSetter
def end(self, end):
"""tuple, list or 2x1 array
Specifies the position of the end of the line.
:ref:`Operations <attrib-operations>` supported."""
self.__dict__['end'] = numpy.array(end)
self.setVertices([self.start, self.end], log=False)
def setEnd(self, end, log=None):
"""Usually you can use 'stim.attribute = value' syntax instead,
but use this method if you need to suppress the log message.
"""
setAttribute(self, 'end', end, log)
def contains(self, *args, **kwargs):
return False
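
# A minimal usage sketch (assumes an existing psychopy.visual.Window `win`;
# not part of the original module):
#   line = Line(win, start=(-0.5, -0.5), end=(0.5, 0.5))
#   line.draw(); win.flip()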
| gpl-3.0 | -6,665,230,088,042,024,000 | 32.790698 | 79 | 0.627667 | false |
jdweeks/screener | screener.py | 1 | 2569 | #!/usr/local/bin/python3
import os, sys, getopt
import quandl as ql
import pandas as pd
import numpy as np
from pylab import plot, figure, savefig
# read Russell 3000 constituents from a csv
def readRuss():
try:
ticks = []
russ = open('russ3.csv', 'r').read()
split = russ.split('\n')
for tick in split:
ticks.append('WIKI/' + tick.rstrip())
return ticks
except Exception as e:
print('Failed to read Russell 3000:', str(e))
# retrieve stock data from Quandl
def getData(query, date):
try:
return ql.get(query, start_date = date)
except Exception as e:
print('Failed to get stock data:', str(e))
# fit a first-degree polynomial (i.e. a line) to the data
def calcTrend(data):
return np.polyfit(data.index.values, list(data), 1)
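
# np.polyfit(x, y, 1) returns the pair [slope, intercept], so calcTrend(...)[0]
# below is the per-step slope of the fitted line for price or volume.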
def main(argv):
tick = 'WIKI/' # ticker will be appended
date = '2017/01/01' # default start date
ql.ApiConfig.api_key = os.environ['QUANDL_KEY']
usage = 'usage: screener.py -t <ticker> -d <start_date>'
if len(argv) == 0:
print(usage)
sys.exit(2)
# parse command-line args
try:
opts, args = getopt.getopt(argv, 'ht:d', ['ticker=', 'date='])
except getopt.GetoptError:
print(usage)
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print(usage)
sys.exit(0)
elif opt in ('-t', '--ticker'):
tick += arg
elif opt in ('-d', '--date'):
date = arg
# retrieve the 4th & 5th cols (Close & Volume)
close = getData(tick + '.4', date)
vol = getData(tick + '.5', date)
data = pd.concat([close, vol], axis=1).reset_index(drop=True)
print(data)
# calculate trends on price and volume
pcoeffs = calcTrend(data['Close'])
vcoeffs = calcTrend(data['Volume'])
print('Price trend:', pcoeffs[0])
print('Volume trend:', vcoeffs[0])
# save plots of trend lines
xi = data.index.values
figure()
pline = pcoeffs[0] * xi + pcoeffs[1]
plot(xi, pline, 'r-', xi, list(data['Close']), '-o')
savefig('price.png')
figure()
vline = vcoeffs[0] * xi + vcoeffs[1]
plot(xi, vline, 'r-', xi, list(data['Volume']), '-o')
savefig('volume.png')
# ticks = readRuss()
# q_close = [ tick + '.4' for tick in ticks[:5] ]
# q_vol = [ tick + '.5' for tick in ticks[:5] ]
# data = getData(q_close + q_vol, '2017-01-01')
if __name__ == "__main__":
# execute only if run as a script
main(sys.argv[1:])
| mit | -2,508,157,812,468,462,600 | 26.042105 | 70 | 0.5652 | false |
spesmilo/electrum | electrum/plugins/email_requests/qt.py | 1 | 9900 | #!/usr/bin/env python
#
# Electrum - Lightweight Bitcoin Client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import random
import time
import threading
import base64
from functools import partial
import traceback
import sys
from typing import Set
import smtplib
import imaplib
import email
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.encoders import encode_base64
from PyQt5.QtCore import QObject, pyqtSignal, QThread
from PyQt5.QtWidgets import (QVBoxLayout, QLabel, QGridLayout, QLineEdit,
QInputDialog)
from electrum.gui.qt.util import (EnterButton, Buttons, CloseButton, OkButton,
WindowModalDialog)
from electrum.gui.qt.main_window import ElectrumWindow
from electrum.plugin import BasePlugin, hook
from electrum.paymentrequest import PaymentRequest
from electrum.i18n import _
from electrum.logging import Logger
from electrum.wallet import Abstract_Wallet
from electrum.invoices import OnchainInvoice
class Processor(threading.Thread, Logger):
polling_interval = 5*60
def __init__(self, imap_server, username, password, callback):
threading.Thread.__init__(self)
Logger.__init__(self)
self.daemon = True
self.username = username
self.password = password
self.imap_server = imap_server
self.on_receive = callback
self.M = None
self.reset_connect_wait()
def reset_connect_wait(self):
self.connect_wait = 100 # ms, between failed connection attempts
def poll(self):
try:
self.M.select()
except:
return
typ, data = self.M.search(None, 'ALL')
for num in str(data[0], 'utf8').split():
typ, msg_data = self.M.fetch(num, '(RFC822)')
msg = email.message_from_bytes(msg_data[0][1])
p = msg.get_payload()
if not msg.is_multipart():
p = [p]
continue
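            # Note: the `continue` above means non-multipart messages are
            # skipped entirely; only multipart payloads are scanned for a
            # bitcoin-paymentrequest part.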
for item in p:
if item.get_content_type() == "application/bitcoin-paymentrequest":
pr_str = item.get_payload()
pr_str = base64.b64decode(pr_str)
self.on_receive(pr_str)
def run(self):
while True:
try:
self.M = imaplib.IMAP4_SSL(self.imap_server)
self.M.login(self.username, self.password)
except BaseException as e:
self.logger.info(f'connecting failed: {repr(e)}')
self.connect_wait *= 2
else:
self.reset_connect_wait()
# Reconnect when host changes
while self.M and self.M.host == self.imap_server:
try:
self.poll()
except BaseException as e:
self.logger.info(f'polling failed: {repr(e)}')
break
time.sleep(self.polling_interval)
time.sleep(random.randint(0, self.connect_wait))
def send(self, recipient, message, payment_request):
msg = MIMEMultipart()
msg['Subject'] = message
msg['To'] = recipient
msg['From'] = self.username
part = MIMEBase('application', "bitcoin-paymentrequest")
part.set_payload(payment_request)
encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="payreq.btc"')
msg.attach(part)
try:
s = smtplib.SMTP_SSL(self.imap_server, timeout=2)
s.login(self.username, self.password)
s.sendmail(self.username, [recipient], msg.as_string())
s.quit()
except BaseException as e:
self.logger.info(e)
class QEmailSignalObject(QObject):
email_new_invoice_signal = pyqtSignal()
class Plugin(BasePlugin):
def fullname(self):
return 'Email'
def description(self):
return _("Send and receive payment requests via email")
def is_available(self):
return True
def __init__(self, parent, config, name):
BasePlugin.__init__(self, parent, config, name)
self.imap_server = self.config.get('email_server', '')
self.username = self.config.get('email_username', '')
self.password = self.config.get('email_password', '')
if self.imap_server and self.username and self.password:
self.processor = Processor(self.imap_server, self.username, self.password, self.on_receive)
self.processor.start()
self.obj = QEmailSignalObject()
self.obj.email_new_invoice_signal.connect(self.new_invoice)
self.wallets = set() # type: Set[Abstract_Wallet]
def on_receive(self, pr_str):
self.logger.info('received payment request')
self.pr = PaymentRequest(pr_str)
self.obj.email_new_invoice_signal.emit()
@hook
def load_wallet(self, wallet, main_window):
self.wallets |= {wallet}
@hook
def close_wallet(self, wallet):
self.wallets -= {wallet}
def new_invoice(self):
invoice = OnchainInvoice.from_bip70_payreq(self.pr)
for wallet in self.wallets:
wallet.save_invoice(invoice)
#main_window.invoice_list.update()
@hook
def receive_list_menu(self, window: ElectrumWindow, menu, addr):
menu.addAction(_("Send via e-mail"), lambda: self.send(window, addr))
def send(self, window: ElectrumWindow, addr):
from electrum import paymentrequest
req = window.wallet.receive_requests.get(addr)
if not isinstance(req, OnchainInvoice):
window.show_error("Only on-chain requests are supported.")
return
message = req.message
if req.bip70:
payload = bytes.fromhex(req.bip70)
else:
pr = paymentrequest.make_request(self.config, req)
payload = pr.SerializeToString()
if not payload:
return
recipient, ok = QInputDialog.getText(window, 'Send request', 'Email invoice to:')
if not ok:
return
recipient = str(recipient)
self.logger.info(f'sending mail to {recipient}')
try:
# FIXME this runs in the GUI thread and blocks it...
self.processor.send(recipient, message, payload)
except BaseException as e:
self.logger.exception('')
window.show_message(repr(e))
else:
window.show_message(_('Request sent.'))
def requires_settings(self):
return True
def settings_widget(self, window):
return EnterButton(_('Settings'), partial(self.settings_dialog, window))
def settings_dialog(self, window):
d = WindowModalDialog(window, _("Email settings"))
d.setMinimumSize(500, 200)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('Server hosting your email account')))
grid = QGridLayout()
vbox.addLayout(grid)
grid.addWidget(QLabel('Server (IMAP)'), 0, 0)
server_e = QLineEdit()
server_e.setText(self.imap_server)
grid.addWidget(server_e, 0, 1)
grid.addWidget(QLabel('Username'), 1, 0)
username_e = QLineEdit()
username_e.setText(self.username)
grid.addWidget(username_e, 1, 1)
grid.addWidget(QLabel('Password'), 2, 0)
password_e = QLineEdit()
password_e.setText(self.password)
grid.addWidget(password_e, 2, 1)
vbox.addStretch()
vbox.addLayout(Buttons(CloseButton(d), OkButton(d)))
if not d.exec_():
return
server = str(server_e.text())
self.config.set_key('email_server', server)
self.imap_server = server
username = str(username_e.text())
self.config.set_key('email_username', username)
self.username = username
password = str(password_e.text())
self.config.set_key('email_password', password)
self.password = password
check_connection = CheckConnectionThread(server, username, password)
check_connection.connection_error_signal.connect(lambda e: window.show_message(
_("Unable to connect to mail server:\n {}").format(e) + "\n" +
_("Please check your connection and credentials.")
))
check_connection.start()
class CheckConnectionThread(QThread):
connection_error_signal = pyqtSignal(str)
def __init__(self, server, username, password):
super().__init__()
self.server = server
self.username = username
self.password = password
def run(self):
try:
conn = imaplib.IMAP4_SSL(self.server)
conn.login(self.username, self.password)
except BaseException as e:
self.connection_error_signal.emit(repr(e))
| mit | -5,328,565,570,429,520,000 | 34.483871 | 103 | 0.626162 | false |
sveetch/sveedocuments | sveedocuments/utils/rest_roles.py | 1 | 4637 | # -*- coding: utf-8 -*-
"""
ReSTructured additional roles
"""
import os, re
from docutils import nodes, utils
from docutils.parsers.rst import roles
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.contrib.sites.models import Site
from sveedocuments.models import Page, Attachment
_ATTACHMENT_ROLE_REGEX = re.compile(r"^(?:id)(?P<id>[0-9]+)(?:\-)(?P<slug>.*?)$")
def rst_parser_error(msg, rawtext, text, lineno, inliner):
msg = inliner.reporter.error(msg, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
def get_page_slugs(force_update_cache=False):
"""
Get a dict of all visible *Pages* as a tuple ``(slug, title)``
Try to get it from the cache if it exist, else build it
"""
if force_update_cache or not cache.get(settings.PAGE_SLUGS_CACHE_KEY_NAME):
slugs_map = dict(Page.objects.filter(visible=True).values_list('slug', 'title'))
cache.set(settings.PAGE_SLUGS_CACHE_KEY_NAME, slugs_map)
return slugs_map
return cache.get(settings.PAGE_SLUGS_CACHE_KEY_NAME)
def page_link(role, rawtext, text, lineno, inliner, options={}, content=[]):
"""
Role to make a reference link to other *Pages* by using their ``slug``
Usage in document :
Blah blah :page:`my-page-slug`
"""
# Get the page slugs map
slugs = get_page_slugs()
# Throw error if the given slug does not exist
if text not in slugs and not settings.DOCUMENTS_PARSER_WIKIROLE_SILENT_WARNING:
msg = inliner.reporter.error('Page with slug "%s" does not exist.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
# Add a class to the item
options.update({'classes': ['documents_page_link']})
roles.set_classes(options)
# Return the node as reference to display the link for the given page's slug
site_current = Site.objects.get_current()
url = "http://{0}{1}".format(site_current.domain, reverse('documents-page-details', args=[text]))
node = nodes.reference(rawtext, utils.unescape(slugs[text]), refuri=url, **options)
return [node], []
roles.register_local_role('page', page_link)
def get_page_attachment_slugs(page_id, force_update_cache=False):
"""
Get a dict of all Attachments linked to a Page
Try to get it from the cache if it exist, else build it
"""
cache_key = settings.PAGE_ATTACHMENTS_SLUGS_CACHE_KEY_NAME.format(page_id)
if force_update_cache or not cache.get(cache_key):
page = Page.objects.get(pk=page_id)
slugs_map = dict(page.attachment.all().values_list('slug', 'file'))
cache.set(cache_key, slugs_map)
return slugs_map
return cache.get(cache_key)
def page_attachment(role, rawtext, text, lineno, inliner, options={}, content=[]):
"""
Role to make a reference link to a Page's attachment
Usage in document :
Blah blah :attachment:`idX-slug`
Where X is the page id and slug his slugname
The page id is needed because i can't find a clean way to give some page context to
the docutils parser.
"""
matched = _ATTACHMENT_ROLE_REGEX.match(text)
if not matched or len(matched.groups())<2:
return rst_parser_error('Attachment role syntax is not respected with "{0}", you should write something like "idXX-ATTACHMENT_SLUG".'.format(text), rawtext, text, lineno, inliner)
# Get the page slugs map
pk, attachment_slug = matched.groups()
try:
slugs_map = get_page_attachment_slugs(pk)
except Page.DoesNotExist:
return rst_parser_error('Page with id "{pk}" does not exist in pattern "{pattern}"'.format(pk=pk, pattern=text), rawtext, text, lineno, inliner)
else:
if attachment_slug not in slugs_map and not settings.DOCUMENTS_PARSER_WIKIROLE_SILENT_WARNING:
return rst_parser_error('Attachment with slug "{slug}" does not exist for page id "{pk}" in pattern "{pattern}".'.format(pk=pk, slug=attachment_slug, pattern=text), rawtext, text, lineno, inliner)
link = slugs_map[attachment_slug]
# Add a class to the item
options.update({'classes': ['documents_page_attachment']})
roles.set_classes(options)
# Return the node as reference to display the link for the given page's slug
node = nodes.reference(rawtext, utils.unescape(attachment_slug), refuri=os.path.join(settings.MEDIA_URL, link), **options)
return [node], []
roles.register_local_role('attachment', page_attachment)
| mit | 807,631,129,012,385,500 | 40.774775 | 208 | 0.671339 | false |
christiansandberg/canopen | canopen/network.py | 1 | 12500 | try:
from collections.abc import MutableMapping
except ImportError:
from collections import MutableMapping
import logging
import threading
import struct
try:
import can
from can import Listener
from can import CanError
except ImportError:
# Do not fail if python-can is not installed
can = None
Listener = object
CanError = Exception
from .node import RemoteNode, LocalNode
from .sync import SyncProducer
from .timestamp import TimeProducer
from .nmt import NmtMaster
from .lss import LssMaster
from .objectdictionary.eds import import_from_node
logger = logging.getLogger(__name__)
class Network(MutableMapping):
"""Representation of one CAN bus containing one or more nodes."""
def __init__(self, bus=None):
"""
:param can.BusABC bus:
A python-can bus instance to re-use.
"""
#: A python-can :class:`can.BusABC` instance which is set after
#: :meth:`canopen.Network.connect` is called
self.bus = bus
#: A :class:`~canopen.network.NodeScanner` for detecting nodes
self.scanner = NodeScanner(self)
#: List of :class:`can.Listener` objects.
#: Includes at least MessageListener.
self.listeners = [MessageListener(self)]
self.notifier = None
self.nodes = {}
self.subscribers = {}
self.send_lock = threading.Lock()
self.sync = SyncProducer(self)
self.time = TimeProducer(self)
self.nmt = NmtMaster(0)
self.nmt.network = self
self.lss = LssMaster()
self.lss.network = self
self.subscribe(self.lss.LSS_RX_COBID, self.lss.on_message_received)
def subscribe(self, can_id, callback):
"""Listen for messages with a specific CAN ID.
:param int can_id:
The CAN ID to listen for.
:param callback:
Function to call when message is received.
"""
self.subscribers.setdefault(can_id, list())
if callback not in self.subscribers[can_id]:
self.subscribers[can_id].append(callback)
def unsubscribe(self, can_id, callback=None):
"""Stop listening for message.
:param int can_id:
The CAN ID from which to unsubscribe.
:param callback:
If given, remove only this callback. Otherwise all callbacks for
the CAN ID.
"""
if callback is None:
del self.subscribers[can_id]
else:
self.subscribers[can_id].remove(callback)
def connect(self, *args, **kwargs):
"""Connect to CAN bus using python-can.
Arguments are passed directly to :class:`can.BusABC`. Typically these
may include:
:param channel:
Backend specific channel for the CAN interface.
:param str bustype:
Name of the interface. See
`python-can manual <https://python-can.readthedocs.io/en/latest/configuration.html#interface-names>`__
for full list of supported interfaces.
:param int bitrate:
Bitrate in bit/s.
:raises can.CanError:
When connection fails.
"""
# If bitrate has not been specified, try to find one node where bitrate
# has been specified
if "bitrate" not in kwargs:
for node in self.nodes.values():
if node.object_dictionary.bitrate:
kwargs["bitrate"] = node.object_dictionary.bitrate
break
self.bus = can.interface.Bus(*args, **kwargs)
logger.info("Connected to '%s'", self.bus.channel_info)
self.notifier = can.Notifier(self.bus, self.listeners, 1)
return self
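# Example (illustrative sketch, not part of the original module; assumes a
# SocketCAN interface named 'can0'):
#
#   network = Network()
#   network.connect(channel='can0', bustype='socketcan', bitrate=250000)
#
# All arguments are forwarded unchanged to can.interface.Bus.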
def disconnect(self):
"""Disconnect from the CAN bus.
Must be overridden in a subclass if a custom interface is used.
"""
for node in self.nodes.values():
if hasattr(node, "pdo"):
node.pdo.stop()
if self.notifier is not None:
self.notifier.stop()
if self.bus is not None:
self.bus.shutdown()
self.bus = None
self.check()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.disconnect()
def add_node(self, node, object_dictionary=None, upload_eds=False):
"""Add a remote node to the network.
:param node:
Can be either an integer representing the node ID, a
:class:`canopen.RemoteNode` or :class:`canopen.LocalNode` object.
:param object_dictionary:
Can be either a string for specifying the path to an
Object Dictionary file or a
:class:`canopen.ObjectDictionary` object.
:param bool upload_eds:
Set ``True`` if EDS file should be uploaded from 0x1021.
:return:
The Node object that was added.
:rtype: canopen.RemoteNode
"""
if isinstance(node, int):
if upload_eds:
logger.info("Trying to read EDS from node %d", node)
object_dictionary = import_from_node(node, self)
node = RemoteNode(node, object_dictionary)
self[node.id] = node
return node
def create_node(self, node, object_dictionary=None):
"""Create a local node in the network.
:param node:
An integer representing the node ID.
:param object_dictionary:
Can be either a string for specifying the path to an
Object Dictionary file or a
:class:`canopen.ObjectDictionary` object.
:return:
The Node object that was added.
:rtype: canopen.LocalNode
"""
if isinstance(node, int):
node = LocalNode(node, object_dictionary)
self[node.id] = node
return node
def send_message(self, can_id, data, remote=False):
"""Send a raw CAN message to the network.
This method may be overridden in a subclass if you need to integrate
this library with a custom backend.
It is safe to call this from multiple threads.
:param int can_id:
CAN-ID of the message
:param data:
Data to be transmitted (anything that can be converted to bytes)
:param bool remote:
Set to True to send remote frame
:raises can.CanError:
When the message fails to be transmitted
"""
if not self.bus:
raise RuntimeError("Not connected to CAN bus")
msg = can.Message(is_extended_id=can_id > 0x7FF,
arbitration_id=can_id,
data=data,
is_remote_frame=remote)
with self.send_lock:
self.bus.send(msg)
self.check()
def send_periodic(self, can_id, data, period, remote=False):
"""Start sending a message periodically.
:param int can_id:
CAN-ID of the message
:param data:
Data to be transmitted (anything that can be converted to bytes)
:param float period:
Seconds between each message
:param bool remote:
indicates if the message frame is a remote request to the slave node
:return:
A task object with a ``.stop()`` method to stop the transmission
:rtype: canopen.network.PeriodicMessageTask
"""
return PeriodicMessageTask(can_id, data, period, self.bus, remote)
def notify(self, can_id, data, timestamp):
"""Feed incoming message to this library.
If a custom interface is used, this function must be called for each
message read from the CAN bus.
:param int can_id:
CAN-ID of the message
:param bytearray data:
Data part of the message (0 - 8 bytes)
:param float timestamp:
Timestamp of the message, preferably as a Unix timestamp
"""
if can_id in self.subscribers:
callbacks = self.subscribers[can_id]
for callback in callbacks:
callback(can_id, data, timestamp)
self.scanner.on_message_received(can_id)
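# Sketch of a custom read loop feeding notify() (illustrative only;
# my_backend and its read() call are assumed, not part of this module):
#
#   while running:
#       can_id, data, ts = my_backend.read()   # hypothetical backend call
#       network.notify(can_id, bytearray(data), ts)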
def check(self):
"""Check that no fatal error has occurred in the receiving thread.
If an exception caused the thread to terminate, that exception will be
raised.
"""
if self.notifier is not None:
exc = self.notifier.exception
if exc is not None:
logger.error("An error has caused receiving of messages to stop")
raise exc
def __getitem__(self, node_id):
return self.nodes[node_id]
def __setitem__(self, node_id, node):
assert node_id == node.id
self.nodes[node_id] = node
node.associate_network(self)
def __delitem__(self, node_id):
self.nodes[node_id].remove_network()
del self.nodes[node_id]
def __iter__(self):
return iter(self.nodes)
def __len__(self):
return len(self.nodes)
class PeriodicMessageTask(object):
"""
Task object to transmit a message periodically using python-can's
CyclicSendTask
"""
def __init__(self, can_id, data, period, bus, remote=False):
"""
:param int can_id:
CAN-ID of the message
:param data:
Data to be transmitted (anything that can be converted to bytes)
:param float period:
Seconds between each message
:param can.BusABC bus:
python-can bus to use for transmission
"""
self.bus = bus
self.period = period
self.msg = can.Message(is_extended_id=can_id > 0x7FF,
arbitration_id=can_id,
data=data, is_remote_frame=remote)
self._task = None
self._start()
def _start(self):
self._task = self.bus.send_periodic(self.msg, self.period)
def stop(self):
"""Stop transmission"""
self._task.stop()
def update(self, data):
"""Update data of message
:param data:
New data to transmit
"""
new_data = bytearray(data)
old_data = self.msg.data
self.msg.data = new_data
if hasattr(self._task, "modify_data"):
self._task.modify_data(self.msg)
elif new_data != old_data:
# Stop and start (will mess up period unfortunately)
self._task.stop()
self._start()
class MessageListener(Listener):
"""Listens for messages on CAN bus and feeds them to a Network instance.
:param canopen.Network network:
The network to notify on new messages.
"""
def __init__(self, network):
self.network = network
def on_message_received(self, msg):
if msg.is_error_frame or msg.is_remote_frame:
return
try:
self.network.notify(msg.arbitration_id, msg.data, msg.timestamp)
except Exception as e:
# Exceptions in any callbacks should not affect CAN processing
logger.error(str(e))
class NodeScanner(object):
"""Observes which nodes are present on the bus.
Listens for the following messages:
- Heartbeat (0x700)
- SDO response (0x580)
- TxPDO (0x180, 0x280, 0x380, 0x480)
- EMCY (0x80)
:param canopen.Network network:
The network to use when doing active searching.
"""
#: Activate or deactivate scanning
active = True
SERVICES = (0x700, 0x580, 0x180, 0x280, 0x380, 0x480, 0x80)
def __init__(self, network=None):
self.network = network
#: A :class:`list` of nodes discovered
self.nodes = []
def on_message_received(self, can_id):
service = can_id & 0x780
node_id = can_id & 0x7F
if node_id not in self.nodes and node_id != 0 and service in self.SERVICES:
self.nodes.append(node_id)
def reset(self):
"""Clear list of found nodes."""
self.nodes = []
def search(self, limit=127):
"""Search for nodes by sending SDO requests to all node IDs."""
if self.network is None:
raise RuntimeError("A Network is required to do active scanning")
sdo_req = b"\x40\x00\x10\x00\x00\x00\x00\x00"
for node_id in range(1, limit + 1):
self.network.send_message(0x600 + node_id, sdo_req)
| mit | -5,675,439,393,431,230,000 | 31.552083 | 114 | 0.5908 | false |
opreaalex/skeletout | templates/_app_name_/helpers.py | 1 | 2399 | # -*- coding: utf-8 -*-
"""
_app_name_.helpers
~~~~~~~~~~~~~~~~
_app_name_ helpers module
"""
import pkgutil
import importlib
from flask import Blueprint
from flask.json import JSONEncoder as BaseJSONEncoder
def register_blueprints(app, package_name, package_path):
"""Register all Blueprint instances on the specified Flask application found
in all modules for the specified package.
:param app: the Flask application
:param package_name: the package name
:param package_path: the package path
"""
rv = []
for _, name, _ in pkgutil.iter_modules(package_path):
m = importlib.import_module('%s.%s' % (package_name, name))
for item in dir(m):
item = getattr(m, item)
if isinstance(item, Blueprint):
app.register_blueprint(item)
rv.append(item)
return rv
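# Typical call from an app factory (illustrative; the 'api' package name
# is assumed, not part of this module):
#
#   from myapp import api   # a package whose modules define Blueprints
#   register_blueprints(app, api.__name__, api.__path__)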
class JSONEncoder(BaseJSONEncoder):
"""Custom :class:`JSONEncoder` which respects objects that include the
:class:`JsonSerializer` mixin.
"""
def default(self, obj):
if isinstance(obj, JsonSerializer):
return obj.to_json()
return super(JSONEncoder, self).default(obj)
class JsonSerializer(object):
"""A mixin that can be used to mark a SQLAlchemy model class which
implements a :func:`to_json` method. The :func:`to_json` method is used
in conjunction with the custom :class:`JSONEncoder` class. By default this
mixin will assume all properties of the SQLAlchemy model are to be visible
in the JSON output. Extend this class to customize which properties are
public, hidden or modified before being passed to the JSON serializer.
"""
__json_public__ = None
__json_hidden__ = None
__json_modifiers__ = None
def get_field_names(self):
for p in self.__mapper__.iterate_properties:
yield p.key
def to_json(self):
field_names = self.get_field_names()
public = self.__json_public__ or field_names
hidden = self.__json_hidden__ or []
modifiers = self.__json_modifiers__ or dict()
rv = dict()
for key in public:
rv[key] = getattr(self, key)
for key, modifier in modifiers.items():
value = getattr(self, key)
rv[key] = modifier(value, self)
for key in hidden:
rv.pop(key, None)
return rv
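# Illustrative model using the mixin (names assumed, not from this module):
#
#   class User(db.Model, JsonSerializer):
#       __json_hidden__ = ['password']
#       __json_modifiers__ = {'created_at': lambda value, obj: value.isoformat()}
#
# jsonify(user) then emits every column except 'password', with
# 'created_at' run through the modifier first.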
| mit | 7,411,269,388,410,093,000 | 30.155844 | 80 | 0.628178 | false |
dabrahams/zeroinstall | zeroinstall/injector/model.py | 1 | 45719 | """In-memory representation of interfaces and other data structures.
The objects in this module are used to build a representation of an XML interface
file in memory.
@see: L{reader} constructs these data-structures
@see: U{http://0install.net/interface-spec.html} description of the domain model
@var defaults: Default values for the 'default' attribute for <environment> bindings of
well-known variables.
"""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from zeroinstall import _
import os, re, locale
from logging import info, debug, warn
from zeroinstall import SafeException, version
from zeroinstall.injector.namespaces import XMLNS_IFACE
from zeroinstall.injector import qdom
# Element names for bindings in feed files
binding_names = frozenset(['environment', 'overlay', 'executable-in-path', 'executable-in-var'])
network_offline = 'off-line'
network_minimal = 'minimal'
network_full = 'full'
network_levels = (network_offline, network_minimal, network_full)
stability_levels = {} # Name -> Stability
defaults = {
'PATH': '/bin:/usr/bin',
'XDG_CONFIG_DIRS': '/etc/xdg',
'XDG_DATA_DIRS': '/usr/local/share:/usr/share',
}
class InvalidInterface(SafeException):
"""Raised when parsing an invalid feed."""
feed_url = None
def __init__(self, message, ex = None):
if ex:
try:
message += "\n\n(exact error: %s)" % ex
except:
# Some Python messages have type str but contain UTF-8 sequences.
# (e.g. IOException). Adding these to a Unicode 'message' (e.g.
# after gettext translation) will cause an error.
import codecs
decoder = codecs.lookup('utf-8')
decex = decoder.decode(str(ex), errors = 'replace')[0]
message += "\n\n(exact error: %s)" % decex
SafeException.__init__(self, message)
def __unicode__(self):
if hasattr(SafeException, '__unicode__'):
# Python >= 2.6
if self.feed_url:
return _('%s [%s]') % (SafeException.__unicode__(self), self.feed_url)
return SafeException.__unicode__(self)
else:
return unicode(SafeException.__str__(self))
def _split_arch(arch):
"""Split an arch into an (os, machine) tuple. Either or both parts may be None."""
if not arch:
return None, None
elif '-' not in arch:
raise SafeException(_("Malformed arch '%s'") % arch)
else:
osys, machine = arch.split('-', 1)
if osys == '*': osys = None
if machine == '*': machine = None
return osys, machine
def _join_arch(osys, machine):
if osys == machine == None: return None
return "%s-%s" % (osys or '*', machine or '*')
def _best_language_match(options):
(language, encoding) = locale.getlocale()
if language:
# xml:lang uses '-', while LANG uses '_'
language = language.replace('_', '-')
else:
language = 'en-US'
return (options.get(language, None) or # Exact match (language+region)
options.get(language.split('-', 1)[0], None) or # Matching language
options.get('en', None)) # English
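# Example: with locale ('en_GB', 'UTF-8') the lookup order is 'en-GB'
# (exact match), then 'en' (language only), then the 'en' fallback; e.g.
#   _best_language_match({'fr': 'F', 'en': 'E'}) -> 'E'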
class Stability(object):
"""A stability rating. Each implementation has an upstream stability rating and,
optionally, a user-set rating."""
__slots__ = ['level', 'name', 'description']
def __init__(self, level, name, description):
self.level = level
self.name = name
self.description = description
assert name not in stability_levels
stability_levels[name] = self
def __cmp__(self, other):
return cmp(self.level, other.level)
def __str__(self):
return self.name
def __repr__(self):
return _("<Stability: %s>") % self.description
def process_binding(e):
"""Internal"""
if e.name == 'environment':
mode = {
None: EnvironmentBinding.PREPEND,
'prepend': EnvironmentBinding.PREPEND,
'append': EnvironmentBinding.APPEND,
'replace': EnvironmentBinding.REPLACE,
}[e.getAttribute('mode')]
binding = EnvironmentBinding(e.getAttribute('name'),
insert = e.getAttribute('insert'),
default = e.getAttribute('default'),
value = e.getAttribute('value'),
mode = mode,
separator = e.getAttribute('separator'))
if not binding.name: raise InvalidInterface(_("Missing 'name' in binding"))
if binding.insert is None and binding.value is None:
raise InvalidInterface(_("Missing 'insert' or 'value' in binding"))
if binding.insert is not None and binding.value is not None:
raise InvalidInterface(_("Binding contains both 'insert' and 'value'"))
return binding
elif e.name == 'executable-in-path':
return ExecutableBinding(e, in_path = True)
elif e.name == 'executable-in-var':
return ExecutableBinding(e, in_path = False)
elif e.name == 'overlay':
return OverlayBinding(e.getAttribute('src'), e.getAttribute('mount-point'))
else:
raise Exception(_("Unknown binding type '%s'") % e.name)
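# Example feed XML handled above (illustrative):
#
#   <environment name="PYTHONPATH" insert="lib" mode="prepend"/>
#
# parses to EnvironmentBinding('PYTHONPATH', insert='lib', mode=PREPEND).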
def process_depends(item, local_feed_dir):
"""Internal"""
# Note: also called from selections
attrs = item.attrs
dep_iface = item.getAttribute('interface')
if not dep_iface:
raise InvalidInterface(_("Missing 'interface' on <%s>") % item.name)
if dep_iface.startswith('.'):
if local_feed_dir:
dep_iface = os.path.abspath(os.path.join(local_feed_dir, dep_iface))
# (updates the element too, in case we write it out again)
attrs['interface'] = dep_iface
else:
raise InvalidInterface(_('Relative interface URI "%s" in non-local feed') % dep_iface)
dependency = InterfaceDependency(dep_iface, element = item)
for e in item.childNodes:
if e.uri != XMLNS_IFACE: continue
if e.name in binding_names:
dependency.bindings.append(process_binding(e))
elif e.name == 'version':
dependency.restrictions.append(
VersionRangeRestriction(not_before = parse_version(e.getAttribute('not-before')),
before = parse_version(e.getAttribute('before'))))
return dependency
def N_(message): return message
insecure = Stability(0, N_('insecure'), _('This is a security risk'))
buggy = Stability(5, N_('buggy'), _('Known to have serious bugs'))
developer = Stability(10, N_('developer'), _('Work-in-progress - bugs likely'))
testing = Stability(20, N_('testing'), _('Stability unknown - please test!'))
stable = Stability(30, N_('stable'), _('Tested - no serious problems found'))
packaged = Stability(35, N_('packaged'), _('Supplied by the local package manager'))
preferred = Stability(40, N_('preferred'), _('Best of all - must be set manually'))
del N_
class Restriction(object):
"""A Restriction limits the allowed implementations of an Interface."""
__slots__ = []
def meets_restriction(self, impl):
"""Called by the L{solver.Solver} to check whether a particular implementation is acceptable.
@return: False if this implementation is not a possibility
@rtype: bool
"""
raise NotImplementedError(_("Abstract"))
class VersionRestriction(Restriction):
"""Only select implementations with a particular version number.
@since: 0.40"""
def __init__(self, version):
"""@param version: the required version number
@see: L{parse_version}; use this to pre-process the version number
"""
self.version = version
def meets_restriction(self, impl):
return impl.version == self.version
def __str__(self):
return _("(restriction: version = %s)") % format_version(self.version)
class VersionRangeRestriction(Restriction):
"""Only versions within the given range are acceptable"""
__slots__ = ['before', 'not_before']
def __init__(self, before, not_before):
"""@param before: chosen versions must be earlier than this
@param not_before: versions must be at least this high
@see: L{parse_version}; use this to pre-process the versions
"""
self.before = before
self.not_before = not_before
def meets_restriction(self, impl):
if self.not_before and impl.version < self.not_before:
return False
if self.before and impl.version >= self.before:
return False
return True
def __str__(self):
if self.not_before is not None or self.before is not None:
range = ''
if self.not_before is not None:
range += format_version(self.not_before) + ' <= '
range += 'version'
if self.before is not None:
range += ' < ' + format_version(self.before)
else:
range = 'none'
return _("(restriction: %s)") % range
class Binding(object):
"""Information about how the choice of a Dependency is made known
to the application being run."""
@property
def command(self):
""""Returns the name of the specific command needed by this binding, if any.
@since: 1.2"""
return None
class EnvironmentBinding(Binding):
"""Indicate the chosen implementation using an environment variable."""
__slots__ = ['name', 'insert', 'default', 'mode', 'value']
PREPEND = 'prepend'
APPEND = 'append'
REPLACE = 'replace'
def __init__(self, name, insert, default = None, mode = PREPEND, value=None, separator=None):
"""
mode argument added in version 0.28
value argument added in version 0.52
"""
self.name = name
self.insert = insert
self.default = default
self.mode = mode
self.value = value
if separator is None:
self.separator = os.pathsep
else:
self.separator = separator
def __str__(self):
return _("<environ %(name)s %(mode)s %(insert)s %(value)s>") % \
{'name': self.name, 'mode': self.mode, 'insert': self.insert, 'value': self.value}
__repr__ = __str__
def get_value(self, path, old_value):
"""Calculate the new value of the environment variable after applying this binding.
@param path: the path to the selected implementation
@param old_value: the current value of the environment variable
@return: the new value for the environment variable"""
if self.insert is not None:
extra = os.path.join(path, self.insert)
else:
assert self.value is not None
extra = self.value
if self.mode == EnvironmentBinding.REPLACE:
return extra
if old_value is None:
old_value = self.default or defaults.get(self.name, None)
if old_value is None:
return extra
if self.mode == EnvironmentBinding.PREPEND:
return extra + self.separator + old_value
else:
return old_value + self.separator + extra
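# Example (traced): EnvironmentBinding('PATH', insert='bin') with the
# default prepend mode gives
#   get_value('/cache/impl', '/usr/bin') -> '/cache/impl/bin:/usr/bin'
# (assuming os.pathsep == ':').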
def _toxml(self, doc, prefixes):
"""Create a DOM element for this binding.
@param doc: document to use to create the element
@return: the new element
"""
env_elem = doc.createElementNS(XMLNS_IFACE, 'environment')
env_elem.setAttributeNS(None, 'name', self.name)
if self.mode is not None:
env_elem.setAttributeNS(None, 'mode', self.mode)
if self.insert is not None:
env_elem.setAttributeNS(None, 'insert', self.insert)
else:
env_elem.setAttributeNS(None, 'value', self.value)
if self.default:
env_elem.setAttributeNS(None, 'default', self.default)
if self.separator:
env_elem.setAttributeNS(None, 'separator', self.separator)
return env_elem
class ExecutableBinding(Binding):
"""Make the chosen command available in $PATH.
@ivar in_path: True to add the named command to $PATH, False to store in named variable
@type in_path: bool
"""
__slots__ = ['qdom']
def __init__(self, qdom, in_path):
self.qdom = qdom
self.in_path = in_path
def __str__(self):
return str(self.qdom)
__repr__ = __str__
def _toxml(self, doc, prefixes):
return self.qdom.toDOM(doc, prefixes)
@property
def name(self):
return self.qdom.getAttribute('name')
@property
def command(self):
return self.qdom.getAttribute("command") or 'run'
class OverlayBinding(Binding):
"""Make the chosen implementation available by overlaying it onto another part of the file-system.
This is to support legacy programs which use hard-coded paths."""
__slots__ = ['src', 'mount_point']
def __init__(self, src, mount_point):
self.src = src
self.mount_point = mount_point
def __str__(self):
return _("<overlay %(src)s on %(mount_point)s>") % {'src': self.src or '.', 'mount_point': self.mount_point or '/'}
__repr__ = __str__
def _toxml(self, doc, prefixes):
"""Create a DOM element for this binding.
@param doc: document to use to create the element
@return: the new element
"""
env_elem = doc.createElementNS(XMLNS_IFACE, 'overlay')
if self.src is not None:
env_elem.setAttributeNS(None, 'src', self.src)
if self.mount_point is not None:
env_elem.setAttributeNS(None, 'mount-point', self.mount_point)
return env_elem
class Feed(object):
"""An interface's feeds are other interfaces whose implementations can also be
used as implementations of this interface."""
__slots__ = ['uri', 'os', 'machine', 'user_override', 'langs']
def __init__(self, uri, arch, user_override, langs = None):
self.uri = uri
# This indicates whether the feed comes from the user's overrides
# file. If true, writer.py will write it when saving.
self.user_override = user_override
self.os, self.machine = _split_arch(arch)
self.langs = langs
def __str__(self):
return "<Feed from %s>" % self.uri
__repr__ = __str__
arch = property(lambda self: _join_arch(self.os, self.machine))
class Dependency(object):
"""A Dependency indicates that an Implementation requires some additional
code to function. This is an abstract base class.
@ivar qdom: the XML element for this Dependency (since 0launch 0.51)
@type qdom: L{qdom.Element}
@ivar metadata: any extra attributes from the XML element
@type metadata: {str: str}
"""
__slots__ = ['qdom']
Essential = "essential"
Recommended = "recommended"
def __init__(self, element):
assert isinstance(element, qdom.Element), type(element) # Use InterfaceDependency instead!
self.qdom = element
@property
def metadata(self):
return self.qdom.attrs
@property
def importance(self):
return self.qdom.getAttribute("importance") or Dependency.Essential
def get_required_commands(self):
"""Return a list of command names needed by this dependency"""
return []
class InterfaceDependency(Dependency):
"""A Dependency on a Zero Install interface.
@ivar interface: the interface required by this dependency
@type interface: str
@ivar restrictions: a list of constraints on acceptable implementations
@type restrictions: [L{Restriction}]
@ivar bindings: how to make the choice of implementation known
@type bindings: [L{Binding}]
@since: 0.28
"""
__slots__ = ['interface', 'restrictions', 'bindings']
def __init__(self, interface, restrictions = None, element = None):
Dependency.__init__(self, element)
assert isinstance(interface, (str, unicode))
assert interface
self.interface = interface
if restrictions is None:
self.restrictions = []
else:
self.restrictions = restrictions
self.bindings = []
def __str__(self):
return _("<Dependency on %(interface)s; bindings: %(bindings)s%(restrictions)s>") % {'interface': self.interface, 'bindings': self.bindings, 'restrictions': self.restrictions}
def get_required_commands(self):
"""Return a list of command names needed by this dependency"""
if self.qdom.name == 'runner':
commands = [self.qdom.getAttribute('command') or 'run']
else:
commands = []
for b in self.bindings:
c = b.command
if c is not None:
commands.append(c)
return commands
@property
def command(self):
if self.qdom.name == 'runner':
return self.qdom.getAttribute('command') or 'run'
return None
class RetrievalMethod(object):
"""A RetrievalMethod provides a way to fetch an implementation."""
__slots__ = []
class DownloadSource(RetrievalMethod):
"""A DownloadSource provides a way to fetch an implementation."""
__slots__ = ['implementation', 'url', 'size', 'extract', 'start_offset', 'type']
def __init__(self, implementation, url, size, extract, start_offset = 0, type = None):
self.implementation = implementation
self.url = url
self.size = size
self.extract = extract
self.start_offset = start_offset
self.type = type # MIME type - see unpack.py
class Recipe(RetrievalMethod):
"""Get an implementation by following a series of steps.
@ivar size: the combined download sizes from all the steps
@type size: int
@ivar steps: the sequence of steps which must be performed
@type steps: [L{RetrievalMethod}]"""
__slots__ = ['steps']
def __init__(self):
self.steps = []
size = property(lambda self: sum([x.size for x in self.steps]))
class DistributionSource(RetrievalMethod):
"""A package that is installed using the distribution's tools (including PackageKit).
@ivar install: a function to call to install this package
@type install: (L{handler.Handler}) -> L{tasks.Blocker}
@ivar package_id: the package name, in a form recognised by the distribution's tools
@type package_id: str
@ivar size: the download size in bytes
@type size: int
@ivar needs_confirmation: whether the user should be asked to confirm before calling install()
@type needs_confirmation: bool"""
__slots__ = ['package_id', 'size', 'install', 'needs_confirmation']
def __init__(self, package_id, size, install, needs_confirmation = True):
RetrievalMethod.__init__(self)
self.package_id = package_id
self.size = size
self.install = install
self.needs_confirmation = needs_confirmation
class Command(object):
"""A Command is a way of running an Implementation as a program."""
__slots__ = ['qdom', '_depends', '_local_dir', '_runner', '_bindings']
def __init__(self, qdom, local_dir):
"""@param qdom: the <command> element
@param local_dir: the directory containing the feed (for relative dependencies), or None if not local
"""
assert qdom.name == 'command', 'not <command>: %s' % qdom
self.qdom = qdom
self._local_dir = local_dir
self._depends = None
self._bindings = None
path = property(lambda self: self.qdom.attrs.get("path", None))
def _toxml(self, doc, prefixes):
return self.qdom.toDOM(doc, prefixes)
@property
def requires(self):
if self._depends is None:
self._runner = None
depends = []
for child in self.qdom.childNodes:
if child.name == 'requires':
dep = process_depends(child, self._local_dir)
depends.append(dep)
elif child.name == 'runner':
if self._runner:
raise InvalidInterface(_("Multiple <runner>s in <command>!"))
dep = process_depends(child, self._local_dir)
depends.append(dep)
self._runner = dep
self._depends = depends
return self._depends
def get_runner(self):
self.requires # (sets _runner)
return self._runner
def __str__(self):
return str(self.qdom)
@property
def bindings(self):
"""@since: 1.3"""
if self._bindings is None:
bindings = []
for e in self.qdom.childNodes:
if e.uri != XMLNS_IFACE: continue
if e.name in binding_names:
bindings.append(process_binding(e))
self._bindings = bindings
return self._bindings
class Implementation(object):
"""An Implementation is a package which implements an Interface.
@ivar download_sources: list of methods of getting this implementation
@type download_sources: [L{RetrievalMethod}]
@ivar feed: the feed owning this implementation (since 0.32)
@type feed: [L{ZeroInstallFeed}]
@ivar bindings: how to tell this component where it itself is located (since 0.31)
@type bindings: [Binding]
@ivar upstream_stability: the stability reported by the packager
@type upstream_stability: [insecure | buggy | developer | testing | stable | packaged]
@ivar user_stability: the stability as set by the user
@type upstream_stability: [insecure | buggy | developer | testing | stable | packaged | preferred]
@ivar langs: natural languages supported by this package
@type langs: str
@ivar requires: interfaces this package depends on
@type requires: [L{Dependency}]
@ivar commands: ways to execute as a program
@type commands: {str: Command}
@ivar metadata: extra metadata from the feed
@type metadata: {"[URI ]localName": str}
@ivar id: a unique identifier for this Implementation
@ivar version: a parsed version number
@ivar released: release date
@ivar local_path: the directory containing this local implementation, or None if it isn't local (id isn't a path)
@type local_path: str | None
@ivar requires_root_install: whether the user will need admin rights to use this
@type requires_root_install: bool
"""
# Note: user_stability shouldn't really be here
__slots__ = ['upstream_stability', 'user_stability', 'langs',
'requires', 'metadata', 'download_sources', 'commands',
'id', 'feed', 'version', 'released', 'bindings', 'machine']
def __init__(self, feed, id):
assert id
self.feed = feed
self.id = id
self.user_stability = None
self.upstream_stability = None
self.metadata = {} # [URI + " "] + localName -> value
self.requires = []
self.version = None
self.released = None
self.download_sources = []
self.langs = ""
self.machine = None
self.bindings = []
self.commands = {}
def get_stability(self):
return self.user_stability or self.upstream_stability or testing
def __str__(self):
return self.id
def __repr__(self):
return "v%s (%s)" % (self.get_version(), self.id)
def __cmp__(self, other):
"""Newer versions come first"""
d = cmp(other.version, self.version)
if d: return d
# If the version number is the same, just give a stable sort order, and
# ensure that two different implementations don't compare equal.
d = cmp(other.feed.url, self.feed.url)
if d: return d
return cmp(other.id, self.id)
def get_version(self):
"""Return the version as a string.
@see: L{format_version}
"""
return format_version(self.version)
arch = property(lambda self: _join_arch(self.os, self.machine))
os = None
local_path = None
digests = None
requires_root_install = False
def _get_main(self):
""""@deprecated: use commands["run"] instead"""
main = self.commands.get("run", None)
if main is not None:
return main.path
return None
def _set_main(self, path):
""""@deprecated: use commands["run"] instead"""
if path is None:
if "run" in self.commands:
del self.commands["run"]
else:
self.commands["run"] = Command(qdom.Element(XMLNS_IFACE, 'command', {'path': path, 'name': 'run'}), None)
main = property(_get_main, _set_main)
def is_available(self, stores):
"""Is this Implementation available locally?
(a local implementation, an installed distribution package, or a cached ZeroInstallImplementation)
@rtype: bool
@since: 0.53
"""
raise NotImplementedError("abstract")
class DistributionImplementation(Implementation):
"""An implementation provided by the distribution. Information such as the version
comes from the package manager.
@ivar package_implementation: the <package-implementation> element that generated this impl (since 1.7)
@type package_implementation: L{qdom.Element}
@since: 0.28"""
__slots__ = ['distro', 'installed', 'package_implementation']
def __init__(self, feed, id, distro, package_implementation = None):
assert id.startswith('package:')
Implementation.__init__(self, feed, id)
self.distro = distro
self.installed = False
self.package_implementation = package_implementation
if package_implementation:
for child in package_implementation.childNodes:
if child.uri != XMLNS_IFACE: continue
if child.name == 'command':
command_name = child.attrs.get('name', None)
if not command_name:
raise InvalidInterface('Missing name for <command>')
self.commands[command_name] = Command(child, local_dir = None)
@property
def requires_root_install(self):
return not self.installed
def is_available(self, stores):
return self.installed
class ZeroInstallImplementation(Implementation):
"""An implementation where all the information comes from Zero Install.
@ivar digests: a list of "algorith=value" strings (since 0.45)
@type digests: [str]
@since: 0.28"""
__slots__ = ['os', 'size', 'digests', 'local_path']
def __init__(self, feed, id, local_path):
"""id can be a local path (string starting with /) or a manifest hash (eg "sha1=XXX")"""
assert not id.startswith('package:'), id
Implementation.__init__(self, feed, id)
self.size = None
self.os = None
self.digests = []
self.local_path = local_path
# Deprecated
dependencies = property(lambda self: dict([(x.interface, x) for x in self.requires
if isinstance(x, InterfaceDependency)]))
def add_download_source(self, url, size, extract, start_offset = 0, type = None):
"""Add a download source."""
self.download_sources.append(DownloadSource(self, url, size, extract, start_offset, type))
def set_arch(self, arch):
self.os, self.machine = _split_arch(arch)
arch = property(lambda self: _join_arch(self.os, self.machine), set_arch)
def is_available(self, stores):
if self.local_path is not None:
return os.path.exists(self.local_path)
if self.digests:
path = stores.lookup_maybe(self.digests)
return path is not None
return False # (0compile creates fake entries with no digests)
class Interface(object):
"""An Interface represents some contract of behaviour.
@ivar uri: the URI for this interface.
@ivar stability_policy: user's configured policy.
Implementations at this level or higher are preferred.
Lower levels are used only if there is no other choice.
"""
__slots__ = ['uri', 'stability_policy', 'extra_feeds']
implementations = property(lambda self: self._main_feed.implementations)
name = property(lambda self: self._main_feed.name)
description = property(lambda self: self._main_feed.description)
summary = property(lambda self: self._main_feed.summary)
last_modified = property(lambda self: self._main_feed.last_modified)
feeds = property(lambda self: self.extra_feeds + self._main_feed.feeds)
metadata = property(lambda self: self._main_feed.metadata)
last_checked = property(lambda self: self._main_feed.last_checked)
def __init__(self, uri):
assert uri
if uri.startswith('http:') or uri.startswith('https:') or os.path.isabs(uri):
self.uri = uri
else:
raise SafeException(_("Interface name '%s' doesn't start "
"with 'http:' or 'https:'") % uri)
self.reset()
def _get_feed_for(self):
retval = {}
for key in self._main_feed.feed_for:
retval[key] = True
return retval
feed_for = property(_get_feed_for) # Deprecated (used by 0publish)
def reset(self):
self.extra_feeds = []
self.stability_policy = None
def get_name(self):
from zeroinstall.injector.iface_cache import iface_cache
feed = iface_cache.get_feed(self.uri)
if feed:
return feed.get_name()
return '(' + os.path.basename(self.uri) + ')'
def __repr__(self):
return _("<Interface %s>") % self.uri
def set_stability_policy(self, new):
assert new is None or isinstance(new, Stability)
self.stability_policy = new
def get_feed(self, url):
#import warnings
#warnings.warn("use iface_cache.get_feed instead", DeprecationWarning, 2)
for x in self.extra_feeds:
if x.uri == url:
return x
#return self._main_feed.get_feed(url)
return None
def get_metadata(self, uri, name):
return self._main_feed.get_metadata(uri, name)
@property
def _main_feed(self):
#import warnings
#warnings.warn("use the feed instead", DeprecationWarning, 3)
from zeroinstall.injector import policy
iface_cache = policy.get_deprecated_singleton_config().iface_cache
feed = iface_cache.get_feed(self.uri)
if feed is None:
return _dummy_feed
return feed
def _merge_attrs(attrs, item):
"""Add each attribute of item to a copy of attrs and return the copy.
@type attrs: {str: str}
@type item: L{qdom.Element}
@rtype: {str: str}
"""
new = attrs.copy()
for a in item.attrs:
new[str(a)] = item.attrs[a]
return new
def _get_long(elem, attr_name):
val = elem.getAttribute(attr_name)
if val is not None:
try:
val = int(val)
except ValueError:
raise SafeException(_("Invalid value for integer attribute '%(attribute_name)s': %(value)s") % {'attribute_name': attr_name, 'value': val})
return val
class ZeroInstallFeed(object):
"""A feed lists available implementations of an interface.
@ivar url: the URL for this feed
@ivar implementations: Implementations in this feed, indexed by ID
@type implementations: {str: L{Implementation}}
@ivar name: human-friendly name
@ivar summaries: short textual description (in various languages, since 0.49)
@type summaries: {str: str}
@ivar descriptions: long textual description (in various languages, since 0.49)
@type descriptions: {str: str}
@ivar last_modified: timestamp on signature
@ivar last_checked: time feed was last successfully downloaded and updated
@ivar local_path: the path of this local feed, or None if remote (since 1.7)
@type local_path: str | None
@ivar feeds: list of <feed> elements in this feed
@type feeds: [L{Feed}]
@ivar feed_for: interfaces for which this could be a feed
@type feed_for: set(str)
@ivar metadata: extra elements we didn't understand
"""
# _main is deprecated
__slots__ = ['url', 'implementations', 'name', 'descriptions', 'first_description', 'summaries', 'first_summary', '_package_implementations',
'last_checked', 'last_modified', 'feeds', 'feed_for', 'metadata', 'local_path']
def __init__(self, feed_element, local_path = None, distro = None):
"""Create a feed object from a DOM.
@param feed_element: the root element of a feed file
@type feed_element: L{qdom.Element}
@param local_path: the pathname of this local feed, or None for remote feeds"""
self.local_path = local_path
self.implementations = {}
self.name = None
self.summaries = {} # { lang: str }
self.first_summary = None
self.descriptions = {} # { lang: str }
self.first_description = None
self.last_modified = None
self.feeds = []
self.feed_for = set()
self.metadata = []
self.last_checked = None
self._package_implementations = []
if distro is not None:
import warnings
warnings.warn("distro argument is now ignored", DeprecationWarning, 2)
if feed_element is None:
return # XXX subclass?
assert feed_element.name in ('interface', 'feed'), "Root element should be <interface>, not %s" % feed_element
assert feed_element.uri == XMLNS_IFACE, "Wrong namespace on root element: %s" % feed_element.uri
main = feed_element.getAttribute('main')
#if main: warn("Setting 'main' on the root element is deprecated. Put it on a <group> instead")
if local_path:
self.url = local_path
local_dir = os.path.dirname(local_path)
else:
assert local_path is None
self.url = feed_element.getAttribute('uri')
if not self.url:
raise InvalidInterface(_("<interface> uri attribute missing"))
local_dir = None # Can't have relative paths
min_injector_version = feed_element.getAttribute('min-injector-version')
if min_injector_version:
if parse_version(min_injector_version) > parse_version(version):
raise InvalidInterface(_("This feed requires version %(min_version)s or later of "
"Zero Install, but I am only version %(version)s. "
"You can get a newer version from http://0install.net") %
{'min_version': min_injector_version, 'version': version})
for x in feed_element.childNodes:
if x.uri != XMLNS_IFACE:
self.metadata.append(x)
continue
if x.name == 'name':
self.name = x.content
elif x.name == 'description':
if self.first_description == None:
self.first_description = x.content
self.descriptions[x.attrs.get("http://www.w3.org/XML/1998/namespace lang", 'en')] = x.content
elif x.name == 'summary':
if self.first_summary == None:
self.first_summary = x.content
self.summaries[x.attrs.get("http://www.w3.org/XML/1998/namespace lang", 'en')] = x.content
elif x.name == 'feed-for':
feed_iface = x.getAttribute('interface')
if not feed_iface:
raise InvalidInterface(_('Missing "interface" attribute in <feed-for>'))
self.feed_for.add(feed_iface)
# Bug report from a Debian/stable user that --feed gets the wrong value.
# Can't reproduce (even in a Debian/stable chroot), but add some logging here
# in case it happens again.
debug(_("Is feed-for %s"), feed_iface)
elif x.name == 'feed':
feed_src = x.getAttribute('src')
if not feed_src:
raise InvalidInterface(_('Missing "src" attribute in <feed>'))
if feed_src.startswith('http:') or feed_src.startswith('https:') or local_path:
langs = x.getAttribute('langs')
if langs: langs = langs.replace('_', '-')
self.feeds.append(Feed(feed_src, x.getAttribute('arch'), False, langs = langs))
else:
raise InvalidInterface(_("Invalid feed URL '%s'") % feed_src)
else:
self.metadata.append(x)
if not self.name:
raise InvalidInterface(_("Missing <name> in feed"))
if not self.summary:
raise InvalidInterface(_("Missing <summary> in feed"))
def process_group(group, group_attrs, base_depends, base_bindings, base_commands):
for item in group.childNodes:
if item.uri != XMLNS_IFACE: continue
if item.name not in ('group', 'implementation', 'package-implementation'):
continue
# We've found a group or implementation. Scan for dependencies,
# bindings and commands. Doing this here means that:
# - We can share the code for groups and implementations here.
# - The order doesn't matter, because these get processed first.
# A side-effect is that the document root cannot contain
# these.
depends = base_depends[:]
bindings = base_bindings[:]
commands = base_commands.copy()
for attr, command in [('main', 'run'),
('self-test', 'test')]:
value = item.attrs.get(attr, None)
if value is not None:
commands[command] = Command(qdom.Element(XMLNS_IFACE, 'command', {'name': command, 'path': value}), None)
for child in item.childNodes:
if child.uri != XMLNS_IFACE: continue
if child.name == 'requires':
dep = process_depends(child, local_dir)
depends.append(dep)
elif child.name == 'command':
command_name = child.attrs.get('name', None)
if not command_name:
raise InvalidInterface('Missing name for <command>')
commands[command_name] = Command(child, local_dir)
elif child.name in binding_names:
bindings.append(process_binding(child))
compile_command = item.attrs.get('http://zero-install.sourceforge.net/2006/namespaces/0compile command')
if compile_command is not None:
commands['compile'] = Command(qdom.Element(XMLNS_IFACE, 'command', {'name': 'compile', 'shell-command': compile_command}), None)
item_attrs = _merge_attrs(group_attrs, item)
if item.name == 'group':
process_group(item, item_attrs, depends, bindings, commands)
elif item.name == 'implementation':
process_impl(item, item_attrs, depends, bindings, commands)
elif item.name == 'package-implementation':
if depends:
warn("A <package-implementation> with dependencies in %s!", self.url)
self._package_implementations.append((item, item_attrs))
else:
assert 0
def process_impl(item, item_attrs, depends, bindings, commands):
id = item.getAttribute('id')
if id is None:
raise InvalidInterface(_("Missing 'id' attribute on %s") % item)
local_path = item_attrs.get('local-path')
if local_dir and local_path:
abs_local_path = os.path.abspath(os.path.join(local_dir, local_path))
impl = ZeroInstallImplementation(self, id, abs_local_path)
elif local_dir and (id.startswith('/') or id.startswith('.')):
# For old feeds
id = os.path.abspath(os.path.join(local_dir, id))
impl = ZeroInstallImplementation(self, id, id)
else:
impl = ZeroInstallImplementation(self, id, None)
if '=' in id:
# In older feeds, the ID was the (single) digest
impl.digests.append(id)
if id in self.implementations:
warn(_("Duplicate ID '%(id)s' in feed '%(feed)s'"), {'id': id, 'feed': self})
self.implementations[id] = impl
impl.metadata = item_attrs
try:
version_mod = item_attrs.get('version-modifier', None)
if version_mod:
item_attrs['version'] += version_mod
del item_attrs['version-modifier']
version = item_attrs['version']
except KeyError:
raise InvalidInterface(_("Missing version attribute"))
impl.version = parse_version(version)
impl.commands = commands
impl.released = item_attrs.get('released', None)
impl.langs = item_attrs.get('langs', '').replace('_', '-')
size = item.getAttribute('size')
if size:
impl.size = int(size)
impl.arch = item_attrs.get('arch', None)
try:
stability = stability_levels[str(item_attrs['stability'])]
except KeyError:
stab = str(item_attrs['stability'])
if stab != stab.lower():
raise InvalidInterface(_('Stability "%s" invalid - use lower case!') % item_attrs['stability'])
raise InvalidInterface(_('Stability "%s" invalid') % item_attrs['stability'])
if stability >= preferred:
raise InvalidInterface(_("Upstream can't set stability to preferred!"))
impl.upstream_stability = stability
impl.bindings = bindings
impl.requires = depends
for elem in item.childNodes:
if elem.uri != XMLNS_IFACE: continue
if elem.name == 'archive':
url = elem.getAttribute('href')
if not url:
raise InvalidInterface(_("Missing href attribute on <archive>"))
size = elem.getAttribute('size')
if not size:
raise InvalidInterface(_("Missing size attribute on <archive>"))
impl.add_download_source(url = url, size = int(size),
extract = elem.getAttribute('extract'),
start_offset = _get_long(elem, 'start-offset'),
type = elem.getAttribute('type'))
elif elem.name == 'manifest-digest':
for aname, avalue in elem.attrs.iteritems():
if ' ' not in aname:
impl.digests.append('%s=%s' % (aname, avalue))
elif elem.name == 'recipe':
recipe = Recipe()
for recipe_step in elem.childNodes:
if recipe_step.uri == XMLNS_IFACE and recipe_step.name == 'archive':
url = recipe_step.getAttribute('href')
if not url:
raise InvalidInterface(_("Missing href attribute on <archive>"))
size = recipe_step.getAttribute('size')
if not size:
raise InvalidInterface(_("Missing size attribute on <archive>"))
recipe.steps.append(DownloadSource(None, url = url, size = int(size),
extract = recipe_step.getAttribute('extract'),
start_offset = _get_long(recipe_step, 'start-offset'),
type = recipe_step.getAttribute('type')))
else:
info(_("Unknown step '%s' in recipe; skipping recipe"), recipe_step.name)
break
else:
impl.download_sources.append(recipe)
root_attrs = {'stability': 'testing'}
root_commands = {}
if main:
info("Note: @main on document element is deprecated in %s", self)
root_commands['run'] = Command(qdom.Element(XMLNS_IFACE, 'command', {'path': main, 'name': 'run'}), None)
process_group(feed_element, root_attrs, [], [], root_commands)
def get_distro_feed(self):
"""Does this feed contain any <pacakge-implementation> elements?
i.e. is it worth asking the package manager for more information?
@return: the URL of the virtual feed, or None
@since: 0.49"""
if self._package_implementations:
return "distribution:" + self.url
return None
def get_package_impls(self, distro):
"""Find the best <pacakge-implementation> element(s) for the given distribution.
@param distro: the distribution to use to rate them
@type distro: L{distro.Distribution}
@return: a list of tuples for the best ranked elements
@rtype: [str]
@since: 0.49"""
best_score = 0
best_impls = []
for item, item_attrs in self._package_implementations:
distro_names = item_attrs.get('distributions', '')
for distro_name in distro_names.split(' '):
score = distro.get_score(distro_name)
if score > best_score:
best_score = score
best_impls = []
if score == best_score:
best_impls.append((item, item_attrs))
return best_impls
def get_name(self):
return self.name or '(' + os.path.basename(self.url) + ')'
def __repr__(self):
return _("<Feed %s>") % self.url
def set_stability_policy(self, new):
assert new is None or isinstance(new, Stability)
self.stability_policy = new
def get_feed(self, url):
for x in self.feeds:
if x.uri == url:
return x
return None
def add_metadata(self, elem):
self.metadata.append(elem)
def get_metadata(self, uri, name):
"""Return a list of interface metadata elements with this name and namespace URI."""
return [m for m in self.metadata if m.name == name and m.uri == uri]
@property
def summary(self):
return _best_language_match(self.summaries) or self.first_summary
@property
def description(self):
return _best_language_match(self.descriptions) or self.first_description
def get_replaced_by(self):
"""Return the URI of the interface that replaced the one with the URI of this feed's URL.
This is the value of the feed's <replaced-by interface'...'/> element.
@return: the new URI, or None if it hasn't been replaced
@since: 1.7"""
for child in self.metadata:
if child.uri == XMLNS_IFACE and child.name == 'replaced-by':
new_uri = child.getAttribute('interface')
if new_uri and (new_uri.startswith('http:') or new_uri.startswith('https:') or self.local_path):
return new_uri
return None
class DummyFeed(object):
"""Temporary class used during API transition."""
last_modified = None
name = '-'
last_checked = property(lambda self: None)
implementations = property(lambda self: {})
feeds = property(lambda self: [])
summary = property(lambda self: '-')
description = property(lambda self: '')
def get_name(self): return self.name
def get_feed(self, url): return None
def get_metadata(self, uri, name): return []
_dummy_feed = DummyFeed()
def unescape(uri):
"""Convert each %20 to a space, etc.
@rtype: str"""
uri = uri.replace('#', '/')
if '%' not in uri: return uri
return re.sub('%[0-9a-fA-F][0-9a-fA-F]',
lambda match: chr(int(match.group(0)[1:], 16)),
uri).decode('utf-8')
def escape(uri):
"""Convert each space to %20, etc
@rtype: str"""
return re.sub('[^-_.a-zA-Z0-9]',
lambda match: '%%%02x' % ord(match.group(0)),
uri.encode('utf-8'))
def _pretty_escape(uri):
"""Convert each space to %20, etc
: is preserved and / becomes #. This makes for nicer strings,
and may replace L{escape} everywhere in future.
@rtype: str"""
if os.name == "posix":
# Only preserve : on Posix systems
preserveRegex = '[^-_.a-zA-Z0-9:/]'
else:
# Other OSes may not allow the : character in file names
preserveRegex = '[^-_.a-zA-Z0-9/]'
return re.sub(preserveRegex,
lambda match: '%%%02x' % ord(match.group(0)),
uri.encode('utf-8')).replace('/', '#')
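# Examples (traced, on POSIX):
#   escape('http://example.com/foo.xml')
#       -> 'http%3a%2f%2fexample.com%2ffoo.xml'
#   _pretty_escape('http://example.com/foo.xml')
#       -> 'http:##example.com#foo.xml'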
def canonical_iface_uri(uri):
"""If uri is a relative path, convert to an absolute one.
A "file:///foo" URI is converted to "/foo".
An "alias:prog" URI expands to the URI in the 0alias script
Otherwise, return it unmodified.
@rtype: str
@raise SafeException: if uri isn't valid
"""
if uri.startswith('http://') or uri.startswith('https://'):
if uri.count("/") < 3:
raise SafeException(_("Missing / after hostname in URI '%s'") % uri)
return uri
elif uri.startswith('file:///'):
path = uri[7:]
elif uri.startswith('file:'):
if uri[5] == '/':
raise SafeException(_('Use file:///path for absolute paths, not {uri}').format(uri = uri))
path = os.path.abspath(uri[5:])
elif uri.startswith('alias:'):
from zeroinstall import alias, support
alias_prog = uri[6:]
if not os.path.isabs(alias_prog):
full_path = support.find_in_path(alias_prog)
if not full_path:
raise alias.NotAnAliasScript("Not found in $PATH: " + alias_prog)
else:
full_path = alias_prog
return alias.parse_script(full_path).uri
else:
path = os.path.realpath(uri)
if os.path.isfile(path):
return path
raise SafeException(_("Bad interface name '%(uri)s'.\n"
"(doesn't start with 'http:', and "
"doesn't exist as a local file '%(interface_uri)s' either)") %
{'uri': uri, 'interface_uri': path})
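# Examples (illustrative; local paths assumed to exist):
#   canonical_iface_uri('http://example.com/prog.xml') -> unchanged
#   canonical_iface_uri('file:///opt/feeds/prog.xml')  -> '/opt/feeds/prog.xml'
#   canonical_iface_uri('prog.xml')                    -> absolute real path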
_version_mod_to_value = {
'pre': -2,
'rc': -1,
'': 0,
'post': 1,
}
# Reverse mapping
_version_value_to_mod = {}
for x in _version_mod_to_value: _version_value_to_mod[_version_mod_to_value[x]] = x
del x
_version_re = re.compile('-([a-z]*)')
def parse_version(version_string):
"""Convert a version string to an internal representation.
The parsed format can be compared quickly using the standard Python functions.
- Version := DottedList ("-" Mod DottedList?)*
- DottedList := (Integer ("." Integer)*)
@rtype: tuple (opaque)
@raise SafeException: if the string isn't a valid version
@since: 0.24 (moved from L{reader}, from where it is still available)."""
if version_string is None: return None
parts = _version_re.split(version_string)
if parts[-1] == '':
del parts[-1] # Ends with a modifier
else:
parts.append('')
if not parts:
raise SafeException(_("Empty version string!"))
l = len(parts)
try:
for x in range(0, l, 2):
part = parts[x]
if part:
parts[x] = map(int, parts[x].split('.'))
else:
parts[x] = [] # (because ''.split('.') == [''], not [])
for x in range(1, l, 2):
parts[x] = _version_mod_to_value[parts[x]]
return parts
except ValueError as ex:
raise SafeException(_("Invalid version format in '%(version_string)s': %(exception)s") % {'version_string': version_string, 'exception': ex})
except KeyError as ex:
raise SafeException(_("Invalid version modifier in '%(version_string)s': %(exception)s") % {'version_string': version_string, 'exception': ex})
def format_version(version):
"""Format a parsed version for display. Undoes the effect of L{parse_version}.
@see: L{Implementation.get_version}
@rtype: str
@since: 0.24"""
version = version[:]
l = len(version)
for x in range(0, l, 2):
version[x] = '.'.join(map(str, version[x]))
for x in range(1, l, 2):
version[x] = '-' + _version_value_to_mod[version[x]]
if version[-1] == '-': del version[-1]
return ''.join(version)
| lgpl-2.1 | -2,451,330,524,096,340,000 | 33.426958 | 177 | 0.683326 | false |
Sravan2j/DIGITS | digits/model/images/classification/test_views.py | 1 | 6914 | # Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
import re
import os
import tempfile
import unittest
import mock
import flask
from bs4 import BeautifulSoup
try:
import caffe_pb2
except ImportError:
# See issue #32
from caffe.proto import caffe_pb2
from digits.config import config_value
from . import views as _
import digits
from digits.webapp import app, scheduler
class BaseTestCase(object):
@classmethod
def setupClass(cls):
app.config['TESTING'] = True
cls.app = app.test_client()
cls.server = 'http://0.0.0.0:5000/'
cls.jobs = []
scheduler.running = True
@classmethod
def tearDownClass(cls):
scheduler.jobs = []
scheduler.running = False
class TestCaffeCreate(BaseTestCase):
@staticmethod
def get_error_msg(html):
s = BeautifulSoup(html)
div = s.select('div.alert-danger')
if div:
return str(div[0])
else:
return None
@classmethod
def setupClass(cls):
super(TestCaffeCreate, cls).setupClass()
with app.test_request_context():
cls.url = flask.url_for('image_classification_model_create')
dj = mock.Mock(spec=digits.dataset.ImageClassificationDatasetJob)
dj.status.is_running.return_value = True
dj.id.return_value = 'dataset'
dj.name.return_value = ''
mj = mock.Mock(spec=digits.model.ImageClassificationModelJob)
mj.id.return_value = 'model'
mj.name.return_value = ''
_, cls.temp_snapshot_path = tempfile.mkstemp() # instead of using a dummy hardcoded value as the snapshot path, a temp file path is used to avoid the file-does-not-exist exception in views.py.
mj.train_task.return_value.snapshots = [(cls.temp_snapshot_path, 1)]
mj.train_task.return_value.network = caffe_pb2.NetParameter()
digits.webapp.scheduler.jobs = [dj, mj]
@classmethod
def tearDownClass(cls):
super(TestCaffeCreate, cls).tearDownClass()
try:
os.remove(cls.temp_snapshot_path)
except OSError:
pass
def test_empty_request(self):
"""empty request"""
rv = self.app.post(self.url)
assert rv.status_code == 400
assert 'model_name' in self.get_error_msg(rv.data)
def test_crop_size(self):
"""custom crop size"""
rv = self.app.post(self.url, data={
'method': 'standard',
'dataset': 'dataset',
'crop_size': 12,
'standard_networks': 'lenet',
'model_name': 'test',
'framework' : 'caffe'
})
if not (300 <= rv.status_code <= 310):
msg = self.get_error_msg(rv.data)
if msg is not None:
raise RuntimeError(msg)
else:
raise RuntimeError('Failed to create model')
assert scheduler.jobs[-1].train_task().crop_size == 12, \
'crop size not saved properly'
def test_previous_network_pretrained_model(self):
"""previous network, pretrained model"""
rv = self.app.post(self.url, data={
'method': 'previous',
'model_name': 'test',
'dataset': 'dataset',
'previous_networks': 'model',
'model-snapshot' : 1,
'framework' : 'caffe'
})
if not (300 <= rv.status_code <= 310):
msg = self.get_error_msg(rv.data)
if msg is not None:
raise RuntimeError(msg)
else:
raise RuntimeError('Failed to create model')
assert scheduler.jobs[-1].train_task().pretrained_model == self.temp_snapshot_path, \
'pretrained model not saved properly'
class TestTorchCreate(BaseTestCase):
@staticmethod
def get_error_msg(html):
s = BeautifulSoup(html)
div = s.select('div.alert-danger')
if div:
return str(div[0])
else:
return None
@classmethod
def setupClass(cls):
super(TestTorchCreate, cls).setupClass()
if config_value('torch_root') is None:
raise unittest.SkipTest('Torch not found')
with app.test_request_context():
cls.url = flask.url_for('image_classification_model_create')
dj = mock.Mock(spec=digits.dataset.ImageClassificationDatasetJob)
dj.status.is_running.return_value = True
dj.id.return_value = 'dataset'
dj.name.return_value = ''
mj = mock.Mock(spec=digits.model.ImageClassificationModelJob)
mj.id.return_value = 'model'
mj.name.return_value = ''
        _, cls.temp_snapshot_path = tempfile.mkstemp()  # use a real temp file instead of a hard-coded dummy path so views.py does not raise a file-does-not-exist exception
mj.train_task.return_value.snapshots = [(cls.temp_snapshot_path, 1)]
#mj.train_task.return_value.network = caffe_pb2.NetParameter()
digits.webapp.scheduler.jobs = [dj, mj]
@classmethod
def tearDownClass(cls):
super(TestTorchCreate, cls).tearDownClass()
try:
os.remove(cls.temp_snapshot_path)
except OSError:
pass
def test_empty_request(self):
"""empty request"""
rv = self.app.post(self.url)
assert rv.status_code == 400
assert 'model_name' in self.get_error_msg(rv.data)
def test_crop_size(self):
"""custom crop size"""
rv = self.app.post(self.url, data={
'method': 'standard',
'dataset': 'dataset',
'crop_size': 12,
'standard_networks': 'lenet',
'model_name': 'test',
'framework' : 'torch'
})
if not (300 <= rv.status_code <= 310):
msg = self.get_error_msg(rv.data)
if msg is not None:
raise RuntimeError(msg)
else:
raise RuntimeError('Failed to create model')
assert scheduler.jobs[-1].train_task().crop_size == 12, \
'crop size not saved properly'
def test_previous_network_pretrained_model(self):
"""previous network, pretrained model"""
rv = self.app.post(self.url, data={
'method': 'previous',
'model_name': 'test',
'dataset': 'dataset',
'previous_networks': 'model',
'model-snapshot' : 1,
'framework' : 'torch'
})
if not (300 <= rv.status_code <= 310):
msg = self.get_error_msg(rv.data)
if msg is not None:
raise RuntimeError(msg)
else:
raise RuntimeError('Failed to create model')
assert scheduler.jobs[-1].train_task().pretrained_model == self.temp_snapshot_path, \
'pretrained model not saved properly'
| bsd-3-clause | -5,628,459,944,823,511,000 | 31.009259 | 188 | 0.57709 | false |
lemonade512/BluebonnetsPointsApp | bluebonnetspointsapp/routes.py | 1 | 8807 | """`main` is the top level module for your Flask application."""
import logging
import json
from flask import Flask, request, redirect, url_for, jsonify
from flask_restful import Resource, Api
from google.appengine.api import users
from google.appengine.ext import deferred
from datetime import datetime
from models.user_model import UserData
from models.point_model import PointException, PointCategory, PointRecord
from models.event_model import Event
from utils.jinja import render_jinja_template
from permissions import require_permissions
from utils.update_schema import run_update_schema
# Create the flask app
app = Flask(__name__)
api = Api(app)
# *************************************************************************** #
# FLASK ROUTES #
# *************************************************************************** #
@app.route('/')
def index():
template_values = {
'active_page': 'home',
'target_user': UserData.get_current_user_data(),
}
if UserData.get_current_user_data():
return render_jinja_template("dashboard.html", template_values)
else:
return render_jinja_template("index.html", template_values)
@app.route('/dashboard')
@app.route('/dashboard/<user_url_segment>')
@require_permissions(['self', 'officer'], logic='or')
def dashboard(user_url_segment=None):
if user_url_segment is None:
target_user = UserData.get_current_user_data()
else:
target_user = UserData.get_from_url_segment(user_url_segment)
if target_user is None:
template_values = {
'target_user': user_url_segment,
}
return render_jinja_template("noprofile.html", template_values), 404
if target_user.username != user_url_segment:
return redirect('/dashboard/{0}'.format(target_user.username))
    # A user viewing their own dashboard is redirected to the site root;
    # otherwise render the target user's dashboard.
    if target_user == UserData.get_current_user_data():
        return redirect('/')
template_values = {
'target_user': target_user,
}
return render_jinja_template("dashboard.html", template_values)
@app.route('/admin')
@require_permissions(['admin'])
def admin():
template_values = {
'active_page': 'admin',
}
return render_jinja_template("admin.html", template_values)
@app.route('/members')
@require_permissions(['officer'])
def members():
template_values = {
'active_page': 'members',
'users': UserData.query().order(UserData.first_name),
}
return render_jinja_template("members.html", template_values)
@app.route('/permissions')
@require_permissions(['officer'])
def permissions():
template_values = {
'active_page': "permissions",
'users': UserData.query().order(UserData.first_name),
}
return render_jinja_template("permissions.html", template_values)
# TODO (phillip): The only people who should be able to view a user's profile
# page are officers and the user himself
@app.route('/profile/<user_url_segment>')
@require_permissions(['self', 'officer'], logic='or')
def profile(user_url_segment):
target_user = UserData.get_from_url_segment(user_url_segment)
if target_user is None:
template_values = {
'target_user': user_url_segment,
}
return render_jinja_template("noprofile.html", template_values), 404
if target_user.username != user_url_segment:
return redirect('/profile/{0}'.format(target_user.username))
    # If looking at the current user's profile, highlight the user's name in
    # the nav bar
if target_user == UserData.get_current_user_data():
active = 'profile'
else:
active = None
template_values = {
'active_page': active,
'target_user': target_user,
}
return render_jinja_template("profile.html", template_values)
@app.route('/login')
def login():
next_url = url_for("postlogin", next=request.args.get("next", "/"))
template_values = {
'active_page': 'login',
'google_login_url': users.create_login_url(next_url),
}
return render_jinja_template("login.html", template_values)
# TODO (phillip): There might be an issue if the user logs into their Google
# account but doesn't finish the signup process. If they then click the back
# button a few times, they end up logged into their Google account without
# their UserData set up, which could cause problems.
@app.route('/postlogin')
def postlogin():
""" Handler for just after a user has logged in
This takes care of making sure the user has properly setup their account.
"""
next_url = request.args.get("next", "/")
user_data = UserData.get_current_user_data()
if not user_data:
# Need to create a user account
signup_url = url_for("signup", next=next_url)
return redirect(signup_url)
else:
return redirect(next_url)
@app.route('/signup')
def signup():
template_values = {
'next': request.args.get("next", "/"),
}
return render_jinja_template("signup.html", template_values)
@app.route('/point-categories')
@require_permissions(['officer'])
def point_categories():
template_values = {
'active_page': 'point-categories',
}
return render_jinja_template('point-categories.html', template_values)
@app.route('/events')
@require_permissions(['officer'])
def event_list():
template_values = {
'active_page': 'events',
}
return render_jinja_template('events.html', template_values)
# TODO (phillip): handle the case when the event does not exist
@app.route('/events/<event>')
def event(event):
event = Event.get_from_name(event)
template_values = {
'target_event': event,
}
return render_jinja_template('event.html', template_values)
# **************************************************************************** #
# Error Handlers #
# **************************************************************************** #
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return render_jinja_template("404.html"), 404
@app.errorhandler(500)
def application_error(e):
"""Return a custom 500 error."""
template_values = {
'msg': "Sorry, unexpected error: {}".format(e)
}
return render_jinja_template("500.html", template_values), 500
# *************************************************************************** #
# REST API ENDPOINTS #
# *************************************************************************** #
from controllers.event_controller import EventAPI, EventListAPI
from controllers.exception_controller import ExceptionAPI, ExceptionListAPI
from controllers.permission_controller import PermissionAPI, PermissionListAPI
from controllers.point_controller import PointRecordAPI, PointCategoryAPI, PointCategoryListAPI
from controllers.user_controller import UserAPI, UserListAPI, UserPointsAPI
api.add_resource(UserListAPI, '/api/users', endpoint='users')
api.add_resource(UserAPI, '/api/users/<string:user_id>', endpoint='user')
api.add_resource(ExceptionListAPI, '/api/users/<string:user_id>/point-exceptions')
api.add_resource(ExceptionAPI, '/api/users/<string:user_id>/point-exceptions/<int:index>')
api.add_resource(PermissionListAPI, '/api/users/<string:user_id>/permissions')
api.add_resource(PermissionAPI, '/api/users/<string:user_id>/permissions/<string:perm>')
api.add_resource(PointCategoryListAPI, '/api/point-categories')
api.add_resource(PointCategoryAPI, '/api/point-categories/<string:name>')
api.add_resource(EventListAPI, '/api/events')
api.add_resource(EventAPI, '/api/events/<string:event>')
api.add_resource(PointRecordAPI, '/api/point-records')
api.add_resource(UserPointsAPI, '/api/users/<string:user_id>/points')
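# Illustrative request paths produced by the registrations above ('jdoe' and
# 'attendance' are hypothetical identifiers; the supported verbs depend on
# each controller's implementation):
#
#   GET /api/users                       -> list all users
#   GET /api/users/jdoe/permissions      -> list jdoe's permissions
#   GET /api/point-categories/attendance -> a single point category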
# *************************************************************************** #
# ADMIN #
# *************************************************************************** #
@app.route("/admin/updateschema")
def updateschema():
# NOTE: Sometimes there can be issues with the prerendering done by the
# chrome address bar. In that case, you might see duplicate GET requests.
# Be very aware of this when updating schema or going to endpoints that
# could potentially destroy user data.
deferred.defer(run_update_schema)
return 'Schema migration successfully initiated.'
if __name__ == "__main__":
    logging.getLogger().setLevel(logging.DEBUG)
| gpl-3.0 | 1,609,514,002,009,681,000 | 35.695833 | 97 | 0.622459 | false |
evanunderscore/pygnurl | pygnurl/callback_mananger.py | 1 | 1302 | """Callback management utilities"""
import collections
from ctypes import * # pylint: disable=wildcard-import,unused-wildcard-import
import logging
class CallbackManager(object):
"""Manager for ctypes DLL hooks"""
def __init__(self, dll):
self.dll = dll
self.hooks = collections.defaultdict(dict)
self.logger = logging.getLogger(__name__)
def install(self, name, func):
"""
Install a callback function ensuring a reference is kept.
:param name: name of function to install
:param func: callback function to install
"""
self.logger.debug('installing callback for %s in %s', name, self.dll)
self._install(name, func)
def uninstall(self, name):
"""
Remove an installed callback function.
:param name: name of function to uninstall
"""
self.logger.debug('uninstalling callback for %s in %s', name, self.dll)
self._install(name)
def _install(self, name, func=None):
"""Install or remove a callback function"""
# install the callback function
# pylint: disable=no-member
c_void_p.in_dll(self.dll, name).value = cast(func, c_void_p).value
# store the function so it doesn't get GC'd
self.hooks[name] = func
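# Minimal usage sketch (hypothetical DLL and hook name; the CFUNCTYPE
# signature is illustrative only and not taken from this project):
#
#   dll = CDLL('libexample.so')
#   manager = CallbackManager(dll)
#   HOOK = CFUNCTYPE(c_int)
#   hook = HOOK(lambda: 0)      # kept alive by the manager's `hooks` dict
#   manager.install('example_hook', hook)
#   ...
#   manager.uninstall('example_hook')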
| gpl-2.0 | 4,814,608,374,765,069,000 | 34.189189 | 79 | 0.6298 | false |
dpercy/django-batch-requests | tests/test_compatibility.py | 1 | 7768 | '''
@author: Rahul Tanwani
@summary: Test cases to check compatibility between individual requests
and batch requests.
'''
import json
from django.test import TestCase
class TestCompatibility(TestCase):
'''
Tests compatibility.
'''
    def assert_response_compatible(self, ind_resp, batch_resp):
        '''
            Assert that the response of an independent request is compatible
            with the batch response.
        '''
        self.assertDictEqual(ind_resp, batch_resp, "Compatibility is broken!")
def headers_dict(self, headers):
'''
        Converts the headers from the response into a dict.
'''
return dict(headers.values())
def prepare_response(self, status_code, body, headers):
'''
Returns a dict of all the parameters.
'''
return {"status_code": status_code, "body": body, "headers": self.headers_dict(headers)}
def _batch_request(self, method, path, data, headers={}):
'''
Prepares a batch request.
'''
return {"url": path, "method": method, "headers": headers, "body": data}
def make_a_batch_request(self, method, url, body, headers={}):
'''
Makes a batch request using django client.
'''
return self.client.post("/api/v1/batch/", json.dumps([self._batch_request(method, url, body, headers)]),
content_type="application/json")
def make_multiple_batch_request(self, requests):
'''
Makes multiple batch request using django client.
'''
batch_requests = [self._batch_request(method, path, data, headers) for method, path, data, headers in requests]
return self.client.post("/api/v1/batch/", json.dumps(batch_requests),
content_type="application/json")
def test_compatibility_of_get_request(self):
'''
Make a GET request without the batch and in the batch and assert
        that both give the same results.
'''
# Get the response for an individual request.
inv_req = self.client.get("/views/")
inv_resp = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Get the response for a batch request.
batch_request = self.make_a_batch_request("GET", "/views/", "")
batch_resp = json.loads(batch_request.content)[0]
del batch_resp["reason_phrase"]
# Assert both individual request response and batch response are equal.
        self.assert_response_compatible(inv_resp, batch_resp)
def test_compatibility_of_post_request(self):
'''
Make a POST request without the batch and in the batch and assert
        that both give the same results.
'''
data = json.dumps({"text": "hello"})
# Get the response for an individual request.
inv_req = self.client.post("/views/", data, content_type="text/plain")
inv_resp = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Get the response for a batch request.
batch_request = self.make_a_batch_request("POST", "/views/", data, {"content_type": "text/plain"})
batch_resp = json.loads(batch_request.content)[0]
del batch_resp["reason_phrase"]
# Assert both individual request response and batch response are equal.
        self.assert_response_compatible(inv_resp, batch_resp)
def test_compatibility_of_put_request(self):
'''
Make a PUT request without the batch and in the batch and assert
        that both give the same results.
'''
data = json.dumps({"text": "hello"})
# Get the response for an individual request.
        inv_req = self.client.put("/views/", data, content_type="text/plain")
inv_resp = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Get the response for a batch request.
batch_request = self.make_a_batch_request("patch", "/views/", data, {"content_type": "text/plain"})
batch_resp = json.loads(batch_request.content)[0]
del batch_resp["reason_phrase"]
# Assert both individual request response and batch response are equal.
        self.assert_response_compatible(inv_resp, batch_resp)
def test_compatibility_of_patch_request(self):
'''
        Make a PATCH request without the batch and in the batch and assert
        that both give the same results.
'''
data = json.dumps({"text": "hello"})
# Get the response for an individual request.
        inv_req = self.client.patch("/views/", data, content_type="text/plain")
inv_resp = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Get the response for a batch request.
batch_request = self.make_a_batch_request("POST", "/views/", data, {"CONTENT_TYPE": "text/plain"})
batch_resp = json.loads(batch_request.content)[0]
del batch_resp["reason_phrase"]
# Assert both individual request response and batch response are equal.
        self.assert_response_compatible(inv_resp, batch_resp)
def test_compatibility_of_delete_request(self):
'''
Make a DELETE request without the batch and in the batch and assert
        that both give the same results.
'''
# Get the response for an individual request.
inv_req = self.client.delete("/views/")
inv_resp = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Get the response for a batch request.
batch_request = self.make_a_batch_request("delete", "/views/", "")
batch_resp = json.loads(batch_request.content)[0]
del batch_resp["reason_phrase"]
# Assert both individual request response and batch response are equal.
        self.assert_response_compatible(inv_resp, batch_resp)
def test_compatibility_of_multiple_requests(self):
'''
Make multiple requests without the batch and in the batch and
        assert that both give the same results.
'''
data = json.dumps({"text": "Batch"})
# Make GET, POST and PUT requests individually.
# Get the response for an individual GET request.
inv_req = self.client.get("/views/")
inv_get = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Get the response for an individual POST request.
inv_req = self.client.post("/views/", data, content_type="text/plain")
inv_post = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Get the response for an individual PUT request.
        inv_req = self.client.put("/views/", data, content_type="text/plain")
inv_put = self.prepare_response(inv_req.status_code, inv_req.content, inv_req._headers)
# Consolidate all the responses.
indv_responses = [inv_get, inv_post, inv_put]
# Make a batch call for GET, POST and PUT request.
get_req = ("get", "/views/", '', {})
post_req = ("post", "/views/", data, {"content_type": "text/plain"})
put_req = ("put", "/views/", data, {"content_type": "text/plain"})
# Get the response for a batch request.
batch_requests = self.make_multiple_batch_request([get_req, post_req, put_req])
batch_responses = json.loads(batch_requests.content)
# Assert all the responses are compatible.
for indv_resp, batch_resp in zip(indv_responses, batch_responses):
del batch_resp["reason_phrase"]
            self.assert_response_compatible(indv_resp, batch_resp)
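    # For reference, the JSON payload built by make_multiple_batch_request for
    # a single GET looks like this (key names match _batch_request above):
    #
    #   [{"url": "/views/", "method": "get", "headers": {}, "body": ""}]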
| mit | -5,558,380,822,016,206,000 | 40.989189 | 119 | 0.622425 | false |
eHealthAfrica/LMIS | LMIS/inventory/api/serializers.py | 1 | 2227 | """
Serializer for Inventory App related API end-points
"""
# import LMIS project modules
from core.api.serializers import BaseModelSerializer
from inventory.models import (Inventory, InventoryLine, ConsumptionRecord, ConsumptionRecordLine, IncomingShipment,
IncomingShipmentLine, OutgoingShipment, OutgoingShipmentLine)
class InventoryLineSerializer(BaseModelSerializer):
"""
Inventory Line serializer for Inventory records
"""
class Meta:
model = InventoryLine
class InventorySerializer(BaseModelSerializer):
"""
Inventory Model serializer
"""
class Meta:
model = Inventory
class ConsumptionRecordSerializer(BaseModelSerializer):
"""
Consumption Record Serializer used by the API endpoint to serialize Consumption records
"""
class Meta:
model = ConsumptionRecord
class ConsumptionRecordLineSerializer(BaseModelSerializer):
"""
ConsumptionRecordLine Serializer used by the API end-point to serialize ConsumptionRecordLine records
"""
class Meta:
model = ConsumptionRecordLine
class IncomingShipmentSerializer(BaseModelSerializer):
"""
IncomingShipmentSerializer used by the API end-point
"""
class Meta:
model = IncomingShipment
fields = ('supplier', 'stock_entry_type', 'input_warehouse', 'other', 'other_source', 'is_deleted',
'incoming_shipment_lines',)
class IncomingShipmentLineSerializer(BaseModelSerializer):
"""
    IncomingShipmentLineSerializer used by the API end-point
"""
class Meta:
model = IncomingShipmentLine
class OutgoingShipmentSerializer(BaseModelSerializer):
"""
OutgoingShipmentSerializer is used by the API end-point to serialize OutgoingShipment records
"""
class Meta:
model = OutgoingShipment
fields = ('recipient', 'output_warehouse', 'status', 'is_deleted', 'outgoing_shipment_lines')
class OutgoingShipmentLineSerializer(BaseModelSerializer):
"""
OutgoingShipmentLineSerializer is used by the API end-points to serialize OutgoingShipmentLine records
"""
class Meta:
model = OutgoingShipmentLine | gpl-2.0 | -7,145,018,752,463,819,000 | 28.315789 | 115 | 0.705433 | false |
hoto17296/flask-minitwit | server/views.py | 1 | 3272 | from flask import request, session, url_for, redirect, render_template, abort, g, flash
from . import app
from .lib import Auth, AuthError, User, Timeline
@app.before_request
def before_request():
g.auth = Auth(session, app.config.get('SECRET_KEY'))
@app.route('/')
def timeline():
if not g.auth.authorized():
return redirect(url_for('public_timeline'))
return render_template('timeline.html', timeline=Timeline.following(g.auth.user))
@app.route('/public')
def public_timeline():
return render_template('timeline.html', timeline=Timeline.public())
@app.route('/<name>')
def user_timeline(name):
user = User.find_by('name', name)
if user is None:
abort(404)
following = False
if g.auth.authorized():
following = g.auth.user.is_following(user)
return render_template('timeline.html', timeline=Timeline.user(user), following=following)
@app.route('/<name>/follow')
def follow_user(name):
if not g.auth.authorized():
abort(401)
user = User.find_by('name', name)
if user is None:
abort(404)
g.auth.user.follow(user)
flash('You are now following "%s"' % name)
return redirect(url_for('user_timeline', name=name))
@app.route('/<name>/unfollow')
def unfollow_user(name):
if not g.auth.authorized():
abort(401)
user = User.find_by('name', name)
if user is None:
abort(404)
g.auth.user.unfollow(user)
flash('You are no longer following "%s"' % name)
return redirect(url_for('user_timeline', name=name))
@app.route('/add_message', methods=['POST'])
def add_message():
if not g.auth.authorized():
abort(401)
if request.form['text']:
g.auth.user.post_message(request.form['text'])
flash('Your message was recorded')
return redirect(url_for('timeline'))
@app.route('/login', methods=['GET', 'POST'])
def login():
if g.auth.authorized():
return redirect(url_for('timeline'))
error = None
if request.method == 'POST':
try:
g.auth.login(request.form['name'], request.form['password'])
flash('You were logged in')
return redirect(url_for('timeline'))
except AuthError as err:
error = str(err)
return render_template('login.html', error=error)
@app.route('/register', methods=['GET', 'POST'])
def register():
if g.auth.authorized():
return redirect(url_for('timeline'))
error = None
if request.method == 'POST':
try:
if request.form['password'] != request.form['password2']:
raise AuthError('The two passwords do not match')
g.auth.register({
'name': request.form['name'],
'email': request.form['email'],
'password': request.form['password'],
})
flash('You were successfully registered')
g.auth.login(request.form['name'], request.form['password'])
return redirect(url_for('timeline'))
except AuthError as err:
error = str(err)
return render_template('register.html', error=error)
@app.route('/logout')
def logout():
flash('You were logged out')
g.auth.logout()
return redirect(url_for('public_timeline'))
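# Route summary (derived from the view functions above):
#   /                  personal timeline (redirects to /public when logged out)
#   /public            public timeline
#   /<name>            a user's timeline
#   /<name>/follow     follow a user (login required)
#   /<name>/unfollow   unfollow a user (login required)
#   /add_message       post a new message (POST, login required)
#   /login /register /logout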
| bsd-3-clause | -7,375,406,376,846,156,000 | 29.018349 | 94 | 0.61522 | false |
snare/voltron | voltron/styles.py | 1 | 2382 | from pygments.style import Style
from pygments.token import Token, Comment, Name, Keyword, Generic, Number, Operator, String, Punctuation, Error
BASE03 = '#002b36'
BASE02 = '#073642'
BASE01 = '#586e75'
BASE00 = '#657b83'
BASE0 = '#839496'
BASE1 = '#93a1a1'
BASE2 = '#eee8d5'
BASE3 = '#fdf6e3'
YELLOW = '#b58900'
ORANGE = '#cb4b16'
RED = '#dc322f'
MAGENTA = '#d33682'
VIOLET = '#6c71c4'
BLUE = '#268bd2'
CYAN = '#2aa198'
GREEN = '#859900'
class VolarizedStyle(Style):
background_color = BASE03
styles = {
Keyword: GREEN,
Keyword.Constant: ORANGE,
Keyword.Declaration: BASE1,
Keyword.Namespace: ORANGE,
# Keyword.Pseudo
Keyword.Reserved: BLUE,
Keyword.Type: VIOLET,
Name: BASE1,
Name.Attribute: BASE1,
Name.Builtin: YELLOW,
Name.Builtin.Pseudo: YELLOW,
Name.Class: BLUE,
Name.Constant: ORANGE,
Name.Decorator: BLUE,
Name.Entity: ORANGE,
Name.Exception: YELLOW,
Name.Function: BLUE,
Name.Label: BASE01,
# Name.Namespace
# Name.Other
Name.Tag: BLUE,
Name.Variable: BLUE,
# Name.Variable.Class
# Name.Variable.Global
# Name.Variable.Instance
# Literal
# Literal.Date
String: BASE1,
String.Backtick: BASE01,
String.Char: BASE1,
String.Doc: CYAN,
# String.Double
String.Escape: RED,
String.Heredoc: CYAN,
# String.Interpol
# String.Other
String.Regex: RED,
# String.Single
# String.Symbol
Number: CYAN,
# Number.Float
# Number.Hex
# Number.Integer
# Number.Integer.Long
# Number.Oct
Operator: GREEN,
Operator.Word: GREEN,
Punctuation: BASE00,
Comment: BASE00,
# Comment.Multiline
Comment.Preproc: GREEN,
# Comment.Single
Comment.Special: GREEN,
# Generic
Generic.Deleted: CYAN,
Generic.Emph: 'italic',
Generic.Error: RED,
Generic.Heading: ORANGE,
Generic.Inserted: GREEN,
# Generic.Output
Generic.Prompt: RED,
Generic.Strong: 'bold',
Generic.Subheading: ORANGE,
# Generic.Traceback
Token: BASE1,
Token.Other: ORANGE,
Error: RED
}
| mit | -600,035,029,460,470,800 | 22.82 | 111 | 0.569689 | false |
rrafiringa/is210-week-04-synthesizing | task_01.py | 1 | 1415 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Temperature conversion """
import decimal
decimal.getcontext().prec = 5
ABSOLUTE_DIFFERENCE = decimal.Decimal('273.15')
def fahrenheit_to_kelvin(degrees):
""" Convert temperature from Farenheit to Kelvin units.
Args:
        degrees (float): Fahrenheit temperature units to convert.
Returns:
Decimal: Temperature unit in Kelvin.
Examples:
>>> task_01.fahrenheit_to_kelvin(212)
Decimal('373.15')
"""
kelvin = celsius_to_kelvin(fahrenheit_to_celsius(degrees))
return kelvin
def celsius_to_kelvin(degrees):
""" Convert temperature from Celsius to Kelvin units.
Args:
degrees (float): Celsius units to convert.
Returns:
Decimal: Temperature unit in Kelvin.
Examples:
>>> task_01.celsius_to_kelvin(100)
Decimal('373.15')
"""
kelvin = decimal.Decimal(degrees) + ABSOLUTE_DIFFERENCE
return kelvin
def fahrenheit_to_celsius(degrees):
""" Convert temperature from Farenheit to Celsius units.
Args:
        degrees (float): Fahrenheit value to convert to Celsius.
Returns:
Decimal: Temperature unit in Celsius.
Examples:
>>> task_01.fahrenheit_to_celsius(212)
Decimal('100')
"""
celsius = decimal.Decimal(5) * \
decimal.Decimal(float(degrees) - 32) / decimal.Decimal(9)
return celsius
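# Example chaining (consistent with the doctests above): 32 degrees
# Fahrenheit is 0 degrees Celsius, so fahrenheit_to_kelvin(32) returns
# Decimal('273.15').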
| mpl-2.0 | 3,742,161,318,189,985,000 | 21.109375 | 65 | 0.642403 | false |
grbot/agd | laura/add-ANC-to-vcf.py | 1 | 3691 | #!/usr/bin/env python
# Author: Jeffrey M Kidd
# 2 September 2011
# add-ANC-to-vcf.py
# adds ancestral annotation based on Ensembl data taken from a genomedata archive
# you'll have to do your own filtering based on qual etc.
import sys
import os
import genomedata
import math
from genomedata import Genome
from optparse import OptionParser
USAGE = """
add-ANC-to-vcf.py --in <vcf file to process> --out <new VCF file name> -g <in/out is gziped>
--genomedata <path to genome data archieve with GERP scores>
Adds ancestral state SNPs in VCF, based on values in genomedata archieve (in 'anc' track).
Use -g if input VCF is gzipped, output file will also be gzipped.
Note: current version assumes all variants in VCF are SNPs.
"""
parser = OptionParser(USAGE)
parser.add_option('--in',dest='inVCF', help = 'input VCF file')
parser.add_option('--out',dest='outVCF', help = 'output VCF file')
parser.add_option('-g',dest='isGzip', action='store_true', default = False, help = 'output VCF file')
parser.add_option('--genomedata',dest='genomedata', help = 'genomedata archive with the ancestral state track')
(options, args) = parser.parse_args()
if options.inVCF is None:
parser.error('input VCF not given')
if options.outVCF is None:
parser.error('output VCF not given')
if options.genomedata is None:
parser.error('genomedata archive not given')
###############################################################################
# try to open up the genomedata archive
try:
genome = Genome(options.genomedata)
except:
print "ERROR!! Couldn't open the genomedata archive: " + options.genomedata + "\n"
sys.exit(1)
#setup file open/close with or without gzip
if options.isGzip is True:
try:
gc = 'gunzip -c ' + options.inVCF
inFile = os.popen(gc, 'r')
except:
print "ERROR!! Couldn't open the file" + options.inVCF + " (with gzip)\n"
sys.exit(1)
try:
gc = 'gzip > ' + options.outVCF
outFile = os.popen(gc, 'w')
except:
print "ERROR!! Couldn't open the output file" + options.outVCF + " (with gzip)\n"
sys.exit(1)
else:
inFile = open(options.inVCF,'r')
outFile = open(options.outVCF,'w')
# read through VCF file up to the chrom line, we will then add addtional info fields
line = inFile.readline()
while line.split('\t')[0] != '#CHROM':
outFile.write(line)
line = inFile.readline()
# at this point, line is the 'header' line of the VCF. Output the header for the ANC info line
ancInfoLine = '##INFO=<ID=ANC,Number=1,Type=Character,Description="ancestral state from Ensembl">\n'
outFile.write(ancInfoLine)
outFile.write(line)
# rest of the VCF file should now just be the variants
# Set current chrom as something that isn't a chrom
currentChrom = 'notAChrom'
while True:
line = inFile.readline()
if line == "":
break
line = line.rstrip()
line = line.split('\t')
if line[0] != currentChrom:
chrom = genome[line[0]]
currentChrom = line[0]
pos = int(line[1]) - 1 #switch to zero based indexing
score = chrom[pos,'anc']
	# if there is no ancestral state for this position, annotate it as unknown ('.')
	# We should probably also check whether the variant is a SNP, as the ancestral
	# state isn't well defined for non-SNP variants
if math.isnan(score):
anc = '.'
else:
anc = chr(score)
if line[7] == '.':
line[7] = 'ANC=%s' % (anc)
else :
rsField = ';ANC=%s' % (anc)
line[7] += rsField
line = '\t'.join(line)
line = line + '\n'
outFile.write(line)
genome.close()
inFile.close()
outFile.close()
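# Example of the resulting INFO field (illustrative): a record whose INFO was
# "DP=20" becomes "DP=20;ANC=A", and a record whose INFO was "." becomes
# "ANC=A" (or "ANC=." when no ancestral state is available at that position).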
| mit | 3,805,306,246,244,782,000 | 27.175573 | 102 | 0.636684 | false |
Hillshum/gPodder-tagging | src/gpodder/gtkui/desktop/episodeselector.py | 1 | 18139 | # -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2010 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import gtk
import pango
from xml.sax import saxutils
import gpodder
_ = gpodder.gettext
N_ = gpodder.ngettext
from gpodder import util
from gpodder.liblogger import log
from gpodder.gtkui.interface.common import BuilderWidget
class gPodderEpisodeSelector(BuilderWidget):
"""Episode selection dialog
Optional keyword arguments that modify the behaviour of this dialog:
- callback: Function that takes 1 parameter which is a list of
the selected episodes (or empty list when none selected)
- remove_callback: Function that takes 1 parameter which is a list
of episodes that should be "removed" (see below)
(default is None, which means remove not possible)
- remove_action: Label for the "remove" action (default is "Remove")
- remove_finished: Callback after all remove callbacks have finished
(default is None, also depends on remove_callback)
It will get a list of episode URLs that have been
removed, so the main UI can update those
- episodes: List of episodes that are presented for selection
- selected: (optional) List of boolean variables that define the
default checked state for the given episodes
- selected_default: (optional) The default boolean value for the
checked state if no other value is set
(default is False)
      - columns: List of (name, sort_name, sort_type, caption) tuples for the
columns, the name is the attribute name of the episode to be
read from each episode object. The sort name is the
attribute name of the episode to be used to sort this column.
If the sort_name is None it will use the attribute name for
sorting. The sort type is the type of the sort column.
The caption attribute is the text that appear as column caption
(default is [('title_markup', None, None, 'Episode'),])
- title: (optional) The title of the window + heading
- instructions: (optional) A one-line text describing what the
user should select / what the selection is for
- stock_ok_button: (optional) Will replace the "OK" button with
another GTK+ stock item to be used for the
affirmative button of the dialog (e.g. can
be gtk.STOCK_DELETE when the episodes to be
selected will be deleted after closing the
dialog)
- selection_buttons: (optional) A dictionary with labels as
keys and callbacks as values; for each
key a button will be generated, and when
the button is clicked, the callback will
be called for each episode and the return
value of the callback (True or False) will
be the new selected state of the episode
- size_attribute: (optional) The name of an attribute of the
supplied episode objects that can be used to
calculate the size of an episode; set this to
None if no total size calculation should be
done (in cases where total size is useless)
(default is 'length')
- tooltip_attribute: (optional) The name of an attribute of
the supplied episode objects that holds
the text for the tooltips when hovering
over an episode (default is 'description')
"""
finger_friendly_widgets = ['btnCancel', 'btnOK', 'btnCheckAll', 'btnCheckNone', 'treeviewEpisodes']
COLUMN_INDEX = 0
COLUMN_TOOLTIP = 1
COLUMN_TOGGLE = 2
COLUMN_ADDITIONAL = 3
def new( self):
self._config.connect_gtk_window(self.gPodderEpisodeSelector, 'episode_selector', True)
if not hasattr( self, 'callback'):
self.callback = None
if not hasattr(self, 'remove_callback'):
self.remove_callback = None
if not hasattr(self, 'remove_action'):
self.remove_action = _('Remove')
if not hasattr(self, 'remove_finished'):
self.remove_finished = None
if not hasattr( self, 'episodes'):
self.episodes = []
if not hasattr( self, 'size_attribute'):
self.size_attribute = 'length'
if not hasattr(self, 'tooltip_attribute'):
self.tooltip_attribute = 'description'
if not hasattr( self, 'selection_buttons'):
self.selection_buttons = {}
if not hasattr( self, 'selected_default'):
self.selected_default = False
if not hasattr( self, 'selected'):
self.selected = [self.selected_default]*len(self.episodes)
if len(self.selected) < len(self.episodes):
self.selected += [self.selected_default]*(len(self.episodes)-len(self.selected))
if not hasattr( self, 'columns'):
self.columns = (('title_markup', None, None, _('Episode')),)
if hasattr( self, 'title'):
self.gPodderEpisodeSelector.set_title( self.title)
self.labelHeading.set_markup( '<b><big>%s</big></b>' % saxutils.escape( self.title))
if hasattr( self, 'instructions'):
self.labelInstructions.set_text( self.instructions)
self.labelInstructions.show_all()
if hasattr(self, 'stock_ok_button'):
if self.stock_ok_button == 'gpodder-download':
self.btnOK.set_image(gtk.image_new_from_stock(gtk.STOCK_GO_DOWN, gtk.ICON_SIZE_BUTTON))
self.btnOK.set_label(_('Download'))
else:
self.btnOK.set_label(self.stock_ok_button)
self.btnOK.set_use_stock(True)
# check/uncheck column
toggle_cell = gtk.CellRendererToggle()
toggle_cell.connect( 'toggled', self.toggle_cell_handler)
toggle_column = gtk.TreeViewColumn('', toggle_cell, active=self.COLUMN_TOGGLE)
toggle_column.set_clickable(True)
self.treeviewEpisodes.append_column(toggle_column)
next_column = self.COLUMN_ADDITIONAL
for name, sort_name, sort_type, caption in self.columns:
renderer = gtk.CellRendererText()
if next_column < self.COLUMN_ADDITIONAL + 1:
renderer.set_property('ellipsize', pango.ELLIPSIZE_END)
column = gtk.TreeViewColumn(caption, renderer, markup=next_column)
column.set_clickable(False)
column.set_resizable( True)
# Only set "expand" on the first column
if next_column < self.COLUMN_ADDITIONAL + 1:
column.set_expand(True)
if sort_name is not None:
column.set_sort_column_id(next_column+1)
else:
column.set_sort_column_id(next_column)
self.treeviewEpisodes.append_column( column)
next_column += 1
if sort_name is not None:
# add the sort column
column = gtk.TreeViewColumn()
column.set_clickable(False)
column.set_visible(False)
self.treeviewEpisodes.append_column( column)
next_column += 1
column_types = [ int, str, bool ]
# add string column type plus sort column type if it exists
for name, sort_name, sort_type, caption in self.columns:
column_types.append(str)
if sort_name is not None:
column_types.append(sort_type)
self.model = gtk.ListStore( *column_types)
tooltip = None
for index, episode in enumerate( self.episodes):
if self.tooltip_attribute is not None:
try:
tooltip = getattr(episode, self.tooltip_attribute)
except:
log('Episode object %s does not have tooltip attribute: "%s"', episode, self.tooltip_attribute, sender=self)
tooltip = None
row = [ index, tooltip, self.selected[index] ]
for name, sort_name, sort_type, caption in self.columns:
if not hasattr(episode, name):
log('Warning: Missing attribute "%s"', name, sender=self)
row.append(None)
else:
row.append(getattr( episode, name))
if sort_name is not None:
if not hasattr(episode, sort_name):
log('Warning: Missing attribute "%s"', sort_name, sender=self)
row.append(None)
else:
row.append(getattr( episode, sort_name))
self.model.append( row)
if self.remove_callback is not None:
self.btnRemoveAction.show()
self.btnRemoveAction.set_label(self.remove_action)
# connect to tooltip signals
if self.tooltip_attribute is not None:
try:
self.treeviewEpisodes.set_property('has-tooltip', True)
self.treeviewEpisodes.connect('query-tooltip', self.treeview_episodes_query_tooltip)
except:
log('I cannot set has-tooltip/query-tooltip (need at least PyGTK 2.12)', sender=self)
self.last_tooltip_episode = None
self.episode_list_can_tooltip = True
self.treeviewEpisodes.connect('button-press-event', self.treeview_episodes_button_pressed)
self.treeviewEpisodes.set_rules_hint( True)
self.treeviewEpisodes.set_model( self.model)
self.treeviewEpisodes.columns_autosize()
# Focus the toggle column for Tab-focusing (bug 503)
path, column = self.treeviewEpisodes.get_cursor()
if path is not None:
self.treeviewEpisodes.set_cursor(path, toggle_column)
self.calculate_total_size()
def treeview_episodes_query_tooltip(self, treeview, x, y, keyboard_tooltip, tooltip):
# With get_bin_window, we get the window that contains the rows without
# the header. The Y coordinate of this window will be the height of the
# treeview header. This is the amount we have to subtract from the
# event's Y coordinate to get the coordinate to pass to get_path_at_pos
(x_bin, y_bin) = treeview.get_bin_window().get_position()
        x -= x_bin
y -= y_bin
(path, column, rx, ry) = treeview.get_path_at_pos(x, y) or (None,)*4
if not self.episode_list_can_tooltip or column != treeview.get_columns()[1]:
self.last_tooltip_episode = None
return False
if path is not None:
model = treeview.get_model()
iter = model.get_iter(path)
index = model.get_value(iter, self.COLUMN_INDEX)
description = model.get_value(iter, self.COLUMN_TOOLTIP)
if self.last_tooltip_episode is not None and self.last_tooltip_episode != index:
self.last_tooltip_episode = None
return False
self.last_tooltip_episode = index
description = util.remove_html_tags(description)
if description is not None:
if len(description) > 400:
description = description[:398]+'[...]'
tooltip.set_text(description)
return True
else:
return False
self.last_tooltip_episode = None
return False
def treeview_episodes_button_pressed(self, treeview, event):
if event.button == 3:
menu = gtk.Menu()
if len(self.selection_buttons):
for label in self.selection_buttons:
item = gtk.MenuItem(label)
item.connect('activate', self.custom_selection_button_clicked, label)
menu.append(item)
menu.append(gtk.SeparatorMenuItem())
item = gtk.MenuItem(_('Select all'))
item.connect('activate', self.on_btnCheckAll_clicked)
menu.append(item)
item = gtk.MenuItem(_('Select none'))
item.connect('activate', self.on_btnCheckNone_clicked)
menu.append(item)
menu.show_all()
# Disable tooltips while we are showing the menu, so
# the tooltip will not appear over the menu
self.episode_list_can_tooltip = False
menu.connect('deactivate', lambda menushell: self.episode_list_allow_tooltips())
menu.popup(None, None, None, event.button, event.time)
return True
def episode_list_allow_tooltips(self):
self.episode_list_can_tooltip = True
def calculate_total_size( self):
if self.size_attribute is not None:
(total_size, count) = (0, 0)
for episode in self.get_selected_episodes():
try:
total_size += int(getattr( episode, self.size_attribute))
count += 1
except:
log( 'Cannot get size for %s', episode.title, sender = self)
text = []
if count == 0:
text.append(_('Nothing selected'))
text.append(N_('%d episode', '%d episodes', count) % count)
if total_size > 0:
text.append(_('size: %s') % util.format_filesize(total_size))
self.labelTotalSize.set_text(', '.join(text))
self.btnOK.set_sensitive(count>0)
self.btnRemoveAction.set_sensitive(count>0)
if count > 0:
self.btnCancel.set_label(gtk.STOCK_CANCEL)
else:
self.btnCancel.set_label(gtk.STOCK_CLOSE)
else:
self.btnOK.set_sensitive(False)
self.btnRemoveAction.set_sensitive(False)
for index, row in enumerate(self.model):
if self.model.get_value(row.iter, self.COLUMN_TOGGLE) == True:
self.btnOK.set_sensitive(True)
self.btnRemoveAction.set_sensitive(True)
break
self.labelTotalSize.set_text('')
def toggle_cell_handler( self, cell, path):
model = self.treeviewEpisodes.get_model()
model[path][self.COLUMN_TOGGLE] = not model[path][self.COLUMN_TOGGLE]
self.calculate_total_size()
def custom_selection_button_clicked(self, button, label):
callback = self.selection_buttons[label]
for index, row in enumerate( self.model):
new_value = callback( self.episodes[index])
self.model.set_value( row.iter, self.COLUMN_TOGGLE, new_value)
self.calculate_total_size()
def on_btnCheckAll_clicked( self, widget):
for row in self.model:
self.model.set_value( row.iter, self.COLUMN_TOGGLE, True)
self.calculate_total_size()
def on_btnCheckNone_clicked( self, widget):
for row in self.model:
self.model.set_value( row.iter, self.COLUMN_TOGGLE, False)
self.calculate_total_size()
def on_remove_action_activate(self, widget):
episodes = self.get_selected_episodes(remove_episodes=True)
urls = []
for episode in episodes:
urls.append(episode.url)
self.remove_callback(episode)
if self.remove_finished is not None:
self.remove_finished(urls)
self.calculate_total_size()
# Close the window when there are no episodes left
model = self.treeviewEpisodes.get_model()
if model.get_iter_first() is None:
self.on_btnCancel_clicked(None)
def on_row_activated(self, treeview, path, view_column):
model = treeview.get_model()
iter = model.get_iter(path)
value = model.get_value(iter, self.COLUMN_TOGGLE)
model.set_value(iter, self.COLUMN_TOGGLE, not value)
def get_selected_episodes( self, remove_episodes=False):
selected_episodes = []
for index, row in enumerate( self.model):
if self.model.get_value( row.iter, self.COLUMN_TOGGLE) == True:
selected_episodes.append( self.episodes[self.model.get_value( row.iter, self.COLUMN_INDEX)])
if remove_episodes:
for episode in selected_episodes:
index = self.episodes.index(episode)
iter = self.model.get_iter_first()
while iter is not None:
if self.model.get_value(iter, self.COLUMN_INDEX) == index:
self.model.remove(iter)
break
iter = self.model.iter_next(iter)
return selected_episodes
def on_btnOK_clicked( self, widget):
self.gPodderEpisodeSelector.destroy()
if self.callback is not None:
self.callback( self.get_selected_episodes())
def on_btnCancel_clicked( self, widget):
self.gPodderEpisodeSelector.destroy()
if self.callback is not None:
self.callback([])
| gpl-3.0 | -1,909,551,925,635,607,300 | 42.085511 | 128 | 0.587739 | false |
DerThorsten/nifty | scripts/new_plcm.py | 1 | 19851 | import nifty
import numpy
import nifty.segmentation
import nifty.graph.rag
import nifty.graph.agglo
import vigra
import matplotlib.pyplot as plt
from random import shuffle
#import fastfilters
numpy.random.seed(32)
Objective = nifty.graph.opt.lifted_multicut.PixelWiseLmcObjective2D
class PlmcObjective2D(nifty.graph.opt.lifted_multicut.PixelWiseLmcObjective2D):
def __init__(self,raw, affinities, weights, offsets):
self.raw = numpy.require(raw,dtype='float32')
self.affinities = affinities
self.weights = weights
self.offsets = offsets
super(PlmcObjective2D, self).__init__(weights, offsets)
def proposals_from_raw(self):
proposals = []
for sigma in (1.0, 3.0, 5.0):
raw = self.raw
            hmap = vigra.filters.hessianOfGaussianEigenvalues(raw, sigma)[:,:,0]
seg,nseg = vigra.analysis.watersheds(1.0*hmap)
proposals.append(seg)
#plt.imshow(nifty.segmentation.markBoundaries(raw/255.0, seg, color=(1,0,0)))
#plt.show()
return proposals
def proposal_from_raw_agglo(self):
proposals = []
for sigma in (1.0, 3.0, 5.0):
grow_map = vigra.filters.hessianOfGaussianEigenvalues(self.raw, sigma)[:,:,0]
overseg,nseg = vigra.analysis.watersheds(grow_map.astype('float32'))
rag = nifty.graph.rag.gridRag(overseg)
edge_features, node_features = nifty.graph.rag.accumulateMeanAndLength(
rag, grow_map, [512,512],0)
meanEdgeStrength = edge_features[:,0]
edgeSizes = edge_features[:,1]
nodeSizes = node_features[:,1]
for size_reg in (0.1,0.2,0.4,0.8):
# cluster-policy
nnodes = rag.numberOfNodes//300
nnodes = min(nnodes, 1000)
clusterPolicy = nifty.graph.agglo.edgeWeightedClusterPolicy(
graph=rag, edgeIndicators=meanEdgeStrength,
edgeSizes=edgeSizes, nodeSizes=nodeSizes,
numberOfNodesStop=nnodes, sizeRegularizer=size_reg)
# run agglomerative clustering
agglomerativeClustering = nifty.graph.agglo.agglomerativeClustering(clusterPolicy)
agglomerativeClustering.run()
nodeSeg = agglomerativeClustering.result()
# convert graph segmentation
# to pixel segmentation
seg = nifty.graph.rag.projectScalarNodeDataToPixels(rag, nodeSeg)
#plt.imshow(nifty.segmentation.segmentOverlay(self.raw, seg, showBoundaries=False))
#plt.show()
proposals.append(seg)
return proposals
def proposal_from_local_agglo(self, hmap):
proposals = []
hmap0 = vigra.filters.gaussianSmoothing(hmap, 0.1)
for sigma in (1.0, 3.0, 5.0):
hmap1 = vigra.filters.gaussianSmoothing(hmap, sigma)
grow_map = hmap0 + 0.05*hmap1
overseg,nseg = vigra.analysis.watersheds(grow_map.astype('float32'))
rag = nifty.graph.rag.gridRag(overseg)
edge_features, node_features = nifty.graph.rag.accumulateMeanAndLength(
rag, hmap, [512,512],0)
meanEdgeStrength = edge_features[:,0]
edgeSizes = edge_features[:,1]
nodeSizes = node_features[:,1]
for size_reg in (0.1,0.2,0.4,0.8):
# cluster-policy
clusterPolicy = nifty.graph.agglo.edgeWeightedClusterPolicy(
graph=rag, edgeIndicators=meanEdgeStrength,
edgeSizes=edgeSizes, nodeSizes=nodeSizes,
numberOfNodesStop=rag.numberOfNodes//10, sizeRegularizer=size_reg)
# run agglomerative clustering
agglomerativeClustering = nifty.graph.agglo.agglomerativeClustering(clusterPolicy)
agglomerativeClustering.run()
nodeSeg = agglomerativeClustering.result()
# convert graph segmentation
# to pixel segmentation
seg = nifty.graph.rag.projectScalarNodeDataToPixels(rag, nodeSeg)
#plt.imshow(nifty.segmentation.segmentOverlay(self.raw, seg, showBoundaries=False))
#plt.show()
proposals.append(seg)
return proposals
def downsample_by_two(self):
def impl(raw, weights, affinities, offsets):
shape = weights.shape[0:2]
new_shape = [s//2 for s in shape]
new_raw = vigra.sampling.resize(raw.astype('float32'), new_shape)
n_offsets = offsets.shape[0]
new_offsets = offsets.astype('float')/2.0
new_weight_dict = dict()
new_affinity_dict = dict()
def f(o):
if(o>0.0 and o<1.0):
return 1
elif(o<0.0 and o>-1.0):
return -1
else:
return int(round(o))
for i_offset in range(n_offsets):
weights_channel = weights[:,:,i_offset]
affinity_channel = affinities[:,:,i_offset]
new_weights_channel = vigra.sampling.resize(weights_channel.astype('float32'), new_shape)
new_affinity_channel = vigra.sampling.resize(affinity_channel.astype('float32'), new_shape)
offset = offsets[i_offset,:]
nx,ny = new_offsets[i_offset,:]
nx,ny = f(nx), f(ny)
if (nx,ny) in new_weight_dict:
new_weight_dict[(nx,ny)] += new_weights_channel
new_affinity_dict[(nx,ny)] += new_affinity_channel
else:
new_weight_dict[(nx,ny)] = new_weights_channel
new_affinity_dict[(nx,ny)] = new_affinity_channel
print(offset,(nx,ny))
new_offsets = [ ]
new_weights = [ ]
new_affinities = [ ]
for key in new_weight_dict.keys():
new_offsets.append(key)
new_weights.append(new_weight_dict[key])
new_affinities.append(new_affinity_dict[key])
new_weights = numpy.array(new_weights)
new_affinities = numpy.array(new_affinities)
new_offsets = numpy.array(new_offsets)
return new_raw, numpy.rollaxis(new_weights,0,3), numpy.rollaxis(new_affinities,0,3), new_offsets#numpy.swapaxes(new_offsets,0,1)
new_raw, new_weights,new_affinities, new_offsets = impl(raw=self.raw,weights=self.weights,
affinities=self.affinities, offsets=self.offsets)
return PlmcObjective2D(raw=new_raw, affinities=new_affinities, weights=new_weights, offsets=new_offsets)
def local_affinities_to_pixel(affinities, offsets):
shape = affinities.shape[0:2]
offset_dict = dict()
for i in range(offsets.shape[0]):
x,y = offsets[i,:]
key = int(x),int(y)
offset_dict[key] = i
local_edges = [
(-1, 0),
( 1, 0),
( 0,-1),
( 0, 1)
]
acc = numpy.zeros(shape)
for local_edge in local_edges:
#print("find",local_edge)
if local_edge in offset_dict:
acc += affinities[:,:, offset_dict[local_edge]]
else:
o_local_edge = tuple([-1*e for e in local_edge])
#print("missing",local_edge)
if o_local_edge in offset_dict:
#print(" using: ",o_local_edge)
o_channel = affinities[:,:, offset_dict[o_local_edge]]
padded_o_channel = numpy.pad(o_channel, 1, mode='reflect')
if local_edge == (0,1):
acc += padded_o_channel[1:shape[0]+1, 2:shape[1]+2]
elif local_edge == (1,0):
acc += padded_o_channel[2:shape[0]+2, 1:shape[1]+1]
elif local_edge == (0,-1):
acc += padded_o_channel[1:shape[0]+1, 0:shape[1]]
                elif local_edge == (-1,0):
acc += padded_o_channel[0:shape[0], 1:shape[1]+1]
else:
raise RuntimeError("todo")
return acc
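# Note on the reflect-padding trick above: when only the opposite offset is
# stored (e.g. (0,-1) instead of (0,1)), its channel is padded by one pixel
# and shifted by the offset, so the same edge strength is read from the
# neighboring pixel's point of view.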
def make_pixel_wise(affinities, offsets):
shape = affinities.shape[0:2]
big_shape = tuple([2*s for s in shape])
padding_size = int(numpy.abs(offsets).max())*2
acc = numpy.zeros(shape)
for i in range(offsets.shape[0]):
print(i)
affinity_channel = affinities[:, :, i]
affinity_channel = vigra.sampling.resize(affinity_channel, big_shape)
padded_affinity_channel = numpy.pad(affinity_channel, padding_size, mode='reflect')
sx = padding_size - offsets[i,0]
sy = padding_size - offsets[i,1]
p_affinity = padded_affinity_channel[sx: sx+big_shape[0], sy: sy+big_shape[0]]
sigma = 0.3*numpy.sum(offsets[i,:]**2)**0.5
print("sigma",sigma)
p_affinity = vigra.filters.gaussianSmoothing(p_affinity, sigma)
acc += numpy.array(vigra.sampling.resize(p_affinity, shape))
return acc
def solve_single_scale(objective, best_l=None):
shape = objective.shape
class Fuse(object):
def __init__(self,objective, best_l=None):
self.objective = objective
self.best_l = best_l
self.best_e = None
if self.best_l is not None:
self.best_e = objective.evaluate(best_l)
G = nifty.graph.UndirectedGraph
CCObj = G.LiftedMulticutObjective
greedySolverFactory = CCObj.liftedMulticutGreedyAdditiveFactory()
klSolverFactory = CCObj.liftedMulticutKernighanLinFactory()
solverFactory = CCObj.chainedSolversFactory([greedySolverFactory, greedySolverFactory])
self.fm = nifty.graph.opt.lifted_multicut.PixelWiseLmcConnetedComponentsFusion2D(
objective=self.objective,
solver_factory=solverFactory)
def fuse_with(self, labels):
labels = numpy.squeeze(labels)
labels = numpy.require(labels, requirements=['C'])
if labels.ndim == 2:
if self.best_l is None:
self.best_l = labels
else:
#print("fuuuuu")
self.best_l = self.fm.fuse(
labels,
numpy.require(self.best_l,requirements=['C'])
)
else:
labels = numpy.concatenate([self.best_l[:,:,None], labels],axis=2)
self.best_l = self.fm.fuse(labels)
self.best_e = objective.evaluate(self.best_l)
print(self.best_e)
fuse_inf = Fuse(objective=objective, best_l=best_l)
local = local_affinities_to_pixel(objective.affinities, objective.offsets)
def seeded_watersheds(sigma):
#print("thesigma",sigma)
hmap1 = vigra.filters.gaussianSmoothing(local, 0.2)
hmap2 = vigra.filters.gaussianSmoothing(local, sigma)
hmap1 += 0.03*hmap2
#print(nifty.segmentation.seededWatersheds)
seg = nifty.segmentation.seededWatersheds(hmap1, method='edge_weighted', acc='interpixel')
return seg
def refine_watershed(labels,r, sigma):
hmap1 = vigra.filters.gaussianSmoothing(local, 0.2)
hmap2 = vigra.filters.gaussianSmoothing(local, sigma)
hmap1 += 0.03*hmap2
zeros = numpy.zeros_like(labels)
boundaries = skimage.segmentation.mark_boundaries(zeros, labels.astype('uint32'))[:,:,0]*255
#print(boundaries.min(),boundaries.max())
boundaries = vigra.filters.discDilation(boundaries.astype('uint8'),r).squeeze()
new_seeds = labels + 1
where_b = numpy.where(boundaries==1)
new_seeds[boundaries==255] = 0
seg,nseg = vigra.analysis.watersheds(hmap1.astype('float32'), seeds=new_seeds.astype('uint32'))
seg = nifty.segmentation.connectedComponents(seg)
return seg
def refiner(labels,r):
grid = numpy.arange(labels.size) + labels.max() + 1
grid = grid.reshape(labels.shape)
zeros = numpy.zeros_like(labels)
boundaries = skimage.segmentation.mark_boundaries(zeros, labels.astype('uint32'))[:,:,0]*255
#print(boundaries.min(),boundaries.max())
boundaries = vigra.filters.discDilation(boundaries.astype('uint8'),r).squeeze()
new_seeds = labels.copy()
where_mask = boundaries==255
new_seeds[where_mask] = grid[where_mask]
return new_seeds
proposals = []
proposals += objective.proposals_from_raw()
proposals += objective.proposal_from_local_agglo(local)
proposals += objective.proposal_from_raw_agglo()
proposals += [seeded_watersheds(s) for s in (1.0, 2.0, 3.0)]
#shuffle(proposals)
print("fuabsf")
for proposal in proposals:
print("fuse with prop")
fuse_inf.fuse_with(proposal)
if False:
print("refine watershed")
for r in (1,2,3,4,5):
for s in (1.0, 2.0, 3.0,5.0):
p = refine_watershed(fuse_inf.best_l,r=r,sigma=s)
fuse_inf.fuse_with(p)
else:
for r in (1,2,3,4):
while(True):
print("buja",r)
best_e = float(fuse_inf.best_e)
fuse_inf.fuse_with(refiner(fuse_inf.best_l, r=2))
if fuse_inf.best_e >= best_e:
break
#sys.exit()
if True:
for ps in (1,2,3,4):
print("multi shiftey", ps)
# shift
for i in range(10):
print("Si",i)
proposals = []
best_e = float(fuse_inf.best_e)
padded = numpy.pad(fuse_inf.best_l+1, ps+1, mode='constant', constant_values=0)
for x in range(-ps,ps+1):
for y in range(-ps,ps+1):
labels = padded[
ps + x : ps + x + shape[0],
ps + y : ps + y + shape[1]
]
#labels = nifty.segmentation.connectedComponents(prop)
proposals.append(labels[:,:,None])
if len(proposals) >= 6:
proposals = numpy.concatenate(proposals, axis=2)
fuse_inf.fuse_with(proposals)
proposals = []
if len(proposals) >= 1:
proposals = numpy.concatenate(proposals, axis=2)
fuse_inf.fuse_with(proposals)
if(fuse_inf.best_e >= best_e):
break
print("shiftey done ")
else:
print("shiftey")
# shift
ps = 2
for i in range(10):
print("Si",i)
proposals = []
best_e = float(fuse_inf.best_e)
padded = numpy.pad(fuse_inf.best_l+1, ps+1, mode='constant', constant_values=0)
for x in range(-ps,ps):
for y in range(-ps,ps):
labels = padded[
ps + x : ps + x + shape[0],
ps + y : ps + y + shape[1]
]
#labels = nifty.segmentation.connectedComponents(prop)
proposals.append(labels)
shuffle(proposals)
for labels in proposals:
fuse_inf.fuse_with(labels)
if(fuse_inf.best_e >= best_e):
break
print("shiftey done ")
return fuse_inf.best_l
def solve_pyramid(objective, best_l=None):
G = nifty.graph.UndirectedGraph
CCObj = G.LiftedMulticutObjective
solverFactory = CCObj.liftedMulticutGreedyAdditiveFactory()
fm = nifty.graph.opt.lifted_multicut.PixelWiseLmcConnetedComponentsFusion2D(objective=objective, solver_factory=solverFactory)
shape = objective.shape
best_e = None
if best_l is not None:
best_e = objective.evaluate(best_l)
# make a pyramid
current = objective
pyramid = [current]
#while(current.shape[0]!=64):
# print("jay")
# current = current.downsample_by_two()
# pyramid.append(current)
#pyramid = reversed(pyramid)
old_res = None
for obj in pyramid:
init = None
if old_res is not None:
print(old_res.shape)
print('\n\n\n\n')
            init = vigra.sampling.resize(old_res.astype('float32'), obj.shape, 0).astype('int')
old_res = solve_single_scale(obj, init)
res = old_res
return res
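# Maps each affinity p to the log-odds log((1-p)/p) + log((1-beta)/beta); high
# affinities yield strongly negative weights.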
def affinities_to_weights(affinities, offsets, beta=0.5):
eps = 0.00001
affinities = numpy.clip(affinities, eps, 1.0-eps)
weights = numpy.log((1.0-affinities)/(affinities)) + numpy.log((1.0-beta)/(beta))
return weights
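# Variant weighting: linear -(p - 0.5) for long-range edges, complement 1 - p for
# the two local edges, each scaled by the Euclidean length of its offset vector.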
def affinities_to_better_weights(affinities, offsets, beta=0.5):
weights = affinities.copy()
eps = 0.00001
affinities = numpy.clip(affinities, eps, 1.0-eps)
weights = numpy.log((1.0-affinities)/(affinities)) + numpy.log((1.0-beta)/(beta))
# long range
weights[:,:,:] = -1.0*(affinities[:,:,:]-0.5)
# local weighs
weights[:,:,0] = 1.0 - affinities[:,:,0]
weights[:,:,1] = 1.0 - affinities[:,:,1]
weights *= numpy.sum(offsets**2,1)**0.5
return weights
def affinities_lmc(raw, affinities, offsets, beta=0.5):
    # convert affinities to weights
    weights = affinities_to_better_weights(affinities=affinities, offsets=offsets, beta=beta)
#w = numpy.sum(offsets**2,axis=1)
#weights *= w
#weights[:,:,0] = 0
#weights[:,:,1] = 0
objective = PlmcObjective2D(raw=raw, affinities=affinities, weights=weights, offsets=offsets)
return solve_pyramid(objective)
if __name__ == "__main__":
# load weighs and raw
path_affinities = "/home/tbeier/nice_p/isbi_test_default.h5"
#path_affinities = "/home/tbeier/nice_probs/isbi_test_default.h5"
offsets = numpy.array([
[-1,0],[0,-1],
[-9,0],[0,-9],[-9,-9],[9,-9],
[-9,-4],[-4,-9],[4,-9],[9,-4],
[-27,0],[0,-27],[-27,-27],[27,-27]
])
import h5py
    f5_affinities = h5py.File(path_affinities, 'r')
affinities = f5_affinities['data']
z = 8
# get once slice
affinities = numpy.rollaxis(affinities[:,z,:,:],0,3)
affinities = numpy.require(affinities, requirements=['C'])
import skimage.io
#raw_path = "/home/tbeier/src/nifty/src/python/examples/multicut/NaturePaperDataUpl/ISBI2012/raw_test.tif"
raw_path = '/home/tbeier/src/nifty/mysandbox/NaturePaperDataUpl/ISBI2012/raw_test.tif'
raw = skimage.io.imread(raw_path)
raw = raw[z,:,:]
#raw = raw[200:64+200, 200:64+200]
#affinities = affinities[200:64+200, 200:64+200,:]
#t = 0.2
#affinities[affinities >= t ] = 1
#affinities[affinities < t ] = 0
print(raw.shape, affinities.shape)
if False:
import matplotlib.pyplot as plt
for x in range(offsets.shape[0]):
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax1.imshow(raw)
ax2 = fig.add_subplot(2,1,2)
ax2.imshow(affinities[:,:,x])
plt.show()
sys.exit()
    res = affinities_lmc(raw=raw, affinities=affinities, offsets=offsets, beta=0.5)
    import matplotlib.pyplot as plt  # the import above only runs in the disabled debug branch
    plt.imshow(nifty.segmentation.segmentOverlay(raw, res, showBoundaries=False))
plt.show()
plt.imshow(nifty.segmentation.markBoundaries(raw, res, color=(1,0,0)))
plt.show()
| mit | 114,205,025,194,031,790 | 27.480631 | 141 | 0.557705 | false |
scrapinghub/dateparser | dateparser/data/date_translation_data/it.py | 1 | 5121 | info = {
"name": "it",
"date_order": "DMY",
"january": [
"gen",
"gennaio"
],
"february": [
"feb",
"febbraio"
],
"march": [
"mar",
"marzo"
],
"april": [
"apr",
"aprile"
],
"may": [
"mag",
"maggio"
],
"june": [
"giu",
"giugno"
],
"july": [
"lug",
"luglio"
],
"august": [
"ago",
"agosto"
],
"september": [
"set",
"settembre"
],
"october": [
"ott",
"ottobre"
],
"november": [
"nov",
"novembre"
],
"december": [
"dic",
"dicembre"
],
"monday": [
"lun",
"lunedì"
],
"tuesday": [
"mar",
"martedì"
],
"wednesday": [
"mer",
"mercoledì"
],
"thursday": [
"gio",
"giovedì"
],
"friday": [
"ven",
"venerdì"
],
"saturday": [
"sab",
"sabato"
],
"sunday": [
"dom",
"domenica"
],
"am": [
"am"
],
"pm": [
"pm"
],
"year": [
"anno",
"anni"
],
"month": [
"mese",
"mesi"
],
"week": [
"sett",
"settimana",
"settimane"
],
"day": [
"g",
"giorno",
"giorni"
],
"hour": [
"h",
"ora",
"ore"
],
"minute": [
"m",
"min",
"minuto",
"minuti"
],
"second": [
"s",
"sec",
"secondo",
"secondi"
],
"relative-type": {
"0 day ago": [
"oggi"
],
"0 hour ago": [
"quest'ora"
],
"0 minute ago": [
"questo minuto"
],
"0 month ago": [
"questo mese"
],
"0 second ago": [
"ora"
],
"0 week ago": [
"questa settimana"
],
"0 year ago": [
"quest'anno"
],
"1 day ago": [
"ieri"
],
"1 month ago": [
"mese scorso"
],
"1 week ago": [
"settimana scorsa"
],
"1 year ago": [
"anno scorso"
],
"in 1 day": [
"domani"
],
"in 1 month": [
"mese prossimo"
],
"in 1 week": [
"settimana prossima"
],
"in 1 year": [
"anno prossimo"
],
"2 day ago": [
"altro ieri"
]
},
"relative-type-regex": {
"\\1 day ago": [
"(\\d+) g fa",
"(\\d+) gg fa",
"(\\d+) giorni fa",
"(\\d+) giorno fa"
],
"\\1 hour ago": [
"(\\d+) h fa",
"(\\d+) ora fa",
"(\\d+) ore fa"
],
"\\1 minute ago": [
"(\\d+) min fa",
"(\\d+) minuti fa",
"(\\d+) minuto fa"
],
"\\1 month ago": [
"(\\d+) mese fa",
"(\\d+) mesi fa"
],
"\\1 second ago": [
"(\\d+) s fa",
"(\\d+) sec fa",
"(\\d+) secondi fa",
"(\\d+) secondo fa"
],
"\\1 week ago": [
"(\\d+) sett fa",
"(\\d+) settimana fa",
"(\\d+) settimane fa"
],
"\\1 year ago": [
"(\\d+) anni fa",
"(\\d+) anno fa"
],
"in \\1 day": [
"tra (\\d+) g",
"tra (\\d+) gg",
"tra (\\d+) giorni",
"tra (\\d+) giorno"
],
"in \\1 hour": [
"tra (\\d+) h",
"tra (\\d+) ora",
"tra (\\d+) ore"
],
"in \\1 minute": [
"tra (\\d+) min",
"tra (\\d+) minuti",
"tra (\\d+) minuto"
],
"in \\1 month": [
"tra (\\d+) mese",
"tra (\\d+) mesi"
],
"in \\1 second": [
"tra (\\d+) s",
"tra (\\d+) sec",
"tra (\\d+) secondi",
"tra (\\d+) secondo"
],
"in \\1 week": [
"tra (\\d+) sett",
"tra (\\d+) settimana",
"tra (\\d+) settimane"
],
"in \\1 year": [
"tra (\\d+) anni",
"tra (\\d+) anno"
]
},
"locale_specific": {
"it-CH": {
"name": "it-CH"
},
"it-SM": {
"name": "it-SM"
},
"it-VA": {
"name": "it-VA"
}
},
"skip": [
"circa",
"e",
" ",
"'",
",",
"-",
".",
"/",
";",
"@",
"[",
"]",
"|",
","
],
"sentence_splitter_group": 1,
"ago": [
"fa"
],
"in": [
"in"
],
"simplifications": [
{
"(\\d+)\\s+ora": "\\1 ore"
}
]
}
| bsd-3-clause | -2,389,134,767,092,052,000 | 17.134752 | 38 | 0.262026 | false |
simonmonk/make_action | python/experiments/mixing_colors.py | 2 | 1562 | from Tkinter import *
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM) # (1)
GPIO.setup(18, GPIO.OUT)
GPIO.setup(23, GPIO.OUT)
GPIO.setup(24, GPIO.OUT)
pwmRed = GPIO.PWM(18, 500) # (2)
pwmRed.start(100)
pwmGreen = GPIO.PWM(23, 500)
pwmGreen.start(100)
pwmBlue = GPIO.PWM(24, 500)
pwmBlue.start(100)
class App:
def __init__(self, master): #(3)
frame = Frame(master) #(4)
frame.pack()
Label(frame, text='Red').grid(row=0, column=0) # (5)
Label(frame, text='Green').grid(row=1, column=0)
Label(frame, text='Blue').grid(row=2, column=0)
scaleRed = Scale(frame, from_=0, to=100, # (6)
orient=HORIZONTAL, command=self.updateRed)
scaleRed.grid(row=0, column=1)
scaleGreen = Scale(frame, from_=0, to=100,
orient=HORIZONTAL, command=self.updateGreen)
scaleGreen.grid(row=1, column=1)
scaleBlue = Scale(frame, from_=0, to=100,
orient=HORIZONTAL, command=self.updateBlue)
scaleBlue.grid(row=2, column=1)
def updateRed(self, duty): # (7)
# change the led brightness to match the slider
pwmRed.ChangeDutyCycle(float(duty))
def updateGreen(self, duty):
pwmGreen.ChangeDutyCycle(float(duty))
def updateBlue(self, duty):
pwmBlue.ChangeDutyCycle(float(duty))
root = Tk() # (8)
root.wm_title('RGB LED Control')
app = App(root)
root.geometry("200x150+0+0")
try:
root.mainloop()
finally:
print("Cleaning up")
GPIO.cleanup() | mit | -5,904,124,153,999,706,000 | 25.05 | 60 | 0.608195 | false |
keedio/keedio-stacks | KEEDIO/1.3/services/GANGLIA/package/scripts/params.py | 1 | 7560 | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
from resource_management.core.system import System
import os
config = Script.get_config()
user_group = config['configurations']['cluster-env']["user_group"]
ganglia_conf_dir = default("/configurations/ganglia-env/ganglia_conf_dir", "/etc/ganglia")
ganglia_dir = "/etc/ganglia"
ganglia_runtime_dir = config['configurations']['ganglia-env']["ganglia_runtime_dir"]
ganglia_shell_cmds_dir = "/usr/libexec/hdp/ganglia"
gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
gmond_user = config['configurations']['ganglia-env']["gmond_user"]
gmond_add_clusters_str = default("/configurations/ganglia-env/additional_clusters", None)
if gmond_add_clusters_str and gmond_add_clusters_str.isspace():
gmond_add_clusters_str = None
gmond_app_strs = [] if gmond_add_clusters_str is None else gmond_add_clusters_str.split(',')
gmond_apps = []
for x in gmond_app_strs:
a,b = x.strip().split(':')
gmond_apps.append((a.strip(),b.strip()))
if System.get_instance().os_family == "ubuntu":
gmond_service_name = "ganglia-monitor"
modules_dir = "/usr/lib/ganglia"
else:
gmond_service_name = "gmond"
modules_dir = "/usr/lib64/ganglia"
webserver_group = "apache"
rrdcached_base_dir = config['configurations']['ganglia-env']["rrdcached_base_dir"]
rrdcached_timeout = default("/configurations/ganglia-env/rrdcached_timeout", 3600)
rrdcached_flush_timeout = default("/configurations/ganglia-env/rrdcached_flush_timeout", 7200)
rrdcached_delay = default("/configurations/ganglia-env/rrdcached_delay", 1800)
rrdcached_write_threads = default("/configurations/ganglia-env/rrdcached_write_threads", 4)
ganglia_server_host = config["clusterHostInfo"]["ganglia_server_host"][0]
hostname = config["hostname"]
namenode_host = set(default("/clusterHostInfo/namenode_host", []))
jtnode_host = set(default("/clusterHostInfo/jtnode_host", []))
rm_host = set(default("/clusterHostInfo/rm_host", []))
hs_host = set(default("/clusterHostInfo/hs_host", []))
hbase_master_hosts = set(default("/clusterHostInfo/hbase_master_hosts", []))
datanodes_hosts = set(default("/clusterHostInfo/slave_hosts", []))
tt_hosts = set(default("/clusterHostInfo/mapred_tt_hosts", []))
nm_hosts = set(default("/clusterHostInfo/nm_hosts", []))
hbase_rs_hosts = set(default("/clusterHostInfo/hbase_rs_hosts", []))
flume_hosts = set(default("/clusterHostInfo/flume_hosts", []))
jn_hosts = set(default("/clusterHostInfo/journalnode_hosts", []))
nimbus_server_hosts = set(default("/clusterHostInfo/nimbus_hosts", []))
supervisor_server_hosts = set(default("/clusterHostInfo/supervisor_hosts", []))
kafka_broker_hosts = set(default("/clusterHostInfo/kafka_broker_hosts", []))
es_master_hosts = str(default("/clusterHostInfo/elasticsearch_hosts", ['none']))
kafka_ganglia_port = default("/configurations/kafka-broker/kafka.ganglia.metrics.port", 8671)
es_service_port = default('/configurations/elasticsearch/service_port',9200)
pure_slave = not hostname in (namenode_host | jtnode_host | rm_host | hs_host | \
hbase_master_hosts | datanodes_hosts | tt_hosts | hbase_rs_hosts | \
flume_hosts | nm_hosts | jn_hosts | nimbus_server_hosts | \
supervisor_server_hosts)
is_ganglia_server_host = (hostname == ganglia_server_host)
has_namenodes = not len(namenode_host) == 0
has_jobtracker = not len(jtnode_host) == 0
has_resourcemanager = not len(rm_host) == 0
has_historyserver = not len(hs_host) == 0
has_hbase_masters = not len(hbase_master_hosts) == 0
has_datanodes = not len(datanodes_hosts) == 0
has_tasktracker = not len(tt_hosts) == 0
has_nodemanager = not len(nm_hosts) == 0
has_hbase_rs = not len(hbase_rs_hosts) == 0
has_flume = not len(flume_hosts) == 0
has_journalnode = not len(jn_hosts) == 0
has_nimbus_server = not len(nimbus_server_hosts) == 0
has_supervisor_server = not len(supervisor_server_hosts) == 0
has_kafka_broker = not len(kafka_broker_hosts) == 0
if 'none' in es_master_hosts:
has_elasticsearch_server = False
else:
has_elasticsearch_server = True
clusters=["Slaves"]
if has_namenodes:
clusters.append("NameNode")
if has_hbase_masters:
clusters.append("HBaseMaster")
if has_resourcemanager:
clusters.append("ResourceManager")
if has_nodemanager:
clusters.append("NodeManager")
if has_historyserver:
clusters.append("HistoryServer")
if has_datanodes:
clusters.append("DataNode")
if has_hbase_rs:
clusters.append("HBaseRegionServer")
if has_nimbus_server:
clusters.append("Nimbus")
if has_supervisor_server:
clusters.append("Supervisor")
if has_kafka_broker:
clusters.append("Kafka")
if has_flume:
clusters.append("FlumeServer")
if has_journalnode:
clusters.append("JournalNode")
if has_elasticsearch_server:
clusters.append("ElasticSearch")
exclude_packages = []
if not is_ganglia_server_host:
exclude_packages += [ "python-rrdtool", "ganglia",
"ganglia-devel", "ganglia-gmetad",
"ganglia-web", "httpd" ]
ganglia_cluster_names = {
"journalnode_hosts": [("JournalNode", 8654)],
"flume_hosts": [("FlumeServer", 8655)],
"hbase_rs_hosts": [("HBaseRegionServer", 8656)],
"nm_hosts": [("NodeManager", 8657)],
"slave_hosts": [("DataNode", 8658)],
"namenode_host": [("NameNode", 8659)],
"hbase_master_hosts": [("HBaseMaster", 8660)],
"rm_host": [("ResourceManager", 8661)],
"hs_host": [("HistoryServer", 8662)],
"nimbus_hosts": [("Nimbus", 8663)],
"supervisor_hosts": [("Supervisor", 8664)],
"ReservedPort1": [("ReservedPort1", 8667)],
"ReservedPort2": [("ReservedPort2", 8668)],
"ReservedPort3": [("ReservedPort3", 8669)],
"ReservedPort4": [("ReservedPort3", 8670)],
"kafka_broker_hosts": [("Kafka", kafka_ganglia_port)],
"elasticsearch_hosts": [("ElasticSearch", 8672)]
}
ganglia_clusters = [("Slaves", 8653)]
for key in ganglia_cluster_names:
property_name = format("/clusterHostInfo/{key}")
hosts = set(default(property_name, []))
if not len(hosts) == 0:
for x in ganglia_cluster_names[key]:
ganglia_clusters.append(x)
if len(gmond_apps) > 0:
for gmond_app in gmond_apps:
ganglia_clusters.append(gmond_app)
ganglia_apache_config_file = "/etc/httpd/conf.d/ganglia.conf"
ganglia_web_path="/usr/share/ganglia"
if System.get_instance().os_family == "suse":
rrd_py_path = '/srv/www/cgi-bin'
dwoo_path = '/var/lib/ganglia-web/dwoo'
web_user = "wwwrun"
# for upgrade purposes as path to ganglia was changed
if not os.path.exists(ganglia_web_path):
ganglia_web_path='/srv/www/htdocs/ganglia'
elif System.get_instance().os_family == "redhat":
rrd_py_path = '/var/www/cgi-bin'
web_user = "apache"
elif System.get_instance().os_family == "ubuntu":
rrd_py_path = '/usr/lib/cgi-bin'
ganglia_web_path = '/usr/share/ganglia-webfrontend'
web_user = "www-data"
| apache-2.0 | -8,248,663,965,796,467,000 | 39 | 98 | 0.710714 | false |
jaracil/nxpy | pynexus/pynexus.py | 1 | 17276 | # -*- coding: utf-8 -*-
##############################################################################
#
# pynexus, a Python library for easy playing with Nexus
# Copyright (C) 2016 by the pynexus team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import atexit
from .jsocket import JSocketDecoder
import json
import multiprocessing
try:
from queue import Queue
except ImportError:
from Queue import Queue
from . import net
import select
import threading
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
import time
from .version import __version__
# Constants
ErrParse = -32700
ErrInvalidRequest = -32600
ErrMethodNotFound = -32601
ErrInvalidParams = -32602
ErrInternal = -32603
ErrTimeout = -32000
ErrCancel = -32001
ErrInvalidTask = -32002
ErrInvalidPipe = -32003
ErrInvalidUser = -32004
ErrUserExists = -32005
ErrPermissionDenied = -32010
ErrTtlExpired = -32011
ErrUnknownError = -32098
ErrNotSupported = -32099
ErrConnClosed = -32007
ErrStr = {
ErrParse: "Parse error",
ErrInvalidRequest: "Invalid request",
ErrMethodNotFound: "Method not found",
ErrInvalidParams: "Invalid params",
ErrInternal: "Internal error",
ErrTimeout: "Timeout",
ErrCancel: "Cancel",
ErrInvalidTask: "Invalid task",
ErrInvalidPipe: "Invalid pipe",
ErrInvalidUser: "Invalid user",
ErrUserExists: "User already exists",
ErrPermissionDenied: "Permission denied",
ErrTtlExpired: "TTL expired",
ErrUnknownError: "Unknown error",
ErrNotSupported: "Not supported",
ErrConnClosed: "Connection is closed",
}
class NexusConn(object):
def pushRequest(self, request):
self.requests[1].send(request)
return None
def pullRequest(self):
return self.requests[0].recv(), None
def registerChannel(self, task_id, channel):
with self.resTableLock:
self.resTable[task_id] = channel
def getChannel(self, task_id):
res = None
with self.resTableLock:
res = self.resTable.get(task_id)
return res
def unregisterChannel(self, task_id):
with self.resTableLock:
if task_id in self.resTable:
del self.resTable[task_id]
def cancelChannels(self):
with self.resTableLock:
for channel in self.resTable.values():
channel.put({u'jsonrpc': u'2.0', u'id': None, u'error': {u'code': ErrConnClosed, u'message': ErrStr[ErrConnClosed]}})
def getTimeToNextPing(self):
now = time.time()
return self.lastRead + self.keepAlive - now
def resetTimeToNextPing(self):
self.lastRead = time.time()
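    # Keep-alive loop: if nothing has been read for keepAlive seconds, send a
    # sys.ping; a failed ping (or a shutdown message on the pipe) ends the loop
    # and cancels the connection.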
def mainWorker(self, pipe):
try:
while True:
delay = self.getTimeToNextPing()
ready = select.select([pipe[0]], [], [], delay)
if ready[0] and ready[0][0] == pipe[0]:
break
else:
delay = self.getTimeToNextPing()
if delay <= 0:
error = self.ping(self.keepAlive)
if error:
raise Exception("Error in ping", error)
finally:
self.cancel()
def sendWorker(self, pipe):
try:
while True:
ready = select.select([self.requests[0], pipe[0]], [], [])
if ready[0]:
if ready[0][0] == pipe[0]:
break
else:
request, error = self.pullRequest()
if error:
break
request['jsonrpc'] = '2.0'
with self.connLock:
self.conn.send(json.dumps(request).encode())
finally:
self.cancel()
def recvWorker(self, pipe):
try:
decoder = JSocketDecoder(self.conn)
while True:
ready = select.select([decoder, pipe[0]], [], [])
if ready[0]:
if ready[0][0] == pipe[0]:
break
else:
message = decoder.getObject()
self.resetTimeToNextPing()
if message:
channel = self.getChannel(message['id'])
if channel:
channel.put(message)
finally:
self.cancel()
def newId(self, taskId=None):
new_id = taskId
if not new_id:
self.lastTaskId += 1
new_id = self.lastTaskId
new_channel = Queue()
self.registerChannel(new_id, new_channel)
return new_id, new_channel
def delId(self, task_id):
self.unregisterChannel(task_id)
def __init__(self, conn, keepAlive=60):
self.conn = conn
self.connLock = threading.Lock()
self.requests = multiprocessing.Pipe(False)
self.keepAlive = keepAlive
self.resTable = {}
self.resTableLock = threading.Lock()
self.lastTaskId = 0
self.workers = []
self.lastRead = time.time()
self._stopping = False
self._stoppingLock = threading.Lock()
self.startWorker(self.sendWorker)
self.startWorker(self.recvWorker)
self.startWorker(self.mainWorker)
atexit.register(self.cancel)
def startWorker(self, target):
pipe = multiprocessing.Pipe(False)
worker = threading.Thread(target=target, args=(pipe,))
worker.daemon = True
worker.start()
self.workers.append((worker, pipe))
def cancel(self):
with self._stoppingLock:
if self._stopping:
return False
self._stopping = True
# Cancel pull requests
self.cancelChannels()
# Stop workers
for worker, pipe in self.workers:
if worker != threading.current_thread():
pipe[1].send("exit")
worker.join()
self.workers = []
return True
def executeNoWait(self, method, params, taskId=None):
with self._stoppingLock:
if self._stopping:
return 0, None, {u'code': ErrConnClosed, u'message': ErrStr[ErrConnClosed]}
task_id, channel = self.newId(taskId=taskId)
req = {
'id': task_id,
'method': method,
'params': params,
}
err = self.pushRequest(req)
if err:
self.delId(task_id)
return 0, None, err
return task_id, channel, None
def execute(self, method, params, taskId=None):
task_id, channel, err = self.executeNoWait(method, params, taskId=taskId)
if err:
return None, err
res = channel.get()
self.delId(task_id)
if 'error' in res:
return None, res['error']
else:
return res['result'], None
def ping(self, timeout):
task_id, channel, err = self.executeNoWait('sys.ping', None)
if err:
return err
try:
channel.get(True, timeout)
self.delId(task_id)
return None
except Exception as e:
self.delId(task_id)
return e
def login(self, username, password):
return self.execute('sys.login', {'user': username, 'pass': password})
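    # Push a task onto the Nexus queue; prio, ttl, detach and timeout are only
    # added to the request when they differ from their zero/False defaults.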
def taskPush(self, method, params, timeout=0, priority=0, ttl=0, detach=False):
message = {
'method': method,
'params': params,
}
if priority != 0:
message['prio'] = priority
if ttl != 0:
message['ttl'] = ttl
if detach:
message['detach'] = True
if timeout > 0:
message['timeout'] = timeout
return self.execute('task.push', message)
def taskPushCh(self, method, params, timeout=0, priority=0, ttl=0, detach=False):
resQueue = Queue()
errQueue = Queue()
def callTaskPush():
res, err = self.taskPush(method, params, timeout=timeout, priority=priority, ttl=ttl, detach=detach)
if err:
errQueue.put(err)
else:
resQueue.put(res)
threading.Thread(target=callTaskPush).start()
return resQueue, errQueue
def taskPull(self, prefix, timeout=0, taskId=None):
message = {'prefix': prefix}
if timeout > 0:
message['timeout'] = timeout
res, err = self.execute('task.pull', message, taskId=taskId)
if err:
return None, err
task = Task(
self,
res['taskid'],
res['path'],
res['method'],
res['params'],
res['tags'],
res['prio'],
res['detach'],
res['user']
)
return task, None
def cancelPull(self, taskId):
return self.execute('task.cancel', {'id': taskId})
def taskPullCh(self, prefix, timeout=0):
resQueue = Queue()
errQueue = Queue()
def callTaskPull():
task, err = self.taskPull(prefix, timeout=timeout)
if err:
errQueue.put(err)
else:
                resQueue.put(task)
threading.Thread(target=callTaskPull).start()
return resQueue, errQueue
def userCreate(self, username, password):
return self.execute('user.create', {'user': username, 'pass': password})
def userDelete(self, username):
return self.execute('user.delete', {'user': username})
def userSetTags(self, username, prefix, tags):
return self.execute('user.setTags', {'user': username, 'prefix': prefix, 'tags': tags})
def userDelTags(self, username, prefix, tags):
return self.execute('user.delTags', {'user': username, 'prefix': prefix, 'tags': tags})
def userSetPass(self, username, password):
return self.execute('user.setPass', {'user': username, 'pass': password})
def pipeOpen(self, pipeId):
return Pipe(self, pipeId), None
def pipeCreate(self, length = -1):
par = {}
if length > 0:
par["len"] = length
res, err = self.execute("pipe.create", par)
if err:
return None, err
return self.pipeOpen(res["pipeid"])
def topicSubscribe(self, pipe, topic):
return self.execute('topic.sub', {'pipeid': pipe.pipeId, 'topic': topic})
def topicUnsubscribe(self, pipe, topic):
return self.execute('topic.unsub', {'pipeid': pipe.pipeId, 'topic': topic})
def topicPublish(self, topic, message):
return self.execute('topic.pub', {'topic': topic, 'msg': message})
def lock(self, name):
res, err = self.execute('sync.lock', {'lock': name})
if err:
return None, err
else:
return bool(res['ok']), None
def unlock(self, name):
res, err = self.execute('sync.unlock', {'lock': name})
if err:
return None, err
else:
return bool(res['ok']), None
def _getNexusVersion(self):
res, err = self.execute("sys.version", None)
if err == None and isinstance(res, dict) and "version" in res and isinstance(res["version"], str):
return res["version"]
return "0.0.0"
class Client(NexusConn):
def __init__(self, url, keepAlive=60):
nexusURL = urlparse(url)
self.hostname = nexusURL.hostname
self.port = nexusURL.port
self.scheme = nexusURL.scheme
self.username = nexusURL.username
self.password = nexusURL.password
self.is_logged = False
self.login_error = None
self.connid = None
self.nexus_version = "0.0.0"
self.is_version_compatible = False
self._closing = False
self._closingLock = threading.Lock()
self.socket = net.connect(self.hostname, self.port, self.scheme)
super(Client, self).__init__(self.socket, keepAlive=keepAlive)
self.nexusConn = self # for backward compatibility
err = self.ping(20)
if err != None:
raise Exception(err)
if self.username != None and self.password != None:
self.login()
self.nexus_version = self._getNexusVersion()
self.is_version_compatible = self.nexus_version.split(".")[0] == __version__.split(".")[0]
atexit.register(self.close)
def login(self):
res, err = super(Client, self).login(self.username, self.password)
if err:
self.is_logged = False
self.login_error = err
self.connid = None
else:
self.is_logged = True
self.login_error = None
self.connid = res['connid']
def close(self):
with self._closingLock:
if self._closing:
return False
self._closing = True
self.cancel()
if self.socket:
self.socket.close()
self.socket = None
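# Minimal usage sketch (host, credentials and method name are hypothetical):
#   client = Client("tcp://user:[email protected]:1717")
#   res, err = client.taskPush("demo.sum", {"a": 1, "b": 2}, timeout=10)
#   client.close()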
class Task(object):
def __init__(self, nexusConn, taskId, path, method, params, tags, priority, detach, user):
self.nexusConn = nexusConn
self.taskId = taskId
self.path = path
self.method = method
self.params = params
self.tags = tags
self.priority = priority
self.detach = detach
self.user = user
def sendResult(self, result):
params = {
'taskid': self.taskId,
'result': result,
}
return self.nexusConn.execute('task.result', params)
def sendError(self, code, message, data):
if code < 0:
if code in ErrStr:
if message != "":
message = "%s:[%s]" % (ErrStr[code], message)
else:
message = ErrStr[code]
params = {
'taskid': self.taskId,
'code': code,
'message': message,
'data': data,
}
return self.nexusConn.execute('task.error', params)
def reject(self):
"""
        Reject the task; it is returned to the Nexus task queue.
"""
params = {
'taskid': self.taskId,
}
return self.nexusConn.execute('task.reject', params)
def accept(self):
"""
Accept a detached task.
"""
return self.sendResult(None)
class Pipe(object):
def __init__(self, nexusConn, pipeId):
self.nexusConn = nexusConn
self.pipeId = pipeId
def close(self):
return self.nexusConn.execute("pipe.close", {"pipeid": self.pipeId})
def write(self, msg):
return self.nexusConn.execute("pipe.write", {"pipeid": self.pipeId, "msg": msg})
def read(self, mx, timeout=0):
par = {"pipeid": self.pipeId, "max": mx, "timeout": timeout}
res, err = self.nexusConn.execute("pipe.read", par)
if err:
return None, err
try:
msgres = []
for msg in res["msgs"]:
msgres.append(Msg(msg["count"], msg["msg"]))
except:
return None, {u'code': ErrInternal, u'message': ErrStr[ErrInternal]}
return PipeData(msgres, res["waiting"], res["drops"]), None
def listen(self, channel=None):
if channel is None:
channel = Queue()
def pipeReader():
try:
while True:
data, err = self.read(100000)
if err:
break
for message in data.msgs:
channel.put(message)
except:
pass
threading.Thread(target=pipeReader).start()
return channel
def id(self):
return self.pipeId
class Msg(object):
def __init__(self, count, msg):
self.count = count
self.msg = msg
class PipeData(object):
def __init__(self, msgs, waiting, drops):
self.msgs = msgs
self.waiting = waiting
self.drops = drops
class PipeOpts(object):
def __init__(self, length):
self.length = length
| lgpl-3.0 | 8,857,020,541,984,786,000 | 29.685613 | 133 | 0.539998 | false |
kwiecien/ppl | ppl/scripts/face_paths.py | 1 | 2370 | #!/usr/bin/env python
import datetime
import rospy
from geometry_msgs.msg import PointStamped
from people_msgs.msg import People
from tf import ExtrapolationException, LookupException, TransformListener
FILE = file
RECORDED_PEOPLE = dict()
def listener():
global transform_listener
transform_listener = TransformListener()
rospy.Subscriber(
"/face_people",
People,
callbackPplPaths,
queue_size=1)
rospy.spin()
def callbackPplPaths(people_msg):
writeToFile(people_msg)
def createFile():
global FILE
time = datetime.datetime.now()
name = "/home/krzysztof/catkin_ws/src/ppl/paths/" + \
"face_" + '{:%Y-%m-%d-%H-%M-%S}'.format(time) + ".dat"
FILE = open(name, 'w')
def writeToFile(people_msg):
if len(people_msg.people) == 0:
return
writeTime(people_msg.header.stamp)
writeTime(countMeasuredTime(people_msg.header.stamp))
updatePeoplePositions(people_msg)
writePeoplePositions()
def writeTime(time):
FILE.write(str(time))
FILE.write('\t')
def countMeasuredTime(timestamp):
time = timestamp.to_sec()
time = round(time, 2)
return time
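# Reset every known person to "1/0" (likely gnuplot's undefined expression, which
# leaves a gap in the plotted path), then fill in base_link coordinates for the
# people actually detected in this frame.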
def updatePeoplePositions(people_msg):
global transform_listener
for person in RECORDED_PEOPLE:
RECORDED_PEOPLE[person] = ['"1/0"', '"1/0"']
for person in people_msg.people:
point = PointStamped()
point.header = people_msg.header
point.point = person.position
try:
base_link_point = transform_listener.transformPoint("base_link", point)
if person.name not in RECORDED_PEOPLE:
RECORDED_PEOPLE[person.name] = []
RECORDED_PEOPLE[person.name] = [base_link_point.point.x, base_link_point.point.y]
except (LookupException, ExtrapolationException):
continue
def writePeoplePositions():
i = 1
for person in RECORDED_PEOPLE:
writePosition(RECORDED_PEOPLE[person], i)
i += 1
FILE.write('\n')
print "------------------------------------"
def writePosition(position, i):
x = position[0]
y = position[1]
print "Person", i, "[x, y]", x, y
FILE.write(str(y))
FILE.write('\t')
FILE.write(str(x))
FILE.write('\t')
if __name__ == '__main__':
rospy.init_node('face_paths', anonymous=False)
createFile()
listener()
| gpl-3.0 | 6,371,509,571,875,838,000 | 23.6875 | 93 | 0.62616 | false |
CICIC/gestioCIimporter | cleanCSVdata.py | 1 | 2732 | # coding: utf8
import re
import logging
def cleanDate(date):
"Clean date format from yyyy[/]mm[/]dd"
date = date.split(' ')[0]
if date != '':
try:
query = r'([0-9]|0[1-9]|[12][0-9]|3[01])/([0-9]|0[1-9]|1[012])/((19|20)[0-9][0-9]|1[0-9])'
date = re.match(query, date).group(0)
date = date.split('/')
if len(date[2])==2:
date[2] = '20' + date[2]
date = date[2] + '-' + date[1] + '-' + date[0]
except AttributeError:
date = None
else:
date = None
return date
def minicleanDate(date):
return date.split(' ')[0]
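# Normalize Spanish phone numbers: strip separators and the +34 / 0034 country
# prefix, then keep the first nine digits.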
def cleanPhone(phone):
"Clean phone date, only spain numbers"
phone = phone.replace(' ', '')
phone = phone.replace('.', '')
phone = phone.replace('-', '')
phone = phone.replace('+34', '')
if re.match(r"0034", phone):
phone = phone[4:]
phone = phone[0:9]
if not re.match(r"[0-9]{9}", phone) and len(phone) > 9:
phone = None
return phone
def cleanPostalcode(postalcode):
if re.match(r"[0-9]{4}", postalcode) and len(postalcode) == 4:
postalcode = '0' + postalcode
if (not re.match(r"[0-9]{5}", postalcode)) or len(postalcode) != 5:
postalcode = None
return postalcode
def cleanCOOPnumber(coopnumber):
coopnumber = coopnumber.replace(' ','')
if re.match(r"COOP[0-9]{4}",coopnumber):
coopnumber = coopnumber[0:8]
else:
coopnumber = ''
return coopnumber
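# Accept only 9-character IDs shaped like a Spanish DNI/NIE: 8 digits plus a
# letter, or a letter plus 8 digits.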
def cleanIDcard(idcard):
idcard = idcard.replace('-','')
idcard = idcard.replace('.','')
idcard = idcard.replace(' ','')
    if not (re.match(r"[a-zA-Z][0-9]{8}", idcard) or
            re.match(r"[0-9]{8}[a-zA-Z]", idcard)) or len(idcard) != 9:
        idcard = ''
return idcard
def cleanFloat(num):
"Convert 0.000,00 -> 0000.00"
num = num.replace('.','')
num = num.replace(',','.')
if num == '':
num = 0
try:
num = float(num)
except ValueError:
print "Not a float:", num
num = 0.0
return num
def cleanInteger(num):
"In this case only remove the value if it's not an integer"
if num == '':
num = 0
try:
num = int(num)
except ValueError:
print "Not an integer:", num
num=0
return num
def cleanCooperative(coop):
if coop == 'x':
coop = 'X'
if coop == 'i':
coop = 'I'
if coop != 'X' and coop != 'I':
coop = None
return coop
def cleanEmail(email):
"Return a valid email"
em = re.search("(<)?([\w\-_.]+@[\w\-_.]+(?:\.\w+)+)(?(1)>)", email)
if em:
email = em.group(0)
else:
email = ''
return email
| agpl-3.0 | -2,201,793,729,169,544,200 | 22.756522 | 102 | 0.514275 | false |
DLunin/bayescraft | graphmodels/generators.py | 1 | 4075 | import networkx as nx
import numpy as np
import pandas as pd
import scipy as sp
import scipy.stats as stats
from itertools import *
import pytest
from bayescraft.graphmodels.factors import (TableCPD, MultivariateGaussianDistribution,
ParametricFunctionCPD, LinearGaussianDistribution)
from bayescraft.graphmodels import DGM
import bayescraft.stats as bstats
def names_to_str(g):
result = nx.Graph()
    result.add_nodes_from(map(str, g.nodes()))
result.add_edges_from(map(lambda x: (str(x[0]), str(x[1])), g.edges()))
return result
class AcyclicDiGraphGen:
@staticmethod
def diamond(n_var):
G = nx.DiGraph()
G.add_nodes_from(range(n_var))
G.add_edges_from(zip(repeat(0), range(1, n_var-1)))
G.add_edges_from(zip(range(1, n_var-1), repeat(n_var-1)))
G.add_edge(0, n_var-1)
return G
@staticmethod
def star(n_var):
G = nx.DiGraph()
G.add_nodes_from(range(n_var))
G.add_edges_from([(i, 0) for i in range(1, n_var)])
return G
@staticmethod
def random_gnr(n_var, p=0.2):
return nx.gnr_graph(n_var, p)
@staticmethod
def random_erdos_renyi(n_var, p=0.2):
while True:
G = nx.erdos_renyi_graph(n_var, p, directed=True)
if not nx.is_directed_acyclic_graph(G):
continue
return G
class GraphGen:
@staticmethod
def diamond(n_var):
G = nx.Graph()
G.add_nodes_from(range(n_var))
G.add_edges_from(zip(repeat(0), range(1, n_var-1)))
G.add_edges_from(zip(range(1, n_var-1), repeat(n_var-1)))
G.add_edge(0, n_var-1)
return G
@staticmethod
def star(n_var):
return nx.star_graph(n_var)
@staticmethod
def random_erdos_renyi(n_var, p=0.2):
return nx.erdos_renyi_graph(n_var, p)
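# Samplers that attach a random CPD to every node of a DAG, conditioned on the
# node's parents.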
class DiscreteModelGenDGM:
@staticmethod
def dirichlet(G, alpha=1):
cpd = {}
for node in nx.topological_sort(G):
m = G.in_degree(node) + 1
dim = tuple([2] * m)
table = stats.dirichlet(alpha=tuple([alpha] * (2 ** m))).rvs()[0]
table = table.reshape(dim)
cpd[node] = TableCPD(table, [node], list(G.predecessors(node)))
return cpd
class ContinuousModelGenDGM:
@staticmethod
def gaussian(G):
cpd = {}
for node in nx.topological_sort(G):
m = G.in_degree(node) + 1
cov = np.random.rand(m, m)
cov = np.dot(cov, cov.T)
d = MultivariateGaussianDistribution(np.zeros(m), cov)
cpd[node] = ParametricFunctionCPD(d, [node] + list(G.predecessors(node)))
return cpd
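    # Draw each node's (intercept, weights, variance) from a normal-inverse-gamma
    # prior, yielding a linear-Gaussian CPD given the node's parents.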
@staticmethod
def linear_gaussian(G, a_0=1, b_0=1):
cpd = {}
for node in nx.topological_sort(G):
m = G.in_degree(node) + 1
nig = bstats.normal_inverse_gamma(w_0=np.zeros(m), V_0=np.eye(m), a_0=a_0, b_0=b_0)
sample = nig.rvs()
variance = sample[-1]
w = sample[1:-1]
w0 = sample[0]
cpd[node] = ParametricFunctionCPD(LinearGaussianDistribution(w0, w, variance),
[node], list(G.predecessors(node)))
return cpd
def dag_pack():
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.diamond(n_var)
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.star(n_var)
for p in [0.1, 0.2, 0.3, 0.4, 0.5, 0.9]:
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.random_gnr(n_var, p)
for p in [0.1, 0.2, 0.3, 0.4, 0.5, 0.9]:
for n_var in [5, 10, 20]:
yield AcyclicDiGraphGen.random_erdos_renyi(n_var, p)
def dgm_pack():
for dag in dag_pack():
dgm = DGM.from_graph(dag)
dgm.cpd = DiscreteModelGenDGM.dirichlet(dag.copy())
dgm.model = { node : TableCPD for node in dgm.nodes() }
yield dgm
dgm = DGM.from_graph(dag)
dgm.cpd = ContinuousModelGenDGM.linear_gaussian(dgm)
#yield dgm | mit | 4,763,116,395,961,582,000 | 31.094488 | 95 | 0.567853 | false |
ThunderShiviah/runtime_visualizer | setup.py | 1 | 1175 | #!/usr/bin/env python
from setuptools import setup  # install_requires is a setuptools feature, not distutils
setup(name='Runtime_visualizer',
version='0.1',
description='Visualize the time complexity of functions',
author='Thunder Shiviah',
author_email='[email protected]',
license='MIT',
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='runtime visualization',
install_requires=['matplotlib'],
url='https://github.com/ThunderShiviah/runtime_visualizer',
)
| mit | -7,040,180,799,732,818,000 | 32.571429 | 73 | 0.662979 | false |
dufferzafar/Python-Scripts | Rename TED/ted_talks_list.py | 1 | 132623 | talks = [
"Bren Brown - The power of vulnerability",
"Isabel Behncke - Evolutions gift of play from bonobo apes to humans",
"Josette Sheeran - Ending hunger now",
"Iain Hutchison - Saving faces A facial surgeons craft",
"Fred Jansen - How to land on a comet",
"Clayton Cameron - A-rhythm-etic. The math behind the beats",
"David Brooks - The social animal",
"David Kwong - Two nerdy obsessions meet -- and its magic",
"Andrew Solomon - Love no matter what",
"Cesar Kuriyama - One second every day",
"Euvin Naidoo - Why invest in Africa",
"Siddharthan Chandran - Can the damaged brain repair itself",
"Spencer Wells - A family tree for humanity",
"Jamais Cascio - Tools for a better world",
"Heribert Watzke - The brain in your gut",
"Peter Norvig - The 100000-student classroom",
"Daria van den Bercken - Why I take the piano on the road and in the air",
"Trevor Timm - How free is our freedom of the press",
"Richard Pyle - A dive into the reefs Twilight Zone",
"John Hodgman - Design explained.",
"Dan Phillips - Creative houses from reclaimed stuff",
"Hasan Elahi - FBI here I am",
"Khalida Brohi - How I work to protect women from honor killings",
"Tom Wujec - Learn to use the 13th-century astrolabe",
"Kevin Kelly - The next 5000 days of the web",
"Claron McFadden - Singing the primal mystery",
"George Ayittey - Africas cheetahs versus hippos",
"Alex Steffen - The shareable future of cities",
"Peter van Manen - Better baby care -- thanks to Formula 1",
"Peter Donnelly - How juries are fooled by statistics",
"Rainn Wilson - Ideas worth dating",
"Paul Zak - Trust morality -- and oxytocin",
"Mileha Soneji - Simple hacks for life with Parkinsons",
"Seth Priebatsch - The game layer on top of the world",
"William Noel - Revealing the lost codex of Archimedes",
"Alice Goffman - How were priming some kids for college and others for prison",
"Jonathan Drori - Every pollen grain has a story",
"Baba Shiv - Sometimes its good to give up the drivers seat",
"Sara Lewis - The loves and lies of fireflies",
"Dan Pallotta - The dream we havent dared to dream",
"Madeleine Albright - On being a woman and a diplomat",
"Melissa Marshall - Talk nerdy to me",
"Kevin Breel - Confessions of a depressed comic",
"Roberto DAngelo Francesca Fedeli - In our babys illness a life lesson",
"Nancy Lublin - Texting that saves lives",
"Monica Araya - A small country with big ideas to get rid of fossil fuels",
"Charles Hazlewood - Trusting the ensemble",
"Kiran Sethi - Kids take charge",
"Sam Richards - A radical experiment in empathy",
"Ludwick Marishane - A bath without water",
"Chris Burkard - The joy of surfing in ice-cold water",
"Joi Ito - Want to innovate Become a now-ist",
"Marcus du Sautoy - Symmetry realitys riddle",
"Rainer Strack - The workforce crisis of 2030 -- and how to start solving it now",
"Taylor Wilson - My radical plan for small nuclear fission reactors",
"Dennis Hong - My seven species of robot -- and how we created them",
"Itay Talgam - Lead like the great conductors",
"Newton Aduaka - The story of Ezra",
"Naomi Klein - Addicted to risk",
"Eve Ensler - Happiness in body and soul",
"J.J. Abrams - The mystery box",
"Tania Simoncelli - Should you be able to patent a human gene",
"Julia Galef - Why you think youre right -- even if youre wrong",
"Sarah Kay - If I should have a daughter ...",
"Kitra Cahana - A glimpse of life on the road",
"Sebastian Wernicke - How to use data to make a hit TV show",
"Gary Greenberg - The beautiful nano details of our world",
"Jean-Baptiste Michel Erez Lieberman Aiden - What we learned from 5 million books",
"Jonathan Drori - What we think we know",
"Hans Rosling - New insights on poverty",
"Marco Annunziata - Welcome to the age of the industrial internet",
"Carlo Ratti - Architecture that senses and responds",
"Peter Eigen - How to expose the corrupt",
"Kevin Slavin - How algorithms shape our world",
"E.O. Wilson - Advice to a young scientist",
"Jennifer Senior - For parents happiness is a very high bar",
"Brenda Romero - Gaming for understanding",
"Christopher Ryan - Are we designed to be sexual omnivores",
"Thomas Piketty - New thoughts on capital in the twenty-first century",
"Stephen Ritz - A teacher growing green in the South Bronx",
"Adam Grosser - A mobile fridge for vaccines",
"Abigail Washburn - Building US-China relations ... by banjo",
"Mick Cornett - How an obese town lost a million pounds",
"Colin Stokes - How movies teach manhood",
"Burt Rutan - The real future of space exploration",
"Elizabeth Gilbert - Your elusive creative genius",
"Thomas Pogge - Medicine for the 99 percent",
"Saul Griffith - Everyday inventions",
"Dave Meslin - The antidote to apathy",
"Charlie Todd - The shared experience of absurdity",
"Penelope Boston - There might just be life on Mars",
"Thomas Goetz - Its time to redesign medical data",
"Paul Ewald - Can we domesticate germs",
"Neil Turok - My wish Find the next Einstein in Africa",
"Knut Haanaes - Two reasons companies fail -- and how to avoid them",
"Auret van Heerden - Making global labor fair",
"Robin Nagle - What I discovered in New York City trash",
"John Delaney - Wiring an interactive ocean",
"James Surowiecki - The power and the danger of online crowds",
"Richard Resnick - Welcome to the genomic revolution",
"Amanda Bennett - We need a heroic narrative for death",
"Natalie Merchant - Singing old poems to life",
"Jackson Katz - Violence against womenits a mens issue",
"Sajay Samuel - How college loans exploit students for profit",
"Yves Morieux - How too many rules at work keep you from getting things done",
"Tierney Thys - Swim with the giant sunfish",
"Dame Ellen MacArthur - The surprising thing I learned sailing solo around the world",
"Ngozi Okonjo-Iweala - Want to help Africa Do business here",
"Magnus Larsson - Turning dunes into architecture",
"Salvatore Iaconesi - What happened when I open-sourced my brain cancer",
"Beardyman - The polyphonic me",
"Dave Isay - Everyone around you has a story the world needs to hear",
"Chris Anderson - TEDs secret to great public speaking",
"Mathieu Lehanneur - Science-inspired design",
"Gonzalo Vilario - How Argentinas blind soccer team became champions",
"Josh Luber - Why sneakers are a great investment",
"Carmen Agra Deedy - Once upon a time my mother ...",
"Tal Danino - Programming bacteria to detect cancer and maybe treat it",
"Nancy Frates - Meet the mom who started the Ice Bucket Challenge",
"Sarah Jones - One woman five characters and a sex lesson from the future",
"Anthony Goldbloom - The jobs well lose to machines -- and the ones we wont",
"Steve Silberman - The forgotten history of autism",
"Joshua Walters - On being just crazy enough",
"Tom Chatfield - 7 ways games reward the brain",
"Benoit Mandelbrot - Fractals and the art of roughness",
"Lisa Margonelli - The political chemistry of oil",
"Don Tapscott - Four principles for the open world",
"Thomas Suarez - A 12-year-old app developer",
"Amy Tan - Where does creativity hide",
"William Kamkwamba - How I harnessed the wind",
"Raghava KK - My 5 lives as an artist",
"Arthur Brooks - A conservatives plea Lets work together",
"James Geary - Metaphorically speaking",
"Jedidah Isler - How I fell in love with quasars blazars and our incredible universe",
"Gabby Giffords and Mark Kelly - Be passionate. Be courageous. Be your best.",
"Joel Levine - Why we need to go back to Mars",
"Alan Siegel - Lets simplify legal jargon",
"Ellen Jorgensen - What you need to know about CRISPR",
"Margaret Heffernan - Dare to disagree",
"Kevin Allocca - Why videos go viral",
"Pico Iyer - Where is home",
"Dean Kamen - To invent is to give",
"David Sedlak - 4 ways we can avoid a catastrophic drought",
"Lucianne Walkowicz - Finding planets around other stars",
"Paul Bennett - Design is in the details",
"Molly Crockett - Beware neuro-bunk",
"Antonio Donato Nobre - The magic of the Amazon A river that flows invisibly all around us",
"J.D. Vance - Americas forgotten working class",
"Mary Robinson - Why climate change is a threat to human rights",
"Jane Chen - A warm embrace that saves lives",
"Julian Treasure - 5 ways to listen better",
"Joy Sun - Should you donate differently",
"Eric Dishman - Take health care off the mainframe",
"Alex Laskey - How behavioral science can lower your energy bill",
"Robert Thurman - We can be Buddhas",
"Courtney Martin - This isnt her mothers feminism",
"Kavita Ramdas - Radical women embracing tradition",
"Vishal Vaid - Hypnotic South Asian improv music",
"Ismael Nazario - What I learned as a kid in jail",
"Mark Raymond - Victims of the city",
"Jane McGonigal - Gaming can make a better world",
"Philip Evans - How data will transform business",
"Ed Boyden - A light switch for neurons",
"Al Gore - What comes after An Inconvenient Truth",
"John Searle - Our shared condition -- consciousness",
"Steve Keil - A manifesto for play for Bulgaria and beyond",
"Bruno Torturra - Got a smartphone Start broadcasting",
"Edward Snowden - Heres how we take back the Internet",
"Frank Warren - Half a million secrets",
"Corneille Ewango - A hero of the Congo forest",
"Martin Hanczyc - The line between life and not-life",
"Carter Emmart - A 3D atlas of the universe",
"Kristina Gjerde - Making law on the high seas",
"HaasHahn - How painting can transform communities",
"Karen Armstrong - Lets revive the Golden Rule",
"Sarah Bergbreiter - Why I make robots the size of a grain of rice",
"Carolyn Steel - How food shapes our cities",
"Peter Gabriel - Fight injustice with raw video",
"Gary Slutkin - Lets treat violence like a contagious disease",
"Mary Norris - The nit-picking glory of The New Yorkers Comma Queen",
"Mark Applebaum - The mad scientist of music",
"Leslie T. Chang - The voices of Chinas workers",
"Kakenya Ntaiya - A girl who demanded school",
"Marc Pachter - The art of the interview",
"George Dyson - The story of Project Orion",
"Charles Limb - Building the musical muscle",
"Rodrigo Bijou - Governments dont understand cyber warfare. We need hackers",
"Will Potter - The secret US prisons youve never heard of before",
"Rodney Mullen - Pop an ollie and innovate",
"Mitchell Joachim - Dont build your home grow it",
"Sandra Aamodt - Why dieting doesnt usually work",
"Noah Wilson-Rich - Every city needs healthy honey bees",
"Robert Neuwirth - The hidden world of shadow cities",
"Teddy Cruz - How architectural innovations migrate across borders",
"Krista Donaldson - The 80 prosthetic knee thats changing lives",
"Miguel Nicolelis - Brain-to-brain communication has arrived. How we did it",
"Michael Murphy - Architecture thats built to heal",
"Ken Goldberg - 4 lessons from robots about being human",
"John Wilbanks - Lets pool our medical data",
"Janine Benyus - Biomimicry in action",
"Nancy Etcoff - Happiness and its surprises",
"Rob Legato - The art of creating awe",
"Ben Goldacre - What doctors dont know about the drugs they prescribe",
"Michael McDaniel - Cheap effective shelter for disaster relief",
"Yann DallAglio - Love -- youre doing it wrong",
"Lee Mokobe - A powerful poem about what it feels like to be transgender",
"Paul Bloom - The origins of pleasure",
"Alaa Murabit - What my religion really says about women",
"Dean Ornish - The killer American diet thats sweeping the planet",
"Natalie MacMaster - Fiddling in reel time",
"Inara George - Family Tree",
"Ken Robinson - Bring on the learning revolution",
"Negin Farsad - A highly scientific taxonomy of haters",
"Elizabeth Lindsey - Curating humanitys heritage",
"Reshma Saujani - Teach girls bravery not perfection",
"Roger Ebert - Remaking my voice",
"Rose George - Lets talk crap. Seriously.",
"Stephen Petranek - Your kids might live on Mars. Heres how theyll survive",
"Steve Howard - Lets go all-in on selling sustainability",
"Charles Anderson - Dragonflies that fly across oceans",
"David Hanson - Robots that show emotion",
"Robert Gupta - Music is medicine music is sanity",
"Jay Walker - The worlds English mania",
"Emily Oster - Flip your thinking on AIDS in Africa",
"Karen Armstrong - My wish The Charter for Compassion",
"eL Seed - Street art with a message of hope and peace",
"Jay Bradner - Open-source cancer research",
"Karima Bennoune - When people of Muslim heritage challenge fundamentalism",
"Seyi Oyesola - A hospital tour in Nigeria",
"Sally Kohn - Dont like clickbait Dont click",
"Cheryl Hayashi - The magnificence of spider silk",
"Daniel Kraft - A better way to harvest bone marrow",
"Rick Falkvinge - I am a pirate",
"Michael Hansmeyer - Building unimaginable shapes",
"Jane McGonigal - The game that can give you 10 extra years of life",
"Meron Gribetz - A glimpse of the future through an augmented reality headset",
"Jos Bowen - Beethoven the businessman",
"Joe Landolina - This gel can make you stop bleeding instantly",
"Cesar Harada - How I teach kids to love science",
"David Holt - The joyful tradition of mountain music",
"Marina Abramovi - An art made of trust vulnerability and connection",
"Sonia Shah - 3 reasons we still havent gotten rid of malaria",
"Sayu Bhojwani - Immigrant voices make democracy stronger",
"Tony Robbins - Why we do what we do",
"Mick Ebeling - The invention that unlocked a locked-in artist",
"Naif Al-Mutawa - Superheroes inspired by Islam",
"Tom Wujec - 3 ways the brain creates meaning",
"Iqbal Quadir - How mobile phones can fight poverty",
"David Grady - How to save the world or at least yourself from bad meetings",
"Jessica Ladd - The reporting system that sexual assault survivors want",
"Thomas Dolby - Love Is a Loaded Pistol",
"Aparna Rao - Art that craves your attention",
"Toni Mac - The laws that sex workers really want",
"Ramona Pierson - An unexpected place of healing",
"Yves Behar - Designing objects that tell stories",
"Paula Hammond - A new superweapon in the fight against cancer",
"Jennifer Healey - If cars could talk accidents might be avoidable",
"Alex Wissner-Gross - A new equation for intelligence",
"Ashraf Ghani - How to rebuild a broken state",
"Amy Lockwood - Selling condoms in the Congo",
"Sleepy Man Banjo Boys - Teen wonders play bluegrass",
"James Nachtwey - Moving photos of extreme drug-resistant TB",
"Lucianne Walkowicz - Lets not use Mars as a backup planet",
"Robert Swan - Lets save the last pristine continent",
"Sanjay Pradhan - How open data is changing international aid",
"Pter Fankhauser - Meet Rezero the dancing ballbot",
"Morley - Women of Hope",
"John Wooden - The difference between winning and succeeding",
"Chimamanda Ngozi Adichie - The danger of a single story",
"Roger Stein - A bold new way to fund drug research",
"Christien Meindertsma - How pig parts make the world turn",
"Luca Turin - The science of scent",
"Britta Riley - A garden in my apartment",
"Severine Autesserre - To solve mass violence look to locals",
"Ben Saunders - Why did I ski to the North Pole",
"Luke Syson - How I learned to stop worrying and love useless art",
"John Kasaona - How poachers became caretakers",
"Sangeeta Bhatia - This tiny particle could roam your body to find tumors",
"Robert Full - Learning from the geckos tail",
"Juliana Rotich - Meet BRCK Internet access built for Africa",
"Mary Lou Jepsen - Could future devices read images from our brains",
"Cynthia Breazeal - The rise of personal robots",
"Amy Smith - Simple designs to save a life",
"Melinda Gates - Lets put birth control back on the agenda",
"Lesley Hazleton - The doubt essential to faith",
"Lauren Zalaznick - The conscience of television",
"Rodrigo Canales - The deadly genius of drug cartels",
"Fahad Al-Attiya - A country with no water",
"Richard Wilkinson - How economic inequality harms societies",
"JP Rangaswami - Information is food",
"Frans de Waal - Moral behavior in animals",
"Svante Pbo - DNA clues to our inner neanderthal",
"Jimmy Nelson - Gorgeous portraits of the worlds vanishing people",
"Jared Diamond - Why do societies collapse",
"Doris Kim Sung - Metal that breathes",
"Sheena Iyengar - The art of choosing",
"Cameron Herold - Lets raise kids to be entrepreneurs",
"Johnny Lee - Free or cheap Wii Remote hacks",
"Dan Cobley - What physics taught me about marketing",
"Jackie Tabick - The balancing act of compassion",
"Susan Savage-Rumbaugh - The gentle genius of bonobos",
"Zaria Forman - Drawings that show the beauty and fragility of Earth",
"Juan Enriquez - We can reprogram life. How to do it wisely",
"Shonda Rhimes - My year of saying yes to everything",
"Michael Tilson Thomas - Music and emotion through time",
"Abraham Verghese - A doctors touch",
"Seth Shostak - ET is probably out there -- get ready",
"Esther Duflo - Social experiments to fight poverty",
"Aris Venetikidis - Making sense of maps",
"Jack Horner - Where are the baby dinosaurs",
"Helder Guimares - A magical search for a coincidence",
"Dan Pallotta - The way we think about charity is dead wrong",
"Thom Mayne - How architecture can connect us",
"Chade-Meng Tan - Everyday compassion at Google",
"Chris Kluwe - How augmented reality will change sports ... and build empathy",
"Stewart Brand Mark Z. Jacobson - Debate Does the world need nuclear energy",
"Max Little - A test for Parkinsons with a phone call",
"Marco Tempest - The magic of truth and lies and iPods",
"T. Boone Pickens - Lets transform energy -- with natural gas",
"Nick Veasey - Exposing the invisible",
"Michel Laberge - How synchronized hammer strikes could generate nuclear fusion",
"Memory Banda - A warriors cry against child marriage",
"Daniel Goleman - Why arent we more compassionate",
"Stephen Wolfram - Computing a theory of all knowledge",
"Julia Sweeney - Its time for The Talk",
"McKenna Pope - Want to be an activist Start with your toys",
"Jeffrey Brown - How we cut youth violence in Boston by 79 percent",
"Geert Chatrou - A whistleblower you havent heard",
"Gary Haugen - The hidden reason for poverty the world needs to address now",
"Martin Jacques - Understanding the rise of China",
"Aaron OConnell - Making sense of a visible quantum object",
"Thomas Heatherwick - Building the Seed Cathedral",
"Jack Andraka - A promising test for pancreatic cancer ... from a teenager",
"Oded Shoseyov - How were harnessing natures hidden superpowers",
"Mike Biddle - We can recycle plastic",
"Dorothy Roberts - The problem with race-based medicine",
"Eythor Bender - Human exoskeletons -- for war and healing",
"Ron McCallum - How technology allowed me to read",
"Iwan Baan - Ingenious homes in unexpected places",
"Joe Madiath - Better toilets better life",
"Dayananda Saraswati - The profound journey of compassion",
"Louie Schwartzberg - Hidden miracles of the natural world",
"John Hodgman - Aliens love -- where are they",
"Alyson McGregor - Why medicine often has dangerous side effects for women",
"Bill T. Jones - The dancer the singer the cellist ... and a moment of creative magic",
"Benedetta Berti - The surprising way groups like ISIS stay in power",
"Shlomo Benartzi - Saving for tomorrow tomorrow",
"Nathan Myhrvold - Could this laser zap malaria",
"Patience Mthunzi - Could we cure HIV with lasers",
"Mia Birdsong - The story we tell about poverty isnt true",
"Angela Patton - A father-daughter dance ... in prison",
"Ramesh Raskar - Imaging at a trillion frames per second",
"Geoff Mulgan - Post-crash investing in a better world",
"Hans Rosling - Religions and babies",
"Latif Nasser - The amazing story of the man who gave us modern pain relief",
"Elizabeth Gilbert - Success failure and the drive to keep creating",
"Alec Soth Stacey Baker - This is what enduring love looks like",
"John Maeda - My journey in design",
"Jennifer Granholm - A clean energy proposal -- race to the top",
"Ali Carr-Chellman - Gaming to re-engage boys in learning",
"Louise Leakey - A dig for humanitys origins",
"Antnio Guterres - Refugees have the right to be protected",
"Sherwin Nuland - How electroshock therapy changed me",
"Boghuma Kabisen Titanji - Ethical riddles in HIV research",
"Hubertus Knabe - The dark secrets of a surveillance state",
"Shimon Schocken - What a bike ride can teach you",
"Sheila Patek - The shrimp with a kick",
"William Ury - The walk from no to yes",
"Don Norman - 3 ways good design makes you happy",
"Matthieu Ricard - How to let altruism be your guide",
"Ivan Krastev - Can democracy exist without trust",
"Meera Vijayann - Find your voice against gender violence",
"Drew Curtis - How I beat a patent troll",
"Kenneth Lacovara - Hunting for dinosaurs showed me our place in the universe",
"Mustafa Akyol - Faith versus tradition in Islam",
"Camille Seaman - Haunting photos of polar ice",
"Holly Morris - Why stay in Chernobyl Because its home.",
"Candy Chang - Before I die I want to ...",
"Amy Webb - How I hacked online dating",
"Harry Cliff - Have we reached the end of physics",
"Molly Stevens - A new way to grow bone",
"Robin Chase - The idea behind Zipcar and what comes next",
"Alberto Cairo - There are no scraps of men",
"BLACK - My journey to yo-yo mastery",
"David Steindl-Rast - Want to be happy Be grateful",
"Luis von Ahn - Massive-scale online collaboration",
"Denise Herzing - Could we speak the language of dolphins",
"The LXD - In the Internet age dance evolves ...",
"Scott Summit - Beautiful artificial limbs",
"Michael Metcalfe - A provocative way to finance the fight against climate change",
"Seth Godin - How to get your ideas to spread",
"Nick Hanauer - Beware fellow plutocrats the pitchforks are coming",
"Anil Ananthaswamy - What it takes to do extreme astrophysics",
"Joe Sabia - The technology of storytelling",
"Mariana Mazzucato - Government -- investor risk-taker innovator",
"Jessica Jackley - Poverty money -- and love",
"Robert Wright - The evolution of compassion",
"Mike Matas - A next-generation digital book",
"Rich Benjamin - My road trip through the whitest towns in America",
"Julie Lythcott-Haims - How to raise successful kids -- without over-parenting",
"Sebastio Salgado - The silent drama of photography",
"Gill Hicks - I survived a terrorist attack. Heres what I learned",
"Kaki King - Playing Pink Noise on guitar",
"Garik Israelian - How spectroscopy could reveal alien life",
"Catherine Mohr - The tradeoffs of building green",
"Rob Dunbar - Discovering ancient climates in oceans and ice",
"Saki Mafundikwa - Ingenuity and elegance in ancient African alphabets",
"Scott Rickard - The beautiful math behind the worlds ugliest music",
"John Hunter - Teaching with the World Peace Game",
"Allan Adams - What the discovery of gravitational waves means",
"ShaoLan - The Chinese zodiac explained",
"Angela Lee Duckworth - Grit The power of passion and perseverance",
"Aubrey de Grey - A roadmap to end aging",
"Anthony Atala - Growing new organs",
"Lucy McRae - How can technology transform the human body",
"Malcolm Gladwell - The strange tale of the Norden bombsight",
"Danny Hillis - The Internet could crash. We need a Plan B",
"Zainab Salbi - Women wartime and the dream of peace",
"Steven Pinker and Rebecca Newberger Goldstein - The long reach of reason",
"Chris Anderson - Technologys long tail",
"Bill Davenhall - Your health depends on where you live",
"Xavier Vilalta - Architecture at home in its community",
"David Byrne Ethel Thomas Dolby - Nothing But Flowers with string quartet",
"Ilona Szab de Carvalho - 4 lessons I learned from taking a stand against drugs and gun violence",
"Rob Hopkins - Transition to a world without oil",
"Seema Bansal - How to fix a broken education system ... without any more money",
"Barry Schwartz - Using our practical wisdom",
"Aimee Mullins - The opportunity of adversity",
"Jon Nguyen - Tour the solar system from home",
"Jim Fallon - Exploring the mind of a killer",
"Lalitesh Katragadda - Making maps to fight disaster build economies",
"Charles Elachi - The story behind the Mars Rovers",
"Ben Katchor - Comics of bygone New York",
"Tom Honey - Why would God create a tsunami",
"Barbara Block - Tagging tuna in the deep ocean",
"Deborah Gordon - What ants teach us about the brain cancer and the Internet",
"Hetain Patel - Who am I Think again",
"Scott Fraser - Why eyewitnesses get it wrong",
"Steven Johnson - How the ghost map helped end a killer disease",
"Rory Stewart - Time to end the war in Afghanistan",
"Susan Colantuono - The career advice you probably didnt get",
"Rory Bremner - A one-man world summit",
"Sergey Brin - Why Google Glass",
"Gordon Brown - Wiring a web for global good",
"Paul Sereno - Digging up dinosaurs",
"Pavan Sukhdev - Put a value on nature",
"Ariel Garten - Know thyself with a brain scanner",
"Elizabeth Pisani - Sex drugs and HIV -- lets get rational",
"Dilip Ratha - The hidden force in global economics sending money home",
"Shai Agassi - A new ecosystem for electric cars",
"Daniel Libeskind - 17 words of architectural inspiration",
"Kevin Stone - The bio-future of joint replacement",
"Caroline Phillips - Hurdy-gurdy for beginners",
"Arthur Benjamin - Teach statistics before calculus",
"Stanley McChrystal - Listen learn ... then lead",
"Julian Treasure - Shh Sound health in 8 steps",
"Greg Stone - Saving the ocean one island at a time",
"Dennis vanEngelsdorp - A plea for bees",
"Suzana Herculano-Houzel - What is so special about the human brain",
"Majora Carter - Greening the ghetto",
"Ernest Madu - World-class health care",
"Ben Dunlap - The life-long learner",
"Bruce Schneier - The security mirage",
"Susan Blackmore - Memes and temes",
"Ash Beckham - Were all hiding something. Lets find the courage to open up",
"Roselinde Torres - What it takes to be a great leader",
"Jane Poynter - Life in Biosphere 2",
"Ellen t Hoen - Pool medical patents save lives",
"Hans Rosling - The best stats youve ever seen",
"Stewart Brand - The dawn of de-extinction. Are you ready",
"Feisal Abdul Rauf - Lose your ego find your compassion",
"Steve Truglia - A leap from the edge of space",
"Birke Baehr - Whats wrong with our food system",
"Nellie McKay - Clonie",
"Carl Safina - What are animals thinking and feeling",
"Hannah Fry - Is life really that complex",
"Ken Kamler - Medical miracle on Everest",
"Lisa Kristine - Photos that bear witness to modern slavery",
"Mark Kendall - Demo A needle-free vaccine patch thats safer and way cheaper",
"Shilo Shiv Suleman - Using tech to enable dreaming",
"Patrick Awuah - How to educate leaders Liberal arts",
"Andreas Raptopoulos - No roads Theres a drone for that",
"Elizabeth Lesser - Take the Other to lunch",
"Todd Coleman - A temporary tattoo that brings hospital care to the home",
"Billy Graham - On technology and faith",
"Dean Ornish - Healing through diet",
"Improv Everywhere - A TED speakers worst nightmare",
"Keren Elazari - Hackers the Internets immune system",
"Joe DeRisi - Solving medical mysteries",
"Joshua Foer - Feats of memory anyone can do",
"Thomas Thwaites - How I built a toaster -- from scratch",
"Ramanan Laxminarayan - The coming crisis in antibiotics",
"Greg Lynn - Organic algorithms in architecture",
"James Nachtwey - My wish Let my photographs bear witness",
"Sheryl Sandberg - So we leaned in ... now what",
"Fei-Fei Li - How were teaching computers to understand pictures",
"Erin McKean - Go ahead make up new words",
"Rory Sutherland - Life lessons from an ad man",
"Linda Liukas - A delightful way to teach kids about computers",
"Miguel Nicolelis - A monkey that controls a robot with its thoughts. No really.",
"Eric Giler - A demo of wireless electricity",
"Charles Limb - Your brain on improv",
"Nilofer Merchant - Got a meeting Take a walk",
"Sebastian Seung - I am my connectome",
"John Q. Walker - Great piano performances recreated",
"Mac Stone - Stunning photos of the endangered Everglades",
"Raghava KK - Whats your 200-year plan",
"Bertrand Piccard - My solar-powered adventure",
"Juan Enriquez - Your online life permanent as a tattoo",
"Andrs Ruzo - The boiling river of the Amazon",
"Rachel Botsman - The case for collaborative consumption",
"Renny Gleeson - Our antisocial phone tricks",
"Lennart Green - Close-up card magic with a twist",
"Garth Lenz - The true cost of oil",
"Reuben Margolin - Sculpting waves in wood and time",
"Bill Doyle - Treating cancer with electric fields",
"Matt Mills - Image recognition that triggers augmented reality",
"Cameron Sinclair - The refugees of boom-and-bust",
"Debra Jarvis - Yes I survived cancer. But that doesnt define me",
"Suheir Hammad - Poems of war peace women power",
"Kenneth Shinozuka - My simple invention designed to keep my grandfather safe",
"Talithia Williams - Own your bodys data",
"Janine di Giovanni - What I saw in the war",
"Jay Silver - Hack a banana make a keyboard",
"Audrey Choi - How to make a profit while making a difference",
"Peter Haas - Haitis disaster of engineering",
"Sal Khan - Lets teach for mastery -- not test scores",
"Sarah Jones - A one-woman global village",
"Zak Ebrahim - I am the son of a terrorist. Heres how I chose peace.",
"Kevin Rudd - Are China and the US doomed to conflict",
"Jonathan Foley - The other inconvenient truth",
"Massimo Banzi - How Arduino is open-sourcing imagination",
"Geraldine Hamilton - Body parts on a chip",
"Al Gore - The case for optimism on climate change",
"Ben Saunders - To the South Pole and back the hardest 105 days of my life",
"Megan Kamerick - Women should represent women in media",
"Christopher Emdin - Teach teachers how to create magic",
"Jeff Speck - The walkable city",
"David Deutsch - A new way to explain explanation",
"Susan Cain - The power of introverts",
"Kenichi Ebina - My magic moves",
"Paul Snelgrove - A census of the ocean",
"Zeynep Tufekci - Online social change easy to organize hard to win",
"Laura Indolfi - Good news in the fight against pancreatic cancer",
"Eric Liu - Theres no such thing as not voting",
"Boyd Varty - What I learned from Nelson Mandela",
"Guy-Philippe Goldstein - How cyberattacks threaten real-world peace",
"Leyla Acaroglu - Paper beats plastic How to rethink environmental folklore",
"Eleni Gabre-Madhin - A commodities exchange for Ethiopia",
"Gabe Barcia-Colombo - My DNA vending machine",
"David Deutsch - Chemical scum that dream of distant quasars",
"Ben Cameron - Why the live arts matter",
"Alexa Meade - Your body is my canvas",
"Kevin Kelly - Technologys epic story",
"Ryan Lobo - Photographing the hidden story",
"Amory Lovins - A 40-year plan for energy",
"Janet Iwasa - How animations can help scientists test a hypothesis",
"Peter van Uhm - Why I chose a gun",
"George Papandreou - Imagine a European democracy without borders",
"Jane McGonigal - Massively multi-player thumb-wrestling",
"Richard Ledgett - The NSA responds to Edward Snowdens TED Talk",
"Andrew Stanton - The clues to a great story",
"Kamal Meattle - How to grow fresh air",
"Joachim de Posada - Dont eat the marshmallow",
"Nicholas Negroponte - Taking OLPC to Colombia",
"Thomas Hellum - The worlds most boring television ... and why its hilariously addictive",
"Alexis Ohanian - How to make a splash in social media",
"Andrew McAfee - Are droids taking our jobs",
"Nellie McKay - Mother of Pearl If I Had You",
"Patrcia Medici - The coolest animal you know nothing about ... and how we can save it",
"David Merrill - Toy tiles that talk to each other",
"Gail Reed - Where to train the worlds doctors Cuba.",
"Pattie Maes Pranav Mistry - Meet the SixthSense interaction",
"Dennis Hong - Making a car for blind drivers",
"Mandy Len Catron - Falling in love is the easy part",
"Elyn Saks - A tale of mental illness -- from the inside",
"Lemn Sissay - A child of the state",
"Rives - The Museum of Four in the Morning",
"Christine Sun Kim - The enchanting music of sign language",
"Sebastian Kraves - The era of personal DNA testing is here",
"Mikko Hypponen - How the NSA betrayed the worlds trust -- time to act",
"David Gruber - Glow-in-the-dark sharks and other stunning sea creatures",
"Ben Goldacre - Battling bad science",
"Raspyni Brothers - Juggle and jest",
"Marcus Byrne - The dance of the dung beetle",
"Juan Enriquez - Using biology to rethink the energy challenge",
"Jean-Paul Mari - The chilling aftershock of a brush with death",
"Sally Kohn - Lets try emotional correctness",
"Jan Chipchase - The anthropology of mobile phones",
"Steven Cowley - Fusion is energys future",
"Tea Uglow - An Internet without screens might look like this",
"Michael Anti - Behind the Great Firewall of China",
"Bono - The good news on poverty Yes theres good news",
"Toby Eccles - Invest in social change",
"Michael Sandel - The lost art of democratic debate",
"Tshering Tobgay - This country isnt just carbon neutral -- its carbon negative",
"Malte Spitz - Your phone company is watching",
"Alessandra Orofino - Its our city. Lets fix it",
"Sendhil Mullainathan - Solving social problems with a nudge",
"Aspen Baker - A better way to talk about abortion",
"Rachel Armstrong - Architecture that repairs itself",
"Nancy Kanwisher - A neural portrait of the human mind",
"Daniel Goldstein - The battle between your present and future self",
"Alex Steffen - The route to a sustainable future",
"James Balog - Time-lapse proof of extreme ice loss",
"Eddi Reader - Kiteflyers Hill",
"Brian Dettmer - Old books reborn as art",
"Vinay Venkatraman - Technology crafts for the digitally underserved",
"Maya Beiser - A cello with many voices",
"Steve Jurvetson - Model rocketry",
"Astro Teller - The unexpected benefit of celebrating failure",
"Hadyn Parry - Re-engineering mosquitos to fight disease",
"Christoph Adami - Finding life we cant imagine",
"Margaret Wertheim - The beautiful math of coral",
"Taylor Wilson - Yup I built a nuclear fusion reactor",
"Andrew Fitzgerald - Adventures in Twitter fiction",
"Adam Spencer - Why I fell in love with monster prime numbers",
"Mark Plotkin - What the people of the Amazon know that you dont",
"Regina Hartley - Why the best hire might not have the perfect resume",
"Tim Berners-Lee - The year open data went worldwide",
"Lawrence Lessig - The unstoppable walk to political reform",
"James Patten - The best computer interface Maybe ... your hands",
"Graham Hill - Why Im a weekday vegetarian",
"Greg Asner - Ecology from the air",
"Marcin Jakubowski - Open-sourced blueprints for civilization",
"David Pogue - Cool tricks your phone can do",
"Yann Arthus-Bertrand - A wide-angle view of fragile Earth",
"Andy Hobsbawm - Do the green thing",
"Eddy Cartaya - My glacier cave discoveries",
"The Lady Lifers - A moving song from women in prison for life",
"Ash Beckham - When to take a stand -- and when to let it go",
"Kirby Ferguson - Embrace the remix",
"Eve Ensler - Embrace your inner girl",
"Craig Venter - Watch me unveil synthetic life",
"Eric Berlow and Sean Gourley - Mapping ideas worth spreading",
"Carne Ross - An independent diplomat",
"Stephen Wilkes - The passing of time caught in a single photo",
"Stefan Wolff - The path to ending ethnic conflicts",
"Aziza Chaouni - How I brought a river and my city back to life",
"Andrew McAfee - What will future jobs look like",
"Rebecca Onie - What if our healthcare system kept us healthy",
"David Chalmers - How do you explain consciousness",
"Elon Musk - The mind behind Tesla SpaceX SolarCity ...",
"David Damberger - What happens when an NGO admits failure",
"Bjarke Ingels - 3 warp-speed architecture tales",
"Stephen Friend - The hunt for unexpected genetic heroes",
"Marla Spivak - Why bees are disappearing",
"Donald Hoffman - Do we see reality as it is",
"Laura Boushnak - For these women reading is a daring act",
"Damon Horowitz - Philosophy in prison",
"Dong Woo Jang - The art of bow-making",
"Arianna Huffington - How to succeed Get more sleep",
"Auke Ijspeert - A robot that runs and swims like a salamander",
"Bahia Shehab - A thousand times no",
"Olafur Eliasson - Playing with space and light",
"Joshua Prince-Ramus - Building a theater that remakes itself",
"Rokia Traore - Kounandi",
"Sean Carroll - Distant time and the hint of a multiverse",
"Jennifer Golbeck - The curly fry conundrum Why social media likes say more than you might think",
"Aicha el-Wafi Phyllis Rodriguez - The mothers who found forgiveness friendship",
"Shereen El-Feki - HIV -- how to fight an epidemic of bad laws",
"Anglica Dass - The beauty of human skin in every color",
"Graham Hill - Less stuff more happiness",
"Sangu Delle - In praise of macro -- yes macro -- finance in Africa",
"Mae Jemison - Teach arts and sciences together",
"Brian Greene - Making sense of string theory",
"Wael Ghonim - Lets design social media that drives real change",
"Ge Wang - The DIY orchestra of the future",
"Joan Halifax - Compassion and the true meaning of empathy",
"Bren Brown - Listening to shame",
"Dibdo Francis Kr - How to build with clay ... and community",
"Evelyn Glennie - How to truly listen",
"Stephen Cave - The 4 stories we tell ourselves about death",
"Navi Radjou - Creative problem-solving in the face of extreme limits",
"Fabien Cousteau - What I learned from spending 31 days underwater",
"Ziyah Gafic - Everyday objects tragic histories",
"E.O. Wilson - My wish Build the Encyclopedia of Life",
"Golan Levin - Art that looks back at you",
"Guy Hoffman - Robots with soul",
"Dave Brain - What a planet needs to sustain life",
"Paula Johnson - His and hers healthcare",
"Robert Neuwirth - The power of the informal economy",
"Nicholas Negroponte - One Laptop per Child",
"Paola Antonelli - Design and the Elastic Mind",
"Oren Yakobovich - Hidden cameras that film injustice in the worlds most dangerous places",
"Antony Gormley - Sculpted space within and without",
"Evan Grant - Making sound visible through cymatics",
"Fred Swaniker - The leaders who ruined Africa and the generation who can fix it",
"Paul Kemp-Robertson - Bitcoin. Sweat. Tide. Meet the future of branded currency.",
"Manuel Lima - A visual history of human knowledge",
"Dean Ornish - Your genes are not your fate",
"Neil Pasricha - The 3 As of awesome",
"Peter Diamandis - Our next giant leap",
"Patricia Ryan - Dont insist on English",
"Mihaly Csikszentmihalyi - Flow the secret to happiness",
"Eve Ensler - Suddenly my body",
"James A. White Sr. - The little problem I had renting a house",
"Monica Lewinsky - The price of shame",
"Kenneth Cukier - Big data is better data",
"Samantha Nutt - The real harm of the global arms trade",
"David Binder - The arts festival revolution",
"Larry Brilliant - My wish Help me stop pandemics",
"Del Harvey - Protecting Twitter users sometimes from themselves",
"Katherine Fulton - You are the future of philanthropy",
"Paolo Cardini - Forget multitasking try monotasking",
"Jake Wood - A new mission for veterans -- disaster relief",
"Malcolm London - High School Training Ground",
"Timothy Ihrig - What we can do to die well",
"Diana Laufenberg - How to learn From mistakes",
"Dan Gilbert - Why we make bad decisions",
"Bruce Aylward - Humanity vs. Ebola. How we could win a terrifying war",
"Charles Robertson - Africas next boom",
"Pawan Sinha - How brains learn to see",
"May El-Khalil - Making peace is a marathon",
"Tasso Azevedo - Hopeful lessons from the battle to save rainforests",
"Peter Reinhart - The art and craft of bread",
"David Puttnam - Does the media have a duty of care",
"Richard Preston - The mysterious lives of giant trees",
"Tod Machover Dan Ellsey - Inventing instruments that unlock new music",
"Ann Cooper - Whats wrong with school lunches",
"Eddie Obeng - Smart failure for a fast-changing world",
"Martin Villeneuve - How I made an impossible film",
"Erik Brynjolfsson - The key to growth Race with the machines",
"Martin Seligman - The new era of positive psychology",
"Elizabeth Nyamayaro - An invitation to men who want a better world for women",
"Dave Troy - Social maps that reveal a citys intersections and separations",
"Topher White - What can save the rainforest Your used cell phone",
"Hendrik Poinar - Bring back the woolly mammoth",
"Alastair Parvin - Architecture for the people by the people",
"Cynthia Kenyon - Experiments that hint of longer lives",
"Kevin Surace - Eco-friendly drywall",
"Thomas Barnett - Lets rethink Americas military strategy",
"Ueli Gegenschatz - Extreme wingsuit flying",
"Steven Pinker - Human nature and the blank slate",
"Chris Anderson - How web video powers global innovation",
"Thelma Golden - How art gives shape to cultural change",
"Michael Green - Why we should build wooden skyscrapers",
"Simon Lewis - Dont take consciousness for granted",
"Liz Diller - A new museum wing ... in a giant bubble",
"Shivani Siroya - A smart loan for people with no credit history yet",
"Sandra Fisher-Martins - The right to understand",
"Graham Hawkes - A flight through the ocean",
"Evgeny Morozov - How the Net aids dictatorships",
"Janette Sadik-Khan - New Yorks streets Not so mean any more",
"Omar Ahmad - Political change with pen and paper",
"Mariano Sigman - Your words may predict your future mental health",
"Tristram Stuart - The global food waste scandal",
"Ethel - A string quartet plays Blue Room",
"Jennifer Kahn - Gene editing can now change an entire species -- forever",
"Ryan Merkley - Online video -- annotated remixed and popped",
"Tabetha Boyajian - The most mysterious star in the universe",
"Liu Bolin - The invisible man",
"Jeremy Jackson - How we wrecked the ocean",
"Bryan Stevenson - We need to talk about an injustice",
"Courtney Martin - The new American Dream",
"Bobby Ghosh - Why global jihad is losing",
"Homaro Cantu Ben Roche - Cooking as alchemy",
"Will Potter - The shocking move to criminalize nonviolent protest",
"Tim Leberecht - 3 ways to usefully lose control of your brand",
"Tom Wujec - Build a tower build a team",
"Bill Gross - A solar energy system that tracks the sun",
"Cameron Sinclair - My wish A call for open-source architecture",
"Richard Branson - Life at 30000 feet",
"Mohamed Hijri - A simple solution to the coming phosphorus crisis",
"Damian Palin - Mining minerals from seawater",
"Craig Venter - On the verge of creating synthetic life",
"Adora Svitak - What adults can learn from kids",
"Jeff Skoll - My journey into movies that matter",
"Mark Ronson - How sampling transformed music",
"Arthur Potts Dawson - A vision for sustainable restaurants",
"Wanda Diaz Merced - How a blind astronomer found a way to hear the stars",
"Dan Pink - The puzzle of motivation",
"David Camarillo - Why helmets dont prevent concussions -- and what might",
"Aomawa Shields - How well find life on other planets",
"Uri Hasson - This is your brain on communication",
"Gary Lauders new traffic sign - Take Turns",
"Pete Alcorn - The world in 2200",
"Jok Church - A circle of caring",
"Michael Kimmel - Why gender equality is good for everyone men included",
"Will Marshall - Tiny satellites show us the Earth as it changes in near-real-time",
"Hod Lipson - Building self-aware robots",
"Ernesto Sirolli - Want to help someone Shut up and listen",
"Stewart Brand - The Long Now",
"Jacqueline Novogratz - Inspiring a life of immersion",
"Jennifer Lin - Improvising on piano aged 14",
"Ken Jennings - Watson Jeopardy and me the obsolete know-it-all",
"Charles Hazlewood British Paraorchestra - The debut of the British Paraorchestra",
"Mechai Viravaidya - How Mr. Condom made Thailand a better place for life and love",
"Pia Mancini - How to upgrade democracy for the Internet era",
"Ed Gavagan - A story about knots and surgeons",
"Jason Pontin - Can technology solve our big problems",
"Mena Trott - Meet the founder of the blog revolution",
"Alice Dreger - Is anatomy destiny",
"Peter Diamandis - Abundance is our future",
"Chris Downey - Design with the blind in mind",
"Joseph Pine - What consumers want",
"Dan Barber - How I fell in love with a fish",
"Pam Warhurst - How we can eat our landscapes",
"Leymah Gbowee - Unlock the intelligence passion greatness of girls",
"Robert Wright - Progress is not a zero-sum game",
"Erik Schlangen - A self-healing asphalt",
"Jason McCue - Terrorism is a failed brand",
"Randall Munroe - Comics that ask what if",
"Lee Cronin - Print your own medicine",
"Ngozi Okonjo-Iweala - How Africa can keep rising",
"Ajit Narayanan - A word game to communicate in any language",
"Hans Rosling - Let my dataset change your mindset",
"Garrett Lisi - An 8-dimensional model of the universe",
"Liz Diller - The Blur Building and other tech-empowered architecture",
"Stefan Sagmeister - 7 rules for making more happiness",
"Charles Fleischer - All things are Moleeds",
"Ahn Trio - A modern take on piano violin cello",
"Matt Ridley - When ideas have sex",
"Jack Horner - Building a dinosaur from a chicken",
"Sharmeen Obaid-Chinoy - Inside a school for suicide bombers",
"Tim Berners-Lee - A Magna Carta for the web",
"Tristan Harris - How better tech could protect us from distraction",
"Thandie Newton - Embracing otherness embracing myself",
"Franz Freudenthal - A new way to heal hearts without surgery",
"Boniface Mwangi - The day I stood up alone",
"Maryn McKenna - What do we do when antibiotics dont work any more",
"Edward Tenner - Unintended consequences",
"Pearl Arredondo - My story from gangland daughter to star teacher",
"Sugata Mitra - Kids can teach themselves",
"Nicholas Christakis - How social networks predict epidemics",
"Dan Gilbert - The surprising science of happiness",
"Kaki King - A musical escape into a world of light and color",
"Carl Schoonover - How to look inside the brain",
"Ben Ambridge - 10 myths about psychology debunked",
"Simon Sinek - Why good leaders make you feel safe",
"Rives - If I controlled the Internet",
"Eric Mead - The magic of the placebo",
"Norman Spack - How I help transgender teens become who they want to be",
"Joshua Klein - A thought experiment on the intelligence of crows",
"Kate Stone - DJ decks made of ... paper",
"Sugata Mitra - The child-driven education",
"Misha Glenny - How global crime networks work",
"Will Wright - Spore birth of a game",
"Rory Stewart - Why democracy matters",
"Sarah Lewis - Embrace the near win",
"Samuel Cohen - Alzheimers is not normal aging and we can cure it",
"Francesco Sauro - Deep under the Earths surface discovering beauty and science",
"Sheikha Al Mayassa - Globalizing the local localizing the global",
"Kristen Marhaver - How were growing baby corals to rebuild reefs",
"Shirin Neshat - Art in exile",
"VS Ramachandran - 3 clues to understanding your brain",
"Carin Bondar - The birds and the bees are just the beginning",
"Markham Nolan - How to separate fact and fiction online",
"AJ Jacobs - The worlds largest family reunion ... were all invited",
"Klaus Stadlmann - The worlds smallest 3D printer",
"Frans Lanting - Photos that give voice to the animal kingdom",
"Shukla Bose - Teaching one child at a time",
"Bill Gates - The next outbreak Were not ready",
"Malcolm Gladwell - The unheard story of David and Goliath",
"Pico Iyer - The art of stillness",
"Ian Dunbar - Dog-friendly dog training",
"Patricia Burchat - Shedding light on dark matter",
"Cary Fowler - One seed at a time protecting the future of food",
"Sergey Brin Larry Page - The genesis of Google",
"Lewis Pugh - My mind-shifting Everest swim",
"Murray Gell-Mann - Beauty truth and ... physics",
"Kevin Briggs - The bridge between suicide and life",
"Nizar Ibrahim - How we unearthed the Spinosaurus",
"Kang Lee - Can you really tell if a kid is lying",
"Zach Kaplan Keith Schacht - Toys and materials from the future",
"Caroline Casey - Looking past limits",
"Heather Brooke - My battle to expose government corruption",
"Rachel Sussman - The worlds oldest living things",
"Brewster Kahle - A free digital library",
"Read Montague - What were learning from 5000 brains",
"Dyan deNapoli - The great penguin rescue",
"Hamish Jolly - A shark-deterrent wetsuit and its not what you think",
"Mick Mountz - What happens inside those massive warehouses",
"Kary Mullis - Play Experiment Discover",
"Clay Shirky - How social media can make history",
"Taiye Selasi - Dont ask where Im from ask where Im a local",
"Roger McNamee - 6 ways to save the internet",
"Paul MacCready - A flight on solar wings",
"Robin Chase - Excuse me may I rent your car",
"Juan Enriquez - Will our kids be a different species",
"Shaka Senghor - Why your worst deeds dont define you",
"Carolyn Porco - Could a Saturn moon harbor life",
"Jamie Drummond - Lets crowdsource the worlds goals",
"Pankaj Ghemawat - Actually the world isnt flat",
"Imogen Heap - Wait It Out",
"Julian Treasure - Why architects need to use their ears",
"Eric Whitacre - A virtual choir 2000 voices strong",
"James Cameron - Before Avatar ... a curious boy",
"Bilal Bomani - Plant fuels that could power a jet",
"Henry Markram - A brain in a supercomputer",
"Marc Kushner - Why the buildings of the future will be shaped by ... you",
"Kary Mullis - A next-gen cure for killer infections",
"Todd Kuiken - A prosthetic arm that feels",
"Sherry Turkle - Connected but alone",
"Barton Seaver - Sustainable seafood Lets get smart",
"Freeman Hrabowski - 4 pillars of college success in science",
"Philip Zimbardo - The demise of guys",
"Takaharu Tezuka - The best kindergarten youve ever seen",
"Daniel Reisel - The neuroscience of restorative justice",
"Nina Tandon - Caring for engineered tissue",
"Eric Lewis - Piano jazz that rocks",
"Eva Vertes - Meet the future of cancer research",
"Lucien Engelen - Crowdsource your health",
"Laurel Braitman - Depressed dogs cats with OCD what animal madness means for us humans",
"Nicholas Negroponte - 5 predictions from 1984",
"Harald Haas - Forget Wi-Fi. Meet the new Li-Fi Internet",
"Michael Porter - The case for letting business solve social problems",
"Charity Tillemann-Dick - Singing after a double lung transplant",
"Shigeru Ban - Emergency shelters made from paper",
"Peter Saul - Lets talk about dying",
"Jonathan Trent - Energy from floating algae pods",
"Jarrett J. Krosoczka - How a boy became an artist",
"Jason Fried - Why work doesnt happen at work",
"Cynthia Schneider - The surprising spread of Idol TV",
"Jeremy Gilley - One day of peace",
"Shih Chieh Huang - Sculptures thatd be at home in the deep sea",
"Nicholas Negroponte - One Laptop per Child two years on",
"Scott Kim - The art of puzzles",
"Sal Khan - Lets use video to reinvent education",
"Achenyo Idachaba - How I turned a deadly plant into a thriving business",
"Anote Tong - My country will be underwater soon -- unless we work together",
"Hans Rosling - The good news of the decade Were winning the war against child mortality",
"Gerard Ryle - How the Panama Papers journalists broke the biggest leak in history",
"Lidia Yuknavitch - The beauty of being a misfit",
"Marian Bantjes - Intricate beauty by design",
"Raymond Wang - How germs travel on planes -- and how we can stop them",
"Sanford Biggers - An artists unflinching look at racial violence",
"Khadija Gbla - My mothers strange definition of empowerment",
"Tom Wujec - Got a wicked problem First tell me how you make toast",
"Mike Rowe - Learning from dirty jobs",
"Nick Bostrom - A philosophical quest for our biggest problems",
"Gever Tulley - Life lessons through tinkering",
"Edith Widder - The weird wonderful world of bioluminescence",
"Joshua Prince-Ramus - Behind the design of Seattles library",
"Avi Reichental - Whats next in 3D printing",
"Stacey Kramer - The best gift I ever survived",
"Roger Doiron - My subversive garden plot",
"Alexander Tsiaras - Conception to birth -- visualized",
"Juan Enriquez - The next species of human",
"Steven Pinker - The surprising decline in violence",
"Margaret Gould Stewart - How YouTube thinks about copyright",
"Hanna Rosin - New data on the rise of women",
"Jared Diamond - How societies can grow old better",
"Eleanor Longden - The voices in my head",
"Marvin Minsky - Health and the human mind",
"Marlene Zuk - What we learn from insects sex lives",
"Mike Velings - The case for fish farming",
"Lisa Nip - How humans could evolve to survive in space",
"Dan Gross - Why gun violence cant be our new normal",
"AJ Jacobs - My year of living biblically",
"Allan Adams - The discovery that could rewrite physics",
"Vik Muniz - Art with wire sugar chocolate and string",
"Romulus Whitaker - The real danger lurking in the water",
"Alice Bows-Larkin - Climate change is happening. Heres how we adapt",
"Paul Bloom - Can prejudice ever be a good thing",
"Uri Alon - Why science demands a leap into the unknown",
"Wadah Khanfar - A historic moment in the Arab world",
"Kirk Sorensen - Thorium an alternative nuclear fuel",
"Jamil Abu-Wardeh - The Axis of Evil Middle East Comedy Tour",
"Emily Pilloton - Teaching design for change",
"Michael Bodekaer - This virtual lab will revolutionize science class",
"Dale Dougherty - We are makers",
"Rajesh Rao - A Rosetta Stone for a lost language",
"Martin Reeves - How to build a business that lasts 100 years",
"Catherine Crump - The small and surprisingly dangerous detail the police track about you",
"Adam Sadowsky - How to engineer a viral music video",
"Barat Ali Batoor - My desperate journey with a human smuggler",
"Sheila Nirenberg - A prosthetic eye to treat blindness",
"Joe Kowan - How I beat stage fright",
"Sue Desmond-Hellmann - A smarter more precise way to think about public health",
"Johann Hari - Everything you think you know about addiction is wrong",
"Michelle Obama - A passionate personal case for education",
"Richard Seymour - How beauty feels",
"Hans Rosling - Global population growth box by box",
"Kailash Satyarthi - How to make peace Get angry",
"Majora Carter - 3 stories of local eco-entrepreneurship",
"Rodney Brooks - Robots will invade our lives",
"Eric Dishman - Health care should be a team sport",
"Bruce Aylward - How well stop polio for good",
"Benjamin Wallace - The price of happiness",
"Shereen El Feki - A little-told tale of sex and sensuality",
"Temple Grandin - The world needs all kinds of minds",
"Ann Morgan - My year reading a book from every country in the world",
"Stefon Harris - There are no mistakes on the bandstand",
"Amit Sood - Every piece of art youve ever wanted to see -- up close and searchable",
"Daphne Koller - What were learning from online education",
"John McWhorter - Txtng is killing language. JK",
"Nellie McKay - The Dog Song",
"Robert Palmer - The Panama Papers exposed a huge global problem. Whats next",
"Howard Rheingold - The new power of collaboration",
"Srdja Popovic - How to topple a dictator",
"Yoruba Richen - What the gay rights movement learned from the civil rights movement",
"Andreas Schleicher - Use data to build better schools",
"P.W. Singer - Military robots and the future of war",
"Tal Golesworthy - How I repaired my own heart",
"Bill Clinton - My wish Rebuilding Rwanda",
"Bart Knols - 3 new ways to kill mosquitoes",
"Quixotic Fusion - Dancing with light",
"Catherine Mohr - Surgerys past present and robotic future",
"Janine Shepherd - A broken body isnt a broken person",
"Devdutt Pattanaik - East vs. West -- the myths that mystify",
"Sebastian Thrun - Googles driverless car",
"Seth Berkley - The troubling reason why vaccines are made too late ... if theyre made at all",
"Joseph DeSimone - What if 3D printing was 100x faster",
"Robyn Stein DeLuca - The good news about PMS",
"Derek Sivers - How to start a movement",
"Helen Fisher - Technology hasnt changed love. Heres why",
"Jeff Hawkins - How brain science will change computing",
"Julia Sweeney - Letting go of God",
"Adam Grant - The surprising habits of original thinkers",
"Steven Wise - Chimps have feelings and thoughts. They should also have rights",
"Asher Hasan - My message of peace from Pakistan",
"Jackson Browne - A song inspired by the ocean",
"Nick Bostrom - What happens when our computers get smarter than we are",
"Jill Bolte Taylor - My stroke of insight",
"Stuart Brown - Play is more than just fun",
"Meaghan Ramsey - Why thinking youre ugly is bad for you",
"James Green - 3 moons and a planet that could have alien life",
"John Bohannon - Dance vs. powerpoint a modest proposal",
"Sergei Lupashin - A flying camera ... on a leash",
"Sarah-Jayne Blakemore - The mysterious workings of the adolescent brain",
"Elise Roy - When we design for disability we all benefit",
"Jill Sobule Julia Sweeney - The Jill and Julia Show",
"Dalia Mogahed - What do you think when you look at me",
"Blaise Agera y Arcas - How computers are learning to be creative",
"David Rothkopf - How fear drives American politics",
"David Blaine - How I held my breath for 17 minutes",
"Rob Knight - How our microbes make us who we are",
"Lisa Dyson - A forgotten Space Age technology could change how we grow food",
"Suzanne Talhouk - Dont kill your language",
"Keith Chen - Could your language affect your ability to save money",
"Bruce Feiler - The council of dads",
"Pardis Sabeti - How well fight the next deadly virus",
"Alison Jackson - An unusual glimpse at celebrity",
"Henry Lin - What we can learn from galaxies far far away",
"Niall Ferguson - The 6 killer apps of prosperity",
"Christopher moot Poole - The case for anonymity online",
"Siyanda Mohutsiwa - How young Africans found a voice on Twitter",
"Batrice Coron - Stories cut from paper",
"Theo Jansen - My creations a new form of life",
"Nicole Paris and Ed Cage - A beatboxing lesson from a father-daughter duo",
"Manal al-Sharif - A Saudi woman who dared to drive",
"Safwat Saleem - Why I keep speaking up even when people mock my accent",
"Aparna Rao - High-tech art with a sense of humor",
"Billie Jean King - This tennis icon paved the way for women in sports",
"Joseph Ravenell - How barbershops can keep men healthy",
"AnnMarie Thomas - Hands-on science with squishy circuits",
"Robert Gordon - The death of innovation the end of growth",
"Andrew Connolly - Whats the next window into our universe",
"Jonathan Harris - The web as art",
"Natalie Jeremijenko - The art of the eco-mindshift",
"Felix Dennis - Odes to vice and consequences",
"Teitur - Home is a song Ive always remembered",
"Reed Kroloff - A tour of modern architecture",
"Dan Ariely - What makes us feel good about our work",
"Stella Young - Im not your inspiration thank you very much",
"Susan Solomon - The promise of research with stem cells",
"Emma Marris - Nature is everywhere -- we just need to learn to see it",
"Nicolas Perony - Puppies Now that Ive got your attention complexity theory",
"Matt Cutts - Try something new for 30 days",
"Dawn Landes - A song for my hero the woman who rowed into a hurricane",
"Sam Martin - Claim your manspace",
"Neil Burgess - How your brain tells you where you are",
"Russell Foster - Why do we sleep",
"Eames Demetrios - The design genius of Charles Ray Eames",
"Daniel H. Cohen - For arguments sake",
"George Whitesides - A lab the size of a postage stamp",
"George Dyson - The birth of the computer",
"Jonathan Haidt - How common threats can make common political ground",
"The interspecies internet An idea in progress",
"Suzanne Simard - How trees talk to each other",
"Catarina Mota - Play with smart materials",
"George Takei - Why I love a country that once betrayed me",
"Anas Aremeyaw Anas - How I named shamed and jailed",
"Maz Jobrani - A Saudi an Indian and an Iranian walk into a Qatari bar ...",
"Nina Jablonski - Skin color is an illusion",
"Martin Pistorius - How my mind came back to life and no one knew",
"Beverly Dereck Joubert - Life lessons from big cats",
"Fields Wicker-Miurin - Learning from leaderships missing manual",
"Greg Gage - How to control someone elses arm with your brain",
"Peter Hirshberg - The web is more than better TV",
"Eduardo Senz de Cabezn - Math is forever",
"Celeste Headlee - 10 ways to have a better conversation",
"Rory Sutherland - Perspective is everything",
"Michael Dickinson - How a fly flies",
"Jeremy Kasdin - The flower-shaped starshade that might help us detect Earth-like planets",
"Stewart Brand - 4 environmental heresies",
"Tom Thum - The orchestra in my mouth",
"Daniel Pauly - The oceans shifting baseline",
"Noah Feldman - Politics and religion are technologies",
"Alexander Betts - Why Brexit happened -- and what to do next",
"Natasha Tsakos - A multimedia theatrical adventure",
"Christopher Soghoian - Government surveillance this is just the beginning",
"Einstein the Parrot - A talking squawking parrot",
"Michael Pollan - A plants-eye view",
"Louie Schwartzberg - The hidden beauty of pollination",
"Sakena Yacoobi - How I stopped the Taliban from shutting down my school",
"Barry Schwartz - The paradox of choice",
"Philippa Neave - The unexpected challenges of a countrys first election",
"James Burchfield - Playing invisible turntables",
"Stefan Sagmeister - The power of time off",
"Eric Berlow - Simplifying complexity",
"Sarah Parcak - Hunting for Perus lost civilizations -- with satellites",
"Margaret Gould Stewart - How giant websites design for you and a billion others too",
"Maz Jobrani - Did you hear the one about the Iranian-American",
"Sarah Jones - What does the future hold 11 characters offer quirky answers",
"Julie Burstein - 4 lessons in creativity",
"Joe Smith - How to use a paper towel",
"Vanessa Ruiz - The spellbinding art of human anatomy",
"Daniel Schnitzer - Inventing is the easy part. Marketing takes work",
"John Gerzema - The post-crisis consumer",
"Andrew Mwenda - Aid for Africa No thanks.",
"Ed Ulbrich - How Benjamin Button got his face",
"Amy Purdy - Living beyond limits",
"Charles Leadbeater - The era of open innovation",
"Cristina Domenech - Poetry that frees the soul",
"Jon Ronson - Strange answers to the psychopath test",
"Michael Specter - The danger of science denial",
"Ellen Gustafson - Obesity hunger 1 global food issue",
"Ivan Oransky - Are we over-medicalized",
"Frederic Kaplan - How to build an information time machine",
"Robert Muggah - How to protect fast-growing cities from failing",
"Charmian Gooch - Meet global corruptions hidden players",
"Jennifer Pahlka - Coding a better government",
"Jon Gosier - The problem with trickle-down techonomics",
"Janet Echelman - Taking imagination seriously",
"R. Luke DuBois - Insightful human portraits made from data",
"Monica Byrne - A sci-fi vision of love from a 318-year-old hologram",
"Erin McKean - The joy of lexicography",
"Andrew Blum - Discover the physical side of the internet",
"Judy MacDonald Johnston - Prepare for a good end of life",
"Mark Forsyth - Whats a snollygoster A short lesson in political speak",
"John Legend - Redemption Song",
"Jonathan Drori - The beautiful tricks of flowers",
"David Pogue - Simplicity sells",
"Scilla Elworthy - Fighting with nonviolence",
"Arvind Gupta - Turning trash into toys for learning",
"Paul Rothemund - Playing with DNA that self-assembles",
"Dianna Cohen - Tough truths about plastic pollution",
"Aimee Mullins - Changing my legs - and my mindset",
"Steven Schwaitzberg - A universal translator for surgeons",
"Bob Mankoff - Anatomy of a New Yorker cartoon",
"Sara Seager - The search for planets beyond our solar system",
"Ellen Dunham-Jones - Retrofitting suburbia",
"Pamela Meyer - How to spot a liar",
"Jonathan Klein - Photos that changed the world",
"Justin Hall-Tipping - Freeing energy from the grid",
"Barry Schuler - Genomics 101",
"Eric Topol - The wireless future of medicine",
"Jeff Han - The radical promise of the multi-touch interface",
"Ray Zahab - My trek to the South Pole",
"Ronny Edry - Israel and Iran A love story",
"Raul Midon - Peace on Earth",
"Tim Berners-Lee - The next web",
"Daniel Kish - How I use sonar to navigate the world",
"Daniel Tammet - Different ways of knowing",
"Beth Noveck - Demand a more open-source government",
"Bunker Roy - Learning from a barefoot movement",
"Miru Kim - My underground art explorations",
"Qi Zhang - An electrifying organ performance",
"Erik Hersman - Reporting crisis via texting",
"Dan Pacholke - How prisons can help inmates live meaningful lives",
"Bjorn Lomborg - Global priorities bigger than climate change",
"Natalie Warne - Being young and making an impact",
"Stephen Lawler - Tour Microsofts Virtual Earth",
"Matthew Williams - Special Olympics let me be myself -- a champion",
"Taryn Simon - Photographs of secret sites",
"Laurie Garrett - Lessons from the 1918 flu",
"Camille A. Brown - A visual history of social dance in 25 moves",
"Niels Diffrient - Rethinking the way we sit down",
"Sheryl WuDunn - Our centurys greatest injustice",
"Anne-Marie Slaughter - Can we all have it all",
"Emiliano Salinas - A civil response to violence",
"Wade Davis - Dreams from endangered cultures",
"Vijay Kumar - Robots that fly ... and cooperate",
"Vicki Arroyo - Lets prepare for our new climate",
"John Green - The nerds guide to learning everything online",
"Clay Shirky - How cognitive surplus will change the world",
"Rob Harmon - How to keep rivers and streams flowing",
"Malcolm Gladwell - Choice happiness and spaghetti sauce",
"Romina Libster - The power of herd immunity",
"Clint Smith - The danger of silence",
"Peter Attia - Is the obesity crisis hiding a bigger problem",
"Chris Abani - On humanity",
"Renny Gleeson - 404 the story of a page not found",
"Damon Horowitz - We need a moral operating system",
"John Maeda - How art technology and design inform creative leaders",
"Gero Miesenboeck - Re-engineering the brain",
"Sxip Shirey Rachelle Garniez - A performance with breath music passion",
"Bill Gross - The single biggest reason why startups succeed",
"Naturally 7 - A full-band beatbox",
"Chris Hadfield - What I learned from going blind in space",
"Rick Guidotti - From stigma to supermodel",
"Linda Hill - How to manage for collective creativity",
"Jeff Hancock - The future of lying",
"Nic Marks - The Happy Planet Index",
"Ray Kurzweil - The accelerating power of technology",
"Adam Savage - My love letter to cosplay",
"Jessa Gamble - Our natural sleep cycle is nothing like what we do now",
"Michael Moschen - Juggling as art ... and science",
"Gregory Heyworth - How Im discovering the secrets of ancient texts",
"Frances Larson - Why public beheadings get millions of views",
"Matthew Childs - 9 life lessons from rock climbing",
"Dambisa Moyo - Is China the new idol for emerging economies",
"Jacek Utko - Can design save newspapers",
"David Kelley - How to build your creative confidence",
"Eli Pariser - Beware online filter bubbles",
"Simon Sinek - How great leaders inspire action",
"Jennifer 8. Lee - The hunt for General Tso",
"Eve Ensler - What security means to me",
"Maysoon Zayid - I got 99 problems ... palsy is just one",
"Ory Okolloh - How I became an activist",
"Marc Koska - 1.3m reasons to re-invent the syringe",
"Michelle Borkin - Can astronomers help doctors",
"Nandan Nilekani - Ideas for Indias future",
"Abha Dawesar - Life in the digital now",
"Moshe Safdie - How to reinvent the apartment building",
"Melvin Russell - I love being a police officer but we need reform",
"Nancy Lublin - How data from a crisis text line is saving lives",
"Emily Balcetis - Why some people find exercise harder than others",
"Molly Winter - The taboo secret to better health",
"LZ Granderson - The myth of the gay agenda",
"Noel Bairey Merz - The single biggest health threat women face",
"Maajid Nawaz - A global culture to fight extremism",
"Afra Raymond - Three myths about corruption",
"Ellen Jorgensen - Biohacking -- you can do it too",
"LaToya Ruby Frazier - A visual history of inequality in industrial America",
"Pamela Ronald - The case for engineering our food",
"Carol Dweck - The power of believing that you can improve",
"Isabel Allende - Tales of passion",
"Lemon Andersen - Please dont take my Air Jordans",
"Neha Narula - The future of money",
"Wade Davis - The worldwide web of belief and ritual",
"Robert Full - The secrets of natures grossest creatures channeled into robots",
"Deborah Rhodes - A test that finds 3x more breast tumors and why its not available to you",
"Martine Rothblatt - My daughter my wife our robot and the quest for immortality",
"Tim Brown - Tales of creativity and play",
"Moshe Safdie - Building uniqueness",
"JR - My wish Use art to turn the world inside out",
"Leana Wen - What your doctor wont disclose",
"Two young scientists break down plastics with bacteria",
"Atul Gawande - How do we heal medicine",
"Roz Savage - Why Im rowing across the Pacific",
"Martin Rees - Is this our final century",
"Ross Lovegrove - Organic design inspired by nature",
"Alwar Balasubramaniam - Art of substance and absence",
"Isabel Allende - How to live passionatelyno matter your age",
"Michael Green - How we can make the world a better place by 2030",
"Vilayanur Ramachandran - The neurons that shaped civilization",
"James Lyne - Everyday cybercrime -- and what you can do about it",
"Michael Norton - How to buy happiness",
"Brian Little - Who are you really The puzzle of personality",
"Brian Greene - Is our universe the only universe",
"Robert Ballard - The astonishing hidden world of the deep ocean",
"Jackie Savitz - Save the oceans feed the world",
"Jennifer Doudna - How CRISPR lets us edit our DNA",
"Steven Johnson - Where good ideas come from",
"Danit Peleg - Forget shopping. Soon youll download your new clothes",
"Jarreth Merz - Filming democracy in Ghana",
"Leah Buechley - How to sketch with electronics",
"Elif Shafak - The politics of fiction",
"Rokia Traore - MBifo",
"Jocelyne Bloch - The brain may be able to repair itself -- with help",
"Toni Griffin - A new vision for rebuilding Detroit",
"James Howard Kunstler - The ghastly tragedy of the suburbs",
"Misha Glenny - Hire the hackers",
"Jessi Arrington - Wearing nothing new",
"Martin Rees - Can we prevent the end of the world",
"David S. Rose - How to pitch to a VC",
"David Epstein - Are athletes really getting faster better stronger",
"Thomas Peschak - Dive into an ocean photographers world",
"Anders Ynnerman - Visualizing the medical data explosion",
"Gabriel Barcia-Colombo - Capturing memories in video art",
"Christopher Bell - Bring on the female superheroes",
"Renata Salecl - Our unhealthy obsession with choice",
"Murray Gell-Mann - The ancestor of language",
"Alain de Botton - A kinder gentler philosophy of success",
"David Gallo - Underwater astonishments",
"Julian Baggini - Is there a real you",
"David Gallo - Life in the deep oceans",
"Mark Pagel - How language transformed humanity",
"Prosanta Chakrabarty - Clues to prehistoric times found in blind cavefish",
"Seth Godin - The tribes we lead",
"Nonny de la Pea - The future of news Virtual reality",
"Sasa Vucinic - Why we should invest in a free press",
"Dalia Mogahed - The attitudes that sparked Arab Spring",
"Michael Green - What the Social Progress Index can reveal about your country",
"Raffaello DAndrea - The astounding athletic power of quadcopters",
"Natalie MacMaster - Cape Breton fiddling in reel time",
"Julie Taymor - Spider-Man The Lion King and life on the creative edge",
"David Hoffman - What happens when you lose everything",
"Willie Smits - How to restore a rainforest",
"Nicholas Negroponte - A 30-year history of the future",
"Marwa Al-Sabouni - How Syrias architecture laid the foundation for brutal war",
"Kartick Satyanarayan - How we rescued the dancing bears",
"Michael Pritchard - How to make filthy water drinkable",
"Stanley McChrystal - The military case for sharing knowledge",
"Mary Roach - 10 things you didnt know about orgasm",
"Jim Simons - A rare interview with the mathematician who cracked Wall Street",
"Sean Gourley - The mathematics of war",
"Marco Tempest - A cyber-magic card trick like no other",
"Giles Duley - When a reporter becomes the story",
"Virginia Postrel - On glamour",
"Phil Hansen - Embrace the shake",
"Sandrine Thuret - You can grow new brain cells. Heres how",
"Hugh Herr - The new bionics that let us run climb and dance",
"Fabian Oefner - Psychedelic science",
"Nick Sears - Demo The Orb",
"Tim Jackson - An economic reality check",
"Eva Zeisel - The playful search for beauty",
"Barry Schwartz - The way we think about work is broken",
"Grgoire Courtine - The paralyzed rat that walked",
"Haley Van Dyck - How a start-up in the White House is changing business as usual",
"Ralph Langner - Cracking Stuxnet a 21st-century cyber weapon",
"Inge Missmahl - Bringing peace to the minds of Afghanistan",
"Al Seckel - Visual illusions that show how we misthink",
"Elaine Morgan - I believe we evolved from aquatic apes",
"Aditi Gupta - A taboo-free way to talk about periods",
"Antonio Damasio - The quest to understand consciousness",
"Asha de Vos - Why you should care about whale poo",
"Norman Lear - An entertainment icon on living a life of meaning",
"Harry Baker - A love poem for lonely prime numbers",
"Sebastian Junger - Why veterans miss war",
"Melissa Fleming - A boat carrying 500 refugees sunk at sea. The story of two survivors",
"Daphne Bavelier - Your brain on video games",
"Glenn Greenwald - Why privacy matters",
"Sarah Gray - How my sons short life made a lasting difference",
"Stefan Sagmeister - Happiness by design",
"Shimon Steinberg - Natural pest control ... using bugs",
"Nathalie Cabrol - How Mars might hold the secret to the origin of life",
"Colin Grant - How our stories cross over",
"Robert Gupta - Between music and medicine",
"Noy Thrupkaew - Human trafficking is all around you. This is how it works",
"Pranav Mistry - The thrilling potential of SixthSense technology",
"Alan Eustace - I leapt from the stratosphere. Heres how I did it",
"Lisa Harouni - A primer on 3D printing",
"Wes Moore - How to talk to veterans about the war",
"Nadia Al-Sakkaf - See Yemen through my eyes",
"Mark Roth - Suspended animation is within our grasp",
"Ole Scheeren - Why great architecture should tell a story",
"Harish Manwani - Profits not always the point",
"Kathryn Schulz - On being wrong",
"Toby Shapshak - You dont need an app for that",
"Joseph Nye - Global power shifts",
"Caroline Lavelle - Casting a spell on the cello",
"Tim Urban - Inside the mind of a master procrastinator",
"James Stavridis - A Navy Admirals thoughts on global security",
"Peter Singer - The why and how of effective altruism",
"Don Tapscott - How the blockchain is changing money and business",
"Julian Treasure - The 4 ways sound affects us",
"Mitchell Besser - Mothers helping mothers fight HIV",
"Ameera Harouda - Why I put myself in danger to tell the stories of Gaza",
"Neri Oxman - Design at the intersection of technology and biology",
"Jamila Lyiscott - 3 ways to speak English",
"Paul Debevec - Animating a photo-real digital face",
"Alex Kipman - A futuristic vision of the age of holograms",
"Torsten Reil - Animate characters by evolving them",
"John Graham-Cumming - The greatest machine that never was",
"Geoffrey West - The surprising math of cities and corporations",
"Shea Hembrey - How I became 100 artists",
"Rishi Manchanda - What makes us get sick Look upstream",
"Dan Ariely - Beware conflicts of interest",
"Camille Seaman - Photos from a storm chaser",
"Derek Paravicini and Adam Ockelford - In the key of genius",
"Rana el Kaliouby - This app knows how you feel -- from the look on your face",
"Adam Driver - My journey from Marine to actor",
"Lawrence Lessig - Laws that choke creativity",
"Sarah Parcak - Archaeology from space",
"Laura Carstensen - Older people are happier",
"Rebecca MacKinnon - Lets take back the Internet",
"Deborah Gordon - The emergent genius of ant colonies",
"Aleph Molinari - Lets bridge the digital divide",
"Zahra Langhi - Why Libyas revolution didnt work -- and what might",
"Susan Shaw - The oil spills toxic trade-off",
"Ben Saunders - Why bother leaving the house",
"Shashi Tharoor - Why nations should pursue soft power",
"Bruce McCall - What is retro-futurism",
"Rupal Patel - Synthetic voices as unique as fingerprints",
"JoAnn Kuchera-Morin - Stunning data visualization in the AlloSphere",
"Tyler Cowen - Be suspicious of simple stories",
"Yang Lan - The generation thats remaking China",
"David Agus - A new strategy in the war on cancer",
"Paddy Ashdown - The global power shift",
"Chris McKnett - The investment logic for sustainability",
"Robert Hammond - Building a park in the sky",
"Philip Zimbardo - The psychology of time",
"Shabana Basij-Rasikh - Dare to educate Afghan girls",
"Edward Burtynsky - Photographing the landscape of oil",
"Johanna Blakley - Lessons from fashions free culture",
"Alix Generous - How I learned to communicate my inner life with Aspergers",
"Geena Rocero - Why I must come out",
"Christopher deCharms - A look inside the brain in real time",
"Latif Nasser - You have no idea where camels really come from",
"David Brooks - Should you live for your rsum ... or your eulogy",
"Nathan Wolfe - The jungle search for viruses",
"Alanna Shaikh - How Im preparing to get Alzheimers",
"Nate Silver - Does racism affect how you vote",
"Amanda Burden - How public spaces make cities work",
"Kio Stark - Why you should talk to strangers",
"Reggie Watts - Beats that defy boxes",
"Kimberley Motley - How I defend the rule of law",
"Diane Kelly - What we didnt know about penis anatomy",
"Helen Fisher - The brain in love",
"Amit Sood - Building a museum of museums on the web",
"Adam Ostrow - After your final status update",
"Paul Collier - New rules for rebuilding a broken nation",
"Manwar Ali - Inside the mind of a former radical jihadist",
"David Rockwell - A memorial at Ground Zero",
"Evan Williams - The voices of Twitter users",
"Doris Kearns Goodwin - Lessons from past presidents",
"Andy Puddicombe - All it takes is 10 mindful minutes",
"David Christian - The history of our world in 18 minutes",
"Lisa Bu - How books can open your mind",
"Shai Reshef - An ultra-low-cost college degree",
"Freeman Dyson - Lets look for life in the outer solar system",
"Jacqueline Novogratz - An escape from poverty",
"Yossi Vardi - Were worried about local warming ... in your lap",
"Keith Bellows - The camels hump",
"Maurizio Seracini - The secret lives of paintings",
"Jane Goodall - What separates us from chimpanzees",
"Keolu Fox - Why genetic research must be more diverse",
"Jane Fonda - Lifes third act",
"Zeresenay Alemseged - The search for humanitys roots",
"Sarah Kay - How many lives can you live",
"Tan Le - My immigration story",
"Kristen Ashburn - The face of AIDS in Africa",
"Kevin Kelly - How technology evolves",
"Ron Finley - A guerilla gardener in South Central LA",
"JD Schramm - Break the silence for suicide attempt survivors",
"Jorge Soto - The future of early cancer detection",
"David McCandless - The beauty of data visualization",
"Alice Rawsthorn - Pirates nurses and other rebel designers",
"Dan Dennett - The illusion of consciousness",
"Neil MacGregor - 2600 years of history in one object",
"Paul Greenberg - The four fish were overeating -- and what to eat instead",
"Michael Merzenich - Growing evidence of brain plasticity",
"Dame Stephanie Shirley - Why do ambitious women have flat heads",
"George Smoot - The design of the universe",
"Suki Kim - This is what its like to go undercover in North Korea",
"Riccardo Sabatini - How to read the genome and build a human being",
"Gary Kovacs - Tracking our online trackers",
"Elizabeth Lev - The unheard story of the Sistine Chapel",
"Bel Pesce - 5 ways to kill your dreams",
"Melissa Garren - The sea weve hardly seen",
"Lisa Gansky - The future of business is the mesh",
"Tim Harford - Trial error and the God complex",
"Chelsea Shields - How Im working for change inside my church",
"Richard Sears - Planning for the end of oil",
"Stephen Hawking - Questioning the universe",
"Bono - My wish Three actions for Africa",
"Michael Metcalfe - We need money for aid. So lets print it.",
"Roy Gould Curtis Wong - A preview of the WorldWide Telescope",
"Blaise Agera y Arcas - How PhotoSynth can connect the worlds images",
"Sivamani - Rhythm is everything everywhere",
"Thulasiraj Ravilla - How low-cost eye care can be world-class",
"Chieko Asakawa - How new technology helps blind people explore the world",
"Tom Shannon John Hockenberry - The painter and the pendulum",
"Steven Johnson - The Web as a city",
"Matthieu Ricard - The habits of happiness",
"Charles Leadbeater - Education innovation in the slums",
"Megan Washington - Why I live in mortal dread of public speaking",
"Alessandro Acquisti - What will a future without secrets look like",
"David Perry - Are games better than life",
"Rory Sutherland - Sweat the small stuff",
"Woody Norris - Hypersonic sound and other inventions",
"Abigail Marsh - Why some people are more altruistic than others",
"Heather Barnett - What humans can learn from semi-intelligent slime",
"Matthew White - The modern euphonium",
"Sebastian Junger - Our lonely society makes it hard to come home from war",
"Mary Bassett - Why your doctor should care about social justice",
"Anil Gupta - Indias hidden hotbeds of invention",
"Ze Frank - Are you human",
"Harsha Bhogle - The rise of cricket the rise of India",
"Sylvia Earle - My wish Protect our oceans",
"James Veitch - The agony of trying to unsubscribe",
"Steven Strogatz - The science of sync",
"Mark Bittman - Whats wrong with what we eat",
"Ray Kurzweil - A university for the coming singularity",
"Ryan Holladay - To hear this music you have to be there. Literally",
"Dave Eggers - My wish Once Upon a School",
"Tony Wyss-Coray - How young blood might help reverse aging. Yes really",
"Shyam Sankar - The rise of human-computer cooperation",
"Melissa Fleming - Lets help refugees thrive not just survive",
"James Flynn - Why our IQ levels are higher than our grandparents",
"Eman Mohammed - The courage to tell a hidden story",
"Loretta Napoleoni - The intricate economics of terrorism",
"David Byrne - How architecture helped music evolve",
"Hyeonseo Lee - My escape from North Korea",
"Jonathan Haidt - Religion evolution and the ecstasy of self-transcendence",
"Olivier Scalabre - The next manufacturing revolution is here",
"Eric X. Li - A tale of two political systems",
"Craig Venter - Sampling the oceans DNA",
"Paul Moller - My dream of a flying car",
"Nathan Myhrvold - Cooking as never seen before",
"Jeff Iliff - One more reason to get a good nights sleep",
"Chip Kidd - Designing books is no laughing matter. OK it is.",
"Christopher C. Deam - The Airstream restyled",
"John Francis - Walk the earth ... my 17-year vow of silence",
"Jill Sobule - Global warmings theme song Manhattan in January",
"Jenni Chang and Lisa Dazols - This is what LGBT life is like around the world",
"Diana Nyad - Extreme swimming with the worlds most dangerous jellyfish",
"Robert Fischell - My wish Three unusual medical inventions",
"Judson Brewer - A simple way to break a bad habit",
"Amber Case - We are all cyborgs now",
"Beeban Kidron - The shared wonder of film",
"Halla Tomasdottir - A feminine response to Icelands financial crash",
"Andrew Bird - A one-man orchestra of the imagination",
"Paul Conneally - How mobile phones power disaster relief",
"Marc Goodman - A vision of crimes in the future",
"Lucianne Walkowicz - Look up for a change",
"David Lang - My underwater robot",
"Scott Dinsmore - How to find work you love",
"Johanna Blakley - Social media and the end of gender",
"Beau Lotto - Optical illusions show how we see",
"Derek Sivers - Weird or just different",
"Michael Pawlyn - Using natures genius in architecture",
"Neil Gershenfeld - Unleash your creativity in a Fab Lab",
"Ken Robinson - Do schools kill creativity",
"Erik Johansson - Impossible photography",
"Faith Jegede Cole - What Ive learned from my autistic brothers",
"John Lloyd - An inventory of the invisible",
"Barbara Natterson-Horowitz - What veterinarians know that physicians dont",
"Elizabeth Loftus - How reliable is your memory",
"Arthur Benjamin - The magic of Fibonacci numbers",
"Paul Tudor Jones II - Why we need to rethink capitalism",
"Jill Shargaa - Please please people. Lets put the awe back in awesome",
"Caleb Harper - This computer will grow your food in the future",
"Norman Foster - My green agenda for architecture",
"Onora ONeill - What we dont understand about trust",
"Suzanne Lee - Grow your own clothes",
"Dave deBronkart - Meet e-Patient Dave",
"Ayah Bdeir - Building blocks that blink beep and teach",
"Jaap de Roode - How butterflies self-medicate",
"Travis Kalanick - Ubers plan to get more people into fewer cars",
"Matt Killingsworth - Want to be happier Stay in the moment",
"Sugata Mitra - Build a School in the Cloud",
"Jeff Bezos - The electricity metaphor for the webs future",
"Hans Rosling - Asias rise -- how and when",
"Lord Nicholas Stern - The state of the climate and what we might do about it",
"Rives - A mockingbird remix of TED2006",
"Sebastian Wernicke - 1000 TED Talks in six words",
"Alan Kay - A powerful idea about ideas",
"Bastian Schaefer - A 3D-printed jumbo jet",
"Laurie Santos - A monkey economy as irrational as ours",
"Parag Khanna - Mapping the future of countries",
"Pilobolus - A dance of Symbiosis",
"Timothy Bartik - The economic case for preschool",
"Van Jones - The economic injustice of plastic",
"Geoffrey Canada - Our failing schools. Enough is enough",
"Cdric Villani - Whats so sexy about math",
"Frans Lanting - The story of life in photographs",
"Jane Fonda and Lily Tomlin - A hilarious celebration of lifelong female friendship",
"Neil Harbisson - I listen to color",
"Gever Tulley - 5 dangerous things you should let your kids do",
"Alex Tabarrok - How ideas trump crises",
"Morgan Spurlock - The greatest TED Talk ever sold",
"Pico Iyer - The beauty of what well never know",
"Shekhar Kapur - We are the stories we tell ourselves",
"Bonnie Bassler - How bacteria talk",
"Shubhendu Sharma - An engineers vision for tiny forests everywhere",
"Jessica Shortall - The US needs paid family leave -- for the sake of its future",
"Peter Diamandis - Stephen Hawkings zero g flight",
"ShaoLan - Learn to read Chinese ... with ease",
"Jinha Lee - Reach into the computer and grab a pixel",
"Maira Kalman - The illustrated woman",
"Rick Warren - A life of purpose",
"Mike deGruy - Hooked by an octopus",
"Ji-Hae Park - The violin and my dark night of the soul",
"Krista Tippett - Reconnecting with compassion",
"Wayne McGregor - A choreographers creative process in real time",
"Tristram Wyatt - The smelly mystery of the human pheromone",
"David MacKay - A reality check on renewables",
"Bjarke Ingels - Hedonistic sustainability",
"JR - One year of turning the world inside out",
"George Whitesides - Toward a science of simplicity",
"James Forbes - Compassion at the dinner table",
"Clint Smith - How to raise a black son in America",
"Jinsop Lee - Design for all 5 senses",
"Blaise Agera y Arcas - Augmented-reality maps",
"Adam Davidson - What we learned from teetering on the fiscal cliff",
"They Might Be Giants - Wake up",
"Gavin Schmidt - The emergent patterns of climate change",
"Ngozi Okonjo-Iweala - Aid versus trade",
"Sleepy Man Banjo Boys - Bluegrass virtuosity from ... New Jersey",
"Boaz Almog - The levitating superconductor",
"Marcel Dicke - Why not eat insects",
"Tyrone Hayes Penelope Jagessar Chaffer - The toxic baby",
"Anna Deavere Smith - Four American characters",
"Ken Robinson - How to escape educations death valley",
"Jonas Eliasson - How to solve traffic jams",
"Bart Weetjens - How I taught rats to sniff out land mines",
"Hans Rosling - Insights on HIV in stunning data visuals",
"Stephen Coleman - Non-lethal weapons a moral hazard",
"Gabe Zichermann - How games make kids smarter",
"Rajiv Maheswaran - The math behind basketballs wildest moves",
"Vusi Mahlasela - Thula Mama",
"Ben Wellington - How we found the worst place to park in New York City -- using big data",
"Rufus Griscom Alisa Volkman - Lets talk parenting taboos",
"Ed Yong - Zombie roaches and other parasite tales",
"Karen Bass - Unseen footage untamed nature",
"Joshua Silver - Adjustable liquid-filled eyeglasses",
"Sirena Huang - An 11-year-olds magical violin",
"Shimpei Takahashi - Play this word game to come up with original ideas",
"Jon Mooallem - How the teddy bear taught us compassion",
"Wendy Chung - Autism what we know and what we dont know yet",
"Abe Davis - New video technology that reveals an objects hidden properties",
"Alison Killing - Theres a better way to die and architecture can help",
"Chris Anderson - TEDs nonprofit transition",
"Quyen Nguyen - Color-coded surgery",
"Rachel Botsman - Weve stopped trusting institutions and started trusting strangers",
"Dan Barasch - A park underneath the hustle and bustle of New York City",
"Benjamin Zander - The transformative power of classical music",
"Sanjay Dastoor - A skateboard with a boost",
"Sting - How I started writing songs again",
"Esta Soler - How we turned the tide on domestic violence Hint the Polaroid helped",
"Ric Elias - 3 things I learned while my plane crashed",
"Dean Kamen - The emotion behind invention",
"Tracy Chevalier - Finding the story inside the painting",
"Denis Dutton - A Darwinian theory of beauty",
"Vincent Cochetel - I was held hostage for 317 days. Heres what I thought about",
"Angela Belcher - Using nature to grow batteries",
"Rachelle Garniez - La Vie en Rose",
"Dan Barber - A foie gras parable",
"John McWhorter - 4 reasons to learn a new language",
"Jamila Raqib - The secret to effective nonviolent resistance",
"Yasheng Huang - Does democracy stifle economic growth",
"Cosmin Mihaiu - Physical therapy is boring -- play a game instead",
"Catherine Bracy - Why good hackers make good citizens",
"Margaret Heffernan - Forget the pecking order at work",
"Jenna McCarthy - What you dont know about marriage",
"Rosie King - How autism freed me to be myself",
"Brian Cox - CERNs supercollider",
"Paul Lewis - How mobile phones helped solve two murders",
"Vincent Moon and Nan Vasconcelos - Hidden music rituals around the world",
"Alan Russell - The potential of regenerative medicine",
"Rebecca MacKinnon - We can fight terror without sacrificing our rights",
"Frederick Balagadde - Bio-lab on a microchip",
"William Black - How to rob a bank from the inside that is",
"Bruno Bowden Rufus Cappadocia - Blindfold origami and cello",
"Paul Romer - The worlds first charter city",
"Hannah Brencher - Love letters to strangers",
"Al Gore - Averting the climate crisis",
"Regina Dugan - From mach-20 glider to hummingbird drone",
"Caleb Chung - Playtime with Pleo your robotic dinosaur friend",
"Francis Collins - We need better drugs -- now",
"Jae Rhim Lee - My mushroom burial suit",
"Keller Rinaudo - A mini robot -- powered by your phone",
"Brian Cox - What went wrong at the LHC",
"Bill Gates - Innovating to zero",
"Siegfried Woldhek - The search for the true face of Leonardo",
"Tania Luna - How a penny made me feel like a millionaire",
"Bill Stone - Im going to the moon. Whos with me",
"eL Seed - A project of peace painted across 50 buildings",
"Srikumar Rao - Plug into your hard-wired happiness",
"Carolyn Porco - This is Saturn",
"Jeremy Heimans - What new power looks like",
"Nina Tandon - Could tissue engineering mean personalized medicine",
"Becky Blanton - The year I was homeless",
"Vikram Patel - Mental health for all by involving all",
"John Maeda - Designing for simplicity",
"TED staff - Its TED the Musical",
"Tim Harford - How frustration can make us more creative",
"Eric Whitacre - Virtual Choir Live",
"Juliana Machado Ferreira - The fight to end rare-animal trafficking in Brazil",
"Al Gore - New thinking on the climate crisis",
"Clifford Stoll - The call to learn",
"Brian Goldman - Doctors make mistakes. Can we talk about that",
"Sonaar Luthra - Meet the Water Canary",
"BJ Miller - What really matters at the end of life",
"Timothy Prestero - Design for people not awards",
"David Burkus - Why you should know how much your coworkers get paid",
"Mac Barnett - Why a good book is a secret door",
"Sophal Ear - Escaping the Khmer Rouge",
"Amos Winter - The cheap all-terrain wheelchair",
"Chip Kidd - The art of first impressions -- in design and life",
"Andrew Bastawrous - Get your next eye exam on a smartphone",
"Oscar Schwartz - Can a computer write poetry",
"Enric Sala - Glimpses of a pristine ocean",
"Colin Camerer - When youre making a deal whats going on in your brain",
"Jamie Bartlett - How the mysterious dark net is going mainstream",
"Lian Pin Koh - A drones-eye view of conservation",
"Jeff Smith - Lessons in business ... from prison",
"Kelli Swazey - Life that doesnt end with death",
"Naomi Oreskes - Why we should trust scientists",
"Jonathan Zittrain - The Web as random acts of kindness",
"Louie Schwartzberg - Nature. Beauty. Gratitude.",
"Parag Khanna - How megacities are changing the map of the world",
"Rob Reid - The 8 billion iPod",
"Isaac Lidsky - What reality are you creating for yourself",
"Michael Rubinstein - See invisible motion hear silent sounds",
"Maya Penn - Meet a young entrepreneur cartoonist designer activist ...",
"Alison Gopnik - What do babies think",
"Jane Goodall - How humans and animals can live together",
"Arunachalam Muruganantham - How I started a sanitary napkin revolution",
"David Bolinsky - Visualizing the wonder of a living cell",
"Daniel Suarez - The kill decision shouldnt belong to a robot",
"James Hansen - Why I must speak out about climate change",
"Anand Giridharadas - A tale of two Americas. And the mini-mart where they collided",
"Jose Antonio Abreu - The El Sistema music revolution",
"Robert Gupta Joshua Roman - On violin and cello Passacaglia",
"Leslie Morgan Steiner - Why domestic violence victims dont leave",
"Robert Full - Robots inspired by cockroach ingenuity",
"Marco Tempest - Augmented reality techno-magic",
"John Underkoffler - Pointing to the future of UI",
"Raul Midon - Tembererana",
"Jim Al-Khalili - How quantum biology might explain lifes biggest questions",
"Daniel Kraft - Medicines future Theres an app for that",
"Mark Bezos - A life lesson from a volunteer firefighter",
"Kelli Anderson - Design to challenge reality",
"Danielle de Niese - A flirtatious aria",
"Anastasia Taylor-Lind - Fighters and mourners of the Ukrainian revolution",
"Kirk Citron - And now the real news",
"Emma Teeling - The secret of the bat genome",
"Fiorenzo Omenetto - Silk the ancient material of the future",
"Cesar Harada - A novel idea for cleaning up oil spills",
"AJ Jacobs - How healthy living nearly killed me",
"Steven Levitt - The freakonomics of crack dealing",
"Sheena Iyengar - How to make choosing easier",
"Shimon Schocken - The self-organizing computer course",
"Trevor Aaronson - How this FBI strategy is actually creating US-based terrorists",
"Frank Gehry - A master architect asks Now what",
"Keith Barry - Brain magic",
"Apollo Robbins - The art of misdirection",
"Pamelia Kurstin - The untouchable music of the theremin",
"Nathalie Miebach - Art made of storms",
"Vusi Mahlasela - Woza",
"Andrew Solomon - Depression the secret we share",
"Paola Antonelli - Why I brought Pac-Man to MoMA",
"Raffaello DAndrea - Meet the dazzling flying machines of the future",
"Ray Anderson - The business logic of sustainability",
"Gary Wolf - The quantified self",
"Aaron Koblin - Visualizing ourselves ... with crowd-sourced data",
"Morgana Bailey - The danger of hiding who you are",
"Markus Fischer - A robot that flies like a bird",
"Eli Beer - The fastest ambulance A motorcycle",
"Dan Gilbert - The psychology of your future self",
"Paola Antonelli - Treat design as art",
"Angelo Vermeulen - How to go to space without having to go to space",
"Brian Skerry - The oceans glory -- and horror",
"James Randi - Homeopathy quackery and fraud",
"Edith Widder - Glowing life in an underwater world",
"Laura Schulz - The surprisingly logical minds of babies",
"Maria Bezaitis - Why we need strangeness",
"Caitria Morgan ONeill - How to step up in the face of disaster",
"Ivan Coyote - Why we need gender-neutral bathrooms",
"Bill Gates - How state budgets are breaking US schools",
"Adam Savage - My obsession with objects and the stories they tell",
"Avi Rubin - All your devices can be hacked",
"Eduardo Paes - The 4 commandments of cities",
"Danny Hillis - Understanding cancer through proteomics",
"Leslie Dodson - Dont misrepresent Africa",
"Jessica Green - Were covered in germs. Lets design for that.",
"Carl Safina - The oil spills unseen villains -- and victims",
"Eben Bayer - Are mushrooms the new plastic",
"Cameron Russell - Looks arent everything. Believe me Im a model.",
"Hannah Fry - The mathematics of love",
"Dimitar Sasselov - How we found hundreds of potential Earth-like planets",
"Julian Assange - Why the world needs WikiLeaks",
"Drew Berry - Animations of unseeable biology",
"Larry Smith - Why you will fail to have a great career",
"Jim Hemerling - 5 ways to lead in an era of constant change",
"Eric Liu - Why ordinary people need to understand power",
"Andrew Solomon - How the worst moments in our lives make us who we are",
"Jakob Trollback - A new kind of music video",
"Tali Sharot - The optimism bias",
"Ian Ritchie - The day I turned down Tim Berners-Lee",
"Julia Bacha - Pay attention to nonviolence",
"David Griffin - How photography connects us",
"Chris Abani - Telling stories from Africa",
"David Carson - Design and discovery",
"Jay Walker - My library of human imagination",
"Jim Toomey - Learning from Sherman the shark",
"Alison Killing - What happens when a city runs out of room for its dead",
"Ron Gutman - The hidden power of smiling",
"Laura Snyder - The Philosophical Breakfast Club",
"Hilary Cottam - Social services are broken. How we can fix them",
"Derek Sivers - Keep your goals to yourself",
"Nancy Duarte - The secret structure of great talks",
"Paula Scher - Great design is serious not solemn",
"Paul Romer - Why the world needs charter cities",
"R.A. Mashelkar - Breakthrough designs for ultra-low-cost products",
"Bill Gates - Mosquitos malaria and education",
"Michael Archer - How well resurrect the gastric brooding frog the Tasmanian tiger",
"Dan Ariely - Are we in control of our own decisions",
"Anand Giridharadas - A letter to all who have lost in this era",
"Paul Root Wolpe - Its time to question bio-engineering",
"David Eagleman - Can we create new senses for humans",
"Allan Jones - A map of the brain",
"William Li - Can we eat to starve cancer",
"Young-ha Kim - Be an artist right now",
"Paul Rothemund - DNA folding in detail",
"Thomas P. Campbell - Weaving narratives in museum galleries",
"Adam de la Zerda - We can start winning the war against cancer",
"Carl Honor - In praise of slowness",
"Mikko Hypponen - Three types of online attack",
"Genevieve von Petzinger - Why are these 32 symbols found in ancient caves all over Europe",
"Simon Berrow - How do you save a shark you know nothing about",
"Christopher McDougall - Are we born to run",
"Hillel Cooperman - Legos for grownups",
"Ethan Zuckerman - Listening to global voices",
"Richard Dawkins - Why the universe seems so strange",
"Shubhendu Sharma - How to grow a forest in your backyard",
"Shaffi Mather - A new way to fight corruption",
"Zeynep Tufekci - Machine intelligence makes human morals more important",
"Joshua Prager - In search of the man who broke my neck",
"David Kelley - Human-centered design",
"Sam Harris - Can we build AI without losing control over it",
"Don Levy - A cinematic journey through visual effects",
"Julia Bacha - How women wage conflict without violence",
"Ananda Shankar Jayant - Fighting cancer with dance",
"Jimmy Wales - The birth of Wikipedia",
"Michael Sandel - Why we shouldnt trust markets with our civic life",
"Mina Bissell - Experiments that point to a new understanding of cancer",
"Dean Kamen - Luke a new prosthetic arm for soldiers",
"Bill Gates - Teachers need real feedback",
"Jamie Heywood - The big idea my brother inspired",
"Emmanuel Jal - The music of a war child",
"Christiana Figueres - The inside story of the Paris climate agreement",
"Wael Ghonim - Inside the Egyptian revolution",
"William McDonough - Cradle to cradle design",
"Richard Weller - Could the sun be good for your heart",
"Meg Jay - Why 30 is not the new 20",
"Marco Tempest - A magical tale with augmented reality",
"Lauren Hodge Shree Bose Naomi Shah - Award-winning teenage science in action",
"Marco Tempest - The electric rise and fall of Nikola Tesla",
"Nikolai Begg - A tool to fix one of the most dangerous moments in surgery",
"Roman Mars - Why city flags may be the worst-designed thing youve never noticed",
"Adam Garone - Healthier men one moustache at a time",
"Ruby Wax - Whats so funny about mental illness",
"Larry Burns - The future of cars",
"Kelly McGonigal - How to make stress your friend",
"Heather Knight - Silicon-based comedy",
"Skylar Tibbits - Can we make things that make themselves",
"Aakash Odedra - A dance in a hurricane of paper wind and light",
"Gregory Stock - To upgrade is human",
"Anand Varma - The first 21 days of a bees life",
"Joseph Kim - The family I lost in North Korea. And the family I gained.",
"Mundano - Pimp my ... trash cart",
"Manu Prakash - A 50-cent microscope that folds like origami",
"Susan Lim - Transplant cells not organs",
"Michael Shermer - The pattern behind self-deception",
"James B. Glattfelder - Who controls the world",
"Isaac Mizrahi - Fashion and creativity",
"Matthew Carter - My life in typefaces",
"Deb Roy - The birth of a word",
"Gary Flake - Is Pivot a turning point for web exploration",
"Stefano Mancuso - The roots of plant intelligence",
"Danny Hillis - Back to the future of 1994",
"Chris Milk - How virtual reality can create the ultimate empathy machine",
"Tony Fadell - The first secret of design is ... noticing",
"Tim Brown - Designers -- think big",
"Janine Benyus - Biomimicrys surprising lessons from natures engineers",
"Anna Mracek Dietrich - A plane you can drive",
"Samantha Power - A complicated hero in the war on dictatorship",
"David Cameron - The next age of government",
"Anne Milgram - Why smart statistics are the key to fighting crime",
"Alejandro Aravena - My architectural philosophy Bring the community into the process",
"Jeremy Howard - The wonderful and terrifying implications of computers that can learn",
"Hans and Ola Rosling - How not to be ignorant about the world",
"Paul Gilding - The Earth is full",
"Patricia Kuhl - The linguistic genius of babies",
"Dambisa Moyo - Economic growth has stalled. Lets fix it",
"Siddhartha Mukherjee - Soon well cure diseases with a cell not a pill",
"Robin Ince - Science versus wonder",
"Nadia Lopez - Why open a school To close a prison",
"Johan Rockstrom - Let the environment guide our development",
"Liza Donnelly - Drawing on humor for change",
"Nate Garvis - Change our culture change our world",
"Wendy Freedman - This new telescope might show us the beginning of the universe",
"Gavin Pretor-Pinney - Cloudy with a chance of joy",
"Tom Rielly - A comic sendup of TED2006",
"Colin Powell - Kids need structure",
"Allan Savory - How to fight desertification and reverse climate change",
"Jill Tarter - Join the SETI search",
"Leila Hoteit - 3 lessons on success from an Arab businesswoman",
"Emily Levine - A theory of everything",
"Brenda Laurel - Why not make video games for girls",
"Mathias Jud - Art that lets you talk back to NSA spies",
"Alisa Miller - How the news distorts our worldview",
"Jean-Baptiste Michel - The mathematics of history",
"Theaster Gates - How to revive a neighborhood with imagination beauty and art",
"Ron Eglash - The fractals at the heart of African designs",
"Carol Fishman Cohen - How to get back to work after a career break",
"Chris Domas - The 1s and 0s behind cyber warfare",
"Rodney Brooks - Why we will rely on robots",
"Honor Harger - A history of the universe in sound",
"Dan Buettner - How to live to be 100",
"Dan Dennett - Cute sexy sweet funny",
"Ray Kurzweil - Get ready for hybrid thinking",
"Margaret Heffernan - The dangers of willful blindness",
"iO Tillett Wright - Fifty shades of gay",
"Julian Treasure - How to speak so that people want to listen",
"Phil Plait - How to defend Earth from asteroids",
"Rose George - Inside the secret shipping industry",
"Beau Lotto Amy OToole - Science is for everyone kids included",
"Rebecca Saxe - How we read each others minds",
"Rives - Reinventing the encyclopedia game",
"Elliot Krane - The mystery of chronic pain",
"Dan Dennett - Dangerous memes",
"David Hoffman - Sputnik mania",
"David Logan - Tribal leadership",
"Yves Morieux - As work gets more complex 6 rules to simplify",
"Chip Conley - Measuring what makes life worthwhile",
"Jonathan Drori - Why were storing billions of seeds",
"Allison Hunt - How to get a new hip",
"David Anderson - Your brain is more than a bag of chemicals",
"Fredy Peccerelli - A forensic anthropologist who brings closure for thedisappeared",
"Sophie Hunger - Songs of secrets and city lights",
"Bran Ferren - To create for the ages lets combine art and engineering",
"Jacqueline Novogratz - Invest in Africas own solutions",
"Amory Lovins - Winning the oil endgame",
"Daniele Quercia - Happy maps",
"Sunni Brown - Doodlers unite",
"Jessica Green - Are we filtering the wrong microbes",
"Richard St. John - Success is a continuous journey",
"Phil Borges - Photos of endangered cultures",
"Philip Zimbardo - The psychology of evil",
"Richard St. John - 8 secrets of success",
"Tony Porter - A call to men",
"Nirmalya Kumar - Indias invisible innovation",
"Jacqueline Novogratz - Patient capitalism",
"Philippe Petit - The journey across the high wire",
"Billy Collins - Two poems about what dogs think probably",
"Robin Murphy - These robots come to the rescue after a disaster",
"Andrew Pelling - This scientist makes ears out of apples",
"Louise Fresco - We need to feed the whole world",
"Kwame Anthony Appiah - Is religion good or bad This is a trick question",
"Ziauddin Yousafzai - My daughter Malala",
"Jared Ficklin - New ways to see music with color and fire",
"Liz Coleman - A call to reinvent liberal arts education",
"Jill Farrant - How we can make crops survive without water",
"Steven Pinker - What our language habits reveal",
"Larry Brilliant - The case for optimism",
"Wingham Rowan - A new kind of job market",
"Patrick Chappatte - The power of cartoons",
"Bruce Feiler - Agile programming -- for your family",
"Clay Shirky - Why SOPA is a bad idea",
"Trita Parsi - Iran and Israel Peace is possible",
"Nathan Wolfe - Whats left to explore",
"Gustavo Dudamel and the Teresa Carreo Youth Orchestra - El Sistemas top youth orchestra",
"Dee Boersma - Pay attention to penguins",
"Jer Thorp - Make data more human",
"Ed Boyden - A new way to study the brains invisible secrets",
"Kent Larson - Brilliant designs to fit more people in every city",
"Kare Anderson - Be an opportunity maker",
"Chris Urmson - How a driverless car sees the road",
"Annie Murphy Paul - What we learn before were born",
"Esther Perel - The secret to desire in a long-term relationship",
"James Watson - How we discovered DNA",
"Robert Lang - The math and magic of origami",
"Matthew OReilly - Am I dying The honest answer.",
"Erica Frenkel - The universal anesthesia machine",
"Bill Strickland - Rebuilding a neighborhood with beauty dignity hope",
"Raghava KK - Shake up your story",
"Nathan Myhrvold - Archeology animal photography BBQ ...",
"Eric Lewis - Chaos and harmony on piano",
"Anand Agarawala - Rethink the desktop with BumpTop",
"Nicholas Christakis - The hidden influence of social networks",
"Gayle Tzemach Lemmon - Meet the women fighting on the front lines of an American war",
"Melissa Walker - Art can heal PTSDs invisible wounds",
"Usman Riaz Preston Reed - A young guitarist meets his hero",
"Alain de Botton - Atheism 2.0",
"David Keith - A critical look at geoengineering against climate change",
"Guy Winch - Why we all need to practice emotional first aid",
"Sebastian Wernicke - Lies damned lies and statistics about TEDTalks",
"Paul Nicklen - Animal tales from icy wonderlands",
"Rick Smolan - The story of a girl",
"Ami Klin - A new way to diagnose autism",
"Stephen Palumbi - Hidden toxins in the fish we eat",
"Stewart Brand - What squatter cities can teach us",
"Yves Behar - A supercharged motorcycle design",
"Peter Ward - A theory of Earths mass extinctions",
"Stephen Burt - Why people need poetry",
"Jonathan Eisen - Meet your microbes",
"Kim Gorgens - Protecting the brain against concussion",
"Shane Koyczan - To This Day ... for the bullied and beautiful",
"Dan Meyer - Math class needs a makeover",
"Stuart Firestein - The pursuit of ignorance",
"Paul Collier - The bottom billion",
"Simon Anholt - Which country does the most good for the world",
"Rose Goslinga - Crop insurance an idea worth seeding",
"Michael Shermer - Why people believe weird things",
"Marisa Fick-Jordan - The wonder of Zulu wire art",
"Yassmin Abdel-Magied - What does my headscarf mean to you",
"Bruno Maisonnier - Dance tiny robots",
"Diane Benscoter - How cults rewire the brain",
"Yves Rossy - Fly with the Jetman",
"David Bismark - E-voting without fraud",
"Joe Gebbia - How Airbnb designs for trust",
"Jim Holt - Why does the universe exist",
"Uldus Bakhtiozina - Wry photos that turn stereotypes upside down",
"Aaron Huey - Americas native prisoners of war",
"Sebastian Deterding - What your designs say about you",
"Didier Sornette - How we can predict the next financial crisis",
"Annette Heuser - The 3 agencies with the power to make or break economies",
"Yuval Noah Harari - What explains the rise of humans",
"Amanda Palmer - The art of asking",
"Scott McCloud - The visual magic of comics",
"Sophie Scott - Why we laugh",
"Daniel Wolpert - The real reason for brains",
"Danielle Feinberg - The magic ingredient that brings Pixar movies to life",
"Jeffrey Kluger - The sibling bond",
"Casey Gerald - The gospel of doubt",
"Juan Enriquez - The life code that will reshape the future",
"Rob Forbes - Ways of seeing",
"Roxane Gay - Confessions of a bad feminist",
"Anne Curzan - What makes a word real",
"Kees Moeliker - How a dead duck changed my life",
"Harald Haas - Wireless data from every light bulb",
"Handspring Puppet Co. - The genius puppetry behind War Horse",
"Paul Piff - Does money make you mean",
"Drew Dudley - Everyday leadership",
"Hugh Evans - What does it mean to be a citizen of the world",
"Joshua Prager - Wisdom from great writers on every year of life",
"Henry Evans and Chad Jenkins - Meet the robots for humanity",
"Guillaume Nry - The exhilarating peace of freediving",
"Linda Cliatt-Wayman - How to fix a broken school Lead fearlessly love hard",
"Lee Hotz - Inside an Antarctic time machine",
"Ursus Wehrli - Tidying up art",
"Monika Bulaj - The hidden light of Afghanistan",
"Francis de los Reyes - Sanitation is a basic human right",
"Esther Perel - Rethinking infidelity ... a talk for anyone who has ever loved",
"Mallika Sarabhai - Dance to change the world",
"Jedidah Isler - The untapped genius that could change science for the better",
"Ravin Agrawal - 10 young Indian artists to watch",
"Steven Addis - A father-daughter bond one photo at a time",
"Golan Levin - Software as art",
"Jon Ronson - When online shaming spirals out of control",
"Tan Le - A headset that reads your brainwaves",
"Kwabena Boahen - A computer that works like the brain",
"John Doerr - Salvation and profit in greentech",
"Terry Moore - Why is x the unknown",
"John Hockenberry - We are all designers",
"Georgette Mulheir - The tragedy of orphanages",
"Rita Pierson - Every kid needs a champion",
"Philip Rosedale - Life in Second Life",
"Clay Shirky - Institutions vs. collaboration",
"Todd Humphreys - How to fool a GPS",
"Paul MacCready - Nature vs. humans",
"Chrystia Freeland - The rise of the new global super-rich",
"Conrad Wolfram - Teaching kids real math with computers",
"Gordon Brown - Global ethic vs. national interest",
"Joey Alexander - An 11-year-old prodigy performs old-school jazz",
"Jason deCaires Taylor - An underwater art museum teeming with life",
"Diana Nyad - Never ever give up",
"Bandi Mbubi - Demand a fair trade cell phone",
"Lee Cronin - Making matter come alive",
"Kate Orff - Reviving New Yorks rivers -- with oysters",
"Peter Doolittle - How your working memory makes sense of the world",
"Jarrett J. Krosoczka - Why lunch ladies are heroes",
"Sherwin Nuland - The extraordinary power of ordinary people",
"Bernie Krause - The voice of the natural world",
"Richard Turere - My invention that made peace with lions",
"Lorrie Faith Cranor - Whats wrong with your paw0rd",
"Joel Selanikio - The big-data revolution in healthcare",
"Jack Choi - On the virtual dissection table",
"Stephen Petranek - 10 ways the world could end",
"Lesley Hazleton - On reading the Koran",
"Jacqueline Novogratz - A third way to think about aid",
"Sean Follmer - Shape-shifting tech will change work as we know it",
"Willard Wigan - Hold your breath for micro-sculpture",
"Vern Myers - How to overcome our biases Walk boldly toward them",
"Milton Glaser - Using design to make ideas new",
"Lakshmi Pratury - The lost art of letter-writing",
"Hans Rosling - The magic washing machine",
"Richard Baraniuk - The birth of the open-source learning revolution",
"Leonard Susskind - My friend Richard Feynman",
"Annie Lennox - Why I am an HIVAIDS activist",
"Chris Gerdes - The future race car -- 150mph and no driver",
"Jonathan Harris - The Webs secret stories",
"Irwin Redlener - How to survive a nuclear attack",
"Robert Full - The sticky wonder of gecko feet",
"Marc Abrahams - A science award that makes you laugh then think",
"Laura Boushnak - The deadly legacy of cluster bombs",
"Chris Jordan - Turning powerful stats into art",
"Deborah Scranton - An Iraq war movie crowd-sourced from soldiers",
"Jody Williams - A realistic vision for world peace",
"Dan Ariely - How equal do we want the world to be Youd be surprised",
"Jaime Lerner - A song of the city",
"Bill and Melinda Gates - Why giving away our wealth has been the most satisfying thing weve done",
"Dan Dennett - Lets teach religion -- all religion -- in schools",
"Kevin Bales - How to combat modern slavery",
"Larry Page - Wheres Google going next",
"Shereen El Feki - Pop culture in the Arab world",
"Mikko Hypponen - Fighting viruses defending the net",
"Bill Ford - A future beyond traffic gridlock",
"Daniel Kahneman - The riddle of experience vs. memory",
"Karen Tse - How to stop torture",
"Matt Kenyon - A secret memorial for civilian casualties",
"Alexander Betts - Our refugee system is failing. Heres how we can fix it",
"Philippe Starck - Design and destiny",
"Jonathan Tepperman - The risky politics of progress",
"Christopher Soghoian - How to avoid surveillance ... with the phone in your pocket",
"Eric Sanderson - New York -- before the City",
"Robert Thurman - Expanding your circle of compassion",
"David Pizarro - The strange politics of disgust",
"Susan Etlinger - What do we do with all this big data",
"Michael Nielsen - Open science now",
"Bill Joy - What Im worried about what Im excited about",
"David Macaulay - An illustrated journey through Rome",
"Jamie Oliver - Teach every child about food",
"Mark Shaw - One very dry demo",
"Trevor Copp and Jeff Fox - Ballroom dance that breaks gender roles",
"David Pogue - 10 top time-saving tech tips",
"Ben Kacyra - Ancient wonders captured in 3D",
"Ione Wells - How we talk about sexual assault online",
"Joseph Lekuton - A parable for Kenya",
"C.K. Williams - Poetry of youth and age",
"Ramsey Musallam - 3 rules to spark learning",
"Aditi Shankardass - A second opinion on developmental disorders",
"Tyler DeWitt - Hey science teachers -- make it fun",
"Terry Moore - How to tie your shoes",
"Barry Schwartz - Our loss of wisdom",
"Andreas Ekstrm - The moral bias behind your search results",
"Emilie Wapnick - Why some of us dont have one true calling",
"Donald Sadoway - The missing link to renewable energy",
"Jason Clay - How big brands can help save biodiversity",
"Gian Giudice - Why our universe might exist on a knife-edge",
"Sue Austin - Deep sea diving ... in a wheelchair",
"Charmian Gooch - My wish To launch a new era of openness in business",
"Myriam Sidibe - The simple power of hand-washing",
"Parul Sehgal - An ode to envy",
"Enrique Pealosa - Why buses represent democracy in action",
"Andras Forgacs - Leather and meat without killing animals",
"Steven Levitt - Surprising stats about child carseats",
"Paul Stamets - 6 ways mushrooms can save the world",
"Melinda Gates - What nonprofits can learn from Coca-Cola",
"Andy Yen - Think your emails private Think again",
"Stew - Black Men Ski",
"Jose Miguel Sokoloff - How Christmas lights helped guerrillas put down their guns",
"Rachel Pike - The science behind a climate headline",
"Paul Pholeros - How to reduce poverty Fix homes",
"Andrea Ghez - The hunt for a supermassive black hole",
"David Pogue - The music wars",
"Lawrence Lessig - We the People and the Republic we must reclaim",
"Nadine Burke Harris - How childhood trauma affects health across a lifetime",
"Jonas Gahr Stre - In defense of dialogue",
"Kathryn Schulz - Dont regret regret",
"Magda Sayeg - How yarn bombing grew into a worldwide movement",
"Dustin Yellin - A journey through the mind of an artist",
"Munir Virani - Why I love vultures",
"Laura Robinson - The secrets I find on the mysterious ocean floor",
"Robert Waldinger - What makes a good life Lessons from the longest study on happiness",
"Dan Berkenstock - The world is one big dataset. Now how to photograph it ...",
"Arthur Benjamin - A performance of Mathemagic",
"Rives - A story of mixed emoticons",
"Eddi Reader - What Youve Got",
"Adam Foss - A prosecutors vision for a better justice system",
"Eric Haseltine - What will be the next big scientific breakthrough",
"David Birch - A new way to stop identity theft",
"Ethan Nadelmann - Why we need to end the War on Drugs",
"Kitra Cahana - My father locked in his body but soaring free",
"Saul Griffith - High-altitude wind energy from kites",
"Sunitha Krishnan - The fight against sex slavery",
"Russ Altman - What really happens when you mix medications",
"Oliver Sacks - What hallucination reveals about our minds",
"Elora Hardy - Magical houses made of bamboo",
"Robin Morgan - 4 powerful poems about Parkinsons and growing older",
"Aziz Abu Sarah - For more tolerance we need more ... tourism",
"Andrew Youn - 3 reasons why we can win the fight against poverty",
"Ricardo Semler - How to run a company with almost no rules",
"Skylar Tibbits - The emergence of 4D printing",
"Franco Sacchi - A tour of Nollywood Nigerias booming film industry",
"David R. Dow - Lessons from death row inmates",
"David Sengeh - The sore problem of prosthetic limbs",
"Stefana Broadbent - How the Internet enables intimacy",
"Daniel Levitin - How to stay calm when you know youll be stressed",
"Sheryl Sandberg - Why we have too few women leaders",
"Chris Bliss - Comedy is translation",
"Chris Milk - The birth of virtual reality as an art form",
"Nigel Marsh - How to make work-life balance work",
"Ian Goldin - Navigating our global future",
"James Veitch - This is what happens when you reply to spam email",
"Karen Thompson Walker - What fear can teach us",
"Brian Cox - Why we need the explorers",
"Helen Fisher - Why we love why we cheat",
"Stefan Larsson - What doctors can learn from each other",
"Harvey Fineberg - Are we ready for neo-evolution",
"George Monbiot - For more wonder rewild the world",
"Peter Molyneux - Meet Milo the virtual boy",
"Rogier van der Heide - Why light needs darkness",
"Frank Gehry - My days as a young rebel",
"His Holiness the Karmapa - The technology of the heart",
"Amy Cuddy - Your body language shapes who you are",
"Yochai Benkler - The new open-source economics",
"Janna Levin - The sound the universe makes",
"Hawa Abdi Deqo Mohamed - Mother and daughter doctor-heroes",
"Thomas Insel - Toward a new understanding of mental illness",
"Arthur Ganson - Moving sculpture",
"Christina Warinner - Tracking ancient diseases using ... plaque",
"Yoav Medan - Ultrasound surgery -- healing without cuts",
"Meklit Hadero - The unexpected beauty of everyday sounds",
"Al Vernacchio - Sex needs a new metaphor. Heres one ...",
"Yanis Varoufakis - Capitalism will eat democracy -- unless we speak up",
"Becci Manson - Retouching lives through photos",
"John La Grou - A plug for smart power outlets",
"Jonathan Haidt - The moral roots of liberals and conservatives",
"Edi Rama - Take back your city with paint",
"Seth Berkley - HIV and flu -- the vaccine strategy",
"Vijay Kumar - The future of flying robots",
"Bruce Bueno de Mesquita - A prediction for the future of Iran",
"Jehane Noujaim - My wish A global day of film",
"Aimee Mullins - My 12 pairs of legs",
"Dan Ariely - Our buggy moral code",
"Wolfgang Kessling - How to air-condition outdoor spaces",
"Sam Harris - Science can answer moral questions",
"Jill Heinerth - The mysterious world of underwater caves",
"Tom Shannon - Anti-gravity sculpture",
"William Kamkwamba - How I built a windmill",
"Rachel Botsman - The currency of the new economy is trust",
"Hector Ruiz - The thinking behind 50x15",
"Noreena Hertz - How to use experts -- and when not to",
"Michael Shellenberger - How fear of nuclear power is hurting the environment",
"Richard Dawkins - Militant atheism",
"Marco Tempest - And for my next trick a robot",
"Tavi Gevinson - A teen just trying to figure it out",
"Mitch Resnick - Lets teach kids to code",
"Stefan Sagmeister - Designing with slogans",
"Miwa Matreyek - Glorious visions in animation and performance",
"Fabian Hemmert - The shape-shifting future of the mobile phone",
"Kiran Bedi - A police chief with a difference",
"Mohamed Ali - The link between unemployment and terrorism",
"Nalini Nadkarni - Life science in prison",
"Ruth Chang - How to make hard choices",
"Keith Nolan - Deaf in the military",
"Rives - The 4 a.m. mystery",
"Charles Moore - Seas of plastic",
"John Hardy - My green school dream",
"Ze Frank - Nerdcore comedy",
"Laura Trice - Remember to say thank you",
"Shawn Achor - The happy secret to better work",
"Steve Ramirez and Xu Liu - A mouse. A laser beam. A manipulated memory.",
"Tom Hulme - What can we learn from shortcuts",
"Philip K. Howard - Four ways to fix a broken legal system",
"Benjamin Barber - Why mayors should rule the world",
"Clay Shirky - How the Internet will one day transform government",
"Jake Barton - The museum of you",
"Taryn Simon - The stories behind the bloodlines",
"Onyx Ashanti - This is beatjazz",
"Bassam Tariq - The beauty and diversity of Muslim life",
"Edith Widder - How we found the giant squid",
"Lawrence Lessig - Re-examining the remix",
"Edward Burtynsky - My wish Manufactured landscapes and green education",
"Tim Ferriss - Smash fear learn anything",
"Geoff Mulgan - A short intro to the Studio School",
"Mellody Hobson - Color blind or color brave",
"Lee Smolin - Science and democracy",
"Lewis Pugh - How I swam the North Pole",
"Kate Hartman - The art of wearable communication",
"Anant Agarwal - Why massive open online courses still matter",
"Chris Bangle - Great cars are great art",
"Gayle Tzemach Lemmon - Women entrepreneurs example not exception",
"Linus Torvalds - The mind behind Linux",
"Melati and Isabel Wijsen - Our campaign to ban plastic bags in Bali",
"Nathaniel Kahn - Scenes from My Architect",
"Peter Tyack - The intriguing sound of marine mammals",
"Sarah Kaminsky - My father the forger",
"Anupam Mishra - The ancient ingenuity of water harvesting",
"Ameenah Gurib-Fakim - Humble plants that hide surprising secrets",
"Elizabeth Murchison - Fighting a contagious cancer",
"Billy Collins - Everyday moments caught in time",
"Nalini Nadkarni - Conserving the canopy",
"Gregory Petsko - The coming neurological epidemic",
"Anders Fjellberg - Two nameless bodies washed up on the beach. Here are their stories",
"Jimmy Carter - Why I believe the mistreatment of women is the number one human rights abuse",
"Ze Frank - My web playroom",
"Herbie Hancock - An all-star set",
"Anthony Atala - Printing a human kidney",
"Andres Lozano - Parkinsons depression and the switch that might turn them off",
]
| unlicense | -8,108,918,296,976,592,000 | 57.398503 | 103 | 0.700912 | false |
open-io/oio-swift | oioswift/common/middleware/regexcontainer.py | 1 | 2932 | # Copyright (C) 2017-2018 OpenIO SAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from swift.common.utils import config_true_value, get_logger
from oioswift.common.middleware.autocontainerbase import AutoContainerBase
from oio.common.autocontainer import RegexContainerBuilder
from oio.common.exceptions import ConfigurationException
try:
# Available since oio-sds 4.2
from oio.common.autocontainer import NoMatchFound
except ImportError:
NoMatchFound = ValueError
class RegexContainerMiddleware(AutoContainerBase):
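    """Derive the container name from the object URL via regex patterns."""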
BYPASS_QS = "bypass-autocontainer"
BYPASS_HEADER = "X-bypass-autocontainer"
def __init__(self, app, acct, patterns, failsafe=False,
**kwargs):
super(RegexContainerMiddleware, self).__init__(
app, acct, **kwargs)
self.con_builder = RegexContainerBuilder(patterns)
self.failsafe = failsafe
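
    # When no configured pattern matches, either fall through to the wrapped
    # app untouched (failsafe mode) or let NoMatchFound propagate.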
def _call(self, env, start_response):
try:
return super(RegexContainerMiddleware, self)._call(
env, start_response)
except NoMatchFound:
if self.failsafe:
return self.app(env, start_response)
raise
def filter_factory(global_conf, **local_config):
conf = global_conf.copy()
conf.update(local_config)
acct = conf.get('sds_default_account')
if acct is None:
raise ConfigurationException('No OIO-SDS account configured')
account_first = config_true_value(local_config.get('account_first'))
failsafe = config_true_value(local_config.get('failsafe'))
swift3_compat = config_true_value(local_config.get('swift3_compat'))
strip_v1 = config_true_value(local_config.get('strip_v1'))
# By default this is enabled, to be compatible with openio-sds < 4.2.
stop_at_first_match = config_true_value(
local_config.get('stop_at_first_match', True))
pattern_dict = {k: v for k, v in local_config.items()
if k.startswith("pattern")}
def factory(app):
patterns = [pattern_dict[k] for k in sorted(pattern_dict.keys())]
logger = get_logger(conf)
logger.info("Using patterns %s", patterns)
return RegexContainerMiddleware(
app, acct, patterns,
strip_v1=strip_v1, account_first=account_first,
swift3_compat=swift3_compat,
stop_at_first_match=stop_at_first_match,
failsafe=failsafe)
return factory
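
# Illustrative paste-deploy stanza for this filter (a sketch only: the entry
# point name and option values below are assumptions, not taken from the
# project's documentation):
#
# [filter:regexcontainer]
# use = egg:oioswift#regexcontainer
# sds_default_account = AUTH_demo
# pattern1 = ^([^/]+)/
# failsafe = true
#
# Every option whose name starts with "pattern" is collected, and the
# patterns are tried in the sorted order of their option names.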
| apache-2.0 | 6,771,192,211,727,099,000 | 36.589744 | 74 | 0.683492 | false |
miikama/telegram-bot | bot2.py | 1 | 4609 | import urllib
import urllib2
import pprint
import json
import datetime
import time
import logging
from calendar_bot import CalendarClient
def is_paapaiva(client):
    '''Return 'TANAAN!!' if today is paapaiva, the date string of the next
    paapaiva otherwise, or None if no paapaiva occurs within the next
    10 days.
    '''
    # the events from the raati15 calendar for the next 10 days
    events = client.get_calendar_events(10)
#print(events)
#events is like [('2016-09-11T12:30:00+03:00', u'test event')]
if events:
        # map Finnish ä/Ä (aakkoset) to a/A before ASCII-encoding so the
        # 'paa'/'paiva' substring filter below can match
        ascii_events = [(x[0], x[1].replace(u'\xe4', u'a').replace(u'\xc4', u'A').encode('ascii', 'xmlcharrefreplace')) for x in events]
#filtering only paapaivat
only_paapaivas = [x for x in ascii_events if 'paa' in x[1].lower() and 'paiva' in x[1].lower() ]
#print(only_paapaivas)
for paiva in only_paapaivas:
#date parsing
stripped_date = paiva[0][0:10]
calendar_date = datetime.datetime.strptime(stripped_date, '%Y-%m-%d')
            # check whether today is paapaiva: truncate the current UTC time
            # to midnight so only the date part is compared
            now = datetime.datetime.utcnow()
            today = now - datetime.timedelta(minutes=now.minute, hours=now.hour, seconds=now.second, microseconds=now.microsecond)
#print(calendar_date)
#print(today)
if calendar_date == today:
return "TANAAN!!"
else:
return "{}".format(stripped_date)
return None
else:
return None
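
# Illustrative usage (a sketch; assumes CalendarClient is configured with
# valid Google Calendar credentials for the calendar id below):
# client = CalendarClient('[email protected]')
# print(is_paapaiva(client))  # -> 'TANAAN!!', a 'YYYY-MM-DD' string, or None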
from telegram.error import (TelegramError, Unauthorized, BadRequest, TimedOut, ChatMigrated, NetworkError)
from telegram.ext import CommandHandler
from telegram.ext import Updater
tanaan_photo_address = 'AgADBAADBeY1G5sdZAeZOQAB_xifyPymVaAZAARU0-rzUc8xq5I8AAIC' # 'http://i.imgur.com/2k3j2NA.jpg'
fugee_rooriin_address ='AgADBAADKeI1G1caZAeDNH-tzcHDX8VYoBkABKVGDyIMeSxuQz0AAgI' #'http://i.imgur.com/ykFysmr.jpg'
ei_tanaan_address = 'AgADBAADLNM1GxUdZAfdLhEdfQINz65boBkABN7nsRV8UWIQwSAAAgI' #'http://i.imgur.com/nxkzkpW.jpg'
calendar_id = '[email protected]' #id for raati 15 calendar
calendar_client = CalendarClient(calendar_id)
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',level=logging.INFO)
botid = '268119392:AAErkOPlFBVJIG7Yc_L2m-IzRA0f67tz7qg'
test_botid = '301043923:AAE0VP2x_wWV70s-Yvz3N4_InhG0ShIGhyA'
updater = Updater(token=botid)
dispatcher = updater.dispatcher
#starting
def start(bot, update):
bot.sendMessage(chat_id=update.message.chat_id, text="I'm a bot, please talk to me!")
def stop(bot, update):
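    # Caveat: calling updater.stop() from inside a handler can block, since
    # stop() waits on the dispatcher thread that is running this handler.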
updater.stop()
updater.idle()
#paapaiva
def paapaiva(bot, update):
paapaiva = is_paapaiva(calendar_client)
if paapaiva:
bot.sendMessage(chat_id=update.message.chat_id, text=("Seuraava PAAPAIVA on:\n" + paapaiva) )
if paapaiva == "TANAAN!!":
bot.sendPhoto(chat_id=update.message.chat_id, photo=tanaan_photo_address)
    else:
        bot.send_message(chat_id=update.message.chat_id, text="Ei PAAPAIVAA seuraavaan 10 paivaan :(")
#fugee
def fugee(bot, update):
msg = bot.sendPhoto(chat_id=update.message.chat_id, photo=fugee_rooriin_address)
#ei
def ei(bot, update):
msg = bot.sendPhoto(chat_id=update.message.chat_id, photo=ei_tanaan_address)
#pprint.pprint("sent photo id: " + msg.photo[0].file_id)
#error handling
def error_callback(bot, update, error):
try:
raise error
except Unauthorized:
print("unauthorized") # remove update.message.chat_id from conversation list
except BadRequest:
print("Badrequest") # handle malformed requests - read more below!
except TimedOut:
print("TimedOut") # handle slow connection problems
    except NetworkError:
        print("networkError") # handle other connection problems
except ChatMigrated as e:
print("chatmigrated") # the chat_id of a group has changed, use e.new_chat_id instead
except TelegramError:
print("telegramerror") # handle all other telegram related errors
start_handler = CommandHandler('start', start)
stop_handler = CommandHandler('stop', stop)
paapaiva_handler = CommandHandler('paapaiva', paapaiva)
fugee_handler = CommandHandler('fugee', fugee)
ei_handler = CommandHandler('ei', ei)
dispatcher.add_handler(start_handler) #handler '/start'
dispatcher.add_handler(stop_handler)
dispatcher.add_handler(paapaiva_handler) #handle '/paapaiva'
dispatcher.add_handler(fugee_handler)
dispatcher.add_handler(ei_handler)
dispatcher.add_error_handler(error_callback) #error handler
updater.start_polling(poll_interval=2.0, clean=True)
#curl -s -X POST "https://api.telegram.org/bot268119392:AAErkOPlFBVJIG7Yc_L2m-IzRA0f67tz7qg/sendPhoto" -F chat_id=89456514 -F photo="http://i.imgur.com/2k3j2NA.jpg"
| mit | 1,012,354,969,156,449,700 | 34.453846 | 164 | 0.73248 | false |