repo_name | path | copies | size | text | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
skipmodea1/plugin.video.roosterteeth | resources/lib/roosterteeth_list_episodes.py | 1 | 8176 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Imports
#
from future import standard_library
standard_library.install_aliases()
from builtins import object
import os
import requests
import sys
import urllib.request, urllib.parse, urllib.error
import xbmcgui
import xbmcplugin
import json
from roosterteeth_const import RESOURCES_PATH, HEADERS, LANGUAGE, convertToUnicodeString, log, \
FIRST_MEMBER_ONLY_VIDEO_TITLE_PREFIX, ROOSTERTEETH_BASE_URL
#
# Main class
#
class Main(object):
def __init__(self):
# Get the command line arguments
# Get the plugin url in plugin:// notation
self.plugin_url = sys.argv[0]
# Get the plugin handle as an integer number
self.plugin_handle = int(sys.argv[1])
# log("ARGV", repr(sys.argv))
#
# Parse parameters...
self.url = urllib.parse.parse_qs(urllib.parse.urlparse(sys.argv[2]).query)['url'][0]
self.next_page_possible = urllib.parse.parse_qs(urllib.parse.urlparse(sys.argv[2]).query)['next_page_possible'][0]
self.show_serie_name = urllib.parse.parse_qs(urllib.parse.urlparse(sys.argv[2]).query)['show_serie_name'][0]
# log("self.next_page_possible", self.next_page_possible)
# Make the next page url
if self.next_page_possible == 'True':
# Determine current item number, next item number, next_url
pos_of_page = self.url.rfind('page=')
# log("pos_of_page", pos_of_page)
if pos_of_page >= 0:
page_number_str = str(
self.url[pos_of_page + len('page='):pos_of_page + len('page=') + len('000')])
page_number = int(page_number_str)
self.page_number_next = page_number + 1
if self.page_number_next >= 100:
page_number_next_str = str(self.page_number_next)
elif self.page_number_next >= 10:
page_number_next_str = '0' + str(self.page_number_next)
else:
page_number_next_str = '00' + str(self.page_number_next)
self.next_url = self.url.replace('page=' + page_number_str, 'page=' + page_number_next_str)
# log("self.next_url", self.next_url)
#
# Get the videos...
#
self.getVideos()
#
# Get videos...
#
def getVideos(self):
#
# Init
#
# Create a list for our items.
listing = []
#
# Get HTML page
#
response = requests.get(self.url, headers=HEADERS)
html_source = response.text
html_source = convertToUnicodeString(html_source)
# log("html_source", html_source)
try:
json_data = json.loads(html_source)
except (ValueError, KeyError, TypeError):
xbmcgui.Dialog().ok(LANGUAGE(30000), LANGUAGE(30109))
exit(1)
for item in json_data['data']:
# log("item", item)
episode_title = item['attributes']['title']
caption = item['attributes']['caption']
length = item['attributes']['length']
channel_slug = item['attributes']['channel_slug']
# the url should be something like:
# https://svod-be.roosterteeth.com/api/v1/episodes/ffc530d0-464d-11e7-a302-065410f210c4/videos"
# or even
# https://svod-be.roosterteeth.com/api/v1/episodes/lets-play-2011-2/videos
technical_episode_url_last_part = item['links']['videos']
technical_episode_url = ROOSTERTEETH_BASE_URL + technical_episode_url_last_part
technical_url = technical_episode_url
log("technical_url", technical_url)
functional_episode_url_middle_part = item['links']['self']
functional_url = ROOSTERTEETH_BASE_URL + functional_episode_url_middle_part + '/videos'
log("functional_url", functional_url)
thumb = item['included']['images'][0]['attributes']['thumb']
serie_title = item['attributes']['show_title']
original_air_date = item['attributes']['original_air_date']
original_air_date = original_air_date[0:10]
# The backend still calls it sponsor instead of first member
is_first_member_only = item['attributes']['is_sponsors_only']
# let's put some more info in the title of the episode
if self.show_serie_name == "True":
title = serie_title + ' - ' + episode_title
else:
title = episode_title
if is_first_member_only:
title = FIRST_MEMBER_ONLY_VIDEO_TITLE_PREFIX + ' ' + title
title = convertToUnicodeString(title)
thumbnail_url = thumb
plot = caption
duration_in_seconds = length
studio = channel_slug
studio = convertToUnicodeString(studio)
studio = studio.replace("-", " ")
studio = studio.capitalize()
# Add to list...
list_item = xbmcgui.ListItem(title)
list_item.setInfo("video",
{"title": title, "studio": studio, "mediatype": "video", \
"plot": plot + '\n' + LANGUAGE(30318) + ' ' + original_air_date, \
"aired": original_air_date, "duration": duration_in_seconds})
list_item.setArt({'thumb': thumbnail_url, 'icon': thumbnail_url,
'fanart': os.path.join(RESOURCES_PATH, 'fanart-blur.jpg')})
list_item.setProperty('IsPlayable', 'true')
# let's remove any non-ascii characters from the title, to prevent errors with urllib.parse.parse_qs
# of the parameters
title = title.encode('ascii', 'ignore')
parameters = {"action": "play", "functional_url": functional_url, "technical_url": technical_url,
"title": title, "is_first_member_only": is_first_member_only, "next_page_possible": "False"}
plugin_url_with_parms = self.plugin_url + '?' + urllib.parse.urlencode(parameters)
is_folder = False
# Add refresh option to context menu
list_item.addContextMenuItems([('Refresh', 'Container.Refresh')])
# Add our item to the listing as a 3-element tuple.
listing.append((plugin_url_with_parms, list_item, is_folder))
# Make a next page item, if a next page is possible
total_pages_str = json_data['total_pages']
total_pages = int(total_pages_str)
if self.page_number_next <= total_pages:
# Next page entry
if self.next_page_possible == 'True':
list_item = xbmcgui.ListItem(LANGUAGE(30200))
list_item.setArt({'thumb': os.path.join(RESOURCES_PATH, 'next-page.png'),
'fanart': os.path.join(RESOURCES_PATH, 'fanart-blur.jpg')})
list_item.setProperty('IsPlayable', 'false')
parameters = {"action": "list-episodes", "url": str(self.next_url),
"next_page_possible": self.next_page_possible, "show_serie_name": self.show_serie_name}
url = self.plugin_url + '?' + urllib.parse.urlencode(parameters)
is_folder = True
# Add refresh option to context menu
list_item.addContextMenuItems([('Refresh', 'Container.Refresh')])
# Add our item to the listing as a 3-element tuple.
listing.append((url, list_item, is_folder))
# Add our listing to Kodi.
# Large lists and/or slower systems benefit from adding all items at once via addDirectoryItems
# instead of adding them one by one via addDirectoryItem.
xbmcplugin.addDirectoryItems(self.plugin_handle, listing, len(listing))
# Set initial sorting
xbmcplugin.addSortMethod(handle=self.plugin_handle, sortMethod=xbmcplugin.SORT_METHOD_DATEADDED)
# Finish creating a virtual folder.
xbmcplugin.endOfDirectory(self.plugin_handle) | gpl-3.0 | -2,838,575,640,047,158,300 | 38.887805 | 122 | 0.576321 | false | 3.926993 | false | false | false |
openstack/smaug | karbor/services/operationengine/engine/triggers/timetrigger/time_trigger.py | 1 | 9734 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from datetime import timedelta
import eventlet
import functools
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
from karbor import exception
from karbor.i18n import _
from karbor.services.operationengine.engine import triggers
from karbor.services.operationengine.engine.triggers.timetrigger import utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class TriggerOperationGreenThread(object):
def __init__(self, first_run_time, function):
super(TriggerOperationGreenThread, self).__init__()
self._is_sleeping = True
self._pre_run_time = None
self._running = False
self._thread = None
self._function = function
self._start(first_run_time)
def kill(self):
self._running = False
if self._is_sleeping:
self._thread.kill()
@property
def running(self):
return self._running
@property
def pre_run_time(self):
return self._pre_run_time
def _start(self, first_run_time):
self._running = True
now = datetime.utcnow()
initial_delay = 0 if first_run_time <= now else (
int(timeutils.delta_seconds(now, first_run_time)))
self._thread = eventlet.spawn_after(
initial_delay, self._run, first_run_time)
self._thread.link(self._on_done)
def _on_done(self, gt, *args, **kwargs):
self._is_sleeping = True
self._pre_run_time = None
self._running = False
self._thread = None
def _run(self, expect_run_time):
while self._running:
self._is_sleeping = False
self._pre_run_time = expect_run_time
expect_run_time = self._function(expect_run_time)
if expect_run_time is None or not self._running:
break
self._is_sleeping = True
now = datetime.utcnow()
idle_time = 0 if expect_run_time <= now else int(
timeutils.delta_seconds(now, expect_run_time))
eventlet.sleep(idle_time)
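# Illustrative sketch (not used by Karbor itself): how TriggerOperationGreenThread
# drives a callback. The callback receives the expected run time and returns the
# next run time, or None to stop; the names below are made up for this example.
def _example_greenthread_usage():
    def run_once(expect_run_time):
        # ... the periodic work would happen here ...
        return expect_run_time + timedelta(seconds=60)  # schedule next run in a minute
    return TriggerOperationGreenThread(datetime.utcnow(), run_once)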
class TimeTrigger(triggers.BaseTrigger):
TRIGGER_TYPE = "time"
IS_ENABLED = (CONF.scheduling_strategy == 'default')
def __init__(self, trigger_id, trigger_property, executor):
super(TimeTrigger, self).__init__(
trigger_id, trigger_property, executor)
self._trigger_property = self.check_trigger_definition(
trigger_property)
self._greenthread = None
def shutdown(self):
self._kill_greenthread()
def register_operation(self, operation_id, **kwargs):
if operation_id in self._operation_ids:
msg = (_("The operation_id(%s) is exist") % operation_id)
raise exception.ScheduledOperationExist(msg)
if self._greenthread and not self._greenthread.running:
raise exception.TriggerIsInvalid(trigger_id=self._id)
self._operation_ids.add(operation_id)
if self._greenthread is None:
self._start_greenthread()
def unregister_operation(self, operation_id, **kwargs):
if operation_id not in self._operation_ids:
return
self._operation_ids.remove(operation_id)
if 0 == len(self._operation_ids):
self._kill_greenthread()
def update_trigger_property(self, trigger_property):
valid_trigger_property = self.check_trigger_definition(
trigger_property)
if valid_trigger_property == self._trigger_property:
return
timer = self._get_timer(valid_trigger_property)
first_run_time = self._compute_next_run_time(
datetime.utcnow(), trigger_property['end_time'], timer)
if not first_run_time:
msg = (_("The new trigger property is invalid, "
"Can not find the first run time"))
raise exception.InvalidInput(msg)
if self._greenthread is not None:
pre_run_time = self._greenthread.pre_run_time
if pre_run_time:
end_time = pre_run_time + timedelta(
seconds=self._trigger_property['window'])
if first_run_time <= end_time:
msg = (_("The new trigger property is invalid, "
"First run time%(t1)s must be after %(t2)s") %
{'t1': first_run_time, 't2': end_time})
raise exception.InvalidInput(msg)
self._trigger_property = valid_trigger_property
if len(self._operation_ids) > 0:
# Restart greenthread to take the change of trigger property
# effect immediately
self._kill_greenthread()
self._create_green_thread(first_run_time, timer)
def _kill_greenthread(self):
if self._greenthread:
self._greenthread.kill()
self._greenthread = None
def _start_greenthread(self):
# Find the first time.
# We don't know when this trigger will be used for the first time.
timer = self._get_timer(self._trigger_property)
first_run_time = self._compute_next_run_time(
datetime.utcnow(), self._trigger_property['end_time'], timer)
if not first_run_time:
raise exception.TriggerIsInvalid(trigger_id=self._id)
self._create_green_thread(first_run_time, timer)
def _create_green_thread(self, first_run_time, timer):
func = functools.partial(
self._trigger_operations,
trigger_property=self._trigger_property.copy(),
timer=timer)
self._greenthread = TriggerOperationGreenThread(
first_run_time, func)
def _trigger_operations(self, expect_run_time, trigger_property, timer):
"""Trigger operations once
returns: wait time for next run
"""
# Just for robustness, actually expect_run_time always <= now
# but, if the scheduling of eventlet is not accurate, then we
# can do some adjustments.
entry_time = datetime.utcnow()
if entry_time < expect_run_time and (
int(timeutils.delta_seconds(entry_time, expect_run_time)) > 0):
return expect_run_time
# The self._executor.execute_operation may have I/O operation.
# If it is, this green thread will be switched out during looping
# operation_ids. In order to avoid changing self._operation_ids
# during the green thread is switched out, copy self._operation_ids
# as the iterative object.
operation_ids = self._operation_ids.copy()
sent_ops = set()
window = trigger_property.get("window")
end_time = expect_run_time + timedelta(seconds=window)
for operation_id in operation_ids:
if operation_id not in self._operation_ids:
# Maybe, when traversing this operation_id, it has been
# removed by self.unregister_operation
LOG.warning("Execute operation %s which is not exist, "
"ignore it", operation_id)
continue
now = datetime.utcnow()
if now >= end_time:
LOG.error("Can not trigger operations to run. Because it is "
"out of window time. now=%(now)s, "
"end time=%(end_time)s, expect run time=%(expect)s,"
" wating operations=%(ops)s",
{'now': now, 'end_time': end_time,
'expect': expect_run_time,
'ops': operation_ids - sent_ops})
break
try:
self._executor.execute_operation(
operation_id, now, expect_run_time, window)
except Exception:
LOG.exception("Submit operation to executor failed, operation"
" id=%s", operation_id)
sent_ops.add(operation_id)
next_time = self._compute_next_run_time(
expect_run_time, trigger_property['end_time'], timer)
now = datetime.utcnow()
if next_time and next_time <= now:
LOG.error("Next run time:%(next_time)s <= now:%(now)s. Maybe the "
"entry time=%(entry)s is too late, even exceeds the end"
" time of window=%(end)s, or it was blocked where "
"sending the operation to executor.",
{'next_time': next_time, 'now': now,
'entry': entry_time, 'end': end_time})
return next_time
@classmethod
def check_trigger_definition(cls, trigger_definition):
return utils.check_trigger_definition(trigger_definition)
@classmethod
def _compute_next_run_time(cls, start_time, end_time, timer):
return utils.compute_next_run_time(start_time, end_time, timer)
@classmethod
def _get_timer(cls, trigger_property):
return utils.get_timer(trigger_property)
@classmethod
def check_configuration(cls):
utils.check_configuration()
| apache-2.0 | -6,533,845,585,274,748,000 | 36.011407 | 79 | 0.59585 | false | 4.217504 | false | false | false |
amwelch/a10sdk-python | a10sdk/core/slb/slb_dns_stats.py | 2 | 3687 | from a10sdk.common.A10BaseClass import A10BaseClass
class Stats(A10BaseClass):
"""This class does not support CRUD Operations please use parent.
:param nat_resp: {"description": "(NAT) No. of responses", "format": "counter", "type": "number", "oid": "8", "optional": true, "size": "2"}
:param slb_resp_no_match: {"description": "No. of requests with no response", "format": "counter", "type": "number", "oid": "5", "optional": true, "size": "2"}
:param nat_xid_reused: {"description": "(NAT) No. of requests reusing a transaction id", "format": "counter", "type": "number", "oid": "13", "optional": true, "size": "2"}
:param slb_req: {"description": "No. of requests", "format": "counter", "type": "number", "oid": "1", "optional": true, "size": "2"}
:param slb_no_resp: {"description": "No. of resource failures", "format": "counter", "type": "number", "oid": "3", "optional": true, "size": "2"}
:param nat_req: {"description": "(NAT) No. of requests", "format": "counter", "type": "number", "oid": "7", "optional": true, "size": "2"}
:param slb_req_rexmit: {"description": "No. of request retransmits", "format": "counter", "type": "number", "oid": "4", "optional": true, "size": "2"}
:param nat_no_resource: {"description": "(NAT) No. of resource failures", "format": "counter", "type": "number", "oid": "12", "optional": true, "size": "2"}
:param nat_no_resp: {"description": "(NAT) No. of resource failures", "format": "counter", "type": "number", "oid": "9", "optional": true, "size": "2"}
:param nat_req_rexmit: {"description": "(NAT) No. of request retransmits", "format": "counter", "type": "number", "oid": "10", "optional": true, "size": "2"}
:param nat_resp_no_match: {"description": "(NAT) No. of requests with no response", "format": "counter", "type": "number", "oid": "11", "optional": true, "size": "2"}
:param slb_no_resource: {"description": "No. of resource failures", "format": "counter", "type": "number", "oid": "6", "optional": true, "size": "2"}
:param slb_resp: {"description": "No. of responses", "format": "counter", "type": "number", "oid": "2", "optional": true, "size": "2"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.b_key = "stats"
self.DeviceProxy = ""
self.nat_resp = ""
self.slb_resp_no_match = ""
self.nat_xid_reused = ""
self.slb_req = ""
self.slb_no_resp = ""
self.nat_req = ""
self.slb_req_rexmit = ""
self.nat_no_resource = ""
self.nat_no_resp = ""
self.nat_req_rexmit = ""
self.nat_resp_no_match = ""
self.slb_no_resource = ""
self.slb_resp = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
class Dns(A10BaseClass):
"""Class Description::
Statistics for the object dns.
Class dns supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/slb/dns/stats`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "dns"
self.a10_url="/axapi/v3/slb/dns/stats"
self.DeviceProxy = ""
self.stats = {}
for keys, value in kwargs.items():
setattr(self,keys, value)
| apache-2.0 | -2,718,929,192,280,475,600 | 45.0875 | 175 | 0.584486 | false | 3.309695 | false | false | false |
epol/dhcp | server.py | 1 | 1434 | #!/usr/bin/env python2.7
from pydhcplib.dhcp_packet import *
from pydhcplib.dhcp_network import *
netopt = {'client_listen_port':"68",
'server_listen_port':"67",
'listen_address':"0.0.0.0"}
goodservers_str = [ '192.168.251.2', '192.168.251.3' ]
goodservers = [ [ int(n) for n in s.split('.') ] for s in goodservers_str ]
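# Illustrative check of the conversion above: pydhcplib exposes IP-address
# options as lists of ints, so '192.168.251.2' must become [192, 168, 251, 2]
# to be comparable with packet.GetOption('server_identifier').
assert [int(n) for n in '192.168.251.2'.split('.')] == [192, 168, 251, 2]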
class Server(DhcpServer):
def __init__(self, options):
DhcpServer.__init__(self,options["listen_address"],
options["client_listen_port"],
options["server_listen_port"])
def HandleDhcpDiscover(self, packet):
pass
def HandleDhcpRequest(self, packet):
server_identifier = packet.GetOption('server_identifier')
if server_identifier is not None:
if server_identifier != []:
if server_identifier not in goodservers:
with open('badpackets.txt','a') as f:
f.write(packet.str())
f.write('-----------------------------------------------------\n\n')
print packet.str()
def HandleDhcpDecline(self, packet):
pass
def HandleDhcpRelease(self, packet):
pass
def HandleDhcpInform(self, packet):
pass
def main():
server = Server(netopt)
while True :
server.GetNextDhcpPacket()
if __name__ == "__main__":
main()
| gpl-3.0 | 4,637,498,704,910,496,000 | 27.117647 | 92 | 0.532775 | false | 3.972299 | false | false | false |
avaly/quicktile | quicktile/__main__.py | 1 | 9638 | """Entry point and related functionality"""
__author__ = "Stephan Sokolow (deitarion/SSokolow)"
__license__ = "GNU GPL 2.0 or later"
import errno, logging, os, subprocess, sys
from ConfigParser import RawConfigParser
try:
import pygtk
pygtk.require('2.0')
except ImportError:
pass # Apparently Travis-CI's build environment doesn't add this
import gtk, wnck
import gtkexcepthook
gtkexcepthook.enable()
from . import commands, layout
from .util import fmt_table, XInitError
from .version import __version__
from .wm import WindowManager
#: Location for config files (determined at runtime).
XDG_CONFIG_DIR = os.environ.get('XDG_CONFIG_HOME',
os.path.expanduser('~/.config'))
#: Default content for the config file
DEFAULTS = {
'general': {
# Use Ctrl+Alt as the default base for key combinations
'ModMask': '<Ctrl><Alt>',
'UseWorkarea': True,
'ColumnCount': 3
},
'keys': {
"KP_Enter": "monitor-switch",
"KP_0": "maximize",
"KP_1": "bottom-left",
"KP_2": "bottom",
"KP_3": "bottom-right",
"KP_4": "left",
"KP_5": "middle",
"KP_6": "right",
"KP_7": "top-left",
"KP_8": "top",
"KP_9": "top-right",
"V": "vertical-maximize",
"H": "horizontal-maximize",
"C": "move-to-center",
}
}
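# For reference (sketch only), the quicktile.cfg that main() below writes from
# these defaults would look roughly like:
#
#   [general]
#   cfg_schema = 1
#   ModMask = <Ctrl><Alt>
#   UseWorkarea = True
#   ColumnCount = 3
#
#   [keys]
#   KP_0 = maximize
#   KP_7 = top-left
#   ...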
KEYLOOKUP = {
',': 'comma',
'.': 'period',
'+': 'plus',
'-': 'minus',
} #: Used for resolving certain keysyms
wnck.set_client_type(wnck.CLIENT_TYPE_PAGER) # pylint: disable=no-member
class QuickTileApp(object):
"""The basic Glib application itself."""
keybinder = None
dbus_name = None
dbus_obj = None
def __init__(self, winman, commands, keys=None, modmask=None):
"""Populate the instance variables.
@param keys: A dict mapping X11 keysyms to L{CommandRegistry}
command names.
@param modmask: A modifier mask to prefix to all keybindings.
@type winman: The L{WindowManager} instance to use.
@type keys: C{dict}
@type modmask: C{GdkModifierType}
"""
self.winman = winman
self.commands = commands
self._keys = keys or {}
self._modmask = modmask or ''
def run(self):
"""Initialize keybinding and D-Bus if available, then call
C{gtk.main()}.
@returns: C{False} if none of the supported backends were available.
@rtype: C{bool}
@todo 1.0.0: Retire the C{doCommand} name. (API-breaking change)
"""
# Attempt to set up the global hotkey support
try:
from . import keybinder
except ImportError:
logging.error("Could not find python-xlib. Cannot bind keys.")
else:
self.keybinder = keybinder.init(
self._modmask, self._keys, self.commands, self.winman)
# Attempt to set up the D-Bus API
try:
from . import dbus_api
except ImportError:
logging.warn("Could not load DBus backend. "
"Is python-dbus installed?")
else:
self.dbus_name, self.dbus_obj = dbus_api.init(
self.commands, self.winman)
# If either persistent backend loaded, start the GTK main loop.
if self.keybinder or self.dbus_obj:
try:
gtk.main() # pylint: disable=no-member
except KeyboardInterrupt:
pass
return True
else:
return False
def show_binds(self):
"""Print a formatted readout of defined keybindings and the modifier
mask to stdout.
@todo: Look into moving this into L{KeyBinder}
"""
print "Keybindings defined for use with --daemonize:\n"
print "Modifier: %s\n" % (self._modmask or '(none)')
print fmt_table(self._keys, ('Key', 'Action'))
def main():
"""setuptools entry point"""
from optparse import OptionParser, OptionGroup
parser = OptionParser(usage="%prog [options] [action] ...",
version="%%prog v%s" % __version__)
parser.add_option('-d', '--daemonize', action="store_true",
dest="daemonize", default=False, help="Attempt to set up global "
"keybindings using python-xlib and a D-Bus service using dbus-python. "
"Exit if neither succeeds")
parser.add_option('-b', '--bindkeys', action="store_true",
dest="daemonize", default=False,
help="Deprecated alias for --daemonize")
parser.add_option('--debug', action="store_true", dest="debug",
default=False, help="Display debug messages")
parser.add_option('--no-workarea', action="store_true", dest="no_workarea",
default=False, help="Overlap panels but work better with "
"non-rectangular desktops")
help_group = OptionGroup(parser, "Additional Help")
help_group.add_option('--show-bindings', action="store_true",
dest="show_binds", default=False, help="List all configured keybinds")
help_group.add_option('--show-actions', action="store_true",
dest="show_args", default=False, help="List valid arguments for use "
"without --daemonize")
parser.add_option_group(help_group)
opts, args = parser.parse_args()
# Hook up grep to filter out spurious libwnck error messages that we
# can't filter properly because PyGTK doesn't expose g_log_set_handler()
if not opts.debug:
glib_log_filter = subprocess.Popen(
['grep', '-v', 'Unhandled action type _OB_WM'],
stdin=subprocess.PIPE)
# Redirect stderr through grep
os.dup2(glib_log_filter.stdin.fileno(), sys.stderr.fileno())
# Set up the output verbosity
logging.basicConfig(level=logging.DEBUG if opts.debug else logging.INFO,
format='%(levelname)s: %(message)s')
# Load the config from file if present
# TODO: Refactor all this
cfg_path = os.path.join(XDG_CONFIG_DIR, 'quicktile.cfg')
first_run = not os.path.exists(cfg_path)
config = RawConfigParser()
config.optionxform = str # Make keys case-sensitive
# TODO: Maybe switch to two config files so I can have only the keys in the
# keymap case-sensitive?
config.read(cfg_path)
dirty = False
if not config.has_section('general'):
config.add_section('general')
# Change this if you make backwards-incompatible changes to the
# section and key naming in the config file.
config.set('general', 'cfg_schema', 1)
dirty = True
for key, val in DEFAULTS['general'].items():
if not config.has_option('general', key):
config.set('general', key, str(val))
dirty = True
mk_raw = modkeys = config.get('general', 'ModMask')
if ' ' in modkeys.strip() and '<' not in modkeys:
modkeys = '<%s>' % '><'.join(modkeys.strip().split())
logging.info("Updating modkeys format:\n %r --> %r", mk_raw, modkeys)
config.set('general', 'ModMask', modkeys)
dirty = True
# Either load the keybindings or use and save the defaults
if config.has_section('keys'):
keymap = dict(config.items('keys'))
else:
keymap = DEFAULTS['keys']
config.add_section('keys')
for row in keymap.items():
config.set('keys', row[0], row[1])
dirty = True
# Migrate from the deprecated syntax for punctuation keysyms
for key in keymap:
# Look up unrecognized shortkeys in a hardcoded dict and
# replace with valid names like ',' -> 'comma'
transKey = key
if key in KEYLOOKUP:
logging.warn("Updating config file from deprecated keybind syntax:"
"\n\t%r --> %r", key, KEYLOOKUP[key])
transKey = KEYLOOKUP[key]
dirty = True
if dirty:
cfg_file = file(cfg_path, 'wb')
config.write(cfg_file)
cfg_file.close()
if first_run:
logging.info("Wrote default config file to %s", cfg_path)
ignore_workarea = ((not config.getboolean('general', 'UseWorkarea')) or
opts.no_workarea)
# TODO: Rearchitect so this hack isn't needed
commands.cycle_dimensions = commands.commands.add_many(
layout.make_winsplit_positions(config.getint('general', 'ColumnCount'))
)(commands.cycle_dimensions)
try:
winman = WindowManager(ignore_workarea=ignore_workarea)
except XInitError as err:
logging.critical(err)
sys.exit(1)
app = QuickTileApp(winman, commands.commands, keymap, modmask=modkeys)
if opts.show_binds:
app.show_binds()
if opts.show_args:
print commands.commands
if opts.daemonize:
if not app.run():
logging.critical("Neither the Xlib nor the D-Bus backends were "
"available")
sys.exit(errno.ENOENT)
# FIXME: What's the proper exit code for "library not found"?
elif not first_run:
if args:
winman.screen.force_update()
for arg in args:
commands.commands.call(arg, winman)
while gtk.events_pending(): # pylint: disable=no-member
gtk.main_iteration() # pylint: disable=no-member
elif not opts.show_args and not opts.show_binds:
print commands.commands
print "\nUse --help for a list of valid options."
sys.exit(errno.ENOENT)
if __name__ == '__main__':
main()
# vim: set sw=4 sts=4 expandtab :
| gpl-2.0 | -8,080,371,509,678,494,000 | 33.298932 | 79 | 0.597634 | false | 3.858287 | true | false | false |
ContextLogic/redis-py | tests/test_encoding.py | 37 | 1336 | from __future__ import with_statement
import pytest
from redis._compat import unichr, u, unicode
from .conftest import r as _redis_client
class TestEncoding(object):
@pytest.fixture()
def r(self, request):
return _redis_client(request=request, decode_responses=True)
def test_simple_encoding(self, r):
unicode_string = unichr(3456) + u('abcd') + unichr(3421)
r['unicode-string'] = unicode_string
cached_val = r['unicode-string']
assert isinstance(cached_val, unicode)
assert unicode_string == cached_val
def test_list_encoding(self, r):
unicode_string = unichr(3456) + u('abcd') + unichr(3421)
result = [unicode_string, unicode_string, unicode_string]
r.rpush('a', *result)
assert r.lrange('a', 0, -1) == result
def test_object_value(self, r):
unicode_string = unichr(3456) + u('abcd') + unichr(3421)
r['unicode-string'] = Exception(unicode_string)
cached_val = r['unicode-string']
assert isinstance(cached_val, unicode)
assert unicode_string == cached_val
class TestCommandsAndTokensArentEncoded(object):
@pytest.fixture()
def r(self, request):
return _redis_client(request=request, charset='utf-16')
def test_basic_command(self, r):
r.set('hello', 'world')
| mit | -6,302,935,293,464,976,000 | 32.4 | 68 | 0.642216 | false | 3.640327 | true | false | false |
gr3yman/TileControl | altTileCtrl.py | 1 | 1960 | #!/usr/env/python3
from subprocess import *
from collections import *
class windo:
def __init__(self, windata):
self.windata = {}
def getwindata():
with Popen(['wmctrl -lG | tr -s " "'], shell = True, stdout=PIPE, universal_newlines=True) as wmctrlg:
winout = wmctrlg.stdout.read().splitlines()
wincontainer = []
for line in winout:
winline = line.split(' ')
windict = {}
windict['hexid'] = winline[0]
windict['desktop'] = winline[1]
windim = {}
windim['xpos'] = winline[2]
windim['ypos'] = winline[3]
windim['width'] = winline[4]
windim['height'] = winline[5]
windict['dimensions'] = windim
wincontainer.append(windict)
return wincontainer
def movewin(windata, newsizestrng):
winhxid = windata['hexid']
call(['wmctrl', '-i', '-r', winhxid, '-e', newsizestrng])
def sortwindos(screendictlist, shift):
listlen = len(screendictlist)
movedwinlist = []
def get_active_screen():
with Popen(['wmctrl', '-d'], stdout=PIPE, universal_newlines=True) as wmctrld:
wmctrlout = wmctrld.stdout.read().splitlines()
for line in wmctrlout:
if "*" in line:
values = line.split(' ')
deskid = values[0]
screensize = values[11]
try:
screenx, screeny = screensize.split('x')
return deskid, screenx, screeny
except:
print('Not running an EWMH-compliant window manager')
continue
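# Illustrative note: the index-based parsing above assumes `wmctrl -d` output
# shaped roughly like
#   0  * DG: 1920x1080  VP: 0,0  WA: 0,25 1920x1055  Workspace 1
# where the desktop id is the first field and the WIDTHxHEIGHT geometry lands
# at the index used above once the line is split on single spaces; the exact
# spacing varies between window managers, so this is an assumption.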
AllScreenDictList = windo.getwindata()
ActiveDeskNum, ScreenXDim, ScreenYDim = get_active_screen()
for win in AllScreenDictList:
if win['desktop'] == ActiveDeskNum:
print(win['hexid'])
| gpl-2.0 | -669,324,268,774,902,500 | 31.131148 | 110 | 0.536735 | false | 3.791103 | false | false | false |
madebymany/isthetoiletfree | server.py | 1 | 10989 | #!/usr/bin/env python
import tornado.ioloop
import tornado.web
import tornado.gen
import tornado.websocket
import tornado.auth
import tornado.escape
import hmac
import hashlib
import functools
import os
import momoko
import urlparse
import time
import datetime
import parsedatetime
import prettytable
import ascii_graph
import logging
from tornado.options import define, options
define("port", default=8888, help="run on the given port", type=int)
define("host", default='localhost:8888', help="server host", type=str)
define("db_host", default="localhost", help="database hostname", type=str)
define("db_port", default=5432, help="database port", type=int)
define("db_name", default="callum", help="database name", type=str)
define("db_user", default="callum", help="database username", type=str)
define("db_pass", default="", help="database password", type=str)
class HumanDateParser(object):
def __init__(self):
self.calendar = parsedatetime.Calendar()
def parse(self, str):
return datetime.datetime.fromtimestamp(
time.mktime(self.calendar.parse(str)[0]))
def get_psql_credentials():
try:
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.getenv("DATABASE_URL"))
credentials = {"host": url.hostname, "port": url.port,
"dbname": url.path[1:], "user": url.username,
"password": url.password}
except:
credentials = {"host": options.db_host, "port": options.db_port,
"dbname": options.db_name, "user": options.db_user,
"password": options.db_pass}
return credentials
def _get_secret(filename, envvar):
try:
with open(os.path.join(os.path.dirname(__file__), filename)) as f:
return f.read().strip()
except IOError:
return os.getenv(envvar)
get_hmac_secret = \
functools.partial(_get_secret, ".hmac_secret", "ITTF_HMAC_SECRET")
get_cookie_secret = \
functools.partial(_get_secret, ".cookie_secret", "ITTF_COOKIE_SECRET")
get_google_secret = \
functools.partial(_get_secret, ".google_secret", "ITTF_GOOGLE_SECRET")
def hmac_authenticated(method):
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
hash = hmac.new(
self.settings["hmac_secret"],
self.get_argument("data"),
hashlib.sha256
)
if self.get_argument("token") != hash.hexdigest():
raise tornado.web.HTTPError(401, "Invalid token")
return method(self, *args, **kwargs)
return wrapper
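# Client-side sketch (illustrative, not part of this app): how a sensor process
# could sign its payload so it passes the hmac_authenticated check above.  The
# URL and the use of the requests library are assumptions for this example.
def _example_signed_post(events, secret, url="http://localhost:8888/"):
    import json
    import requests  # assumed to be available on the client
    data = json.dumps(events)
    token = hmac.new(secret, data, hashlib.sha256).hexdigest()
    return requests.post(url, data={"data": data, "token": token})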
def bool2str(boolean):
return "yes" if boolean else "no"
class HasFreeWebSocketHandler(tornado.websocket.WebSocketHandler):
connections = set()
def open(self):
HasFreeWebSocketHandler.connections.add(self)
def on_message(self, message):
pass
def on_close(self):
HasFreeWebSocketHandler.connections.remove(self)
class BaseHandler(tornado.web.RequestHandler):
@property
def db(self):
return self.application.db
def get_current_user(self):
return self.get_secure_cookie("ittf_user")
@tornado.gen.coroutine
def has_free_toilet(self):
cursor = yield self.db.callproc("any_are_free")
raise tornado.gen.Return(cursor.fetchone()[0])
@tornado.gen.coroutine
def has_free_shower(self):
cursor = yield self.db.execute(
"SELECT is_free FROM latest_events() WHERE toilet_id = 2")
raise tornado.gen.Return(cursor.fetchone()[0])
class GoogleLoginHandler(BaseHandler, tornado.auth.GoogleOAuth2Mixin):
@tornado.gen.coroutine
def get(self):
if self.get_argument("code", False):
access = yield self.get_authenticated_user(
redirect_uri=self.settings["login_url"],
code=self.get_argument("code"))
user = yield self.oauth2_request(
"https://www.googleapis.com/oauth2/v1/userinfo",
access_token=access["access_token"])
if user["email"].endswith("@madebymany.co.uk") or \
user["email"].endswith("@madebymany.com"):
self.set_secure_cookie("ittf_user", user["email"])
self.redirect("/stats")
else:
self.redirect("/")
else:
yield self.authorize_redirect(
redirect_uri=self.settings["login_url"],
client_id=self.settings["google_oauth"]["key"],
scope=["profile", "email"],
response_type="code",
extra_params={"approval_prompt": "auto"})
class MainHandler(BaseHandler):
@tornado.gen.coroutine
def get(self):
has_free = bool2str((yield self.has_free_toilet()))
self.render("index.html", has_free_toilet=has_free)
@hmac_authenticated
@tornado.gen.coroutine
def post(self):
values = yield [self.db.mogrify(
"(%(toilet_id)s, %(is_free)s, %(timestamp)s)", t)
for t in tornado.escape.json_decode(self.get_argument("data"))
]
yield self.db.execute(
"INSERT INTO events (toilet_id, is_free, recorded_at) "
"VALUES %s;" % ", ".join(values))
self.notify_has_free()
self.finish()
@tornado.gen.coroutine
def notify_has_free(self):
has_free = bool2str((yield self.has_free_toilet()))
for connected in HasFreeWebSocketHandler.connections:
try:
connected.write_message({
"hasFree": has_free
})
except:
logging.error("Error sending message", exc_info=True)
class ShowerHandler(BaseHandler):
@tornado.gen.coroutine
def get(self):
has_free = bool2str((yield self.has_free_shower()))
self.render("shower.html", has_free_shower=has_free)
class StatsHandler(BaseHandler):
@tornado.web.authenticated
@tornado.gen.coroutine
def get(self):
parser = HumanDateParser()
text = None
op = None
where = ""
and_where = ""
start = self.get_argument("from", None)
end = self.get_argument("to", None)
if start and end:
parsed_start = parser.parse(start)
parsed_end = parser.parse(end)
text = "Showing from %s to %s" % (parsed_start, parsed_end)
op = ("WHERE recorded_at BETWEEN %s AND %s",
(parsed_start, parsed_end))
elif start:
parsed_start = parser.parse(start)
text = "Showing from %s onward" % parsed_start
op = ("WHERE recorded_at >= %s", (parsed_start,))
elif end:
parsed_end = parser.parse(end)
text = "Showing from %s backward" % parsed_end
op = ("WHERE recorded_at <= %s", (parsed_end,))
if op:
where = yield self.db.mogrify(*op)
and_where = where.replace("WHERE", "AND", 1)
queries = [
("Number of visits",
"SELECT toilet_id, count(*) "
"AS num_visits FROM visits %(where)s "
"GROUP BY toilet_id ORDER BY toilet_id;"),
("Average visit duration",
"SELECT toilet_id, avg(duration) "
"AS duration_avg FROM visits %(where)s "
"GROUP BY toilet_id ORDER BY toilet_id;"),
("Minimum visit duration",
"SELECT toilet_id, min(duration) "
"AS duration_min FROM visits %(where)s "
"GROUP BY toilet_id ORDER BY toilet_id;"),
("Maximum visit duration",
"SELECT toilet_id, max(duration) "
"AS duration_max FROM visits %(where)s "
"GROUP BY toilet_id ORDER BY toilet_id;"),
("Visits by hour",
"SELECT s.hour AS hour_of_day, count(v.hour) "
"FROM generate_series(0, 23) s(hour) "
"LEFT OUTER JOIN (SELECT recorded_at, "
"EXTRACT('hour' from recorded_at) "
"AS hour FROM visits %(where)s) v on s.hour = v.hour "
"GROUP BY s.hour ORDER BY s.hour;"),
("Visits by day",
"SELECT s.dow AS day_of_week, count(v.dow) "
"FROM generate_series(0, 6) s(dow) "
"LEFT OUTER JOIN (SELECT recorded_at, "
"EXTRACT('dow' from recorded_at) "
"AS dow FROM visits %(where)s) v on s.dow = v.dow "
"GROUP BY s.dow ORDER BY s.dow;")
]
results = yield [self.db.execute(q % {"where": where,
"and_where": and_where})
for _, q in queries]
cursor = yield self.db.execute((
"SELECT (s.period * 10) AS seconds, count(v.duration) "
"FROM generate_series(0, 500) s(period) "
"LEFT OUTER JOIN (SELECT EXTRACT(EPOCH from duration) "
"AS duration FROM visits) v on s.period = FLOOR(v.duration / 10) "
"GROUP BY s.period HAVING s.period <= 36 ORDER BY s.period;"
))
graph = "\n".join(ascii_graph.Pyasciigraph()
.graph("Frequency graph", cursor.fetchall()))
self.render("stats.html", text=text, start=start, end=end,
tables=[(queries[i][0], prettytable.from_db_cursor(r))
for i, r in enumerate(results)],
frequency_graph=graph)
class APIHandler(BaseHandler):
@tornado.gen.coroutine
def get(self):
response = tornado.escape.json_encode({
"has_free_toilet": (yield self.has_free_toilet())
})
callback = self.get_argument("callback", None)
if callback:
response = "%s(%s)" % (callback, response)
self.set_header("content-type", "application/json")
self.write(response)
if __name__ == "__main__":
tornado.options.parse_command_line()
app = tornado.web.Application(
[(r"/login", GoogleLoginHandler),
(r"/", MainHandler),
(r"/shower", ShowerHandler),
(r"/stats", StatsHandler),
(r"/api", APIHandler),
(r"/hasfreesocket", HasFreeWebSocketHandler)],
template_path=os.path.join(os.path.dirname(__file__), "templates"),
hmac_secret=get_hmac_secret(),
cookie_secret=get_cookie_secret(),
login_url="http://%s/login" % options.host,
google_oauth=dict(key=os.getenv("ITTF_GOOGLE_KEY"),
secret=get_google_secret())
)
ioloop = tornado.ioloop.IOLoop.instance()
app.db = momoko.Pool(
dsn=" ".join(["%s=%s" % c
for c in get_psql_credentials().iteritems()]),
size=6,
ioloop=ioloop
)
future = app.db.connect()
ioloop.add_future(future, lambda f: ioloop.stop())
ioloop.start()
future.result()
app.listen(options.port)
try:
ioloop.start()
except KeyboardInterrupt:
pass
| mit | 8,759,443,257,336,088,000 | 34.108626 | 78 | 0.576486 | false | 3.786699 | false | false | false |
DougBurke/astropy | astropy/units/format/unicode_format.py | 2 | 1620 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Unicode" unit format.
"""
from . import console, utils
class Unicode(console.Console):
"""
Output-only format to display pretty formatting at the console
using Unicode characters.
For example::
>>> import astropy.units as u
>>> print(u.bar.decompose().to_string('unicode'))
kg
100000 ────
m s²
"""
_times = "×"
_line = "─"
@classmethod
def _get_unit_name(cls, unit):
return unit.get_format_name('unicode')
@classmethod
def format_exponential_notation(cls, val):
m, ex = utils.split_mantissa_exponent(val)
parts = []
if m:
parts.append(m.replace('-', '−'))
if ex:
parts.append("10{0}".format(
cls._format_superscript(ex)))
return cls._times.join(parts)
@classmethod
def _format_superscript(cls, number):
mapping = {
'0': '⁰',
'1': '¹',
'2': '²',
'3': '³',
'4': '⁴',
'5': '⁵',
'6': '⁶',
'7': '⁷',
'8': '⁸',
'9': '⁹',
'-': '⁻',
'−': '⁻',
# This is actually a "raised omission bracket", but it's
# the closest thing I could find to a superscript solidus.
'/': '⸍',
}
output = []
for c in number:
output.append(mapping[c])
return ''.join(output)
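# For example (illustrative), format_exponential_notation(1.5e-10) renders
# roughly as '1.5×10⁻¹⁰': the mantissa and exponent are split by the helper
# above and the exponent digits are mapped through this superscript table.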
| bsd-3-clause | 260,359,851,560,530,940 | 21.913043 | 70 | 0.466161 | false | 3.601367 | false | false | false |
google-code/betsynetpdf | sumatrapdf/scripts/metadata/metadata.py | 20 | 7301 | import types
def is_valid_signed(bits, val):
if type(val) not in (types.IntType, types.LongType): return False
e = bits - 1
min_val = -(2 ** e)
if val < min_val: return False
max_val = (2 ** e) - 1
if val > max_val: return False
return True
def is_valid_unsigned(bits, val):
if type(val) not in (types.IntType, types.LongType): return False
if val < 0: return False
if val > 2 ** bits - 1: return False
return True
def is_valid_string(val):
if val is None: return True
return type(val) in (types.StringType, types.UnicodeType)
class Type(object):
def __init__(self, def_val):
self.c_type_override = None
self.set_val(def_val)
def set_val(self, val):
assert self.is_valid_val(val), "%s is not a valid value of %s" % (str(self.val), str(self))
self.val = val
def c_type(self):
if self.c_type_override is not None:
return self.c_type_override
return self.c_type_class
def get_type_typ_enum(self):
return self.type_enum
def is_struct(self):
return isinstance(self, Struct)
def is_array(self):
return isinstance(self, Array)
class Bool(Type):
c_type_class = "bool"
type_enum = "TYPE_BOOL"
def __init__(self, def_val):
super(Bool, self).__init__(def_val)
def is_valid_val(self, val):
return val in (True, False)
class U16(Type):
c_type_class = "uint16_t"
type_enum = "TYPE_U16"
def is_valid_val(self, val):
return is_valid_unsigned(16, val)
class I32(Type):
c_type_class = "int32_t"
type_enum = "TYPE_I32"
def __init__(self, def_val=0):
super(I32, self).__init__(def_val)
def is_valid_val(self, val):
return is_valid_signed(32, val)
class U32(Type):
c_type_class = "uint32_t"
type_enum = "TYPE_U32"
def is_valid_val(self, val):
return is_valid_unsigned(32, val)
class U64(Type):
c_type_class = "uint64_t"
type_enum = "TYPE_U64"
def is_valid_val(self, val):
return is_valid_unsigned(64, val)
# behaves like uint32_t, using unique name to signal intent
class Color(U32):
type_enum = "TYPE_COLOR"
class String(Type):
c_type_class = "const char *"
type_enum = "TYPE_STR"
def is_valid_val(self, val):
return is_valid_string(val)
class WString(Type):
c_type_class = "const WCHAR *"
type_enum = "TYPE_WSTR"
def is_valid_val(self, val):
return is_valid_string(val)
class Float(Type):
c_type_class = "float"
type_enum = "TYPE_FLOAT"
def is_valid_val(self, val):
return type(val) in (types.IntType, types.LongType, types.FloatType)
# struct is just a base class
# subclasses should have a class variable `fields`, a list of Field instances
# defining the name and type of each struct member:
# fields = [Field("boolField", Bool(True)), Field("u32Field", U32(32))]
#
# TODO: implement struct inheritance i.e. a subclass should inherit all
# fields from its parent
class Struct(Type):
c_type_class = ""
type_enum = "TYPE_STRUCT_PTR"
fields = []
def __init__(self, *vals):
# fields must be a class variable in Struct's subclass
self.values = [Field(f.name, f.typ, f.flags) for f in self.fields]
self.c_type_override = "%s *" % self.name()
self.offset = None
for i in range(len(vals)):
self.values[i].set_val(vals[i])
def is_valid_val(self, val):
return issubclass(val, Struct)
def name(self):
return self.__class__.__name__
def as_str(self):
s = str(self) + "\n"
for v in self.values:
if isinstance(v, Field):
s += "%s: %s\n" % (v.name, str(v.val))
return s
def __setattr__(self, name, value):
# special-case self.values, which we refer to
if name == "values":
object.__setattr__(self, name, value)
return
for field in self.values:
if field.name == name:
field.set_val(value)
return
object.__setattr__(self, name, value)
class Array(Type):
c_type_class = ""
type_enum = "TYPE_ARRAY"
def __init__(self, typ, values):
# TODO: we don't support arrays of primitive values, just structs
assert issubclass(typ, Struct)
self.typ = typ
self.values = values
for v in values:
assert self.is_valid_val(v)
self.c_type_override = "Vec<%s*> *" % typ.__name__
self.offset = None
def is_valid_val(self, val):
return isinstance(val, self.typ)
def name(self):
try:
return self.typ.__name__
except:
print(self.typ)
raise
# those are bit flags
NoStore = 1
Compact = 2
class Field(object):
def __init__(self, name, typ_val, flags=0):
self.name = name
self.typ = typ_val
self.flags = flags
if self.is_no_store(): assert not self.is_compact()
if self.is_compact():
to_test = typ_val
if typ_val.is_array():
to_test = typ_val.typ
else:
assert to_test.is_struct()
for field in to_test.fields:
assert not field.is_struct()
if typ_val.is_struct():
# TODO: support NULL values for the struct, represented by using
# class for typ_val
self.val = typ_val
elif typ_val.is_array():
self.val = typ_val
else:
self.val = typ_val.val
def c_type(self):
return self.typ.c_type()
def is_struct(self):
return self.typ.is_struct()
def is_signed(self):
return type(self.typ) == I32
def is_unsigned(self):
return type(self.typ) in (Bool, U16, U32, U64, Color)
def is_bool(self):
return type(self.typ) == Bool
def is_color(self):
return type(self.typ) == Color
def is_string(self):
return type(self.typ) in (String, WString)
def is_float(self):
return type(self.typ) == Float
def is_no_store(self):
return self.flags & NoStore == NoStore
def is_compact(self):
return self.flags & Compact == Compact
def is_array(self):
return type(self.typ) == Array
def set_val(self, val):
# Note: we don't support this for struct or arrays
assert not (self.is_struct() or self.is_array())
assert self.typ.is_valid_val(val)
self.val = val
def get_typ_enum(self, for_bin=False):
type_enum = self.typ.get_type_typ_enum()
# binary doesn't have a notion of compact storage
is_compact = self.is_compact() and not for_bin
if self.is_no_store() or is_compact:
s = "(Type)(" + type_enum
if self.is_no_store():
s = s + " | TYPE_NO_STORE_MASK"
if self.is_compact():
s = s + " | TYPE_STORE_COMPACT_MASK"
return s + ")"
return type_enum
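# Illustrative only: a minimal Struct subclass in the style described above,
# using Field instances as Struct.__init__ expects.  The names are made up
# for this example; ExamplePadding(4) would override the default for "top".
class ExamplePadding(Struct):
    fields = [
        Field("top", U16(2)),
        Field("bottom", U16(2), NoStore),
    ]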
| gpl-3.0 | -325,171,988,607,077,570 | 26.189189 | 99 | 0.548829 | false | 3.498323 | false | false | false |
ipfs/py-ipfs-api | ipfshttpclient/client/bootstrap.py | 1 | 1604 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from . import base
class Section(base.SectionBase):
@base.returns_single_item
def add(self, peer, *peers, **kwargs):
"""Adds peers to the bootstrap list.
Parameters
----------
peer : str
IPFS MultiAddr of a peer to add to the list
Returns
-------
dict
"""
args = (peer,) + peers
return self._client.request('/bootstrap/add', args, decoder='json', **kwargs)
@base.returns_single_item
def list(self, **kwargs):
"""Returns the addresses of peers used during initial discovery of the
IPFS network.
Peers are output in the format ``<multiaddr>/<peerID>``.
.. code-block:: python
>>> client.bootstrap.list()
{'Peers': [
'/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ',
'/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRa … ca9z',
'/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKD … KrGM',
…
'/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3p … QBU3'
]}
Returns
-------
dict
+-------+-------------------------------+
| Peers | List of known bootstrap peers |
+-------+-------------------------------+
"""
return self._client.request('/bootstrap', decoder='json', **kwargs)
@base.returns_single_item
def rm(self, peer, *peers, **kwargs):
"""Removes peers from the bootstrap list.
Parameters
----------
peer : str
IPFS MultiAddr of a peer to remove from the list
Returns
-------
dict
"""
args = (peer,) + peers
return self._client.request('/bootstrap/rm', args, decoder='json', **kwargs) | mit | -5,706,565,431,632,337,000 | 22.455882 | 79 | 0.602258 | false | 2.914077 | false | false | false |
bootcamptropa/django | walladog/settings.py | 1 | 4992 | """
Django settings for walladog project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=e8m1h9)0lazq)%7bcho@a9w^_kfd)_plf_teg8_jp^ax9&k!p'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# project apps
'categories',
'images',
'products',
'races',
'saveserches',
'states',
'transactions',
'users',
'walladog',
# REST framework
'rest_framework',
'oauth2_provider',
'corsheaders',
'django.contrib.gis',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware'
)
ROOT_URLCONF = 'walladog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'walladog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.mysql',
'NAME': 'walladog_dev',
'USER': 'walladog',
'PASSWORD': 'Keepcoding123',
'HOST': 'mysql.develjitsu.com',
'PORT': '3306',
},
# 'prod': {
# 'ENGINE': 'django.db.backends.mysql',
# 'NAME': 'walladog',
# 'USER': 'walladog',
# 'PASSWORD': 'xxx',
# 'HOST': 'mysql.develjitsu.com',
# 'PORT': '3306',
# },
}
# In case you want to work locally with SQLite
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'es-es'
# TIME_ZONE = 'UTC'
# Europe/Madrid
TIME_ZONE = 'Europe/Madrid'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# REST FRAMEWORK
REST_FRAMEWORK = {
'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler',
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'DEFAULT_AUTHENTICATION_CLASSES': (
'oauth2_provider.ext.rest_framework.OAuth2Authentication',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_METADATA_CLASS': 'rest_framework.metadata.SimpleMetadata'
}
OAUTH2_PROVIDER = {
# this is the list of available scopes
'SCOPES': {
'read': 'Read scope',
'write': 'Write scope',
'groups': 'Access to your groups'}
}
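# Illustrative only: with django-oauth-toolkit's URLs wired into urls.py (not
# shown here), a password-grant token request against these scopes would look
# roughly like:
#
#   curl -X POST -u "<client_id>:<client_secret>" \
#        -d "grant_type=password&username=...&password=...&scope=read write" \
#        http://localhost:8000/o/token/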
# CORS
# APPEND_SLASH = False
CORS_ORIGIN_ALLOW_ALL = True
CORS_ORIGIN_WHITELIST = (
'http://localhost:9000',
'http://www.walladog.com',
'walladog.com'
)
# CORS_URLS_REGEX = r'^/api/.*$'
STATIC_URL = '/static/'
# DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
#
# AWS_ACCESS_KEY_ID = 'AKIAJYDV7TEBJS6JWEEQ'
# AWS_SECRET_ACCESS_KEY = '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'
# AWS_STORAGE_BUCKET_NAME = 'walladog'
# AWS_QUERYSTRING_AUTH = False
# AWS_ACCESS_KEY_ID = os.environ['AKIAJYDV7TEBJS6JWEEQ']
# AWS_SECRET_ACCESS_KEY = os.environ['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']
# AWS_STORAGE_BUCKET_NAME = os.environ['walladog']
# MEDIA_URL = 'http://%s.s3.amazonaws.com/' % AWS_STORAGE_BUCKET_NAME
# DEFAULT_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage" | mit | 7,755,102,613,394,406,000 | 25.700535 | 82 | 0.664063 | false | 3.171537 | false | false | false |
IntelLabs/numba | numba/core/rewrites/registry.py | 7 | 3651 | from collections import defaultdict
from numba.core import config
class Rewrite(object):
'''Defines the abstract base class for Numba rewrites.
'''
def __init__(self, state=None):
'''Constructor for the Rewrite class.
'''
pass
def match(self, func_ir, block, typemap, calltypes):
'''Overload this method to check an IR block for matching terms in the
rewrite.
'''
return False
def apply(self):
'''Overload this method to return a rewritten IR basic block when a
match has been found.
'''
raise NotImplementedError("Abstract Rewrite.apply() called!")
class RewriteRegistry(object):
'''Defines a registry for Numba rewrites.
'''
_kinds = frozenset(['before-inference', 'after-inference'])
def __init__(self):
'''Constructor for the rewrite registry. Initializes the rewrites
member to an empty list.
'''
self.rewrites = defaultdict(list)
def register(self, kind):
"""
Decorator adding a subclass of Rewrite to the registry for
the given *kind*.
"""
if kind not in self._kinds:
raise KeyError("invalid kind %r" % (kind,))
def do_register(rewrite_cls):
if not issubclass(rewrite_cls, Rewrite):
raise TypeError('{0} is not a subclass of Rewrite'.format(
rewrite_cls))
self.rewrites[kind].append(rewrite_cls)
return rewrite_cls
return do_register
def apply(self, kind, state):
'''Given a pipeline and a dictionary of basic blocks, exhaustively
attempt to apply all registered rewrites to all basic blocks.
'''
assert kind in self._kinds
blocks = state.func_ir.blocks
old_blocks = blocks.copy()
for rewrite_cls in self.rewrites[kind]:
# Exhaustively apply a rewrite until it stops matching.
rewrite = rewrite_cls(state)
work_list = list(blocks.items())
while work_list:
key, block = work_list.pop()
matches = rewrite.match(state.func_ir, block, state.typemap,
state.calltypes)
if matches:
if config.DEBUG or config.DUMP_IR:
print("_" * 70)
print("REWRITING (%s):" % rewrite_cls.__name__)
block.dump()
print("_" * 60)
new_block = rewrite.apply()
blocks[key] = new_block
work_list.append((key, new_block))
if config.DEBUG or config.DUMP_IR:
new_block.dump()
print("_" * 70)
# If any blocks were changed, perform a sanity check.
for key, block in blocks.items():
if block != old_blocks[key]:
block.verify()
# Some passes, e.g. _inline_const_arraycall are known to occasionally
# do invalid things WRT ir.Del, others, e.g. RewriteArrayExprs do valid
# things with ir.Del, but the placement is not optimal. The lines below
# fix-up the IR so that ref counts are valid and optimally placed,
# see #4093 for context. This has to be run here opposed to in
# apply() as the CFG needs computing so full IR is needed.
from numba.core import postproc
post_proc = postproc.PostProcessor(state.func_ir)
post_proc.run()
rewrite_registry = RewriteRegistry()
register_rewrite = rewrite_registry.register
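# Illustrative only: a do-nothing rewrite registered for the 'before-inference'
# stage.  The class name and matching logic are made up for this sketch; a real
# rewrite would return True from match() and build a new block in apply().
@register_rewrite('before-inference')
class _ExampleNoopRewrite(Rewrite):
    def match(self, func_ir, block, typemap, calltypes):
        self._block = block
        return False  # never fires, so apply() is never called

    def apply(self):
        return self._block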
| bsd-2-clause | -8,706,756,071,578,778,000 | 36.255102 | 79 | 0.567516 | false | 4.447016 | false | false | false |
docker/docker-py | tests/unit/client_test.py | 3 | 10129 | import datetime
import os
import unittest
import docker
import pytest
from docker.constants import (
DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS,
DEFAULT_MAX_POOL_SIZE, IS_WINDOWS_PLATFORM
)
from docker.utils import kwargs_from_env
from . import fake_api
try:
from unittest import mock
except ImportError:
import mock
TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs')
POOL_SIZE = 20
class ClientTest(unittest.TestCase):
@mock.patch('docker.api.APIClient.events')
def test_events(self, mock_func):
since = datetime.datetime(2016, 1, 1, 0, 0)
mock_func.return_value = fake_api.get_fake_events()[1]
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.events(since=since) == mock_func.return_value
mock_func.assert_called_with(since=since)
@mock.patch('docker.api.APIClient.info')
def test_info(self, mock_func):
mock_func.return_value = fake_api.get_fake_info()[1]
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.info() == mock_func.return_value
mock_func.assert_called_with()
@mock.patch('docker.api.APIClient.ping')
def test_ping(self, mock_func):
mock_func.return_value = True
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.ping() is True
mock_func.assert_called_with()
@mock.patch('docker.api.APIClient.version')
def test_version(self, mock_func):
mock_func.return_value = fake_api.get_fake_version()[1]
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.version() == mock_func.return_value
mock_func.assert_called_with()
def test_call_api_client_method(self):
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
with pytest.raises(AttributeError) as cm:
client.create_container()
s = cm.exconly()
assert "'DockerClient' object has no attribute 'create_container'" in s
assert "this method is now on the object APIClient" in s
with pytest.raises(AttributeError) as cm:
client.abcdef()
s = cm.exconly()
assert "'DockerClient' object has no attribute 'abcdef'" in s
assert "this method is now on the object APIClient" not in s
def test_call_containers(self):
client = docker.DockerClient(
version=DEFAULT_DOCKER_API_VERSION,
**kwargs_from_env())
with pytest.raises(TypeError) as cm:
client.containers()
s = cm.exconly()
assert "'ContainerCollection' object is not callable" in s
assert "docker.APIClient" in s
@pytest.mark.skipif(
IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
)
@mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
def test_default_pool_size_unix(self, mock_obj):
client = docker.DockerClient(
version=DEFAULT_DOCKER_API_VERSION
)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",
60,
maxsize=DEFAULT_MAX_POOL_SIZE
)
@pytest.mark.skipif(
not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
)
@mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
def test_default_pool_size_win(self, mock_obj):
client = docker.DockerClient(
version=DEFAULT_DOCKER_API_VERSION
)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
mock_obj.assert_called_once_with("//./pipe/docker_engine",
60,
maxsize=DEFAULT_MAX_POOL_SIZE
)
@pytest.mark.skipif(
IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
)
@mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
def test_pool_size_unix(self, mock_obj):
client = docker.DockerClient(
version=DEFAULT_DOCKER_API_VERSION,
max_pool_size=POOL_SIZE
)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",
60,
maxsize=POOL_SIZE
)
@pytest.mark.skipif(
not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
)
@mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
def test_pool_size_win(self, mock_obj):
client = docker.DockerClient(
version=DEFAULT_DOCKER_API_VERSION,
max_pool_size=POOL_SIZE
)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
mock_obj.assert_called_once_with("//./pipe/docker_engine",
60,
maxsize=POOL_SIZE
)
class FromEnvTest(unittest.TestCase):
def setUp(self):
self.os_environ = os.environ.copy()
def tearDown(self):
os.environ = self.os_environ
def test_from_env(self):
"""Test that environment variables are passed through to
utils.kwargs_from_env(). KwargsFromEnvTest tests that environment
variables are parsed correctly."""
os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='1')
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api.base_url == "https://192.168.59.103:2376"
def test_from_env_with_version(self):
os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376',
DOCKER_CERT_PATH=TEST_CERT_DIR,
DOCKER_TLS_VERIFY='1')
client = docker.from_env(version='2.32')
assert client.api.base_url == "https://192.168.59.103:2376"
assert client.api._version == '2.32'
def test_from_env_without_version_uses_default(self):
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api._version == DEFAULT_DOCKER_API_VERSION
def test_from_env_without_timeout_uses_default(self):
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
assert client.api.timeout == DEFAULT_TIMEOUT_SECONDS
@pytest.mark.skipif(
IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
)
@mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
def test_default_pool_size_from_env_unix(self, mock_obj):
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",
60,
maxsize=DEFAULT_MAX_POOL_SIZE
)
@pytest.mark.skipif(
not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
)
@mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
def test_default_pool_size_from_env_win(self, mock_obj):
client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
mock_obj.assert_called_once_with("//./pipe/docker_engine",
60,
maxsize=DEFAULT_MAX_POOL_SIZE
)
@pytest.mark.skipif(
IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux'
)
@mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool")
def test_pool_size_from_env_unix(self, mock_obj):
client = docker.from_env(
version=DEFAULT_DOCKER_API_VERSION,
max_pool_size=POOL_SIZE
)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",
60,
maxsize=POOL_SIZE
)
@pytest.mark.skipif(
not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows'
)
@mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool")
def test_pool_size_from_env_win(self, mock_obj):
client = docker.from_env(
version=DEFAULT_DOCKER_API_VERSION,
max_pool_size=POOL_SIZE
)
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
mock_obj.assert_called_once_with("//./pipe/docker_engine",
60,
maxsize=POOL_SIZE
)
| apache-2.0 | 2,610,971,818,963,738,600 | 36.794776 | 79 | 0.573502 | false | 4.040287 | true | false | false |
mechtaev/angelix | src/repair/localization.py | 1 | 4985 | import sys
from math import sqrt, ceil
import logging
logger = logging.getLogger(__name__)
class NoNegativeTestException(Exception):
pass
def ochiai(executed_passing, executed_failing, total_passing, total_failing):
if not total_failing > 0:
raise NoNegativeTestException()
if executed_failing + executed_passing == 0:
return 0
return executed_failing / sqrt(total_failing * (executed_passing + executed_failing))
def jaccard(executed_passing, executed_failing, total_passing, total_failing):
if not total_failing > 0:
raise NoNegativeTestException()
return executed_failing / (total_failing + executed_passing)
def tarantula(executed_passing, executed_failing, total_passing, total_failing):
if not total_failing > 0:
raise NoNegativeTestException()
if executed_failing + executed_passing == 0:
return 0
return ((executed_failing / total_failing) /
((executed_failing / total_failing) + (executed_passing / total_passing)))
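# Illustrative check (not part of the original angelix sources): with two
# failing tests and one of two passing tests covering an expression,
# ochiai(1, 2, 2, 2) = 2 / sqrt(2 * 3) ~= 0.816, jaccard(1, 2, 2, 2) = 2 / 3,
# and tarantula(1, 2, 2, 2) = 1 / 1.5 ~= 0.667.
def _example_scores():
    # Hypothetical helper, kept only to show how the formulas are called.
    return (ochiai(1, 2, 2, 2), jaccard(1, 2, 2, 2), tarantula(1, 2, 2, 2))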
class Localizer:
def __init__(self, config, lines):
self.lines = lines
self.config = config
def __call__(self, test_suite, all_positive, all_negative):
'''
test_suite: tests under consideration
all_positive, all_negative: (test * trace) list
trace: expression list
computes config['suspicious']/config['group_size'] groups
each consisting of config['group_size'] suspicious expressions
'''
group_size = self.config['group_size']
suspicious = self.config['suspicious']
if self.config['localization'] == 'ochiai':
formula = ochiai
elif self.config['localization'] == 'jaccard':
formula = jaccard
elif self.config['localization'] == 'tarantula':
formula = tarantula
# first, remove irrelevant information:
positive = []
negative = []
if not self.config['invalid_localization']:
for test, trace in all_positive:
if test in test_suite:
positive.append((test, trace))
for test, trace in all_negative:
if test in test_suite:
negative.append((test, trace))
else:
positive = all_positive
negative = all_negative
all = set()
for _, trace in positive:
all |= set(trace)
for _, trace in negative:
all |= set(trace)
        # update suspicious
if self.config['localize_only']:
suspicious = len(all)
logger.info('trace size: {}'.format(suspicious))
executed_positive = dict()
executed_negative = dict()
for e in all:
executed_positive[e] = 0
executed_negative[e] = 0
for _, trace in positive:
executed = set(trace)
for e in executed:
executed_positive[e] += 1
for _, trace in negative:
executed = set(trace)
for e in executed:
executed_negative[e] += 1
with_score = []
def is_selected(expr):
return expr[0] in self.lines
if self.lines is not None:
filtered = filter(is_selected, all)
all = list(filtered)
for e in all:
try:
score = formula(executed_positive[e], executed_negative[e],
len(positive), len(negative))
if not (score == 0.0): # 0.0 mean not executed by failing test
with_score.append((e, score))
except NoNegativeTestException:
logger.info("No negative test exists")
exit(0)
ranking = sorted(with_score, key=lambda r: r[1], reverse=True)
if self.config['group_by_score']:
top = ranking[:suspicious]
else:
if self.config['localize_from_bottom']:
top = sorted(ranking[:suspicious], key=lambda r: r[0][0], reverse=True) # sort by location backward
else:
top = sorted(ranking[:suspicious], key=lambda r: r[0][0]) # sort by location
groups_with_score = []
for i in range(0, ceil(suspicious / group_size)):
if len(top) == 0:
break
group = []
total_score = 0
for j in range(0, group_size):
if len(top) == 0:
break
expr, score = top.pop(0)
total_score += score
group.append(expr)
groups_with_score.append((group, total_score))
sorted_groups = sorted(groups_with_score, key=lambda r: r[1], reverse=True)
groups = []
for (group, score) in sorted_groups:
groups.append(group)
logger.info("selected expressions {} with group score {:.5} ".format(group, score))
return groups
| mit | -2,496,186,853,963,597,300 | 30.751592 | 116 | 0.558275 | false | 4.312284 | true | false | false |
dengdan/seglink | test/test_preprocessing.py | 1 | 7939 | # Copyright 2016 Paul Balanca. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""test code to make sure the preprocessing works all right"""
import numpy as np
import tensorflow as tf
from datasets import dataset_factory
from preprocessing import ssd_vgg_preprocessing
from tf_extended import seglink as tfe_seglink
import util
slim = tf.contrib.slim
# =========================================================================== #
# I/O and preprocessing Flags.
# =========================================================================== #
tf.app.flags.DEFINE_integer(
'num_readers', 8,
'The number of parallel readers that read data from the dataset.')
tf.app.flags.DEFINE_integer(
'num_preprocessing_threads', 4,
'The number of threads used to create the batches.')
# =========================================================================== #
# Dataset Flags.
# =========================================================================== #
tf.app.flags.DEFINE_string(
'dataset_name', 'synthtext', 'The name of the dataset to load.')
tf.app.flags.DEFINE_string(
'dataset_split_name', 'train', 'The name of the train/test split.')
tf.app.flags.DEFINE_string(
'dataset_dir', '~/dataset/SSD-tf/SynthText', 'The directory where the dataset files are stored.')
tf.app.flags.DEFINE_string(
'model_name', 'ssd_vgg', 'The name of the architecture to train.')
tf.app.flags.DEFINE_integer(
'batch_size', 2, 'The number of samples in each batch.')
tf.app.flags.DEFINE_integer(
'train_image_size', 512, 'Train image size')
tf.app.flags.DEFINE_integer('max_number_of_steps', None,
'The maximum number of training steps.')
FLAGS = tf.app.flags.FLAGS
# =========================================================================== #
# Main training routine.
# =========================================================================== #
def main(_):
if not FLAGS.dataset_dir:
raise ValueError('You must supply the dataset directory with --dataset_dir')
tf.logging.set_verbosity(tf.logging.DEBUG)
batch_size = FLAGS.batch_size;
with tf.Graph().as_default():
# Select the dataset.
dataset = dataset_factory.get_dataset(
FLAGS.dataset_name, FLAGS.dataset_split_name, FLAGS.dataset_dir)
util.proc.set_proc_name(FLAGS.model_name + '_' + FLAGS.dataset_name)
# =================================================================== #
# Create a dataset provider and batches.
# =================================================================== #
with tf.device('/cpu:0'):
with tf.name_scope(FLAGS.dataset_name + '_data_provider'):
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset,
num_readers=FLAGS.num_readers,
common_queue_capacity=20 * batch_size,
common_queue_min=10 * batch_size,
shuffle=True)
# Get for SSD network: image, labels, bboxes.
[image, shape, gignored, gbboxes, x1, x2, x3, x4, y1, y2, y3, y4] = provider.get(['image', 'shape',
'object/ignored',
'object/bbox',
'object/oriented_bbox/x1',
'object/oriented_bbox/x2',
'object/oriented_bbox/x3',
'object/oriented_bbox/x4',
'object/oriented_bbox/y1',
'object/oriented_bbox/y2',
'object/oriented_bbox/y3',
'object/oriented_bbox/y4'
])
gxs = tf.transpose(tf.stack([x1, x2, x3, x4])) #shape = (N, 4)
gys = tf.transpose(tf.stack([y1, y2, y3, y4]))
image = tf.identity(image, 'input_image')
# Pre-processing image, labels and bboxes.
image_shape = (FLAGS.train_image_size, FLAGS.train_image_size)
image, gignored, gbboxes, gxs, gys = \
ssd_vgg_preprocessing.preprocess_image(image, gignored, gbboxes, gxs, gys,
out_shape=image_shape,
is_training = True)
gxs = gxs * tf.cast(image_shape[1], gxs.dtype)
gys = gys * tf.cast(image_shape[0], gys.dtype)
gorbboxes = tfe_seglink.tf_min_area_rect(gxs, gys)
image = tf.identity(image, 'processed_image')
with tf.Session() as sess:
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
i = 0
while i < 2:
i += 1
image_data, label_data, bbox_data, xs_data, ys_data, orbboxes = \
sess.run([image, gignored, gbboxes, gxs, gys, gorbboxes])
image_data = image_data + [123., 117., 104.]
image_data = np.asarray(image_data, np.uint8)
h, w = image_data.shape[0:-1]
bbox_data = bbox_data * [h, w, h, w]
I_bbox = image_data.copy()
I_xys = image_data.copy()
I_orbbox = image_data.copy()
for idx in range(bbox_data.shape[0]):
def draw_bbox():
y1, x1, y2, x2 = bbox_data[idx, :]
util.img.rectangle(I_bbox, (x1, y1), (x2, y2), color = util.img.COLOR_WHITE)
def draw_xys():
points = zip(xs_data[idx, :], ys_data[idx, :])
cnts = util.img.points_to_contours(points);
util.img.draw_contours(I_xys, cnts, -1, color = util.img.COLOR_GREEN)
def draw_orbbox():
orbox = orbboxes[idx, :]
import cv2
rect = ((orbox[0], orbox[1]), (orbox[2], orbox[3]), orbox[4])
box = cv2.cv.BoxPoints(rect)
box = np.int0(box)
cv2.drawContours(I_orbbox, [box], 0, util.img.COLOR_RGB_RED, 1)
draw_bbox()
draw_xys();
draw_orbbox();
print util.sit(I_bbox)
print util.sit(I_xys)
print util.sit(I_orbbox)
                print 'check the images and make sure that bboxes in different colors are the same.'
coord.request_stop()
coord.join(threads)
if __name__ == '__main__':
tf.app.run()
| gpl-3.0 | 23,782,395,031,242,852 | 49.566879 | 111 | 0.452198 | false | 4.425307 | false | false | false |
vicalloy/django-lb-workflow | lbworkflow/models/config.py | 1 | 13222 | import uuid
from django.db import models
from django.template import Context, Template
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from lbutils import create_instance, get_or_none
from lbworkflow import settings
from lbworkflow.core.helper import safe_eval
class ProcessCategory(models.Model):
uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False)
name = models.CharField(_("Name"), max_length=255, db_index=True)
oid = models.IntegerField(_("Order"), default=999)
is_active = models.BooleanField(_("Is active"), default=True)
class Meta:
ordering = ["oid"]
def __str__(self):
return self.name
def natural_key(self):
return (self.uuid,)
def get_can_apply_processes(self, user, force_fetch=False):
processes = getattr(self, "__cache__can_apply_processes", None)
if processes and not force_fetch:
return processes
return self.process_set.all()
def get_report_links(self):
return ProcessReportLink.objects.filter(category=self)
def get_all_process(self):
return self.process_set.order_by("oid")
class ProcessReportLink(models.Model):
uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False)
category = models.ForeignKey(
ProcessCategory,
blank=True,
null=True,
on_delete=models.SET_NULL,
verbose_name="Category",
)
name = models.CharField("Name", max_length=255)
url = models.CharField("URL", max_length=255)
open_in_new_window = models.BooleanField(
"Open in new window", default=False
)
perm = models.CharField(
"Permission",
max_length=255,
blank=True,
help_text="Permission to view this report",
)
oid = models.IntegerField("Order", default=999)
is_active = models.BooleanField("Is active", default=True)
class Meta:
ordering = ["oid"]
def __str__(self):
return self.name
def get_url(self):
try:
url_component_list = self.url.split(" ")
return reverse(url_component_list[0], args=url_component_list[1:])
except Exception:
return self.url
def natural_key(self):
return (self.uuid,)
class Process(models.Model):
"""
A process holds the map that describes the flow of work.
"""
code = models.CharField(
"Code",
max_length=100,
unique=True,
help_text="A unique code to identify process",
)
prefix = models.CharField(
"Prefix",
max_length=8,
default="",
blank=True,
help_text="prefix for process NO.",
)
name = models.CharField(
"Name", max_length=255, help_text="Name for this process"
)
category = models.ForeignKey(
ProcessCategory,
blank=True,
null=True,
on_delete=models.SET_NULL,
verbose_name="Category",
)
note = models.TextField("Note", blank=True)
oid = models.IntegerField("Order", default=999)
is_active = models.BooleanField("Is active", default=True)
ext_data = JSONField(default="{}")
class Meta:
verbose_name = "Process"
ordering = ["oid"]
permissions = (("sft_mgr_process", "workflow - Config"),)
def natural_key(self):
return (self.code,)
def __str__(self):
return self.name
def get_draft_active(self):
return get_or_none(Node, process=self, status="draft")
def get_rejected_active(self):
return get_or_none(Node, process=self, status="rejected")
def get_given_up_active(self):
return get_or_none(Node, process=self, status="given up")
def get_rollback_transition(self, in_node, out_node):
transition = Transition(
name="Rollback",
code="rollback",
process=self,
is_agree=False,
can_auto_agree=False,
input_node=in_node,
output_node=out_node,
)
return transition
def get_give_up_transition(self, in_node):
output = self.get_given_up_active()
transition = Transition(
name="Give up",
code="give up",
process=self,
is_agree=False,
can_auto_agree=False,
input_node=in_node,
output_node=output,
)
return transition
def get_back_to_transition(self, in_node, out_node=None):
transition = Transition(
name="Back to",
code="back to",
process=self,
is_agree=False,
can_auto_agree=False,
input_node=in_node,
output_node=out_node,
)
return transition
def get_reject_transition(self, in_node):
transition = Transition(
name="Reject",
code="reject",
process=self,
is_agree=False,
can_auto_agree=False,
input_node=in_node,
output_node=self.get_rejected_active(),
)
return transition
def get_add_assignee_transition(self, in_node):
transition = Transition(
name="Add assignee",
code="add assignee",
process=self,
is_agree=False,
can_auto_agree=False,
input_node=in_node,
output_node=in_node,
)
return transition
class Node(models.Model):
"""
Node is the states of an instance.
"""
STATUS_CHOICES = (
("draft", "Draft"),
("given up", "Given up"),
("rejected", "Rejected"),
("in progress", "In Progress"),
("completed", "Completed"),
)
AUDIT_PAGE_TYPE_CHOICES = (
("view", "view"),
("edit", "Edit"),
)
TYPE_CHOICES = (
("node", "Node"),
("router", "Router"),
)
uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False)
process = models.ForeignKey(
"Process", on_delete=models.CASCADE, verbose_name="Process"
)
name = models.CharField("Name", max_length=255)
code = models.CharField("Code", max_length=255, blank=True)
step = models.IntegerField(
"Step",
default=0,
help_text="",
)
status = models.CharField(
"Status", max_length=16, default="in progress", choices=STATUS_CHOICES
)
audit_page_type = models.CharField(
"Audit page type",
max_length=64,
choices=AUDIT_PAGE_TYPE_CHOICES,
help_text="If this node can edit, will auto goto edit mode when audit.",
default="view",
)
node_type = models.CharField(
"Status", max_length=16, default="node", choices=TYPE_CHOICES
)
can_edit = models.BooleanField("Can edit", default=False)
can_reject = models.BooleanField("Can reject", default=True)
can_give_up = models.BooleanField("Can give up", default=True)
operators = models.TextField("Audit users", blank=True)
notice_users = models.TextField("Notice users", blank=True)
share_users = models.TextField("Share users", blank=True)
note = models.TextField("Note", blank=True)
    # If not audited after xx days, send a reminder.
is_active = models.BooleanField("Is active", default=True)
ext_data = JSONField(default="{}")
def __str__(self):
return self.name
def natural_key(self):
return (self.uuid,)
def is_submitted(self):
return self.status in ["in progress", "completed"]
def get_operators(self, owner, operator, instance=None):
return create_instance(
settings.USER_PARSER, self.operators, instance, operator, owner
).parse()
def get_notice_users(self, owner, operator, instance=None):
return create_instance(
settings.USER_PARSER, self.notice_users, instance, operator, owner
).parse()
def get_share_users(self, owner, operator, instance=None):
return create_instance(
settings.USER_PARSER, self.share_users, instance, operator, owner
).parse()
def get_users(self, owner, operator, instance=None):
operators = self.get_operators(owner, operator, instance)
notice_users = self.get_notice_users(owner, operator, instance)
share_users = self.get_share_users(owner, operator, instance)
return operators, notice_users, share_users
class Transition(models.Model):
"""
A Transition connects two node: a From and a To activity.
"""
ROUTING_RULE_CHOICES = (
("split", "split"),
("joint", "Joint"),
)
uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False)
process = models.ForeignKey(
"Process", on_delete=models.CASCADE, verbose_name="Process"
)
name = models.CharField(
"Name",
max_length=100,
default="Agree",
help_text="It also the action's name, like: Agree/Submit",
)
code = models.CharField( # 'back to', 'rollback'
"Code",
max_length=100,
blank=True,
)
is_agree = models.BooleanField(
"Is agree",
default=True,
help_text="User only need agree one time in one workflow",
)
can_auto_agree = models.BooleanField(
"If can auto agree",
default=True,
help_text="If user agreed in previous steps will auto agree",
)
routing_rule = models.CharField(
"Routing rule",
max_length=16,
default="split",
choices=ROUTING_RULE_CHOICES,
help_text="joint: do transition after all work item finished. joint: do transition immediately",
)
input_node = models.ForeignKey(
Node,
verbose_name="Input node",
null=True,
on_delete=models.SET_NULL,
related_name="input_transitions",
help_text="",
)
output_node = models.ForeignKey(
Node,
verbose_name="Output node",
null=True,
on_delete=models.SET_NULL,
related_name="output_transitions",
help_text="",
)
app = models.ForeignKey(
"App",
verbose_name="Application to perform",
null=True,
on_delete=models.SET_NULL,
blank=True,
)
app_param = models.CharField(
max_length=100,
verbose_name="Param for application",
null=True,
blank=True,
help_text="Depend on App config",
)
condition = models.TextField(
"Condition",
blank=True,
help_text="Uses the Python syntax.ex: `o.leave_days>3`",
) # eval(t.condition, {'o': obj})
note = models.TextField("Note", blank=True)
oid = models.IntegerField("Order", default=999)
is_active = models.BooleanField("Is active", default=True)
ext_data = JSONField(default="{}")
def __str__(self):
return "%s - %s" % (self.process.name, self.name)
def natural_key(self):
return (self.uuid,)
def is_match_condition(self, wf_obj):
if not self.condition.strip():
return True
return safe_eval(self.condition, {"o": wf_obj})
def get_condition_descn(self):
return self.condition.split("#")[-1].strip()
def as_simple_agree_transition(self):
self.pk = None
if not self.code:
self.code = "agree"
def get_app_url(self, task):
return self.app.get_url(task, self)
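# Illustrative note (not part of the original app): a Transition condition is a
# plain Python expression evaluated against the workflow object, as the
# help_text above suggests (e.g. "o.leave_days>3"). is_match_condition() passes
# the object in as "o" via safe_eval, roughly as sketched here.
def _condition_example(wf_obj):
    # Hypothetical helper for illustration only.
    return safe_eval("o.leave_days > 3", {"o": wf_obj})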
class App(models.Model):
"""
An application is a python view that can be called by URL.
"""
APP_TYPE_CHOICES = (("url", "URL"),)
uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=100, help_text="Name")
app_type = models.CharField(
"Type", max_length=255, choices=APP_TYPE_CHOICES, default="url"
)
action = models.CharField(
max_length=255,
blank=True,
help_text="URL: It can be url or django's url name. If it's blank will use transition's app param",
)
note = models.TextField(blank=False)
def __str__(self):
return self.name
def natural_key(self):
return (self.uuid,)
def get_url(self, task, transition):
def render(templ_str, ctx):
return Template(templ_str).render(Context(ctx))
ts_id = transition.pk or transition.code
ctx = {
"wi": task,
"wf_code": transition.process.code,
"ts": transition,
"ts_id": ts_id,
"in": task.instance,
"o": task.instance.content_object,
}
url = "wf_process"
if self.action:
url = self.action
elif transition.app_param:
url = transition.app_param
try:
url_component_list = url.split(" ")
url_param = [render(e, ctx) for e in url_component_list[1:]]
url = reverse(url_component_list[0], args=url_param)
except Exception:
pass
if "?" not in url:
url = "%s?a=" % url
url = "%s&ts_id={{ts_id}}&wi_id={{wi.id}}" % url
return render(url, ctx)
| mit | 5,586,027,824,373,666,000 | 28.447661 | 107 | 0.586674 | false | 3.874011 | false | false | false |
Stanford-Online/edx-analytics-data-api | analyticsdataserver/views.py | 1 | 3719 | from django.conf import settings
from django.db import connections
from django.http import HttpResponse
from rest_framework import permissions, schemas
from rest_framework.permissions import AllowAny
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework_swagger.renderers import OpenAPIRenderer, SwaggerUIRenderer
def handle_internal_server_error(_request):
"""Notify the client that an error occurred processing the request without providing any detail."""
return _handle_error(500)
def handle_missing_resource_error(_request, exception=None): # pylint: disable=unused-argument
"""Notify the client that the requested resource could not be found."""
return _handle_error(404)
def _handle_error(status_code):
info = {
'status': status_code
}
renderer = JSONRenderer()
content_type = '{media}; charset={charset}'.format(media=renderer.media_type, charset=renderer.charset)
return HttpResponse(renderer.render(info), content_type=content_type, status=status_code)
class SwaggerSchemaView(APIView):
"""
Renders the swagger schema for the documentation regardless of permissions.
"""
permission_classes = [AllowAny]
renderer_classes = [
OpenAPIRenderer,
SwaggerUIRenderer
]
def get(self, _request):
generator = schemas.SchemaGenerator(title='Analytics API')
return Response(generator.get_schema())
class StatusView(APIView):
"""
Simple check to determine if the server is alive
Return no data, a simple 200 OK status code is sufficient to indicate that the server is alive. This endpoint is
public and does not require an authentication token to access it.
"""
permission_classes = (permissions.AllowAny,)
def get(self, request, *args, **kwargs): # pylint: disable=unused-argument
return Response({})
class AuthenticationTestView(APIView):
"""
Verifies that the client is authenticated
Returns HTTP 200 if client is authenticated, HTTP 401 if not authenticated
"""
def get(self, request, *args, **kwargs): # pylint: disable=unused-argument
return Response({})
class HealthView(APIView):
"""
A more comprehensive check to see if the system is fully operational.
This endpoint is public and does not require an authentication token to access it.
The returned structure contains the following fields:
- overall_status: Can be either "OK" or "UNAVAILABLE".
- detailed_status: More detailed information about the status of the system.
- database_connection: Status of the database connection. Can be either "OK" or "UNAVAILABLE".
"""
permission_classes = (permissions.AllowAny,)
def get(self, request, *args, **kwargs): # pylint: disable=unused-argument
OK = 'OK'
UNAVAILABLE = 'UNAVAILABLE'
overall_status = UNAVAILABLE
db_conn_status = UNAVAILABLE
try:
connection_name = getattr(settings, 'ANALYTICS_DATABASE', 'default')
cursor = connections[connection_name].cursor()
try:
cursor.execute("SELECT 1")
cursor.fetchone()
overall_status = OK
db_conn_status = OK
finally:
cursor.close()
except Exception: # pylint: disable=broad-except
pass
response = {
"overall_status": overall_status,
"detailed_status": {
'database_connection': db_conn_status
}
}
return Response(response, status=200 if overall_status == OK else 503)
| agpl-3.0 | -8,530,381,616,471,930,000 | 31.060345 | 116 | 0.677333 | false | 4.502421 | false | false | false |
danforthcenter/plantcv | plantcv/plantcv/closing.py | 2 | 1411 | import os
import numpy as np
from skimage import morphology
from plantcv.plantcv import params
from plantcv.plantcv import print_image
from plantcv.plantcv import plot_image
from plantcv.plantcv import fatal_error
def closing(gray_img, kernel=None):
"""Wrapper for scikit-image closing functions. Opening can remove small dark spots (i.e. pepper).
Inputs:
gray_img = input image (grayscale or binary)
kernel = optional neighborhood, expressed as an array of 1s and 0s. If None, use cross-shaped structuring element.
:param gray_img: ndarray
:param kernel = ndarray
:return filtered_img: ndarray
"""
params.device += 1
# Make sure the image is binary/grayscale
if len(np.shape(gray_img)) != 2:
fatal_error("Input image must be grayscale or binary")
# If image is binary use the faster method
if len(np.unique(gray_img)) == 2:
bool_img = morphology.binary_closing(image=gray_img, selem=kernel)
filtered_img = np.copy(bool_img.astype(np.uint8) * 255)
# Otherwise use method appropriate for grayscale images
else:
filtered_img = morphology.closing(gray_img, kernel)
if params.debug == 'print':
print_image(filtered_img, os.path.join(params.debug_outdir, str(params.device) + '_opening' + '.png'))
elif params.debug == 'plot':
plot_image(filtered_img, cmap='gray')
return filtered_img
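# Illustrative call (not part of the original PlantCV sources): close a single
# dark "pepper" pixel inside a small binary mask using the default
# cross-shaped structuring element.
def _example_closing():
    # Hypothetical helper for illustration only.
    mask = np.zeros((5, 5), dtype=np.uint8)
    mask[1:4, 1:4] = 255
    mask[2, 2] = 0  # the dark spot that closing should fill
    return closing(mask)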
| mit | -4,691,123,303,530,248,000 | 33.414634 | 120 | 0.693125 | false | 3.59949 | false | false | false |
cslucano/scrapping-admision | admision/spiders/ingresantes_spider.py | 1 | 1952 | from scrapy.spider import Spider
from scrapy.selector import Selector
from scrapy.http import Request
from admision.items import IngresanteItem
class IngresantesSpider(Spider):
name = "ingresantes"
allowed_domains = ["admision.uni.edu.pe"]
start_urls = [
"http://www.admision.uni.edu.pe/resultado_adm.php"
]
def parse(self, response):
base_url = response.url
paginas = range(1,271)
for pagina in paginas:
url = '%s?pagina=%s' % (base_url, pagina)
yield Request(url, callback=self.parse_page)
def parse_page(self, response):
sel = Selector(response)
codigos = sel.xpath('/html/body/table/tr/td[2]/div/table/tr[2]/td/table/tr/td/table/tr/td/table[3]/tr[position()>1]')
items = []
for codigo in codigos:
item = IngresanteItem()
item['codigo'] = codigo.xpath('td[2]/text()').extract()
item['nombres'] = codigo.xpath('td[3]/text()').extract()
item['p1'] = codigo.xpath('td[4]/text()').extract()
item['p2'] = codigo.xpath('td[5]/text()').extract()
item['p3'] = codigo.xpath('td[6]/text()').extract()
item['acumulado'] = codigo.xpath('td[7]/text()').extract()
item['vocacional'] = codigo.xpath('td[8]/text()').extract()
item['cne'] = codigo.xpath('td[9]/text()').extract()
item['arq'] = codigo.xpath('td[10]/text()').extract()
item['final'] = codigo.xpath('td[11]/text()').extract()
item['ingreso'] = codigo.xpath('td[12]/text()').extract()
item['merito_modalidad'] = codigo.xpath('td[13]/text()').extract()
item['modalidad_ingreso'] = codigo.xpath('td[14]/text()').extract()
item['especialidad_ingreso'] = codigo.xpath('td[15]/text()').extract()
item['obs'] = codigo.xpath('td[16]/text()').extract()
items.append(item)
return items
| mit | -7,886,927,225,431,843,000 | 44.395349 | 125 | 0.572746 | false | 3.194763 | false | false | false |
z0rkuM/stockbros | stockbros-ws/Stocks/Stocks.py | 1 | 1638 | #!flask/bin/python
from flask import Blueprint, jsonify, abort, request, make_response
from Authentication.Authentication import *
from util import *
api_stocks = Blueprint('api_stocks', __name__)
@api_stocks.route("/stocks", methods=['GET'])
@auth.login_required
def get_stocks():
documentStocks = db.stocks
stocks = []
for stock in documentStocks.find({}):
stock.pop("_id")
stocks.append(stock)
return jsonify({'stocks': [make_public_stock(s) for s in stocks]})
@api_stocks.route('/stocks/<stock_id>', methods=['GET'])
@auth.login_required
def get_stock(stock_id):
documentStocks = db.stocks
stock = documentStocks.find_one({'stock':stock_id})
if not stock:
abort(404)
stock.pop("_id")
return jsonify({'stocks':[make_public_stock(stock)]})
@api_stocks.route('/stocks', methods=['PUT'])
@auth.login_required
def create_or_update_stock():
if not request.json:
abort(400)
stocksSize = len(request.json)
documentStocks = db.stocks
#matchedCount = 0
#modifiedCount = 0
for stock in request.json:
#result = documentStocks.replace_one({"$and":[{"market": {'$eq':stock['market']}},{"stock": {'$eq':stock['stock']}}]}, stock, True)
#matchedCount = matchedCount + result.matched_count
#modifiedCount = modifiedCount + result.modified_count
result = documentStocks.find_and_modify(query={"$and":[{"market": stock['market']},{"stock": stock['stock']}]}, update=stock, new=True, upsert=True)
#return make_response(jsonify({'stocks':[{ "stocks_inserted" : stocksSize - modifiedCount},{ "stocks_modified" : modifiedCount}]}), 200)
return make_response(jsonify({'stocks':[{ "result" : "ok"}]}), 200)
| mit | 4,684,054,567,005,255,000 | 31.117647 | 150 | 0.691697 | false | 3.067416 | false | false | false |
ssssam/calliope | calliope/diff/__init__.py | 1 | 1093 | # Calliope
# Copyright (C) 2018 Sam Thursfield <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import sys
import calliope
log = logging.getLogger(__name__)
def diff(playlist1, playlist2):
items1 = {item.id(): item for item in playlist1}
items2 = {item.id(): item for item in playlist2}
ids1 = set(items1.keys())
ids2 = set(items2.keys())
diff = sorted(ids1.difference(ids2))
diff_items = [items1[i] for i in diff]
return diff_items
| gpl-2.0 | 551,779,125,708,441,300 | 28.540541 | 71 | 0.721866 | false | 3.619205 | false | false | false |
nodebox/nodebox-opengl | examples/07-filter/07-blend.py | 1 | 1356 | # Add the upper directory (where the nodebox module is) to the search path.
import os, sys; sys.path.insert(0, os.path.join("..",".."))
from nodebox.graphics import *
# Blend modes are used to combine the pixels of two images,
# in different ways than standard transparency.
# NodeBox supports the most common blend modes as filters:
# add(), subtract(), darken(), lighten(), multiply(), screen(), overlay(), hue().
# These can be used to adjust the lighting in an image
# (by blending it with a copy of itself),
# or to obtain many creative texturing effects.
img1 = Image("creature.png")
img2 = Image("creature.png")
def draw(canvas):
canvas.clear()
# Press the mouse to compare the blend to normal ("source over") mode:
if not canvas.mouse.pressed:
image(
# Try changing this to another blend filter:
multiply(img1, img2,
# All blend modes (and mask()) have optional dx and dy parameters
# that define the offset of the blend layer.
dx = canvas.mouse.x - img1.width/2,
dy = canvas.mouse.y - img1.height/2))
else:
image(img1)
image(img2,
x = canvas.mouse.x - img1.width/2,
y = canvas.mouse.y - img1.height/2)
# Start the application:
canvas.fps = 30
canvas.size = 500, 500
canvas.run(draw) | bsd-3-clause | -3,006,431,536,299,593,000 | 33.794872 | 81 | 0.635693 | false | 3.625668 | false | false | false |
0--key/lib | portfolio/Python/scrapy/ldmountaincentre/google.py | 2 | 2822 | import csv
import os
import copy
import json
from decimal import Decimal
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse, FormRequest
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from scrapy.http.cookies import CookieJar
from pricecheck import valid_price
from product_spiders.items import Product, ProductLoaderWithNameStrip as ProductLoader
HERE = os.path.abspath(os.path.dirname(__file__))
KEYS = ('AIzaSyByntiQdJrTyFw78jaVS6-IdMqMuISqX5Y',
'AIzaSyBK8RtRt-v1JHYhbPszQDv2LlAbIZHuyMo',
'AIzaSyDbmM13l-e_f7bpJH3D6bynBhedKfwszYo')
FILTER_DOMAINS = ('ldmountaincentre', 'ebay')
class GoogleSpider(BaseSpider):
name = 'ldmountaincentre-googleapis.com'
allowed_domains = ['googleapis.com']
def start_requests(self):
with open(os.path.join(HERE, 'product_skus.csv')) as f:
reader = csv.DictReader(f)
for i, row in enumerate(reader):
sku = row['sku']
query = (row['name']).replace(' ', '+')
url = 'https://www.googleapis.com/shopping/search/v1/public/products' + \
'?key=%s&country=GB&' + \
'q=%s&restrictBy=condition=new'
yield Request(url % (KEYS[i % len(KEYS)], query), meta={'sku': sku,
'price': row['price'].replace('$', '')})
def _get_item(self, data, i, response):
if i >= len(data.get('items', [])):
return
item = data['items'][i]
pr = Product()
pr['name'] = (item['product']['title'] + ' ' + item.get('product', {}).get('author', {}).get('name', '')).strip()
pr['url'] = item['product']['link']
pr['price'] = Decimal(str(data['items'][i]['product']['inventories'][0]['price']))
pr['sku'] = response.meta['sku']
pr['identifier'] = response.meta['sku']
return pr, item
def parse(self, response):
data = json.loads(response.body)
i = 0
lowest = None
while True:
res = self._get_item(data, i, response)
if not res:
break
pr = res[0]
item = res[1]
invalid_domain = any([self._check_domain(domain, pr['url']) for domain in FILTER_DOMAINS])
if invalid_domain:
i += 1
else:
if valid_price(response.meta['price'], pr['price']) and \
(lowest is None or lowest['price'] > pr['price']):
lowest = pr
i += 1
if lowest:
yield lowest
def _check_domain(self, domain, url):
if domain in url:
return True
| apache-2.0 | 1,565,566,366,338,615,300 | 34.721519 | 121 | 0.557406 | false | 3.71805 | false | false | false |
cl1ck/tutsplus-downloader | Tutsplus.py | 1 | 4354 | #! /usr/pkg/bin/python
#-*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
import os
import re
class Tutsplus:
login_url= 'https://tutsplus.com/sign_in'
login_post = 'https://tutsplus.com/sessions'
home_url = 'https://tutsplus.com'
def __init__(self, username, password):
self.username = username
self.password = password
self.login()
# Return the html source for a specified url
def get_source(self, url):
r = self.s.get(url)
return r.content
# It logs in and store the session for the future requests
def login(self):
self.s = requests.session()
soup = BeautifulSoup(self.get_source(self.login_url))
self.token = soup.find(attrs={"name":"csrf-token"})['content']
data = {
"session[login]": self.username,
"session[password]": self.password,
"authenticity_token": self.token,
"utf8": "✓"
}
self.s.post(self.login_post, data = data)
return True
# remove special characters for windows users
def sanitize_filename(self, name):
if os.name == "nt":
return re.sub('[<>:"/\\|?*]+', '', name)
else:
return name.replace('/','-')
# Download all video from a course url
def download_course(self, url):
# Variable needed to increment the video number
video_number = 1
# get source
source = self.get_source(url)
# update csrf token for each course
soup = BeautifulSoup(source)
self.token = soup.find(attrs={"name":"csrf-token"})['content']
# the course's name
course_title = self.sanitize_filename(soup.select('h1')[0].string.encode("utf-8"))
print "######### " + course_title + " #########"
if not os.path.exists(course_title) :
os.makedirs(course_title)
# store course page
with open(course_title + '/course.html', 'w') as fid:
fid.write(source)
# if the course includes sourcefiles download them first
sourcefile = soup.select('.course-actions__download-button')
if sourcefile:
print "[+] Downloading source files"
filename = course_title + '/sources.zip'
link = sourcefile[0]['href']
self.download_file(link, filename)
# array who stores the information about a course
course_info = self.get_info_from_course(soup)
for video in course_info:
print "[+] Downloading " + video['titolo'].encode("utf-8")
filename = course_title + '/[' + str(video_number).zfill(2) + '] ' + self.sanitize_filename(video['titolo']) + '.mp4'
self.download_video(video['link'], filename)
video_number = video_number + 1
def download_courses(self, courses):
for course in courses:
self.download_course(course)
def download_video(self, url, filename):
# the trick for video links is not to follow the redirect, but to fetch the download link manually
# otherwise we'll get an SignatureDoesNotMatch error from S3
data = {
"authenticity_token": self.token,
"_method": 'post'
}
soup = BeautifulSoup(self.s.post(url, data = data, allow_redirects=False).content)
url = soup.find_all('a')[0]['href']
self.download_file(url, filename)
# Function who downloads the file itself
def download_file(self, url, filename):
r = self.s.get(url, stream=True)
if not os.path.isfile(filename) :
with open(filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
# return an array with all the information about a video (title, url)
def get_info_from_course(self, soup):
arr = []
videos = soup.select('.lesson-index__lesson')
for video in videos:
titolo = video.select('.lesson-index__lesson-title')[0].string
link = video.select('a')[0]['href']
info = {
"titolo": titolo,
"link": link,
}
arr.append(info)
return arr
| mit | -5,994,445,261,014,313,000 | 32.736434 | 129 | 0.569853 | false | 3.945603 | false | false | false |
evernote/pootle | pootle/apps/pootle_notifications/templatetags/notification_tags.py | 1 | 1687 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009 Zuza Software Foundation
# Copyright 2013-2014 Evernote Corporation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
from django import template
from django.contrib.auth import get_user_model
from pootle_app.models.directory import Directory
from pootle_app.models.permissions import check_user_permission
from pootle_notifications.models import Notice
register = template.Library()
@register.inclusion_tag('notifications/_latest.html', takes_context=True)
def render_latest_news(context, path, num):
try:
directory = Directory.objects.get(pootle_path='/%s' % path)
user = context['user']
User = get_user_model()
can_view = check_user_permission(User.get(user), "view", directory)
if not can_view:
directory = None
except Directory.DoesNotExist:
directory = None
if directory is None:
return {'news_items': None}
news_items = Notice.objects.filter(directory=directory)[:num]
return {'news_items': news_items}
| gpl-2.0 | -5,791,673,693,518,483,000 | 34.893617 | 75 | 0.724363 | false | 3.941589 | false | false | false |
maginetv/exconf | exconf/utils.py | 1 | 8022 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import logbook
import logbook.more
import os
import re
import subprocess
import sys
import yaml
REGEXP_YAML_FILE = '.*\.(yaml|yml)$'
REGEXP_INVALID_FILE_NAME_CHARS = '[^-_.A-Za-z0-9]'
MAX_RECURSION_DEPTH = 30
def figure_out_log_level(given_level):
if isinstance(given_level, str):
return logbook.lookup_level(given_level.strip().upper())
else:
return given_level
def verbosity_level_to_log_level(verbosity):
if int(verbosity) == 0:
return 'warning'
elif int(verbosity) == 1:
return 'info'
return 'debug'
def init_logging_stderr(log_level='notset', bubble=False):
handler = logbook.more.ColorizedStderrHandler(level=figure_out_log_level(log_level),
bubble=bubble)
handler.format_string = '{record.time:%Y-%m-%dT%H:%M:%S.%f} ' \
'{record.level_name} {record.channel}: {record.message}'
handler.push_application()
def get_logger(logger_name="magine-services"):
return logbook.Logger(logger_name)
LOG = get_logger()
def read_yaml(file_path, out=sys.stdout):
try:
return yaml.load(open(file_path).read())
except FileNotFoundError:
raise FileNotFoundError("Oops! That was no file in {file_path}.".format(**locals()))
except yaml.scanner.ScannerError:
raise yaml.scanner.ScannerError("Oops! File {file_path} is not a valid yaml.".format(**locals()))
def call_shell(work_dir, shell_cmd, print_output=True):
output_lines = []
LOG.info("Calling shell in dir '{}':\n{}", work_dir, shell_cmd)
proc = subprocess.Popen(shell_cmd, shell=True, cwd=work_dir,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
# Poll process for new output until finished
while True:
next_line = proc.stdout.readline().decode('utf-8')
if next_line == '' and proc.poll() is not None:
break
output_lines.append(next_line)
if print_output:
sys.stdout.write(next_line)
sys.stdout.flush()
if proc.returncode != 0:
LOG.warn("Running shell failed with return code: {}", str(proc.returncode))
return proc.returncode, output_lines
def read_and_combine_yamls_in_dir(the_dir):
LOG.debug("Loading variables in YAML files from directory: {}", the_dir)
all_vars = {}
if os.path.isdir(the_dir):
for file_path in files_in_dir(the_dir, REGEXP_YAML_FILE):
all_vars.update(read_yaml(file_path))
else:
LOG.info("Directory does not exist: {}", the_dir)
return all_vars
def files_in_dir(the_dir, filter_regexp=None):
for file_name in sorted(os.listdir(the_dir)):
if filter_regexp is None or re.match(filter_regexp, file_name):
file_path = os.path.join(the_dir, file_name)
yield file_path
def list_files_not_seen(source_dir, seen_file_names):
file_paths = []
if os.path.isdir(source_dir):
for x in os.listdir(source_dir):
x_path = os.path.join(source_dir, x)
if os.path.isfile(x_path) and x not in seen_file_names:
seen_file_names.add(x)
file_paths.append(x_path)
return file_paths
class RecursionError(Exception):
    pass
def recursive_replace_vars(all_vars, require_all_replaced=True, comment_begin='#',
template_prefix='${{', template_suffix='}}'):
result = copy.deepcopy(all_vars)
for key in all_vars.keys():
try:
result[key] = substitute_vars_until_done(
str(result[key]), all_vars, require_all_replaced,
comment_begin, template_prefix, template_suffix)
except RecursionError as err:
LOG.error("Failed substituting key '{}'. {}", key, err)
raise err
return result
def substitute_vars_until_done(data, all_vars, require_all_replaced, comment_begin,
template_prefix, template_suffix):
iterations = 0
has_changed = True
while has_changed:
iterations += 1
data, has_changed = substitute_vars(data, all_vars, require_all_replaced,
comment_begin, template_prefix, template_suffix)
if iterations > MAX_RECURSION_DEPTH:
raise RecursionError("Too many iterations replacing template variables. Check your "
"variables for reference loops, or increase max recursion depth.")
return data
def substitute_vars(data, all_vars, require_all_replaced, comment_begin,
template_prefix, template_suffix):
"""Just simple string template substitution, like Python string templates etc.
Provides also line numbers for missing variables so they can be highlighted.
"""
output = []
missing_vars_with_lines = []
replaced_variables = []
has_changed = False
line_num = 0
for line in data.split('\n'):
line_num += 1
if not comment_begin or not line.strip().startswith(comment_begin):
i, j = 0, -1
while 0 <= i < len(line):
i = tag_begin = line.find(template_prefix, i)
if tag_begin >= 0:
i = tag_begin + len(template_prefix)
j = line.find(template_suffix, i)
if j > i:
var_name = line[i:j].strip()
i = j + len(template_suffix)
if var_name not in all_vars:
if require_all_replaced:
missing_vars_with_lines.append((line_num, var_name))
else:
var_value = all_vars.get(var_name)
replaced_variables.append(var_name)
line = line[0:tag_begin] + str(var_value) + line[i:]
has_changed = True
output.append(line)
if replaced_variables:
LOG.debug("Variables substituted: {}", replaced_variables)
if missing_vars_with_lines:
raise KeyError("Cannot replace key(s) in template (line, key_name): {}"
.format(missing_vars_with_lines))
return '\n'.join(output), has_changed
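# Illustrative example (not part of the original module): substituting the
# default-style template markers in a single line of text. With
# all_vars = {'http_port': 8080}, the call below returns ('port: 8080', True).
def _substitution_example():
    # Hypothetical helper for illustration only.
    return substitute_vars('port: ${{ http_port }}', {'http_port': 8080},
                           require_all_replaced=True, comment_begin='#',
                           template_prefix='${{', template_suffix='}}')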
def parse_filename_var(file_name, all_vars, template_prefix='___', template_suffix='___'):
while template_prefix in file_name:
LOG.debug("Parsing string template variable in file name: {}", file_name)
i = file_name.find(template_prefix)
j = file_name.find(template_suffix, i + len(template_prefix))
if j > i:
filename_var = file_name[i + len(template_prefix):j]
if filename_var not in all_vars:
raise ValueError("Invalid file name variable '{}' in file name: {}".format(
filename_var, file_name))
substitute = all_vars[filename_var]
if re.search(REGEXP_INVALID_FILE_NAME_CHARS, substitute):
raise ValueError("Invalid file name substitute (var {}): {}"
.format(filename_var, substitute))
file_name = file_name[:i] + substitute + file_name[j + len(template_suffix):]
LOG.debug("File name after parsing: {}", file_name)
else:
LOG.info("Did not find file name template suffix for parsing: {}", file_name)
break
return file_name
| apache-2.0 | -1,949,287,264,025,592,000 | 37.2 | 105 | 0.597856 | false | 3.901751 | false | false | false |
bmander/graphserver | pygs/test/unit_test/test_timezoneperiod.py | 2 | 2177 | import unittest
from graphserver.core import *
from graphserver import util
import pickle
class TestTimezonePeriod(unittest.TestCase):
def test_basic(self):
tzp = TimezonePeriod(0, 100, -10)
assert tzp
assert tzp.begin_time == 0
assert tzp.end_time == 100
assert tzp.utc_offset == -10
def test_dict(self):
tzp = TimezonePeriod(3, 7, -11)
assert tzp.__getstate__() == (3, 7, -11)
ss = pickle.dumps( tzp )
laz = pickle.loads( ss )
assert laz.begin_time == 3
assert laz.end_time == 7
assert laz.utc_offset == -11
def test_time_since_midnight(self):
tzp = TimezonePeriod(0, 24*3600*256, -8*3600)
assert tzp.time_since_midnight( 8*3600 ) == 0
summer_tzp = TimezonePeriod( util.TimeHelpers.localtime_to_unix( 2008,6,1,0,0,0, "America/Los_Angeles" ),
util.TimeHelpers.localtime_to_unix( 2008,9,1,0,0,0, "America/Los_Angeles" ),
-7*3600 )
assert summer_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 7,1,0,0,0,"America/Los_Angeles" ) ) == 0
assert summer_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 7, 2, 2, 0, 0, "America/Los_Angeles" ) ) == 3600*2
winter_tzp = TimezonePeriod( util.TimeHelpers.localtime_to_unix( 2008,1,1,0,0,0, "America/Los_Angeles" ),
util.TimeHelpers.localtime_to_unix( 2008,4,1,0,0,0, "America/Los_Angeles" ),
-8*3600 )
assert winter_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 2,1,0,0,0,"America/Los_Angeles" ) ) == 0
assert winter_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 2, 2, 2, 0, 0, "America/Los_Angeles" ) ) == 3600*2
if __name__ == '__main__':
tl = unittest.TestLoader()
suite = tl.loadTestsFromTestCase(TestTimezonePeriod)
unittest.TextTestRunner(verbosity=2).run(suite) | bsd-3-clause | -1,947,312,984,700,045,600 | 43.44898 | 139 | 0.559486 | false | 3.30349 | true | false | false |
yorickdewid/Mavicona | ecoli/ecoli/pickledb.py | 1 | 6324 | #!/usr/bin/env python
# Copyright (c) 2015, Harrison Erd
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# Neither the name of the Harrison Erd nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "
# AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
import os
import simplejson
def load(location, option):
'''Return a pickledb object. location is the path to the json file.'''
return pickledb(location, option)
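# Illustrative usage (not part of the original module): open a database file,
# write one key and read it back. The second argument asks pickledb to dump
# the data to disk after every write.
def _example_usage():
    # Hypothetical helper for illustration only; 'example.db' is a made-up path.
    db = load('example.db', True)
    db.set('greeting', 'hello')
    return db.get('greeting')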
class pickledb(object):
def __init__(self, location, option):
'''Creates a database object and loads the data from the location path.
If the file does not exist it will be created on the first update.'''
self.load(location, option)
def load(self, location, option):
'''Loads, reloads or changes the path to the db file.'''
location = os.path.expanduser(location)
self.loco = location
self.fsave = option
if os.path.exists(location):
self._loaddb()
else:
self.db = {}
return True
def dump(self):
'''Force dump memory db to file.'''
self._dumpdb(True)
return True
def set(self, key, value):
'''Set the (string,int,whatever) value of a key'''
self.db[key] = value
self._dumpdb(self.fsave)
return True
def get(self, key):
'''Get the value of a key'''
try:
return self.db[key]
except KeyError:
return None
def getall(self):
'''Return a list of all keys in db'''
return self.db.keys()
def rem(self, key):
'''Delete a key'''
del self.db[key]
self._dumpdb(self.fsave)
return True
def lcreate(self, name):
'''Create a list'''
self.db[name] = []
self._dumpdb(self.fsave)
return True
def ladd(self, name, value):
'''Add a value to a list'''
self.db[name].append(value)
self._dumpdb(self.fsave)
return True
def lextend(self, name, seq):
'''Extend a list with a sequence'''
self.db[name].extend(seq)
self._dumpdb(self.fsave)
return True
def lgetall(self, name):
'''Return all values in a list'''
return self.db[name]
def lget(self, name, pos):
'''Return one value in a list'''
return self.db[name][pos]
def lrem(self, name):
'''Remove a list and all of its values'''
number = len(self.db[name])
del self.db[name]
self._dumpdb(self.fsave)
return number
def lpop(self, name, pos):
'''Remove one value in a list'''
value = self.db[name][pos]
del self.db[name][pos]
self._dumpdb(self.fsave)
return value
def llen(self, name):
'''Returns the length of the list'''
return len(self.db[name])
def append(self, key, more):
'''Add more to a key's value'''
tmp = self.db[key]
self.db[key] = ('%s%s' % (tmp, more))
self._dumpdb(self.fsave)
return True
def lappend(self, name, pos, more):
'''Add more to a value in a list'''
tmp = self.db[name][pos]
self.db[name][pos] = ('%s%s' % (tmp, more))
self._dumpdb(self.fsave)
return True
def dcreate(self, name):
'''Create a dict'''
self.db[name] = {}
self._dumpdb(self.fsave)
return True
def dadd(self, name, pair):
'''Add a key-value pair to a dict, "pair" is a tuple'''
self.db[name][pair[0]] = pair[1]
self._dumpdb(self.fsave)
return True
def dget(self, name, key):
'''Return the value for a key in a dict'''
return self.db[name][key]
def dgetall(self, name):
'''Return all key-value pairs from a dict'''
return self.db[name]
def drem(self, name):
'''Remove a dict and all of its pairs'''
del self.db[name]
self._dumpdb(self.fsave)
return True
def dpop(self, name, key):
'''Remove one key-value pair in a dict'''
value = self.db[name][key]
del self.db[name][key]
self._dumpdb(self.fsave)
return value
def dkeys(self, name):
'''Return all the keys for a dict'''
return self.db[name].keys()
def dvals(self, name):
'''Return all the values for a dict'''
return self.db[name].values()
def dexists(self, name, key):
'''Determine if a key exists or not'''
if self.db[name][key] is not None:
return 1
else:
return 0
def deldb(self):
'''Delete everything from the database'''
        self.db = {}
self._dumpdb(self.fsave)
return True
def _loaddb(self):
'''Load or reload the json info from the file'''
self.db = simplejson.load(open(self.loco, 'rb'))
def _dumpdb(self, forced):
'''Write/save the json dump into the file'''
if forced:
simplejson.dump(self.db, open(self.loco, 'wt')) | bsd-3-clause | 4,799,547,565,119,227,000 | 30.467662 | 82 | 0.607052 | false | 3.910946 | false | false | false |
mondhs/kaldi-liepa-train | opt/kaldi-liepa-train/tool_data_prep/01_transform_liepa2csv.py | 1 | 2734 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
'''
@author: Mindaugas Greibus
'''
import re
import wave
import os
def transform_transcription_file(transcription_path, output_file_map, total_duration_map, repo_type):
with open(transcription_path, 'r') as input_file:
for line in input_file:
line=re.sub(r'<sil[\+\w]+>',r'',line)
line=re.sub(r'<s> *<sil>',r'<s>',line)
line=re.sub(r'<sil> *</s>',r'</s>',line)
line=re.sub(r'( *<sil>)+',r'',line)
line=re.sub(r'(\s+)',r' ',line)
text =""
file_name=""
m = re.search('<s>(.*)</s>\s*\((.*)\)',line)
if(not m):
print(">>>> " + line)
raise ValueError('Cannot parse the line: ' + line)
#line text
text = m.group(1)
## find correct file path
file_name = m.group(2)
m = re.search('-(.+)$',file_name)
if(not m):
print(">>>> " + line)
raise ValueError('Dir not found: ' + file_name)
dir_name=m.group(1)
wav_name = "../{}_repo/{}/{}.wav".format(repo_type,dir_name, file_name)
##Calculate duration
audio = wave.open(wav_name)
duration = float(audio.getnframes()) / audio.getframerate()
audio.close()
kaldi_path = "./liepa_audio/{}/{}/{}.wav".format(repo_type,dir_name, file_name)
total_duration_map["total"] += duration
out = '{},{},{}\n'.format(duration, text, kaldi_path)
print(out)
if(duration>1):
            # if shorter than the given duration, training crashes with a core dump
total_duration_map[repo_type] += duration
output_file_map[repo_type].write(out)
else:
total_duration_map["short"] += duration
src_dir = "../"
test_transcription_path = os.path.join(src_dir, "./target/liepa_test.transcription")
train_transcription_path = os.path.join(src_dir, "./target/liepa_train.transcription")
with open('./target/liepa_test.csv', 'w') as test_repo_csv, \
open('./target/liepa_train.csv', 'w') as train_repo_csv:
output_file_map = {"test":test_repo_csv, "train": train_repo_csv}
total_duration_map = { "test":0, "train":0,"short":0, "total":0}
transform_transcription_file(test_transcription_path, output_file_map, total_duration_map, "test")
transform_transcription_file(train_transcription_path, output_file_map, total_duration_map, "train")
print(total_duration_map)
# cp sphinx_files/etc/liepa.dic ./target/data/local/dict/lexicon.txt
# insert into ./target/data/local/dict/lexicon.bak "!SIL sil\n<UNK> spn\n"
| bsd-2-clause | 1,105,583,628,751,635,500 | 36.972222 | 104 | 0.553402 | false | 3.301932 | true | false | false |
ocelot-collab/ocelot | ocelot/rad/fel.py | 1 | 21453 | from __future__ import print_function
'''
basic fel calculations
'''
#from pylab import *
import numpy as np
import numpy.fft as fft
import scipy.special as sf
from ocelot.common.globals import m_e_eV, epsilon_0, speed_of_light, q_e, h_eV_s, lambda_C_r, I_Alfven, ro_e
import logging
from scipy.optimize import fmin
from copy import deepcopy
_logger = logging.getLogger(__name__)
#from matplotlib.figure import Figure
#from mpl_toolkits.mplot3d import Axes3D
#import fel
class FelParameters:
def __init__(self):
self.qf = 0
self.inaccurate = False # True if fitting formulas do not promise good accuracy
pass
def eval(self, method='mxie'):
_logger.debug('Calculating FEL parameters')
if np.size(self.I) > 1:
tdp=True
else:
tdp=False
if not hasattr(self, 'hn'):
self.hn=1 #harmonic number
if np.any(self.betax <= 0) or np.any(self.betay <= 0):
_logger.warning('betax or betay <= 0, returning lg3=np.nan')
self.lg3 = np.nan
return
self.rxbeam = np.sqrt(self.betax * self.emitx / self.gamma0)
self.rybeam = np.sqrt(self.betay * self.emity / self.gamma0)
if np.any(self.rxbeam <= 0) or np.any(self.rybeam <= 0):
_logger.warning('rxbeam or rybeam <= 0, returning lg3=np.nan')
self.lg3 = np.nan
return
self.deta = self.delgam / self.gamma0
if np.isnan(self.aw0):
_logger.warning('aw0 is nan')
self.inaccurate = True
self.lambda0 = self.xlamd / (2.0 * self.gamma0**2) * (1.0 + self.aw0**2) # resonant wavelength
if np.any(self.lambda0 < 0):
_logger.error('wavelength is not reachable with und_period {} gamma {} and K {}'.format(self.xlamd,self.gamma0,self.aw0))
self.inaccurate = True
self.lambdah = self.lambda0 / self.hn
self.k0 = 2 * np.pi / self.lambda0
# self.Ia = I_Alfven #remove
if self.iwityp == 0: #planar undulator
ja = self.aw0**2 / (2*(1 + self.aw0**2))
self.fc = sf.j0(ja) - sf.j1(ja)
# if self.hn != 1:
jah = self.hn * self.aw0**2 / (2*(1 + self.aw0**2))
self.fch = sf.jv((self.hn-1)/2, jah) - sf.jv((self.hn+1)/2, jah)
else: #helical undulator
self.fc = 1
if self.hn !=1:
                _logger.warning('harmonic number != 1 and undulator is helical. Not implemented! Returning zero coupling at harmonic!')
self.inaccurate = True
self.fch = 0
else:
self.fch = 1
self.Pb = self.gamma0 * self.I * m_e_eV# beam power [Reiche]
# import first, ro_e * m_e_eV = 1.4399643147059695e-09
# self.N = self.I * self.lambda0 / 1.4399644850445153e-10
# self.sigb = 0.5 * (self.rxbeam + self.rybeam) # average beam size
emit_n = np.sqrt(self.emitx * self.emity)
# h_eV_s * speed_of_light / self.lambda0
self.emit_nn = 2 * np.pi * emit_n / self.lambdah / self.gamma0 ## emittance normalized as in Eq.6, 10.1103/PhysRevSTAB.15.080702
        if np.any(self.emit_nn < 1) or np.any(self.emit_nn > 5):
self.inaccurate = True
if tdp:
_logger.warning('1 <! min(emittance) {} <! 5, SSY approx. might be incorrect'.format(np.nanmin(self.emit_nn)))
else:
_logger.warning('1 <! emittance {} <! 5, SSY approx. might be incorrect'.format(self.emit_nn))
#Eq.6, DOI:10.1103/PhysRevSTAB.15.080702
if self.qf == 1: #account for quantum fluctuations
if self.iwityp == 0: #planar undulator
F_aw = 1.7 * self.aw0 + 1 / (1 + 1.88 * self.aw0 + 0.8 * self.aw0**2)
#eq.B2, DOI:10.1103/PhysRevSTAB.15.080702,
#eq.11 DOI:10.1016/j.optcom.2004.02.071
else: #helical undulator
F_aw = 1.42 * self.aw0 + 1 / (1 + 1.5 * self.aw0 + 0.95 * self.aw0**2)
if method == 'mxie':
'''
M. Xie, “Exact and variational solutions of 3D eigenmodes in high gain FELs,” Nucl. Instruments Methods Phys. Res. Sect. A Accel. Spectrometers, Detect. Assoc. Equip., vol. 445, no. 1–3, pp. 59–66, 2000.
'''
# if self.hn != 1:
            # _logger.warning('MXie estimation not implemented for harmonic radiation')
self.rho1 = (0.5 / self.gamma0) * np.power( (self.aw0 * self.fc * self.xlamd / 2 / np.pi )**2 / (self.rxbeam * self.rybeam) * self.I / I_Alfven, 1.0/3.0)
#self.power = 6.0 * np.sqrt(np.pi) * self.rho1**2 * self.Pb / (self.N * np.log(self.N / self.rho1) ) # shot noise power [W] [Reiche]
self.lg1 = self.xlamd / (4*np.pi * np.sqrt(3) * self.rho1) #power gain length [Xie]
self.zr = 4 * np.pi * self.rxbeam * self.rybeam / self.lambda0
a = [None, 0.45, 0.57, 0.55, 1.6, 3.0, 2.0, 0.35, 2.9, 2.4, 51.0, 0.95, 3.0, 5.4, 0.7, 1.9, 1140.0, 2.2, 2.9, 3.2]
self.xie_etad = self.lg1 / (2 * self.k0 * self.rxbeam * self.rybeam)
#self.xie_etae = 4 * pi * self.lg1 / (self.betax*2*pi) * self.k0 * (self.emitx / self.gamma0)
self.xie_etae = 4 * np.pi * self.lg1 * (self.emitx * self.emity) / self.lambda0 / (self.rxbeam * self.rybeam) / self.gamma0**2 * ((self.fc/self.fch)**2 / self.hn)**(1/3) / self.hn # expressed via average x-y beam size
self.xie_etagamma = self.deta / (self.rho1 * np.sqrt(3))
if self.hn !=1:
self.xie_etad *= ((self.fc/self.fch)**2 / self.hn)**(1/3) / self.hn
self.xie_etae *= ((self.fc/self.fch)**2 / self.hn)**(1/3) * self.hn
self.xie_etagamma *= ((self.fc/self.fch)**2 / self.hn)**(1/3) * self.hn #eq C2+ DOI:10.1103/PhysRevSTAB.15.080702
self.delta = (a[1] * self.xie_etad ** a[2] + a[3] * self.xie_etae ** a[4] + a[5] * self.xie_etagamma ** a[6]
+ a[7] * self.xie_etae ** a[8] * self.xie_etagamma ** a[9] + a[10] * self.xie_etad ** a[11] * self.xie_etagamma ** a[12] + a[13] * self.xie_etad ** a[14] * self.xie_etae ** a[15]
+ a[16] * self.xie_etad ** a[17] * self.xie_etae ** a[18] * self.xie_etagamma ** a[19])
# self.lg3 = self.lg1 * (1 + self.xie_lscale)
self.method = 'mxie'
elif method == 'ssy_opt':
'''
E. L. Saldin, E. A. Schneidmiller, and M. V. Yurkov, “Design formulas for short-wavelength FELs,” Opt. Commun., vol. 235, no. 4–6, pp. 415–420, May 2004.
'''
self.lg1 = 0.5 * 1.67 * np.sqrt(I_Alfven / self.I) * (emit_n * self.xlamd)**(5/6) / self.lambdah**(2/3) * (1 + self.aw0**2)**(1/3) / (self.hn**(5/6) * self.aw0 * self.fch)
#eq.4, DOI:10.1103/PhysRevSTAB.15.080702
# it is power gain length = 0.5 * field gain length
self.delta = 131 * (I_Alfven / self.I) * emit_n**(5/4) / (self.lambdah * self.xlamd**9)**(1/8) * self.hn**(9/8) * self.delgam**2 / (self.aw0 * self.fch)**2 / (1 + self.aw0**2)**(1/8) #eq.5, DOI:10.1103/PhysRevSTAB.15.080702
# if hasattr(self, 'qf'):
# if self.qf==1:
# self.lg3 = self.lg1 * (1 + self.delta_eff)
self.method = 'ssy_opt'
else:
_logger.error('method should be in ["mxie", "ssy_opt"]')
raise ValueError('method should be in ["mxie", "ssy_opt"]')
if self.qf == 1:
self.delta_q = 5.5e4 * (I_Alfven / self.I)**(3/2) * lambda_C_r * ro_e * emit_n**2 / self.lambda0**(11/4) / self.xlamd**(5/4) * (1 + self.aw0**2)**(9/4) * F_aw / (self.aw0 * self.fch**3 * self.hn**(5/3))
if np.any(self.delta_q >= 1):
_logger.warning('quantum fluctuation effect exceeds 1, estimation not applicable anymore')
self.delta_q = 0.999
self.inaccurate = True
else:
self.delta_q = 0
self.delta_eff = (self.delta + self.delta_q) / (1 - self.delta_q)
self.delta_criterion = 2.5 * (1 - np.exp(-0.5 * self.emit_nn**2))
if np.any(self.delta_eff > self.delta_criterion):
if tdp:
_logger.warning('delta_eff > delta_criterion; SSY approx. might be incorrect')
else:
_logger.warning('delta_eff {} > {}; SSY approx. might be incorrect'.format(self.delta_eff, self.delta_criterion))
self.inaccurate = True
#Eq.7, DOI:10.1103/PhysRevSTAB.15.080702
#Eq.14+text, DOI:10.1016/j.optcom.2004.02.071
self.beta_opt_calc = 11.2 * (I_Alfven / self.I)**(1/2) * (emit_n**3 * self.xlamd)**(1/2) / (self.lambdah* self.hn**(1/2) * self.aw0 * self.fch) / (1 + 8 * self.delta_eff)**(1/3)
self.lg3 = self.lg1 * (1 + self.delta_eff)
self.lg3 *= self.Lg_mult
if self.Lg_mult != 1:
_logger.info('lg3 multiplied by Lg_mult ({})'.format(self.Lg_mult))
self.rho3 = self.xlamd / (4*np.pi * np.sqrt(3) * self.lg3)
self.Nc = self.I / (q_e * self.rho3 * self.k0 * speed_of_light)
# self.P_sn = (3 * self.rho1 * self.Pb) / (self.Nc * np.sqrt(np.pi * np.log(self.Nc))) # shot noise power [W]
self.P_sn = (3 * self.rho3 * self.Pb) / (self.Nc * np.sqrt(np.pi * np.log(self.Nc))) # shot noise power [W]
self.z_sat_norm = 3 + 1/np.sqrt(3) * np.log(self.Nc) # normalized saturation length for slices
self.z_sat_magn = self.z_sat_norm * np.sqrt(3) * self.lg3 # magnetic length to reach saturation
self.theta_c = np.sqrt(self.lambdah / self.lg3) #critical angle
# _logger.debug('L_sat_norm = {}'.format(self.z_sat_norm))
self.z_sat_min = np.nanmin(self.z_sat_magn)
def beta_opt(self, method='mxie', apply=False, **kwargs):
if method == 'mxie':
beta_orig_x, beta_orig_y = self.betax, self.betay
beta_orig = np.mean([beta_orig_x, beta_orig_y])
fel_copy = deepcopy(self)
def f(x, method=method):
fel_copy.betax = fel_copy.betay = x
fel_copy.eval(method=method)
return fel_copy.lg3
err_dict = np.geterr()
np.seterr(all='ignore')
beta_opt = fmin(f, beta_orig, disp=0, **kwargs)
np.seterr(**err_dict)
elif method == 'ssy_opt':
beta_opt = self.beta_opt_calc
else:
_logger.error('method should be in ["mxie", "ssy_opt"]')
raise ValueError('method should be in ["mxie", "ssy_opt"]')
if apply:
self.betax = beta_opt
self.betay = beta_opt
self.eval(method)
else:
return beta_opt[0]
def log(self, type='debug'):
if type == 'debug':
_log_func = _logger.debug
elif type == 'info':
_log_func = _logger.info
elif type == 'log':
_log_func = _logger.log
elif type == 'print':
_log_func = print
_log_func('undulator period = {}'.format(self.xlamd))
_log_func('undulator K (rms) = {}'.format(self.aw0))
if self.iwityp == 0:
_log_func('undulator type - planar')
else:
_log_func('undulator type - helical')
# _log_func('beam E GeV = {}'.format(beam.E))
_log_func('beam gamma = {}'.format(self.gamma0))
_log_func('beam dgamma= {}'.format(self.delgam))
_log_func('beam current = {}'.format(self.I))
_log_func('beam power = {}'.format(self.Pb))
# _log_func('beam alphax = {}'.format(self.alphax))
# _log_func('beam alphay = {}'.format(self.alphay))
_log_func('beam betax = {}'.format(self.betax))
_log_func('beam betay = {}'.format(self.betay))
_log_func('beam emitx_norm = {}'.format(self.emitx))
_log_func('beam emity_norm = {}'.format(self.emity))
# _log_func('beam x = {}'.format(self.xbeam))
# _log_func('beam y = {}'.format(self.ybeam))
# _log_func('beam px = {}'.format(self.pxbeam))
# _log_func('beam py = {}'.format(self.pybeam))
_log_func('beam rx = {}'.format(self.rxbeam))
_log_func('beam ry = {}'.format(self.rybeam))
_log_func('')
_log_func('Estimation results')
_log_func('Rho 1D = {}'.format(self.rho1))
_log_func('FEL_wavelength = {} m'.format(self.lambda0))
_log_func('FEL_E_photon = {} eV'.format(h_eV_s * speed_of_light / self.lambda0))
_log_func('Lg 1D = {} m'.format(self.lg1))
_log_func('Z_Rayl = {} m'.format(self.zr))
_log_func('xie_eta_d = {}'.format(self.xie_etad))
_log_func('xie_eta_e = {}'.format(self.xie_etae))
_log_func('xie_eta_gamma = {}'.format(self.xie_etagamma))
# _log_func('xie_scaling_tot = {}'.format(self.xie_lscale))
_log_func('Lg 3D = {}'.format(self.lg3))
_log_func('Rho 3D = {}'.format(self.rho3))
_log_func('P_shnoise = {}'.format(self.P_sn))
_log_func('L_sat_magn = {}'.format(self.z_sat_magn))
_log_func('L_sat_min = {}'.format(self.z_sat_min))
_log_func('Theta_critical = {} rad'.format(self.theta_c))
def P(self, z=None):
'''
returns sase power at distance z
unfinished
'''
# Nc = self.Ip / (q_e * rho * self.k0 * speed_of_light)
# z_sat = 3 + 1/np.sqrt(3) * np.log(Nc)
# Psn = (3 * rho * self.Pb) / (Nc * np.sqrt(np.pi * np.log(Nc)))
if z is None:
zn = self.z_sat_min / (np.sqrt(3) * self.lg3)
elif z == 0:
return np.array(np.size(self.P_sn)*(np.NaN,))
else:
if np.size(z) > 1:
z = z[:,np.newaxis]
if (z > self.z_sat_min).any():
_logger.warning('Estimation applicable up to z_sat_min=%.2fm, limiting power to saturation level' %(self.z_sat_min))
idx = z > self.z_sat_min[:,np.newaxis]
z[idx] = self.z_sat_min[:,np.newaxis][idx]
else:
if (z > self.z_sat_min):
_logger.warning('Estimation applicable up to z_sat_min=%.2fm, while z=%.2fm requested, returning saturation power' %(self.z_sat_min, z))
z = self.z_sat_min
zn = z / (np.sqrt(3) * self.lg3)
Pz = self.P_sn * (1 + 1/9 * np.exp(np.sqrt(3) * zn) / np.sqrt(np.pi * zn))
# Pz = self.P_sn * (1 + 1/9 * np.exp(np.sqrt(3) * zn))
#Pz = p.P_sn * (1 + 1/9 * np.exp(np.sqrt(3) * zn))
if hasattr(self,'P_mult'):
if self.P_mult is not None:
Pz *= self.P_mult
return Pz
def E(self, z=None):
P = self.P(z)
P[np.isnan(P)] = 0
return np.trapz(P, self.s / speed_of_light)
def tcoh(self,z=None):
#check
if z is None:
z = self.z_sat_min
elif z > self.z_sat_min:
            _logger.warning('estimation applicable up to z_sat_min=%.2fm, while z=%.2fm requested' %(self.z_sat_min, z))
tcoh = self.lambda0 / (6 * self.rho3 * speed_of_light ) * np.sqrt(z / (2 * np.pi * self.lg3))
return tcoh
def P_sat(self):
return self.P(self.z_sat_min)
@property
def phen0(self):
return h_eV_s * speed_of_light / self.lambda0
@property
def phenh(self):
return h_eV_s * speed_of_light / self.lambdah
def spectrogram(self, z=None):
#fast spectrogram evaluation
if z is None:
z = self.z_sat_min
Psat = self.P(z)
Psat[np.isnan(Psat)]=0
idx = self.idx
phen0 = self.phen0
dphen = phen0 * self.rho3
dp = dphen[idx] / 10
s_arr = self.s
phen_arr = np.arange(np.amin(phen0 - 3 * dphen), np.amax(phen0 + 3 * dphen), dp)
spec = np.zeros((s_arr.size, phen_arr.size))
for i in range(s_arr.size):
if dphen[i] != 0:
spec[i] = np.exp(-(phen_arr - phen0[i])**2 / 2 / dphen[i]**2) / np.sqrt(2 * np.pi * dphen[i]**2)
spec = spec * Psat[:, np.newaxis]
return (s_arr, phen_arr, spec.T)
def spectrum(self, z=None):
#fast total spectrum evaluation
s_arr, phen_arr, spectrogram = self.spectrogram(z = z)
spectrum = np.sum(spectrogram, axis=1)
return phen_arr, spectrum
class FelParametersArray(FelParameters):
def __init__(self):
super().__init__()
@property
def idx(self):
try:
idx = self.I.argmax()
except AttributeError:
idx = None
return idx
def calculateFelParameters(input, array=False, method='mxie'):
if array:
p = FelParametersArray()
else:
p = FelParameters()
p.iwityp = input.iwityp # undulator type: 0 == planar, other == helical
p.gamma0 = input.gamma0
p.delgam = input.delgam
p.xlamd = input.xlamd # undulator period
p.betax = input.betax
p.betay = input.betay
p.emitx = input.emitx #normalized emittance
p.emity = input.emity
if hasattr(input,'hn'):
p.hn = input.hn
if hasattr(input,'qf'):
p.qf = input.qf
p.Lg_mult = 1
if hasattr(input,'Lg_mult'):
if input.Lg_mult is not None:
p.Lg_mult = input.Lg_mult
p.P_mult = 1
if hasattr(input,'P_mult'):
if input.P_mult is not None:
p.P_mult = input.P_mult
# p.rxbeam = input.rxbeam
# p.rybeam = input.rybeam
p.aw0 = input.aw0 # rms undulator parameter K
p.I = input.curpeak
p.eval(method)
# if array:
# p.log('log')
# else:
# pass
# p.log('debug')
# if not array:
# try:
# p.idx = p.I.argmax()
# except AttributeError:
# p.idx = 0
return p
def beam2fel(beam, lu, K_peak, iwityp=0, method='mxie', hn=1, qf=0):
'''
tmp function to estimate fel parameters slice-wise
hn = harmonic number
qf = account for quantum fluctuations
'''
if beam.len() == 0:
raise ValueError('Beam length should not be zero')
class tmp():
pass
tmp.hn=hn
tmp.qf=qf
tmp.gamma0 = beam.g
tmp.delgam = beam.dg
tmp.xlamd = lu # undulator period
tmp.iwityp = iwityp
tmp.emitx = beam.emit_xn
tmp.emity = beam.emit_yn
if hasattr(beam,'beta_x_eff') and hasattr(beam,'beta_y_eff'):
tmp.betax = beam.beta_x_eff
tmp.betay = beam.beta_y_eff
else:
# print('use update_effective_beta() to increase estimation accuracy')
tmp.betax = beam.beta_x
tmp.betay = beam.beta_y
if K_peak == 0:
print('Warning, undulator K=0')
if iwityp == 0: #planar
tmp.aw0 = K_peak / np.sqrt(2)
else: #other
tmp.aw0 = K_peak
tmp.curpeak = beam.I
fel=calculateFelParameters(tmp, array=True, method=method)
fel.s = beam.s
return (fel)
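# Illustrative usage sketch (editor's addition, not from the original docs).
# It assumes `beam` is an ocelot beam-like object exposing the attributes read
# above (g, dg, s, I, emit_xn, emit_yn, beta_x, beta_y); the numbers are
# placeholders, not a validated machine configuration.
#
#     fel = beam2fel(beam, lu=0.04, K_peak=1.2, iwityp=0, method='ssy_opt')
#     print(fel.lg3)        # 3D power gain length estimate [m]
#     print(fel.P_sat())    # estimated saturation power [W]
#     fel.log('print')      # dump all derived parameters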
def printFelParameters(p):
#print (input.parameters)
print ('******** FEL Parameters ********')
print ('ex=', p.emitx)
print ('ey=', p.emity)
print ('rxbeam=', p.rxbeam, ' [m]')
print ('rybeam=', p.rybeam, ' [m]')
    print ('rel energy spread deta=', p.deta)
print ('xlamd=', p.xlamd)
print ('aw0=', p.aw0)
print ('coupling parameter fc=', p.fc)
print ('gamma0=', p.gamma0)
print ('Ip=', p.I, ' beam peak current [A]')
print ('lambda0=', p.lambda0)
print ('Pb= %.3e beam power [W]'%(p.Pb))
# print ('N=', p.N)
print ('rho (1D)=', p.rho1)
print ('gain length estimate lg (1D)=', p.lg1)
# print ('power=', p.power, ' equivalent shot noise power [W]')
print ('Rayleigh length estimate zr=', p.zr)
print ('')
print ('Ming Xie gain reduction estimates:')
print ('diffraction parameter eta_d=', p.xie_etad)
print ('emittance/focusing parameter eta_e=', p.xie_etae)
print ('energy spread parameter eta_gamma=', p.xie_etagamma)
# print ('gain length degradation lscale=', p.xie_lscale)
print ('scaled gain length lg (3D)=', p.lg3)
print ('scaled rho (3D)=', p.rho3)
print ('')
print ('Saturation magn. length=', p.z_sat_min)
print ('**************************************')
# CHECK with Xie paper parameters
#inp = GenesisInput()
#inp.curpeak = 3400
#inp.xlamd = 0.03
#inp.iwityp = 0
#inp.gamma0 = 28000
#inp.delgam = inp.gamma0 * 2e-4
#inp.betax = 18
#inp.betay = 18
#inp.emitx=1.5e-6
#inp.emity=1.5e-6
#inp.xlamd=0.03
#inp.aw0 = 3.7/sqrt(2)
#
#p = calculateFelParameters(inp)
#print(p.xie_lscale,'new')
#p.lg1
#p.rho1
#print(p.xie_etad, 0.0367)
#print(p.xie_etae, 0.739)
#print(p.xie_etagamma, 0.248) | gpl-3.0 | 8,590,741,184,945,990,000 | 38.120438 | 235 | 0.520689 | false | 2.949505 | false | false | false |
huangyh09/brie | brie/utils/count.py | 1 | 7351 | import sys
import numpy as np
from .sam_utils import load_samfile, fetch_reads
def _check_SE_event(gene):
"""Check SE event"""
if (len(gene.trans) != 2 or
gene.trans[0].exons.shape[0] != 3 or
gene.trans[1].exons.shape[0] != 2 or
np.mean(gene.trans[0].exons[[0, 2], :] ==
gene.trans[1].exons) != 1):
return False
else:
return True
def _get_segment(exons, read):
"""Get the length of segments by devidinig a read into exons.
The segments include one for each exon and two edges.
"""
if read is None:
return None
_seglens = [0] * (exons.shape[0] + 2)
_seglens[0] = np.sum(read.positions < exons[0, 0])
_seglens[-1] = np.sum(read.positions > exons[-1, -1])
for i in range(exons.shape[0]):
_seglens[i + 1] = np.sum(
(read.positions >= exons[i, 0]) * (read.positions <= exons[i, 1]))
return _seglens
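# Worked example (editor's comment): for a transcript with exons
# [[100, 200], [300, 400]] and a read whose aligned positions all fall inside
# the first exon, _get_segment returns [0, n, 0, 0] -- one count per exon plus
# the two flanking "edge" bins for positions before the first and after the
# last exon. Reads bridging a junction put counts into two exon bins at once.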
def check_reads_compatible(transcript, reads, edge_hang=10, junc_hang=2):
"""Check if reads are compatible with a transcript
"""
is_compatible = [True] * len(reads)
for i in range(len(reads)):
_segs = _get_segment(transcript.exons, reads[i])
# check mismatch to regions not in this transcript
if len(reads[i].positions) - sum(_segs) >= junc_hang:
is_compatible[i] = False
continue
# check if edge hang is too short
if (_segs[0] > 0 or _segs[-1] > 0) and sum(_segs[1:-1]) < edge_hang:
is_compatible[i] = False
continue
# check if exon has been skipped
if len(_segs) > 4:
for j in range(2, len(_segs) - 2):
if (_segs[j-1] >= junc_hang and _segs[j+1] >= junc_hang and
transcript.exons[j-1, 1] - transcript.exons[j-1, 0] -
_segs[j] >= junc_hang):
is_compatible[i] = False
break
return np.array(is_compatible)
def SE_reads_count(gene, samFile, edge_hang=10, junc_hang=2, **kwargs):
"""Count the categorical reads mapped to a splicing event
rm_duplicate=True, inner_only=True,
mapq_min=0, mismatch_max=5, rlen_min=1, is_mated=True
"""
# Check SE event
if _check_SE_event(gene) == False:
print("This is not exon-skipping event!")
exit()
# Fetch reads (TODO: customise fetch_reads function, e.g., FLAG)
reads = fetch_reads(samFile, gene.chrom, gene.start, gene.stop, **kwargs)
# Check reads compatible
is_isoform1 = check_reads_compatible(gene.trans[0], reads["reads1"])
is_isoform2 = check_reads_compatible(gene.trans[1], reads["reads1"])
if len(reads["reads2"]) > 0:
is_isoform1 *= check_reads_compatible(gene.trans[0], reads["reads2"])
is_isoform2 *= check_reads_compatible(gene.trans[1], reads["reads2"])
is_isoform1 = np.append(is_isoform1,
check_reads_compatible(gene.trans[0], reads["reads1u"]))
is_isoform2 = np.append(is_isoform2,
check_reads_compatible(gene.trans[1], reads["reads1u"]))
is_isoform1 = np.append(is_isoform1,
check_reads_compatible(gene.trans[0], reads["reads2u"]))
is_isoform2 = np.append(is_isoform2,
check_reads_compatible(gene.trans[1], reads["reads2u"]))
# return Reads matrix
Rmat = np.zeros((len(is_isoform1), 2), dtype=bool)
Rmat[:, 0] = is_isoform1
Rmat[:, 1] = is_isoform2
return Rmat
def get_count_matrix(genes, sam_file, sam_num, edge_hang=10, junc_hang=2):
samFile = load_samfile(sam_file)
RV = []
for g in range(len(genes)):
        _Rmat = SE_reads_count(genes[g], samFile, edge_hang=edge_hang, junc_hang=junc_hang,
rm_duplicate=True, inner_only=False, mapq_min=0, mismatch_max=5,
rlen_min=1, is_mated=True)
if _Rmat.shape[0] == 0:
continue
K = 2**(np.arange(_Rmat.shape[1]))
code_id, code_cnt = np.unique(np.dot(_Rmat, K), return_counts=True)
count_dict = {}
for i in range(len(code_id)):
count_dict["%d" %(code_id[i])] = code_cnt[i]
RV.append("%d\t%d\t%s" %(sam_num + 1, g + 1, str(count_dict)))
RV_line = ""
if len(RV) > 0:
RV_line = "\n".join(RV) + "\n"
return RV_line
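# Editor's note on the encoding used above: each row of _Rmat is a pair of
# booleans (compatible with isoform 1, compatible with isoform 2) and is
# collapsed with K = [1, 2] via np.dot, so reads unique to isoform 1 get code
# 1, reads unique to isoform 2 get code 2, ambiguous reads get code 3 and
# incompatible reads get code 0; count_dict maps each code to its read count.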
def SE_probability(gene, rlen=75, edge_hang=10, junc_hang=2):
"""Get read categorical probability of each isoform.
In exon-skipping (SE) event, there are two isoform:
isoform1 for exon inclusion and isoform2 for exon exclusion.
Here, we only treat single-end reads. For paired-end reads,
we treat it as the single-end by only using the most informative
mate, namely the mate mapped to least number of isoform(s).
isoform1: l1 + l2 + l3 + rlen - 2 * edge_hang
p1: l2 + rlen - 2 * junc_hang
p3: l1 + l3 - 2 * edge_hang + 2 * junc_hang
isoform2: l1 + l3 + rlen - 2 * edge_hang
        p2: rlen - 2 * junc_hang
p3: l1 + l3 - 2 * edge_hang + 2 * junc_hang
"""
# check SE event
if _check_SE_event(gene) == False:
print("This is not exon-skipping event!")
exit()
l1, l2, l3 = gene.trans[0].exons[:, 1] - gene.trans[0].exons[:, 0]
prob_mat = np.zeros((2, 3))
# Isoform 1
len_isoform1 = l1 + l2 + l3 + rlen - 2 * edge_hang
prob_mat[0, 0] = (l2 + rlen - 2 * junc_hang) / len_isoform1
prob_mat[0, 2] = (l1 + l3 - 2 * edge_hang + 2 * junc_hang) / len_isoform1
# Isoform 2
len_isoform2 = l1 + l3 + rlen - 2 * edge_hang
prob_mat[1, 1] = (rlen - 2 * junc_hang) / len_isoform2
prob_mat[1, 2] = (l1 + l3 - 2 * edge_hang + 2 * junc_hang) / len_isoform2
return prob_mat
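# Worked example (editor's comment, using made-up exon lengths): with
# l1 = l2 = l3 = 100, rlen = 75, edge_hang = 10, junc_hang = 2:
#   isoform 1: length 355, p(group1) = 171/355 ~ 0.48, p(group3) = 184/355 ~ 0.52
#   isoform 2: length 255, p(group2) =  71/255 ~ 0.28, p(group3) = 184/255 ~ 0.72
# Each row of prob_mat therefore sums to 1 over the three read categories.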
def SE_effLen(gene, rlen=75, edge_hang=10, junc_hang=2):
"""Get effective length matrix for three read categories from two isoforms.
In exon-skipping (SE) event, there are two isoform:
isoform1 for exon inclusion and isoform2 for exon exclusion.
and three read groups:
group1: uniquely from isoform1
group2: uniquely from isoform2
group3: ambiguous identity
Here, we only treat single-end reads. For paired-end reads,
we treat it as the single-end by only using the most informative
mate, namely the mate mapped to least number of isoform(s).
isoform1: l1 + l2 + l3 + rlen - 2 * edge_hang
read group1: l2 + rlen - 2 * junc_hang
read group3: l1 + l3 - 2 * edge_hang + 2 * junc_hang
isoform2: l1 + l3 + rlen - 2 * edge_hang
read group2: rlen - 2 * junc_hang
read group3: l1 + l3 - 2 * edge_hang + 2 * junc_hang
"""
# check SE event
if _check_SE_event(gene) == False:
print("This is not exon-skipping event!")
exit()
l1, l2, l3 = gene.trans[0].exons[:, 1] - gene.trans[0].exons[:, 0]
isoLen_mat = np.zeros((2, 3))
# isoform length
len_isoform1 = l1 + l2 + l3 + rlen - 2 * edge_hang
len_isoform2 = l1 + l3 + rlen - 2 * edge_hang
# segments
isoLen_mat[0, 0] = l2 + rlen - 2 * junc_hang
isoLen_mat[1, 1] = rlen - 2 * junc_hang
isoLen_mat[0, 2] = l1 + l3 - 2 * edge_hang + 2 * junc_hang
isoLen_mat[1, 2] = l1 + l3 - 2 * edge_hang + 2 * junc_hang
# prob_mat = isoLen_mat / isoLen_mat.sum(1, keepdims=True)
return isoLen_mat
| apache-2.0 | -3,034,999,761,017,356,300 | 34.341346 | 79 | 0.577881 | false | 3.043892 | false | false | false |
kuc2477/news | tests/models/test_sqlalchemy_models.py | 1 | 1375 | from celery.states import ALL_STATES
def test_abstract_model_implementations(sa_session, sa_schedule, sa_child_news):
assert(isinstance(sa_schedule.id, int))
assert(isinstance(sa_child_news.id, int))
def test_abstract_schedule_implementation(
sa_scheduler, sa_session, sa_owner_model, sa_schedule):
assert(isinstance(sa_schedule.owner, sa_owner_model))
assert(isinstance(sa_schedule.url, str))
assert(isinstance(sa_schedule.cycle, int))
assert(isinstance(sa_schedule.options, dict))
assert(sa_schedule.get_state(sa_scheduler.celery) in ALL_STATES)
def test_abstract_news_implementation(
sa_session, sa_schedule, sa_root_news, sa_child_news):
assert(isinstance(sa_child_news.url, str))
assert(isinstance(sa_child_news.content, str))
assert(isinstance(sa_child_news.title, str))
assert(isinstance(sa_child_news.image, str) or sa_child_news.image is None)
assert(isinstance(sa_child_news.summary, str) or
sa_child_news.summary is None)
assert(sa_root_news.schedule == sa_schedule)
assert(sa_root_news.parent is None)
assert(sa_root_news.root == sa_root_news)
assert(sa_root_news.distance == 0)
assert(sa_child_news.schedule == sa_schedule)
assert(sa_child_news.parent == sa_root_news)
assert(sa_child_news.root == sa_root_news)
assert(sa_child_news.distance == 1)
| mit | -7,052,619,950,830,355,000 | 39.441176 | 80 | 0.711273 | false | 3.266033 | false | false | false |
haypo/fatoptimizer | test_fat_site.py | 1 | 1717 | """
Integration test: Test FAT mode with the fatoptimizer configured by the site
module.
"""
import dis
import fat
import io
import sys
import textwrap
import unittest
#if not any(transformer.name == 'fat' for transformer in sys.get_code_transformers()):
# raise Exception("test must be run with python3 -X fat")
def disassemble(obj):
output = io.StringIO()
dis.dis(obj, file=output)
return output.getvalue()
def call_builtin():
return len("abc")
class CallPureBuiltins(unittest.TestCase):
def test_code(self):
self.assertIn('LOAD_GLOBAL', disassemble(call_builtin))
self.assertEqual(len(fat.get_specialized(call_builtin)), 1)
code = fat.get_specialized(call_builtin)[0][0]
self.assertEqual(code.co_name, call_builtin.__name__)
self.assertNotIn('LOAD_GLOBAL', disassemble(code))
def test_import(self):
ns = {}
code = textwrap.dedent("""
from builtins import str as chr
def func():
# chr() is not the expected builtin function,
# it must not be optimized
return chr(65)
""")
exec(code, ns, ns)
func = ns['func']
self.assertEqual(fat.get_specialized(func), [])
def copy_builtin(x):
len(x)
def nested():
pass
return nested.__qualname__
class CopyBuiltinToConstant(unittest.TestCase):
def test_qualname(self):
self.assertEqual(len(fat.get_specialized(copy_builtin)), 1)
# optimizations must not modify the function name
qualname = copy_builtin("abc")
self.assertEqual(qualname, 'copy_builtin.<locals>.nested')
if __name__ == "__main__":
unittest.main()
| mit | 8,208,378,469,773,233,000 | 23.183099 | 86 | 0.627839 | false | 3.858427 | true | false | false |
edeposit/marcxml2mods | tests/test_transformators.py | 1 | 2292 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# Imports =====================================================================
import os
import os.path
from lxml import etree
from lxml import isoschematron
import pytest
from marcxml2mods import transformators
from test_xslt_transformer import DIRNAME
from test_xslt_transformer import OAI_FILENAME
# Functions & classes =========================================================
def get_test_data_context(fn):
return os.path.join(DIRNAME, fn)
def get_test_file_content(fn):
fn = get_test_data_context(fn)
with open(fn) as f:
return f.read()
def validity_test(xml):
xsd_doc = etree.parse(get_test_data_context("mods-3-4.xsd"))
xsd = etree.XMLSchema(xsd_doc)
xml = etree.fromstring(xml)
result = xsd.validate(xml)
if result == 0:
raise ValueError(xsd.error_log.filter_from_errors()[0])
# Fixtures ====================================================================
@pytest.fixture
def post_mono_example():
return get_test_file_content("postprocessed_mods.xml")
@pytest.fixture
def lang_example():
return get_test_file_content("lang_example.oai")
@pytest.fixture
def post_lang_example():
return get_test_file_content("postprocessed_lang_example.xml")
# Tests =======================================================================
def test_transform_to_mods_mono(post_mono_example):
result = transformators.transform_to_mods_mono(
OAI_FILENAME,
"someid",
"http://kitakitsune.org/raw",
)
assert result
assert result[0] == post_mono_example
validity_test(result[0])
def test_transform_to_mods_mono_lang_example(lang_example, post_lang_example):
result = transformators.transform_to_mods_mono(
lang_example,
"someid",
"http://kitakitsune.org/raw",
)
assert result
assert result[0] == post_lang_example
validity_test(result[0])
def test_marcxml2mods(post_mono_example):
result = transformators.marcxml2mods(
OAI_FILENAME,
"someid",
"http://kitakitsune.org/raw",
)
# TODO: Add tests for each type of document
assert result
assert result[0] == post_mono_example
validity_test(result[0])
| mit | -2,525,569,180,709,696,500 | 22.875 | 79 | 0.59555 | false | 3.63233 | true | false | false |
sounay/flaminggo-test | onadata/libs/authentication.py | 8 | 2833 | from django.utils.translation import ugettext as _
from django_digest import HttpDigestAuthenticator
from rest_framework.authentication import get_authorization_header
from rest_framework.authentication import BaseAuthentication
from rest_framework.authentication import TokenAuthentication
from rest_framework.exceptions import AuthenticationFailed
from rest_framework import exceptions
from onadata.apps.api.models.temp_token import TempToken
from django.utils import timezone
from django.conf import settings
def expired(time_token_created):
"""Checks if the time between when time_token_created and current time
is greater than the token expiry time.
:params time_token_created: The time the token we are checking was created.
:returns: Boolean True if not passed expired time, otherwise False.
"""
time_diff = (timezone.now() - time_token_created).total_seconds()
token_expiry_time = settings.DEFAULT_TEMP_TOKEN_EXPIRY_TIME
return True if time_diff > token_expiry_time else False
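# Worked example (editor's comment): with DEFAULT_TEMP_TOKEN_EXPIRY_TIME set
# to 21600 seconds (6 hours, an assumed value), a temp token created 7 hours
# ago gives time_diff = 25200 > 21600, so expired() returns True and
# TempTokenAuthentication below rejects it with 'Token expired'.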
class DigestAuthentication(BaseAuthentication):
def __init__(self):
self.authenticator = HttpDigestAuthenticator()
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'digest':
return None
if self.authenticator.authenticate(request):
return request.user, None
else:
raise AuthenticationFailed(
_(u"Invalid username/password"))
def authenticate_header(self, request):
response = self.authenticator.build_challenge_response()
return response['WWW-Authenticate']
class TempTokenAuthentication(TokenAuthentication):
model = TempToken
def authenticate(self, request):
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != b'temptoken':
return None
if len(auth) == 1:
m = 'Invalid token header. No credentials provided.'
raise exceptions.AuthenticationFailed(m)
elif len(auth) > 2:
m = 'Invalid token header. Token string should not contain spaces.'
raise exceptions.AuthenticationFailed(m)
return self.authenticate_credentials(auth[1])
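    # Editor's note: the expected request header is therefore of the form
    #     Authorization: TempToken <key>
    # with exactly two whitespace-separated parts; the key is looked up in the
    # TempToken model by authenticate_credentials() below.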
def authenticate_credentials(self, key):
try:
token = self.model.objects.get(key=key)
except self.model.DoesNotExist:
raise exceptions.AuthenticationFailed('Invalid token')
if not token.user.is_active:
raise exceptions.AuthenticationFailed('User inactive or deleted')
if expired(token.created):
raise exceptions.AuthenticationFailed('Token expired')
return (token.user, token)
def authenticate_header(self, request):
return 'TempToken'
| bsd-2-clause | -2,830,075,401,119,703,600 | 33.54878 | 79 | 0.696788 | false | 4.659539 | false | false | false |
panosl/helios | helios/shipping/models.py | 1 | 2404 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from helios.location.models import Country
from helios.conf import settings
if settings.IS_MULTILINGUAL:
import multilingual
class Shipper(models.Model):
if settings.IS_MULTILINGUAL:
class Translation(multilingual.Translation):
name = models.CharField(_('name'), max_length=80)
desc = models.TextField(_('description'), blank=True)
else:
name = models.CharField(_('name'), max_length=80)
desc = models.TextField(_('description'), blank=True)
slug = models.SlugField(unique=True, max_length=80)
class Meta:
verbose_name = _('shipper')
verbose_name_plural = _('shippers')
def __unicode__(self):
return self.name
class ShippingRegion(models.Model):
if settings.IS_MULTILINGUAL:
class Translation(multilingual.Translation):
name = models.CharField(_('name'), max_length=80)
desc = models.TextField(_('description'), blank=True)
else:
name = models.CharField(_('name'), max_length=80)
desc = models.TextField(_('description'), blank=True)
slug = models.SlugField(unique=True, max_length=80)
countries = models.ManyToManyField(Country)
shipper = models.ForeignKey(Shipper)
class Meta:
verbose_name = _('shipping region')
verbose_name_plural = _('shipping regions')
def __unicode__(self):
return u'%s-%s' % (self.shipper, self.name)
class ShippingMethod(models.Model):
if settings.IS_MULTILINGUAL:
class Translation(multilingual.Translation):
name = models.CharField(_('name'), max_length=80)
desc = models.TextField(_('description'), blank=True)
else:
name = models.CharField(_('name'), max_length=80)
desc = models.TextField(_('description'), blank=True)
slug = models.SlugField(unique=True, max_length=80)
shipper = models.ForeignKey(Shipper)
shipping_regions = models.ManyToManyField(ShippingRegion, through='ShippingMethodRegions')
class Meta:
verbose_name = _('shipping method')
verbose_name_plural = _('shipping methods')
def _cost(self):
pass
def __unicode__(self):
return self.name
class ShippingMethodRegions(models.Model):
region = models.ForeignKey(ShippingRegion)
method = models.ForeignKey(ShippingMethod)
cost = models.DecimalField(_('price'), max_digits=6, decimal_places=2)
class Meta:
verbose_name_plural = _('shipping method regions')
def __unicode__(self):
return u'%s-%s' % (self.region, self.method,)
| bsd-3-clause | -6,225,284,416,697,045,000 | 29.43038 | 91 | 0.72421 | false | 3.376404 | false | false | false |
scrollback/kuma | vendor/packages/sqlalchemy/test/orm/test_dynamic.py | 6 | 21952 | from sqlalchemy.test.testing import eq_, ne_
import operator
from sqlalchemy.orm import dynamic_loader, backref
from sqlalchemy.test import testing
from sqlalchemy import Integer, String, ForeignKey, desc, select, func
from sqlalchemy.test.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, create_session, Query, attributes
from sqlalchemy.orm.dynamic import AppenderMixin
from sqlalchemy.test.testing import eq_, AssertsCompiledSQL, assert_raises_message
from sqlalchemy.util import function_named
from test.orm import _base, _fixtures
class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_basic(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
q = sess.query(User)
u = q.filter(User.id==7).first()
eq_([User(id=7,
addresses=[Address(id=1, email_address='[email protected]')])],
q.filter(User.id==7).all())
eq_(self.static.user_address_result, q.all())
@testing.resolve_artifact_names
def test_statement(self):
"""test that the .statement accessor returns the actual statement that
would render, without any _clones called."""
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
q = sess.query(User)
u = q.filter(User.id==7).first()
self.assert_compile(
u.addresses.statement,
"SELECT addresses.id, addresses.user_id, addresses.email_address FROM "
"addresses WHERE :param_1 = addresses.user_id",
use_default_dialect=True
)
@testing.resolve_artifact_names
def test_order_by(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
u = sess.query(User).get(8)
eq_(
list(u.addresses.order_by(desc(Address.email_address))),
[Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'),
Address(email_address=u'[email protected]')]
)
@testing.resolve_artifact_names
def test_configured_order_by(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), order_by=desc(Address.email_address))
})
sess = create_session()
u = sess.query(User).get(8)
eq_(list(u.addresses), [Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]')])
# test cancellation of None, replacement with something else
eq_(
list(u.addresses.order_by(None).order_by(Address.email_address)),
[Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]')]
)
# test cancellation of None, replacement with nothing
eq_(
set(u.addresses.order_by(None)),
set([Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]')])
)
@testing.resolve_artifact_names
def test_count(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
u = sess.query(User).first()
eq_(u.addresses.count(), 1)
@testing.resolve_artifact_names
def test_backref(self):
mapper(Address, addresses, properties={
'user':relationship(User, backref=backref('addresses', lazy='dynamic'))
})
mapper(User, users)
sess = create_session()
ad = sess.query(Address).get(1)
def go():
ad.user = None
self.assert_sql_count(testing.db, go, 0)
sess.flush()
u = sess.query(User).get(7)
assert ad not in u.addresses
@testing.resolve_artifact_names
def test_no_count(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
q = sess.query(User)
# dynamic collection cannot implement __len__() (at least one that
# returns a live database result), else additional count() queries are
# issued when evaluating in a list context
def go():
eq_([User(id=7,
addresses=[Address(id=1,
email_address='[email protected]')])],
q.filter(User.id==7).all())
self.assert_sql_count(testing.db, go, 2)
@testing.resolve_artifact_names
def test_no_populate(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
u1 = User()
assert_raises_message(
NotImplementedError,
"Dynamic attributes don't support collection population.",
attributes.set_committed_value, u1, 'addresses', []
)
@testing.resolve_artifact_names
def test_m2m(self):
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, lazy="dynamic",
backref=backref('orders', lazy="dynamic"))
})
mapper(Item, items)
sess = create_session()
o1 = Order(id=15, description="order 10")
i1 = Item(id=10, description="item 8")
o1.items.append(i1)
sess.add(o1)
sess.flush()
assert o1 in i1.orders.all()
assert i1 in o1.items.all()
@testing.resolve_artifact_names
def test_association_nonaliased(self):
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items,
lazy="dynamic",
order_by=order_items.c.item_id)
})
mapper(Item, items)
sess = create_session()
o = sess.query(Order).first()
self.assert_compile(
o.items,
"SELECT items.id AS items_id, items.description AS items_description FROM items,"
" order_items WHERE :param_1 = order_items.order_id AND items.id = order_items.item_id"
" ORDER BY order_items.item_id",
use_default_dialect=True
)
# filter criterion against the secondary table
# works
eq_(
o.items.filter(order_items.c.item_id==2).all(),
[Item(id=2)]
)
@testing.resolve_artifact_names
def test_transient_detached(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
u1 = User()
u1.addresses.append(Address())
eq_(u1.addresses.count(), 1)
eq_(u1.addresses[0], Address())
@testing.resolve_artifact_names
def test_custom_query(self):
class MyQuery(Query):
pass
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses),
query_class=MyQuery)
})
sess = create_session()
u = User()
sess.add(u)
col = u.addresses
assert isinstance(col, Query)
assert isinstance(col, MyQuery)
assert hasattr(col, 'append')
eq_(type(col).__name__, 'AppenderMyQuery')
q = col.limit(1)
assert isinstance(q, Query)
assert isinstance(q, MyQuery)
assert not hasattr(q, 'append')
eq_(type(q).__name__, 'MyQuery')
@testing.resolve_artifact_names
def test_custom_query_with_custom_mixin(self):
class MyAppenderMixin(AppenderMixin):
def add(self, items):
if isinstance(items, list):
for item in items:
self.append(item)
else:
self.append(items)
class MyQuery(Query):
pass
class MyAppenderQuery(MyAppenderMixin, MyQuery):
query_class = MyQuery
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses),
query_class=MyAppenderQuery)
})
sess = create_session()
u = User()
sess.add(u)
col = u.addresses
assert isinstance(col, Query)
assert isinstance(col, MyQuery)
assert hasattr(col, 'append')
assert hasattr(col, 'add')
eq_(type(col).__name__, 'MyAppenderQuery')
q = col.limit(1)
assert isinstance(q, Query)
assert isinstance(q, MyQuery)
assert not hasattr(q, 'append')
assert not hasattr(q, 'add')
eq_(type(q).__name__, 'MyQuery')
class SessionTest(_fixtures.FixtureTest):
run_inserts = None
@testing.resolve_artifact_names
def test_events(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
u1 = User(name='jack')
a1 = Address(email_address='foo')
sess.add_all([u1, a1])
sess.flush()
eq_(testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)), 0)
u1 = sess.query(User).get(u1.id)
u1.addresses.append(a1)
sess.flush()
eq_(testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall(),
[(a1.id, u1.id, 'foo')])
u1.addresses.remove(a1)
sess.flush()
eq_(testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)), 0)
u1.addresses.append(a1)
sess.flush()
eq_(testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall(),
[(a1.id, u1.id, 'foo')])
a2 = Address(email_address='bar')
u1.addresses.remove(a1)
u1.addresses.append(a2)
sess.flush()
eq_(testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall(),
[(a2.id, u1.id, 'bar')])
@testing.resolve_artifact_names
def test_merge(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), order_by=addresses.c.email_address)
})
sess = create_session()
u1 = User(name='jack')
a1 = Address(email_address='a1')
a2 = Address(email_address='a2')
a3 = Address(email_address='a3')
u1.addresses.append(a2)
u1.addresses.append(a3)
sess.add_all([u1, a1])
sess.flush()
u1 = User(id=u1.id, name='jack')
u1.addresses.append(a1)
u1.addresses.append(a3)
u1 = sess.merge(u1)
eq_(attributes.get_history(u1, 'addresses'), (
[a1],
[a3],
[a2]
))
sess.flush()
eq_(
list(u1.addresses),
[a1, a3]
)
@testing.resolve_artifact_names
def test_flush(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session()
u1 = User(name='jack')
u2 = User(name='ed')
u2.addresses.append(Address(email_address='[email protected]'))
u1.addresses.append(Address(email_address='[email protected]'))
sess.add_all((u1, u2))
sess.flush()
from sqlalchemy.orm import attributes
eq_(attributes.get_history(u1, 'addresses'), ([], [Address(email_address='[email protected]')], []))
sess.expunge_all()
# test the test fixture a little bit
ne_(User(name='jack', addresses=[Address(email_address='wrong')]),
sess.query(User).first())
eq_(User(name='jack', addresses=[Address(email_address='[email protected]')]),
sess.query(User).first())
eq_([
User(name='jack', addresses=[Address(email_address='[email protected]')]),
User(name='ed', addresses=[Address(email_address='[email protected]')])
],
sess.query(User).all())
@testing.resolve_artifact_names
def test_hasattr(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
u1 = User(name='jack')
assert 'addresses' not in u1.__dict__.keys()
u1.addresses = [Address(email_address='test')]
assert 'addresses' in dir(u1)
@testing.resolve_artifact_names
def test_collection_set(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), order_by=addresses.c.email_address)
})
sess = create_session(autoflush=True, autocommit=False)
u1 = User(name='jack')
a1 = Address(email_address='a1')
a2 = Address(email_address='a2')
a3 = Address(email_address='a3')
a4 = Address(email_address='a4')
sess.add(u1)
u1.addresses = [a1, a3]
eq_(list(u1.addresses), [a1, a3])
u1.addresses = [a1, a2, a4]
eq_(list(u1.addresses), [a1, a2, a4])
u1.addresses = [a2, a3]
eq_(list(u1.addresses), [a2, a3])
u1.addresses = []
eq_(list(u1.addresses), [])
@testing.resolve_artifact_names
def test_rollback(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses))
})
sess = create_session(expire_on_commit=False, autocommit=False, autoflush=True)
u1 = User(name='jack')
u1.addresses.append(Address(email_address='[email protected]'))
sess.add(u1)
sess.flush()
sess.commit()
u1.addresses.append(Address(email_address='[email protected]'))
eq_(u1.addresses.order_by(Address.id).all(),
[Address(email_address='[email protected]'), Address(email_address='[email protected]')])
sess.rollback()
eq_(u1.addresses.all(), [Address(email_address='[email protected]')])
@testing.fails_on('maxdb', 'FIXME: unknown')
@testing.resolve_artifact_names
def test_delete_nocascade(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), order_by=Address.id,
backref='user')
})
sess = create_session(autoflush=True)
u = User(name='ed')
u.addresses.append(Address(email_address='a'))
u.addresses.append(Address(email_address='b'))
u.addresses.append(Address(email_address='c'))
u.addresses.append(Address(email_address='d'))
u.addresses.append(Address(email_address='e'))
u.addresses.append(Address(email_address='f'))
sess.add(u)
eq_(Address(email_address='c'), u.addresses[2])
sess.delete(u.addresses[2])
sess.delete(u.addresses[4])
sess.delete(u.addresses[3])
eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')],
list(u.addresses))
sess.expunge_all()
u = sess.query(User).get(u.id)
sess.delete(u)
# u.addresses relationship will have to force the load
# of all addresses so that they can be updated
sess.flush()
sess.close()
eq_(testing.db.scalar(addresses.count(addresses.c.user_id != None)), 0)
@testing.fails_on('maxdb', 'FIXME: unknown')
@testing.resolve_artifact_names
def test_delete_cascade(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), order_by=Address.id,
backref='user', cascade="all, delete-orphan")
})
sess = create_session(autoflush=True)
u = User(name='ed')
u.addresses.append(Address(email_address='a'))
u.addresses.append(Address(email_address='b'))
u.addresses.append(Address(email_address='c'))
u.addresses.append(Address(email_address='d'))
u.addresses.append(Address(email_address='e'))
u.addresses.append(Address(email_address='f'))
sess.add(u)
eq_(Address(email_address='c'), u.addresses[2])
sess.delete(u.addresses[2])
sess.delete(u.addresses[4])
sess.delete(u.addresses[3])
eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')],
list(u.addresses))
sess.expunge_all()
u = sess.query(User).get(u.id)
sess.delete(u)
# u.addresses relationship will have to force the load
# of all addresses so that they can be updated
sess.flush()
sess.close()
eq_(testing.db.scalar(addresses.count()), 0)
@testing.fails_on('maxdb', 'FIXME: unknown')
@testing.resolve_artifact_names
def test_remove_orphans(self):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), order_by=Address.id,
cascade="all, delete-orphan", backref='user')
})
sess = create_session(autoflush=True)
u = User(name='ed')
u.addresses.append(Address(email_address='a'))
u.addresses.append(Address(email_address='b'))
u.addresses.append(Address(email_address='c'))
u.addresses.append(Address(email_address='d'))
u.addresses.append(Address(email_address='e'))
u.addresses.append(Address(email_address='f'))
sess.add(u)
eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='c'),
Address(email_address='d'), Address(email_address='e'), Address(email_address='f')],
sess.query(Address).all())
eq_(Address(email_address='c'), u.addresses[2])
try:
del u.addresses[3]
assert False
except TypeError, e:
assert "doesn't support item deletion" in str(e), str(e)
for a in u.addresses.filter(Address.email_address.in_(['c', 'e', 'f'])):
u.addresses.remove(a)
eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')],
list(u.addresses))
eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')],
sess.query(Address).all())
sess.delete(u)
sess.close()
@testing.resolve_artifact_names
def _backref_test(self, autoflush, saveuser):
mapper(User, users, properties={
'addresses':dynamic_loader(mapper(Address, addresses), backref='user')
})
sess = create_session(autoflush=autoflush)
u = User(name='buffy')
a = Address(email_address='[email protected]')
a.user = u
if saveuser:
sess.add(u)
else:
sess.add(a)
if not autoflush:
sess.flush()
assert u in sess
assert a in sess
eq_(list(u.addresses), [a])
a.user = None
if not autoflush:
eq_(list(u.addresses), [a])
if not autoflush:
sess.flush()
eq_(list(u.addresses), [])
def test_backref_autoflush_saveuser(self):
self._backref_test(True, True)
def test_backref_autoflush_savead(self):
self._backref_test(True, False)
def test_backref_saveuser(self):
self._backref_test(False, True)
def test_backref_savead(self):
self._backref_test(False, False)
class DontDereferenceTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', String(40)),
Column('fullname', String(100)),
Column('password', String(15)))
Table('addresses', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('email_address', String(100), nullable=False),
Column('user_id', Integer, ForeignKey('users.id')))
@classmethod
@testing.resolve_artifact_names
def setup_mappers(cls):
class User(_base.ComparableEntity):
pass
class Address(_base.ComparableEntity):
pass
mapper(User, users, properties={
'addresses': relationship(Address, backref='user', lazy='dynamic')
})
mapper(Address, addresses)
@testing.resolve_artifact_names
def test_no_deref(self):
session = create_session()
user = User()
user.name = 'joe'
user.fullname = 'Joe User'
user.password = 'Joe\'s secret'
address = Address()
address.email_address = '[email protected]'
address.user = user
session.add(user)
session.flush()
session.expunge_all()
def query1():
session = create_session(testing.db)
user = session.query(User).first()
return user.addresses.all()
def query2():
session = create_session(testing.db)
return session.query(User).first().addresses.all()
def query3():
session = create_session(testing.db)
user = session.query(User).first()
return session.query(User).first().addresses.all()
eq_(query1(), [Address(email_address='[email protected]')])
eq_(query2(), [Address(email_address='[email protected]')])
eq_(query3(), [Address(email_address='[email protected]')])
| mpl-2.0 | -5,822,293,660,411,992,000 | 33.844444 | 154 | 0.577761 | false | 3.840448 | true | false | false |
smmosquera/serge | render.py | 2 | 12006 | """Classes to perform rendering"""
import pygame
import common
import serialize
import camera
import visual
import events
class DuplicateLayer(Exception): """The layer was already present"""
class UnknownLayer(Exception): """The layer was not found"""
class NoLayer(Exception): """A layer was not found when one was expected"""
class Renderer(common.Loggable, serialize.Serializable, common.EventAware):
"""The main rendering component"""
my_properties = (
serialize.L('layers', [], 'the layers we render to'),
serialize.I('width', 640, 'the width of the screen'),
serialize.I('height', 480, 'the height of the screen'),
serialize.S('title', 'Serge', 'the title of the main window'),
serialize.L('backcolour', (0,0,0), 'the background colour'),
serialize.O('camera', None, 'the camera for this renderer'),
serialize.O('icon', None, 'the icon for the main window'),
serialize.B('fullscreen', False, 'whether to display in full screen or not'),
)
def __init__(self, width=640, height=480, title='Serge', backcolour=(0,0,0), icon=None, fullscreen=False):
"""Initialise the Renderer"""
self.addLogger()
self.initEvents()
self.width = width
self.height = height
self.title = title
self.layers = []
self.backcolour = backcolour
self.fullscreen = fullscreen
self.camera = camera.Camera()
self.camera.setSpatial(0, 0, self.width, self.height)
self.icon = icon
self.init()
### Serializing ###
def init(self):
"""Initialise from serialized state"""
self.addLogger()
self.initEvents()
self._sort_needed = False
pygame.display.set_caption(self.title)
#
# Tried the following with flags but no impact pygame.FULLSCREEN|pygame.HWSURFACE|pygame.DOUBLEBUF
flags = pygame.FULLSCREEN if self.fullscreen else 0
self.surface = pygame.display.set_mode((self.width, self.height), flags | pygame.HWSURFACE)
for layer in self.layers:
layer.setSurface(pygame.Surface((self.width, self.height), pygame.SRCALPHA, 32))
layer.init()
self.camera.init()
self.camera.resizeTo(self.width, self.height)
if self.icon:
pygame.display.set_icon(visual.Register.getItem(self.icon).raw_image)
#
self._render_layer_dict = None
### Layers ###
def addLayer(self, layer):
"""Add a layer to the rendering"""
self.log.info('Adding layer "%s" at %d' % (layer.name, layer.order))
if layer in self.layers:
raise DuplicateLayer('The layer %s is already in the renderer' % layer)
else:
self.layers.append(layer)
self._sort_needed = True
self.resetSurfaces()
#
# Update the layer dictionary cache
self.getRenderingOrderDictionary()
#
return layer
def getLayer(self, name):
"""Return the named layer"""
for layer in self.layers:
if layer.name == name:
return layer
else:
raise UnknownLayer('No layer with name "%s" was found' % (name,))
def getLayerBefore(self, layer):
"""Return the layer before the specified one in terms of rendering order"""
for test_layer in reversed(self.getLayers()):
if test_layer.order < layer.order:
return test_layer
else:
raise NoLayer('There is no layer before %s' % layer.getNiceName())
def resetSurfaces(self):
"""Recreate the surfaces for our layers
When layers are added we sometimes need to reset the layers,
for instance, virtual layers need to be shifted around so
that they have the right order.
"""
self._sortLayers()
for layer in self.getLayers():
layer.initSurface(self)
def getLayers(self):
"""Return all the layers"""
return self.layers
def removeLayer(self, layer):
"""Remove the layer from the rendering"""
try:
self.layers.remove(layer)
except ValueError:
raise UnknownLayer('The layer %s was not found' % layer.getNiceName())
#
# Update the layer dictionary cache
self.getRenderingOrderDictionary()
def removeLayerNamed(self, name):
"""Remove the layer with the specific name"""
layer = self.getLayer(name)
self.removeLayer(layer)
def clearLayers(self):
"""Clear all the layers"""
self.layers = []
def _sortLayers(self):
"""Sort the layers into the right order"""
self.layers.sort(lambda l1, l2 : cmp(l1.order, l2.order))
self._sort_needed = False
def orderActors(self, actors):
"""Return the list of actors sorted by who should be processed first to correctly render
The actors are checked to see which layer they reside on and then
this is used to order the returned list.
"""
#
# Make a lookup table to quickly find layers
layers = dict([(layer.name, layer.order) for layer in self.getLayers()])
actor_list = [(layers.get(actor.getLayerName(), 0), actor) for actor in actors]
actor_list.sort()
#
return [actor for _, actor in actor_list]
def getRenderingOrder(self, layer):
"""Return the order that a layer will be rendered in (0 = first)"""
try:
return self.layers.index(layer)
except ValueError:
raise UnknownLayer('The layer %s was not found' % layer)
def getRenderingOrderDictionary(self):
"""Return a dictionary of the rendering orders of each layer by name ({name:0, name:1} etc)
The dictionary is actually a live copy that will be updated if you
add layers to the renderer so it is safe for you to cache it and
re-use it.
Changing the dictionary results in undefined behaviour.
"""
order = dict([(layer.name, idx) for idx, layer in enumerate(self.getLayers())])
if self._render_layer_dict is None:
#
# Set the dictionary
self._render_layer_dict = order
else:
#
# Clear and reset the cached copy of the dictionary
for k in self._render_layer_dict.keys():
del(self._render_layer_dict[k])
self._render_layer_dict.update(order)
#
return self._render_layer_dict
### Rendering ###
def clearSurface(self):
"""Clear the surface"""
self.surface.fill(self.backcolour)
def preRender(self):
"""Prepare for new rendering"""
self.clearSurface()
for layer in self.getLayers():
if layer.active:
layer.clearSurface()
layer.preRender()
def render(self):
"""Render all the layers"""
#
# Post rendering events
for layer in self.layers:
if layer.active:
layer.postRender()
#
# Put layers in the right order
if self._sort_needed:
self._sortLayers()
#
# Render all layers
for layer in self.layers:
if layer.active:
layer.render(self.surface)
#
self.processEvent((events.E_AFTER_RENDER, self))
def getSurface(self):
"""Return the overall surface"""
return self.surface
### Camera stuff ###
def setCamera(self, camera):
"""Set our camera"""
self.camera = camera
def getCamera(self):
"""Return our camera"""
return self.camera
def getScreenSize(self):
"""Returns the screen size"""
return (self.width, self.height)
class RenderingLayer(common.Loggable, serialize.Serializable, common.EventAware):
"""A layer on which to render things
This is the abstract version of the layer. Create
subclasses of this to do useful things.
"""
my_properties = (
serialize.S('name', '', 'the name of the layer'),
serialize.I('order', 0, 'the order to render (0=low)'),
serialize.B('active', True, 'whether this layer is active'),
serialize.B('static', False, 'whether this layer is static with respect to the camera'),
)
def __init__(self, name, order):
"""Initialise the Layer"""
super(RenderingLayer, self).__init__()
self.initEvents()
self.name = name
self.order = order
self.surface = None
self.active = True
self.static = False
def setSurface(self, surface):
"""Set our surface"""
self.surface = surface
def getSurface(self):
"""Return the surface"""
return self.surface
def initSurface(self, renderer):
"""Create the surface that we need to draw on"""
raise NotImplementedError
def getNiceName(self):
"""Return the nice name for this layer"""
return '<Layer %d: %s - order %d>' % (id(self), self.name, self.order)
def setStatic(self, static):
"""Determine whether this layer is static with respect to camera movements or not"""
self.static = static
### Serializing ###
def init(self):
"""Initialise from serialized state"""
self.initEvents()
### Rendering ###
def clearSurface(self):
"""Clear our surface"""
raise NotImplementedError
def preRender(self):
"""Called before the layer has anything rendered to"""
self.processEvent((events.E_BEFORE_RENDER, self))
def render(self, surface):
"""Render to a surface"""
raise NotImplementedError
def postRender(self):
"""Called after the layer has has had everything rendered on it"""
self.processEvent((events.E_AFTER_RENDER, self))
class Layer(RenderingLayer):
"""A rendering layer with its own surface
This type of layer is useful for compositing because
you can do things to this layer once it has been
rendered (eg shadows, glows, blurs etc).
"""
def initSurface(self, renderer):
"""Create the surface that we need to draw on
We create a surface that is identical to the background for the
main renderer.
"""
self.setSurface(pygame.Surface((renderer.width, renderer.height), pygame.SRCALPHA, 32))
def clearSurface(self):
"""Clear our surface"""
self.surface.fill((0,0,0,0))
def render(self, surface):
"""Render to a surface"""
surface.blit(self.surface, (0,0))
class VirtualLayer(RenderingLayer):
"""A rendering layer that doesn't have its own surface
This layer will render to the layer immediately
before it in the rendering cycle.
"""
def initSurface(self, renderer):
"""Create the surface that we need to draw on
We do not want a surface ourself but we need the next surface
in line as far as the renderer is concerned.
"""
try:
self.setSurface(renderer.getLayerBefore(self).getSurface())
except NoLayer:
self.setSurface(renderer.getSurface())
def clearSurface(self):
"""Clear our surface
Nothing to do here - handled by the real owner of the surface.
"""
pass
def render(self, surface):
"""Render to a surface
Nothing to do here - handled by the real owner of the surface.
"""
pass
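# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal, hedged example of how Renderer and Layer are meant to fit
# together, assuming the rest of the serge engine provides actors that draw
# onto the layer surfaces each frame.
#
#   renderer = Renderer(width=640, height=480, title='Demo')
#   background = renderer.addLayer(Layer('background', 0))
#   foreground = renderer.addLayer(Layer('foreground', 10))
#
#   # per frame:
#   renderer.preRender()
#   # ... blit actors onto background.getSurface() / foreground.getSurface() ...
#   renderer.render()
#   pygame.display.flip()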
| lgpl-3.0 | 6,961,334,540,211,068,000 | 31.536585 | 110 | 0.583208 | false | 4.458225 | false | false | false |
caperren/Archives | OSU Coursework/CS 361 - Software Engineering I/Assignment 1/5/kwic.py | 2 | 1142 | def split_by_periods(document):
output_array = []
sentence_array_temp = ""
for current_char in document:
if current_char != "\n":
sentence_array_temp += current_char
if current_char == ".":
output_array.append(sentence_array_temp)
sentence_array_temp = ""
if sentence_array_temp:
output_array.append(sentence_array_temp)
return output_array
def split_by_word_as_tuples(sentence_array):
output_array = []
index_incrementer = 0
for sentence in sentence_array:
words_array = sentence.split(" ")
words_array = filter(None, words_array)
output_array.append((words_array, index_incrementer))
index_incrementer += 1
return output_array
def kwic(document, listPairs=False, ignoreWords=None, periodsToBreaks=False):
if not document:
return []
if periodsToBreaks:
split_into_sentences = split_by_periods(document)
else:
split_into_sentences = document.splitlines()
split_into_word_tuples = split_by_word_as_tuples(split_into_sentences)
return split_into_word_tuples
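# --- Illustrative usage (added for clarity; the sample text is hypothetical) ---
# kwic() returns one (word_list, sentence_index) tuple per sentence; note that
# in the version above listPairs and ignoreWords are accepted but not used.
if __name__ == "__main__":
    sample = "the quick brown fox.\njumps over the lazy dog."
    # periodsToBreaks=True splits on '.', otherwise the text is split by lines.
    print(kwic(sample, periodsToBreaks=True))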
| gpl-3.0 | 2,724,953,234,113,535,000 | 24.954545 | 77 | 0.641856 | false | 3.781457 | false | false | false |
Cadair/ginga | ginga/web/bokehw/CanvasRenderBokeh.py | 3 | 5041 | #
# CanvasRenderBokeh.py -- for rendering into a Bokeh widget
#
# Eric Jeschke ([email protected])
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
import numpy
from . import BokehHelp
from ginga.canvas.mixins import *
# force registration of all canvas types
import ginga.canvas.types.all
# Bokeh imports
from bokeh.plotting import figure
class RenderContext(object):
def __init__(self, viewer):
self.viewer = viewer
self.shape = None
# TODO: encapsulate this drawable
self.cr = BokehHelp.BokehContext(self.viewer.figure)
self.pen = None
self.brush = None
self.font = None
def set_line_from_shape(self, shape):
# TODO: support line width and style
alpha = getattr(shape, 'alpha', 1.0)
self.pen = self.cr.get_pen(shape.color, alpha=alpha)
def set_fill_from_shape(self, shape):
fill = getattr(shape, 'fill', False)
if fill:
if hasattr(shape, 'fillcolor') and shape.fillcolor:
color = shape.fillcolor
else:
color = shape.color
alpha = getattr(shape, 'alpha', 1.0)
alpha = getattr(shape, 'fillalpha', alpha)
self.brush = self.cr.get_brush(color, alpha=alpha)
else:
self.brush = None
def set_font_from_shape(self, shape):
if hasattr(shape, 'font'):
if hasattr(shape, 'fontsize') and shape.fontsize is not None:
fontsize = shape.fontsize
else:
fontsize = shape.scale_font(self.viewer)
alpha = getattr(shape, 'alpha', 1.0)
self.font = self.cr.get_font(shape.font, fontsize, shape.color,
alpha=alpha)
else:
self.font = None
def initialize_from_shape(self, shape, line=True, fill=True, font=True):
if line:
self.set_line_from_shape(shape)
if fill:
self.set_fill_from_shape(shape)
if font:
self.set_font_from_shape(shape)
def set_line(self, color, alpha=1.0, linewidth=1, style='solid'):
# TODO: support style
self.pen = self.cr.get_pen(color, alpha=alpha, linewidth=linewidth,
linestyle=style)
def set_fill(self, color, alpha=1.0):
if color is None:
self.brush = None
else:
self.brush = self.cr.get_brush(color, alpha=alpha)
def set_font(self, fontname, fontsize):
self.font = self.cr.get_font(fontname, fontsize, 'black',
alpha=1.0)
def text_extents(self, text):
return self.cr.text_extents(text, self.font)
##### DRAWING OPERATIONS #####
def draw_text(self, cx, cy, text, rot_deg=0.0):
self.cr.init(angle=[numpy.radians(rot_deg)])
self.cr.update_font(self.pen, self.font)
self.cr.plot.text(x=[cx], y=[cy], text=[text], **self.cr.kwdargs)
def draw_polygon(self, cpoints):
self.cr.init()
self.cr.update_patch(self.pen, self.brush)
xy = numpy.array(cpoints)
self.cr.plot.patches(xs=[xy.T[0]], ys=[xy.T[1]], **self.cr.kwdargs)
def draw_circle(self, cx, cy, cradius):
self.cr.init()
self.cr.update_patch(self.pen, self.brush)
self.cr.plot.circle(x=[cx], y=[cy], radius=[cradius],
**self.cr.kwdargs)
def draw_bezier_curve(self, verts):
self.cr.init()
self.cr.update_line(self.pen)
cx, cy = verts.T[0], verts.T[1]
self.cr.plot.bezier(x0=[cx[0]], y0=[cy[0]],
x1=[cx[3]], y1=[cy[3]],
cx0=[cx[1]], cy0=[cy[1]],
cx1=[cx[2]], cy1=[cy[2]],
**self.cr.kwdargs)
def draw_ellipse(self, cx, cy, cxradius, cyradius, theta):
self.cr.init()
self.cr.update_patch(self.pen, self.brush)
self.cr.plot.oval(x=[cx], y=[cy],
width=[cxradius*2.0], height=[cyradius*2.0],
angle=[numpy.radians(theta)], **self.cr.kwdargs)
def draw_line(self, cx1, cy1, cx2, cy2):
self.cr.init()
self.cr.update_line(self.pen)
self.cr.plot.line(x=[cx1, cx2], y=[cy1, cy2], **self.cr.kwdargs)
def draw_path(self, cpoints):
self.cr.init()
self.cr.update_line(self.pen)
xy = numpy.array(cpoints)
self.cr.plot.line(x=xy.T[0], y=xy.T[1], **self.cr.kwdargs)
class CanvasRenderer(object):
def __init__(self, viewer):
self.viewer = viewer
def setup_cr(self, shape):
cr = RenderContext(self.viewer)
cr.initialize_from_shape(shape)
return cr
def get_dimensions(self, shape):
cr = self.setup_cr(shape)
cr.set_font_from_shape(shape)
return cr.text_extents(shape.text)
#END
| bsd-3-clause | -4,610,996,261,689,369,000 | 29.36747 | 76 | 0.558024 | false | 3.3969 | false | false | false |
erh3cq/hyperspy | hyperspy/learn/ornmf.py | 3 | 13817 | # -*- coding: utf-8 -*-
# Copyright 2007-2020 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import logging
from itertools import chain
import numpy as np
from scipy.stats import halfnorm
from hyperspy.external.progressbar import progressbar
from hyperspy.misc.math_tools import check_random_state
_logger = logging.getLogger(__name__)
def _thresh(X, lambda1, vmax):
"""Soft-thresholding with clipping."""
res = np.abs(X) - lambda1
np.maximum(res, 0.0, out=res)
res *= np.sign(X)
np.clip(res, -vmax, vmax, out=res)
return res
def _mrdivide(B, A):
"""Solves xB = A as per Matlab."""
if isinstance(B, np.ndarray):
if len(B.shape) == 2 and B.shape[0] == B.shape[1]:
# square array
return np.linalg.solve(A.T, B.T).T
else:
# Set rcond default value to match numpy 1.14 default value with
# previous numpy version
rcond = np.finfo(float).eps * max(A.shape)
return np.linalg.lstsq(A.T, B.T, rcond=rcond)[0].T
else:
return B / A
def _project(W):
newW = W.copy()
np.maximum(newW, 0, out=newW)
sumsq = np.sqrt(np.sum(W ** 2, axis=0))
np.maximum(sumsq, 1, out=sumsq)
return _mrdivide(newW, np.diag(sumsq))
def _solveproj(v, W, lambda1, kappa=1, h=None, e=None, vmax=None):
m, n = W.shape
v = v.T
if vmax is None:
vmax = v.max()
if len(v.shape) == 2:
batch_size = v.shape[1]
eshape = (m, batch_size)
hshape = (n, batch_size)
else:
eshape = (m,)
hshape = (n,)
if h is None or h.shape != hshape:
h = np.zeros(hshape)
if e is None or e.shape != eshape:
e = np.zeros(eshape)
eta = kappa / np.linalg.norm(W, "fro") ** 2
maxiter = 1e6
iters = 0
while True:
iters += 1
# Solve for h
htmp = h
h = h - eta * W.T @ (W @ h + e - v)
np.maximum(h, 0.0, out=h)
# Solve for e
etmp = e
e = _thresh(v - W @ h, lambda1, vmax)
# Stop conditions
stoph = np.linalg.norm(h - htmp, 2)
stope = np.linalg.norm(e - etmp, 2)
stop = max(stoph, stope) / m
if stop < 1e-5 or iters > maxiter:
break
return h, e
class ORNMF:
"""Performs Online Robust NMF with missing or corrupted data.
The ORNMF code is based on a transcription of the online proximal gradient
descent (PGD) algorithm MATLAB code obtained from the authors of [Zhao2016]_.
    It has been updated to also include an L2-normalization cost function that
is able to deal with sparse corruptions and/or outliers slightly faster
(please see ORPCA implementation for details). A further modification
has been made to allow for a changing subspace W, where X ~= WH^T + E
in the ORNMF framework.
Read more in the :ref:`User Guide <mva.rnmf>`.
References
----------
.. [Zhao2016] Zhao, Renbo, and Vincent YF Tan. "Online nonnegative matrix
factorization with outliers." Acoustics, Speech and Signal Processing
(ICASSP), 2016 IEEE International Conference on. IEEE, 2016.
"""
def __init__(
self,
rank,
store_error=False,
lambda1=1.0,
kappa=1.0,
method="PGD",
subspace_learning_rate=1.0,
subspace_momentum=0.5,
random_state=None,
):
"""Creates Online Robust NMF instance that can learn a representation.
Parameters
----------
rank : int
The rank of the representation (number of components/factors)
store_error : bool, default False
If True, stores the sparse error matrix.
lambda1 : float
Nuclear norm regularization parameter.
kappa : float
Step-size for projection solver.
method : {'PGD', 'RobustPGD', 'MomentumSGD'}, default 'PGD'
* 'PGD' - Proximal gradient descent
* 'RobustPGD' - Robust proximal gradient descent
* 'MomentumSGD' - Stochastic gradient descent with momentum
subspace_learning_rate : float
Learning rate for the 'MomentumSGD' method. Should be a
float > 0.0
subspace_momentum : float
Momentum parameter for 'MomentumSGD' method, should be
a float between 0 and 1.
random_state : None or int or RandomState instance, default None
Used to initialize the subspace on the first iteration.
"""
self.n_features = None
self.iterating = False
self.t = 0
if store_error:
self.E = []
else:
self.E = None
self.rank = rank
self.robust = False
self.subspace_tracking = False
self.lambda1 = lambda1
self.kappa = kappa
self.subspace_learning_rate = subspace_learning_rate
self.subspace_momentum = subspace_momentum
self.random_state = check_random_state(random_state)
# Check options are valid
if method not in ("PGD", "RobustPGD", "MomentumSGD"):
raise ValueError("'method' not recognised")
if method == "RobustPGD":
self.robust = True
if method == "MomentumSGD":
self.subspace_tracking = True
if subspace_momentum < 0.0 or subspace_momentum > 1:
raise ValueError("'subspace_momentum' must be a float between 0 and 1")
def _setup(self, X):
self.h, self.e, self.v = None, None, None
if isinstance(X, np.ndarray):
n, m = X.shape
avg = np.sqrt(X.mean() / m)
iterating = False
else:
x = next(X)
m = len(x)
avg = np.sqrt(x.mean() / m)
X = chain([x], X)
iterating = True
self.n_features = m
self.iterating = iterating
self.W = halfnorm.rvs(
size=(self.n_features, self.rank), random_state=self.random_state
)
self.W = np.abs(avg * self.W / np.sqrt(self.rank))
self.H = []
if self.subspace_tracking:
self.vnew = np.zeros_like(self.W)
else:
self.A = np.zeros((self.rank, self.rank))
self.B = np.zeros((self.n_features, self.rank))
return X
def fit(self, X, batch_size=None):
"""Learn NMF components from the data.
Parameters
----------
X : {numpy.ndarray, iterator}
[n_samples x n_features] matrix of observations
or an iterator that yields samples, each with n_features elements.
batch_size : {None, int}
If not None, learn the data in batches, each of batch_size samples
or less.
"""
if self.n_features is None:
X = self._setup(X)
num = None
prod = np.outer
if batch_size is not None:
if not isinstance(X, np.ndarray):
raise ValueError("can't batch iterating data")
else:
prod = np.dot
length = X.shape[0]
num = max(length // batch_size, 1)
X = np.array_split(X, num, axis=0)
if isinstance(X, np.ndarray):
num = X.shape[0]
X = iter(X)
h, e = self.h, self.e
for v in progressbar(X, leave=False, total=num, disable=num == 1):
h, e = _solveproj(v, self.W, self.lambda1, self.kappa, h=h, e=e)
self.v = v
self.e = e
self.h = h
self.H.append(h)
if self.E is not None:
self.E.append(e)
self._solve_W(prod(h, h.T), prod((v.T - e), h.T))
self.t += 1
self.h = h
self.e = e
def _solve_W(self, A, B):
if not self.subspace_tracking:
self.A += A
self.B += B
eta = self.kappa / np.linalg.norm(self.A, "fro")
if self.robust:
# exactly as in the Zhao & Tan paper
n = 0
lasttwo = np.zeros(2)
while n <= 2 or (
np.abs((lasttwo[1] - lasttwo[0]) / lasttwo[0]) > 1e-5 and n < 1e9
):
self.W -= eta * (self.W @ self.A - self.B)
self.W = _project(self.W)
n += 1
lasttwo[0] = lasttwo[1]
lasttwo[1] = 0.5 * np.trace(
self.W.T.dot(self.W).dot(self.A)
) - np.trace(self.W.T.dot(self.B))
else:
# Tom Furnival (@tjof2) approach
# - copied from the ORPCA implementation
# of gradient descent in ./rpca.py
if self.subspace_tracking:
learn = self.subspace_learning_rate * (
1 + self.subspace_learning_rate * self.lambda1 * self.t
)
vold = self.subspace_momentum * self.vnew
self.vnew = (self.W @ A - B) / learn
self.W -= vold + self.vnew
else:
self.W -= eta * (self.W @ self.A - self.B)
np.maximum(self.W, 0.0, out=self.W)
self.W /= max(np.linalg.norm(self.W, "fro"), 1.0)
def project(self, X, return_error=False):
"""Project the learnt components on the data.
Parameters
----------
X : {numpy.ndarray, iterator}
[n_samples x n_features] matrix of observations
or an iterator that yields n_samples, each with n_features elements.
return_error : bool
If True, returns the sparse error matrix as well. Otherwise only
the weights (loadings)
"""
H = []
if return_error:
E = []
num = None
if isinstance(X, np.ndarray):
num = X.shape[0]
X = iter(X)
for v in progressbar(X, leave=False, total=num):
h, e = _solveproj(v, self.W, self.lambda1, self.kappa, vmax=np.inf)
H.append(h.copy())
if return_error:
E.append(e.copy())
H = np.stack(H, axis=-1)
if return_error:
return H, np.stack(E, axis=-1)
else:
return H
def finish(self):
"""Return the learnt factors and loadings."""
if len(self.H) > 0:
if len(self.H[0].shape) == 1:
H = np.stack(self.H, axis=-1)
else:
H = np.concatenate(self.H, axis=1)
return self.W, H
else:
return self.W, 1
def ornmf(
X,
rank,
store_error=False,
project=False,
batch_size=None,
lambda1=1.0,
kappa=1.0,
method="PGD",
subspace_learning_rate=1.0,
subspace_momentum=0.5,
random_state=None,
):
"""Perform online, robust NMF on the data X.
This is a wrapper function for the ORNMF class.
Parameters
----------
X : numpy array
The [n_samples, n_features] input data.
rank : int
The rank of the representation (number of components/factors)
store_error : bool, default False
If True, stores the sparse error matrix.
project : bool, default False
If True, project the data X onto the learnt model.
batch_size : {None, int}, default None
If not None, learn the data in batches, each of batch_size samples
or less.
lambda1 : float
Nuclear norm regularization parameter.
kappa : float
Step-size for projection solver.
method : {'PGD', 'RobustPGD', 'MomentumSGD'}, default 'PGD'
* 'PGD' - Proximal gradient descent
* 'RobustPGD' - Robust proximal gradient descent
* 'MomentumSGD' - Stochastic gradient descent with momentum
subspace_learning_rate : float
Learning rate for the 'MomentumSGD' method. Should be a
float > 0.0
subspace_momentum : float
Momentum parameter for 'MomentumSGD' method, should be
a float between 0 and 1.
random_state : None or int or RandomState instance, default None
Used to initialize the subspace on the first iteration.
Returns
-------
Xhat : numpy array
is the [n_features x n_samples] non-negative matrix
Only returned if store_error is True.
Ehat : numpy array
is the [n_features x n_samples] sparse error matrix
Only returned if store_error is True.
W : numpy array, shape [n_features, rank]
is the non-negative factors matrix
H : numpy array, shape [rank, n_samples]
is the non-negative loadings matrix
"""
X = X.T
_ornmf = ORNMF(
rank,
store_error=store_error,
lambda1=lambda1,
kappa=kappa,
method=method,
subspace_learning_rate=subspace_learning_rate,
subspace_momentum=subspace_momentum,
random_state=random_state,
)
_ornmf.fit(X, batch_size=batch_size)
if project:
W = _ornmf.W
H = _ornmf.project(X)
else:
W, H = _ornmf.finish()
if store_error:
Xhat = W @ H
Ehat = np.array(_ornmf.E).T
return Xhat, Ehat, W, H
else:
return W, H
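# --- Illustrative usage (added for clarity; sizes and data are arbitrary) ---
# A minimal, hedged sketch of the wrapper on random non-negative data; see the
# docstring above for the expected orientation of X and of the returned W, H.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    X_demo = np.abs(rng.randn(50, 16))
    W_demo, H_demo = ornmf(X_demo, rank=3, batch_size=10, random_state=0)
    print(W_demo.shape, H_demo.shape)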
| gpl-3.0 | -2,418,872,542,565,154,000 | 30.260181 | 87 | 0.556127 | false | 3.624607 | false | false | false |
lizardsystem/lizard-waterbalance | lizard_wbcomputation/impact_from_buckets.py | 1 | 4525 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# pylint: disable=C0111
# The lizard_wbcomputation package implements the computational core of the
# lizard waterbalance Django app.
#
# Copyright (C) 2012 Nelen & Schuurmans
#
# This package is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this package. If not, see <http://www.gnu.org/licenses/>.
import logging
from timeseries.timeseriesstub import add_timeseries
from timeseries.timeseriesstub import multiply_timeseries
from lizard_wbcomputation.bucket_computer import BucketOutcome
from lizard_wbcomputation.bucket_summarizer import BucketsSummary
from lizard_wbcomputation.load_computer import Load
logger = logging.getLogger(__name__)
class SummedLoadsFromBuckets(object):
"""Implements the calculation of the summed bucket loads.
"""
def __init__(self, start_date, end_date, bucket2outcome):
self.start_date, self.end_date = start_date, end_date
self.bucket2outcome = bucket2outcome
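        # Note added for clarity: ``summary_load`` (a SummaryLoad instance) and
        # ``interesting_labels`` are used by compute() and compute_summary()
        # below but are not set here, so callers appear to be expected to
        # assign self.summary_load and self.interesting_labels before use.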
def compute(self, substance):
min_summary, inc_summary = self.compute_summary(substance)
min_loads = self._create_loads_from_summary(min_summary)
inc_loads = self._create_loads_from_summary(inc_summary)
return min_loads, inc_loads
def compute_summary(self, substance):
"""Compute and return the minimum and incremental the bucket loads.
This method returns a tuple of two BucketsSummary(s), where the first
summary contains the minimum bucket loads and the second the
incremental bucket loads.
The parameter specifies the substance for which to compute the load.
"""
min_summary = BucketsSummary()
inc_summary = BucketsSummary()
for bucket, outcome in self.bucket2outcome.items():
min_outcome = self.summary_load.compute(bucket, outcome, substance, 'min')
inc_outcome = self.summary_load.compute(bucket, outcome, substance, 'incr')
for attribute in self.interesting_labels:
self._add_timeseries(min_summary, min_outcome, attribute)
self._add_timeseries(inc_summary, inc_outcome, attribute)
return min_summary, inc_summary
def _add_timeseries(self, summary, timeseries, attribute):
new_timeseries = add_timeseries(getattr(summary, attribute), getattr(timeseries, attribute))
setattr(summary, attribute, new_timeseries)
def _create_loads_from_summary(self, summary):
loads = []
for attribute in self.interesting_labels:
load = Load(attribute)
load.timeseries = getattr(summary, attribute)
loads.append(load)
return loads
class SummaryLoad(object):
"""Implements the calculation of the loads of a single bucket."""
def __init__(self, buckets_summarizer):
self.summarizer = buckets_summarizer
def set_time_range(self, start_date, end_date):
self.start_date, self.end_date = start_date, end_date
def compute(self, bucket, outcome, substance, bound):
self._substance, self._bound = substance, bound
load_outcome = self._compute_load(bucket, outcome)
bucket2load_outcome = {bucket: load_outcome}
return self._compute_summary(bucket2load_outcome)
def _compute_load(self, bucket, outcome):
load_outcome = BucketOutcome()
concentration = self._get_concentration(bucket, 'flow_off')
load_outcome.flow_off = multiply_timeseries(outcome.flow_off, concentration)
concentration = self._get_concentration(bucket, 'drainage_indraft')
load_outcome.net_drainage = multiply_timeseries(outcome.net_drainage, concentration)
return load_outcome
def _compute_summary(self, bucket2load_outcome):
return self.summarizer.compute(bucket2load_outcome, self.start_date, self.end_date)
def _get_concentration(self, bucket, label):
attribute = '%s_concentr_%s_%s' % (self._bound, self._substance, label)
return getattr(bucket, attribute)
| gpl-3.0 | -9,105,309,254,465,833,000 | 39.765766 | 100 | 0.702099 | false | 4.029386 | false | false | false |
nileshk/url-shortener | shortener/models.py | 2 | 1541 | import datetime
from django.db import models
from django.conf import settings
#from django.contrib.auth.models import User
from django import forms
from urlweb.shortener.baseconv import base62
class Link(models.Model):
"""
Model that represents a shortened URL
# Initialize by deleting all Link objects
>>> Link.objects.all().delete()
# Create some Link objects
>>> link1 = Link.objects.create(url="http://www.google.com/")
>>> link2 = Link.objects.create(url="http://www.nileshk.com/")
# Get base 62 representation of id
>>> link1.to_base62()
'B'
>>> link2.to_base62()
'C'
# Set SITE_BASE_URL to something specific
>>> settings.SITE_BASE_URL = 'http://uu4.us/'
# Get short URL's
>>> link1.short_url()
'http://uu4.us/B'
>>> link2.short_url()
'http://uu4.us/C'
# Test usage_count
>>> link1.usage_count
0
>>> link1.usage_count += 1
>>> link1.usage_count
1
"""
url = models.URLField(verify_exists=True, unique=True)
date_submitted = models.DateTimeField(auto_now_add=True)
usage_count = models.IntegerField(default=0)
def to_base62(self):
return base62.from_decimal(self.id)
def short_url(self):
return settings.SITE_BASE_URL + self.to_base62()
def __unicode__(self):
return self.to_base62() + ' : ' + self.url
class LinkSubmitForm(forms.Form):
u = forms.URLField(verify_exists=True,
label='URL to be shortened:',
)
| mit | 1,286,833,389,382,425,300 | 24.683333 | 66 | 0.615185 | false | 3.502273 | false | false | false |
stephane-martin/salt-debian-packaging | salt-2016.3.2/salt/states/github.py | 2 | 4092 | # -*- coding: utf-8 -*-
'''
Github User State Module
.. versionadded:: 2016.3.0.
This state is used to ensure presence of users in the Organization.
.. code-block:: yaml
ensure user test is present in github:
github.present:
- name: 'Example TestUser1'
- email: [email protected]
- username: 'gitexample'
'''
def __virtual__():
'''
Only load if the github module is available in __salt__
'''
return 'github' if 'github.list_users' in __salt__ else False
def present(name, profile="github", **kwargs):
'''
Ensure a user is present
.. code-block:: yaml
ensure user test is present in github:
github.present:
- fullname: 'Example TestUser1'
- email: '[email protected]'
- name: 'gitexample'
The following parameters are required:
name
This is the github handle of the user in the organization
'''
email = kwargs.get('email')
full_name = kwargs.get('fullname')
ret = {
'name': name,
'changes': {},
'result': None,
'comment': ''
}
target = __salt__['github.get_user'](name, profile=profile, **kwargs)
# If the user has a valid github handle and is not in the org already
if not target:
ret['result'] = False
        ret['comment'] = "Couldn't find user {0}".format(name)
elif isinstance(target, bool) and target:
ret['comment'] = 'User {0} is already in the org '.format(name)
ret['result'] = True
elif not target.get('in_org', False) and target.get('membership_state') != 'pending':
if __opts__['test']:
ret['comment'] = 'User {0} will be added to the org'.format(name)
return ret
# add the user
result = __salt__['github.add_user'](
name, profile=profile, **kwargs
)
if result:
ret['changes'].setdefault('old', None)
ret['changes'].setdefault('new', 'User {0} exists in the org now'.format(name))
ret['result'] = True
else:
ret['result'] = False
ret['comment'] = 'Failed to add user {0} to the org'.format(name)
else:
ret['comment'] = 'User {0} has already been invited.'.format(name)
ret['result'] = None
return ret
def absent(name, profile="github", **kwargs):
'''
Ensure a github user is absent
.. code-block:: yaml
ensure user test is absent in github:
github.absent:
- name: 'Example TestUser1'
- email: [email protected]
- username: 'gitexample'
The following parameters are required:
name
Github handle of the user in organization
'''
email = kwargs.get('email')
full_name = kwargs.get('fullname')
ret = {
'name': name,
'changes': {},
'result': None,
'comment': 'User {0} is absent.'.format(name)
}
target = __salt__['github.get_user'](name, profile=profile, **kwargs)
if not target:
ret['comment'] = 'User {0} does not exist'.format(name)
ret['result'] = True
return ret
elif isinstance(target, bool) and target:
if __opts__['test']:
ret['comment'] = "User {0} will be deleted".format(name)
ret['result'] = None
return ret
result = __salt__['github.remove_user'](name, profile=profile, **kwargs)
if result:
ret['comment'] = 'Deleted user {0}'.format(name)
ret['changes'].setdefault('old', 'User {0} exists'.format(name))
ret['changes'].setdefault('new', 'User {0} deleted'.format(name))
ret['result'] = True
else:
ret['comment'] = 'Failed to delete {0}'.format(name)
ret['result'] = False
else:
ret['comment'] = "User {0} has already been deleted!".format(name)
if __opts__['test']:
ret['result'] = None
return ret
ret['result'] = True
return ret
| apache-2.0 | 5,962,355,521,565,373,000 | 27.027397 | 91 | 0.544477 | false | 4.0555 | true | false | false |
openaps/oacids | oacids/exported/heartbeat.py | 1 | 1924 |
import os
import time
import dbus.service
from gi.repository import GObject as gobject
from datetime import datetime
from oacids.helpers.dbus_props import GPropSync, Manager, WithProperties
from ifaces import BUS, IFACE, PATH, INTROSPECTABLE_IFACE, TRIGGER_IFACE, OPENAPS_IFACE
# class Heartbeat (GPropSync, Manager):
class Heartbeat (GPropSync):
OWN_IFACE = OPENAPS_IFACE + '.Heartbeat'
active = False
sleep_interval = 1000
started_at = None
def __init__ (self, bus, ctrl):
self.bus = bus
self.path = PATH + '/Heartbeat'
self.master = ctrl
self.started_at = time.time( )
self.now = datetime.fromtimestamp(self.started_at)
GPropSync.__init__(self, bus, self.path)
self.handle = None
self.Start( )
PROP_SIGS = {
'interval': 'u'
, 'Ticking': 'b'
, 'StartedAt': 'd'
, 'Uptime': 'd'
}
@gobject.property(type=int, default=1000)
def interval (self):
return self.sleep_interval
@gobject.property(type=bool, default=False)
def Ticking (self):
return self.active
@gobject.property(type=float)
def StartedAt (self):
return self.started_at
@gobject.property(type=float)
def Uptime (self):
return time.time( ) - self.started_at
@dbus.service.method(dbus_interface=OWN_IFACE,
in_signature='u', out_signature='s')
def Start (self, ms=1000):
self.active = True
self.sleep_interval = ms
self.handle = gobject.timeout_add(self.interval, self._tick)
@dbus.service.method(dbus_interface=OWN_IFACE,
in_signature='', out_signature='s')
def Stop (self):
gobject.source_remove (self.handle)
self.active = False
self.handle = None
@dbus.service.signal(dbus_interface=OWN_IFACE,
signature='')
def Heartbeat (self):
# print "scanning"
pass
def _tick (self):
self.Heartbeat( )
return self.Ticking
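# --- Client-side sketch (added for illustration; not part of the original) ---
# A hedged example of how another process might drive this exported object over
# D-Bus; whether the session or system bus is appropriate depends on how `bus`
# is set up elsewhere in oacids, so SessionBus() here is only an assumption.
#
#   import dbus
#   bus = dbus.SessionBus()
#   hb = bus.get_object(BUS, PATH + '/Heartbeat')
#   hb.Start(500, dbus_interface=OPENAPS_IFACE + '.Heartbeat')
#   hb.Stop(dbus_interface=OPENAPS_IFACE + '.Heartbeat')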
| mit | 8,406,726,498,926,139,000 | 24.653333 | 87 | 0.647089 | false | 3.351916 | false | false | false |
eadains09/scripts | kobo/kobo_utils.py | 1 | 9703 | #!/usr/bin/env python
'''Utilities for reading and writing a Kobo ebook reader's database.
'''
import os, sys
import sqlite3
def escape_quotes(s):
return s.replace("'", "''")
class KoboDB:
'''Interact with a Kobo e-reader's database:
either one that's mounted live from the device, or a local copy.
'''
def __init__(self, mountpath):
'''Initialize with the path to where your Kobo is mounted,
or where you keep local copies of the files.
'''
self.mountpath = mountpath
self.dbpath = None
self.conn = None
self.cursor = None
def connect(self, dbpath=None):
'''Open the database at the specified path. Defaults to
.kobo/KoboReader.sqlite in the mountpath you've provided.
'''
if dbpath:
self.dbpath = dbpath
elif self.mountpath:
self.dbpath = os.path.join(mountpath, ".kobo/KoboReader.sqlite")
else:
print "No DB path specified"
return
self.conn = sqlite3.connect(self.dbpath)
self.cursor = self.conn.cursor()
def close(self):
'''Commit any changes and close the database.'''
self.conn.commit()
self.conn.close()
self.conn = None
self.cursor = None
def get_field_names(self, tablename):
'''Get names of fields within a specified table.
I haven't found documentation, but PRAGMA table_info returns:
(index, fieldname, type, None, 0)
I don't know what the None and 0 represent.
'''
self.cursor.execute('PRAGMA table_info(%s);' % tablename)
return [ row[1] for row in self.cursor.fetchall() ]
def get_list(self, tablename, **kwargs):
'''Usage: get_list(tablename, selectors='*', modifiers='', order='')
'''
selectors = '*'
modifiers = ''
order = ''
if kwargs:
if 'selectors' in kwargs and kwargs['selectors']:
if type(kwargs['selectors']) is list:
selectors = ','.join(kwargs['selectors'])
else:
selectors = kwargs['selectors']
if 'modifiers' in kwargs and kwargs['modifiers']:
if type(kwargs['modifiers']) is list:
modifiers = " WHERE " + 'AND'.join(kwargs['modifiers'])
else:
modifiers = " WHERE " + kwargs['modifiers']
if 'order' in kwargs and kwargs['order']:
order = " ORDER BY " + kwargs['order']
sql = "SELECT %s FROM %s%s%s;" % (selectors, tablename,
modifiers, order)
print sql
self.cursor.execute(sql)
return self.cursor.fetchall()
def get_dlist(self, tablename, **kwargs):
'''Usage: get_dlist(tablename, selectors='*', modifiers='', order='')
'''
l = self.get_list(tablename, **kwargs)
if kwargs and 'selectors' in kwargs:
fields = kwargs['selectors']
else:
fields = self.get_field_names(tablename)
return [ dict(zip(fields, values)) for values in l ]
def get_book_by_id(self, id):
sql = "SELECT Title,Attribution FROM content WHERE ContentID='%s';" \
% escape_quotes(id);
# print sql
self.cursor.execute(sql)
return self.cursor.fetchall()[0]
def list_books(self):
'''List all books in the database.
'''
books = self.get_dlist("content",
selectors=[ 'ContentID', 'Title', 'Attribution',
'Description', 'NumShortcovers',
'IsEncrypted', 'IsDownloaded',
'adobe_location' ],
modifiers="content.BookTitle is null",
order="content.Title")
for book in books:
print "%s (%s)" % (book["Title"], book["Attribution"])
print " ContentID:", book["ContentID"]
if book["NumShortcovers"]:
print " Chapters:", book["NumShortcovers"]
print " Encrypted?", book["IsEncrypted"],
print " Downloaded?", book["IsDownloaded"],
if book["adobe_location"]:
if book["adobe_location"] == book["ContentID"]:
print " adobe_location: Yes"
else:
print "\n adobe_location:", book["adobe_location"]
else:
print
# Description is very long; make this optional.
# print " Description:", book["Description"]
print
def list_shelves(self, names=None):
'''List all shelves (collections) in the database.
'''
allshelves = {}
if names:
modifiers = " AND ".join(["ShelfName=%s" % name for name in names])
else:
modifiers = None
sc = self.get_dlist("ShelfContent", modifiers=modifiers)
for item in sc:
if item["ShelfName"] not in allshelves:
allshelves[item["ShelfName"]] = [ item["ContentId"] ]
else:
allshelves[item["ShelfName"]].append(item["ContentId"])
for shelf in allshelves:
print "\n===", shelf, "==="
for id in allshelves[shelf]:
print " %s (%s)" % self.get_book_by_id(id)
def has_shelf(self, shelfname):
'''Does a given shelfname exist? Helpful when checking whether
to add a new shelf based on a tag.
'''
shelves = self.get_dlist("Shelf", selectors=[ "Name" ],
modifiers=[ "Name='%s'" % shelfname ])
print "Has shelf %s?" % shelfname, bool(shelves)
return bool(shelves)
def print_table(self, tablename, **kwargs):
'''Usage: print_table(tablename, selectors='*', modifiers='', order='')
'''
if kwargs and 'selectors' in kwargs and kwargs['selectors']:
fields = kwargs['selectors']
print "kwargs: fields =", fields
else:
fields = self.get_field_names(tablename)
print "no kwargs: fields =", fields
for row in self.get_list(tablename, **kwargs):
for i, f in enumerate(fields):
# Must coerce row[i] to unicode before encoding,
# even though it should be unicode already,
# because it could be null.
print f.encode('UTF-8'), ":", unicode(row[i]).encode('UTF-8')
# Adding entries to shelves:
def make_new_shelf(self, shelfname):
'''Create a new shelf/collection.
'''
print "=== Current shelves:"
self.print_table("Shelf", selectors=[ "Name" ])
print "==="
print "Making a new shelf called", shelfname
# Skip type since it's not clear what it is and it's never set.
# For the final three, PRAGMA table_info(Shelf); says they're
# type BOOL, and querying that table shows true and false there,
# but absolutely everyone on the web says you have to use
# 1 and 0 for sqlite3 and that there is no boolean type.
query = '''INSERT INTO Shelf(CreationDate, Id, InternalName,
LastModified, Name, _IsDeleted, _IsVisible, _IsSynced)
                   VALUES (DATETIME('now'), '%s', '%s', DATETIME('now'), '%s', 0, 1, 1);
''' % (shelfname, shelfname, shelfname)
print query
self.cursor.execute(query)
def add_to_shelf(self, kobobook, shelfname):
print "==="
print "Adding", kobobook["Title"], "to shelf", shelfname
query = '''INSERT INTO ShelfContent(ShelfName, ContentId, DateModified,
_IsDeleted, _IsSynced)
VALUES ('%s', '%s', DATE('now'), 0, 0);''' % (shelfname,
escape_quotes(kobobook['ContentID']))
print query
self.cursor.execute(query)
self.conn.commit()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description="""Show details about a Kobo ebook reader.
By default, show a list of books.
Copyright 2015 by Akkana Peck; share and enjoy under the GPLv2 or later.""",
formatter_class=argparse.RawDescriptionHelpFormatter)
# Options:
parser.add_argument("-m", "--mountdir", default="/kobo",
help="""Path where the Kobo is mounted. Default: /kobo""")
parser.add_argument("-d", "--db",
default="$mountdir/.kobo/KoboReader.sqlite",
help="""Path to the database.
Default: $mountdir/.kobo/KoboReader.sqlite""")
# Things we can do:
parser.add_argument("-s", "--shelves", action='store_true', default=False,
help="""Show shelves""")
parser.add_argument("-S", "--shelfnames", action='store_true',
default=False,
help="""Show shelf names but not their contents""")
args = parser.parse_args()
args.db = args.db.replace('$mountdir', args.mountdir)
try:
koboDB = KoboDB(args.mountdir)
koboDB.connect(args.db)
except Exception, e:
print "Couldn't open database at %s for Kobo mounted at %s" % \
(args.db, args.mountdir)
print e
sys.exit(1)
if args.shelfnames:
shelves = koboDB.get_dlist("Shelf", selectors=[ "Name" ])
for shelf in shelves:
print shelf["Name"]
elif args.shelves:
koboDB.list_shelves()
else:
koboDB.list_books()
| gpl-2.0 | 3,140,875,779,706,617,000 | 37.05098 | 91 | 0.540658 | false | 4.078604 | false | false | false |
tairabiteru/NDL-DVCL2 | src/cfg.py | 1 | 5694 | """
PLEASE READ:
This is the config file for the program.
The file is written in python to make it easy for the program to read it,
however this means you must follow the formatting in the file.
For example, if the value is enclosed in quotation marks, it needs to stay that
way.
Additionally, do not edit anything past the DO NOT EDIT comment.
Everything past there contains code which allows the program to retrieve
this information.
"""
#-----WINDOW GEOMETRY SETTINGS-----
#Width and Height in pixels
WindowWidth = 1000
WindowHeight = 700
#Determines whether or not the windows can be resized.
AllowResizing = True
#-----DATABASE SETTINGS-----
#Visitor Card Database File Name
#The file extension will be automatically added
DBFileName = "data"
#-----LOGGING SETTINGS-----
#Determines the folder name log files are sent to.
loggingDir = "logs"
#If true, the program will use a different drive to save log files.
#NOTE: You MUST set this to true if using a different drive!
useDDrive = False
#If true, the program will periodically delete log files older than a certain
#age specified by logPurgePeriod.
doLogPurge = False
#Age in days for log files to be purged. For example, if you put in 7, log
#files older than 7 days will be removed on startup. However, in order to do
#this, doLogPurge must be set to True. If it is not, this will be ignored.
logPurgePeriod = 30
#-----OTHER SETTINGS-----
#Allows operators to issue visitor passes despite a patron being banned
overrideBans = False
#Determines whether or not the SAM message is displayed.
dispSAM = False
# Determines which characters are acceptable in the name fields
#---IMPORTANT!---
#Remember that as this config file is written in python, rules for characters
#which need to be escaped MUST BE FOLLOWED.
allowedNameChars = ["'", "-", " "]
#Determines which characters are acceptable in the location/municipality
#fields.
#---IMPORTANT!---
#Remember that as this config file is written in python, rules for characters
#which need to be escaped MUST BE FOLLOWED.
allowedMuniChars = ["'", "-", " "]
#Determines which states or regions appear in the drop down list for the
#state field.
#The states will appear on the list in the order they are entered in here.
#The very first one will be the default state.
states = ['MICHIGAN', 'ALABAMA', 'ALASKA', 'ARIZONA', 'ARKANSAS', 'CALIFORNIA',
'COLORADO', 'CONNECTICUT', 'DELAWARE', 'FLORIDA', 'GEORGIA', 'HAWAII',
'IDAHO', 'ILLINOIS', 'INDIANA', 'IOWA', 'KANSAS', 'KENTUCKY',
'LOUISIANA', 'MAINE', 'MARYLAND', 'MASSACHUSETTS', 'MINNESOTA',
'MISSISSIPPI', 'MISSOURI', 'MONTANA', 'NEBRASKA', 'NEVADA',
'NEW HAMPSHIRE', 'NEW JERSEY', 'NEW MEXICO', 'NEW YORK',
'NORTH CAROLINA', 'NORTH DAKOTA', 'OHIO', 'OKLAHOMA', 'OREGON',
'PENNSYLVANIA', 'RHODE ISLAND', 'SOUTH CAROLINA', 'SOUTH DAKOTA',
'TENNESSEE', 'TEXAS', 'UTAH', 'VERMONT', 'VIRGINIA', 'WASHINGTON',
'WEST VIRGINIA', 'WISCONSIN', 'WYOMING', 'AMERICAN SAMOA',
'DISTRICT OF COLUMBIA', 'GUAM', 'NORTHERN MARIANA ISLANDS',
'PUERTO RICO', 'VIRGIN ISLANDS', 'OTHER']
#Determines how the database will be sorted when the program is launched.
#Defaults to 'id'
#'id' = by ID number
#'last' = by last name
#'first' = by first name
#'middle' = by middle name
#'muni' = by municipality
#'state' = by state
#'cards' = by number of cards issued
#'status' = by status
defaultsort = 'last'
#Determines the order by which the default sort is sorted.
#Defaults to 'up'
#'up' = ascending
#'down' = descending
defaultsortorder = 'up'
#Determines the period within visitor cards can be issued.
#Essentially, if the period is set to 10 days and
#a patron gets a visitor card on 11-1-2015, 11-2-2015, and 11-3-2015,
#then they may not get another visitor card until 11-11-2015 at the earliest.
#Even then, that patron can only get 1 visitor card on that day. They'll have
#to wait a day to get 2, and another day to get three.
#The value is a positive integer, and is in days.
VisitorCardPeriod = 365
#The number of visitor cards per VisitorCardPeriod that a patron can get.
#This should be a positive integer greater than zero.
CardsPerPeriod = 3
#If it is set, determines which municipality(s) the visitor card period above
#will apply to. The value is ALWAYS an array of strings, even if there is only
#one municipality. If you set it to a blank array [] then the period will apply to all
#patrons. Additionally, the municipalities must ALWAYS be in all caps.
PeriodDependentMunis = ['NORTHVILLE']
#If set to True, the program will check for duplicate records when adding or
#editing existing records. If it finds an existing record which matches,
#it will not allow you to add the new one.
EnforceNoDuplicates = True
#----------DO NOT EDIT PAST THIS LINE----------#
class Config():
def __init__(self):
self.geo = str(WindowWidth) + 'x' + str(WindowHeight)
self.lockgeo = AllowResizing
self.dbfn = DBFileName + ".db"
self.overrideBans = overrideBans
self.dispSAM = dispSAM
self.allowedNameChars = allowedNameChars
self.allowedMuniChars = allowedMuniChars
self.states = states
self.defaultsort = defaultsort
self.defaultsortorder = defaultsortorder
self.visitorcardperiod = VisitorCardPeriod
self.loggingDir = loggingDir + "/"
self.useDDrive = useDDrive
self.doLogPurge = doLogPurge
self.logPurgePeriod = logPurgePeriod
self.PeriodDependentMunis = PeriodDependentMunis
self.CardsPerPeriod = CardsPerPeriod
self.EnforceNoDuplicates = EnforceNoDuplicates
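# Example of how the rest of the program is expected to read these settings
# (illustrative; the import path is an assumption based on this file's name):
#
#   from cfg import Config
#   settings = Config()
#   settings.dbfn        # -> "data.db"
#   settings.geo         # -> "1000x700"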
| cc0-1.0 | 3,923,560,303,382,618,000 | 36.460526 | 86 | 0.711626 | false | 3.375222 | false | false | false |
legnaleurc/acddl | tests/util.py | 1 | 3543 | import asyncio
import functools
from unittest import mock as utm
import hashlib
import arrow
from pyfakefs import fake_filesystem as ffs
class PathMock(utm.Mock):
def __init__(self, fs=None, *pathsegments, **kwargs):
super(PathMock, self).__init__()
self._fs = fs
self._path = self._fs.JoinPaths(*pathsegments)
def iterdir(self):
fake_os = ffs.FakeOsModule(self._fs)
for child in fake_os.listdir(self._path):
yield PathMock(self._fs, self._path, child)
def stat(self):
fake_os = ffs.FakeOsModule(self._fs)
return fake_os.stat(self._path)
def mkdir(self, mode=0o777, parents=False, exist_ok=False):
fake_os = ffs.FakeOsModule(self._fs)
try:
fake_os.makedirs(self._path)
except OSError as e:
# iDontCare
pass
return True
def is_file(self):
fake_os = ffs.FakeOsModule(self._fs)
return fake_os.path.isfile(self._path)
def is_dir(self):
fake_os = ffs.FakeOsModule(self._fs)
return fake_os.path.isdir(self._path)
def unlink(self):
fake_os = ffs.FakeOsModule(self._fs)
return fake_os.unlink(self._path)
def open(self, mode):
fake_open = ffs.FakeFileOpen(self._fs)
return fake_open(self._path, mode)
def __truediv__(self, name):
return PathMock(self._fs, self._path, name)
def __str__(self):
return self._path
class NodeMock(utm.Mock):
def __init__(self, fs, path, *args, **kwargs):
super(NodeMock, self).__init__()
self._fs = fs
self._path = path
@property
def name(self):
dirname, basename = self._fs.SplitPath(self._path)
return basename
@property
def modified(self):
f = self._fs.GetObject(self._path)
return arrow.fromtimestamp(f.st_mtime).replace(tzinfo='local')
@property
def trashed(self):
return False
@property
def is_folder(self):
fake_os = ffs.FakeOsModule(self._fs)
return fake_os.path.isdir(self._path)
@property
def size(self):
fake_os = ffs.FakeOsModule(self._fs)
return fake_os.path.getsize(self._path)
@property
def md5(self):
fake_open = ffs.FakeFileOpen(self._fs)
return get_md5(fake_open, self._path)
def create_async_mock(return_value=None):
loop = asyncio.get_event_loop()
f = loop.create_future()
f.set_result(return_value)
return utm.Mock(return_value=f)
def create_fake_local_file_system():
fs = ffs.FakeFilesystem()
file_1 = fs.CreateFile('/local/file_1.txt', contents='file 1')
file_1.st_mtime = 1467800000
file_2 = fs.CreateFile('/local/folder_1/file_2.txt', contents='file 2')
file_2.st_mtime = 1467801000
folder_1 = fs.GetObject('/local/folder_1')
folder_1.st_mtime = 1467802000
return fs
def create_fake_remote_file_system():
fs = ffs.FakeFilesystem()
file_3 = fs.CreateFile('/remote/file_3.txt', contents='file 3')
file_3.st_mtime = 1467803000
file_4 = fs.CreateFile('/remote/folder_2/file_4.txt', contents='file 4')
file_4.st_mtime = 1467804000
folder_2 = fs.GetObject('/remote/folder_2')
folder_2.st_mtime = 1467805000
return fs
def get_md5(open_, path):
hasher = hashlib.md5()
with open_(path, 'rb') as fin:
while True:
chunk = fin.read(65536)
if not chunk:
break
hasher.update(chunk)
return hasher.hexdigest()
| mit | 5,657,087,379,905,698,000 | 25.440299 | 76 | 0.607113 | false | 3.314312 | false | false | false |
KathleenLabrie/KLpyastro | klpyastro/redux/spec1d.py | 1 | 5364 | from __future__ import print_function
from math import pi
from astropy.io import fits
import numpy as np
import matplotlib.pyplot as plt
import stsci.convolve._lineshape as ls
from klpysci.fit import fittools as ft
# Utility function to open and plot original spectrum
def openNplot1d (filename, extname=('SCI',1)):
hdulist = fits.open(filename, 'readonly')
sp = hdulist[extname].data
x = np.arange(sp.shape[0])
plt.clf()
plt.plot (x, sp)
return hdulist
# Interactive specification of the section around the feature to work on
def getsubspec (sp):
# here it should be graphical, but I'm still working on that
x1 = input("Left edge pixel: ")
x2 = input("Right edge pixel: ")
flux = sp[x1:x2]
pixel = np.arange(x1,x2,1)
sub = np.zeros((2,flux.shape[0]))
sub[0] = pixel
sub[1] = flux
#plt.clf()
#plt.plot (pixel, flux)
#input('continue')
return sub
def plotresult (sp, bf, nsp):
plt.clf()
x = np.arange(0,sp.shape[0],1)
plt.plot (x, sp)
plt.plot (x, nsp)
x = np.arange(0,bf.shape[0],1)
plt.plot (x, bf)
def rmfeature (inspec, outspec, params=None, profile='voigt'):
#---- plot and get data
spin = openNplot1d(inspec)
specdata = spin['SCI'].data
spin.close()
#---- Get data for section around feature
linedata = getsubspec(specdata)
#---- Calculate and set initial parameter from linedata
if params is None:
contslope = (linedata[1][0] - linedata[1][-1]) / \
(linedata[0][0] - linedata[0][-1])
contlevel = linedata[1][0] - (contslope * linedata[0][0])
lineindex = linedata.argmin(1)[1]
lineposition = linedata[0][lineindex]
linestrength = linedata[1][lineindex] - \
((contslope*linedata[0][lineindex]) + contlevel)
linewidth = 20. # pixels. should find a better way.
cte = ft.Parameter(contlevel)
m = ft.Parameter(contslope)
A = ft.Parameter(linestrength)
mu = ft.Parameter(lineposition)
fwhmL = ft.Parameter(linewidth)
fwhmD = ft.Parameter(linewidth)
else:
cte = ft.Parameter(params[0])
m = ft.Parameter(params[1])
A = ft.Parameter(params[2])
mu = ft.Parameter(params[3])
fwhmL = ft.Parameter(params[4])
fwhmD = ft.Parameter(params[5])
#---- Define function [linear (continuum) + lorentz (feature)]
# I don't know where the factor 10 I need to apply to A() comes from.
# I'll need to figure it out.
#
# Also, those functions apparently need to be defined after the
# cte(), m(), etc. Parameter instances are defined.
def line(x):
return cte() + m()*x
def lorentz(x): # The version in numarray.convolve._lineshape is wrong
amp = A() * fwhmL() * pi / 2.
return amp * (fwhmL()/(2.*pi))/((x-mu())**2. + (fwhmL()/2.)**2.)
def voigt(x): # import numarray.convolve._lineshape as ls
# a = sqrt(log(2.)) * fwhmL() / (2. * fwhmD())
# b = 2. * sqrt(log(2.)) * (x - mu()) / fwhmD()
# H = exp(-(b**2)) + a/(sqrt(pi)*b**2.)
amp = A() * 1. # not right
# amp = A() * (1. + (fwhmL()/fwhmD())*(fwhmL()*pi/2.))
return amp * ls.voigt(x, (fwhmD(),fwhmL()), mu())
def contlorentz(x):
return line(x) + lorentz(x)
def contvoigt(x):
return line(x) + voigt(x)
#---- Non-linear least square fit (optimize.leastsq)
if (params==None):
if profile=='voigt': # Get initial params from Lorentz fit.
ft.nlfit(contlorentz, [cte, m, A, mu, fwhmL], linedata[1], x=linedata[0])
ft.nlfit(contvoigt, [cte, m, A, mu, fwhmD, fwhmL], linedata[1], x=linedata[0])
elif profile=='lorentz':
ft.nlfit(contlorentz, [cte, m, A, mu, fwhmL], linedata[1], x=linedata[0])
fwhmD=ft.Parameter(None)
else:
pass
#---- retrieve line profile parameters only and create a profile
# with zero continuum for the entire range for the original spectrum
# Then remove the feature
if profile=='voigt':
newspecdata = specdata - voigt(np.arange(0,specdata.shape[0],1))
bestfit = contvoigt(np.arange(0,specdata.shape[0],1))
elif profile=='lorentz':
newspecdata = specdata - lorentz(np.arange(0,specdata.shape[0],1))
bestfit = contlorentz(np.arange(0,specdata.shape[0],1))
#---- display the original spectrum, the best fit and the
# new spectrum. The feature should be gone
plotresult(specdata, bestfit, newspecdata)
print("Best Fit Parameters:")
print(" section = ",linedata[0][0],",",linedata[0][-1]+1)
print(" cte = ",cte())
print(" m = ",m())
print(" A = ",A())
print(" mu = ",mu())
print(" fwhmL = ",fwhmL())
print(" fwhmD = ",fwhmD())
try:
input = raw_input
except NameError:
pass
write = input('Write corrected spectrum to '+outspec+'? (y/n): ')
#---- write output spectrum
if write=='y':
spout = fits.open(inspec,'readonly') # just to create copy of HDUList
spout['SCI'].data = newspecdata
spout.writeto(outspec, output_verify='ignore')
#print ("Not implemented yet, but it isn't the app cool!")
else:
print("Too bad.")
| isc | -6,051,416,670,696,655,000 | 32.111111 | 90 | 0.582401 | false | 3.089862 | false | false | false |
walkerke/marble | marble/tests/test_clustering.py | 2 | 1612 | """ Tests for the clustering computation """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
#
# Synthetic data for tests
#
def grid():
""" Areal units arranged in a grid """
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
    units = {a:Polygon([(a%3, a//3),
                        (a%3, 1+a//3),
                        (1+a%3, 1+a//3),
                        (1+a%3, a//3)]) for a in au}
return units
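# The unit ids above tile a 3x3 grid row by row (0-2 in the bottom row, 6-8 in
# the top row): the checkerboard city below alternates classes A and B between
# adjacent cells, while the clustered city groups each class into a contiguous
# block.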
def checkerboard_city():
city = {0: {"A":100, "B":1},
1: {"A":1, "B":100},
2: {"A":100, "B":1},
3: {"A":1, "B":100},
4: {"A":100, "B":1},
5: {"A":1, "B":100},
6: {"A":100, "B":1},
7: {"A":1, "B":100},
8: {"A":100, "B":1}}
return city
def clustered_city():
city = {0: {"A":100, "B":1},
1: {"A":100, "B":1},
2: {"A":1, "B":100},
3: {"A":100, "B":1},
4: {"A":1, "B":100},
5: {"A":1, "B":100},
6: {"A":100, "B":1},
7: {"A":1, "B":100},
8: {"A":1, "B":100}}
return city
#
# Perform tests
#
class TestClustering(object):
def test_clustering_checkerboard(self):
units = grid()
city = checkerboard_city()
c = mb.clustering(city, units)
assert c["A"] == 0.0
assert c["B"] == 0.0
    def test_clustering_clustered(self):
units = grid()
city = clustered_city()
c = mb.clustering(city, units)
assert c["A"] == 1.0
assert c["B"] == 1.0
| bsd-3-clause | 4,109,802,494,623,839,700 | 23.424242 | 66 | 0.42866 | false | 2.990724 | true | false | false |
babycaseny/meld | meld/build_helpers.py | 1 | 15910 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Copied and adapted from the DistUtilsExtra project
# Created by Sebastian Heinlein and Martin Pitt
# Copyright Canonical Ltd.
# Modified by Kai Willadsen for the Meld project
# Copyright (C) 2013-2014 Kai Willadsen <[email protected]>
import distutils.cmd
import distutils.command.build
import distutils.command.build_py
import distutils.command.install
import distutils.command.install_data
import distutils.dir_util
import distutils.dist
import glob
import os.path
import platform
import sys
from distutils.log import info
try:
import distro
except ImportError:
python_version = tuple(int(x) for x in platform.python_version_tuple())
if python_version >= (3, 8):
print(
'Missing build requirement "distro" Python module; '
'install paths may be incorrect', file=sys.stderr)
def has_help(self):
return "build_help" in self.distribution.cmdclass and os.name != 'nt'
def has_icons(self):
return "build_icons" in self.distribution.cmdclass
def has_i18n(self):
return "build_i18n" in self.distribution.cmdclass and os.name != 'nt'
def has_data(self):
return "build_data" in self.distribution.cmdclass
distutils.command.build.build.sub_commands.extend([
("build_i18n", has_i18n),
("build_icons", has_icons),
("build_help", has_help),
("build_data", has_data),
])
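# Each (name, predicate) pair registered above runs as part of the standard
# "build" command; the predicate decides at build time whether that
# sub-command applies (e.g. help and i18n are skipped on Windows).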
class MeldDistribution(distutils.dist.Distribution):
global_options = distutils.dist.Distribution.global_options + [
("no-update-icon-cache", None, "Don't run gtk-update-icon-cache"),
("no-compile-schemas", None, "Don't compile gsettings schemas"),
]
def __init__(self, *args, **kwargs):
self.no_update_icon_cache = False
self.no_compile_schemas = False
super().__init__(*args, **kwargs)
class build_data(distutils.cmd.Command):
gschemas = [
('share/glib-2.0/schemas', ['data/org.gnome.meld.gschema.xml'])
]
frozen_gschemas = [
('share/meld', ['data/gschemas.compiled']),
]
# FIXME: This is way too much hard coding, but I really hope
# it also doesn't last that long.
resource_source = "meld/resources/meld.gresource.xml"
resource_target = "org.gnome.meld.gresource"
def initialize_options(self):
pass
def finalize_options(self):
pass
def get_data_files(self):
data_files = []
build_path = os.path.join('build', 'data')
if not os.path.exists(build_path):
os.makedirs(build_path)
info("compiling gresources")
resource_dir = os.path.dirname(self.resource_source)
target = os.path.join(build_path, self.resource_target)
self.spawn([
"glib-compile-resources",
"--target={}".format(target),
"--sourcedir={}".format(resource_dir),
self.resource_source,
])
data_files.append(('share/meld', [target]))
if os.name == 'nt':
gschemas = self.frozen_gschemas
else:
gschemas = self.gschemas
data_files.extend(gschemas)
return data_files
def run(self):
data_files = self.distribution.data_files
data_files.extend(self.get_data_files())
class build_help(distutils.cmd.Command):
help_dir = 'help'
def initialize_options(self):
pass
def finalize_options(self):
pass
def get_data_files(self):
data_files = []
name = self.distribution.metadata.name
if "LINGUAS" in os.environ:
self.selected_languages = os.environ["LINGUAS"].split()
else:
self.selected_languages = [
d for d in os.listdir(self.help_dir) if os.path.isdir(d)
]
if 'C' not in self.selected_languages:
self.selected_languages.append('C')
self.C_PAGES = glob.glob(os.path.join(self.help_dir, 'C', '*.page'))
self.C_EXTRA = glob.glob(os.path.join(self.help_dir, 'C', '*.xml'))
for lang in self.selected_languages:
source_path = os.path.join(self.help_dir, lang)
if not os.path.exists(source_path):
continue
build_path = os.path.join('build', self.help_dir, lang)
if not os.path.exists(build_path):
os.makedirs(build_path)
if lang != 'C':
po_file = os.path.join(source_path, lang + '.po')
mo_file = os.path.join(build_path, lang + '.mo')
msgfmt = ['msgfmt', po_file, '-o', mo_file]
self.spawn(msgfmt)
for page in self.C_PAGES:
itstool = [
'itstool', '-m', mo_file, '-o', build_path, page]
self.spawn(itstool)
for extra in self.C_EXTRA:
extra_path = os.path.join(
build_path, os.path.basename(extra))
if os.path.exists(extra_path):
os.unlink(extra_path)
os.symlink(os.path.relpath(extra, source_path), extra_path)
else:
distutils.dir_util.copy_tree(source_path, build_path)
xml_files = glob.glob('%s/*.xml' % build_path)
mallard_files = glob.glob('%s/*.page' % build_path)
path_help = os.path.join('share', 'help', lang, name)
path_figures = os.path.join(path_help, 'figures')
data_files.append((path_help, xml_files + mallard_files))
figures = glob.glob('%s/figures/*.png' % build_path)
if figures:
data_files.append((path_figures, figures))
return data_files
def run(self):
data_files = self.distribution.data_files
data_files.extend(self.get_data_files())
self.check_help()
def check_help(self):
for lang in self.selected_languages:
build_path = os.path.join('build', self.help_dir, lang)
if not os.path.exists(build_path):
continue
pages = [os.path.basename(p) for p in self.C_PAGES]
for page in pages:
page_path = os.path.join(build_path, page)
if not os.path.exists(page_path):
info("skipping missing file %s", page_path)
continue
lint = ['xmllint', '--noout', '--noent', '--path', build_path,
'--xinclude', page_path]
self.spawn(lint)
class build_icons(distutils.cmd.Command):
icon_dir = os.path.join("data", "icons")
target = "share/icons"
frozen_target = "share/meld/icons"
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
target_dir = self.frozen_target if os.name == 'nt' else self.target
data_files = self.distribution.data_files
for theme in glob.glob(os.path.join(self.icon_dir, "*")):
for size in glob.glob(os.path.join(theme, "*")):
for category in glob.glob(os.path.join(size, "*")):
icons = (glob.glob(os.path.join(category, "*.png")) +
glob.glob(os.path.join(category, "*.svg")))
icons = [
icon for icon in icons if not os.path.islink(icon)]
if not icons:
continue
data_files.append(("%s/%s/%s/%s" %
(target_dir,
os.path.basename(theme),
os.path.basename(size),
os.path.basename(category)),
icons))
class build_i18n(distutils.cmd.Command):
bug_contact = None
domain = "meld"
po_dir = "po"
merge_po = False
# FIXME: It's ridiculous to specify these here, but I know of no other
# way except magically extracting them from self.distribution.data_files
desktop_files = [('share/applications', glob.glob("data/*.desktop.in"))]
xml_files = [
('share/metainfo', glob.glob("data/*.appdata.xml.in")),
('share/mime/packages', glob.glob("data/mime/*.xml.in"))
]
schemas_files = []
key_files = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def _rebuild_po(self):
# If there is a po/LINGUAS file, or the LINGUAS environment variable
# is set, only compile the languages listed there.
selected_languages = None
linguas_file = os.path.join(self.po_dir, "LINGUAS")
if "LINGUAS" in os.environ:
selected_languages = os.environ["LINGUAS"].split()
elif os.path.isfile(linguas_file):
selected_languages = open(linguas_file).read().split()
# If we're on Windows, assume we're building frozen and make a bunch
# of insane assumptions.
if os.name == 'nt':
msgfmt = "C:\\Python27\\Tools\\i18n\\msgfmt"
else:
msgfmt = "msgfmt"
# Update po(t) files and print a report
# We have to change the working dir to the po dir for intltool
cmd = [
"intltool-update",
(self.merge_po and "-r" or "-p"), "-g", self.domain
]
wd = os.getcwd()
os.chdir(self.po_dir)
self.spawn(cmd)
os.chdir(wd)
max_po_mtime = 0
for po_file in glob.glob("%s/*.po" % self.po_dir):
lang = os.path.basename(po_file[:-3])
if selected_languages and lang not in selected_languages:
continue
mo_dir = os.path.join("build", "mo", lang, "LC_MESSAGES")
mo_file = os.path.join(mo_dir, "%s.mo" % self.domain)
if not os.path.exists(mo_dir):
os.makedirs(mo_dir)
cmd = [msgfmt, po_file, "-o", mo_file]
po_mtime = os.path.getmtime(po_file)
mo_mtime = (
os.path.exists(mo_file) and os.path.getmtime(mo_file) or 0)
if po_mtime > max_po_mtime:
max_po_mtime = po_mtime
if po_mtime > mo_mtime:
self.spawn(cmd)
targetpath = os.path.join("share/locale", lang, "LC_MESSAGES")
self.distribution.data_files.append((targetpath, (mo_file,)))
self.max_po_mtime = max_po_mtime
def run(self):
if self.bug_contact is not None:
os.environ["XGETTEXT_ARGS"] = "--msgid-bugs-address=%s " % \
self.bug_contact
self._rebuild_po()
intltool_switches = [
(self.xml_files, "-x"),
(self.desktop_files, "-d"),
(self.schemas_files, "-s"),
(self.key_files, "-k"),
]
for file_set, switch in intltool_switches:
for target, files in file_set:
build_target = os.path.join("build", target)
if not os.path.exists(build_target):
os.makedirs(build_target)
files_merged = []
for file in files:
file_merged = os.path.basename(file)
if file_merged.endswith(".in"):
file_merged = file_merged[:-3]
file_merged = os.path.join(build_target, file_merged)
cmd = ["intltool-merge", switch, self.po_dir, file,
file_merged]
mtime_merged = (os.path.exists(file_merged) and
os.path.getmtime(file_merged) or 0)
mtime_file = os.path.getmtime(file)
if (mtime_merged < self.max_po_mtime or
mtime_merged < mtime_file):
# Only build if output is older than input (.po,.in)
self.spawn(cmd)
files_merged.append(file_merged)
self.distribution.data_files.append((target, files_merged))
class build_py(distutils.command.build_py.build_py):
"""Insert real package installation locations into conf module
Adapted from gottengeography
"""
data_line = 'DATADIR = "%s"'
locale_line = 'LOCALEDIR = "%s"'
def build_module(self, module, module_file, package):
if module_file == 'meld/conf.py':
with open(module_file) as f:
contents = f.read()
try:
options = self.distribution.get_option_dict('install')
prefix = options['prefix'][1]
except KeyError as e:
print(e)
prefix = sys.prefix
datadir = os.path.join(prefix, 'share', 'meld')
localedir = os.path.join(prefix, 'share', 'locale')
start, end = 0, 0
lines = contents.splitlines()
for i, line in enumerate(lines):
if line.startswith('# START'):
start = i
elif line.startswith('# END'):
end = i
if start and end:
lines[start:end + 1] = [
self.data_line % datadir,
self.locale_line % localedir,
]
module_file = module_file + "-installed"
contents = "\n".join(lines)
with open(module_file, 'w') as f:
f.write(contents)
distutils.command.build_py.build_py.build_module(
self, module, module_file, package)
class install(distutils.command.install.install):
def finalize_options(self):
special_cases = ('debian', 'ubuntu', 'linuxmint')
if platform.system() == 'Linux':
# linux_distribution has been removed in Python 3.8; we require
# the third-party distro package for future handling.
try:
distribution = platform.linux_distribution()[0].lower()
except AttributeError:
try:
distribution = distro.id()
except NameError:
distribution = 'unknown'
if distribution in special_cases:
# Maintain an explicit install-layout, but use deb by default
specified_layout = getattr(self, 'install_layout', None)
self.install_layout = specified_layout or 'deb'
distutils.command.install.install.finalize_options(self)
class install_data(distutils.command.install_data.install_data):
def run(self):
distutils.command.install_data.install_data.run(self)
if not self.distribution.no_update_icon_cache:
# TODO: Generalise to non-hicolor icon themes
info("running gtk-update-icon-cache")
icon_path = os.path.join(self.install_dir, "share/icons/hicolor")
self.spawn(["gtk-update-icon-cache", "-q", "-t", icon_path])
if not self.distribution.no_compile_schemas:
info("compiling gsettings schemas")
gschema_path = build_data.gschemas[0][0]
gschema_install = os.path.join(self.install_dir, gschema_path)
self.spawn(["glib-compile-schemas", gschema_install])
| gpl-2.0 | -8,316,151,223,959,306,000 | 34.355556 | 79 | 0.55726 | false | 3.885226 | false | false | false |
dmccloskey/SBaaS_MFA | SBaaS_MFA/stage02_isotopomer_measuredData_execute.py | 1 | 18694 | #SBaaS
from .stage02_isotopomer_measuredData_io import stage02_isotopomer_measuredData_io
from SBaaS_isotopomer.stage01_isotopomer_averages_query import stage01_isotopomer_averages_query
from SBaaS_physiology.stage01_physiology_rates_query import stage01_physiology_rates_query
from SBaaS_LIMS.lims_msMethod_query import lims_msMethod_query
#SBaaS
from .stage02_isotopomer_measuredData_postgresql_models import *
from genomeScale_MFA.MFA_utilities import MFA_utilities
#resources
import re
from math import sqrt
class stage02_isotopomer_measuredData_execute(stage02_isotopomer_measuredData_io,
stage01_isotopomer_averages_query,
lims_msMethod_query,
stage01_physiology_rates_query):
def execute_makeMeasuredFragments(self,experiment_id_I, sample_name_abbreviations_I = [], time_points_I = [], scan_types_I = [], met_ids_I = []):
'''Collect and format MS data from data_stage01_isotopomer_averagesNormSum for fluxomics simulation'''
mfautilities = MFA_utilities();
# get experiment information:
met_id_conv_dict = {'Hexose_Pool_fru_glc-D':'glc-D',
'Pool_2pg_3pg':'3pg',
'23dpg':'13dpg'};
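        # maps measured (pooled/alternate) metabolite ids onto the model
        # metabolite ids used below when formatting fragment names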
data_O = [];
experiment_stdev = [];
# get sample names and sample name abbreviations
if sample_name_abbreviations_I:
sample_abbreviations = sample_name_abbreviations_I;
st = 'Unknown';
sample_types_lst = [];
sample_types_lst.extend([st for i in range(len(sample_abbreviations))]);
else:
sample_abbreviations = [];
sample_types = ['Unknown'];
sample_types_lst = [];
for st in sample_types:
sample_abbreviations_tmp = [];
sample_abbreviations_tmp = self.get_sampleNameAbbreviations_experimentIDAndSampleType_dataStage01AveragesNormSum(experiment_id_I,st);
sample_abbreviations.extend(sample_abbreviations_tmp);
sample_types_lst.extend([st for i in range(len(sample_abbreviations_tmp))]);
for sna_cnt,sna in enumerate(sample_abbreviations):
print('Collecting experimental MS data for sample name abbreviation ' + sna);
# get time points
if time_points_I:
time_points = time_points_I;
else:
time_points = [];
time_points = self.get_timePoint_experimentIDAndSampleNameAbbreviation_dataStage01AveragesNormSum(experiment_id_I,sna);
for tp in time_points:
print('Collecting experimental MS data for time-point ' + str(tp));
# get the scan_types
if scan_types_I:
scan_types = [];
scan_types_tmp = [];
scan_types_tmp = self.get_scanTypes_experimentIDAndTimePointAndSampleAbbreviationsAndSampleType_dataStage01AveragesNormSum(experiment_id_I,tp,sna,sample_types_lst[sna_cnt]);
scan_types = [st for st in scan_types_tmp if st in scan_types_I];
else:
scan_types = [];
scan_types = self.get_scanTypes_experimentIDAndTimePointAndSampleAbbreviationsAndSampleType_dataStage01AveragesNormSum(experiment_id_I,tp,sna,sample_types_lst[sna_cnt]);
for scan_type in scan_types:
print('Collecting experimental MS data for scan type ' + scan_type)
# met_ids
if not met_ids_I:
met_ids = [];
met_ids = self.get_metIDs_experimentIDAndSampleAbbreviationAndTimePointAndSampleTypeAndScanType_dataStage01AveragesNormSum( \
experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type);
else:
met_ids = met_ids_I;
if not(met_ids): continue #no component information was found
for met in met_ids:
print('Collecting experimental MS data for metabolite ' + met);
# format the metabolite
if met in list(met_id_conv_dict.keys()):
met_formatted = met_id_conv_dict[met];
else: met_formatted = met;
met_formatted = re.sub('-','_DASH_',met_formatted)
met_formatted = re.sub('[(]','_LPARANTHES_',met_formatted)
met_formatted = re.sub('[)]','_RPARANTHES_',met_formatted)
# fragments
fragment_formulas = [];
fragment_formulas = self.get_fragmentFormula_experimentIDAndSampleAbbreviationAndTimePointAndSampleTypeAndScanTypeAndMetID_dataStage01AveragesNormSum( \
experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type,met);
# frag c map
frag_cmap = {};
frag_cmap = self.get_precursorFormulaAndProductFormulaAndCMapsAndPositions_metID(met,'-','tuning');
for frag in fragment_formulas:
# data
data_mat = [];
data_mat_cv = [];
data_mat_n = [];
data_mat, data_mat_cv, data_mat_n = self.get_spectrum_experimentIDAndSampleAbbreviationAndTimePointAndSampleTypeAndScanTypeAndMetIDAndFragmentFormula_dataStage01AveragesNormSum( \
experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type,met,frag);
# combine into a structure
positions,elements = [],[];
positions,elements = mfautilities.convert_fragmentAndElements2PositionAndElements(frag_cmap[frag]['fragment'],frag_cmap[frag]['fragment_elements']);
#fragname = met_formatted+'_c'+'_'+ re.sub('[-+]','',frag);
fragname = met_formatted+'_c'+'_'+ re.sub('[-+]','',frag)+'_'+scan_type;
data_names = [];
data_stdev = [];
data_stderr = [];
for i,d in enumerate(data_mat):
stdev = 0.0;
stderr = 0.0;
if data_mat_cv[i]:
if data_mat_n[i]==1:
stdev = 0.05;
else:
stdev = data_mat[i]*data_mat_cv[i]/100;
stderr = stdev/sqrt(data_mat_n[i]);
data_names.append(fragname+str(i));
data_stdev.append(stdev);
data_stderr.append(stderr);
experiment_stdev.append(stdev);
data_tmp = {'experiment_id':experiment_id_I,
'sample_name_abbreviation':sna,
'sample_type':sample_types_lst[sna_cnt],
'time_point':tp,
'met_id':met_formatted+'_c',
'fragment_id':fragname,
'fragment_formula':frag,
'intensity_normalized_average':data_mat,
'intensity_normalized_cv':data_mat_cv,
'intensity_normalized_stdev':data_stdev,
'intensity_normalized_n':data_mat_n,
'intensity_normalized_units':'normSum',
'met_elements':elements,
'met_atompositions':positions};
data_O.append(data_tmp);
#add data to the database
row = [];
row = data_stage02_isotopomer_measuredFragments(
experiment_id_I,
sna,
tp,
met_formatted+'_c',
fragname,
frag,
data_mat,
data_mat_cv,
data_stdev,
'normSum',
scan_type,
elements,
positions,
True,
None);
self.session.add(row);
self.session.commit();
def execute_addMeasuredFluxes(self,experiment_id_I, ko_list={}, flux_dict={}, model_ids_I=[], sample_name_abbreviations_I=[]):
'''Add flux data for physiological simulation'''
#Input:
#flux_dict = {};
#flux_dict['iJO1366'] = {};
#flux_dict['iJO1366'] = {};
#flux_dict['iJO1366']['sna'] = {};
#flux_dict['iJO1366']['sna']['Ec_biomass_iJO1366_WT_53p95M'] = {'ave':None,'stdev':None,'units':'mmol*gDCW-1*hr-1','lb':0.704*0.9,'ub':0.704*1.1};
#flux_dict['iJO1366']['sna']['EX_ac_LPAREN_e_RPAREN_'] = {'ave':None,'stdev':None,'units':'mmol*gDCW-1*hr-1','lb':2.13*0.9,'ub':2.13*1.1};
#flux_dict['iJO1366']['sna']['EX_o2_LPAREN_e_RPAREN__reverse'] = {'ave':None,'units':'mmol*gDCW-1*hr-1','stdev':None,'lb':0,'ub':16};
#flux_dict['iJO1366']['sna']['EX_glc_LPAREN_e_RPAREN_'] = {'ave':None,'stdev':None,'units':'mmol*gDCW-1*hr-1','lb':-7.4*1.1,'ub':-7.4*0.9};
data_O = [];
# get the model ids:
if model_ids_I:
model_ids = model_ids_I;
else:
model_ids = [];
model_ids = self.get_modelID_experimentID_dataStage02IsotopomerSimulation(experiment_id_I);
for model_id in model_ids:
# get sample names and sample name abbreviations
if sample_name_abbreviations_I:
sample_name_abbreviations = sample_name_abbreviations_I;
else:
sample_name_abbreviations = [];
sample_name_abbreviations = self.get_sampleNameAbbreviations_experimentIDAndModelID_dataStage02IsotopomerSimulation(experiment_id_I,model_id);
for sna_cnt,sna in enumerate(sample_name_abbreviations):
print('Adding experimental fluxes for sample name abbreviation ' + sna);
if flux_dict:
for k,v in flux_dict[model_id][sna].items():
# record the data
data_tmp = {'experiment_id':experiment_id_I,
'model_id':model_id,
'sample_name_abbreviation':sna,
'rxn_id':k,
'flux_average':v['ave'],
'flux_stdev':v['stdev'],
'flux_lb':v['lb'],
'flux_ub':v['ub'],
'flux_units':v['units'],
'used_':True,
'comment_':None}
data_O.append(data_tmp);
##add data to the database
#row = [];
#row = data_stage02_isotopomer_measuredFluxes(
# experiment_id_I,
# model_id,
# sna,
# k,
# v['ave'],
# v['stdev'],
# v['lb'],
# v['ub'],
# v['units'],
# True,
# None);
#self.session.add(row);
if ko_list:
for k in ko_list[model_id][sna]:
# record the data
data_tmp = {'experiment_id':experiment_id_I,
'model_id':model_id,
'sample_name_abbreviation':sna,
'rxn_id':k,
'flux_average':0.0,
'flux_stdev':0.0,
'flux_lb':0.0,
'flux_ub':0.0,
'flux_units':'mmol*gDCW-1*hr-1',
'used_':True,
'comment_':None}
data_O.append(data_tmp);
##add data to the database
#row = [];
#row = data_stage02_isotopomer_measuredFluxes(
# experiment_id_I,
# model_id,
# sna,
# k,
# 0.0,
# 0.0,
# 0.0,
# 0.0,
# 'mmol*gDCW-1*hr-1',
# True,
# None);
#self.session.add(row);
# add data to the DB
self.add_data_stage02_isotopomer_measuredFluxes(data_O);
#self.session.commit();
def execute_makeMeasuredFluxes(self,experiment_id_I, metID2RxnID_I = {}, sample_name_abbreviations_I = [], met_ids_I = [],snaIsotopomer2snaPhysiology_I={},
correct_EX_glc_LPAREN_e_RPAREN_I = True):
        '''Collect flux data from data_stage01_physiology_ratesAverages for fluxomics simulation
INPUT:
metID2RxnID_I = e.g. {'glc-D':{'model_id':'140407_iDM2014','rxn_id':'EX_glc_LPAREN_e_RPAREN_'},
'ac':{'model_id':'140407_iDM2014','rxn_id':'EX_ac_LPAREN_e_RPAREN_'},
'succ':{'model_id':'140407_iDM2014','rxn_id':'EX_succ_LPAREN_e_RPAREN_'},
'lac-L':{'model_id':'140407_iDM2014','rxn_id':'EX_lac_DASH_L_LPAREN_e_RPAREN_'},
'biomass':{'model_id':'140407_iDM2014','rxn_id':'Ec_biomass_iJO1366_WT_53p95M'}};
snaIsotopomer2snaPhysiology_I = {'OxicEvo04Ecoli13CGlc':'OxicEvo04EcoliGlc',
'OxicEvo04gndEcoli13CGlc':'OxicEvo04gndEcoliGlc',
'OxicEvo04pgiEcoli13CGlc':'OxicEvo04pgiEcoliGlc',
'OxicEvo04sdhCBEcoli13CGlc':'OxicEvo04sdhCBEcoliGlc',
'OxicEvo04tpiAEcoli13CGlc':'OxicEvo04tpiAEcoliGlc'}
TODO:
Need to implement a way to detect the direction of the reaction,
and change direction of the rate accordingly
'''
data_O = [];
# get sample names and sample name abbreviations
if sample_name_abbreviations_I:
sample_name_abbreviations = sample_name_abbreviations_I;
else:
sample_name_abbreviations = [];
sample_name_abbreviations = self.get_sampleNameAbbreviations_experimentID_dataStage02IosotopomerSimulation(experiment_id_I);
for sna in sample_name_abbreviations:
print('Collecting experimental fluxes for sample name abbreviation ' + sna);
query_sna = sna;
if snaIsotopomer2snaPhysiology_I: query_sna = snaIsotopomer2snaPhysiology_I[sna];
# get met_ids
if not met_ids_I:
met_ids = [];
met_ids = self.get_metID_experimentIDAndSampleNameAbbreviation_dataStage01PhysiologyRatesAverages(experiment_id_I,query_sna);
else:
met_ids = met_ids_I;
if not(met_ids): continue #no component information was found
for met in met_ids:
print('Collecting experimental fluxes for metabolite ' + met);
# get rateData
slope_average, intercept_average, rate_average, rate_lb, rate_ub, rate_units, rate_var = None,None,None,None,None,None,None;
slope_average, intercept_average, rate_average, rate_lb, rate_ub, rate_units, rate_var = self.get_rateData_experimentIDAndSampleNameAbbreviationAndMetID_dataStage01PhysiologyRatesAverages(experiment_id_I,query_sna,met);
rate_stdev = sqrt(rate_var);
model_id = metID2RxnID_I[met]['model_id'];
rxn_id = metID2RxnID_I[met]['rxn_id'];
# correct for glucose uptake
if rxn_id == 'EX_glc_LPAREN_e_RPAREN_' and correct_EX_glc_LPAREN_e_RPAREN_I:
rate_lb_tmp,rate_ub_tmp = rate_lb,rate_ub;
rate_lb = min([abs(x) for x in [rate_lb_tmp,rate_ub_tmp]]);
rate_ub = max([abs(x) for x in [rate_lb_tmp,rate_ub_tmp]]);
rate_average = abs(rate_average);
# record the data
data_tmp = {'experiment_id':experiment_id_I,
'model_id':model_id,
'sample_name_abbreviation':sna,
'rxn_id':rxn_id,
'flux_average':rate_average,
'flux_stdev':rate_stdev,
'flux_lb':rate_lb,
'flux_ub':rate_ub,
'flux_units':rate_units,
'used_':True,
'comment_':None}
data_O.append(data_tmp);
##add data to the database
#row = [];
#row = data_stage02_isotopomer_measuredFluxes(
# experiment_id_I,
# model_id,
# sna,
# rxn_id,
# rate_average,
# rate_stdev,
# rate_lb,
# rate_ub,
# rate_units,
# True,
# None);
#self.session.add(row);
#add data to the DB
self.add_data_stage02_isotopomer_measuredFluxes(data_O);
#self.session.commit(); | mit | -4,211,725,522,628,708,000 | 57.059006 | 235 | 0.467423 | false | 4.035838 | false | false | false |
rousseab/pymatgen | pymatgen/util/tests/test_string_utils.py | 1 | 2048 | # coding: utf-8
from __future__ import division, unicode_literals
"""
FIXME: Proper module docstring
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Aug 26, 2012"
import unittest
from pymatgen.util.string_utils import generate_latex_table, str_delimited, \
str_aligned, formula_double_format, latexify, latexify_spacegroup
class FuncTest(unittest.TestCase):
def test_latexify(self):
self.assertEqual(latexify("Li3Fe2(PO4)3"),
"Li$_{3}$Fe$_{2}$(PO$_{4}$)$_{3}$")
self.assertEqual(latexify("Li0.2Na0.8Cl"),
"Li$_{0.2}$Na$_{0.8}$Cl")
def test_latexify_spacegroup(self):
self.assertEqual(latexify_spacegroup("Fd-3m"), "Fd$\overline{3}$m")
self.assertEqual(latexify_spacegroup("P2_1/c"), "P2$_{1}$/c")
def test_str_aligned_delimited(self):
data = [["a", "bb"], ["ccc", "dddd"]]
ans = """ a bb
ccc dddd"""
self.assertEqual(str_aligned(data), ans)
self.assertEqual(str_aligned(data, header=["X", "Y"]),
' X Y\n----------\n a bb\nccc dddd')
self.assertEqual(str_delimited(data), 'a\tbb\nccc\tdddd')
def test_generate_latex_table(self):
data = [["a", "bb"], ["ccc", "dddd"]]
self.assertEqual(generate_latex_table(data), '\\begin{table}[H]\n\\caption{Caption}\n\\label{Label}\n\\begin{tabular*}{\\textwidth}{@{\\extracolsep{\\fill}}cc}\n\\hline\na & bb\\\\\nccc & dddd\\\\\n\\hline\n\\end{tabular*}\n\\end{table}')
def test_formula_double_format(self):
self.assertEqual(formula_double_format(1.00), "")
self.assertEqual(formula_double_format(2.00), "2")
self.assertEqual(formula_double_format(2.10), "2.1")
self.assertEqual(formula_double_format(2.10000000002), "2.1")
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| mit | -5,392,121,690,976,006,000 | 34.929825 | 246 | 0.590332 | false | 3.029586 | true | false | false |
CarlFK/wafer | wafer/management/commands/wafer_speaker_tickets.py | 1 | 1606 | import sys
import csv
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import ACCEPTED
class Command(BaseCommand):
help = ("List speakers and associated tickets. By default, only lists"
" speakers for accepted talk, but this can be overriden by"
" the --all option")
option_list = BaseCommand.option_list + tuple([
make_option('--all', action="store_true", default=False,
help='List speakers and tickets (for all talks)'),
])
def _speaker_tickets(self, options):
people = get_user_model().objects.filter(
talks__isnull=False).distinct()
csv_file = csv.writer(sys.stdout)
for person in people:
# We query talks to filter out the speakers from ordinary
# accounts
if options['all']:
titles = [x.title for x in person.talks.all()]
else:
titles = [x.title for x in
person.talks.filter(status=ACCEPTED)]
if not titles:
continue
tickets = person.ticket.all()
if tickets:
ticket = u'%d' % tickets[0].barcode
else:
ticket = u'NO TICKET PURCHASED'
row = [x.encode("utf-8") for x in (person.get_full_name(),
person.email,
ticket)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._speaker_tickets(options)
| isc | 5,173,316,226,204,295,000 | 33.170213 | 74 | 0.566625 | false | 4.160622 | false | false | false |
danceos/dosek | generator/coder/syscall_fsm.py | 1 | 14851 | from .syscall_full import FullSystemCalls, AlarmTemplate
from .elements import Statement, Comment
from .elements import CodeTemplate, Include, VariableDefinition, \
Block, Statement, Comment, Function, Hook, DataObject, DataObjectArray
from generator.tools import unwrap_seq
from generator.analysis.AtomicBasicBlock import E,S
from generator.analysis.SystemSemantic import SystemState
from generator.analysis import Subtask
import logging
class FSMSystemCalls(FullSystemCalls):
def __init__(self, use_pla = False):
super(FSMSystemCalls, self).__init__()
self.alarms = FSMAlarmTemplate
if use_pla:
self.fsm_template = PLA_FSMTemplate
else:
self.fsm_template = SimpleFSMTemplate
def generate_system_code(self):
self.generator.source_file.include("syscall.h")
# Grab the finite state machine
self.fsm = self.system_graph.get_pass("fsm").fsm
# Use the fsm template
self.generator.source_file.include("reschedule-ast.h")
self.generator.source_file.include("os/scheduler/task.h")
self.impl = self.fsm_template(self)
self.impl.add_transition_table()
self.generator.source_file.declarations.append(self.impl.expand())
self.generator.source_file.include("os/alarm.h")
self.generator.source_file.include("os/util/redundant.h")
self.generator.source_file.declarations.append(self.alarms(self).expand())
def StartOS(self, block):
block.unused_parameter(0)
for subtask in self.system_graph.real_subtasks:
# Use Reset the stack pointer for all all tasks
self.call_function(block,
self.task_desc(subtask) + ".tcb.reset",
"void", [])
# Call the StartupHook
self.call_function(block, "CALL_HOOK", "void", ["StartupHook"])
# Bootstrap: Do the initial syscall
dispatch_func = Function("__OS_StartOS_dispatch", "void", ["int"], extern_c = True)
self.generator.source_file.function_manager.add(dispatch_func)
# Initial SystemCall
for ev in self.fsm.events:
if ev.name.isA(S.StartOS):
self.fsm_schedule(ev.name, block, dispatch_func)
break
self.call_function(block, "arch::syscall", "void",
[dispatch_func.function_name])
self.call_function(block, "Machine::unreachable", "void", [])
# Forward fsm_schedule and fsm_event
def fsm_event(self, *args, **kwargs):
self.impl.fsm_event(*args, **kwargs)
def fsm_schedule(self, *args, **kwargs):
self.impl.fsm_schedule(*args, **kwargs)
def iret(self, *args, **kwargs):
self.impl.fsm_iret(*args, **kwargs)
def kickoff(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
if not syscall.subtask.conf.is_isr:
self.arch_rules.kickoff(syscall, userspace)
def TerminateTask(self, syscall, userspace, kernelspace):
self.call_function(kernelspace, self.task_desc(syscall.subtask) + ".tcb.reset",
"void", [])
self.fsm_schedule(syscall, userspace, kernelspace)
ChainTask = TerminateTask
ActivateTask = fsm_schedule
WaitEvent = fsm_schedule
ClearEvent = fsm_schedule
SetEvent = fsm_schedule
GetResource = fsm_schedule
ReleaseResource = fsm_schedule
def ASTSchedule(self, function):
pass
def AdvanceCounter(self, abb, userspace, kernelspace):
raise NotImplementedError
################################################################
# These system calls are only enhanced by the FSM step function
################################################################
# Do not overwrite: SetRelAlarm
# Do not overwrite: GetAlarm
# Do not overwrite: CancelAlarm
# Do not overwrite: DisableAllInterrupts
# Do not overwrite: SuspendAllInterrupts
# Do not overwrite: SuspendOSInterrupts
# Do not overwrite: EnableAllInterrupts
# Do not overwrite: ResumeAllInterrupts
# Do not overwrite: ResumeOSInterrupts
# Do not overwrite: AcquireCheckedObject
# Do not overwrite: ReleaseCheckedObject
def SetRelAlarm(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.SetRelAlarm(self, syscall, userspace, kernelspace)
def GetAlarm(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.GetAlarm(self, syscall, userspace, kernelspace)
def CancelAlarm(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.CancelAlarm(self, syscall, userspace, kernelspace)
def DisableAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.DisableAllInterrupts(self, syscall, userspace, kernelspace)
def SuspendAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.SuspendAllInterrupts(self, syscall, userspace, kernelspace)
def SuspendOSInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.SuspendOSInterrupts(self, syscall, userspace, kernelspace)
def EnableAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.EnableAllInterrupts(self, syscall, userspace, kernelspace)
def ResumeAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.ResumeAllInterrupts(self, syscall, userspace, kernelspace)
def ResumeOSInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.ResumeOSInterrupts(self, syscall, userspace, kernelspace)
def AcquireCheckedObject(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.AcquireCheckedObject(self, syscall, userspace, kernelspace)
def ReleaseCheckedObject(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.ReleaseCheckedObject(self, syscall, userspace, kernelspace)
def do_assertions(self, block, assertions):
"""We do not support assertions for a FSM kernel"""
logging.error("Assertions are not implemented for the FSM coder")
class SimpleFSMTemplate(CodeTemplate):
def __init__(self, syscall_fsm):
CodeTemplate.__init__(self, syscall_fsm.generator, "os/fsm/simple-fsm.h.in")
self.syscall_fsm = syscall_fsm
self.system_graph = self.generator.system_graph
self.syscall_fsm.generator.source_file.include("os/fsm/simple-fsm.h")
self.fsm = self.syscall_fsm.fsm
def add_transition_table(self):
self.syscall_map = {}
# Rename action labels to their task id
def action_rename(action):
task_id = action.impl.task_id
if task_id == None:
task_id = 255
return task_id
self.fsm.rename(actions = action_rename)
# Generate the transition table
for event in self.fsm.events:
self.syscall_map[event.name] = event
# Do not generate a transition table, if there is only one
# transition.
if len(event.transitions) == 1:
event.impl.transition_table = None
continue
table = DataObjectArray("os::fsm::SimpleFSM::Transition",
"fsm_table_" + event.name.generated_function_name(),
str(len(event.transitions)))
table.static_initializer = []
for t in event.transitions:
table.static_initializer\
.append("{%d, %d, %d}" % (t.source, t.target, t.action))
event.impl.transition_table = table
self.syscall_fsm.generator.source_file.data_manager\
.add(table, namespace = ('os', 'fsm'))
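    # Each generated table row is a {source_state, target_state, action} triple;
    # events with exactly one transition skip the table and instead hard-code
    # their follow-up state in fsm_event() below.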
def fsm_event(self, syscall, userspace, kernelspace):
if not syscall in self.syscall_map:
return
event = self.syscall_map[syscall]
if event.impl.transition_table:
transition_table = event.impl.transition_table.name
transition_length = str(len(event.transitions))
# kernelspace.add(Statement('kout << "%s" << endl' % syscall.path()))
task = self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.event",
"SimpleFSM::task_t", [transition_table, transition_length])
else:
followup_state = event.impl.followup_state = event.transitions[0].target
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.set_state",
"void", [str(followup_state)])
task = event.transitions[0].action
return task
def fsm_schedule(self, syscall, userspace, kernelspace):
if not syscall in self.syscall_map:
return
task = self.fsm_event(syscall, userspace, kernelspace)
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [task.name])
def fsm_iret(self, syscall, userspace, kernelspace):
if not syscall in self.syscall_map:
return
task = self.fsm_event(syscall, userspace, kernelspace)
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [task.name])
################################################################
# Used in Template Code
################################################################
def subtask_desc(self, snippet, args):
return self._subtask.impl.task_descriptor.name
def subtask_id(self, snippet, args):
return str(self._subtask.impl.task_id)
def foreach_subtask_sorted(self, snippet, args):
body = args[0]
ret = []
for subtask in sorted(self.system_graph.real_subtasks, key=lambda s: s.impl.task_id):
self._subtask = subtask
ret.append(self.expand_snippet(body))
return ret
class PLA_FSMTemplate(CodeTemplate):
def __init__(self, syscall_fsm):
CodeTemplate.__init__(self, syscall_fsm.generator, "os/fsm/pla-fsm.h.in")
self.syscall_fsm = syscall_fsm
self.system_graph = self.generator.system_graph
self.logic = self.system_graph.get_pass("LogicMinimizer")
self.fsm = self.logic.fsm
def add_transition_table(self):
# Truth table is generated in pla-fsm.h
return
def fsm_event(self, syscall, userspace, kernelspace):
event = None
for ev in self.fsm.events:
if self.fsm.event_mapping[ev.name] == syscall:
event = ev
break
if not event:
return # No Dispatch
# kernelspace.add(Statement('kout << "%s" << endl' % syscall.path()))
task = self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.event",
"unsigned", [str(int(event.name, 2))])
return task
def fsm_schedule(self, syscall, userspace, kernelspace):
task = self.fsm_event(syscall, userspace, kernelspace)
if not task:
return
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [task.name])
def fsm_iret(self, syscall, userspace, kernelspace):
task = self.fsm_event(syscall, userspace, kernelspace)
if not task:
return
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [task.name])
################################################################
# Used in Template Code
################################################################
def truth_table(self, *args):
# Generate the transition table
initializer = []
for (mask, pattern, output_state, output_action) in self.logic.truth_table:
initializer.append("{{{0}, {1}, {2}, {3}}}".format(
int(mask, 2),
int(pattern, 2),
int(output_state, 2),
int(output_action, 2)))
return "{" + (", ".join(initializer)) + "}"
def mask_pattern_len(self, *args):
return str(self.logic.event_len + self.logic.state_len)
def truth_table_entries(self, *args):
return str(len(self.logic.truth_table))
def initial_state(self, *args):
return str(int(self.fsm.initial_state,2))
def dispatch_table(self, *args):
mapping = {}
for k, subtask in self.fsm.action_mapping.items():
mapping[int(k, 2)] = subtask
if not 0 in mapping:
mapping[0] = None
self.NO_DISPATCH = 0
initializer = []
for k,subtask in sorted(mapping.items(), key = lambda x:x[0]):
if not subtask or subtask.conf.is_isr:
initializer.append("0 /* NO_DISPATCH */")
elif subtask == self.system_graph.idle_subtask:
initializer.append("0 /* IDLE */")
self.IDLE = k
else:
initializer.append("&" +subtask.impl.task_descriptor.name)
if not hasattr(self, "IDLE"):
self.IDLE = len(mapping) + 100
return ", ".join(initializer)
class FSMAlarmTemplate(AlarmTemplate):
def __init__(self, rules):
AlarmTemplate.__init__(self, rules)
| lgpl-3.0 | 857,813,653,266,763,300 | 39.355978 | 109 | 0.595313 | false | 4.074348 | false | false | false |
Azure/azure-sdk-for-python | sdk/cognitiveservices/azure-cognitiveservices-knowledge-qnamaker/azure/cognitiveservices/knowledge/qnamaker/models/feedback_record_dto.py | 1 | 1420 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FeedbackRecordDTO(Model):
"""Active learning feedback record.
:param user_id: Unique identifier for the user.
:type user_id: str
:param user_question: The suggested question being provided as feedback.
:type user_question: str
:param qna_id: The qnaId for which the suggested question is provided as
feedback.
:type qna_id: int
"""
_validation = {
'user_question': {'max_length': 1000},
}
_attribute_map = {
'user_id': {'key': 'userId', 'type': 'str'},
'user_question': {'key': 'userQuestion', 'type': 'str'},
'qna_id': {'key': 'qnaId', 'type': 'int'},
}
def __init__(self, **kwargs):
super(FeedbackRecordDTO, self).__init__(**kwargs)
self.user_id = kwargs.get('user_id', None)
self.user_question = kwargs.get('user_question', None)
self.qna_id = kwargs.get('qna_id', None)
| mit | 1,054,254,704,202,606,500 | 33.634146 | 76 | 0.573944 | false | 4.022663 | false | false | false |
lukas/scikit-class | examples/keras-sign/perceptron.py | 2 | 1111 | # A very simple perceptron for classifying american sign language letters
import signdata
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Flatten, Dropout, Conv2D, MaxPooling2D, Reshape
from keras.utils import np_utils
import wandb
from wandb.keras import WandbCallback
# logging code
run = wandb.init()
config = run.config
config.loss = "mae"
config.optimizer = "adam"
config.epochs = 10
# load data
(X_test, y_test) = signdata.load_test_data()
(X_train, y_train) = signdata.load_train_data()
img_width = X_test.shape[1]
img_height = X_test.shape[2]
# one hot encode outputs
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_train.shape[1]
# create model
model = Sequential()
model.add(Flatten())
model.add(Dense(num_classes, activation="softmax"))
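# With only a Flatten layer feeding a single softmax Dense layer, this model
# is a multinomial logistic regression over the raw pixel values.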
model.compile(loss=config.loss, optimizer=config.optimizer,
metrics=['accuracy'])
# Fit the model
model.fit(X_train, y_train, epochs=config.epochs, validation_data=(X_test, y_test),
callbacks=[WandbCallback(data_type="image", labels=signdata.letters)])
| gpl-2.0 | -6,721,303,665,081,432,000 | 28.236842 | 80 | 0.738074 | false | 3.035519 | true | false | false |
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractDreamsOfJianghu.py | 1 | 1217 | def extractDreamsOfJianghu(item):
"""
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
bad = ['pictures']
if any([(tmp in item['tags']) for tmp in bad]):
return None
tagmap = [
('TBVW', 'To Be A Virtuous Wife', 'translated'),
('WC', 'World of Cultivation', 'translated'),
('8TT', 'Eight Treasure Trousseau', 'translated'),
('4.6', '4.6 Billion Years Symphony of Evolution', 'translated'),
('Zuo Mo', 'World of Cultivation', 'translated'),
('lpj', 'Like Pearl and Jade', 'translated'),
('ZX', 'Zhui Xu', 'translated'),
('AUW', 'An Unyielding Wind', 'translated'),
('ADND', 'Ascending, Do Not Disturb', 'translated'),
('sd', 'Sword Dynasty', 'translated'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False | bsd-3-clause | 7,790,751,708,507,386,000 | 39.6 | 104 | 0.525062 | false | 3.316076 | false | false | false |
endlessm/chromium-browser | native_client/pnacl/driver/pnacl-compress.py | 2 | 6470 | #!/usr/bin/python
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Uses llvm tool pnacl-bccompress to add abbreviations into the input file.
# It runs pnacl-bccompress multiple times, using a hill-climbing solution
# to try and find a good local minimum for file size.
from __future__ import print_function
from driver_env import env
from driver_log import Log
import driver_tools
import pathtools
import os
import shutil
EXTRA_ENV = {
'INPUTS': '',
'OUTPUT': '',
'MAX_ATTEMPTS': '25',
'RETRIES': '3',
'SUFFIX': '-c',
'VERBOSE': '0',
}
PrepPatterns = [
( ('-o','(.*)'), "env.set('OUTPUT', pathtools.normalize($0))"),
( '--max-attempts=(.*)', "env.set('MAX_ATTEMPTS', $0)"),
( '--retries=(.*)', "env.set('RETRIES', $0)"),
( ('--suffix','(.*)'), "env.set('SUFFIX', $0)"),
( '--verbose', "env.set('VERBOSE', '1')"),
( '-v', "env.set('VERBOSE', '1')"),
( '(-.*)', driver_tools.UnrecognizedOption),
( '(.*)', "env.append('INPUTS', pathtools.normalize($0))"),
]
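# Each PrepPatterns entry pairs a command-line pattern with the action that
# driver_tools.ParseArgs performs when it matches (setting or appending driver
# env variables, or rejecting unrecognized options).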
def Compress(f_input, f_output):
""" Hill climb to smallest file.
This code calls pnacl-compress multiple times to attempt to
compress the file. That tool works by adding abbreviations that
  have a good likelihood of shrinking the bitcode file. Unfortunately,
  any time abbreviations are added to PNaCl bitcode files, they can
  get larger, because the addition of more abbreviations will require
  more bits to save abbreviation indices, resulting in the file
  actually increasing in size.
  To mitigate this, this driver hill climbs on the assumption that there
  may be local minima that are the best solution. Hence, any time
  a local minimum is reached, a number of additional attempts are made
  to see if we can find a smaller bitcode file; finding one implies
  we are moving closer to another local minimum.
"""
verbose = env.getbool('VERBOSE')
# Number of times we will continue to retry after finding local
# minimum file size.
# max_retry_count: The maximum number of retries.
# retry_count: The number of retries left before we give up.
max_retry_count = int(env.getone('RETRIES'))
retry_count = max_retry_count
if max_retry_count < 1:
Log.Fatal("RETRIES must be >= 1")
# The suffix to append to the input file, to generate intermediate files.
# test_suffix: The prefix of the suffix.
# test_index: The index of the current test file (appened to test_suffix).
test_suffix = env.getone('SUFFIX')
test_index = 1
# The maximum number of times we will attempt to compress the file before
# giving up.
max_attempts = int(env.getone('MAX_ATTEMPTS'))
if max_attempts < 1:
Log.Fatal("MAX_ATTEMPTS must be >= 1")
# The result of the last attempt to compress a file.
# last_file: The name of the file
# last_size: The number of bytes in last_file.
# last_saved: True if we did not remove last_file.
last_file = f_input
last_size = pathtools.getsize(f_input)
last_saved = True
# Keeps track of the current best compressed file.
current_smallest_file = last_file
current_smallest_size = last_size
while max_attempts > 0 and retry_count > 0:
next_file = f_input + test_suffix + str(test_index)
if verbose:
print("Compressing %s: %s bytes" % (last_file, last_size))
driver_tools.Run('"${PNACL_COMPRESS}" ' + last_file + ' -o ' + next_file)
next_size = pathtools.getsize(next_file)
if not last_saved:
os.remove(last_file)
if next_size < current_smallest_size:
old_file = current_smallest_file
current_smallest_file = next_file
current_smallest_size = next_size
if (f_input != old_file):
os.remove(old_file)
retry_count = max_retry_count
next_saved = True
else:
next_saved = False
retry_count -= 1
last_file = next_file
last_size = next_size
last_saved = next_saved
max_attempts -= 1
test_index += 1
# Install results.
if verbose:
print("Compressed %s: %s bytes" % (last_file, last_size))
print("Best %s: %s bytes" % (current_smallest_file,
current_smallest_size))
if not last_saved:
os.remove(last_file)
if (f_input == f_output):
if (f_input == current_smallest_file): return
# python os.rename/shutil.move on Windows will raise an error when
# dst already exists, and f_input already exists.
f_temp = f_input + test_suffix + "0"
shutil.move(f_input, f_temp)
shutil.move(current_smallest_file, f_input)
os.remove(f_temp)
elif f_input == current_smallest_file:
shutil.copyfile(current_smallest_file, f_output)
else:
shutil.move(current_smallest_file, f_output)
def main(argv):
env.update(EXTRA_ENV)
driver_tools.ParseArgs(argv, PrepPatterns)
inputs = env.get('INPUTS')
output = env.getone('OUTPUT')
for path in inputs + [output]:
driver_tools.CheckPathLength(path)
if len(inputs) != 1:
Log.Fatal('Can only have one input')
f_input = inputs[0]
# Allow in-place file changes if output isn't specified.
if output != '':
f_output = output
else:
f_output = f_input
Compress(f_input, f_output)
return 0
def get_help(unused_argv):
script = env.getone('SCRIPT_NAME')
return """Usage: %s <options> in-file
This tool compresses a pnacl bitcode (PEXE) file. It does so by
generating a series of intermediate files. Each file represents
an attempt to compress the previous file in the series. Uses
hill-climbing to find the smallest file to use, and sets the
output file to the best found case.
The options are:
-h --help Display this output
-o <file> Place the output into <file>. Otherwise, the
input file is modified in-place.
--max-attempts=N Maximum number of attempts to reduce file size.
--retries=N Number of additional attempts to try after
                    a local minimum is found before quitting.
--suffix XX Create intermediate compressed files by adding
suffix XXN (where N is a number).
-v --verbose Show generated intermediate files and corresponding
sizes.
""" % script
| bsd-3-clause | -6,841,414,445,066,758,000 | 33.784946 | 79 | 0.642504 | false | 3.643018 | true | false | false |
nccgroup/umap2 | umap2/core/usb.py | 1 | 1543 | '''
USB definitions and enumerations from the USB spec.
'''
# USB.py
#
# Contains definition of USB class, which is just a container for a bunch of
# constants/enums associated with the USB protocol.
#
# TODO: would be nice if this module could re-export the other USB* classes so
# one need import only USB to get all the functionality
class DescriptorType(object):
device = 0x01
configuration = 0x02
string = 0x03
interface = 0x04
endpoint = 0x05
device_qualifier = 0x06
other_speed_configuration = 0x07
interface_power = 0x08
bos = 0x0f
device_capability = 0x10
hid = 0x21
report = 0x22
cs_interface = 0x24
cs_endpoint = 0x25
hub = 0x29
class USB(object):
feature_endpoint_halt = 0
feature_device_remote_wakeup = 1
feature_test_mode = 2
# while this holds for HID, it may not be a correct model for the USB
# ecosystem at large
if_class_to_desc_type = {
0x03: DescriptorType.hid,
0x0b: DescriptorType.hid
}
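    # interface_class_to_descriptor_type() at the bottom of this module looks
    # interface classes up in this table and returns None for unlisted classes.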
class State(object):
detached = 0
attached = 1
powered = 2
default = 3
address = 4
configured = 5
suspended = 6
class Request(object):
direction_host_to_device = 0
direction_device_to_host = 1
type_standard = 0
type_class = 1
type_vendor = 2
recipient_device = 0
recipient_interface = 1
recipient_endpoint = 2
recipient_other = 3
def interface_class_to_descriptor_type(interface_class):
return USB.if_class_to_desc_type.get(interface_class, None)
| agpl-3.0 | 2,192,557,851,825,431,600 | 21.042857 | 78 | 0.66429 | false | 3.522831 | false | false | false |
ProjetSigma/backend | sigma_core/models/group_member.py | 1 | 1578 | from django.db import models
from django.http import Http404
from sigma_core.models.user import User
class GroupMember(models.Model):
"""
    Model a membership relation between a User and a Group.
"""
class Meta:
# TODO: Make a primary key once Django supports it
unique_together = (("user", "group"),)
user = models.ForeignKey('User', related_name='memberships')
group = models.ForeignKey('Group', related_name='memberships')
created = models.DateTimeField(auto_now_add=True)
join_date = models.DateField(blank=True, null=True)
leave_date = models.DateField(blank=True, null=True)
# if super_administrator = True then is_administrator = True
# administrators must have all the rights below
is_administrator = models.BooleanField(default=False)
is_super_administrator = models.BooleanField(default=False)
can_invite = models.BooleanField(default=False)
can_be_contacted = models.BooleanField(default=False)
can_publish = models.BooleanField(default=False)
can_kick = models.BooleanField(default=False)
is_accepted = models.BooleanField(default=False)
can_modify_group_infos = models.BooleanField(default=False)
# Related fields:
# - values (model GroupMemberValue)
def __str__(self):
return "User \"%s\" in Group \"%s\"" % (self.user.__str__(), self.group.__str__())
# Perms for admin site
def has_perm(self, perm, obj=None): # pragma: no cover
return True
def has_module_perms(self, app_label): # pragma: no cover
return True
| agpl-3.0 | -6,175,557,414,385,646,000 | 36.571429 | 90 | 0.687579 | false | 3.8867 | false | false | false |
julianofischer/gitiscool | gitiscool.py | 1 | 1681 | import os, json
'''
This file is part of gitiscool.
gitiscool is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
gitiscool is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with gitiscool. If not, see <http://www.gnu.org/licenses/>.
'''
class Repo:
def __init__(self):
self.id = None
self.owner = None #GitUser
self.main_repo = False
self.name = None
self.forks = []
def __str__(self):
output = "id: %s\nname: %s\nowner: %s" % (str(self.id), self.name, self.owner.username)
return output
def add_fork(self, fork):
self.forks.append(fork)
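# A Repo references its owner (a GitUser) and collects its fork Repos via
# add_fork(); main_repo flags the original (non-fork) repository.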
class GitUser:
def __init__(self):
self.id = None
self.username = None
self.avatar_url = None
self.email = None
class Student(GitUser):
    def __init__(self):
        GitUser.__init__(self)
        self.repo = None
class Problem:
def __init__(self):
self.number = None
self.description = None
self.committed_score = None
self.first_solution_score = None
self.datetime = None
class Solution:
def __init__(self):
self.problem = None
self.student = None
self.is_first_solution = False
self.datetime = None
| gpl-3.0 | -8,494,251,118,630,038,000 | 24.469697 | 95 | 0.625818 | false | 3.955294 | false | false | false |
Brounredo/Revolution | main.py | 1 | 5549 | # Initialization
import Functions # Function definitions
import Names # Random names
import Vars # Variables
import AllCountries # List of real countries
import sys # System library
import random # Random number generator
# Interactive initialization
Vars.is_lose = random.choice([True, False]) # Whether the player will win or lose
print("")
print("Revolution v1.0")
print("-----")
print("Введите своё имя: ", end='')
Vars.MyPlayer.name = input() # Player name
print("Введите свой возраст (от 14 до 50 лет): ", end='')
age_temp = int(input())
if age_temp < 14:
print("Маленький ещё страной управлять!")
sys.exit()
elif age_temp > 50:
print("Староват уже.")
sys.exit()
Vars.MyPlayer.age = int(age_temp) # Player age
Vars.MyCountry = random.choice(AllCountries.allcountries) # Player's country
print("Ваша страна - ", Vars.MyCountry.name)
Vars.MyPlayer.place = Vars.MyCountry.capital # Player location
print("Введите количество ботов: ", end='')
bots = int(input()) # Number of bots
for j in range(bots): # Add the bots
Vars.countries.append(random.choice(AllCountries.allcountries))
for q in range(5): # "Convene" the ministers
Vars.MyCountry.min_now[q] = Names.random_name()
Functions.gen_gamemap() # Map generation
# Game loop
while 1:
    # Print the main information
print("")
print("Год:", Vars.year)
print("Ваш возраст:", Vars.MyPlayer.age)
print("Ваша популярность:", Vars.MyPlayer.popular)
print("Денег в казне:", Vars.MyCountry.money, "руб.")
print("Население страны:", Vars.MyCountry.population, "чел.")
print("Личных денег:", Vars.MyPlayer.money, "руб.")
print("Вы находитесь в:", Vars.MyPlayer.place)
print("Новости:", Vars.news)
print("-----")
print("Для помощи напишите 'помощь' (без кавычек)")
print("Введите команду: ", end='')
    # Read and process the command
    command = input() # Read the command
if command == "конец хода":
Functions.next_year()
elif command == "министры":
print("")
print("Кабинет министров:")
print("Премьер-министр:", Vars.MyCountry.min_now[0], "| Уровень:", Vars.MyCountry.min_stats[0])
print("Министр внутренних дел:", Vars.MyCountry.min_now[1], "| Уровень:", Vars.MyCountry.min_stats[1])
print("Министр финансов:", Vars.MyCountry.min_now[2], "| Уровень:", Vars.MyCountry.min_stats[2])
print("Министр иностранных дел:", Vars.MyCountry.min_now[3], "| Уровень:", Vars.MyCountry.min_stats[3])
print("Министр народного просвещения:", Vars.MyCountry.min_now[4], "| Уровень:", Vars.MyCountry.min_stats[4])
elif command == "сменить министра":
Functions.change_min()
elif command == "выступить":
for x in range(10):
print(Names.random_phrase(), '.')
elif command == "выход":
sys.exit()
elif command == "помощь":
print('помощь, выход, конец хода, министры, сменить министра, выступить, тайл, карта')
elif command == "карта":
Functions.draw_gamemap()
elif command == "тайл":
print("Введите строку: ", end='')
y = int(input())
print("Введите столбец: ", end='')
x = int(input())
tile = Vars.gamemap[Functions.xy_to_index(x, y)]
print("Страна: " + str(tile.Country.name))
print("Защита: " + str(tile.Defence))
print("Армия: " + str(tile.Army))
elif command == "перебросить":
print("Введите строку (откуда): ", end='')
y1 = int(input())
print("Введите столбец (откуда): ", end='')
x1 = int(input())
print("Введите строку (куда): ", end='')
y2 = int(input())
print("Введите столбец (куда) ", end='')
x2 = int(input())
print("Сколько войск перебросить: ", end='')
n = int(input())
Functions.move(Functions.xy_to_index(x1, y1), Functions.xy_to_index(x2, y2), n)
elif command == "атаковать":
print("Введите строку (откуда): ", end='')
y1 = int(input())
print("Введите столбец (откуда): ", end='')
x1 = int(input())
print("Введите строку (куда): ", end='')
y2 = int(input())
print("Введите столбец (куда) ", end='')
x2 = int(input())
print("Сколько человек послать: ", end='')
Functions.attack(Functions.xy_to_index(x1, y1), Functions.xy_to_index(x2, y2), n)
else:
print("Нет такой команды!")
| gpl-2.0 | -5,076,489,527,068,514,000 | 39.495413 | 117 | 0.622111 | false | 2.099905 | false | false | false |
TheSimoms/Kaloot | src/kaloot.py | 1 | 2037 | import os
import logging
import glob
import bot
class Kaloot:
def __init__(self, args):
self.nicknames = self.parse_nicknames(args)
def parse_nicknames(self, args):
nickname_files = None
nickname_path = '%s/nicknames' % os.path.dirname(__file__)
if args.nicknames is None:
nickname_files = glob.glob('%s/*.txt' % nickname_path)
elif args.nicknames != 'None':
nickname_files = []
for filename in args.nicknames.split(','):
if os.path.isfile(filename):
nickname_files.append(filename)
else:
nickname_file = '%s/%s.txt' % (nickname_path, filename)
if os.path.isfile(nickname_file):
nickname_files.append(nickname_file)
if nickname_files:
return self.fetch_nicknames(nickname_files)
@staticmethod
def fetch_nicknames(nickname_files):
nicknames = set()
for filename in nickname_files:
try:
with open(filename) as f:
nicknames |= set([
nickname.strip() for nickname in f.readlines() if len(nickname.strip()) > 0
])
except FileNotFoundError:
logging.error('File %s.txt not found in nicknames folder' % filename)
return list(nicknames)
class RandomKaloot(Kaloot):
def __init__(self, args):
super(RandomKaloot, self).__init__(args)
threads = []
for i in range(args.n):
arguments = {
'index': i,
'game_id': args.game,
'prefix': args.prefix,
'delay': args.delay
}
if self.nicknames is not None and i < len(self.nicknames):
arguments['nickname'] = self.nicknames[i]
threads.append(bot.RandomBot(**arguments))
threads[i].start()
for i in range(args.n):
threads[i].join()
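
# Illustrative sketch (not part of the original module), never invoked: how
# fetch_nicknames() deduplicates and strips nicknames read from files.
# Requires the `bot` dependency imported above for this module to load at all.
def _demo_fetch_nicknames():
    import tempfile
    with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as f:
        f.write('alice\nbob\n\nalice\n')
    print(sorted(Kaloot.fetch_nicknames([f.name])))  # -> ['alice', 'bob']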
| gpl-2.0 | -5,601,243,374,530,525,000 | 27.690141 | 99 | 0.52381 | false | 4.297468 | false | false | false |
SouthGreenPlatform/arcad-hts | sp5_gbs/demultadapt.py | 1 | 18149 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
AUTHOR
Written by Vincent MAILLOL (modified by Gautier Sarah)
BUGS
[email protected]
COPYRIGHT
    Copyright © 2011 DAVEM, 2014 AGAP. License GPLv3+: GNU
    GPL version 3 or later <http://gnu.org/licenses/gpl.html>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/> or
write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import sys, os
sys.path.append("")
from davem_fastq import Fastq_read, Fastq_file
import argparse
from itertools import izip
from bisect import bisect_left
if sys.version_info[0] == 2:
if sys.version_info[1] < 6:
msg = "ERROR: Python should be in version 2.6 or higher"
sys.stderr.write("%s\n\n" % msg)
sys.exit(1)
class FastqFileType( object ) :
"""
    Factory producing Fastq_file objects
"""
def __init__( self, mode ) :
self.mode = mode
def __call__( self, path_name) :
return Fastq_file( path_name, self.mode )
class Selector( object ) :
"""
Abstract class to look for a line in a table_adaptator.
table_adaptator is like :
[ (adaptator-1, output-file-A1, output-file-B1),
(adaptator-2, output-file-A2, output-file-B2),
...
(adaptator-N, output-file-AN, output-file-BN)
]
    In single-end mode, table_adaptator only has one output file per tuple.
    You must implement the methods _single_select and _paired_select.
"""
def __init__(self, table_adaptator, single_end) :
"""
If single_end is True, a call to monSelector.select( sequence )
will execute the method _single_select_ otherwise, the call will be
monSelector.select( sequence-1, sequence-2 ) and will execute the
method _paired_select_
"""
self.table_adaptator = table_adaptator
if single_end :
self.select = self._single_select
else :
self.select = self._paired_select
def _single_select( self, sequence ) :
"""
Look for a line in table_adaptator with only one sequence
"""
raise NotImplementedError
def _paired_select( self, sequence_1, sequence_2 ) :
"""
Look for a line in table_adaptator with two sequences
"""
raise NotImplementedError
class Levenshtein_selector( Selector ) :
table_adaptator = None
single_end = False
rate = 0
def __init__( self, table_adaptator, single_end, rate ) :
if not isinstance( rate, float ) :
raise ValueError( "rate argument must be a float not %s" % type( rate ) )
Selector.__init__( self, table_adaptator, single_end)
self.rate = rate
def _single_select( self, sequence) :
from Levenshtein import ratio
distances = []
for (adaptator, output_file) in self.table_adaptator :
dist = ratio( adaptator, sequence[ : len( adaptator ) ] )
if dist == 1.0 :
return (adaptator, output_file)
distances.append( dist )
max_dist = max( distances )
if max_dist >= self.rate and distances.count( max_dist ) == 1 :
return self.table_adaptator[ distances.index( max_dist ) ]
return None
def _paired_select( self, sequence_1, sequence_2) :
from Levenshtein import ratio
distances_1 = []
distances_2 = []
for line in self.table_adaptator :
adaptator = line[ 0 ]
dist_1 = ratio( adaptator, sequence_1[ : len( adaptator ) ] )
dist_2 = ratio( adaptator, sequence_2[ : len( adaptator ) ] )
distances_1.append( dist_1 )
distances_2.append( dist_2 )
max_dist_1 = max( distances_1 )
max_dist_2 = max( distances_2 )
if max_dist_1 > max_dist_2 :
if max_dist_1 >= self.rate and distances_1.count( max_dist_1 ) == 1 :
return self.table_adaptator[ distances_1.index( max_dist_1 ) ]
elif max_dist_1 < max_dist_2 :
if max_dist_2 >= self.rate and distances_2.count( max_dist_2 ) == 1 :
return self.table_adaptator[ distances_2.index( max_dist_2 ) ]
else :
if max_dist_1 >= self.rate :
if distances_1.count( max_dist_1 ) == 1 :
index_1 = distances_1.index( max_dist_1 )
index_2 = distances_2.index( max_dist_2 )
if index_1 == index_2 :
return self.table_adaptator[ index_1 ]
elif distances_2.count( max_dist_2 ) == 1 :
index_1 = distances_1.index( max_dist_1 )
index_2 = distances_2.index( max_dist_2 )
if index_1 == index_2 :
return self.table_adaptator[ distances_2.index( max_dist_2 ) ]
return None
class LevenshteinAllSelector( Levenshtein_selector ) :
"""
Same as Levenshtein_selector except that in paired-end, both members
of the pair must be above or equal to the min ratio and adaptators of
both members must be identical
"""
def _paired_select( self, sequence_1, sequence_2) :
from Levenshtein import ratio
distances_1 = []
distances_2 = []
for line in self.table_adaptator :
adaptator = line[ 0 ]
dist_1 = ratio( adaptator, sequence_1[ : len( adaptator ) ] )
dist_2 = ratio( adaptator, sequence_2[ : len( adaptator ) ] )
distances_1.append( dist_1 )
distances_2.append( dist_2 )
max_dist_1 = max( distances_1 )
max_dist_2 = max( distances_2 )
if ( max_dist_1 >= self.rate and max_dist_2 >= self.rate
and distances_1.count( max_dist_1 ) == distances_2.count( max_dist_2 ) == 1 ) :
adapt_1 = self.table_adaptator[ distances_1.index( max_dist_1 ) ]
adapt_2 = self.table_adaptator[ distances_2.index( max_dist_2 ) ]
if adapt_1 == adapt_2 :
return adapt_1
else :
return None
class Std_selector( Selector ):
"""
    Dichotomic (binary) search in table_adaptator.
    If the provided table is empty, return None.
"""
def _paired_select( self, sequence_1, sequence_2):
l1 = self._single_select( sequence_1 )
l2 = self._single_select( sequence_2 )
if l1 is None :
return l2
if l2 is None :
return l1
if l1 == l2 :
return l1
return None
def _single_select( self, sequence):
a = 0
b = len( self.table_adaptator ) -1
if b == -1 :
return None
while a <= b :
m = ( a + b ) // 2
adaptator = self.table_adaptator[ m ][ 0 ]
start_seq = sequence[ : len( adaptator ) ]
if adaptator > start_seq :
b = m - 1
elif adaptator < start_seq :
a = m + 1
else :
return self.table_adaptator[ m ]
if adaptator == sequence[ : len( adaptator ) ] :
return self.table_adaptator[ m ]
return None
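
# Illustrative sketch (not part of the original script), never invoked: the
# dichotomic lookup above applied to a sorted table where the output files are
# replaced by plain tag names.
def _demo_std_selector():
    table = [ ("AACC", "sample_A"), ("GGTT", "sample_B") ]  # sorted by adaptator
    selector = Std_selector( table, True )  # True -> single-end mode
    print( selector.select( "AACCGTACGTAC" ) )  # -> ('AACC', 'sample_A')
    print( selector.select( "TTTTGTACGTAC" ) )  # -> None (read goes to the trash tag)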
def get_adapt_counter( opened_adapt_file ) :
"""
Return a hash where keys are the adaptators
and values are initialized with [ name_tag, 0 ]
"""
d = {}
opened_adapt_file.seek(0)
for line in opened_adapt_file :
if not line.isspace() :
try :
adapt, name_tag = line.split()
except ValueError :
print >> sys.stderr, "File '%s' is malformed." % opened_adapt_file.name
exit( 1 )
d[ adapt ] = [ name_tag, 0 ]
return d
def get_maximal_annalogie( file_adapt ) :
"""
Compute maximal levenshtein between all adaptators
"""
from Levenshtein import ratio
adaptators = []
for line in file_adapt :
if line :
(adapt, name) = line.split()
if adapt != "*" :
adaptators.append( adapt )
ratio_max = 0.0
for i, adapt in enumerate( adaptators ) :
for adapt2 in adaptators[i+1:] :
ratio_max = max( ratio_max,ratio( adapt, adapt2 ) )
return ratio_max
def get_output_files( opened_adapt_file, prefix, paired_end=True ) :
"""
Create output files and put them in a list:
    if paired_end is True, two files per adaptator are created
[ (adaptator, output_file_1, output_file_2 ), ... ]
otherwise only one
[ (adaptator, output_file ), ... ]
    The function returns the files table and the Trash files
Two trash files for paired-end, one for single-end
( table, (trash-file, ) )
"""
ada_files = []
default = None
cache_name_file_by_adapt = {}
for line in opened_adapt_file :
if not line.isspace() :
try :
adapt, suffix_file = line.split()
except ValueError :
print >> sys.stderr, "File '%s' is malformed." % opened_adapt_file.name
exit( 1 )
if paired_end :
if line[0] == '*' :
default = ( Fastq_file( "%s-%s_1.fastq" % (prefix, suffix_file), "w" ),
Fastq_file( "%s-%s_2.fastq" % (prefix, suffix_file), "w" ), )
else :
if suffix_file in cache_name_file_by_adapt :
f1, f2 = cache_name_file_by_adapt[ suffix_file ]
ada_files.append( ( adapt, f1, f2 ) )
else :
f1 = Fastq_file( "%s-%s_1.fastq" % (prefix, suffix_file), "w" )
f2 = Fastq_file( "%s-%s_2.fastq" % (prefix, suffix_file), "w" )
ada_files.append( (adapt, f1, f2) )
cache_name_file_by_adapt[ suffix_file ] = (f1, f2)
else :
# TODO Make cache system for single mode.
if line[0] == '*' :
default = ( Fastq_file( "%s-%s.fastq" % (prefix, suffix_file), "w" ) , )
else :
if suffix_file in cache_name_file_by_adapt :
f1 = cache_name_file_by_adapt[ suffix_file ]
ada_files.append( ( adapt, f1 ) )
else:
f1 = Fastq_file( "%s-%s.fastq" % (prefix, suffix_file), "w" )
ada_files.append( ( adapt, f1 ) )
cache_name_file_by_adapt[ suffix_file ] = ( f1 )
if default is None :
print >> sys.stderr, "File '%s' doesn't have a line with the joker tag *.\nAdd a line '* tag_name_for_trash'." % opened_adapt_file.name
sys.exit(1)
ada_files.sort()
return ada_files, default
def parse_user_argument() :
"""
Recover user argument
"""
parser = argparse.ArgumentParser( description="demultiplex fastq_file(s)" )
parser.add_argument( '-V', '--version', action='version', help="Print the version and license",
version="%(prog)s 1.1\nCopyright (C) 2011 DAVEM, 2014 AGAP\nGPL3+\nWritten by Vincent Maillol" )
parser.add_argument( '-v', '--verbose', dest="verbose", action='store_true',
help="Be verbose" )
parser.add_argument( 'file_adapt', metavar="FILE_ADAPT", nargs=1, type=argparse.FileType('r'),
help="Format is one line by adaptor, such as: adaptor_1<tab>id_sample_1, etc. Last line should be like *<tab>name_trash")
parser.add_argument( '-f', '--fastq_1', dest="fastq_1", type=FastqFileType( "r" ), action='store',
help="For a single-end file or the first paired-end file" )
parser.add_argument( '-F', '--fastq_2', dest="fastq_2", type=FastqFileType( "r" ), action='store', default=None,
help="For the 2nd paired-end file" )
parser.add_argument( '-p', '--output_prefix', dest="output_prefix", default="", action='store',
help="Output files have name PREFIX-ADAPTOR.fastq" )
parser.add_argument( '-l', '--levenshtein', dest="levenshtein", action='store', type=float, default=None,
help="Use a Levenshtein distance to demultiplex" )
parser.add_argument( '-a', '--analogy', dest="analogy", action='store_true',
help="Compute the maximal Levenshtein ratio between adaptors" )
parser.add_argument( '--all', dest="all", action='store_true',
help="If this option is used with option levenshtein in paired-end, both members should be higher than the ratio and each should be close to one adaptor. If option levenshtein is not used, this option is not used either." )
user_args = parser.parse_args()
user_args.file_adapt = user_args.file_adapt[0]
user_args.single_end = user_args.fastq_2 is None
return user_args
def main() :
user_args = parse_user_argument()
if user_args.analogy :
print "Maximal Levenshtein ratio between adaptors is %f" % get_maximal_annalogie( user_args.file_adapt )
sys.exit(0)
output_files_by_adapt, defaults_files = get_output_files( user_args.file_adapt,
user_args.output_prefix,
not user_args.single_end )
nb_reads_writen = get_adapt_counter( user_args.file_adapt )
user_args.file_adapt.close()
if user_args.levenshtein :
if user_args.all :
select_output_file = LevenshteinAllSelector( output_files_by_adapt,
user_args.single_end,
user_args.levenshtein )
else :
select_output_file = Levenshtein_selector( output_files_by_adapt,
user_args.single_end,
user_args.levenshtein )
else :
select_output_file = Std_selector( output_files_by_adapt,
user_args.single_end )
if user_args.single_end :
print "single end"
default_file = defaults_files[0]
for str_read in user_args.fastq_1 :
read = Fastq_read( str_read )
adapt_and_line = select_output_file.select( read.seq )
if adapt_and_line is None :
if user_args.verbose :
print "Read '%s' start with %s... and go to *" % (read.name, read.seq[ : 14 ])
default_file.write( "%s" % str( read ) )
nb_reads_writen[ '*' ][ 1 ] += 1
else :
(adapt, output_file) = adapt_and_line
if user_args.verbose :
print "Read '%s' start with %s... and go to %s" % (read.name, read.seq[ : len( adapt ) ], adapt)
read.cut_start( len( adapt ) )
output_file.write( "%s" % str( read ) )
nb_reads_writen[ adapt ][ 1 ] += 1
user_args.fastq_1.close()
for adapt, output_file in output_files_by_adapt :
if not output_file.closed:
output_file.write("")
output_file.close()
if not default_file.closed:
default_file.write("")
default_file.close()
else :
print "paired-end"
(default_file_1, default_file_2) = defaults_files
for str_read_1, str_read_2 in izip( user_args.fastq_1, user_args.fastq_2 ) :
read_1 = Fastq_read( str_read_1 )
read_2 = Fastq_read( str_read_2 )
adapt_and_line = select_output_file.select( read_1.seq, read_2.seq )
if adapt_and_line is None :
default_file_1.write( "%s" % str( read_1 ) )
default_file_2.write( "%s" % str( read_2 ) )
nb_reads_writen[ '*' ][1] += 1
else :
(adapt, output_file_1, output_file_2 ) = adapt_and_line
read_1.cut_start( len( adapt ) )
read_2.cut_start( len( adapt ) )
output_file_1.write( "%s" % str( read_1 ) )
output_file_2.write( "%s" % str( read_2 ) )
nb_reads_writen[ adapt ][1] += 1
user_args.fastq_1.close()
user_args.fastq_2.close()
for adapt, file_1, file_2 in output_files_by_adapt :
if not file_1.closed:
file_1.write("")
file_1.close()
if not file_2.closed:
file_2.write("")
file_2.close()
if not default_file_1.closed:
default_file_1.write("")
default_file_1.close()
if not default_file_2.closed:
default_file_2.write("")
default_file_2.close()
# show stat.
for nb_reads_by_name in nb_reads_writen.values() :
print "%s %d reads" % tuple( nb_reads_by_name )
if __name__ == '__main__':
main()
| gpl-3.0 | -8,934,429,547,450,170,000 | 35.586694 | 248 | 0.527635 | false | 3.802808 | false | false | false |
nsmoooose/csp | csp/data/ui/scripts/utils.py | 1 | 2369 | #!/usr/bin/python
# Combat Simulator Project
# Copyright (C) 2002-2005 The Combat Simulator Project
# http://csp.sourceforge.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Combat Simulator Project : Utils script
"""
import csp.cspsim
class SlotProxy(csp.cspsim.Slot):
def __init__(self, method):
csp.cspsim.Slot.__init__(self)
self.method = method
def notify(self, data):
self.method()
class SlotManager:
def __init__(self):
self.slots = []
def connectToClickSignal(self, control, method):
signal = control.getClickSignal()
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
def connectToCheckedChangedSignal(self, control, method):
signal = control.getCheckedChangedSignal()
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
def connectToSelectedItemChangedSignal(self, control, method):
signal = control.getSelectedItemChangedSignal()
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
def connectToInputInterfaceAction(self, cspsim, action, method):
gameScreen = cspsim.getCurrentScreen()
interfaceAdapter = gameScreen.getInputInterfaceWfAdapter()
signal = interfaceAdapter.registerActionSignal(action)
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
class ListBoxManager:
def addListToControl(self, listbox, texts):
for text in texts:
listBoxItem = csp.cspsim.ListBoxItem()
listBoxItem.setText(text)
listbox.addItem(listBoxItem)
| gpl-2.0 | 9,143,938,031,847,058,000 | 32.842857 | 77 | 0.686788 | false | 4.022071 | false | false | false |
FRVisser/loramotedata | data.py | 1 | 3344 | #!/usr/bin/env python3
import json
import base64
import binascii
import csv
from datetime import datetime
with open ( 'data.json' ) as data_file:
data = json.load ( data_file )
lenData = len ( data["points"] )
hexData = []
for i in range ( 0, lenData ):
hexData.append(i)
hexData[i] = ( binascii.b2a_hex ( binascii.a2b_base64 ( data["points"][i]["data"] )))
led = []
pressure = []
temperature = []
altitude = []
battery = []
latitude = []
longitude = []
elevation = []
time = []
delta = []
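# Payload layout (inferred from the slicing below, assuming the 16-byte,
# 32-hex-character LoRaMote demo frame):
#   [0:2]   LED state (1 byte)
#   [2:6]   pressure (2 bytes, raw * 10)
#   [6:10]  temperature (2 bytes, raw / 100)
#   [10:14] altitude (2 bytes, raw / 10)
#   [14:16] battery level (1 byte, raw / 255)
#   [16:22] latitude (3 bytes, signed, scaled to +/- 90 degrees)
#   [22:28] longitude (3 bytes, signed, scaled to +/- 180 degrees)
#   [28:32] GPS elevation (kept as a hex string)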
for i in range ( 0, lenData ):
led.append(i)
pressure.append(i)
temperature.append(i)
altitude.append(i)
battery.append(i)
latitude.append(i)
longitude.append(i)
elevation.append(i)
time.append(i)
led[i] = int(hexData[i][:2], 16)
pressure[i] = int(hexData[i][2:-26], 16) * 10.0
temperature[i] = int(hexData[i][6:-22], 16) / 100.0
altitude[i] = int(hexData[i][10:-18], 16) / 10.0
battery[i] = (int(hexData[i][14:-16], 16) / 255.0)
latitude[i] = hexData[i][16:-10]
if int(latitude[i],16) & 0x800000:
latitude[i] = ( ( int(latitude[i],16) & 0x00FFFFFF ) + 1 ) * -90.0 / 0x800000;
else:
latitude[i] = int(latitude[i],16) * 90.0 / 0x7FFFFF;
longitude[i] = hexData[i][22:-4]
if int(longitude[i],16) & 0x800000:
longitude[i] = ( ( int(longitude[i],16) & 0x00FFFFFF ) + 1 ) * -180.0 / 0x800000;
else:
longitude[i] = int(longitude[i],16) * 180.0 / 0x7FFFFF;
elevation[i] = hexData[i][28:]
time[i] = datetime.strptime(data["points"][i]["time"][11:][:8], '%H:%M:%S')
startTime = min(time)
for i in range ( 0, lenData ):
delta.append(i)
delta[i] = time[i] - startTime
print ( led[0] )
print ( pressure[0] )
print ( temperature[0] )
print ( altitude[0] )
print ( battery[0] )
print ( latitude[0] )
print ( longitude[0] )
print ( elevation[0] )
print ( time[0])
print ( hexData[0] )
print ( hexData[lenData - 1])
# Text mode with newline='' is what Python 3's csv module expects; the writer
# gets its own name so it does not shadow the csv module.
with open('data.csv', 'w', newline='') as csvfile:
    writer = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
    # writer.writerow(['latitude', 'longitude'])
    #
    # for i in range ( 0, lenData ):
    #     writer.writerow([latitude[i], longitude[i]])
    writer.writerow(['delta', 'time', 'node_eui', 'gateway_eui', 'led', 'pressure', 'temperature', 'altitude', 'battery', 'latitude', 'longitude', 'elevation'])
    for i in range ( 0, lenData ):
        writer.writerow([delta[i], time[i], data["points"][i]["node_eui"], data["points"][i]["gateway_eui"], led[i], pressure[i], temperature[i], altitude[i], battery[i], latitude[i], longitude[i], elevation[i]])
# print ("second time")
#
#
# with open('map.csv', 'wb') as csvfile:
# csv = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
# # csv.writerow(['latitude', 'longitude'])
# #
# # for i in range ( 0, lenData ):
# # csv.writerow([latitude[i], longitude[i]])
# csv.writerow(['latitude', 'longitude'])
#
# for i in range ( 0, lenData ):
# csv.writerow([latitude[i], longitude[i]])
# time = [h , m, s]
# for i in range ( 0, lenData ):
# time.append(i)
# time0h = int(data["points"][0]["time"][11:-14])
# time0m = int(data["points"][0]["time"][14:-11])
# time0s = int(data["points"][0]["time"][17:-8])
#time1 = data["points"][10]["time"][11:-8]
| gpl-2.0 | -2,641,337,523,662,153,000 | 28.078261 | 209 | 0.586124 | false | 2.807725 | false | false | false |
shanzi/tchelper | tchelper/settings.py | 1 | 2980 | """
Django settings for tchelper project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+%m9a62-i($(xc4ok(3y5!1!=dvydl$n5p$+$85%3g_((un=e@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = (os.environ.get('DJANGO_DEBUG', 'on') == 'on')
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ('127.0.0.1',)
# Application definition
INSTALLED_APPS = (
# 'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# 3rd party
'rest_framework',
# user apps
'frontpage',
'api',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tchelper.urls'
WSGI_APPLICATION = 'tchelper.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
import dj_database_url
DATABASES = {
'default': dj_database_url.config(default='sqlite:///db.sqlite3')
}
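# Example (illustrative): dj_database_url reads the DATABASE_URL environment
# variable, e.g. DATABASE_URL=postgres://user:password@localhost:5432/tchelper,
# and falls back to the local sqlite3 file configured above when it is unset.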
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles/')
# Email settings
EMAIL_BACKEND = 'api.utils.MailgunEmailBackEnd'
EMAIL_BASE_URL = os.environ.get('EMAIL_BASE_URL', '')
MAILGUN_KEY = os.environ.get('MAILGUN_KEY', '')
MAILGUN_URL = os.environ.get('MAILGUN_URL', '')
DEFAULT_FROM_EMAIL = os.environ.get('DEFAULT_FROM_EMAIL', '')
# Rest Frameworks
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',)
}
# Account settings
LOGIN_URL = 'login'
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
AUTH_PROFILE_MODULE = 'api.UserProfile'
SECURE_SSL_REDIRECT = True
APPEND_SLASH = False
| bsd-2-clause | 3,063,481,958,019,438,600 | 24.689655 | 78 | 0.716779 | false | 3.23913 | false | false | false |
LTS5/connectomeviewer | cviewer/plugins/cff2/csurface_darray.py | 1 | 2088 | """ The ConnectomeViewer wrapper for a cfflib object """
# Copyright (C) 2009-2010, Ecole Polytechnique Federale de Lausanne (EPFL) and
# University Hospital Center and University of Lausanne (UNIL-CHUV)
#
# Modified BSD License
# Standard library imports
import os
# Enthought library imports
from traits.api import HasTraits, Str, Bool, CBool, Any, Dict, implements, \
List, Instance, DelegatesTo, Property
from traitsui.api import View, Item, auto_close_message, message
# ConnectomeViewer imports
import cfflib
try:
from nibabel.nifti1 import intent_codes
except ImportError:
print("Please install Nibabel >= 1.1.0")
# Logging import
import logging
logger = logging.getLogger('root.'+__name__)
class CSurfaceDarray(HasTraits):
""" The implementation of the Connectome Surface data array """
def __init__(self, darray, **traits):
super(CSurfaceDarray, self).__init__(**traits)
self.data = darray
if not self.data.meta is None:
getdict = self.data.get_metadata()
prim = ''
if getdict.has_key('AnatomicalStructurePrimary'):
prim = getdict['AnatomicalStructurePrimary']
sec = ''
if getdict.has_key('AnatomicalStructureSecondary'):
sec = getdict['AnatomicalStructureSecondary']
# name resolution
if prim == '':
if sec == '':
dname = 'Data arrays (%s)' % str(intent_codes.label[self.data.intent])
else:
dname = '%s (%s)' % (sec, str(intent_codes.label[self.data.intent]))
else:
if sec == '':
dname = '%s (%s)' % (prim, str(intent_codes.label[self.data.intent]))
else:
dname = '%s / %s (%s)' % (prim, sec, str(intent_codes.label[self.data.intent]))
else:
dname = 'Data arrays (%s)' % str(intent_codes.label[self.data.intent])
self.dname = dname
# attach it to parent?
| bsd-3-clause | 2,409,175,742,572,920,300 | 33.229508 | 99 | 0.58046 | false | 3.866667 | false | false | false |
stlemme/python-dokuwiki-export | modgrammar/examples/infix_precedence.py | 2 | 1898 | #!/usr/bin/python3
import sys
from modgrammar import *
grammar_whitespace_mode = 'optional'
class Number (Grammar):
grammar = (OPTIONAL('-'), WORD('0-9'), OPTIONAL('.', WORD('0-9')))
def value(self):
return float(self.string)
class ParenExpr (Grammar):
grammar = (L('('), REF('Expr'), L(')'))
def value(self):
return self[1].value()
class P0Term (Grammar):
grammar = (ParenExpr | Number)
def value(self):
return self[0].value()
class P0Expr (Grammar):
grammar = (P0Term, ONE_OR_MORE(L('/'), P0Term))
def value(self):
value = self[0].value()
for e in self[1]:
value /= e[1].value()
return value
class P1Term (Grammar):
grammar = (P0Expr | ParenExpr | Number)
def value(self):
return self[0].value()
class P1Expr (Grammar):
grammar = (P1Term, ONE_OR_MORE(L('*'), P1Term))
def value(self):
value = self[0].value()
for e in self[1]:
value *= e[1].value()
return value
class P2Term (Grammar):
grammar = (P0Expr | P1Expr | ParenExpr | Number)
def value(self):
return self[0].value()
class P2Expr (Grammar):
grammar = (P2Term, ONE_OR_MORE(L('+') | L('-'), P2Term))
def value(self):
value = self[0].value()
for e in self[1]:
if e[0].string == '+':
value += e[1].value()
else:
value -= e[1].value()
return value
class Expr (Grammar):
grammar = (P2Expr | P1Expr | P0Expr | ParenExpr | Number)
def value(self):
return self[0].value()
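
# Illustrative sketch (not part of the original example), never invoked: the
# same grammar evaluated on a fixed expression instead of sys.argv.
def _demo():
    result = Expr.parser().parse_text("1 + 2 * (3 - 1.5)", eof=True)
    print("Value: {}".format(result.value()))  # -> Value: 4.0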
if __name__ == '__main__':
parser = Expr.parser()
result = parser.parse_text(sys.argv[1], eof=True)
remainder = parser.remainder()
print("Parsed Text: {}".format(result))
print("Unparsed Text: {}".format(remainder))
print("Value: {}".format(result.value()))
| mit | -539,265,212,046,042,400 | 22.725 | 70 | 0.553741 | false | 3.353357 | false | false | false |
rmyers/trove-dashboard | trove_dashboard/databases/views.py | 1 | 5557 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing instances.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from horizon import tables
from horizon import workflows
from trove_dashboard import api
from .tabs import InstanceDetailTabs
from .tables import InstancesTable
from .workflows import LaunchInstance
LOG = logging.getLogger(__name__)
class IndexView(tables.DataTableView):
table_class = InstancesTable
template_name = 'project/databases/index.html'
def has_more_data(self, table):
return self._more
def get_data(self):
marker = self.request.GET. \
get(InstancesTable._meta.pagination_param, None)
# Gather our instances
try:
instances = api.trove.instance_list(self.request, marker=marker)
LOG.info(msg=_("Obtaining instances at %s class"
% repr(IndexView.__class__)))
self._more = False
except:
self._more = False
instances = []
LOG.critical("Http 500. Internal server error. "
"Unable to retrieve instances.")
return instances
#exceptions.handle(self.request, ignore=True)
# Gather our flavors and correlate our instances to them
if instances:
try:
flavors = api.trove.flavor_list(self.request)
LOG.info(msg=_("Obtaining flavor list from nova at %s class"
% repr(IndexView.__class__)))
except:
flavors = []
LOG.critical(msg=_("Nova exception while obtaining "
"flavor list at % class"
% repr(IndexView.__class__)))
exceptions.handle(self.request, ignore=True)
full_flavors = SortedDict([(str(flavor.id), flavor)
for flavor in flavors])
# Loop through instances to get flavor info.
for instance in instances:
try:
flavor_id = instance.flavor["id"]
if flavor_id in full_flavors:
instance.full_flavor = full_flavors[flavor_id]
else:
# If the flavor_id is not in full_flavors list,
# get it via nova api.trove.
instance.full_flavor = api.trove.flavor_get(
self.request, flavor_id)
except:
msg = _('Unable to retrieve instance size information')
LOG.critical(msg + _(" at %s class"
% repr(IndexView.__class__)))
exceptions.handle(self.request, msg)
return instances
class LaunchInstanceView(workflows.WorkflowView):
workflow_class = LaunchInstance
template_name = "project/databases/launch.html"
def get_initial(self):
initial = super(LaunchInstanceView, self).get_initial()
initial['project_id'] = self.request.user.tenant_id
initial['user_id'] = self.request.user.id
return initial
class DetailView(tabs.TabbedTableView):
tab_group_class = InstanceDetailTabs
template_name = 'project/databases/detail.html'
def get_context_data(self, **kwargs):
context = super(DetailView, self).get_context_data(**kwargs)
context["instance"] = self.get_data()
return context
def get_data(self):
if not hasattr(self, "_instance"):
try:
instance_id = self.kwargs['instance_id']
instance = api.trove.instance_get(self.request, instance_id)
LOG.info(msg=_("Obtaining instance for detailed view "
"at %s class" % repr(DetailView.__class__)))
instance.full_flavor = api.trove.flavor_get(
self.request, instance.flavor["id"])
except:
redirect = reverse('horizon:project:databases:index')
                LOG.critical(msg=_("Exception while obtaining instance"
" for detailed view at %s class"
% repr(DetailView.__class__)))
exceptions.handle(self.request,
_('Unable to retrieve details for '
'instance "%s".') % instance_id,
redirect=redirect)
self._instance = instance
return self._instance
def get_tabs(self, request, *args, **kwargs):
instance = self.get_data()
return self.tab_group_class(request, instance=instance, **kwargs)
| apache-2.0 | -2,986,611,437,899,686,000 | 38.692857 | 78 | 0.573511 | false | 4.741468 | false | false | false |
ratnania/pyccel | src_old/tests/examples/projectors/projectors_2d.py | 1 | 7068 | # coding: utf-8
#$ header legendre(int)
def legendre(p):
k = p + 1
x = zeros(k, double)
w = zeros(k, double)
if p == 1:
x[0] = -0.577350269189625765
x[1] = 0.577350269189625765
w[0] = 1.0
w[1] = 1.0
elif p == 2:
x[0] = -0.774596669241483377
x[1] = 0.0
x[2] = 0.774596669241483377
w[0] = 0.55555555555555556
w[1] = 0.888888888888888889
w[2] = 0.55555555555555556
elif p == 3:
x[0] = -0.861136311594052575
x[1] = -0.339981043584856265
x[2] = 0.339981043584856265
x[3] = 0.861136311594052575
w[0] = 0.347854845137453853
w[1] = 0.65214515486254615
w[2] = 0.65214515486254614
w[3] = 0.34785484513745386
return x,w
#$ header make_knots(int, int)
def make_knots(n,p):
n_elements = n-p
m = n+p+1
knots = zeros(m, double)
for i in range(0, p+1):
knots[i] = 0.0
for i in range(p+1, n):
j = i-p
knots[i] = j / n_elements
for i in range(n, n+p+1):
knots[i] = 1.0
return knots
#$ header make_greville(double [:], int, int)
def make_greville(knots, n, p):
greville = zeros(n, double)
for i in range(0, n):
s = 0.0
for j in range(i+1, i+p+1):
s = s + knots[j]
greville[i] = s / p
return greville
#$ header f_scalar(double, double)
def f_scalar(x, y):
f = x * y
return f
#$ header f_vector(double, double)
def f_vector(x, y):
f1 = x*y
f2 = x*y
return f1, f2
#$ header integrate_edge(int, int, double, double [:], double [:], double, double, int)
def integrate_edge(component, axis, y, us, ws, x_min, x_max, p):
r = 0.0
d = x_max - x_min
for j in range(0, p+1):
u = us[j]
w = ws[j]
x = x_min + d * u
w = 0.5 * d * w
if axis==0:
f1, f2 = f_vector(x, y)
else:
f1, f2 = f_vector(y, x)
if component == 0:
f = f1
else:
f = f2
r = r + f * w
return r
#$ header interpolate_V_0(double [:], double [:], int, int, int, int)
def interpolate_V_0(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
nu1 = n_elements_u+1
nv1 = n_elements_v+1
r = zeros(nu1*nv1, double)
i = 0
for i_u in range(0, n_elements_u+1):
for i_v in range(0, n_elements_v+1):
r[i] = f_scalar(t_u[i_u], t_v[i_v])
i = i + 1
return r
#$ header interpolate_V_1(double [:], double [:], int, int, int, int)
def interpolate_V_1(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
us, wus = legendre(p_u)
vs, wvs = legendre(p_v)
us = us + 1.0
us = 0.5 * us
vs = vs + 1.0
vs = 0.5 * vs
nu1 = n_elements_u
nv1 = n_elements_v+1
nu2 = n_elements_u+1
nv2 = n_elements_v
r_0 = zeros((nu1, nv1), double)
r_1 = zeros((nu2, nv2), double)
component = 0
axis = 0
for i_u in range(0, n_elements_u):
x_min = t_u[i_u]
x_max = t_u[i_u+1]
for i_v in range(0, n_elements_v+1):
y = t_v[i_v]
r_0[i_u, i_v] = integrate_edge(component, axis, y, us, wus, x_min, x_max, p_u)
component = 1
axis = 1
for i_u in range(0, n_elements_u+1):
y = t_u[i_u]
for i_v in range(0, n_elements_v):
x_min = t_v[i_v]
x_max = t_v[i_v+1]
r_1[i_u, i_v] = integrate_edge(component, axis, y, vs, wvs, x_min, x_max, p_v)
m = nu1 * nv1 + nu2 * nv2
r = zeros(m, double)
i = 0
for i_u in range(0, nu1):
for i_v in range(0, nv1):
r[i] = r_0[i_u, i_v]
i = i + 1
for i_u in range(0, nu2):
for i_v in range(0, nv2):
r[i] = r_1[i_u, i_v]
i = i + 1
return r
#$ header interpolate_V_2(double [:], double [:], int, int, int, int)
def interpolate_V_2(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
us, wus = legendre(p_u)
vs, wvs = legendre(p_v)
us = us + 1.0
us = 0.5 * us
vs = vs + 1.0
vs = 0.5 * vs
nu1 = n_elements_u+1
nv1 = n_elements_v
nu2 = n_elements_u
nv2 = n_elements_v+1
r_0 = zeros((nu1, nv1), double)
r_1 = zeros((nu2, nv2), double)
component = 0
axis = 1
for i_u in range(0, n_elements_u+1):
y = t_u[i_u]
for i_v in range(0, n_elements_v):
x_min = t_v[i_v]
x_max = t_v[i_v+1]
r_0[i_u, i_v] = integrate_edge(component, axis, y, vs, wvs, x_min, x_max, p_v)
component = 1
axis = 0
for i_u in range(0, n_elements_u):
x_min = t_u[i_u]
x_max = t_u[i_u+1]
for i_v in range(0, n_elements_v+1):
y = t_v[i_v]
r_1[i_u, i_v] = integrate_edge(component, axis, y, us, wus, x_min, x_max, p_u)
m = nu1 * nv1 + nu2 * nv2
r = zeros(m, double)
i = 0
for i_u in range(0, nu1):
for i_v in range(0, nv1):
r[i] = r_0[i_u, i_v]
i = i + 1
for i_u in range(0, nu2):
for i_v in range(0, nv2):
r[i] = r_1[i_u, i_v]
i = i + 1
return r
#$ header interpolate_V_3(double [:], double [:], int, int, int, int)
def interpolate_V_3(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
us, wus = legendre(p_u)
vs, wvs = legendre(p_v)
us = us + 1.0
us = 0.5 * us
vs = vs + 1.0
vs = 0.5 * vs
r = zeros(n_elements_u*n_elements_v, double)
i = 0
for i_u in range(0, n_elements_u):
x_min = t_u[i_u]
x_max = t_u[i_u+1]
dx = x_max - x_min
for i_v in range(0, n_elements_v):
y_min = t_v[i_v]
y_max = t_v[i_v+1]
dy = y_max - y_min
contribution = 0.0
for j_u in range(0, p_u+1):
x = x_min + dx * us[j_u]
for j_v in range(0, p_v+1):
y = y_min + dy * vs[j_v]
w = wus[j_u] * wvs[j_v]
w = 0.5 * dx * dy * w
f = f_scalar(x,y)
contribution = contribution + w * f
r[i] = contribution
i = i + 1
return r
n_elements_u = 2
n_elements_v = 2
p_u = 2
p_v = 2
n_u = p_u + n_elements_u
n_v = p_v + n_elements_v
knots_u = make_knots(n_u, p_u)
knots_v = make_knots(n_v, p_v)
greville_u = make_greville(knots_u, n_u, p_u)
greville_v = make_greville(knots_v, n_v, p_v)
#print("knots_u = ", knots_u)
#print("knots_v = ", knots_v)
#print("greville_u = ", greville_u)
#print("greville_v = ", greville_v)
r_0 = interpolate_V_0(greville_u, greville_v, n_u, n_v, p_u, p_v)
r_1 = interpolate_V_1(greville_u, greville_v, n_u, n_v, p_u, p_v)
r_2 = interpolate_V_2(greville_u, greville_v, n_u, n_v, p_u, p_v)
r_3 = interpolate_V_3(greville_u, greville_v, n_u, n_v, p_u, p_v)
print("r_0 = ", r_0)
print("r_1 = ", r_1)
print("r_2 = ", r_2)
print("r_3 = ", r_3)
| mit | 5,627,866,280,000,007,000 | 25.772727 | 90 | 0.477929 | false | 2.318898 | false | false | false |
shimpe/frescobaldi | frescobaldi_app/widgets/listedit.py | 1 | 6216 | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Widgets to edit a list of items in a flexible way.
"""
from __future__ import unicode_literals
from PyQt4.QtCore import pyqtSignal
from PyQt4.QtGui import (
QFileDialog, QGridLayout, QListWidget, QListWidgetItem, QPushButton, QWidget)
import app
import icons
class ListEdit(QWidget):
"""A widget to edit a list of items (e.g. a list of directories)."""
# emitted when anything changed in the listbox.
changed = pyqtSignal()
def __init__(self, *args, **kwargs):
QWidget.__init__(self, *args, **kwargs)
layout = QGridLayout(self)
self.setLayout(layout)
self.addButton = QPushButton(icons.get('list-add'), '')
self.editButton = QPushButton(icons.get('document-edit'), '')
self.removeButton = QPushButton(icons.get('list-remove'), '')
self.listBox = QListWidget()
layout.setContentsMargins(1, 1, 1, 1)
layout.setSpacing(0)
layout.addWidget(self.listBox, 0, 0, 8, 1)
layout.addWidget(self.addButton, 0, 1)
layout.addWidget(self.editButton, 1, 1)
layout.addWidget(self.removeButton, 2, 1)
@self.addButton.clicked.connect
def addClicked():
item = self.createItem()
if self.openEditor(item):
self.addItem(item)
@self.editButton.clicked.connect
def editClicked():
item = self.listBox.currentItem()
item and self.editItem(item)
@self.removeButton.clicked.connect
def removeClicked():
item = self.listBox.currentItem()
if item:
self.removeItem(item)
@self.listBox.itemDoubleClicked.connect
def itemDoubleClicked(item):
item and self.editItem(item)
self.listBox.model().layoutChanged.connect(self.changed)
def updateSelection():
selected = bool(self.listBox.currentItem())
self.editButton.setEnabled(selected)
self.removeButton.setEnabled(selected)
self.changed.connect(updateSelection)
self.listBox.itemSelectionChanged.connect(updateSelection)
updateSelection()
app.translateUI(self)
def translateUI(self):
self.addButton.setText(_("&Add..."))
self.editButton.setText(_("&Edit..."))
self.removeButton.setText(_("&Remove"))
def createItem(self):
return QListWidgetItem()
def addItem(self, item):
self.listBox.addItem(item)
self.itemChanged(item)
self.changed.emit()
def removeItem(self, item):
self.listBox.takeItem(self.listBox.row(item))
self.changed.emit()
def editItem(self, item):
if self.openEditor(item):
self.itemChanged(item)
self.changed.emit()
def setCurrentItem(self, item):
self.listBox.setCurrentItem(item)
def setCurrentRow(self, row):
self.listBox.setCurrentRow(row)
def openEditor(self, item):
"""Opens an editor (dialog) for the item.
Returns True if the dialog was accepted and the item edited.
Returns False if the dialog was cancelled (the item must be left
unedited).
"""
pass
def itemChanged(self, item):
"""Called after an item has been added or edited.
Re-implement to do something at this moment if needed, e.g. alter the
text or display of other items.
"""
pass
def setValue(self, strings):
"""Sets the listbox to a list of strings."""
self.listBox.clear()
self.listBox.addItems(strings)
self.changed.emit()
def value(self):
"""Returns the list of paths in the listbox."""
return [self.listBox.item(i).text()
for i in range(self.listBox.count())]
def setItems(self, items):
"""Sets the listbox to a list of items."""
self.listBox.clear()
for item in items:
self.listBox.addItem(item)
self.itemChanged(item)
self.changed.emit()
def items(self):
"""Returns the list of items in the listbox."""
return [self.listBox.item(i)
for i in range(self.listBox.count())]
def clear(self):
"""Clears the listbox."""
self.listBox.clear()
self.changed.emit()
class FilePathEdit(ListEdit):
"""
A widget to edit a list of directories (e.g. a file path).
"""
def __init__(self, *args, **kwargs):
super(FilePathEdit, self).__init__(*args, **kwargs)
def fileDialog(self):
"""The QFileDialog this widget is using."""
try:
return self._filedialog
except AttributeError:
self._filedialog = d = QFileDialog(self)
d.setFileMode(QFileDialog.Directory)
return d
def openEditor(self, item):
"""Asks the user for an (existing) directory."""
directory = item.text()
dlg = self.fileDialog()
dlg.selectFile(directory)
if dlg.exec_():
item.setText(dlg.selectedFiles()[0])
return True
return False
| gpl-2.0 | 7,993,347,465,381,578,000 | 31.715789 | 81 | 0.605051 | false | 4.16622 | false | false | false |
secnot/uva-onlinejudge-solutions | 10033 - Interpreter/main.py | 1 | 2573 | import sys
from itertools import count
def digit(n, num):
return num%(10**(n+1))//10**n
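# e.g. digit(0, 345) == 5 and digit(2, 345) == 3: the n-th decimal digit,
# counting from the least significant digit.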
class Interpreter(object):
def __init__(self, program):
self._reg = [0 for r in range(10)]
self._mem = [0]*1000
self._pc = 0
self._instructions = [
self.i000, self.i100, self.i200,\
self.i300, self.i400, self.i500,\
self.i600, self.i700, self.i800, self.i900]
for n, instruction in enumerate(program):
self._mem[n] = instruction
self._icounter = 0 # executed instructions count
def i100(self, op1, op0):
self._pc = None
def i200(self, op1, op0):
self._reg[op1] = op0
self._pc += 1
def i300(self, op1, op0):
self._reg[op1] = (self._reg[op1]+op0)%1000
self._pc += 1
def i400(self, op1, op0):
self._reg[op1] = (self._reg[op1]*op0)%1000
self._pc += 1
def i500(self, op1, op0):
self._reg[op1] = self._reg[op0]
self._pc += 1
def i600(self, op1, op0):
self._reg[op1] = (self._reg[op1]+self._reg[op0])%1000
self._pc += 1
def i700(self, op1, op0):
self._reg[op1] = (self._reg[op1]*self._reg[op0])%1000
self._pc += 1
def i800(self, op1, op0):
self._reg[op1] = self._mem[self._reg[op0]]
self._pc += 1
def i900(self, op1, op0):
self._mem[self._reg[op0]] = self._reg[op1]
self._pc += 1
def i000(self, op1, op0):
if not self._reg[op0]:
self._pc += 1
else:
self._pc = self._reg[op1]
def decode_execute(self, ins):
family, op1, op0 = digit(2, ins), digit(1, ins), digit(0, ins)
self._instructions[family](op1, op0)
def run(self):
while self._pc is not None:
ins = self._mem[self._pc]
self.decode_execute(ins)
self._icounter +=1
return self._icounter
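
# Illustrative sketch (not part of the original solution), never invoked:
# a three-instruction program exercising the 2xx/3xx/100 opcodes above.
def _demo_interpreter():
    prog = [299, 394, 100]  # reg9 = 9, reg9 = (9 + 4) % 1000, halt
    print(Interpreter(prog).run())  # -> 3 (instructions executed)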
def load_num():
line = sys.stdin.readline()
if line in ('', '\n'):
return None
else:
return int(line)
def load_prog():
prog = []
while True:
instruction = load_num()
if instruction is None:
break
prog.append(instruction)
return prog
if __name__ == '__main__':
# Number of programs
nprog = load_num()
# Discard empty line
sys.stdin.readline()
for n in range(nprog):
prog = load_prog()
inter = Interpreter(prog)
print(inter.run())
if n+1 < nprog:
print('')
| mit | 7,271,072,620,770,763,000 | 20.441667 | 70 | 0.508745 | false | 3.236478 | false | false | false |
tfiedor/perun | perun/check/linear_regression.py | 1 | 4513 | """The module contains the method for detection using linear regression.
This module contains the method for classifying the performance change between two profiles
according to the metrics and models computed from these profiles, based on linear regression.
"""
import scipy.stats as stats
import perun.utils as utils
import perun.check.general_detection as detect
import perun.check.fast_check as fast_check
def linear_regression(baseline_profile, target_profile, **_):
"""Temporary function, which call the general function and subsequently returns the
information about performance changes to calling function.
:param dict baseline_profile: base against which we are checking the degradation
:param dict target_profile: profile corresponding to the checked minor version
:param dict _: unification with other detection methods (unused in this method)
:returns: tuple (degradation result, degradation location, degradation rate, confidence)
"""
return detect.general_detection(
baseline_profile, target_profile, detect.ClassificationMethod.LinearRegression
)
def exec_linear_regression(
uid, baseline_x_pts, lin_abs_error, threshold, linear_diff_b1,
baseline_model, target_model, baseline_profile
):
"""Function executes the classification of performance change between two profiles with using
function from scipy module, concretely linear regression and regression analysis. If that fails
classification using linear regression, so it will be used regression analysis to the result of
absolute error. The absolute error is regressed in the all approach used in this method. This
error is calculated from the linear models from both profiles.
:param str uid: uid for which we are computing the linear regression
:param np_array baseline_x_pts: values of the independent variables from both profiles
:param np_array lin_abs_error: the value absolute error computed from the linear models obtained
from both profiles
:param integer threshold: the appropriate value for distinction individual state of detection
:param integer linear_diff_b1: difference coefficients b1 from both linear models
:param ModelRecord baseline_model: the best model from the baseline profile
:param ModelRecord target_model: the best model from the target profile
:param dict baseline_profile: baseline against which we are checking the degradation
:returns: string (classification of the change)
"""
# executing the linear regression
diff_b0 = target_model.b0 - baseline_model.b0
gradient, intercept, r_value, _, _ = stats.linregress(baseline_x_pts, lin_abs_error)
# check the first two types of change
change_type = ''
if baseline_model.type == 'linear' or baseline_model.type == 'constant':
if utils.abs_in_absolute_range(gradient, threshold) \
and utils.abs_in_relative_range(diff_b0, intercept, 0.05) \
and abs(diff_b0 - intercept) < 0.000000000001:
change_type = 'constant'
elif utils.abs_in_relative_range(linear_diff_b1, gradient, 0.3) \
and r_value**2 > 0.95:
change_type = 'linear'
else:
if utils.abs_in_absolute_range(gradient, threshold) \
and utils.abs_in_relative_range(diff_b0, intercept, 0.05):
change_type = 'constant'
elif utils.abs_in_relative_range(linear_diff_b1, gradient, 0.3) \
and r_value**2 > 0.95:
change_type = 'linear'
std_err_profile = fast_check.exec_fast_check(
uid, baseline_profile, baseline_x_pts, lin_abs_error
)
# obtaining the models (linear and quadratic) from the new regressed profile
quad_err_model = detect.get_filtered_best_models_of(
std_err_profile, group='param', model_filter=detect.create_filter_by_model('quadratic')
)
linear_err_model = detect.get_filtered_best_models_of(
std_err_profile, group='param', model_filter=detect.create_filter_by_model('linear')
)
# check the last quadratic type of change
if quad_err_model[uid].r_square > 0.90 \
and abs(quad_err_model[uid].r_square - linear_err_model[uid].r_square) > 0.01:
change_type = 'quadratic'
# We did not classify the change
if not change_type:
std_err_model = detect.get_filtered_best_models_of(std_err_profile, group='param')
change_type = std_err_model[uid].type
return change_type
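
# Illustrative sketch (not part of the module), never invoked: the scipy call
# used above applied to a synthetic, linearly growing absolute error.
# All values are made up; numpy is assumed to be available (scipy requires it).
def _demo_linregress():
    import numpy as np
    baseline_x_pts = np.linspace(0.0, 10.0, 50)
    lin_abs_error = 2.0 * baseline_x_pts + 0.5
    gradient, intercept, r_value, _, _ = stats.linregress(baseline_x_pts, lin_abs_error)
    # gradient ~ 2.0, intercept ~ 0.5, r_value**2 ~ 1.0, which the branches above
    # read as a 'linear' change (given a matching difference of the b1 coefficients).
    print(gradient, intercept, r_value ** 2)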
| gpl-3.0 | 3,713,429,883,736,428,000 | 47.010638 | 100 | 0.709949 | false | 4.08047 | false | false | false |
graingert/dockhand | shipwright/base.py | 1 | 1971 | from __future__ import absolute_import
from . import build, dependencies, docker, push
class Shipwright(object):
def __init__(self, source_control, docker_client, tags):
self.source_control = source_control
self.docker_client = docker_client
self.tags = tags
def targets(self):
return self.source_control.targets()
def build(self, build_targets):
targets = dependencies.eval(build_targets, self.targets())
this_ref_str = self.source_control.this_ref_str()
return self._build(this_ref_str, targets)
def _build(self, this_ref_str, targets):
# what needs building
for evt in build.do_build(self.docker_client, this_ref_str, targets):
yield evt
# now that we're built and tagged all the images.
# (either during the process of building or forwarding the tag)
# tag all containers with the human readable tags.
tags = self.source_control.default_tags() + self.tags + [this_ref_str]
for image in targets:
for tag in tags:
yield docker.tag_container(
self.docker_client,
image,
tag,
)
def push(self, build_targets, no_build=False):
"""
Pushes the latest images to the repository.
"""
targets = dependencies.eval(build_targets, self.targets())
this_ref_str = self.source_control.this_ref_str()
if not no_build:
            for evt in self._build(this_ref_str, targets):
yield evt
this_ref_str = self.source_control.this_ref_str()
tags = self.source_control.default_tags() + self.tags + [this_ref_str]
names_and_tags = []
for image in targets:
for tag in tags:
names_and_tags.append((image.name, tag))
for evt in push.do_push(self.docker_client, names_and_tags):
yield evt
| apache-2.0 | 4,241,055,058,138,033,700 | 34.196429 | 78 | 0.591071 | false | 4.030675 | false | false | false |
Tatsh/flac-tools | flactools/util.py | 1 | 1549 | import logging
import subprocess as sp
import sys
import tempfile
_log = logging.getLogger('flactools')
def encode_wav_to_mp3(wav_filename, output_filename, lame_options=['-V', '0']):
cmd = ['lame'] + lame_options + ['-S'] # Enforce no progress bar, etc
cmd += [wav_filename, output_filename]
_log.info(' '.join(cmd))
stdout, stderr = sp.Popen(cmd,
stdout=sp.PIPE,
stderr=sp.PIPE).communicate()
# stdout does not have anything
_log.debug(stderr.decode('utf-8').strip())
with open(output_filename, 'rb'):
pass
_log.info('Finished encoding to MP3')
def escape_quotes_for_cue(path):
return path.replace('"', r'\"')
def merge_audio(list_of_files):
merged_wav_output = tempfile.mkstemp(prefix='flactools.util.merge_audio__',
suffix='__.wav')[1]
sox_cmd = ['sox'] + list_of_files + [merged_wav_output]
_log.info(' '.join(sox_cmd))
p = sp.Popen(sox_cmd)
p.wait()
with open(merged_wav_output, 'rb'):
pass
return merged_wav_output
def get_logger(level=logging.ERROR, channel=None):
logger = logging.getLogger('flactools')
log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
formatter = logging.Formatter(log_format)
if not channel:
channel = logging.StreamHandler(sys.stderr)
logger.setLevel(level)
channel.setLevel(level)
channel.setFormatter(formatter)
logger.addHandler(channel)
return logger
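
# Illustrative usage sketch (not part of the original module), never invoked:
# file names are placeholders and the external `sox` and `lame` binaries must
# be installed for the calls to succeed.
def _demo():
    log = get_logger(logging.INFO)
    merged = merge_audio(['intro.wav', 'outro.wav'])  # hypothetical inputs
    encode_wav_to_mp3(merged, 'output.mp3', lame_options=['-V', '2'])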
| mit | -3,189,935,023,120,839,700 | 24.816667 | 79 | 0.608134 | false | 3.56092 | false | false | false |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/OpenGL/GL/ARB/enhanced_layouts.py | 9 | 4210 | '''OpenGL extension ARB.enhanced_layouts
This module customises the behaviour of the
OpenGL.raw.GL.ARB.enhanced_layouts to provide a more
Python-friendly API
Overview (from the spec)
This extension adds the following functionality to layout qualifiers,
including broadening the API where this functionality is reflected.
The following are added:
1) Use compile-time constant expressions. E.g.,
const int start = 6;
layout(location = start + 2) int vec4 v;
2) Specify explicit byte offsets within a uniform or shader storage block.
For example, if you want two vec4 variables "batman" and "robin" to
appear at byte offsets 0 and 64 in your block, you can say:
uniform Block {
layout(offset = 0) vec4 batman;
layout(offset = 64) vec4 robin;
};
3) Force alignment within a uniform or shader storage block. The previous
example could also be expressed:
uniform Block {
vec4 batman;
layout(align = 64) vec4 robin;
};
This says the member 'robin' must start at the next address that is a
multiple of 64. It allows constructing the same layout in C and in GLSL
without inventing explicit offsets.
Explicit offsets and aligned offsets can be combined:
uniform Block {
vec4 batman;
layout(offset = 44, align = 8) vec4 robin;
};
would make 'robin' be at the first 8-byte aligned address, starting at
44, which is 48. This is more useful when using the *align* at
the block level, which will apply to all members.
4) Specify component numbers to more fully utilize the vec4-slot interfaces
between shader outputs and shader inputs.
For example, you could fit the following
- an array of 32 vec3
- a single float
into the space of 32 vec4 slots using the following code:
// consume X/Y/Z components of 32 vectors
layout(location = 0) in vec3 batman[32];
// consumes W component of first vector
layout(location = 0, component = 3) in float robin;
Further, an array of vec3 and an array of float can be stored
interleaved, using the following.
// consumes W component of 32 vectors
layout(location = 0, component = 3) in float robin[32];
// consume X/Y/Z components of 32 vectors
layout(location = 0) in vec3 batman[32];
5) Specify transform/feedback buffers, locations, and widths. For example:
layout(xfb_buffer = 0, xfb_offset = 0) out vec3 var1;
layout(xfb_buffer = 0, xfb_offset = 24) out vec3 var2;
layout(xfb_buffer = 1, xfb_offset = 0) out vec4 var3;
The second line above says to write var2 out to byte offset 24 of
transform/feedback buffer 0. (When doing this, output are only
captured when xfb_offset is used.)
To specify the total number of bytes per entry in a buffer:
layout(xfb_buffer = 1, xfb_stride = 32) out;
This is necessary if, say, var3 above, which uses bytes 0-11,
does not fully fill the buffer, which in this case takes 32 bytes.
Use of this feature effectively eliminates the need to use previously
existing API commands to describe the transform feedback layout.
6) Allow locations on input and output blocks for SSO interface matching.
For example:
layout(location = 4) in block {
vec4 batman; // gets location 4
vec4 robin; // gets location 5
layout(location = 7) vec4 joker; // gets location 7
vec4 riddler; // location 8
};
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/enhanced_layouts.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.ARB.enhanced_layouts import *
from OpenGL.raw.GL.ARB.enhanced_layouts import _EXTENSION_NAME
def glInitEnhancedLayoutsARB():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
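# Usage sketch (illustrative only): check for the extension before relying on
# the explicit offset/align/component/xfb layout qualifiers described above.
#
#   if not glInitEnhancedLayoutsARB():
#       raise RuntimeError('GL_ARB_enhanced_layouts is not available')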
### END AUTOGENERATED SECTION | gpl-3.0 | 709,087,629,149,364,600 | 33.235772 | 76 | 0.686223 | false | 3.841241 | false | false | false |
ggm/vm-for-transfer | src/vm/interpreter.py | 1 | 18577 | #Copyright (C) 2011 Gabriel Gregori Manzano
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from string import capwords
from constants import VM_STATUS, TRANSFER_STAGE
from instructions import OpCodes
from assemblyloader import AssemblyLoader
from interpretererror import InterpreterError
class Interpreter:
"""Interprets an op code and executes the appropriate instruction."""
def __init__(self, vm):
#Access to the data structures of the vm is needed.
self.vm = vm
self.systemStack = vm.stack
self.callStack = vm.callStack
#Record if the last instruction modified the vm's PC.
self.modifiedPC = False
#Create a dictionary with the opCode as key and its processing method.
self.methods = {}
for attribute in dir(OpCodes):
if not attribute.startswith("__"):
opCode = getattr(OpCodes, attribute)
methodName = "execute"
methodName += capwords(attribute, '_').replace('_', '')
self.methods[opCode] = methodName
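    # Illustrative result of the loop above (assuming OpCodes defines names
    # such as ADDTRIE and CASE_OF): self.methods maps OpCodes.ADDTRIE to
    # "executeAddtrie" and OpCodes.CASE_OF to "executeCaseOf"; execute()
    # later resolves these names with getattr() to dispatch each instruction.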
def raiseError(self, msg):
"""Raise an error to handle it in the main process."""
self.vmStatus = VM_STATUS.FAILED
raise InterpreterError("{}".format(msg))
def modifyPC(self, newPC):
"""Modify the vm's PC and set it as modified for the interpreter."""
self.vm.PC = newPC
self.modifiedPC = True
def preprocess(self):
"""Execute all the code inside the preprocessing code section."""
for instr in self.vm.preprocessCode: self.execute(instr)
def execute(self, instr):
"""Execute a instruction, modifying the vm accordingly."""
opCode = instr[0]
methodName = self.methods[opCode]
if not hasattr(self, methodName):
self.raiseError("Can't find processing method {} for instruction {}"
.format(methodName,
AssemblyLoader.reversedOpCodes[instr[0]]))
else:
method = getattr(self, methodName)
method(instr)
#If the last instruction didn't modify the PC, point it to the next
#instruction. In the other case, keep the modified PC.
if not self.modifiedPC: self.vm.PC += 1
else: self.modifiedPC = False
def getNOperands(self, n):
"""Get n operands from the stack and return them reversed."""
ops = []
for i in range(n): ops.insert(0, self.systemStack.pop())
return ops
def getOperands(self, instr):
"""Get the operands of instr from the stack and return them reversed."""
numOps = int(instr[1])
ops = []
for i in range(numOps): ops.insert(0, self.systemStack.pop())
return ops
def getCase(self, string):
"""Get the case of a string, defaulting to capitals."""
isFirstUpper = string[0].isupper()
isUpper = string.isupper()
#If it's a 1-length string and is upper, capitalize it.
if isUpper and len(string) == 1: return "Aa"
elif isFirstUpper and not isUpper: return "Aa"
elif isUpper: return "AA"
else: return "aa"
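    # Examples of the classification above: "la" -> "aa", "La" -> "Aa",
    # "LA" -> "AA", and a single upper-case letter such as "L" -> "Aa".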
def getSourceLexicalUnit(self, pos):
"""Get a word from the source side for every transfer stage."""
if self.vm.transferStage == TRANSFER_STAGE.CHUNKER:
return self.vm.words[self.vm.currentWords[pos - 1]].source
elif self.vm.transferStage == TRANSFER_STAGE.INTERCHUNK:
return self.vm.words[self.vm.currentWords[pos - 1]].chunk
else:
word = self.vm.words[self.vm.currentWords[0]]
#If it's a macro, get the position passed as a parameter.
if len(self.vm.currentWords) > 1: pos = self.vm.currentWords[pos]
if pos == 0: return word.chunk
else: return word.content[pos - 1]
def getTargetLexicalUnit(self, pos):
"""Get a word from the target side only for the chunker stage."""
return self.vm.words[self.vm.currentWords[pos - 1]].target
def executeAddtrie(self, instr):
#Append N number of patterns.
pattern = []
numberOfPatterns = self.systemStack.pop()
while numberOfPatterns > 0:
pattern.insert(0, self.systemStack.pop().replace("\"", ''))
numberOfPatterns -= 1
#Add the pattern with the rule number to the trie.
ruleNumber = instr[1]
self.vm.trie.addPattern(pattern, ruleNumber)
def executeAnd(self, instr):
#Get all the operands.
ops = self.getOperands(instr)
        #Return false (0) if any operand is false.
for op in ops:
if op == 0:
self.systemStack.push(0)
return
#Else, return true (1).
self.systemStack.push(1)
def executeOr(self, instr):
#Get all the operands.
ops = self.getOperands(instr)
        #Return true (1) if any operand is true.
for op in ops:
if op == 1:
self.systemStack.push(1)
return
#Else, return false (0).
self.systemStack.push(0)
def executeNot(self, instr):
op1 = self.systemStack.pop()
if op1 == 0: self.systemStack.push(1)
elif op1 == 1: self.systemStack.push(0)
def executeAppend(self, instr):
ops = self.getOperands(instr)
string = ""
for op in ops: string += op
varName = self.systemStack.pop()
self.vm.variables[varName] = self.vm.variables[varName] + string
def executeBeginsWith(self, instr):
prefixes = self.systemStack.pop()
word = self.systemStack.pop()
for prefix in prefixes.split("|"):
if word.startswith(prefix):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeBeginsWithIg(self, instr):
prefixes = self.systemStack.pop()
word = self.systemStack.pop().lower()
for prefix in prefixes.split("|"):
if word.startswith(prefix.lower()):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeCall(self, instr):
#Save current PC to return later when the macro ends.
self.callStack.saveCurrentPC()
#Get the words passed as argument to the macro.
ops = self.getNOperands(self.systemStack.pop())
words = []
#For the postchunk append the index of the only current word and then
#append all the parameters.
if self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
words.append(self.vm.currentWords[0])
for op in ops: words.append(op)
#For the rest, just append the index of the current words.
else:
for op in ops: words.append(self.vm.currentWords[op - 1])
#Create an entry in the call stack with the macro called.
macroNumber = int(instr[1])
self.callStack.push("macros", macroNumber, words)
        #Tell the interpreter that the PC has been modified, so it does not
        #increment it after this instruction.
self.modifyPC(self.vm.PC)
def executeRet(self, instr):
#Restore the last code section and its PC.
self.callStack.pop()
def executeClip(self, instr):
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
if len(instr) > 1: linkTo = str(instr[1].replace('"', ''))
else: linkTo = None
lemmaAndTags = lu.attrs['lem'] + lu.attrs['tags']
self.handleClipInstruction(parts, lu, lemmaAndTags, linkTo)
def executeClipsl(self, instr):
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
if len(instr) > 1: linkTo = str(instr[1].replace('"', ''))
else: linkTo = None
self.handleClipInstruction(parts, lu, lu.lu, linkTo)
def executeCliptl(self, instr):
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getTargetLexicalUnit(pos)
if len(instr) > 1: linkTo = str(instr[1].replace('"', ''))
else: linkTo = None
self.handleClipInstruction(parts, lu, lu.lu, linkTo)
def handleClipInstruction(self, parts, lu, lemmaAndTags, linkTo):
if linkTo is None and parts in ("lem", "lemh", "lemq", "tags", "chcontent"):
try:
self.systemStack.push(lu.attrs[parts])
except KeyError:
self.systemStack.push("")
return
elif linkTo is None and parts == "whole":
self.systemStack.push(lu.lu)
return
else:
longestMatch = ""
for part in parts.split('|'):
if part in lemmaAndTags:
if linkTo:
self.systemStack.push(linkTo)
return
else:
if len(part) > len(longestMatch): longestMatch = part
if longestMatch:
self.systemStack.push(longestMatch)
return
#If the lu doesn't have the part needed, return "".
self.systemStack.push("")
def executeCmp(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1 == op2: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeCmpi(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1.lower() == op2.lower(): self.systemStack.push(1)
else: self.systemStack.push(0)
def executeCmpSubstr(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1 in op2: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeCmpiSubstr(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1.lower() in op2.lower(): self.systemStack.push(1)
else: self.systemStack.push(0)
def executeIn(self, instr):
list = self.systemStack.pop()
list = list.split('|')
value = self.systemStack.pop()
if value in list: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeInig(self, instr):
list = self.systemStack.pop()
list = list.split('|')
list = [w.lower() for w in list]
value = self.systemStack.pop()
value = value.lower()
if value in list: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeConcat(self, instr):
ops = self.getOperands(instr)
string = ""
for op in ops: string += op
self.systemStack.push(string)
def executeChunk(self, instr):
ops = self.getOperands(instr)
#If there is only one operand it's the full content of the chunk.
if len(ops) == 1:
chunk = '^' + ops[0] + '$'
else:
name = ops[0]
tags = ops[1]
chunk = '^' + name + tags
if len(ops) > 2:
#Only output enclosing {} in the chunker, in the interchunk the
#'chcontent' will already have the {}.
if self.vm.transferStage == TRANSFER_STAGE.CHUNKER: chunk += '{'
for op in ops[2:]: chunk += op
if self.vm.transferStage == TRANSFER_STAGE.CHUNKER: chunk += '}'
chunk += '$'
self.systemStack.push(chunk)
def executeEndsWith(self, instr):
suffixes = self.systemStack.pop()
word = self.systemStack.pop()
for suffix in suffixes.split("|"):
if word.endswith(suffix):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeEndsWithIg(self, instr):
suffixes = self.systemStack.pop()
word = self.systemStack.pop().lower()
for suffix in suffixes.split("|"):
if word.endswith(suffix.lower()):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeJmp(self, instr):
jumpTo = int(instr[1])
self.modifyPC(jumpTo)
def executeJz(self, instr):
condition = self.systemStack.pop()
if condition == 0:
jumpTo = int(instr[1])
self.modifyPC(jumpTo)
def executeJnz(self, instr):
condition = self.systemStack.pop()
if condition != 0:
jumpTo = int(instr[1])
self.modifyPC(jumpTo)
def executeLu(self, instr):
ops = self.getOperands(instr)
lu = "^"
for op in ops: lu += op
lu += "$"
        #If the lu is empty (i.e. just "^$"), push an empty string.
if len(lu) == 2: self.systemStack.push("")
else: self.systemStack.push(lu)
def executeLuCount(self, instr):
chunk = self.vm.words[self.vm.currentWords[0]]
self.systemStack.push(len(chunk.content))
def executeMlu(self, instr):
ops = self.getOperands(instr)
        #Append the lexical units, removing their enclosing ^...$
mlu = "^" + ops[0][1:-1]
for op in ops[1:]: mlu += "+" + op[1:-1]
mlu += "$"
self.systemStack.push(mlu)
def executeCaseOf(self, instr):
value = self.systemStack.pop()
case = self.getCase(value)
self.systemStack.push(case)
def executeGetCaseFrom(self, instr):
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
lem = lu.attrs['lem']
case = self.getCase(lem)
self.systemStack.push(case)
def executeModifyCase(self, instr):
case = self.systemStack.pop()
container = self.systemStack.pop()
if container != "":
if case == "aa": container = container.lower()
elif case == "Aa": container = container[0].upper() + container[1:]
elif case == "AA": container = container.upper()
self.systemStack.push(container)
def executeOut(self, instr):
ops = self.getOperands(instr)
out = ""
for op in ops: out += op
self.vm.writeOutput(out)
def executePush(self, instr):
#If it's a string, push it without quotes.
if '"' in instr[1]: self.systemStack.push(instr[1].replace('"', ''))
#Push strings containing numbers as int.
elif instr[1].isnumeric(): self.systemStack.push(int(instr[1]))
#If it's a variable reference, eval it and push the value.
else:
varName = instr[1]
try:
self.systemStack.push(self.vm.variables[varName])
except:
self.vm.variables[varName] = ""
self.systemStack.push("")
def executePushbl(self, instr):
self.systemStack.push(" ")
def executePushsb(self, instr):
#The position is relative to the current word(s), so we have to get the
        #actual one. For the postchunk, the relative position is already the
        #actual one, because each chunk stores its blanks in its own content.
relativePos = int(instr[1])
try:
if self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
word = self.vm.words[self.vm.currentWords[0]]
self.systemStack.push(word.blanks[relativePos])
else:
actualPos = relativePos + self.vm.currentWords[0]
self.systemStack.push(self.vm.superblanks[actualPos])
except:
self.systemStack.push("")
def executeStorecl(self, instr):
value = self.systemStack.pop()
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
lemmaAndTags = lu.attrs['lem'] + lu.attrs['tags']
self.handleStoreClipInstruction(parts, lu, lemmaAndTags, value)
def executeStoresl(self, instr):
value = self.systemStack.pop()
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
self.handleStoreClipInstruction(parts, lu, lu.lu, value)
def executeStoretl(self, instr):
value = self.systemStack.pop()
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getTargetLexicalUnit(pos)
self.handleStoreClipInstruction(parts, lu, lu.lu, value)
def handleStoreClipInstruction(self, parts, lu, lemmaAndTags, value):
oldLu = lu.lu
change = False
if parts in ('lem', 'lemh', 'lemq', 'tags'):
lu.modifyAttr(parts, value)
change = True
elif parts == 'chcontent':
lu.modifyAttr(parts, value)
if self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
#If we are in the postchunk stage and change the chunk content
#we need to parse it again, so we can use it as lexical units.
chunkWord = self.vm.words[self.vm.currentWords[0]]
chunkWord.parseChunkContent()
elif parts == 'whole':
lu.modifyAttr(parts, value)
change = True
else:
longestMatch = ""
for part in parts.split('|'):
if part in lemmaAndTags:
if len(part) > len(longestMatch): longestMatch = part
if longestMatch:
lu.modifyTag(longestMatch, value)
change = True
if change and self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
#Update the chunk content when changing a lu inside the chunk.
chunkWord = self.vm.words[self.vm.currentWords[0]]
chunkWord.updateChunkContent(oldLu, lu.lu)
def executeStorev(self, instr):
value = self.systemStack.pop()
varName = self.systemStack.pop()
self.vm.variables[varName] = value
| gpl-2.0 | -2,843,016,054,337,406,000 | 32.961609 | 84 | 0.58847 | false | 3.925824 | false | false | false |
pieleric/odemis | src/odemis/acq/align/spot.py | 2 | 23192 | # -*- coding: utf-8 -*-
"""
Created on 14 Apr 2014
@author: Kimon Tsitsikas
Copyright © 2013-2014 Kimon Tsitsikas, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License version 2 as published by the Free
Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
"""
from __future__ import division
from concurrent.futures._base import CancelledError, CANCELLED, FINISHED, \
RUNNING
import logging
import math
import numpy
from odemis import model
from odemis.acq.align import coordinates, autofocus
from odemis.acq.align.autofocus import AcquireNoBackground, MTD_EXHAUSTIVE
from odemis.dataio import tiff
from odemis.util import executeAsyncTask
from odemis.util.spot import FindCenterCoordinates, GridPoints, MaximaFind, EstimateLatticeConstant
from odemis.util.transform import AffineTransform
import os
from scipy.spatial import cKDTree as KDTree
import threading
import time
ROUGH_MOVE = 1 # Number of max steps to reach the center in rough move
FINE_MOVE = 10 # Number of max steps to reach the center in fine move
FOV_MARGIN = 250 # pixels
# Type of move in order to center the spot
STAGE_MOVE = "Stage move"
BEAM_SHIFT = "Beam shift"
OBJECTIVE_MOVE = "Objective lens move"
def MeasureSNR(image):
# Estimate noise
bl = image.metadata.get(model.MD_BASELINE, 0)
if image.max() < bl * 2:
return 0 # nothing looks like signal
sdn = numpy.std(image[image < (bl * 2)])
ms = numpy.mean(image[image >= (bl * 2)]) - bl
# Guarantee no negative snr
if ms <= 0 or sdn <= 0:
return 0
snr = ms / sdn
return snr
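# Minimal sketch of how MeasureSNR is typically fed (hypothetical values): the
# MD_BASELINE metadata splits the pixels into noise (< 2 * baseline) and
# signal (>= 2 * baseline) populations.
#
#   img = model.DataArray(numpy.array([[100, 105], [400, 420]]),
#                         metadata={model.MD_BASELINE: 100})
#   snr = MeasureSNR(img)  # mean(signal - baseline) / std(noise)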
def AlignSpot(ccd, stage, escan, focus, type=OBJECTIVE_MOVE, dfbkg=None, rng_f=None, logpath=None):
"""
Wrapper for DoAlignSpot. It provides the ability to check the progress of
spot mode procedure or even cancel it.
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
focus (model.Actuator): The optical focus
type (string): Type of move in order to align
dfbkg (model.DataFlow): dataflow of se- or bs- detector for background
subtraction
rng_f (tuple of floats): range to apply Autofocus on if needed
returns (model.ProgressiveFuture): Progress of DoAlignSpot,
whose result() will return:
returns (float): Final distance to the center (m)
"""
# Create ProgressiveFuture and update its state to RUNNING
est_start = time.time() + 0.1
f = model.ProgressiveFuture(start=est_start,
end=est_start + estimateAlignmentTime(ccd.exposureTime.value))
f._task_state = RUNNING
# Task to run
f.task_canceller = _CancelAlignSpot
f._alignment_lock = threading.Lock()
f._done = threading.Event()
# Create autofocus and centerspot module
f._autofocusf = model.InstantaneousFuture()
f._centerspotf = model.InstantaneousFuture()
# Run in separate thread
executeAsyncTask(f, _DoAlignSpot,
args=(f, ccd, stage, escan, focus, type, dfbkg, rng_f, logpath))
return f
def _DoAlignSpot(future, ccd, stage, escan, focus, type, dfbkg, rng_f, logpath):
"""
Adjusts settings until we have a clear and well focused optical spot image,
detects the spot and manipulates the stage so as to move the spot center to
the optical image center. If no spot alignment is achieved an exception is
raised.
future (model.ProgressiveFuture): Progressive future provided by the wrapper
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
focus (model.Actuator): The optical focus
type (string): Type of move in order to align
dfbkg (model.DataFlow): dataflow of se- or bs- detector
rng_f (tuple of floats): range to apply Autofocus on if needed
returns (float): Final distance to the center #m
raises:
CancelledError() if cancelled
IOError
"""
init_binning = ccd.binning.value
init_et = ccd.exposureTime.value
init_cres = ccd.resolution.value
init_scale = escan.scale.value
init_eres = escan.resolution.value
    # TODO: allow passing the precision as an argument. For the Delphi, we don't
    # need such high accuracy on the alignment (it's just for twin stage calibration).
# TODO: take logpath as argument, to store images later on
logging.debug("Starting Spot alignment...")
try:
if future._task_state == CANCELLED:
raise CancelledError()
# Configure CCD and set ebeam to spot mode
logging.debug("Configure CCD and set ebeam to spot mode...")
_set_blanker(escan, False)
ccd.binning.value = ccd.binning.clip((2, 2))
ccd.resolution.value = ccd.resolution.range[1]
ccd.exposureTime.value = 0.3
escan.scale.value = (1, 1)
escan.resolution.value = (1, 1)
if future._task_state == CANCELLED:
raise CancelledError()
logging.debug("Adjust exposure time...")
if dfbkg is None:
# Long exposure time to compensate for no background subtraction
ccd.exposureTime.value = 1.1
else:
# TODO: all this code to decide whether to pick exposure 0.3 or 1.5?
# => KISS! Use always 1s... or allow up to 5s?
# Estimate noise and adjust exposure time based on "Rose criterion"
image = AcquireNoBackground(ccd, dfbkg)
snr = MeasureSNR(image)
while snr < 5 and ccd.exposureTime.value < 1.5:
ccd.exposureTime.value = ccd.exposureTime.value + 0.2
image = AcquireNoBackground(ccd, dfbkg)
snr = MeasureSNR(image)
logging.debug("Using exposure time of %g s", ccd.exposureTime.value)
if logpath:
tiff.export(os.path.join(logpath, "align_spot_init.tiff"), [image])
hqet = ccd.exposureTime.value # exposure time for high-quality (binning == 1x1)
if ccd.binning.value == (2, 2):
hqet *= 4 # To compensate for smaller binning
logging.debug("Trying to find spot...")
for i in range(3):
if future._task_state == CANCELLED:
raise CancelledError()
if i == 0:
future._centerspotf = CenterSpot(ccd, stage, escan, ROUGH_MOVE, type, dfbkg)
dist, vector = future._centerspotf.result()
elif i == 1:
logging.debug("Spot not found, auto-focusing...")
try:
                    # When autofocusing, set binning to 8 if possible, and use the
                    # exhaustive method to be sure not to miss the spot.
ccd.binning.value = ccd.binning.clip((8, 8))
future._autofocusf = autofocus.AutoFocus(ccd, None, focus, dfbkg, rng_focus=rng_f, method=MTD_EXHAUSTIVE)
lens_pos, fm_level = future._autofocusf.result()
# Update progress of the future
future.set_progress(end=time.time() +
estimateAlignmentTime(hqet, dist, 1))
except IOError as ex:
logging.error("Autofocus on spot image failed: %s", ex)
raise IOError('Spot alignment failure. AutoFocus failed.')
logging.debug("Trying again to find spot...")
future._centerspotf = CenterSpot(ccd, stage, escan, ROUGH_MOVE, type, dfbkg)
dist, vector = future._centerspotf.result()
elif i == 2:
if dfbkg is not None:
                    # In some cases background subtraction goes wrong, and makes
# things worse, so try without.
logging.debug("Trying again to find spot, without background subtraction...")
dfbkg = None
future._centerspotf = CenterSpot(ccd, stage, escan, ROUGH_MOVE, type, dfbkg)
dist, vector = future._centerspotf.result()
if dist is not None:
if logpath:
image = AcquireNoBackground(ccd, dfbkg)
tiff.export(os.path.join(logpath, "align_spot_found.tiff"), [image])
break
else:
raise IOError('Spot alignment failure. Spot not found')
ccd.binning.value = (1, 1)
ccd.exposureTime.value = ccd.exposureTime.clip(hqet)
# Update progress of the future
future.set_progress(end=time.time() +
estimateAlignmentTime(hqet, dist, 1))
logging.debug("After rough alignment, spot center is at %s m", vector)
# Limit FoV to save time
logging.debug("Cropping FoV...")
CropFoV(ccd, dfbkg)
if future._task_state == CANCELLED:
raise CancelledError()
# Update progress of the future
future.set_progress(end=time.time() +
estimateAlignmentTime(hqet, dist, 0))
# Center spot
if future._task_state == CANCELLED:
raise CancelledError()
logging.debug("Aligning spot...")
# No need to be so precise with a stage move (eg, on the DELPHI), as the
# stage is quite imprecise anyway and the alignment is further adjusted
# using the beam shift (later).
mx_steps = FINE_MOVE if type != STAGE_MOVE else ROUGH_MOVE
future._centerspotf = CenterSpot(ccd, stage, escan, mx_steps, type, dfbkg, logpath)
dist, vector = future._centerspotf.result()
if dist is None:
raise IOError('Spot alignment failure. Cannot reach the center.')
logging.info("After fine alignment, spot center is at %s m", vector)
return dist, vector
finally:
ccd.binning.value = init_binning
ccd.exposureTime.value = init_et
ccd.resolution.value = init_cres
escan.scale.value = init_scale
escan.resolution.value = init_eres
_set_blanker(escan, True)
with future._alignment_lock:
future._done.set()
if future._task_state == CANCELLED:
raise CancelledError()
future._task_state = FINISHED
def _CancelAlignSpot(future):
"""
Canceller of _DoAlignSpot task.
"""
logging.debug("Cancelling spot alignment...")
with future._alignment_lock:
if future._task_state == FINISHED:
return False
future._task_state = CANCELLED
future._autofocusf.cancel()
future._centerspotf.cancel()
logging.debug("Spot alignment cancelled.")
# Do not return until we are really done (modulo 10 seconds timeout)
future._done.wait(10)
return True
def estimateAlignmentTime(et, dist=None, n_autofocus=2):
"""
Estimates spot alignment procedure duration
et (float): exposure time #s
dist (float): distance from center #m
n_autofocus (int): number of autofocus procedures
returns (float): process estimated time #s
"""
return estimateCenterTime(et, dist) + n_autofocus * autofocus.estimateAutoFocusTime(et) # s
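# Worked example (illustrative numbers): with et = 1 s, dist = 64 um and the
# default two autofocus passes, estimateCenterTime() plans log2(64e-6 / 1e-6)
# = 6 steps, i.e. 6 * (1 + 2) = 18 s, plus 2 * estimateAutoFocusTime(1).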
def _set_blanker(escan, active):
"""
Set the blanker to the given state iif the blanker doesn't support "automatic"
mode (ie, None).
escan (ebeam scanner)
active (bool): True = blanking = no ebeam
"""
try:
if (model.hasVA(escan, "blanker")
and not None in escan.blanker.choices
):
# Note: we assume that this is blocking, until the e-beam is
# ready to acquire an image.
escan.blanker.value = active
except Exception:
logging.exception("Failed to set the blanker to %s", active)
def FindSpot(image, sensitivity_limit=100):
"""
This function detects the spot and calculates and returns the coordinates of
its center. The algorithms for spot detection and center calculation are
similar to the ones that are used in Fine alignment.
image (model.DataArray): Optical image
sensitivity_limit (int): Limit of sensitivity in spot detection
returns (tuple of floats): Position of the spot center in px (from the
left-top corner of the image), possibly with sub-pixel resolution.
raises:
LookupError() if spot was not found
"""
subimages, subimage_coordinates = coordinates.DivideInNeighborhoods(image, (1, 1), 20, sensitivity_limit)
if not subimages:
raise LookupError("No spot detected")
spot_coordinates = [FindCenterCoordinates(i) for i in subimages]
optical_coordinates = coordinates.ReconstructCoordinates(subimage_coordinates, spot_coordinates)
# Too many spots detected
if len(optical_coordinates) > 10:
logging.info("Found %d potential spots on image with data %s -> %s",
len(optical_coordinates), image.min(), image.max())
raise LookupError("Too many spots detected")
# Pick the brightest one
max_intensity = 0
max_pos = optical_coordinates[0]
for i in optical_coordinates:
x, y = int(round(i[1])), int(round(i[0]))
if image[x, y] >= max_intensity:
max_pos = i
max_intensity = image[x, y]
return max_pos
def FindGridSpots(image, repetition):
"""
Find the coordinates of a grid of spots in an image. And find the
corresponding transformation to transform a grid centered around the origin
to the spots in an image.
Parameters
----------
image : array like
Data array containing the greyscale image.
repetition : tuple of ints
Number of expected spots in (X, Y).
Returns
-------
spot_coordinates : array like
A 2D array of shape (N, 2) containing the coordinates of the spots,
in respect to the top left of the image.
translation : tuple of two floats
Translation from the origin to the center of the grid in image space,
origin is top left of the image. Primary axis points right and the
secondary axis points down.
scaling : tuple of two floats
Scaling factors for primary and secondary axis.
rotation : float
Rotation in image space, positive rotation is clockwise.
shear : float
Horizontal shear factor. A positive shear factor transforms a coordinate
in the positive x direction parallel to the x axis.
"""
# Find the center coordinates of the spots in the image.
spot_positions = MaximaFind(image, repetition[0] * repetition[1])
if len(spot_positions) < repetition[0] * repetition[1]:
logging.warning('Not enough spots found, returning only the found spots.')
        return spot_positions, None, None, None, None  # match the 5-value return below
# Estimate the two most common (orthogonal) directions in the grid of spots, defined in the image coordinate system.
lattice_constants = EstimateLatticeConstant(spot_positions)
# Each row in the lattice_constants array corresponds to one direction. By transposing the array the direction
# vectors are on the columns of the array. This allows us to directly use them as a transformation matrix.
transformation_matrix = numpy.transpose(lattice_constants)
# Translation is the mean of the spots, which is the distance from the origin to the center of the grid of spots.
translation = numpy.mean(spot_positions, axis=0)
transform_to_spot_positions = AffineTransform(matrix=transformation_matrix, translation=translation)
# Iterative closest point algorithm - single iteration, to fit a grid to the found spot positions
grid = GridPoints(*repetition)
spot_grid = transform_to_spot_positions(grid)
tree = KDTree(spot_positions)
dd, ii = tree.query(spot_grid, k=1)
# Sort the original spot positions by mapping them to the order of the GridPoints.
pos_sorted = spot_positions[ii.ravel(), :]
# Find the transformation from a grid centered around the origin to the sorted positions.
transformation = AffineTransform.from_pointset(grid, pos_sorted)
spot_coordinates = transformation(grid)
return spot_coordinates, translation, transformation.scale, transformation.rotation, transformation.shear
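# Usage sketch (hypothetical image): locate an 8x8 grid of spots and recover
# the affine transform from a grid centred on the origin to the detected positions.
#
#   spots, translation, scale, rotation, shear = FindGridSpots(image, (8, 8))
#
# When fewer spots than repetition[0] * repetition[1] are detected, only the
# found positions are returned and the transform components are None.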
def CropFoV(ccd, dfbkg=None):
"""
Limit the ccd FoV to just contain the spot, in order to save some time
on AutoFocus process.
ccd (model.DigitalCamera): The CCD
"""
image = AcquireNoBackground(ccd, dfbkg)
center_pxs = ((image.shape[1] / 2),
(image.shape[0] / 2))
try:
spot_pxs = FindSpot(image)
except LookupError:
logging.warning("Couldn't locate spot when cropping CCD image, will use whole FoV")
ccd.binning.value = (1, 1)
ccd.resolution.value = ccd.resolution.range[1]
return
tab_pxs = [a - b for a, b in zip(spot_pxs, center_pxs)]
max_dim = int(max(abs(tab_pxs[0]), abs(tab_pxs[1])))
range_x = (ccd.resolution.range[0][0], ccd.resolution.range[1][0])
range_y = (ccd.resolution.range[0][1], ccd.resolution.range[1][1])
ccd.resolution.value = (sorted((range_x[0], 2 * max_dim + FOV_MARGIN, range_x[1]))[1],
sorted((range_y[0], 2 * max_dim + FOV_MARGIN, range_y[1]))[1])
ccd.binning.value = (1, 1)
def CenterSpot(ccd, stage, escan, mx_steps, type=OBJECTIVE_MOVE, dfbkg=None, logpath=None):
"""
Wrapper for _DoCenterSpot.
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
mx_steps (int): Maximum number of steps to reach the center
type (*_MOVE or BEAM_SHIFT): Type of move in order to align
dfbkg (model.DataFlow or None): If provided, will be used to start/stop
the e-beam emission (it must be the dataflow of se- or bs-detector) in
order to do background subtraction. If None, no background subtraction is
performed.
returns (model.ProgressiveFuture): Progress of _DoCenterSpot,
whose result() will return:
(float): Final distance to the center #m
(2 floats): vector to the spot from the center (m, m)
"""
# Create ProgressiveFuture and update its state to RUNNING
est_start = time.time() + 0.1
f = model.ProgressiveFuture(start=est_start,
end=est_start + estimateCenterTime(ccd.exposureTime.value))
f._spot_center_state = RUNNING
f.task_canceller = _CancelCenterSpot
f._center_lock = threading.Lock()
# Run in separate thread
executeAsyncTask(f, _DoCenterSpot,
args=(f, ccd, stage, escan, mx_steps, type, dfbkg, logpath))
return f
def _DoCenterSpot(future, ccd, stage, escan, mx_steps, type, dfbkg, logpath):
"""
Iteratively acquires an optical image, finds the coordinates of the spot
(center) and moves the stage to this position. Repeats until the found
coordinates are at the center of the optical image or a maximum number of
steps is reached.
future (model.ProgressiveFuture): Progressive future provided by the wrapper
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
mx_steps (int): Maximum number of steps to reach the center
type (*_MOVE or BEAM_SHIFT): Type of move in order to align
dfbkg (model.DataFlow or None): If provided, will be used to start/stop
    the e-beam emission (it must be the dataflow of se- or bs-detector) in
order to do background subtraction. If None, no background subtraction is
performed.
returns (float or None): Final distance to the center (m)
(2 floats): vector to the spot from the center (m, m)
raises:
CancelledError() if cancelled
"""
try:
logging.debug("Aligning spot...")
steps = 0
# Stop once spot is found on the center of the optical image
dist = None
while True:
if future._spot_center_state == CANCELLED:
raise CancelledError()
# Wait to make sure no previous spot is detected
image = AcquireNoBackground(ccd, dfbkg)
if logpath:
tiff.export(os.path.join(logpath, "center_spot_%d.tiff" % (steps,)), [image])
try:
spot_pxs = FindSpot(image)
except LookupError:
return None, None
# Center of optical image
pixelSize = image.metadata[model.MD_PIXEL_SIZE]
center_pxs = (image.shape[1] / 2, image.shape[0] / 2)
# Epsilon distance below which the lens is considered centered. The worse of:
# * 1.5 pixels (because the CCD resolution cannot give us better)
# * 1 µm (because that's the best resolution of our actuators)
err_mrg = max(1.5 * pixelSize[0], 1e-06) # m
tab_pxs = [a - b for a, b in zip(spot_pxs, center_pxs)]
tab = (tab_pxs[0] * pixelSize[0], tab_pxs[1] * pixelSize[1])
logging.debug("Found spot @ %s px", spot_pxs)
# Stop if spot near the center or max number of steps is reached
dist = math.hypot(*tab)
if steps >= mx_steps or dist <= err_mrg:
break
# Move to the found spot
if type == OBJECTIVE_MOVE:
f = stage.moveRel({"x": tab[0], "y":-tab[1]})
f.result()
elif type == STAGE_MOVE:
f = stage.moveRel({"x":-tab[0], "y": tab[1]})
f.result()
else:
escan.translation.value = (-tab_pxs[0], -tab_pxs[1])
steps += 1
# Update progress of the future
future.set_progress(end=time.time() +
estimateCenterTime(ccd.exposureTime.value, dist))
return dist, tab
finally:
with future._center_lock:
if future._spot_center_state == CANCELLED:
raise CancelledError()
future._spot_center_state = FINISHED
def _CancelCenterSpot(future):
"""
Canceller of _DoCenterSpot task.
"""
logging.debug("Cancelling spot center...")
with future._center_lock:
if future._spot_center_state == FINISHED:
return False
future._spot_center_state = CANCELLED
logging.debug("Spot center cancelled.")
return True
def estimateCenterTime(et, dist=None):
"""
Estimates duration of reaching the center
"""
if dist is None:
steps = FINE_MOVE
else:
err_mrg = 1e-06
steps = math.log(dist / err_mrg) / math.log(2)
steps = min(steps, FINE_MOVE)
return steps * (et + 2) # s
| gpl-2.0 | 2,529,318,862,185,431,000 | 40.044248 | 125 | 0.633376 | false | 3.864356 | false | false | false |
jonatanolofsson/req | req/filesystem.py | 1 | 1429 | """
Filesystem related methods
"""
import os
import yaml
from . import command as subprocess
from . import util
GIT = ['git']
REQCONF_FILE = '.reqconfig'
def read_file(path, ref=None):
"""
Read file from filesystem or git tree
"""
def _load_file_from_fs():
"""
Read and parse file from filesystem
"""
with open(path) as file_:
return file_.read()
def _load_file_from_git():
"""
Load file from git tree
"""
blob_sha1 = subprocess.get_output(
GIT + ['ls-tree', ref, path]
).split()[2]
return subprocess.get_output(
GIT + ['cat-file', 'blob', blob_sha1]
)
return _load_file_from_git() if ref else _load_file_from_fs()
def load_yamlfile(reqfile, ref=None, multiple=False):
"""
Load requirement file
"""
data = read_file(reqfile, ref)
return yaml.load_all(data) if multiple else yaml.load(data)
def reqroot():
"""
Get .req-dir
"""
def _find_reqroot():
"""
Find reqroot
"""
for dir_ in util.walkup(os.getcwd()):
if os.path.exists(os.path.join(dir_, REQCONF_FILE)):
return dir_
raise Exception("Not inside req directory")
cwd = os.getcwd()
if cwd not in reqroot.cache:
reqroot.cache[cwd] = _find_reqroot()
return reqroot.cache[cwd]
reqroot.cache = {}
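# Illustrative behaviour (hypothetical layout): with /home/user/project/.reqconfig
# present, calling reqroot() from any subdirectory of /home/user/project returns
# that directory; the result is cached per working directory in reqroot.cache.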
| gpl-3.0 | -1,169,414,203,491,116,800 | 20.651515 | 65 | 0.555633 | false | 3.599496 | false | false | false |
googleapis/python-aiplatform | samples/model-builder/init_sample.py | 1 | 1303 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from google.auth import credentials as auth_credentials
from google.cloud import aiplatform
# [START aiplatform_sdk_init_sample]
def init_sample(
project: Optional[str] = None,
location: Optional[str] = None,
experiment: Optional[str] = None,
staging_bucket: Optional[str] = None,
credentials: Optional[auth_credentials.Credentials] = None,
encryption_spec_key_name: Optional[str] = None,
):
aiplatform.init(
project=project,
location=location,
experiment=experiment,
staging_bucket=staging_bucket,
credentials=credentials,
encryption_spec_key_name=encryption_spec_key_name,
)
# [END aiplatform_sdk_init_sample]
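# Usage sketch (hypothetical project and region):
#
#   init_sample(project="my-project", location="us-central1",
#               staging_bucket="gs://my-staging-bucket")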
| apache-2.0 | 6,944,361,281,353,380,000 | 31.575 | 74 | 0.726017 | false | 3.877976 | false | false | false |
eieio/pyy | modules/http.py | 1 | 1883 | ''' http.py
Copyright 2008 Corey Tabaka
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from bot import MessageHandler, BotCommandHandler
from admin import trusted
from threaded import threaded
import re, urllib
_reURL = re.compile('.*?((?:(https?)://|(?=www\.))[\w:#@%/;$()~_?\+-=\\\.&]+).*?', re.I)
_reTitle = re.compile('<title>(.*?)</title>', re.I | re.M)
_enabled = True
@BotCommandHandler('http')
@trusted
def _http(context):
'''Usage: http [enable|disable]\nEnables or disables URL titles; no param returns state'''
m = re.match('\s*(enable|disable)\s*', context.args or '', re.I)
if m:
op, = m.groups()
global _enabled
_enabled = op.lower() == 'enable'
elif not (context.args or ''):
context.reply('http titles %s' % ['DISABLED', 'ENABLED'][_enabled])
else:
context.reply('Usage: http [enable|disable]')
@MessageHandler
@threaded
def _handler(context):
m = _reURL.match(context.message)
if _enabled and m:
address, proto = m.groups()
proto = (proto or '').lower()
if not proto:
address = 'http://' + address
		if proto in ('http', 'https', ''):  # '' covers schemeless www. URLs handled above
			title = ''
			fin = urllib.urlopen(address)
if fin.headers.gettype() == 'text/html':
title = ' '.join(_reTitle.findall(fin.read(4096))).strip()
fin.close()
if title:
context.reply('Title: ' + title)
| apache-2.0 | -550,603,477,425,224,450 | 28.370968 | 91 | 0.640998 | false | 3.405063 | false | false | false |
ceibal-tatu/sugar | src/jarabe/util/emulator.py | 4 | 6072 | # Copyright (C) 2006-2008, Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import signal
import subprocess
import sys
import time
from optparse import OptionParser
from gettext import gettext as _
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
from sugar3 import env
ERROR_NO_DISPLAY = 30
ERROR_NO_SERVER = 31
default_dimensions = (800, 600)
def _run_xephyr(display, dpi, dimensions, fullscreen):
cmd = ['Xephyr']
cmd.append(':%d' % display)
cmd.append('-ac')
cmd += ['-title', _('Sugar in a window')]
screen_size = (Gdk.Screen.width(), Gdk.Screen.height())
if (not dimensions) and (fullscreen is None) and \
(screen_size <= default_dimensions):
# no forced settings, screen too small => fit screen
fullscreen = True
elif not dimensions:
# screen is big enough or user has en/disabled fullscreen manually
# => use default size (will get ignored for fullscreen)
dimensions = '%dx%d' % default_dimensions
if not dpi:
dpi = Gtk.Settings.get_default().get_property('gtk-xft-dpi') / 1024
if fullscreen:
cmd.append('-fullscreen')
if dimensions:
cmd.append('-screen')
cmd.append(dimensions)
if dpi:
cmd.append('-dpi')
cmd.append('%d' % dpi)
cmd.append('-noreset')
try:
pipe = subprocess.Popen(cmd)
except OSError, exc:
sys.stderr.write('Error executing server: %s\n' % (exc, ))
return None
return pipe
def _check_server(display):
result = subprocess.call(['xdpyinfo', '-display', ':%d' % display],
stdout=open(os.devnull, 'w'),
stderr=open(os.devnull, 'w'))
return result == 0
def _kill_pipe(pipe):
"""Terminate and wait for child process."""
try:
os.kill(pipe.pid, signal.SIGTERM)
except OSError:
pass
pipe.wait()
def _start_xephyr(dpi, dimensions, fullscreen):
for display in range(30, 40):
if not _check_server(display):
pipe = _run_xephyr(display, dpi, dimensions, fullscreen)
if pipe is None:
return None, None
for i_ in range(10):
if _check_server(display):
return pipe, display
time.sleep(0.1)
_kill_pipe(pipe)
return None, None
def _start_window_manager():
cmd = ['metacity']
cmd.extend(['--no-force-fullscreen'])
GObject.spawn_async(cmd, flags=GObject.SPAWN_SEARCH_PATH)
def _setup_env(display, scaling, emulator_pid):
    # We need to remove the environment variables related to gnome-keyring-daemon,
# so a new instance of gnome-keyring-daemon can be started and
# registered properly.
for variable in ['GPG_AGENT_INFO', 'SSH_AUTH_SOCK',
'GNOME_KEYRING_CONTROL', 'GNOME_KEYRING_PID']:
if variable in os.environ:
del os.environ[variable]
os.environ['SUGAR_EMULATOR'] = 'yes'
os.environ['GABBLE_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'telepathy-gabble.log')
os.environ['SALUT_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'telepathy-salut.log')
os.environ['MC_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'mission-control.log')
os.environ['STREAM_ENGINE_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'telepathy-stream-engine.log')
os.environ['DISPLAY'] = ':%d' % (display)
os.environ['SUGAR_EMULATOR_PID'] = emulator_pid
os.environ['MC_ACCOUNT_DIR'] = os.path.join(
env.get_profile_path(), 'accounts')
if scaling:
os.environ['SUGAR_SCALING'] = scaling
def main():
"""Script-level operations"""
parser = OptionParser()
parser.add_option('-d', '--dpi', dest='dpi', type='int',
help='Emulator dpi')
parser.add_option('-s', '--scaling', dest='scaling',
help='Sugar scaling in %')
parser.add_option('-i', '--dimensions', dest='dimensions',
help='Emulator dimensions (ex. 1200x900)')
parser.add_option('-f', '--fullscreen', dest='fullscreen',
action='store_true', default=None,
help='Run emulator in fullscreen mode')
parser.add_option('-F', '--no-fullscreen', dest='fullscreen',
action='store_false',
help='Do not run emulator in fullscreen mode')
(options, args) = parser.parse_args()
if not os.environ.get('DISPLAY'):
sys.stderr.write('DISPLAY not set, cannot connect to host X server.\n')
return ERROR_NO_DISPLAY
server, display = _start_xephyr(options.dpi, options.dimensions,
options.fullscreen)
if server is None:
sys.stderr.write('Failed to start server. Please check output above'
' for any error message.\n')
return ERROR_NO_SERVER
_setup_env(display, options.scaling, str(server.pid))
command = ['dbus-launch', '--exit-with-session']
if not args:
command.append('sugar')
else:
_start_window_manager()
if args[0].endswith('.py'):
command.append('python')
command.append(args[0])
subprocess.call(command)
_kill_pipe(server)
| gpl-2.0 | -6,252,993,579,261,596,000 | 30.298969 | 79 | 0.612813 | false | 3.780822 | false | false | false |
google/makani | gs/monitor2/apps/plugins/layouts/crosswind_template.py | 1 | 3851 | # Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Layout to monitor crosswind flight status."""
from makani.control import system_params
from makani.gs.monitor import monitor_params
from makani.gs.monitor2.apps.layout import base
from makani.gs.monitor2.apps.plugins import common
from makani.gs.monitor2.apps.plugins.indicators import control
from makani.gs.monitor2.apps.plugins.indicators import ground_station
from makani.gs.monitor2.apps.plugins.indicators import motor
MONITOR_PARAMS = monitor_params.GetMonitorParams().contents
SYSTEM_PARAMS = system_params.GetSystemParams().contents
class CrosswindLayout(base.BaseLayout):
"""The crosswind layout."""
_NAME = 'Crosswind'
_DESIRED_VIEW_COLS = 12
_ORDER_HORIZONTALLY = False
# Derived class should set the _MODE.
_MODE = '<unset>'
def Initialize(self):
self._AddIndicators('Indicators', [
control.FlightPlanIndicator(),
control.FlightModeIndicator(self._MODE),
control.FlightModeGatesIndicator(self._MODE),
control.ControlTimeIndicator(self._MODE),
control.LoopCountIndicator(self._MODE),
control.TetherSphereDeviationIndicator(self._MODE),
control.CrosswindPlaybookIndicator(),
control.AlphaErrorIndicator(),
control.BetaErrorIndicator(),
control.AirspeedErrorIndicator(),
], properties={'cols': 2})
self._AddIndicators('Altitude', [
control.AltitudeChart(
self._MODE, panel_ratio=0.26, aspect_ratio=1.8, num_yticks=5,
ylim=[-20, 500]),
ground_station.DetwistErrorChart(
num_yticks=5, aspect_ratio=1.5),
], properties={'cols': 2})
self._AddIndicators('', [
motor.StackBusPowerChart(
self._MODE, 'Gen. Wing Power', num_yticks=5,
aspect_ratio=2.5, ylim=[-1000, 1000]),
ground_station.WindIndicator(),
], properties={'cols': 3})
widget_kwargs = {
'panel_ratio': 0.17,
'aspect_ratio': 7.5,
'num_yticks': 7,
}
max_tension_kn = round(MONITOR_PARAMS.tether.proof_load / 1e3)
self._AddIndicators('Charts', [
control.TensionChart(self._MODE, ylim=[0.0, max_tension_kn],
**widget_kwargs),
control.AeroAnglesChart(self._MODE, ylim=[-10, 10], **widget_kwargs),
control.AirSpeedChart(self._MODE, ylim=[0, 80], **widget_kwargs),
control.BodyRatesChart(self._MODE, ylim=(-15.0, 15.0),
angles=['Pitch', 'Roll'], **widget_kwargs),
control.CrosswindDeltasChart(
self._MODE, ylim=[-15, 15], **widget_kwargs),
], properties={'cols': 8})
self._AddBreak()
self._AddIndicators('Flight Circle', [
# TODO: Use full comms mode currently, because target
# path radius is not in TetherDown yet.
control.CrosswindCircleWindow(common.FULL_COMMS_MODE),
], properties={'cols': 2})
self._AddIndicators('Aero angles', [
control.AeroAnglesXYPlot(self._MODE),
], properties={'cols': 2})
self._AddIndicators('Trans-in Trajectory', [
control.TransInTrajectoryChart(self._MODE),
], properties={'cols': 2})
self._AddIndicators('Crosswind Stats', [
control.LowBoundLoopAltitudeIndicator(self._MODE),
], properties={'cols': 2})
| apache-2.0 | 6,053,131,682,668,815,000 | 36.38835 | 77 | 0.666061 | false | 3.720773 | false | false | false |
mmottahedi/neuralnilm_prototype | scripts/e259.py | 2 | 5253 | from __future__ import print_function, division
import matplotlib
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import Net, RealApplianceSource, BLSTMLayer, DimshuffleLayer
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import crossentropy, mse
from lasagne.init import Uniform, Normal
from lasagne.layers import LSTMLayer, DenseLayer, Conv1DLayer, ReshapeLayer, FeaturePoolLayer
from lasagne.updates import nesterov_momentum
from functools import partial
import os
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment
from neuralnilm.net import TrainingError
import __main__
from copy import deepcopy
from math import sqrt
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 250
GRADIENT_STEPS = 100
"""
e233
based on e131c but with:
* lag=32
* pool
e234
* init final layer and conv layer
235
no lag
236
should be exactly as 131c: no pool, no lag, no init for final and conv layer
237
putting the pool back
238
seems pooling hurts us! disable pooling.
enable lag = 32
239
BLSTM
lag = 20
240
LSTM not BLSTM
various lags
241
output is prediction
252
attempt to predict fdiff 1 sample ahead. Unfair?
253
regurgitate fdiff from 1 sample ago
254
lag of 10 time steps
255
lag of 5 time steps
257
slowly increasing lag
258
output is different appliances diff
259
start off just trying to regurgitate diff of aggregate
then swap to disaggregation (to diff)
"""
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
],
max_appliance_powers=[2500]*5,
on_power_thresholds=[5] * 5,
max_input_power=5900,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=1500,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
# skip_probability=0.0,
n_seq_per_batch=50,
# subsample_target=5,
include_diff=True,
include_power=False,
clip_appliance_power=True,
target_is_prediction=True,
lag=1,
target_is_diff=True
)
def change_learning_rate(net, epoch, learning_rate):
net.updates = partial(nesterov_momentum, learning_rate=learning_rate)
net.compile()
def change_lag(net, epoch, lag):
net.source.lag = lag
net.compile()
from theano.ifelse import ifelse
import theano.tensor as T
THRESHOLD = 0
def scaled_cost(x, t):
sq_error = (x - t) ** 2
def mask_and_mean_sq_error(mask):
masked_sq_error = sq_error[mask.nonzero()]
mean = masked_sq_error.mean()
mean = ifelse(T.isnan(mean), 0.0, mean)
return mean
above_thresh_mean = mask_and_mean_sq_error(t > THRESHOLD)
below_thresh_mean = mask_and_mean_sq_error(t <= THRESHOLD)
return (above_thresh_mean + below_thresh_mean) / 2.0
def new_source(net, epoch):
source_dict_copy = deepcopy(source_dict)
source_dict_copy['target_is_prediction'] = False
net.source = RealApplianceSource(**source_dict_copy)
net.generate_validation_data_and_set_shapes()
net.loss_function = scaled_cost
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
loss_function=mse,
updates=partial(nesterov_momentum, learning_rate=0.1),
layers_config=[
{
'type': LSTMLayer,
'num_units': 50,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False,
'W_in_to_cell': Normal(std=1.)
}
],
layer_changes={
1001: {
'remove_from': -3,
'callback': new_source,
'new_layers': [
{
'type': DenseLayer,
'num_units': 5,
'nonlinearity': None,
'W': Normal(std=(1/sqrt(50)))
}
]
}
}
)
def exp_x(name):
global source
    source = RealApplianceSource(**source_dict)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].append(
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': None,
'W': Normal(std=(1/sqrt(50)))
}
)
net = Net(**net_dict_copy)
return net
def main():
experiment = 'a'
full_exp_name = NAME + experiment
path = os.path.join(PATH, full_exp_name)
print("***********************************")
print("Preparing", full_exp_name, "...")
try:
net = exp_x(full_exp_name)
run_experiment(net, path, epochs=None)
except KeyboardInterrupt:
return
except TrainingError as exception:
print("EXCEPTION:", exception)
except Exception as exception:
print("EXCEPTION:", exception)
if __name__ == "__main__":
main()
| mit | 8,540,140,778,415,116,000 | 24.133971 | 93 | 0.63221 | false | 3.358696 | false | false | false |
d2emon/newspaperizer | src/world/migrations/0002_auto_20160919_1719.py | 1 | 1245 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-19 17:19
from __future__ import unicode_literals
import django.core.files.storage
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('world', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='world',
name='description',
),
migrations.AddField(
model_name='world',
name='rating',
field=models.PositiveIntegerField(default=80, verbose_name='Rating'),
),
migrations.AlterField(
model_name='world',
name='image',
field=models.ImageField(storage=django.core.files.storage.FileSystemStorage(base_url='/web/worlds/__attach/', location='../media/worlds/__attach/'), upload_to='', verbose_name='Image'),
),
migrations.AlterField(
model_name='world',
name='slug',
field=models.SlugField(unique=True, verbose_name='Slug'),
),
migrations.AlterField(
model_name='world',
name='title',
field=models.CharField(max_length=255, verbose_name='Title'),
),
]
| gpl-3.0 | 1,082,585,476,274,517,900 | 30.125 | 197 | 0.5751 | false | 4.353147 | false | false | false |
Jauntbox/hydro | src/sample_ffts.py | 1 | 1589 | #The purpose of this program is to quickly evaluate and test different DFT routines in Python for comparison to the slightly more *ahem* arcane implementation of FFTW
import scipy
import math
import numpy as np
import matplotlib.pyplot as pyplot
size = 20 #Array size for functions
def main():
f = []
print np.arange(size)
for i in np.arange(size):
        #f.append(5) #Constant function
#f.append(math.sin(2*math.pi*i/size)) #Single-frequency sine wave
f.append(math.sin(2*math.pi*i/size) + math.sin(10*math.pi*i/size)) #Multiple sine waves
#pyplot.plot(2*math.pi*np.arange(size)/size, f)
pyplot.plot(np.arange(size), f)
pyplot.show()
npf = np.array(f)
print npf
npf_fft = np.fft.fft(npf)
print npf_fft
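    # Editor's note (added for clarity, not in the original script): with size = 20 the two
    # sine terms above complete 1 and 5 cycles over the window, so the complex spectrum
    # printed here should concentrate its energy in bins 1 and 5 (plus the conjugate-
    # symmetric bins 19 and 15), with everything else numerically close to zero.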
#pyplot.plot(2*math.pi*np.arange(size)/size, np.imag(npf_fft), 'b')
#pyplot.plot(2*math.pi*np.arange(size)/size, np.real(npf_fft), 'r')
#pyplot.plot(2*math.pi*np.arange(size)/size, np.abs(npf_fft), 'k')
pyplot.plot(np.arange(size), np.imag(npf_fft), 'b')
pyplot.plot(np.arange(size), np.real(npf_fft), 'r')
pyplot.plot(np.arange(size), np.abs(npf_fft), 'k')
pyplot.show()
npf_fft_ifft = np.fft.ifft(npf_fft)
print npf_fft_ifft
#pyplot.plot(2*math.pi*np.arange(size)/size, np.real(npf), 'b')
#pyplot.plot(2*math.pi*np.arange(size)/size, np.real(npf_fft_ifft), 'r')
pyplot.plot(np.arange(size), np.real(npf), 'b')
pyplot.plot(np.arange(size), np.real(npf_fft_ifft), 'r')
pyplot.show()
if __name__ == '__main__':
main() | gpl-2.0 | 983,974,798,390,252,000 | 35.976744 | 166 | 0.629956 | false | 2.69322 | false | false | false |
Aliced3645/DataCenterMarketing | impacket/build/lib.linux-i686-2.7/impacket/smb.py | 3 | 204090 | # Copyright (c) 2003-2012 CORE Security Technologies)
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# $Id: smb.py 602 2012-07-12 16:22:04Z [email protected] $
#
# Copyright (C) 2001 Michael Teo <[email protected]>
# smb.py - SMB/CIFS library
#
# This software is provided 'as-is', without any express or implied warranty.
# In no event will the author be held liable for any damages arising from the
# use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
#
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
#
# 3. This notice cannot be removed or altered from any source distribution.
#
# Altered source done by Alberto Solino
# Todo:
# [ ] Try [SMB]transport fragmentation using Transact requests
# [ ] Try other methods of doing write (write_raw, transact2, write, write_and_unlock, write_and_close, write_mpx)
# [-] Try replacements for SMB_COM_NT_CREATE_ANDX (CREATE, T_TRANSACT_CREATE, OPEN_ANDX work)
# [x] Fix forceWriteAndx, which needs to send a RecvRequest, because recv() will not send it
# [x] Fix Recv() when using RecvAndx and the answer comes split in several packets
# [ ] Try [SMB]transport fragmentation with overlapping segments
# [ ] Try [SMB]transport fragmentation with out of order segments
# [x] Do chained AndX requests
# [ ] Transform the rest of the calls to structure
# [ ] Implement TRANS/TRANS2 reassembly for list_path
import os, sys, socket, string, re, select, errno
import nmb
import types
from binascii import a2b_hex
import ntlm
import random
import datetime, time
from random import randint
from struct import *
from dcerpc import samr
import struct
from structure import Structure
from contextlib import contextmanager
# For signing
import hashlib
unicode_support = 0
unicode_convert = 1
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Shared Device Type
SHARED_DISK = 0x00
SHARED_DISK_HIDDEN = 0x80000000
SHARED_PRINT_QUEUE = 0x01
SHARED_DEVICE = 0x02
SHARED_IPC = 0x03
# Extended attributes mask
ATTR_ARCHIVE = 0x020
ATTR_COMPRESSED = 0x800
ATTR_NORMAL = 0x080
ATTR_HIDDEN = 0x002
ATTR_READONLY = 0x001
ATTR_TEMPORARY = 0x100
ATTR_DIRECTORY = 0x010
ATTR_SYSTEM = 0x004
# Service Type
SERVICE_DISK = 'A:'
SERVICE_PRINTER = 'LPT1:'
SERVICE_IPC = 'IPC'
SERVICE_COMM = 'COMM'
SERVICE_ANY = '?????'
# Server Type (Can be used to mask with SMBMachine.get_type() or SMBDomain.get_type())
SV_TYPE_WORKSTATION = 0x00000001
SV_TYPE_SERVER = 0x00000002
SV_TYPE_SQLSERVER = 0x00000004
SV_TYPE_DOMAIN_CTRL = 0x00000008
SV_TYPE_DOMAIN_BAKCTRL = 0x00000010
SV_TYPE_TIME_SOURCE = 0x00000020
SV_TYPE_AFP = 0x00000040
SV_TYPE_NOVELL = 0x00000080
SV_TYPE_DOMAIN_MEMBER = 0x00000100
SV_TYPE_PRINTQ_SERVER = 0x00000200
SV_TYPE_DIALIN_SERVER = 0x00000400
SV_TYPE_XENIX_SERVER = 0x00000800
SV_TYPE_NT = 0x00001000
SV_TYPE_WFW = 0x00002000
SV_TYPE_SERVER_NT = 0x00004000
SV_TYPE_POTENTIAL_BROWSER = 0x00010000
SV_TYPE_BACKUP_BROWSER = 0x00020000
SV_TYPE_MASTER_BROWSER = 0x00040000
SV_TYPE_DOMAIN_MASTER = 0x00080000
SV_TYPE_LOCAL_LIST_ONLY = 0x40000000
SV_TYPE_DOMAIN_ENUM = 0x80000000
# Options values for SMB.stor_file and SMB.retr_file
SMB_O_CREAT = 0x10 # Create the file if file does not exists. Otherwise, operation fails.
SMB_O_EXCL = 0x00 # When used with SMB_O_CREAT, operation fails if file exists. Cannot be used with SMB_O_OPEN.
SMB_O_OPEN = 0x01 # Open the file if the file exists
SMB_O_TRUNC = 0x02 # Truncate the file if the file exists
# Share Access Mode
SMB_SHARE_COMPAT = 0x00
SMB_SHARE_DENY_EXCL = 0x10
SMB_SHARE_DENY_WRITE = 0x20
SMB_SHARE_DENY_READEXEC = 0x30
SMB_SHARE_DENY_NONE = 0x40
SMB_ACCESS_READ = 0x00
SMB_ACCESS_WRITE = 0x01
SMB_ACCESS_READWRITE = 0x02
SMB_ACCESS_EXEC = 0x03
TRANS_DISCONNECT_TID = 1
TRANS_NO_RESPONSE = 2
STATUS_SUCCESS = 0x00000000
STATUS_LOGON_FAILURE = 0xC000006D
STATUS_LOGON_TYPE_NOT_GRANTED = 0xC000015B
MAX_TFRAG_SIZE = 5840
EVASION_NONE = 0
EVASION_LOW = 1
EVASION_HIGH = 2
EVASION_MAX = 3
RPC_X_BAD_STUB_DATA = 0x6F7
# SMB_FILE_ATTRIBUTES
SMB_FILE_ATTRIBUTE_NORMAL = 0x0000
SMB_FILE_ATTRIBUTE_READONLY = 0x0001
SMB_FILE_ATTRIBUTE_HIDDEN = 0x0002
SMB_FILE_ATTRIBUTE_SYSTEM = 0x0004
SMB_FILE_ATTRIBUTE_VOLUME = 0x0008
SMB_FILE_ATTRIBUTE_DIRECORY = 0x0010
SMB_FILE_ATTRIBUTE_ARCHIVE = 0x0020
SMB_SEARCH_ATTRIBUTE_READONLY = 0x0100
SMB_SEARCH_ATTRIBUTE_HIDDEN = 0x0200
SMB_SEARCH_ATTRIBUTE_SYSTEM = 0x0400
SMB_SEARCH_ATTRIBUTE_DIRECTORY = 0x1000
SMB_SEARCH_ATTRIBUTE_ARCHIVE = 0x2000
# Session SetupAndX Action flags
SMB_SETUP_GUEST = 0x01
SMB_SETUP_USE_LANMAN_KEY = 0x02
# QUERY_INFORMATION levels
SMB_INFO_ALLOCATION = 0x0001
SMB_INFO_VOLUME = 0x0002
SMB_QUERY_FS_VOLUME_INFO = 0x0102
SMB_QUERY_FS_SIZE_INFO = 0x0103
SMB_QUERY_FILE_EA_INFO = 0x0103
SMB_QUERY_FS_DEVICE_INFO = 0x0104
SMB_QUERY_FS_ATTRIBUTE_INFO = 0x0105
SMB_QUERY_FILE_BASIC_INFO = 0x0101
SMB_QUERY_FILE_STANDARD_INFO = 0x0102
SMB_QUERY_FILE_ALL_INFO = 0x0107
# SET_INFORMATION levels
SMB_SET_FILE_DISPOSITION_INFO = 0x0102
SMB_SET_FILE_BASIC_INFO = 0x0101
SMB_SET_FILE_END_OF_FILE_INFO = 0x0104
# File System Attributes
FILE_CASE_SENSITIVE_SEARCH = 0x00000001
FILE_CASE_PRESERVED_NAMES = 0x00000002
FILE_UNICODE_ON_DISK = 0x00000004
FILE_PERSISTENT_ACLS = 0x00000008
FILE_FILE_COMPRESSION = 0x00000010
FILE_VOLUME_IS_COMPRESSED = 0x00008000
# FIND_FIRST2 flags and levels
SMB_FIND_CLOSE_AFTER_REQUEST = 0x0001
SMB_FIND_CLOSE_AT_EOS = 0x0002
SMB_FIND_RETURN_RESUME_KEYS = 0x0004
SMB_FIND_CONTINUE_FROM_LAST = 0x0008
SMB_FIND_WITH_BACKUP_INTENT = 0x0010
FILE_DIRECTORY_FILE = 0x00000001
FILE_DELETE_ON_CLOSE = 0x00001000
FILE_NON_DIRECTORY_FILE = 0x00000040
SMB_FIND_INFO_STANDARD = 0x0001
SMB_FIND_FILE_DIRECTORY_INFO = 0x0101
SMB_FIND_FILE_FULL_DIRECTORY_INFO= 0x0102
SMB_FIND_FILE_NAMES_INFO = 0x0103
SMB_FIND_FILE_BOTH_DIRECTORY_INFO= 0x0104
SMB_FIND_FILE_ID_FULL_DIRECTORY_INFO = 0x105
SMB_FIND_FILE_ID_BOTH_DIRECTORY_INFO = 0x106
# DesiredAccess flags
FILE_READ_DATA = 0x00000001
FILE_WRITE_DATA = 0x00000002
FILE_APPEND_DATA = 0x00000004
FILE_EXECUTE = 0x00000020
MAXIMUM_ALLOWED = 0200000000
GENERIC_ALL = 0x10000000
GENERIC_EXECUTE = 0x20000000
GENERIC_WRITE = 0x40000000
GENERIC_READ = 0x80000000
# ShareAccess flags
FILE_SHARE_NONE = 0x00000000
FILE_SHARE_READ = 0x00000001
FILE_SHARE_WRITE = 0x00000002
FILE_SHARE_DELETE = 0x00000004
# CreateDisposition flags
FILE_SUPERSEDE = 0x00000000
FILE_OPEN = 0x00000001
FILE_CREATE = 0x00000002
FILE_OPEN_IF = 0x00000003
FILE_OVERWRITE = 0x00000004
FILE_OVERWRITE_IF = 0x00000005
############### GSS Stuff ################
GSS_API_SPNEGO_UUID = '\x2b\x06\x01\x05\x05\x02'
ASN1_SEQUENCE = 0x30
ASN1_AID = 0x60
ASN1_OID = 0x06
ASN1_OCTET_STRING = 0x04
ASN1_MECH_TYPE = 0xa0
ASN1_MECH_TOKEN = 0xa2
ASN1_SUPPORTED_MECH = 0xa1
ASN1_RESPONSE_TOKEN = 0xa2
ASN1_ENUMERATED = 0x0a
MechTypes = {
'+\x06\x01\x04\x01\x827\x02\x02\x1e': 'SNMPv2-SMI::enterprises.311.2.2.30',
'+\x06\x01\x04\x01\x827\x02\x02\n': 'NTLMSSP - Microsoft NTLM Security Support Provider',
'*\x86H\x82\xf7\x12\x01\x02\x02': 'MS KRB5 - Microsoft Kerberos 5',
'*\x86H\x86\xf7\x12\x01\x02\x02': 'KRB5 - Kerberos 5',
'*\x86H\x86\xf7\x12\x01\x02\x02\x03': 'KRB5 - Kerberos 5 - User to User'
}
TypesMech = dict((v,k) for k, v in MechTypes.iteritems())
def asn1encode(data = ''):
#res = asn1.SEQUENCE(str).encode()
#import binascii
#print '\nalex asn1encode str: %s\n' % binascii.hexlify(str)
if len(data) >= 0 and len(data) <= 0x7F:
res = pack('B', len(data)) + data
elif len(data) >= 0x80 and len(data) <= 0xFF:
res = pack('BB', 0x81, len(data)) + data
elif len(data) >= 0x100 and len(data) <= 0xFFFF:
res = pack('!BH', 0x82, len(data)) + data
elif len(data) >= 0x10000 and len(data) <= 0xffffff:
res = pack('!BBH', 0x83, len(data) >> 16, len(data) & 0xFFFF) + data
elif len(data) >= 0x1000000 and len(data) <= 0xffffffff:
res = pack('!BL', 0x84, len(data)) + data
else:
raise Exception('Error in asn1encode')
return str(res)
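# Editor's note (added for clarity, not part of the original module): asn1encode() emits a
# standard BER/DER length prefix - short form for payloads up to 0x7F bytes, long form
# (0x81, 0x82, ...) above that.  Illustrative examples of the framing it produces:
#   asn1encode('A' * 5)    -> '\x05' + 'AAAAA'                (short form)
#   asn1encode('A' * 300)  -> '\x82\x01\x2c' + 'A' * 300      (0x012c == 300, long form)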
def asn1decode(data = ''):
len1 = unpack('B', data[:1])[0]
data = data[1:]
if len1 == 0x81:
pad = calcsize('B')
len2 = unpack('B',data[:pad])[0]
data = data[pad:]
ans = data[:len2]
elif len1 == 0x82:
pad = calcsize('H')
len2 = unpack('!H', data[:pad])[0]
data = data[pad:]
ans = data[:len2]
elif len1 == 0x83:
pad = calcsize('B') + calcsize('!H')
len2, len3 = unpack('!BH', data[:pad])
data = data[pad:]
ans = data[:len2 << 16 + len3]
elif len1 == 0x84:
pad = calcsize('!L')
len2 = unpack('!L', data[:pad])[0]
data = data[pad:]
ans = data[:len2]
# 1 byte length, string <= 0x7F
else:
pad = 0
ans = data[:len1]
return ans, len(ans)+pad+1
class GSSAPI():
# Generic GSSAPI Header Format
def __init__(self, data = None):
self.fields = {}
self['UUID'] = GSS_API_SPNEGO_UUID
if data:
self.fromString(data)
pass
def __setitem__(self,key,value):
self.fields[key] = value
def __getitem__(self, key):
return self.fields[key]
def __delitem__(self, key):
del self.fields[key]
def __len__(self):
return len(self.getData())
def __str__(self):
        return self.getData()
def fromString(self, data = None):
# Manual parse of the GSSAPI Header Format
# It should be something like
# AID = 0x60 TAG, BER Length
# OID = 0x06 TAG
# GSSAPI OID
# UUID data (BER Encoded)
# Payload
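        # Editor's note (added for clarity, not part of the original module): for the SPNEGO
        # tokens handled here the raw bytes typically look like
        #   60 <BER length>               application tag (ASN1_AID)
        #   06 06 2b 06 01 05 05 02       OID 1.3.6.1.5.5.2 (GSS_API_SPNEGO_UUID above)
        #   <inner negotiation token>     stored below as self['Payload']
        # The hex values illustrate the usual layout and are not taken from a real capture.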
next_byte = unpack('B',data[:1])[0]
if next_byte != ASN1_AID:
raise Exception('Unknown AID=%x' % next_byte)
data = data[1:]
decode_data, total_bytes = asn1decode(data)
# Now we should have a OID tag
next_byte = unpack('B',decode_data[:1])[0]
if next_byte != ASN1_OID:
raise Exception('OID tag not found %x' % next_byte)
decode_data = decode_data[1:]
# Now the OID contents, should be SPNEGO UUID
uuid, total_bytes = asn1decode(decode_data)
self['OID'] = uuid
# the rest should be the data
self['Payload'] = decode_data[total_bytes:]
#pass
def dump(self):
for i in self.fields.keys():
print "%s: {%r}" % (i,self[i])
def getData(self):
ans = pack('B',ASN1_AID)
ans += asn1encode(
pack('B',ASN1_OID) +
asn1encode(self['UUID']) +
self['Payload'] )
return ans
class SPNEGO_NegTokenResp():
# http://tools.ietf.org/html/rfc4178#page-9
# NegTokenResp ::= SEQUENCE {
# negState [0] ENUMERATED {
# accept-completed (0),
# accept-incomplete (1),
# reject (2),
# request-mic (3)
# } OPTIONAL,
# -- REQUIRED in the first reply from the target
# supportedMech [1] MechType OPTIONAL,
# -- present only in the first reply from the target
# responseToken [2] OCTET STRING OPTIONAL,
# mechListMIC [3] OCTET STRING OPTIONAL,
# ...
# }
# This structure is not prepended by a GSS generic header!
SPNEGO_NEG_TOKEN_RESP = 0xa1
SPNEGO_NEG_TOKEN_TARG = 0xa0
def __init__(self, data = None):
self.fields = {}
if data:
self.fromString(data)
pass
def __setitem__(self,key,value):
self.fields[key] = value
def __getitem__(self, key):
return self.fields[key]
def __delitem__(self, key):
del self.fields[key]
def __len__(self):
return len(self.getData())
def __str__(self):
        return self.getData()
def fromString(self, data = 0):
payload = data
next_byte = unpack('B', payload[:1])[0]
if next_byte != SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_RESP:
raise Exception('NegTokenResp not found %x' % next_byte)
payload = payload[1:]
decode_data, total_bytes = asn1decode(payload)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SEQUENCE:
raise Exception('SEQUENCE tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
next_byte = unpack('B',decode_data[:1])[0]
if next_byte != ASN1_MECH_TYPE:
# MechType not found, could be an AUTH answer
if next_byte != ASN1_RESPONSE_TOKEN:
raise Exception('MechType/ResponseToken tag not found %x' % next_byte)
else:
decode_data2 = decode_data[1:]
decode_data2, total_bytes = asn1decode(decode_data2)
next_byte = unpack('B', decode_data2[:1])[0]
if next_byte != ASN1_ENUMERATED:
raise Exception('Enumerated tag not found %x' % next_byte)
decode_data2 = decode_data2[1:]
item, total_bytes2 = asn1decode(decode_data)
self['NegResult'] = item
decode_data = decode_data[1:]
decode_data = decode_data[total_bytes:]
# Do we have more data?
if len(decode_data) == 0:
return
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SUPPORTED_MECH:
if next_byte != ASN1_RESPONSE_TOKEN:
raise Exception('Supported Mech/ResponseToken tag not found %x' % next_byte)
else:
decode_data2 = decode_data[1:]
decode_data2, total_bytes = asn1decode(decode_data2)
next_byte = unpack('B', decode_data2[:1])[0]
if next_byte != ASN1_OID:
raise Exception('OID tag not found %x' % next_byte)
decode_data2 = decode_data2[1:]
item, total_bytes2 = asn1decode(decode_data2)
            self['SupportedMech'] = item
decode_data = decode_data[1:]
decode_data = decode_data[total_bytes:]
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_RESPONSE_TOKEN:
raise Exception('Response token tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_OCTET_STRING:
raise Exception('Octet string token tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
self['ResponseToken'] = decode_data
def dump(self):
for i in self.fields.keys():
print "%s: {%r}" % (i,self[i])
def getData(self):
ans = pack('B',SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_RESP)
if self.fields.has_key('NegResult') and self.fields.has_key('SupportedMech'):
# Server resp
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B',SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_TARG) +
asn1encode(
pack('B',ASN1_ENUMERATED) +
asn1encode( self['NegResult'] )) +
pack('B',ASN1_SUPPORTED_MECH) +
asn1encode(
pack('B',ASN1_OID) +
asn1encode(self['SupportedMech'])) +
pack('B',ASN1_RESPONSE_TOKEN ) +
asn1encode(
pack('B', ASN1_OCTET_STRING) + asn1encode(self['ResponseToken']))))
elif self.fields.has_key('NegResult'):
# Server resp
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B', SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_TARG) +
asn1encode(
pack('B',ASN1_ENUMERATED) +
asn1encode( self['NegResult'] ))))
else:
# Client resp
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B', ASN1_RESPONSE_TOKEN) +
asn1encode(
pack('B', ASN1_OCTET_STRING) + asn1encode(self['ResponseToken']))))
return ans
class SPNEGO_NegTokenInit(GSSAPI):
# http://tools.ietf.org/html/rfc4178#page-8
    # NegTokenInit ::= SEQUENCE {
# mechTypes [0] MechTypeList,
# reqFlags [1] ContextFlags OPTIONAL,
# mechToken [2] OCTET STRING OPTIONAL,
# mechListMIC [3] OCTET STRING OPTIONAL,
# }
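    # Editor's note (added for clarity, not part of the original module): in the NTLM
    # negotiations this class is used for, mechTypes usually carries the single NTLMSSP OID
    # 1.3.6.1.4.1.311.2.2.10 (the '+\x06\x01\x04\x01\x827\x02\x02\n' key of MechTypes above)
    # and mechToken carries the raw NTLMSSP message.  This note is descriptive only.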
SPNEGO_NEG_TOKEN_INIT = 0xa0
def fromString(self, data = 0):
GSSAPI.fromString(self, data)
payload = self['Payload']
next_byte = unpack('B', payload[:1])[0]
if next_byte != SPNEGO_NegTokenInit.SPNEGO_NEG_TOKEN_INIT:
raise Exception('NegTokenInit not found %x' % next_byte)
payload = payload[1:]
decode_data, total_bytes = asn1decode(payload)
# Now we should have a SEQUENCE Tag
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SEQUENCE:
raise Exception('SEQUENCE tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes2 = asn1decode(decode_data)
next_byte = unpack('B',decode_data[:1])[0]
if next_byte != ASN1_MECH_TYPE:
raise Exception('MechType tag not found %x' % next_byte)
decode_data = decode_data[1:]
remaining_data = decode_data
decode_data, total_bytes3 = asn1decode(decode_data)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SEQUENCE:
raise Exception('SEQUENCE tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes4 = asn1decode(decode_data)
# And finally we should have the MechTypes
self['MechTypes'] = []
i = 1
while decode_data:
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_OID:
# Not a valid OID, there must be something else we won't unpack
break
decode_data = decode_data[1:]
item, total_bytes = asn1decode(decode_data)
self['MechTypes'].append(item)
decode_data = decode_data[total_bytes:]
# Do we have MechTokens as well?
decode_data = remaining_data[total_bytes3:]
if len(decode_data) > 0:
next_byte = unpack('B', decode_data[:1])[0]
if next_byte == ASN1_MECH_TOKEN:
# We have tokens in here!
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte == ASN1_OCTET_STRING:
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
self['MechToken'] = decode_data
def getData(self):
mechTypes = ''
for i in self['MechTypes']:
mechTypes += pack('B', ASN1_OID)
mechTypes += asn1encode(i)
mechToken = ''
# Do we have tokens to send?
if self.fields.has_key('MechToken'):
mechToken = pack('B', ASN1_MECH_TOKEN) + asn1encode(
pack('B', ASN1_OCTET_STRING) + asn1encode(
self['MechToken']))
ans = pack('B',SPNEGO_NegTokenInit.SPNEGO_NEG_TOKEN_INIT)
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B', ASN1_MECH_TYPE) +
asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(mechTypes)) + mechToken ))
self['Payload'] = ans
return GSSAPI.getData(self)
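# Editor's note (added for clarity, not part of the original module): a rough sketch of how
# the two SPNEGO classes above compose and parse a blob; the token value is a placeholder,
# not a real NTLMSSP message.
#   blob = SPNEGO_NegTokenInit()
#   blob['MechTypes'] = [TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']]
#   blob['MechToken'] = '<ntlmssp type 1 bytes>'       # placeholder payload
#   wire = blob.getData()                  # 0x60-framed GSS-API token ready to put on the wire
#   parsed = SPNEGO_NegTokenInit(wire)     # round-trips back into MechTypes / MechToken
#   resp = SPNEGO_NegTokenResp()           # the reply direction uses NegResult / ResponseToken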
def strerror(errclass, errcode):
if errclass == 0x01:
return 'OS error', ERRDOS.get(errcode, 'Unknown error')
elif errclass == 0x02:
return 'Server error', ERRSRV.get(errcode, 'Unknown error')
elif errclass == 0x03:
return 'Hardware error', ERRHRD.get(errcode, 'Unknown error')
# This is not a standard error class for SMB
#elif errclass == 0x80:
# return 'Browse error', ERRBROWSE.get(errcode, 'Unknown error')
elif errclass == 0xff:
return 'Bad command', 'Bad command. Please file bug report'
else:
return 'Unknown error', 'Unknown error'
# Raised when an error has occured during a session
class SessionError(Exception):
# SMB X/Open error codes for the ERRDOS error class
ERRsuccess = 0
ERRbadfunc = 1
ERRbadfile = 2
ERRbadpath = 3
ERRnofids = 4
ERRnoaccess = 5
ERRbadfid = 6
ERRbadmcb = 7
ERRnomem = 8
ERRbadmem = 9
ERRbadenv = 10
ERRbadaccess = 12
ERRbaddata = 13
ERRres = 14
ERRbaddrive = 15
ERRremcd = 16
ERRdiffdevice = 17
ERRnofiles = 18
ERRgeneral = 31
ERRbadshare = 32
ERRlock = 33
ERRunsup = 50
ERRnetnamedel = 64
ERRnosuchshare = 67
ERRfilexists = 80
ERRinvalidparam = 87
ERRcannotopen = 110
ERRinsufficientbuffer = 122
ERRinvalidname = 123
ERRunknownlevel = 124
ERRnotlocked = 158
ERRrename = 183
ERRbadpipe = 230
ERRpipebusy = 231
ERRpipeclosing = 232
ERRnotconnected = 233
ERRmoredata = 234
ERRnomoreitems = 259
ERRbaddirectory = 267
ERReasnotsupported = 282
ERRlogonfailure = 1326
ERRbuftoosmall = 2123
ERRunknownipc = 2142
ERRnosuchprintjob = 2151
ERRinvgroup = 2455
# here's a special one from observing NT
ERRnoipc = 66
# These errors seem to be only returned by the NT printer driver system
ERRdriveralreadyinstalled = 1795
ERRunknownprinterport = 1796
ERRunknownprinterdriver = 1797
ERRunknownprintprocessor = 1798
ERRinvalidseparatorfile = 1799
ERRinvalidjobpriority = 1800
ERRinvalidprintername = 1801
ERRprinteralreadyexists = 1802
ERRinvalidprintercommand = 1803
ERRinvaliddatatype = 1804
ERRinvalidenvironment = 1805
ERRunknownprintmonitor = 3000
ERRprinterdriverinuse = 3001
ERRspoolfilenotfound = 3002
ERRnostartdoc = 3003
ERRnoaddjob = 3004
ERRprintprocessoralreadyinstalled = 3005
ERRprintmonitoralreadyinstalled = 3006
ERRinvalidprintmonitor = 3007
ERRprintmonitorinuse = 3008
ERRprinterhasjobsqueued = 3009
# Error codes for the ERRSRV class
ERRerror = 1
ERRbadpw = 2
ERRbadtype = 3
ERRaccess = 4
ERRinvnid = 5
ERRinvnetname = 6
ERRinvdevice = 7
ERRqfull = 49
ERRqtoobig = 50
ERRinvpfid = 52
ERRsmbcmd = 64
ERRsrverror = 65
ERRfilespecs = 67
ERRbadlink = 68
ERRbadpermits = 69
ERRbadpid = 70
ERRsetattrmode = 71
ERRpaused = 81
ERRmsgoff = 82
ERRnoroom = 83
ERRrmuns = 87
ERRtimeout = 88
ERRnoresource = 89
ERRtoomanyuids = 90
ERRbaduid = 91
ERRuseMPX = 250
ERRuseSTD = 251
ERRcontMPX = 252
ERRbadPW = None
ERRnosupport = 0
ERRunknownsmb = 22
# Error codes for the ERRHRD class
ERRnowrite = 19
ERRbadunit = 20
ERRnotready = 21
ERRbadcmd = 22
ERRdata = 23
ERRbadreq = 24
ERRseek = 25
ERRbadmedia = 26
ERRbadsector = 27
ERRnopaper = 28
ERRwrite = 29
ERRread = 30
ERRgeneral = 31
ERRwrongdisk = 34
ERRFCBunavail = 35
ERRsharebufexc = 36
ERRdiskfull = 39
hard_msgs = {
19: ("ERRnowrite", "Attempt to write on write-protected diskette."),
20: ("ERRbadunit", "Unknown unit."),
21: ("ERRnotready", "Drive not ready."),
22: ("ERRbadcmd", "Unknown command."),
23: ("ERRdata", "Data error (CRC)."),
24: ("ERRbadreq", "Bad request structure length."),
25: ("ERRseek", "Seek error."),
26: ("ERRbadmedia", "Unknown media type."),
27: ("ERRbadsector", "Sector not found."),
28: ("ERRnopaper", "Printer out of paper."),
29: ("ERRwrite", "Write fault."),
30: ("ERRread", "Read fault."),
31: ("ERRgeneral", "General failure."),
32: ("ERRbadshare", "An open conflicts with an existing open."),
33: ("ERRlock", "A Lock request conflicted with an existing lock or specified an invalid mode, or an Unlock requested attempted to remove a lock held by another process."),
34: ("ERRwrongdisk", "The wrong disk was found in a drive."),
35: ("ERRFCBUnavail", "No FCBs are available to process request."),
36: ("ERRsharebufexc", "A sharing buffer has been exceeded.")
}
nt_msgs = {
0x0000: ("NT_STATUS_OK","The operation completed successfully."),
0x0001: ("NT_STATUS_UNSUCCESSFUL","A device attached to the system is not functioning."),
0x0002: ("NT_STATUS_NOT_IMPLEMENTED","Incorrect function."),
0x0003: ("NT_STATUS_INVALID_INFO_CLASS","The parameter is incorrect."),
0x0004: ("NT_STATUS_INFO_LENGTH_MISMATCH","The program issued a command but the command length is incorrect."),
0x0005: ("NT_STATUS_ACCESS_VIOLATION","Invalid access to memory location."),
0x0006: ("NT_STATUS_IN_PAGE_ERROR","Error performing inpage operation."),
0x0007: ("NT_STATUS_PAGEFILE_QUOTA","Insufficient quota to complete the requested service."),
0x0008: ("NT_STATUS_INVALID_HANDLE","The handle is invalid."),
0x0009: ("NT_STATUS_BAD_INITIAL_STACK","Recursion too deep, stack overflowed."),
0x000a: ("NT_STATUS_BAD_INITIAL_PC","Not a valid Windows NT application."),
0x000b: ("NT_STATUS_INVALID_CID","The parameter is incorrect."),
0x000c: ("NT_STATUS_TIMER_NOT_CANCELED","NT_STATUS_TIMER_NOT_CANCELED"),
0x000d: ("NT_STATUS_INVALID_PARAMETER","The parameter is incorrect."),
0x000e: ("NT_STATUS_NO_SUCH_DEVICE","The system cannot find the file specified."),
0x000f: ("NT_STATUS_NO_SUCH_FILE","The system cannot find the file specified."),
0x0010: ("NT_STATUS_INVALID_DEVICE_REQUEST","Incorrect function."),
0x0011: ("NT_STATUS_END_OF_FILE","Reached end of file."),
0x0012: ("NT_STATUS_WRONG_VOLUME","The wrong diskette is in the drive. Insert %2 (Volume Serial Number: %3) into drive %1."),
0x0013: ("NT_STATUS_NO_MEDIA_IN_DEVICE","The device is not ready."),
0x0014: ("NT_STATUS_UNRECOGNIZED_MEDIA","The disk media is not recognized. It may not be formatted."),
0x0015: ("NT_STATUS_NONEXISTENT_SECTOR","The drive cannot find the sector requested."),
0x0016: ("NT_STATUS_MORE_PROCESSING_REQUIRED","More data is available."),
0x0017: ("NT_STATUS_NO_MEMORY","Not enough storage is available to process this command."),
0x0018: ("NT_STATUS_CONFLICTING_ADDRESSES","Attempt to access invalid address."),
0x0019: ("NT_STATUS_NOT_MAPPED_VIEW","Attempt to access invalid address."),
0x001a: ("NT_STATUS_UNABLE_TO_FREE_VM","The parameter is incorrect."),
0x001b: ("NT_STATUS_UNABLE_TO_DELETE_SECTION","The parameter is incorrect."),
0x001c: ("NT_STATUS_INVALID_SYSTEM_SERVICE","Incorrect function."),
0x001d: ("NT_STATUS_ILLEGAL_INSTRUCTION","NT_STATUS_ILLEGAL_INSTRUCTION"),
0x001e: ("NT_STATUS_INVALID_LOCK_SEQUENCE","Access is denied."),
0x001f: ("NT_STATUS_INVALID_VIEW_SIZE","Access is denied."),
0x0020: ("NT_STATUS_INVALID_FILE_FOR_SECTION","Not a valid Windows NT application."),
0x0021: ("NT_STATUS_ALREADY_COMMITTED","Access is denied."),
0x0022: ("NT_STATUS_ACCESS_DENIED","Access is denied."),
0x0023: ("NT_STATUS_BUFFER_TOO_SMALL","The data area passed to a system call is too small."),
0x0024: ("NT_STATUS_OBJECT_TYPE_MISMATCH","The handle is invalid."),
0x0025: ("NT_STATUS_NONCONTINUABLE_EXCEPTION","NT_STATUS_NONCONTINUABLE_EXCEPTION"),
0x0026: ("NT_STATUS_INVALID_DISPOSITION","NT_STATUS_INVALID_DISPOSITION"),
0x0027: ("NT_STATUS_UNWIND","NT_STATUS_UNWIND"),
0x0028: ("NT_STATUS_BAD_STACK","NT_STATUS_BAD_STACK"),
0x0029: ("NT_STATUS_INVALID_UNWIND_TARGET","NT_STATUS_INVALID_UNWIND_TARGET"),
0x002a: ("NT_STATUS_NOT_LOCKED","The segment is already unlocked."),
0x002b: ("NT_STATUS_PARITY_ERROR","NT_STATUS_PARITY_ERROR"),
0x002c: ("NT_STATUS_UNABLE_TO_DECOMMIT_VM","Attempt to access invalid address."),
0x002d: ("NT_STATUS_NOT_COMMITTED","Attempt to access invalid address."),
0x002e: ("NT_STATUS_INVALID_PORT_ATTRIBUTES","NT_STATUS_INVALID_PORT_ATTRIBUTES"),
0x002f: ("NT_STATUS_PORT_MESSAGE_TOO_LONG","NT_STATUS_PORT_MESSAGE_TOO_LONG"),
0x0030: ("NT_STATUS_INVALID_PARAMETER_MIX","The parameter is incorrect."),
0x0031: ("NT_STATUS_INVALID_QUOTA_LOWER","NT_STATUS_INVALID_QUOTA_LOWER"),
0x0032: ("NT_STATUS_DISK_CORRUPT_ERROR","The disk structure is corrupt and non-readable."),
0x0033: ("NT_STATUS_OBJECT_NAME_INVALID","The filename, directory name, or volume label syntax is incorrect."),
0x0034: ("NT_STATUS_OBJECT_NAME_NOT_FOUND","The system cannot find the file specified."),
0x0035: ("NT_STATUS_OBJECT_NAME_COLLISION","Cannot create a file when that file already exists."),
0x0036: ("NT_STATUS_HANDLE_NOT_WAITABLE","NT_STATUS_HANDLE_NOT_WAITABLE"),
0x0037: ("NT_STATUS_PORT_DISCONNECTED","The handle is invalid."),
0x0038: ("NT_STATUS_DEVICE_ALREADY_ATTACHED","NT_STATUS_DEVICE_ALREADY_ATTACHED"),
0x0039: ("NT_STATUS_OBJECT_PATH_INVALID","The specified path is invalid."),
0x003a: ("NT_STATUS_OBJECT_PATH_NOT_FOUND","The system cannot find the path specified."),
0x003b: ("NT_STATUS_OBJECT_PATH_SYNTAX_BAD","The specified path is invalid."),
0x003c: ("NT_STATUS_DATA_OVERRUN","The request could not be performed because of an I/O device error."),
0x003d: ("NT_STATUS_DATA_LATE_ERROR","The request could not be performed because of an I/O device error."),
0x003e: ("NT_STATUS_DATA_ERROR","Data error (cyclic redundancy check)"),
0x003f: ("NT_STATUS_CRC_ERROR","Data error (cyclic redundancy check)"),
0x0040: ("NT_STATUS_SECTION_TOO_BIG","Not enough storage is available to process this command."),
0x0041: ("NT_STATUS_PORT_CONNECTION_REFUSED","Access is denied."),
0x0042: ("NT_STATUS_INVALID_PORT_HANDLE","The handle is invalid."),
0x0043: ("NT_STATUS_SHARING_VIOLATION","The process cannot access the file because it is being used by another process."),
0x0044: ("NT_STATUS_QUOTA_EXCEEDED","Not enough quota is available to process this command."),
0x0045: ("NT_STATUS_INVALID_PAGE_PROTECTION","The parameter is incorrect."),
0x0046: ("NT_STATUS_MUTANT_NOT_OWNED","Attempt to release mutex not owned by caller."),
0x0047: ("NT_STATUS_SEMAPHORE_LIMIT_EXCEEDED","Too many posts were made to a semaphore."),
0x0048: ("NT_STATUS_PORT_ALREADY_SET","The parameter is incorrect."),
0x0049: ("NT_STATUS_SECTION_NOT_IMAGE","The parameter is incorrect."),
0x004a: ("NT_STATUS_SUSPEND_COUNT_EXCEEDED","The recipient process has refused the signal."),
0x004b: ("NT_STATUS_THREAD_IS_TERMINATING","Access is denied."),
0x004c: ("NT_STATUS_BAD_WORKING_SET_LIMIT","The parameter is incorrect."),
0x004d: ("NT_STATUS_INCOMPATIBLE_FILE_MAP","The parameter is incorrect."),
0x004e: ("NT_STATUS_SECTION_PROTECTION","The parameter is incorrect."),
0x004f: ("NT_STATUS_EAS_NOT_SUPPORTED","NT_STATUS_EAS_NOT_SUPPORTED"),
0x0050: ("NT_STATUS_EA_TOO_LARGE","The extended attributes are inconsistent."),
0x0051: ("NT_STATUS_NONEXISTENT_EA_ENTRY","The file or directory is corrupt and non-readable."),
0x0052: ("NT_STATUS_NO_EAS_ON_FILE","The file or directory is corrupt and non-readable."),
0x0053: ("NT_STATUS_EA_CORRUPT_ERROR","The file or directory is corrupt and non-readable."),
0x0054: ("NT_STATUS_FILE_LOCK_CONFLICT","The process cannot access the file because another process has locked a portion of the file."),
0x0055: ("NT_STATUS_LOCK_NOT_GRANTED","The process cannot access the file because another process has locked a portion of the file."),
0x0056: ("NT_STATUS_DELETE_PENDING","Access is denied."),
0x0057: ("NT_STATUS_CTL_FILE_NOT_SUPPORTED","The network request is not supported."),
0x0058: ("NT_STATUS_UNKNOWN_REVISION","The revision level is unknown."),
0x0059: ("NT_STATUS_REVISION_MISMATCH","Indicates two revision levels are incompatible."),
0x005a: ("NT_STATUS_INVALID_OWNER","This security ID may not be assigned as the owner of this object."),
0x005b: ("NT_STATUS_INVALID_PRIMARY_GROUP","This security ID may not be assigned as the primary group of an object."),
0x005c: ("NT_STATUS_NO_IMPERSONATION_TOKEN","An attempt has been made to operate on an impersonation token by a thread that is not currently impersonating a client."),
0x005d: ("NT_STATUS_CANT_DISABLE_MANDATORY","The group may not be disabled."),
0x005e: ("NT_STATUS_NO_LOGON_SERVERS","There are currently no logon servers available to service the logon request."),
0x005f: ("NT_STATUS_NO_SUCH_LOGON_SESSION","A specified logon session does not exist. It may already have been terminated."),
0x0060: ("NT_STATUS_NO_SUCH_PRIVILEGE","A specified privilege does not exist."),
0x0061: ("NT_STATUS_PRIVILEGE_NOT_HELD","A required privilege is not held by the client."),
0x0062: ("NT_STATUS_INVALID_ACCOUNT_NAME","The name provided is not a properly formed account name."),
0x0063: ("NT_STATUS_USER_EXISTS","The specified user already exists."),
0x0064: ("NT_STATUS_NO_SUCH_USER","The specified user does not exist."),
0x0065: ("NT_STATUS_GROUP_EXISTS","The specified group already exists."),
0x0066: ("NT_STATUS_NO_SUCH_GROUP","The specified group does not exist."),
0x0067: ("NT_STATUS_MEMBER_IN_GROUP","Either the specified user account is already a member of the specified group, or the specified group cannot be deleted because it contains a member."),
0x0068: ("NT_STATUS_MEMBER_NOT_IN_GROUP","The specified user account is not a member of the specified group account."),
0x0069: ("NT_STATUS_LAST_ADMIN","The last remaining administration account cannot be disabled or deleted."),
0x006a: ("NT_STATUS_WRONG_PASSWORD","The specified network password is not correct."),
0x006b: ("NT_STATUS_ILL_FORMED_PASSWORD","Unable to update the password. The value provided for the new password contains values that are not allowed in passwords."),
0x006c: ("NT_STATUS_PASSWORD_RESTRICTION","Unable to update the password because a password update rule has been violated."),
0x006d: ("NT_STATUS_LOGON_FAILURE","Logon failure: unknown user name or bad password."),
0x006e: ("NT_STATUS_ACCOUNT_RESTRICTION","Logon failure: user account restriction."),
0x006f: ("NT_STATUS_INVALID_LOGON_HOURS","Logon failure: account logon time restriction violation."),
0x0070: ("NT_STATUS_INVALID_WORKSTATION","Logon failure: user not allowed to log on to this computer."),
0x0071: ("NT_STATUS_PASSWORD_EXPIRED","Logon failure: the specified account password has expired."),
0x0072: ("NT_STATUS_ACCOUNT_DISABLED","Logon failure: account currently disabled."),
0x0073: ("NT_STATUS_NONE_MAPPED","No mapping between account names and security IDs was done."),
0x0074: ("NT_STATUS_TOO_MANY_LUIDS_REQUESTED","Too many local user identifiers (LUIDs) were requested at one time."),
0x0075: ("NT_STATUS_LUIDS_EXHAUSTED","No more local user identifiers (LUIDs) are available."),
0x0076: ("NT_STATUS_INVALID_SUB_AUTHORITY","The subauthority part of a security ID is invalid for this particular use."),
0x0077: ("NT_STATUS_INVALID_ACL","The access control list (ACL) structure is invalid."),
0x0078: ("NT_STATUS_INVALID_SID","The security ID structure is invalid."),
0x0079: ("NT_STATUS_INVALID_SECURITY_DESCR","The security descriptor structure is invalid."),
0x007a: ("NT_STATUS_PROCEDURE_NOT_FOUND","The specified procedure could not be found."),
0x007b: ("NT_STATUS_INVALID_IMAGE_FORMAT","%1 is not a valid Windows NT application."),
0x007c: ("NT_STATUS_NO_TOKEN","An attempt was made to reference a token that does not exist."),
0x007d: ("NT_STATUS_BAD_INHERITANCE_ACL","The inherited access control list (ACL) or access control entry (ACE) could not be built."),
0x007e: ("NT_STATUS_RANGE_NOT_LOCKED","The segment is already unlocked."),
0x007f: ("NT_STATUS_DISK_FULL","There is not enough space on the disk."),
0x0080: ("NT_STATUS_SERVER_DISABLED","The server is currently disabled."),
0x0081: ("NT_STATUS_SERVER_NOT_DISABLED","The server is currently enabled."),
0x0082: ("NT_STATUS_TOO_MANY_GUIDS_REQUESTED","The name limit for the local computer network adapter card was exceeded."),
0x0083: ("NT_STATUS_GUIDS_EXHAUSTED","No more data is available."),
0x0084: ("NT_STATUS_INVALID_ID_AUTHORITY","The value provided was an invalid value for an identifier authority."),
0x0085: ("NT_STATUS_AGENTS_EXHAUSTED","No more data is available."),
0x0086: ("NT_STATUS_INVALID_VOLUME_LABEL","The volume label you entered exceeds the label character limit of the target file system."),
0x0087: ("NT_STATUS_SECTION_NOT_EXTENDED","Not enough storage is available to complete this operation."),
0x0088: ("NT_STATUS_NOT_MAPPED_DATA","Attempt to access invalid address."),
0x0089: ("NT_STATUS_RESOURCE_DATA_NOT_FOUND","The specified image file did not contain a resource section."),
0x008a: ("NT_STATUS_RESOURCE_TYPE_NOT_FOUND","The specified resource type can not be found in the image file."),
0x008b: ("NT_STATUS_RESOURCE_NAME_NOT_FOUND","The specified resource name can not be found in the image file."),
0x008c: ("NT_STATUS_ARRAY_BOUNDS_EXCEEDED","NT_STATUS_ARRAY_BOUNDS_EXCEEDED"),
0x008d: ("NT_STATUS_FLOAT_DENORMAL_OPERAND","NT_STATUS_FLOAT_DENORMAL_OPERAND"),
0x008e: ("NT_STATUS_FLOAT_DIVIDE_BY_ZERO","NT_STATUS_FLOAT_DIVIDE_BY_ZERO"),
0x008f: ("NT_STATUS_FLOAT_INEXACT_RESULT","NT_STATUS_FLOAT_INEXACT_RESULT"),
0x0090: ("NT_STATUS_FLOAT_INVALID_OPERATION","NT_STATUS_FLOAT_INVALID_OPERATION"),
0x0091: ("NT_STATUS_FLOAT_OVERFLOW","NT_STATUS_FLOAT_OVERFLOW"),
0x0092: ("NT_STATUS_FLOAT_STACK_CHECK","NT_STATUS_FLOAT_STACK_CHECK"),
0x0093: ("NT_STATUS_FLOAT_UNDERFLOW","NT_STATUS_FLOAT_UNDERFLOW"),
0x0094: ("NT_STATUS_INTEGER_DIVIDE_BY_ZERO","NT_STATUS_INTEGER_DIVIDE_BY_ZERO"),
0x0095: ("NT_STATUS_INTEGER_OVERFLOW","Arithmetic result exceeded 32 bits."),
0x0096: ("NT_STATUS_PRIVILEGED_INSTRUCTION","NT_STATUS_PRIVILEGED_INSTRUCTION"),
0x0097: ("NT_STATUS_TOO_MANY_PAGING_FILES","Not enough storage is available to process this command."),
0x0098: ("NT_STATUS_FILE_INVALID","The volume for a file has been externally altered such that the opened file is no longer valid."),
0x0099: ("NT_STATUS_ALLOTTED_SPACE_EXCEEDED","No more memory is available for security information updates."),
0x009a: ("NT_STATUS_INSUFFICIENT_RESOURCES","Insufficient system resources exist to complete the requested service."),
0x009b: ("NT_STATUS_DFS_EXIT_PATH_FOUND","The system cannot find the path specified."),
0x009c: ("NT_STATUS_DEVICE_DATA_ERROR","Data error (cyclic redundancy check)"),
0x009d: ("NT_STATUS_DEVICE_NOT_CONNECTED","The device is not ready."),
0x009e: ("NT_STATUS_DEVICE_POWER_FAILURE","The device is not ready."),
0x009f: ("NT_STATUS_FREE_VM_NOT_AT_BASE","Attempt to access invalid address."),
0x00a0: ("NT_STATUS_MEMORY_NOT_ALLOCATED","Attempt to access invalid address."),
0x00a1: ("NT_STATUS_WORKING_SET_QUOTA","Insufficient quota to complete the requested service."),
0x00a2: ("NT_STATUS_MEDIA_WRITE_PROTECTED","The media is write protected."),
0x00a3: ("NT_STATUS_DEVICE_NOT_READY","The device is not ready."),
0x00a4: ("NT_STATUS_INVALID_GROUP_ATTRIBUTES","The specified attributes are invalid, or incompatible with the attributes for the group as a whole."),
0x00a5: ("NT_STATUS_BAD_IMPERSONATION_LEVEL","Either a required impersonation level was not provided, or the provided impersonation level is invalid."),
0x00a6: ("NT_STATUS_CANT_OPEN_ANONYMOUS","Cannot open an anonymous level security token."),
0x00a7: ("NT_STATUS_BAD_VALIDATION_CLASS","The validation information class requested was invalid."),
0x00a8: ("NT_STATUS_BAD_TOKEN_TYPE","The type of the token is inappropriate for its attempted use."),
0x00a9: ("NT_STATUS_BAD_MASTER_BOOT_RECORD","NT_STATUS_BAD_MASTER_BOOT_RECORD"),
0x00aa: ("NT_STATUS_INSTRUCTION_MISALIGNMENT","NT_STATUS_INSTRUCTION_MISALIGNMENT"),
0x00ab: ("NT_STATUS_INSTANCE_NOT_AVAILABLE","All pipe instances are busy."),
0x00ac: ("NT_STATUS_PIPE_NOT_AVAILABLE","All pipe instances are busy."),
0x00ad: ("NT_STATUS_INVALID_PIPE_STATE","The pipe state is invalid."),
0x00ae: ("NT_STATUS_PIPE_BUSY","All pipe instances are busy."),
0x00af: ("NT_STATUS_ILLEGAL_FUNCTION","Incorrect function."),
0x00b0: ("NT_STATUS_PIPE_DISCONNECTED","No process is on the other end of the pipe."),
0x00b1: ("NT_STATUS_PIPE_CLOSING","The pipe is being closed."),
0x00b2: ("NT_STATUS_PIPE_CONNECTED","There is a process on other end of the pipe."),
0x00b3: ("NT_STATUS_PIPE_LISTENING","Waiting for a process to open the other end of the pipe."),
0x00b4: ("NT_STATUS_INVALID_READ_MODE","The pipe state is invalid."),
0x00b5: ("NT_STATUS_IO_TIMEOUT","The semaphore timeout period has expired."),
0x00b6: ("NT_STATUS_FILE_FORCED_CLOSED","Reached end of file."),
0x00b7: ("NT_STATUS_PROFILING_NOT_STARTED","NT_STATUS_PROFILING_NOT_STARTED"),
0x00b8: ("NT_STATUS_PROFILING_NOT_STOPPED","NT_STATUS_PROFILING_NOT_STOPPED"),
0x00b9: ("NT_STATUS_COULD_NOT_INTERPRET","NT_STATUS_COULD_NOT_INTERPRET"),
0x00ba: ("NT_STATUS_FILE_IS_A_DIRECTORY","Access is denied."),
0x00bb: ("NT_STATUS_NOT_SUPPORTED","The network request is not supported."),
0x00bc: ("NT_STATUS_REMOTE_NOT_LISTENING","The remote computer is not available."),
0x00bd: ("NT_STATUS_DUPLICATE_NAME","A duplicate name exists on the network."),
0x00be: ("NT_STATUS_BAD_NETWORK_PATH","The network path was not found."),
0x00bf: ("NT_STATUS_NETWORK_BUSY","The network is busy."),
0x00c0: ("NT_STATUS_DEVICE_DOES_NOT_EXIST","The specified network resource or device is no longer available."),
0x00c1: ("NT_STATUS_TOO_MANY_COMMANDS","The network BIOS command limit has been reached."),
0x00c2: ("NT_STATUS_ADAPTER_HARDWARE_ERROR","A network adapter hardware error occurred."),
0x00c3: ("NT_STATUS_INVALID_NETWORK_RESPONSE","The specified server cannot perform the requested operation."),
0x00c4: ("NT_STATUS_UNEXPECTED_NETWORK_ERROR","An unexpected network error occurred."),
0x00c5: ("NT_STATUS_BAD_REMOTE_ADAPTER","The remote adapter is not compatible."),
0x00c6: ("NT_STATUS_PRINT_QUEUE_FULL","The printer queue is full."),
0x00c7: ("NT_STATUS_NO_SPOOL_SPACE","Space to store the file waiting to be printed is not available on the server."),
0x00c8: ("NT_STATUS_PRINT_CANCELLED","Your file waiting to be printed was deleted."),
0x00c9: ("NT_STATUS_NETWORK_NAME_DELETED","The specified network name is no longer available."),
0x00ca: ("NT_STATUS_NETWORK_ACCESS_DENIED","Network access is denied."),
0x00cb: ("NT_STATUS_BAD_DEVICE_TYPE","The network resource type is not correct."),
0x00cc: ("NT_STATUS_BAD_NETWORK_NAME","The network name cannot be found."),
0x00cd: ("NT_STATUS_TOO_MANY_NAMES","The name limit for the local computer network adapter card was exceeded."),
0x00ce: ("NT_STATUS_TOO_MANY_SESSIONS","The network BIOS session limit was exceeded."),
0x00cf: ("NT_STATUS_SHARING_PAUSED","The remote server has been paused or is in the process of being started."),
0x00d0: ("NT_STATUS_REQUEST_NOT_ACCEPTED","No more connections can be made to this remote computer at this time because there are already as many connections as the computer can accept."),
0x00d1: ("NT_STATUS_REDIRECTOR_PAUSED","The specified printer or disk device has been paused."),
0x00d2: ("NT_STATUS_NET_WRITE_FAULT","A write fault occurred on the network."),
0x00d3: ("NT_STATUS_PROFILING_AT_LIMIT","NT_STATUS_PROFILING_AT_LIMIT"),
0x00d4: ("NT_STATUS_NOT_SAME_DEVICE","The system cannot move the file to a different disk drive."),
0x00d5: ("NT_STATUS_FILE_RENAMED","NT_STATUS_FILE_RENAMED"),
0x00d6: ("NT_STATUS_VIRTUAL_CIRCUIT_CLOSED","The session was cancelled."),
0x00d7: ("NT_STATUS_NO_SECURITY_ON_OBJECT","Unable to perform a security operation on an object which has no associated security."),
0x00d8: ("NT_STATUS_CANT_WAIT","NT_STATUS_CANT_WAIT"),
0x00d9: ("NT_STATUS_PIPE_EMPTY","The pipe is being closed."),
0x00da: ("NT_STATUS_CANT_ACCESS_DOMAIN_INFO","Indicates a Windows NT Server could not be contacted or that objects within the domain are protected such that necessary information could not be retrieved."),
0x00db: ("NT_STATUS_CANT_TERMINATE_SELF","NT_STATUS_CANT_TERMINATE_SELF"),
0x00dc: ("NT_STATUS_INVALID_SERVER_STATE","The security account manager (SAM) or local security authority (LSA) server was in the wrong state to perform the security operation."),
0x00dd: ("NT_STATUS_INVALID_DOMAIN_STATE","The domain was in the wrong state to perform the security operation."),
0x00de: ("NT_STATUS_INVALID_DOMAIN_ROLE","This operation is only allowed for the Primary Domain Controller of the domain."),
0x00df: ("NT_STATUS_NO_SUCH_DOMAIN","The specified domain did not exist."),
0x00e0: ("NT_STATUS_DOMAIN_EXISTS","The specified domain already exists."),
0x00e1: ("NT_STATUS_DOMAIN_LIMIT_EXCEEDED","An attempt was made to exceed the limit on the number of domains per server."),
0x00e2: ("NT_STATUS_OPLOCK_NOT_GRANTED","NT_STATUS_OPLOCK_NOT_GRANTED"),
0x00e3: ("NT_STATUS_INVALID_OPLOCK_PROTOCOL","NT_STATUS_INVALID_OPLOCK_PROTOCOL"),
0x00e4: ("NT_STATUS_INTERNAL_DB_CORRUPTION","Unable to complete the requested operation because of either a catastrophic media failure or a data structure corruption on the disk."),
0x00e5: ("NT_STATUS_INTERNAL_ERROR","The security account database contains an internal inconsistency."),
0x00e6: ("NT_STATUS_GENERIC_NOT_MAPPED","Generic access types were contained in an access mask which should already be mapped to non-generic types."),
0x00e7: ("NT_STATUS_BAD_DESCRIPTOR_FORMAT","A security descriptor is not in the right format (absolute or self-relative)."),
0x00e8: ("NT_STATUS_INVALID_USER_BUFFER","The supplied user buffer is not valid for the requested operation."),
0x00e9: ("NT_STATUS_UNEXPECTED_IO_ERROR","NT_STATUS_UNEXPECTED_IO_ERROR"),
0x00ea: ("NT_STATUS_UNEXPECTED_MM_CREATE_ERR","NT_STATUS_UNEXPECTED_MM_CREATE_ERR"),
0x00eb: ("NT_STATUS_UNEXPECTED_MM_MAP_ERROR","NT_STATUS_UNEXPECTED_MM_MAP_ERROR"),
0x00ec: ("NT_STATUS_UNEXPECTED_MM_EXTEND_ERR","NT_STATUS_UNEXPECTED_MM_EXTEND_ERR"),
0x00ed: ("NT_STATUS_NOT_LOGON_PROCESS","The requested action is restricted for use by logon processes only. The calling process has not registered as a logon process."),
0x00ee: ("NT_STATUS_LOGON_SESSION_EXISTS","Cannot start a new logon session with an ID that is already in use."),
0x00ef: ("NT_STATUS_INVALID_PARAMETER_1","The parameter is incorrect."),
0x00f0: ("NT_STATUS_INVALID_PARAMETER_2","The parameter is incorrect."),
0x00f1: ("NT_STATUS_INVALID_PARAMETER_3","The parameter is incorrect."),
0x00f2: ("NT_STATUS_INVALID_PARAMETER_4","The parameter is incorrect."),
0x00f3: ("NT_STATUS_INVALID_PARAMETER_5","The parameter is incorrect."),
0x00f4: ("NT_STATUS_INVALID_PARAMETER_6","The parameter is incorrect."),
0x00f5: ("NT_STATUS_INVALID_PARAMETER_7","The parameter is incorrect."),
0x00f6: ("NT_STATUS_INVALID_PARAMETER_8","The parameter is incorrect."),
0x00f7: ("NT_STATUS_INVALID_PARAMETER_9","The parameter is incorrect."),
0x00f8: ("NT_STATUS_INVALID_PARAMETER_10","The parameter is incorrect."),
0x00f9: ("NT_STATUS_INVALID_PARAMETER_11","The parameter is incorrect."),
0x00fa: ("NT_STATUS_INVALID_PARAMETER_12","The parameter is incorrect."),
0x00fb: ("NT_STATUS_REDIRECTOR_NOT_STARTED","The system cannot find the path specified."),
0x00fc: ("NT_STATUS_REDIRECTOR_STARTED","NT_STATUS_REDIRECTOR_STARTED"),
0x00fd: ("NT_STATUS_STACK_OVERFLOW","Recursion too deep, stack overflowed."),
0x00fe: ("NT_STATUS_NO_SUCH_PACKAGE","A specified authentication package is unknown."),
0x00ff: ("NT_STATUS_BAD_FUNCTION_TABLE","NT_STATUS_BAD_FUNCTION_TABLE"),
0x0101: ("NT_STATUS_DIRECTORY_NOT_EMPTY","The directory is not empty."),
0x0102: ("NT_STATUS_FILE_CORRUPT_ERROR","The file or directory is corrupt and non-readable."),
0x0103: ("NT_STATUS_NOT_A_DIRECTORY","The directory name is invalid."),
0x0104: ("NT_STATUS_BAD_LOGON_SESSION_STATE","The logon session is not in a state that is consistent with the requested operation."),
0x0105: ("NT_STATUS_LOGON_SESSION_COLLISION","The logon session ID is already in use."),
0x0106: ("NT_STATUS_NAME_TOO_LONG","The filename or extension is too long."),
0x0107: ("NT_STATUS_FILES_OPEN","NT_STATUS_FILES_OPEN"),
0x0108: ("NT_STATUS_CONNECTION_IN_USE","The device is being accessed by an active process."),
0x0109: ("NT_STATUS_MESSAGE_NOT_FOUND","NT_STATUS_MESSAGE_NOT_FOUND"),
0x010a: ("NT_STATUS_PROCESS_IS_TERMINATING","Access is denied."),
0x010b: ("NT_STATUS_INVALID_LOGON_TYPE","A logon request contained an invalid logon type value."),
0x010c: ("NT_STATUS_NO_GUID_TRANSLATION","NT_STATUS_NO_GUID_TRANSLATION"),
0x010d: ("NT_STATUS_CANNOT_IMPERSONATE","Unable to impersonate via a named pipe until data has been read from that pipe."),
0x010e: ("NT_STATUS_IMAGE_ALREADY_LOADED","An instance of the service is already running."),
0x010f: ("NT_STATUS_ABIOS_NOT_PRESENT","NT_STATUS_ABIOS_NOT_PRESENT"),
0x0110: ("NT_STATUS_ABIOS_LID_NOT_EXIST","NT_STATUS_ABIOS_LID_NOT_EXIST"),
0x0111: ("NT_STATUS_ABIOS_LID_ALREADY_OWNED","NT_STATUS_ABIOS_LID_ALREADY_OWNED"),
0x0112: ("NT_STATUS_ABIOS_NOT_LID_OWNER","NT_STATUS_ABIOS_NOT_LID_OWNER"),
0x0113: ("NT_STATUS_ABIOS_INVALID_COMMAND","NT_STATUS_ABIOS_INVALID_COMMAND"),
0x0114: ("NT_STATUS_ABIOS_INVALID_LID","NT_STATUS_ABIOS_INVALID_LID"),
0x0115: ("NT_STATUS_ABIOS_SELECTOR_NOT_AVAILABLE","NT_STATUS_ABIOS_SELECTOR_NOT_AVAILABLE"),
0x0116: ("NT_STATUS_ABIOS_INVALID_SELECTOR","NT_STATUS_ABIOS_INVALID_SELECTOR"),
0x0117: ("NT_STATUS_NO_LDT","NT_STATUS_NO_LDT"),
0x0118: ("NT_STATUS_INVALID_LDT_SIZE","NT_STATUS_INVALID_LDT_SIZE"),
0x0119: ("NT_STATUS_INVALID_LDT_OFFSET","NT_STATUS_INVALID_LDT_OFFSET"),
0x011a: ("NT_STATUS_INVALID_LDT_DESCRIPTOR","NT_STATUS_INVALID_LDT_DESCRIPTOR"),
0x011b: ("NT_STATUS_INVALID_IMAGE_NE_FORMAT","%1 is not a valid Windows NT application."),
0x011c: ("NT_STATUS_RXACT_INVALID_STATE","The transaction state of a Registry subtree is incompatible with the requested operation."),
0x011d: ("NT_STATUS_RXACT_COMMIT_FAILURE","An internal security database corruption has been encountered."),
0x011e: ("NT_STATUS_MAPPED_FILE_SIZE_ZERO","The volume for a file has been externally altered such that the opened file is no longer valid."),
0x011f: ("NT_STATUS_TOO_MANY_OPENED_FILES","The system cannot open the file."),
0x0120: ("NT_STATUS_CANCELLED","The I/O operation has been aborted because of either a thread exit or an application request."),
0x0121: ("NT_STATUS_CANNOT_DELETE","Access is denied."),
0x0122: ("NT_STATUS_INVALID_COMPUTER_NAME","The format of the specified computer name is invalid."),
0x0123: ("NT_STATUS_FILE_DELETED","Access is denied."),
0x0124: ("NT_STATUS_SPECIAL_ACCOUNT","Cannot perform this operation on built-in accounts."),
0x0125: ("NT_STATUS_SPECIAL_GROUP","Cannot perform this operation on this built-in special group."),
0x0126: ("NT_STATUS_SPECIAL_USER","Cannot perform this operation on this built-in special user."),
0x0127: ("NT_STATUS_MEMBERS_PRIMARY_GROUP","The user cannot be removed from a group because the group is currently the user's primary group."),
0x0128: ("NT_STATUS_FILE_CLOSED","The handle is invalid."),
0x0129: ("NT_STATUS_TOO_MANY_THREADS","NT_STATUS_TOO_MANY_THREADS"),
0x012a: ("NT_STATUS_THREAD_NOT_IN_PROCESS","NT_STATUS_THREAD_NOT_IN_PROCESS"),
0x012b: ("NT_STATUS_TOKEN_ALREADY_IN_USE","The token is already in use as a primary token."),
0x012c: ("NT_STATUS_PAGEFILE_QUOTA_EXCEEDED","NT_STATUS_PAGEFILE_QUOTA_EXCEEDED"),
0x012d: ("NT_STATUS_COMMITMENT_LIMIT","The paging file is too small for this operation to complete."),
0x012e: ("NT_STATUS_INVALID_IMAGE_LE_FORMAT","%1 is not a valid Windows NT application."),
0x012f: ("NT_STATUS_INVALID_IMAGE_NOT_MZ","%1 is not a valid Windows NT application."),
0x0130: ("NT_STATUS_INVALID_IMAGE_PROTECT","%1 is not a valid Windows NT application."),
0x0131: ("NT_STATUS_INVALID_IMAGE_WIN_16","%1 is not a valid Windows NT application."),
0x0132: ("NT_STATUS_LOGON_SERVER_CONFLICT","NT_STATUS_LOGON_SERVER_CONFLICT"),
0x0133: ("NT_STATUS_TIME_DIFFERENCE_AT_DC","NT_STATUS_TIME_DIFFERENCE_AT_DC"),
0x0134: ("NT_STATUS_SYNCHRONIZATION_REQUIRED","NT_STATUS_SYNCHRONIZATION_REQUIRED"),
0x0135: ("NT_STATUS_DLL_NOT_FOUND","The specified module could not be found."),
0x0136: ("NT_STATUS_OPEN_FAILED","NT_STATUS_OPEN_FAILED"),
0x0137: ("NT_STATUS_IO_PRIVILEGE_FAILED","NT_STATUS_IO_PRIVILEGE_FAILED"),
0x0138: ("NT_STATUS_ORDINAL_NOT_FOUND","The operating system cannot run %1."),
0x0139: ("NT_STATUS_ENTRYPOINT_NOT_FOUND","The specified procedure could not be found."),
0x013a: ("NT_STATUS_CONTROL_C_EXIT","NT_STATUS_CONTROL_C_EXIT"),
0x013b: ("NT_STATUS_LOCAL_DISCONNECT","The specified network name is no longer available."),
0x013c: ("NT_STATUS_REMOTE_DISCONNECT","The specified network name is no longer available."),
0x013d: ("NT_STATUS_REMOTE_RESOURCES","The remote computer is not available."),
0x013e: ("NT_STATUS_LINK_FAILED","An unexpected network error occurred."),
0x013f: ("NT_STATUS_LINK_TIMEOUT","An unexpected network error occurred."),
0x0140: ("NT_STATUS_INVALID_CONNECTION","An unexpected network error occurred."),
0x0141: ("NT_STATUS_INVALID_ADDRESS","An unexpected network error occurred."),
0x0142: ("NT_STATUS_DLL_INIT_FAILED","A dynamic link library (DLL) initialization routine failed."),
0x0143: ("NT_STATUS_MISSING_SYSTEMFILE","NT_STATUS_MISSING_SYSTEMFILE"),
0x0144: ("NT_STATUS_UNHANDLED_EXCEPTION","NT_STATUS_UNHANDLED_EXCEPTION"),
0x0145: ("NT_STATUS_APP_INIT_FAILURE","NT_STATUS_APP_INIT_FAILURE"),
0x0146: ("NT_STATUS_PAGEFILE_CREATE_FAILED","NT_STATUS_PAGEFILE_CREATE_FAILED"),
0x0147: ("NT_STATUS_NO_PAGEFILE","NT_STATUS_NO_PAGEFILE"),
0x0148: ("NT_STATUS_INVALID_LEVEL","The system call level is not correct."),
0x0149: ("NT_STATUS_WRONG_PASSWORD_CORE","The specified network password is not correct."),
0x014a: ("NT_STATUS_ILLEGAL_FLOAT_CONTEXT","NT_STATUS_ILLEGAL_FLOAT_CONTEXT"),
0x014b: ("NT_STATUS_PIPE_BROKEN","The pipe has been ended."),
0x014c: ("NT_STATUS_REGISTRY_CORRUPT","The configuration registry database is corrupt."),
0x014d: ("NT_STATUS_REGISTRY_IO_FAILED","An I/O operation initiated by the Registry failed unrecoverably. The Registry could not read in, or write out, or flush, one of the files that contain the system's image of the Registry."),
0x014e: ("NT_STATUS_NO_EVENT_PAIR","NT_STATUS_NO_EVENT_PAIR"),
0x014f: ("NT_STATUS_UNRECOGNIZED_VOLUME","The volume does not contain a recognized file system. Please make sure that all required file system drivers are loaded and that the volume is not corrupt."),
0x0150: ("NT_STATUS_SERIAL_NO_DEVICE_INITED","No serial device was successfully initialized. The serial driver will unload."),
0x0151: ("NT_STATUS_NO_SUCH_ALIAS","The specified local group does not exist."),
0x0152: ("NT_STATUS_MEMBER_NOT_IN_ALIAS","The specified account name is not a member of the local group."),
0x0153: ("NT_STATUS_MEMBER_IN_ALIAS","The specified account name is already a member of the local group."),
0x0154: ("NT_STATUS_ALIAS_EXISTS","The specified local group already exists."),
0x0155: ("NT_STATUS_LOGON_NOT_GRANTED","Logon failure: the user has not been granted the requested logon type at this computer."),
0x0156: ("NT_STATUS_TOO_MANY_SECRETS","The maximum number of secrets that may be stored in a single system has been exceeded."),
0x0157: ("NT_STATUS_SECRET_TOO_LONG","The length of a secret exceeds the maximum length allowed."),
0x0158: ("NT_STATUS_INTERNAL_DB_ERROR","The local security authority database contains an internal inconsistency."),
0x0159: ("NT_STATUS_FULLSCREEN_MODE","The requested operation cannot be performed in full-screen mode."),
0x015a: ("NT_STATUS_TOO_MANY_CONTEXT_IDS","During a logon attempt, the user's security context accumulated too many security IDs."),
0x015b: ("NT_STATUS_LOGON_TYPE_NOT_GRANTED","Logon failure: the user has not been granted the requested logon type at this computer."),
0x015c: ("NT_STATUS_NOT_REGISTRY_FILE","The system has attempted to load or restore a file into the Registry, but the specified file is not in a Registry file format."),
0x015d: ("NT_STATUS_NT_CROSS_ENCRYPTION_REQUIRED","A cross-encrypted password is necessary to change a user password."),
0x015e: ("NT_STATUS_DOMAIN_CTRLR_CONFIG_ERROR","NT_STATUS_DOMAIN_CTRLR_CONFIG_ERROR"),
0x015f: ("NT_STATUS_FT_MISSING_MEMBER","The request could not be performed because of an I/O device error."),
0x0160: ("NT_STATUS_ILL_FORMED_SERVICE_ENTRY","NT_STATUS_ILL_FORMED_SERVICE_ENTRY"),
0x0161: ("NT_STATUS_ILLEGAL_CHARACTER","NT_STATUS_ILLEGAL_CHARACTER"),
0x0162: ("NT_STATUS_UNMAPPABLE_CHARACTER","No mapping for the Unicode character exists in the target multi-byte code page."),
0x0163: ("NT_STATUS_UNDEFINED_CHARACTER","NT_STATUS_UNDEFINED_CHARACTER"),
0x0164: ("NT_STATUS_FLOPPY_VOLUME","NT_STATUS_FLOPPY_VOLUME"),
0x0165: ("NT_STATUS_FLOPPY_ID_MARK_NOT_FOUND","No ID address mark was found on the floppy disk."),
0x0166: ("NT_STATUS_FLOPPY_WRONG_CYLINDER","Mismatch between the floppy disk sector ID field and the floppy disk controller track address."),
0x0167: ("NT_STATUS_FLOPPY_UNKNOWN_ERROR","The floppy disk controller reported an error that is not recognized by the floppy disk driver."),
0x0168: ("NT_STATUS_FLOPPY_BAD_REGISTERS","The floppy disk controller returned inconsistent results in its registers."),
0x0169: ("NT_STATUS_DISK_RECALIBRATE_FAILED","While accessing the hard disk, a recalibrate operation failed, even after retries."),
0x016a: ("NT_STATUS_DISK_OPERATION_FAILED","While accessing the hard disk, a disk operation failed even after retries."),
0x016b: ("NT_STATUS_DISK_RESET_FAILED","While accessing the hard disk, a disk controller reset was needed, but even that failed."),
0x016c: ("NT_STATUS_SHARED_IRQ_BUSY","Unable to open a device that was sharing an interrupt request (IRQ) with other devices. At least one other device that uses that IRQ was already opened."),
0x016d: ("NT_STATUS_FT_ORPHANING","The request could not be performed because of an I/O device error."),
0x0172: ("NT_STATUS_PARTITION_FAILURE","Tape could not be partitioned."),
0x0173: ("NT_STATUS_INVALID_BLOCK_LENGTH","When accessing a new tape of a multivolume partition, the current blocksize is incorrect."),
0x0174: ("NT_STATUS_DEVICE_NOT_PARTITIONED","Tape partition information could not be found when loading a tape."),
0x0175: ("NT_STATUS_UNABLE_TO_LOCK_MEDIA","Unable to lock the media eject mechanism."),
0x0176: ("NT_STATUS_UNABLE_TO_UNLOAD_MEDIA","Unable to unload the media."),
0x0177: ("NT_STATUS_EOM_OVERFLOW","Physical end of tape encountered."),
0x0178: ("NT_STATUS_NO_MEDIA","No media in drive."),
0x017a: ("NT_STATUS_NO_SUCH_MEMBER","A new member could not be added to a local group because the member does not exist."),
0x017b: ("NT_STATUS_INVALID_MEMBER","A new member could not be added to a local group because the member has the wrong account type."),
0x017c: ("NT_STATUS_KEY_DELETED","Illegal operation attempted on a Registry key which has been marked for deletion."),
0x017d: ("NT_STATUS_NO_LOG_SPACE","System could not allocate the required space in a Registry log."),
0x017e: ("NT_STATUS_TOO_MANY_SIDS","Too many security IDs have been specified."),
0x017f: ("NT_STATUS_LM_CROSS_ENCRYPTION_REQUIRED","A cross-encrypted password is necessary to change this user password."),
0x0180: ("NT_STATUS_KEY_HAS_CHILDREN","Cannot create a symbolic link in a Registry key that already has subkeys or values."),
0x0181: ("NT_STATUS_CHILD_MUST_BE_VOLATILE","Cannot create a stable subkey under a volatile parent key."),
0x0182: ("NT_STATUS_DEVICE_CONFIGURATION_ERROR","The parameter is incorrect."),
0x0183: ("NT_STATUS_DRIVER_INTERNAL_ERROR","The request could not be performed because of an I/O device error."),
0x0184: ("NT_STATUS_INVALID_DEVICE_STATE","The device does not recognize the command."),
0x0185: ("NT_STATUS_IO_DEVICE_ERROR","The request could not be performed because of an I/O device error."),
0x0186: ("NT_STATUS_DEVICE_PROTOCOL_ERROR","The request could not be performed because of an I/O device error."),
0x0187: ("NT_STATUS_BACKUP_CONTROLLER","NT_STATUS_BACKUP_CONTROLLER"),
0x0188: ("NT_STATUS_LOG_FILE_FULL","The event log file is full."),
0x0189: ("NT_STATUS_TOO_LATE","The media is write protected."),
0x018a: ("NT_STATUS_NO_TRUST_LSA_SECRET","The workstation does not have a trust secret."),
0x018b: ("NT_STATUS_NO_TRUST_SAM_ACCOUNT","The SAM database on the Windows NT Server does not have a computer account for this workstation trust relationship."),
0x018c: ("NT_STATUS_TRUSTED_DOMAIN_FAILURE","The trust relationship between the primary domain and the trusted domain failed."),
0x018d: ("NT_STATUS_TRUSTED_RELATIONSHIP_FAILURE","The trust relationship between this workstation and the primary domain failed."),
0x018e: ("NT_STATUS_EVENTLOG_FILE_CORRUPT","The event log file is corrupt."),
0x018f: ("NT_STATUS_EVENTLOG_CANT_START","No event log file could be opened, so the event logging service did not start."),
0x0190: ("NT_STATUS_TRUST_FAILURE","The network logon failed."),
0x0191: ("NT_STATUS_MUTANT_LIMIT_EXCEEDED","NT_STATUS_MUTANT_LIMIT_EXCEEDED"),
0x0192: ("NT_STATUS_NETLOGON_NOT_STARTED","An attempt was made to logon, but the network logon service was not started."),
0x0193: ("NT_STATUS_ACCOUNT_EXPIRED","The user's account has expired."),
0x0194: ("NT_STATUS_POSSIBLE_DEADLOCK","A potential deadlock condition has been detected."),
0x0195: ("NT_STATUS_NETWORK_CREDENTIAL_CONFLICT","The credentials supplied conflict with an existing set of credentials."),
0x0196: ("NT_STATUS_REMOTE_SESSION_LIMIT","An attempt was made to establish a session to a network server, but there are already too many sessions established to that server."),
0x0197: ("NT_STATUS_EVENTLOG_FILE_CHANGED","The event log file has changed between reads."),
0x0198: ("NT_STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT","The account used is an interdomain trust account. Use your global user account or local user account to access this server."),
0x0199: ("NT_STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT","The account used is a Computer Account. Use your global user account or local user account to access this server."),
0x019a: ("NT_STATUS_NOLOGON_SERVER_TRUST_ACCOUNT","The account used is a server trust account. Use your global user account or local user account to access this server."),
0x019b: ("NT_STATUS_DOMAIN_TRUST_INCONSISTENT","The name or security ID (SID) of the domain specified is inconsistent with the trust information for that domain."),
0x019c: ("NT_STATUS_FS_DRIVER_REQUIRED","NT_STATUS_FS_DRIVER_REQUIRED"),
0x0202: ("NT_STATUS_NO_USER_SESSION_KEY","There is no user session key for the specified logon session."),
0x0203: ("NT_STATUS_USER_SESSION_DELETED","An unexpected network error occurred."),
0x0204: ("NT_STATUS_RESOURCE_LANG_NOT_FOUND","The specified resource language ID cannot be found in the image file."),
0x0205: ("NT_STATUS_INSUFF_SERVER_RESOURCES","Not enough server storage is available to process this command."),
0x0206: ("NT_STATUS_INVALID_BUFFER_SIZE","The supplied user buffer is not valid for the requested operation."),
0x0207: ("NT_STATUS_INVALID_ADDRESS_COMPONENT","The format of the specified network name is invalid."),
0x0208: ("NT_STATUS_INVALID_ADDRESS_WILDCARD","The format of the specified network name is invalid."),
0x0209: ("NT_STATUS_TOO_MANY_ADDRESSES","The name limit for the local computer network adapter card was exceeded."),
0x020a: ("NT_STATUS_ADDRESS_ALREADY_EXISTS","A duplicate name exists on the network."),
0x020b: ("NT_STATUS_ADDRESS_CLOSED","The specified network name is no longer available."),
0x020c: ("NT_STATUS_CONNECTION_DISCONNECTED","The specified network name is no longer available."),
0x020d: ("NT_STATUS_CONNECTION_RESET","The specified network name is no longer available."),
0x020e: ("NT_STATUS_TOO_MANY_NODES","The name limit for the local computer network adapter card was exceeded."),
0x020f: ("NT_STATUS_TRANSACTION_ABORTED","An unexpected network error occurred."),
0x0210: ("NT_STATUS_TRANSACTION_TIMED_OUT","An unexpected network error occurred."),
0x0211: ("NT_STATUS_TRANSACTION_NO_RELEASE","An unexpected network error occurred."),
0x0212: ("NT_STATUS_TRANSACTION_NO_MATCH","An unexpected network error occurred."),
0x0213: ("NT_STATUS_TRANSACTION_RESPONDED","An unexpected network error occurred."),
0x0214: ("NT_STATUS_TRANSACTION_INVALID_ID","An unexpected network error occurred."),
0x0215: ("NT_STATUS_TRANSACTION_INVALID_TYPE","An unexpected network error occurred."),
0x0216: ("NT_STATUS_NOT_SERVER_SESSION","The network request is not supported."),
0x0217: ("NT_STATUS_NOT_CLIENT_SESSION","The network request is not supported."),
0x0218: ("NT_STATUS_CANNOT_LOAD_REGISTRY_FILE","NT_STATUS_CANNOT_LOAD_REGISTRY_FILE"),
0x0219: ("NT_STATUS_DEBUG_ATTACH_FAILED","NT_STATUS_DEBUG_ATTACH_FAILED"),
0x021a: ("NT_STATUS_SYSTEM_PROCESS_TERMINATED","NT_STATUS_SYSTEM_PROCESS_TERMINATED"),
0x021b: ("NT_STATUS_DATA_NOT_ACCEPTED","NT_STATUS_DATA_NOT_ACCEPTED"),
0x021c: ("NT_STATUS_NO_BROWSER_SERVERS_FOUND","The list of servers for this workgroup is not currently available"),
0x021d: ("NT_STATUS_VDM_HARD_ERROR","NT_STATUS_VDM_HARD_ERROR"),
0x021e: ("NT_STATUS_DRIVER_CANCEL_TIMEOUT","NT_STATUS_DRIVER_CANCEL_TIMEOUT"),
0x021f: ("NT_STATUS_REPLY_MESSAGE_MISMATCH","NT_STATUS_REPLY_MESSAGE_MISMATCH"),
0x0220: ("NT_STATUS_MAPPED_ALIGNMENT","The base address or the file offset specified does not have the proper alignment."),
0x0221: ("NT_STATUS_IMAGE_CHECKSUM_MISMATCH","%1 is not a valid Windows NT application."),
0x0222: ("NT_STATUS_LOST_WRITEBEHIND_DATA","NT_STATUS_LOST_WRITEBEHIND_DATA"),
0x0223: ("NT_STATUS_CLIENT_SERVER_PARAMETERS_INVALID","NT_STATUS_CLIENT_SERVER_PARAMETERS_INVALID"),
0x0224: ("NT_STATUS_PASSWORD_MUST_CHANGE","The user must change his password before he logs on the first time."),
0x0225: ("NT_STATUS_NOT_FOUND","NT_STATUS_NOT_FOUND"),
0x0226: ("NT_STATUS_NOT_TINY_STREAM","NT_STATUS_NOT_TINY_STREAM"),
0x0227: ("NT_STATUS_RECOVERY_FAILURE","NT_STATUS_RECOVERY_FAILURE"),
0x0228: ("NT_STATUS_STACK_OVERFLOW_READ","NT_STATUS_STACK_OVERFLOW_READ"),
0x0229: ("NT_STATUS_FAIL_CHECK","NT_STATUS_FAIL_CHECK"),
0x022a: ("NT_STATUS_DUPLICATE_OBJECTID","NT_STATUS_DUPLICATE_OBJECTID"),
0x022b: ("NT_STATUS_OBJECTID_EXISTS","NT_STATUS_OBJECTID_EXISTS"),
0x022c: ("NT_STATUS_CONVERT_TO_LARGE","NT_STATUS_CONVERT_TO_LARGE"),
0x022d: ("NT_STATUS_RETRY","NT_STATUS_RETRY"),
0x022e: ("NT_STATUS_FOUND_OUT_OF_SCOPE","NT_STATUS_FOUND_OUT_OF_SCOPE"),
0x022f: ("NT_STATUS_ALLOCATE_BUCKET","NT_STATUS_ALLOCATE_BUCKET"),
0x0230: ("NT_STATUS_PROPSET_NOT_FOUND","NT_STATUS_PROPSET_NOT_FOUND"),
0x0231: ("NT_STATUS_MARSHALL_OVERFLOW","NT_STATUS_MARSHALL_OVERFLOW"),
0x0232: ("NT_STATUS_INVALID_VARIANT","NT_STATUS_INVALID_VARIANT"),
0x0233: ("NT_STATUS_DOMAIN_CONTROLLER_NOT_FOUND","Could not find the domain controller for this domain."),
0x0234: ("NT_STATUS_ACCOUNT_LOCKED_OUT","The referenced account is currently locked out and may not be logged on to."),
0x0235: ("NT_STATUS_HANDLE_NOT_CLOSABLE","The handle is invalid."),
0x0236: ("NT_STATUS_CONNECTION_REFUSED","The remote system refused the network connection."),
0x0237: ("NT_STATUS_GRACEFUL_DISCONNECT","The network connection was gracefully closed."),
0x0238: ("NT_STATUS_ADDRESS_ALREADY_ASSOCIATED","The network transport endpoint already has an address associated with it."),
0x0239: ("NT_STATUS_ADDRESS_NOT_ASSOCIATED","An address has not yet been associated with the network endpoint."),
0x023a: ("NT_STATUS_CONNECTION_INVALID","An operation was attempted on a non-existent network connection."),
0x023b: ("NT_STATUS_CONNECTION_ACTIVE","An invalid operation was attempted on an active network connection."),
0x023c: ("NT_STATUS_NETWORK_UNREACHABLE","The remote network is not reachable by the transport."),
0x023d: ("NT_STATUS_HOST_UNREACHABLE","The remote system is not reachable by the transport."),
0x023e: ("NT_STATUS_PROTOCOL_UNREACHABLE","The remote system does not support the transport protocol."),
0x023f: ("NT_STATUS_PORT_UNREACHABLE","No service is operating at the destination network endpoint on the remote system."),
0x0240: ("NT_STATUS_REQUEST_ABORTED","The request was aborted."),
0x0241: ("NT_STATUS_CONNECTION_ABORTED","The network connection was aborted by the local system."),
0x0242: ("NT_STATUS_BAD_COMPRESSION_BUFFER","NT_STATUS_BAD_COMPRESSION_BUFFER"),
0x0243: ("NT_STATUS_USER_MAPPED_FILE","The requested operation cannot be performed on a file with a user mapped section open."),
0x0244: ("NT_STATUS_AUDIT_FAILED","NT_STATUS_AUDIT_FAILED"),
0x0245: ("NT_STATUS_TIMER_RESOLUTION_NOT_SET","NT_STATUS_TIMER_RESOLUTION_NOT_SET"),
0x0246: ("NT_STATUS_CONNECTION_COUNT_LIMIT","A connection to the server could not be made because the limit on the number of concurrent connections for this account has been reached."),
0x0247: ("NT_STATUS_LOGIN_TIME_RESTRICTION","Attempting to login during an unauthorized time of day for this account."),
0x0248: ("NT_STATUS_LOGIN_WKSTA_RESTRICTION","The account is not authorized to login from this station."),
0x0249: ("NT_STATUS_IMAGE_MP_UP_MISMATCH","%1 is not a valid Windows NT application."),
0x0250: ("NT_STATUS_INSUFFICIENT_LOGON_INFO","NT_STATUS_INSUFFICIENT_LOGON_INFO"),
0x0251: ("NT_STATUS_BAD_DLL_ENTRYPOINT","NT_STATUS_BAD_DLL_ENTRYPOINT"),
0x0252: ("NT_STATUS_BAD_SERVICE_ENTRYPOINT","NT_STATUS_BAD_SERVICE_ENTRYPOINT"),
0x0253: ("NT_STATUS_LPC_REPLY_LOST","The security account database contains an internal inconsistency."),
0x0254: ("NT_STATUS_IP_ADDRESS_CONFLICT1","NT_STATUS_IP_ADDRESS_CONFLICT1"),
0x0255: ("NT_STATUS_IP_ADDRESS_CONFLICT2","NT_STATUS_IP_ADDRESS_CONFLICT2"),
0x0256: ("NT_STATUS_REGISTRY_QUOTA_LIMIT","NT_STATUS_REGISTRY_QUOTA_LIMIT"),
0x0257: ("NT_STATUS_PATH_NOT_COVERED","The remote system is not reachable by the transport."),
0x0258: ("NT_STATUS_NO_CALLBACK_ACTIVE","NT_STATUS_NO_CALLBACK_ACTIVE"),
0x0259: ("NT_STATUS_LICENSE_QUOTA_EXCEEDED","The service being accessed is licensed for a particular number of connections. No more connections can be made to the service at this time because there are already as many connections as the service can accept."),
0x025a: ("NT_STATUS_PWD_TOO_SHORT","NT_STATUS_PWD_TOO_SHORT"),
0x025b: ("NT_STATUS_PWD_TOO_RECENT","NT_STATUS_PWD_TOO_RECENT"),
0x025c: ("NT_STATUS_PWD_HISTORY_CONFLICT","NT_STATUS_PWD_HISTORY_CONFLICT"),
0x025e: ("NT_STATUS_PLUGPLAY_NO_DEVICE","The specified service is disabled and cannot be started."),
0x025f: ("NT_STATUS_UNSUPPORTED_COMPRESSION","NT_STATUS_UNSUPPORTED_COMPRESSION"),
0x0260: ("NT_STATUS_INVALID_HW_PROFILE","NT_STATUS_INVALID_HW_PROFILE"),
0x0261: ("NT_STATUS_INVALID_PLUGPLAY_DEVICE_PATH","NT_STATUS_INVALID_PLUGPLAY_DEVICE_PATH"),
0x0262: ("NT_STATUS_DRIVER_ORDINAL_NOT_FOUND","The operating system cannot run %1."),
0x0263: ("NT_STATUS_DRIVER_ENTRYPOINT_NOT_FOUND","The specified procedure could not be found."),
0x0264: ("NT_STATUS_RESOURCE_NOT_OWNED","Attempt to release mutex not owned by caller."),
0x0265: ("NT_STATUS_TOO_MANY_LINKS","An attempt was made to create more links on a file than the file system supports."),
0x0266: ("NT_STATUS_QUOTA_LIST_INCONSISTENT","NT_STATUS_QUOTA_LIST_INCONSISTENT"),
0x0267: ("NT_STATUS_FILE_IS_OFFLINE","NT_STATUS_FILE_IS_OFFLINE"),
0x0275: ("NT_STATUS_NOT_A_REPARSE_POINT","NT_STATUS_NOT_A_REPARSE_POINT"),
0x0EDE: ("NT_STATUS_NO_SUCH_JOB","NT_STATUS_NO_SUCH_JOB"),
}
dos_msgs = {
ERRbadfunc: ("ERRbadfunc", "Invalid function."),
ERRbadfile: ("ERRbadfile", "File not found."),
ERRbadpath: ("ERRbadpath", "Directory invalid."),
ERRnofids: ("ERRnofids", "No file descriptors available"),
ERRnoaccess: ("ERRnoaccess", "Access denied."),
ERRbadfid: ("ERRbadfid", "Invalid file handle."),
ERRbadmcb: ("ERRbadmcb", "Memory control blocks destroyed."),
ERRnomem: ("ERRnomem", "Insufficient server memory to perform the requested function."),
ERRbadmem: ("ERRbadmem", "Invalid memory block address."),
ERRbadenv: ("ERRbadenv", "Invalid environment."),
11: ("ERRbadformat", "Invalid format."),
ERRbadaccess: ("ERRbadaccess", "Invalid open mode."),
ERRbaddata: ("ERRbaddata", "Invalid data."),
ERRres: ("ERRres", "reserved."),
ERRbaddrive: ("ERRbaddrive", "Invalid drive specified."),
ERRremcd: ("ERRremcd", "A Delete Directory request attempted to remove the server's current directory."),
ERRdiffdevice: ("ERRdiffdevice", "Not same device."),
ERRnofiles: ("ERRnofiles", "A File Search command can find no more files matching the specified criteria."),
ERRbadshare: ("ERRbadshare", "The sharing mode specified for an Open conflicts with existing FIDs on the file."),
ERRlock: ("ERRlock", "A Lock request conflicted with an existing lock or specified an invalid mode, or an Unlock requested attempted to remove a lock held by another process."),
ERRunsup: ("ERRunsup", "The operation is unsupported"),
ERRnosuchshare: ("ERRnosuchshare", "You specified an invalid share name"),
ERRfilexists: ("ERRfilexists", "The file named in a Create Directory, Make New File or Link request already exists."),
ERRinvalidname: ("ERRinvalidname", "Invalid name"),
ERRbadpipe: ("ERRbadpipe", "Pipe invalid."),
ERRpipebusy: ("ERRpipebusy", "All instances of the requested pipe are busy."),
ERRpipeclosing: ("ERRpipeclosing", "Pipe close in progress."),
ERRnotconnected: ("ERRnotconnected", "No process on other end of pipe."),
ERRmoredata: ("ERRmoredata", "There is more data to be returned."),
ERRinvgroup: ("ERRinvgroup", "Invalid workgroup (try the -W option)"),
ERRlogonfailure: ("ERRlogonfailure", "Logon failure"),
ERRdiskfull: ("ERRdiskfull", "Disk full"),
ERRgeneral: ("ERRgeneral", "General failure"),
ERRunknownlevel: ("ERRunknownlevel", "Unknown info level")
}
server_msgs = {
1: ("ERRerror", "Non-specific error code."),
2: ("ERRbadpw", "Bad password - name/password pair in a Tree Connect or Session Setup are invalid."),
3: ("ERRbadtype", "reserved."),
4: ("ERRaccess", "The requester does not have the necessary access rights within the specified context for the requested function. The context is defined by the TID or the UID."),
5: ("ERRinvnid", "The tree ID (TID) specified in a command was invalid."),
6: ("ERRinvnetname", "Invalid network name in tree connect."),
7: ("ERRinvdevice", "Invalid device - printer request made to non-printer connection or non-printer request made to printer connection."),
49: ("ERRqfull", "Print queue full (files) -- returned by open print file."),
50: ("ERRqtoobig", "Print queue full -- no space."),
51: ("ERRqeof", "EOF on print queue dump."),
52: ("ERRinvpfid", "Invalid print file FID."),
64: ("ERRsmbcmd", "The server did not recognize the command received."),
65: ("ERRsrverror","The server encountered an internal error, e.g., system file unavailable."),
67: ("ERRfilespecs", "The file handle (FID) and pathname parameters contained an invalid combination of values."),
68: ("ERRreserved", "reserved."),
69: ("ERRbadpermits", "The access permissions specified for a file or directory are not a valid combination. The server cannot set the requested attribute."),
70: ("ERRreserved", "reserved."),
71: ("ERRsetattrmode", "The attribute mode in the Set File Attribute request is invalid."),
81: ("ERRpaused", "Server is paused."),
82: ("ERRmsgoff", "Not receiving messages."),
83: ("ERRnoroom", "No room to buffer message."),
87: ("ERRrmuns", "Too many remote user names."),
88: ("ERRtimeout", "Operation timed out."),
89: ("ERRnoresource", "No resources currently available for request."),
90: ("ERRtoomanyuids", "Too many UIDs active on this session."),
91: ("ERRbaduid", "The UID is not known as a valid ID on this session."),
250: ("ERRusempx","Temp unable to support Raw, use MPX mode."),
251: ("ERRusestd","Temp unable to support Raw, use standard read/write."),
252: ("ERRcontmpx", "Continue in MPX mode."),
253: ("ERRreserved", "reserved."),
254: ("ERRreserved", "reserved."),
0xFFFF: ("ERRnosupport", "Function not supported.")
}
# Error classes
ERRDOS = 0x1
error_classes = { 0: ("SUCCESS", {}),
ERRDOS: ("ERRDOS", dos_msgs),
0x02: ("ERRSRV",server_msgs),
0x03: ("ERRHRD",hard_msgs),
0x04: ("ERRXOS", {} ),
0xE1: ("ERRRMX1", {} ),
0xE2: ("ERRRMX2", {} ),
0xE3: ("ERRRMX3", {} ),
0xC000: ("ERRNT", nt_msgs),
0xFF: ("ERRCMD", {} ) }
def __init__( self, str, error_class, error_code, nt_status = 0):
Exception.__init__(self, str)
self._args = str
if nt_status:
self.error_class = error_code
self.error_code = error_class
else:
self.error_class = error_class
self.error_code = error_code
def get_error_class( self ):
return self.error_class
def get_error_code( self ):
return self.error_code
def __str__( self ):
error_class = SessionError.error_classes.get( self.error_class, None )
if not error_class:
error_code_str = self.error_code
error_class_str = self.error_class
else:
error_class_str = error_class[0]
error_code = error_class[1].get( self.error_code, None )
if not error_code:
error_code_str = self.error_code
else:
error_code_str = '%s(%s)' % (error_code)
return 'SMB SessionError: class: %s, code: %s' % (error_class_str, error_code_str)
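# Illustrative helper (an editorial sketch, not part of the original library):
# resolves an (error_class, error_code) pair through the error_classes table
# above, mirroring what SessionError.__str__ does. For example,
# _describe_smb_error(0x01, ERRnoaccess) yields
# 'class: ERRDOS, code: ERRnoaccess (Access denied.)'.
def _describe_smb_error(error_class, error_code):
    entry = SessionError.error_classes.get(error_class, None)
    if entry is None:
        return 'class: 0x%x, code: 0x%x' % (error_class, error_code)
    class_name, code_table = entry
    code_entry = code_table.get(error_code, None)
    if code_entry is None:
        return 'class: %s, code: 0x%x' % (class_name, error_code)
    return 'class: %s, code: %s (%s)' % (class_name, code_entry[0], code_entry[1])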
# Raised when a feature that is present or required in the protocol is not
# currently supported by pysmb
class UnsupportedFeature(Exception): pass
# Contains information about a SMB shared device/service
class SharedDevice:
def __init__(self, name, type, comment):
self.__name = name
self.__type = type
self.__comment = comment
def get_name(self):
return self.__name
def get_type(self):
return self.__type
def get_comment(self):
return self.__comment
def __repr__(self):
return '<SharedDevice instance: name=' + self.__name + ', type=' + str(self.__type) + ', comment="' + self.__comment + '">'
# Contains information about the shared file/directory
class SharedFile:
def __init__(self, ctime, atime, mtime, filesize, allocsize, attribs, shortname, longname):
self.__ctime = ctime
self.__atime = atime
self.__mtime = mtime
self.__filesize = filesize
self.__allocsize = allocsize
self.__attribs = attribs
try:
self.__shortname = shortname[:string.index(shortname, '\0')]
except ValueError:
self.__shortname = shortname
try:
self.__longname = longname[:string.index(longname, '\0')]
except ValueError:
self.__longname = longname
def get_ctime(self):
return self.__ctime
def get_ctime_epoch(self):
return self.__convert_smbtime(self.__ctime)
def get_mtime(self):
return self.__mtime
def get_mtime_epoch(self):
return self.__convert_smbtime(self.__mtime)
def get_atime(self):
return self.__atime
def get_atime_epoch(self):
return self.__convert_smbtime(self.__atime)
def get_filesize(self):
return self.__filesize
def get_allocsize(self):
return self.__allocsize
def get_attributes(self):
return self.__attribs
def is_archive(self):
return self.__attribs & ATTR_ARCHIVE
def is_compressed(self):
return self.__attribs & ATTR_COMPRESSED
def is_normal(self):
return self.__attribs & ATTR_NORMAL
def is_hidden(self):
return self.__attribs & ATTR_HIDDEN
def is_readonly(self):
return self.__attribs & ATTR_READONLY
def is_temporary(self):
return self.__attribs & ATTR_TEMPORARY
def is_directory(self):
return self.__attribs & ATTR_DIRECTORY
def is_system(self):
return self.__attribs & ATTR_SYSTEM
def get_shortname(self):
return self.__shortname
def get_longname(self):
return self.__longname
def __repr__(self):
return '<SharedFile instance: shortname="' + self.__shortname + '", longname="' + self.__longname + '", filesize=' + str(self.__filesize) + '>'
def __convert_smbtime(self, t):
x = t >> 32
y = t & 0xffffffffL
geo_cal_offset = 11644473600.0 # = 369.0 * 365.25 * 24 * 60 * 60 - (3.0 * 24 * 60 * 60 + 6.0 * 60 * 60)
return ((x * 4.0 * (1 << 30) + (y & 0xfff00000L)) * 1.0e-7 - geo_cal_offset)
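# Editorial note on __convert_smbtime above (not from the original source):
# SMB timestamps are Windows FILETIME values, i.e. 100-nanosecond ticks since
# 1601-01-01, split into two 32-bit halves on the wire. geo_cal_offset
# (11644473600 seconds) is the distance between the 1601 and 1970 epochs, so
# the method reassembles the 64-bit tick count, scales it to seconds (1.0e-7)
# and shifts it onto the Unix epoch. A rough, self-contained equivalent,
# assuming the caller has already combined the two halves into one integer:
def _filetime_to_unix_epoch(filetime_ticks):
    return filetime_ticks * 1.0e-7 - 11644473600.0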
# Contains information about an SMB machine
class SMBMachine:
def __init__(self, nbname, type, comment):
self.__nbname = nbname
self.__type = type
self.__comment = comment
def __repr__(self):
return '<SMBMachine instance: nbname="' + self.__nbname + '", type=' + hex(self.__type) + ', comment="' + self.__comment + '">'
class SMBDomain:
def __init__(self, nbgroup, type, master_browser):
self.__nbgroup = nbgroup
self.__type = type
self.__master_browser = master_browser
def __repr__(self):
return '<SMBDomain instance: nbgroup="' + self.__nbgroup + '", type=' + hex(self.__type) + ', master browser="' + self.__master_browser + '">'
# Represents a SMB Packet
class NewSMBPacket(Structure):
structure = (
('Signature', '"\xffSMB'),
('Command','B=0'),
('ErrorClass','B=0'),
('_reserved','B=0'),
('ErrorCode','<H=0'),
('Flags1','B=0'),
('Flags2','<H=0'),
('PIDHigh','<H=0'),
('SecurityFeatures','8s=""'),
('Reserved','<H=0'),
('Tid','<H=0xffff'),
('Pid','<H=0'),
('Uid','<H=0'),
('Mid','<H=0'),
('Data','*:'),
)
def __init__(self, **kargs):
Structure.__init__(self, **kargs)
if 'Flags2' not in self.fields:
self['Flags2'] = 0
if 'Flags1' not in self.fields:
self['Flags1'] = 0
if 'data' not in kargs:
self['Data'] = []
def addCommand(self, command):
if len(self['Data']) == 0:
self['Command'] = command.command
else:
self['Data'][-1]['Parameters']['AndXCommand'] = command.command
self['Data'][-1]['Parameters']['AndXOffset'] = len(self)
self['Data'].append(command)
def isMoreData(self):
return (self['Command'] in [SMB.SMB_COM_TRANSACTION, SMB.SMB_COM_READ_ANDX, SMB.SMB_COM_READ_RAW] and
self['ErrorClass'] == 1 and self['ErrorCode'] == SessionError.ERRmoredata)
def isMoreProcessingRequired(self):
return self['ErrorClass'] == 0x16 and self['ErrorCode'] == 0xc000
def isValidAnswer(self, cmd):
# this was inside a loop reading more from the net (with recv_packet(None))
if self['Command'] == cmd:
if (self['ErrorClass'] == 0x00 and
self['ErrorCode'] == 0x00):
return 1
elif self.isMoreData():
return 1
elif self.isMoreProcessingRequired():
return 1
raise SessionError("SMB Library Error", self['ErrorClass'] + (self['_reserved'] << 8), self['ErrorCode'], self['Flags2'] & SMB.FLAGS2_NT_STATUS)
else:
raise UnsupportedFeature("Unexpected answer from server: Got %d, Expected %d" % (self['Command'], cmd))
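# Illustrative sketch (editorial example, not part of the original module):
# typical client-side use of NewSMBPacket, chaining a command onto the packet
# with addCommand() and serializing it for the wire. The flag and field
# values below are examples only.
def _example_build_echo_packet(text = 'ping'):
    pkt = NewSMBPacket()
    pkt['Flags1'] = SMB.FLAGS1_PATHCASELESS
    pkt['Flags2'] = SMB.FLAGS2_LONG_NAMES
    echo = SMBCommand(SMB.SMB_COM_ECHO)
    echo['Parameters'] = SMBEcho_Parameters()
    echo['Parameters']['EchoCount'] = 1
    echo['Data'] = SMBEcho_Data()
    echo['Data']['Data'] = text
    pkt.addCommand(echo)     # sets pkt['Command'] and appends to pkt['Data']
    return str(pkt)          # packed, wire-ready bytes (Python 2 str)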
class SMBPacket:
def __init__(self,data = ''):
# The uid attribute will be set when the client calls the login() method
self._command = 0x0
self._error_class = 0x0
self._error_code = 0x0
self._flags = 0x0
self._flags2 = 0x0
self._pad = '\0' * 12
self._tid = 0x0
self._pid = 0x0
self._uid = 0x0
self._mid = 0x0
self._wordcount = 0x0
self._parameter_words = ''
self._bytecount = 0x0
self._buffer = ''
if data != '':
self._command = ord(data[4])
self._error_class = ord(data[5])
self._reserved = ord(data[6])
self._error_code = unpack('<H',data[7:9])[0]
self._flags = ord(data[9])
self._flags2 = unpack('<H',data[10:12])[0]
self._tid = unpack('<H',data[24:26])[0]
self._pid = unpack('<H',data[26:28])[0]
self._uid = unpack('<H',data[28:30])[0]
self._mid = unpack('<H',data[30:32])[0]
self._wordcount = ord(data[32])
self._parameter_words = data[33:33+self._wordcount*2]
self._bytecount = ord(data[33+self._wordcount*2])
self._buffer = data[35+self._wordcount*2:]
def set_command(self,command):
self._command = command
def set_error_class(self, error_class):
self._error_class = error_class
def set_error_code(self,error_code):
self._error_code = error_code
def set_flags(self,flags):
self._flags = flags
def set_flags2(self, flags2):
self._flags2 = flags2
def set_pad(self, pad):
self._pad = pad
def set_tid(self,tid):
self._tid = tid
def set_pid(self,pid):
self._pid = pid
def set_uid(self,uid):
self._uid = uid
def set_mid(self,mid):
self._mid = mid
def set_parameter_words(self,param):
self._parameter_words = param
self._wordcount = len(param)/2
def set_buffer(self,buffer):
if type(buffer) is types.UnicodeType:
raise Exception('SMBPacket: Invalid buffer. Received unicode')
self._buffer = buffer
self._bytecount = len(buffer)
def get_command(self):
return self._command
def get_error_class(self):
return self._error_class
def get_error_code(self):
return self._error_code
def get_reserved(self):
return self._reserved
def get_flags(self):
return self._flags
def get_flags2(self):
return self._flags2
def get_pad(self):
return self._pad
def get_tid(self):
return self._tid
def get_pid(self):
return self._pid
def get_uid(self):
return self._uid
def get_mid(self):
return self._mid
def get_parameter_words(self):
return self._parameter_words
def get_wordcount(self):
return self._wordcount
def get_bytecount(self):
return self._bytecount
def get_buffer(self):
return self._buffer
def rawData(self):
data = pack('<4sBBBHBH12sHHHHB','\xffSMB',self._command,self._error_class,0,self._error_code,self._flags,
self._flags2,self._pad,self._tid, self._pid, self._uid, self._mid, self._wordcount) + self._parameter_words + pack('<H',self._bytecount) + self._buffer
return data
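# Illustrative usage of the legacy SMBPacket above (editorial sketch, names
# and values assumed): an incoming buffer can be re-parsed and re-serialized:
#
#   pkt = SMBPacket(raw_bytes)     # parses header, parameter words and buffer
#   cmd = pkt.get_command()
#   raw_again = pkt.rawData()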
class SMBCommand(Structure):
structure = (
('WordCount', 'B=len(Parameters)/2'),
('_ParametersLength','_-Parameters','WordCount*2'),
('Parameters',':'), # default set by constructor
('ByteCount','<H-Data'),
('Data',':'), # default set by constructor
)
def __init__(self, commandOrData = None, data = None, **kargs):
if type(commandOrData) == type(0):
self.command = commandOrData
else:
data = data or commandOrData
Structure.__init__(self, data = data, **kargs)
if data is None:
self['Parameters'] = ''
self['Data'] = ''
class AsciiOrUnicodeStructure(Structure):
def __init__(self, flags = 0, **kargs):
if flags & SMB.FLAGS2_UNICODE:
self.structure = self.UnicodeStructure
else:
self.structure = self.AsciiStructure
return Structure.__init__(self, **kargs)
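# Editorial note (an assumption based on how the subclasses below are used):
# AsciiOrUnicodeStructure picks its layout from the Flags2 field of the
# surrounding packet, e.g.
#
#   data = SMBSessionSetupAndXResponse_Data(flags = pkt['Flags2'], data = raw)
#
# so ASCII and Unicode servers can share a single request/response class.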
class SMBCommand_Parameters(Structure):
pass
class SMBAndXCommand_Parameters(Structure):
commonHdr = (
('AndXCommand','B=0xff'),
('_reserved','B=0'),
('AndXOffset','<H=0'),
)
structure = ( # default structure, overridden by subclasses
('Data',':=""'),
)
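# Editorial note on the field descriptors used throughout this file (a short,
# approximate summary of impacket's Structure mini-language, added as a
# reading aid, not part of the original source):
#   '<H=0'            little-endian 16-bit word with a default value of 0
#   '<L', '<q'        little-endian 32-bit / 64-bit values with no default
#   'z'               NUL-terminated string
#   ':'               raw bytes, consuming the rest of the buffer
#   '"\x04'           a constant literal byte, always packed as-is
#   '_-Field','expr'  a virtual length field (not packed itself) whose value,
#                     given by 'expr', bounds 'Field' when parsing
# AndX parameter blocks additionally share the commonHdr above, which carries
# the chained command byte and the offset of the next command in the packet.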
############# TRANSACTIONS RELATED
# TRANS2_QUERY_FS_INFORMATION
# QUERY_FS Information Levels
# SMB_QUERY_FS_ATTRIBUTE_INFO
class SMBQueryFsAttributeInfo(Structure):
structure = (
('FileSystemAttributes','<L'),
('MaxFilenNameLengthInBytes','<L'),
('LengthOfFileSystemName','<L-FileSystemName'),
('FileSystemName',':'),
)
class SMBQueryFsInfoVolume(Structure):
structure = (
('ulVolSerialNbr','<L=0xABCDEFAA'),
('cCharCount','<B-VolumeLabel'),
('VolumeLabel','z'),
)
# SMB_QUERY_FS_SIZE_INFO
class SMBQueryFsSizeInfo(Structure):
structure = (
('TotalAllocationUnits','<q=148529400'),
('TotalFreeAllocationUnits','<q=14851044'),
('SectorsPerAllocationUnit','<L=2'),
('BytesPerSector','<L=512'),
)
# SMB_QUERY_FS_VOLUME_INFO
class SMBQueryFsVolumeInfo(Structure):
structure = (
('VolumeCreationTime','<q'),
('SerialNumber','<L=0xABCDEFAA'),
('VolumeLabelSize','<L=len(VolumeLabel)/2'),
('Reserved','<H=0'),
('VolumeLabel',':')
)
# SMB_FIND_FILE_BOTH_DIRECTORY_INFO level
class SMBFindFileBothDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=0'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L=0'),
#('ShortNameLength','<B-ShortName','len(ShortName)'),
('ShortNameLength','<B=0'),
('Reserved','<B=0'),
('ShortName','24s'),
('FileName',':'),
)
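# Illustrative sketch (editorial, mirroring typical client code rather than
# anything defined in this module): TRANS2_FIND_FIRST2 / FIND_NEXT2 return a
# blob of these records that is walked via NextEntryOffset, e.g.
#
#   while True:
#       record = SMBFindFileBothDirectoryInfo(data = findData)
#       name = record['FileName'][:record['FileNameLength']]
#       if record['NextEntryOffset'] == 0:
#           break
#       findData = findData[record['NextEntryOffset']:]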
# SMB_FIND_FILE_ID_FULL_DIRECTORY_INFO level
class SMBFindFileIdFullDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=0'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L=0'),
#('ShortNameLength','<B-ShortName','len(ShortName)'),
('FileID','<q=0'),
('FileName',':'),
)
# SMB_FIND_FILE_ID_BOTH_DIRECTORY_INFO level
class SMBFindFileIdBothDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=0'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L=0'),
#('ShortNameLength','<B-ShortName','len(ShortName)'),
('ShortNameLength','<B=0'),
('Reserved','<B=0'),
('ShortName','24s'),
('Reserved','<H=0'),
('FileID','<q=0'),
('FileName',':'),
)
# SMB_FIND_FILE_DIRECTORY_INFO level
class SMBFindFileDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=1'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('FileName','z'),
)
# SMB_FIND_FILE_NAMES_INFO level
class SMBFindFileNamesInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('FileName','z'),
)
# SMB_FIND_FILE_FULL_DIRECTORY_INFO level
class SMBFindFileFullDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=1'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L'),
('FileName','z'),
)
# SMB_FIND_INFO_STANDARD level
class SMBFindInfoStandard(Structure):
structure = (
('ResumeKey','<L=0xff'),
('CreationDate','<H=0'),
('CreationTime','<H=0'),
('LastAccessDate','<H=0'),
('LastAccessTime','<H=0'),
('LastWriteDate','<H=0'),
('LastWriteTime','<H=0'),
('EaSize','<L'),
('AllocationSize','<L=1'),
('ExtFileAttributes','<H=0'),
('FileNameLength','<B-FileName','len(FileName)'),
('FileName','z'),
)
# SET_FILE_INFORMATION structures
# SMB_SET_FILE_DISPOSITION_INFO
class SMBSetFileDispositionInfo(Structure):
structure = (
('DeletePending','<B'),
)
# SMB_SET_FILE_BASIC_INFO
class SMBSetFileBasicInfo(Structure):
structure = (
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('ChangeTime','<q'),
('ExtFileAttributes','<H'),
('Reserved','<L'),
)
# SMB_SET_FILE_END_OF_FILE_INFO
class SMBSetFileEndOfFileInfo(Structure):
structure = (
('EndOfFile','<q'),
)
# TRANS2_FIND_NEXT2
class SMBFindNext2_Parameters(Structure):
structure = (
('SID','<H'),
('SearchCount','<H'),
('InformationLevel','<H'),
('ResumeKey','<L'),
('Flags','<H'),
('FileName','z'),
)
class SMBFindNext2Response_Parameters(Structure):
structure = (
('SearchCount','<H'),
('EndOfSearch','<H=1'),
('EaErrorOffset','<H=0'),
('LastNameOffset','<H=0'),
)
class SMBFindNext2_Data(Structure):
structure = (
('GetExtendedAttributesListLength','_-GetExtendedAttributesList', 'self["GetExtendedAttributesListLength"]'),
('GetExtendedAttributesList',':'),
)
# TRANS2_FIND_FIRST2
class SMBFindFirst2Response_Parameters(Structure):
structure = (
('SID','<H'),
('SearchCount','<H'),
('EndOfSearch','<H=1'),
('EaErrorOffset','<H=0'),
('LastNameOffset','<H=0'),
)
class SMBFindFirst2_Parameters(Structure):
structure = (
('SearchAttributes','<H'),
('SearchCount','<H'),
('Flags','<H'),
('InformationLevel','<H'),
('SearchStorageType','<L'),
('FileName','z'),
)
class SMBFindFirst2_Data(Structure):
structure = (
('GetExtendedAttributesListLength','_-GetExtendedAttributesList', 'self["GetExtendedAttributesListLength"]'),
('GetExtendedAttributesList',':'),
)
# TRANS2_SET_PATH_INFORMATION
class SMBSetPathInformation_Parameters(Structure):
structure = (
('InformationLevel','<H'),
('Reserved','<L'),
('FileName','z'),
)
class SMBSetPathInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
# TRANS2_SET_FILE_INFORMATION
class SMBSetFileInformation_Parameters(Structure):
structure = (
('FID','<H'),
('InformationLevel','<H'),
('Reserved','<H'),
)
class SMBSetFileInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
# TRANS2_QUERY_FILE_INFORMATION
class SMBQueryFileInformation_Parameters(Structure):
structure = (
('FID','<H'),
('InformationLevel','<H'),
)
class SMBQueryFileInformation_Data(Structure):
structure = (
('GetExtendedAttributeList',':'),
)
class SMBQueryFileInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
# TRANS2_QUERY_PATH_INFORMATION
class SMBQueryPathInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
class SMBQueryPathInformation_Parameters(Structure):
structure = (
('InformationLevel','<H'),
('Reserved','<L=0'),
('FileName','z'),
)
class SMBQueryPathInformation_Data(Structure):
structure = (
('GetExtendedAttributeList',':'),
)
# SMB_QUERY_FILE_EA_INFO
class SMBQueryFileEaInfo(Structure):
structure = (
('EaSize','<L=0'),
)
# SMB_QUERY_FILE_BASIC_INFO
class SMBQueryFileBasicInfo(Structure):
structure = (
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('ExtFileAttributes','<L'),
#('Reserved','<L=0'),
)
# SMB_QUERY_FILE_STANDARD_INFO
class SMBQueryFileStandardInfo(Structure):
structure = (
('AllocationSize','<q'),
('EndOfFile','<q'),
('NumberOfLinks','<L=0'),
('DeletePending','<B=0'),
('Directory','<B'),
)
# SMB_QUERY_FILE_ALL_INFO
class SMBQueryFileAllInfo(Structure):
structure = (
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('ExtFileAttributes','<L'),
('Reserved','<L=0'),
('AllocationSize','<q'),
('EndOfFile','<q'),
('NumberOfLinks','<L=0'),
('DeletePending','<B=0'),
('Directory','<B'),
('Reserved','<H=0'),
('EaSize','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('FileName','z'),
)
# \PIPE\LANMAN NetShareEnum
class SMBNetShareEnum(Structure):
structure = (
('RAPOpcode','<H=0'),
('ParamDesc','z'),
('DataDesc','z'),
('InfoLevel','<H'),
('ReceiveBufferSize','<H'),
)
class SMBNetShareEnumResponse(Structure):
structure = (
('Status','<H=0'),
('Convert','<H=0'),
('EntriesReturned','<H'),
('EntriesAvailable','<H'),
)
class NetShareInfo1(Structure):
structure = (
('NetworkName','13s'),
('Pad','<B=0'),
('Type','<H=0'),
('RemarkOffsetLow','<H=0'),
('RemarkOffsetHigh','<H=0'),
)
# \PIPE\LANMAN NetServerGetInfo
class SMBNetServerGetInfoResponse(Structure):
structure = (
('Status','<H=0'),
('Convert','<H=0'),
('TotalBytesAvailable','<H'),
)
class SMBNetServerInfo1(Structure):
# Level 1 Response
structure = (
('ServerName','16s'),
('MajorVersion','B=5'),
('MinorVersion','B=0'),
('ServerType','<L=3'),
('ServerCommentLow','<H=0'),
('ServerCommentHigh','<H=0'),
)
# \PIPE\LANMAN NetShareGetInfo
class SMBNetShareGetInfo(Structure):
structure = (
('RAPOpcode','<H=0'),
('ParamDesc','z'),
('DataDesc','z'),
('ShareName','z'),
('InfoLevel','<H'),
('ReceiveBufferSize','<H'),
)
class SMBNetShareGetInfoResponse(Structure):
structure = (
('Status','<H=0'),
('Convert','<H=0'),
('TotalBytesAvailable','<H'),
)
############# Security Features
class SecurityFeatures(Structure):
structure = (
('Key','<L=0'),
('CID','<H=0'),
('SequenceNumber','<H=0'),
)
############# SMB_COM_QUERY_INFORMATION2 (0x23)
class SMBQueryInformation2_Parameters(Structure):
structure = (
('Fid','<H'),
)
class SMBQueryInformation2Response_Parameters(Structure):
structure = (
('CreateDate','<H'),
('CreationTime','<H'),
('LastAccessDate','<H'),
('LastAccessTime','<H'),
('LastWriteDate','<H'),
('LastWriteTime','<H'),
('FileDataSize','<L'),
('FileAllocationSize','<L'),
('FileAttributes','<L'),
)
############# SMB_COM_SESSION_SETUP_ANDX (0x73)
class SMBSessionSetupAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('MaxBuffer','<H'),
('MaxMpxCount','<H'),
('VCNumber','<H'),
('SessionKey','<L'),
('AnsiPwdLength','<H'),
('UnicodePwdLength','<H'),
('_reserved','<L=0'),
('Capabilities','<L'),
)
class SMBSessionSetupAndX_Extended_Parameters(SMBAndXCommand_Parameters):
structure = (
('MaxBufferSize','<H'),
('MaxMpxCount','<H'),
('VcNumber','<H'),
('SessionKey','<L'),
('SecurityBlobLength','<H'),
('Reserved','<L=0'),
('Capabilities','<L'),
)
class SMBSessionSetupAndX_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('AnsiPwdLength','_-AnsiPwd','self["AnsiPwdLength"]'),
('UnicodePwdLength','_-UnicodePwd','self["UnicodePwdLength"]'),
('AnsiPwd',':=""'),
('UnicodePwd',':=""'),
('Account','z=""'),
('PrimaryDomain','z=""'),
('NativeOS','z=""'),
('NativeLanMan','z=""'),
)
UnicodeStructure = (
('AnsiPwdLength','_-AnsiPwd','self["AnsiPwdLength"]'),
('UnicodePwdLength','_-UnicodePwd','self["UnicodePwdLength"]'),
('AnsiPwd',':=""'),
('UnicodePwd',':=""'),
('Account','u=""'),
('PrimaryDomain','u=""'),
('NativeOS','u=""'),
('NativeLanMan','u=""'),
)
class SMBSessionSetupAndX_Extended_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','z=""'),
('NativeLanMan','z=""'),
)
UnicodeStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','u=""'),
('NativeLanMan','u=""'),
)
class SMBSessionSetupAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Action','<H'),
)
class SMBSessionSetupAndX_Extended_Response_Parameters(SMBAndXCommand_Parameters):
structure = (
('Action','<H=0'),
('SecurityBlobLength','<H'),
)
class SMBSessionSetupAndXResponse_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('NativeOS','z=""'),
('NativeLanMan','z=""'),
('PrimaryDomain','z=""'),
)
UnicodeStructure = (
('NativeOS','u=""'),
('NativeLanMan','u=""'),
('PrimaryDomain','u=""'),
)
class SMBSessionSetupAndX_Extended_Response_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','z=""'),
('NativeLanMan','z=""'),
)
UnicodeStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','u=""'),
('NativeLanMan','u=""'),
)
############# SMB_COM_TREE_CONNECT (0x70)
class SMBTreeConnect_Parameters(SMBCommand_Parameters):
structure = (
)
class SMBTreeConnect_Data(SMBCommand_Parameters):
structure = (
('PathFormat','"\x04'),
('Path','z'),
('PasswordFormat','"\x04'),
('Password','z'),
('ServiceFormat','"\x04'),
('Service','z'),
)
############# SMB_COM_TREE_CONNECT (0x75)
class SMBTreeConnectAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Flags','<H=0'),
('PasswordLength','<H'),
)
class SMBTreeConnectAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('OptionalSupport','<H=0'),
)
class SMBTreeConnectAndXExtendedResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('OptionalSupport','<H=1'),
('MaximalShareAccessRights','<L=0x1fffff'),
('GuestMaximalShareAccessRights','<L=0x1fffff'),
)
class SMBTreeConnectAndX_Data(Structure):
structure = (
('_PasswordLength','_-Password','self["_PasswordLength"]'),
('Password',':'),
('Path','z'),
('Service','z'),
)
class SMBTreeConnectAndXResponse_Data(Structure):
structure = (
('Service','z'),
('PadLen','_-Pad','self["PadLen"]'),
('Pad',':=""'),
('NativeFileSystem','z'),
)
############# SMB_COM_NT_CREATE_ANDX (0xA2)
class SMBNtCreateAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('_reserved', 'B=0'),
('FileNameLength','<H'), # NameLength
('CreateFlags','<L'), # Flags
('RootFid','<L=0'), # RootDirectoryFID
('AccessMask','<L'), # DesiredAccess
('AllocationSizeLo','<L=0'), # AllocationSize
('AllocationSizeHi','<L=0'),
('FileAttributes','<L=0'), # ExtFileAttributes
('ShareAccess','<L=3'), #
('Disposition','<L=1'), # CreateDisposition
('CreateOptions','<L'), # CreateOptions
('Impersonation','<L=2'),
('SecurityFlags','B=3'),
)
class SMBNtCreateAndXResponse_Parameters(SMBAndXCommand_Parameters):
# XXX Is there a memory leak in the response for NTCreate (where the Data section would be) in Win 2000, Win XP, and Win 2003?
structure = (
('OplockLevel', 'B=0'),
('Fid','<H'),
('CreateAction','<L'),
('CreateTime','<q=0'),
('LastAccessTime','<q=0'),
('LastWriteTime','<q=0'),
('LastChangeTime','<q=0'),
('FileAttributes','<L=0x80'),
('AllocationSize','<q=0'),
('EndOfFile','<q=0'),
('FileType','<H=0'),
('IPCState','<H=0'),
('IsDirectory','B'),
)
class SMBNtCreateAndXExtendedResponse_Parameters(SMBAndXCommand_Parameters):
# [MS-SMB] Extended response description
structure = (
('OplockLevel', 'B=0'),
('Fid','<H'),
('CreateAction','<L'),
('CreateTime','<q=0'),
('LastAccessTime','<q=0'),
('LastWriteTime','<q=0'),
('LastChangeTime','<q=0'),
('FileAttributes','<L=0x80'),
('AllocationSize','<q=0'),
('EndOfFile','<q=0'),
('FileType','<H=0'),
('IPCState','<H=0'),
('IsDirectory','B'),
('VolumeGUID','16s'),
('FileIdLow','<L=0'),
('FileIdHigh','<L=0'),
('MaximalAccessRights','<L=0x12019b'),
('GuestMaximalAccessRights','<L=0x120089'),
)
class SMBNtCreateAndX_Data(Structure):
structure = (
('FileName','z'),
)
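# Illustrative sketch (editorial example; the field values are typical ones,
# not taken from this module): opening a file with SMB_COM_NT_CREATE_ANDX.
#
#   ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX)
#   ntCreate['Parameters'] = SMBNtCreateAndX_Parameters()
#   ntCreate['Data'] = SMBNtCreateAndX_Data()
#   ntCreate['Data']['FileName'] = filename
#   ntCreate['Parameters']['FileNameLength'] = len(filename)
#   ntCreate['Parameters']['CreateFlags'] = 0x16
#   ntCreate['Parameters']['AccessMask'] = 0x2019f        # read/write access
#   ntCreate['Parameters']['CreateOptions'] = 0x40        # non-directory file
#   pkt = NewSMBPacket()
#   pkt.addCommand(ntCreate)
#   # the Fid for later read/write calls comes back in the reply's
#   # SMBNtCreateAndXResponse_Parameters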
############# SMB_COM_OPEN_ANDX (0xD2)
class SMBOpenAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Flags','<H=0'),
('DesiredAccess','<H=0'),
('SearchAttributes','<H=0'),
('FileAttributes','<H=0'),
('CreationTime','<L=0'),
('OpenMode','<H=1'), # SMB_O_OPEN = 1
('AllocationSize','<L=0'),
('Reserved','8s=""'),
)
class SMBOpenAndX_Data(SMBNtCreateAndX_Data):
pass
class SMBOpenAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Fid','<H=0'),
('FileAttributes','<H=0'),
('LastWriten','<L=0'),
('FileSize','<L=0'),
('GrantedAccess','<H=0'),
('FileType','<H=0'),
('IPCState','<H=0'),
('Action','<H=0'),
('ServerFid','<L=0'),
('_reserved','<H=0'),
)
############# SMB_COM_WRITE (0x0B)
class SMBWrite_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Count','<H'),
('Offset','<L'),
('Remaining','<H'),
)
class SMBWriteResponse_Parameters(SMBCommand_Parameters):
structure = (
('Count','<H'),
)
class SMBWrite_Data(Structure):
structure = (
('BufferFormat','<B=1'),
('DataLength','<H-Data'),
('Data',':'),
)
############# SMB_COM_WRITE_ANDX (0x2F)
class SMBWriteAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('_reserved','<L=0xff'),
('WriteMode','<H=8'),
('Remaining','<H'),
('DataLength_Hi','<H=0'),
('DataLength','<H'),
('DataOffset','<H=0'),
('HighOffset','<L=0'),
)
class SMBWriteAndX_Data(Structure):
structure = (
('Pad','<B=0'),
('DataLength','_-Data','self["DataLength"]'),
('Data',':'),
)
class SMBWriteAndX_Parameters2(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('_reserved','<L=0xff'),
('WriteMode','<H=8'),
('Remaining','<H'),
('DataLength_Hi','<H=0'),
('DataLength','<H'),
('DataOffset','<H=0'),
)
class SMBWriteAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Count','<H'),
('Available','<H'),
('Reserved','<L=0'),
)
############# SMB_COM_WRITE_RAW (0x1D)
class SMBWriteRaw_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Count','<H'),
('_reserved','<H=0'),
('Offset','<L'),
('Timeout','<L=0'),
('WriteMode','<H=0'),
('_reserved2','<L=0'),
('DataLength','<H'),
('DataOffset','<H=0'),
)
############# SMB_COM_READ (0x0A)
class SMBRead_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Count','<H'),
('Offset','<L'),
('Remaining','<H=Count'),
)
class SMBReadResponse_Parameters(Structure):
structure = (
('Count','<H=0'),
('_reserved','"\0\0\0\0\0\0\0\0'),
)
class SMBReadResponse_Data(Structure):
structure = (
('BufferFormat','<B=0x1'),
('DataLength','<H-Data'),
('Data',':'),
)
############# SMB_COM_READ_RAW (0x1A)
class SMBReadRaw_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('MaxCount','<H'),
('MinCount','<H=MaxCount'),
('Timeout','<L=0'),
('_reserved','<H=0'),
)
############# SMB_COM_NT_TRANSACT (0xA0)
class SMBNTTransaction_Parameters(SMBCommand_Parameters):
structure = (
('MaxSetupCount','<B=0'),
('Reserved1','<H=0'),
('TotalParameterCount','<L'),
('TotalDataCount','<L'),
('MaxParameterCount','<L=1024'),
('MaxDataCount','<L=65504'),
('ParameterCount','<L'),
('ParameterOffset','<L'),
('DataCount','<L'),
('DataOffset','<L'),
('SetupCount','<B=len(Setup)/2'),
('Function','<H=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBNTTransactionResponse_Parameters(SMBCommand_Parameters):
structure = (
('Reserved1','"\0\0\0'),
('TotalParameterCount','<L'),
('TotalDataCount','<L'),
('ParameterCount','<L'),
('ParameterOffset','<L'),
('ParameterDisplacement','<L=0'),
('DataCount','<L'),
('DataOffset','<L'),
('DataDisplacement','<L=0'),
('SetupCount','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBNTTransaction_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('NT_Trans_ParametersLength','_-NT_Trans_Parameters','self["NT_Trans_ParametersLength"]'),
('NT_Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('NT_Trans_DataLength','_-NT_Trans_Data','self["NT_Trans_DataLength"]'),
('NT_Trans_Data',':'),
)
class SMBNTTransactionResponse_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
############# SMB_COM_TRANSACTION2_SECONDARY (0x33)
class SMBTransaction2Secondary_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('DataCount','<H'),
('DataOffset','<H'),
('DataDisplacement','<H=0'),
('FID','<H'),
)
class SMBTransaction2Secondary_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
############# SMB_COM_TRANSACTION2 (0x32)
class SMBTransaction2_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('MaxParameterCount','<H=1024'),
('MaxDataCount','<H=65504'),
('MaxSetupCount','<B=0'),
('Reserved1','<B=0'),
('Flags','<H=0'),
('Timeout','<L=0'),
('Reserved2','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('DataCount','<H'),
('DataOffset','<H'),
('SetupCount','<B=len(Setup)/2'),
('Reserved3','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBTransaction2Response_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('Reserved1','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('ParameterDisplacement','<H=0'),
('DataCount','<H'),
('DataOffset','<H'),
('DataDisplacement','<H=0'),
('SetupCount','<B=0'),
('Reserved2','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBTransaction2_Data(Structure):
structure = (
# ('NameLength','_-Name','1'),
# ('Name',':'),
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
class SMBTransaction2Response_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
############# SMB_COM_QUERY_INFORMATION (0x08)
class SMBQueryInformation_Data(Structure):
structure = (
('BufferFormat','B=4'),
('FileName','z'),
)
class SMBQueryInformationResponse_Parameters(Structure):
structure = (
('FileAttributes','<H'),
('LastWriteTime','<L'),
('FileSize','<L'),
('Reserved','"0123456789'),
)
############# SMB_COM_TRANSACTION (0x25)
class SMBTransaction_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('MaxParameterCount','<H=1024'),
('MaxDataCount','<H=65504'),
('MaxSetupCount','<B=0'),
('Reserved1','<B=0'),
('Flags','<H=0'),
('Timeout','<L=0'),
('Reserved2','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('DataCount','<H'),
('DataOffset','<H'),
('SetupCount','<B=len(Setup)/2'),
('Reserved3','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBTransactionResponse_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('Reserved1','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('ParameterDisplacement','<H=0'),
('DataCount','<H'),
('DataOffset','<H'),
('DataDisplacement','<H=0'),
('SetupCount','<B'),
('Reserved2','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
# TODO: These two structures should be merged, but that will require fixing
# the instances where this structure is used on the client side
class SMBTransaction_SData(Structure):
structure = (
('Name','z'),
('Trans_ParametersLength','_-Trans_Parameters'),
('Trans_Parameters',':'),
('Trans_DataLength','_-Trans_Data'),
('Trans_Data',':'),
)
class SMBTransaction_Data(Structure):
structure = (
('NameLength','_-Name'),
('Name',':'),
('Trans_ParametersLength','_-Trans_Parameters'),
('Trans_Parameters',':'),
('Trans_DataLength','_-Trans_Data'),
('Trans_Data',':'),
)
class SMBTransactionResponse_Data(Structure):
structure = (
('Trans_ParametersLength','_-Trans_Parameters'),
('Trans_Parameters',':'),
('Trans_DataLength','_-Trans_Data'),
('Trans_Data',':'),
)
############# SMB_COM_READ_ANDX (0x2E)
class SMBReadAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('MaxCount','<H'),
('MinCount','<H=MaxCount'),
('_reserved','<L=0xffffffff'),
('Remaining','<H=MaxCount'),
('HighOffset','<L=0'),
)
class SMBReadAndX_Parameters2(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('MaxCount','<H'),
('MinCount','<H=MaxCount'),
('_reserved','<L=0xffffffff'),
('Remaining','<H=MaxCount'),
)
class SMBReadAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Remaining','<H=0'),
('DataMode','<H=0'),
('_reserved','<H=0'),
('DataCount','<H'),
('DataOffset','<H'),
('DataCount_Hi','<L'),
('_reserved2','"\0\0\0\0\0\0'),
)
############# SMB_COM_ECHO (0x2B)
class SMBEcho_Data(Structure):
structure = (
('Data',':'),
)
class SMBEcho_Parameters(Structure):
structure = (
('EchoCount','<H'),
)
class SMBEchoResponse_Data(Structure):
structure = (
('Data',':'),
)
class SMBEchoResponse_Parameters(Structure):
structure = (
('SequenceNumber','<H=1'),
)
############# SMB_COM_QUERY_INFORMATION_DISK (0x80)
class SMBQueryInformationDiskResponse_Parameters(Structure):
structure = (
('TotalUnits','<H'),
('BlocksPerUnit','<H'),
('BlockSize','<H'),
('FreeUnits','<H'),
('Reserved','<H=0'),
)
############# SMB_COM_LOGOFF_ANDX (0x74)
class SMBLogOffAndX(SMBAndXCommand_Parameters):
structure = ()
############# SMB_COM_CLOSE (0x04)
class SMBClose_Parameters(SMBCommand_Parameters):
structure = (
('FID','<H'),
('Time','<L=0'),
)
############# SMB_COM_CREATE_DIRECTORY (0x00)
class SMBCreateDirectory_Data(Structure):
structure = (
('BufferFormat','<B=4'),
('DirectoryName','z'),
)
############# SMB_COM_DELETE (0x06)
class SMBDelete_Data(Structure):
structure = (
('BufferFormat','<B=4'),
('FileName','z'),
)
class SMBDelete_Parameters(Structure):
structure = (
('SearchAttributes','<H'),
)
############# SMB_COM_DELETE_DIRECTORY (0x01)
class SMBDeleteDirectory_Data(Structure):
structure = (
('BufferFormat','<B=4'),
('DirectoryName','z'),
)
############# SMB_COM_RENAME (0x07)
class SMBRename_Parameters(SMBCommand_Parameters):
structure = (
('SearchAttributes','<H'),
)
class SMBRename_Data(Structure):
structure = (
('BufferFormat1','<B=4'),
('OldFileName','z'),
('BufferFormat2','<B=4'),
('NewFileName','z'),
)
############# SMB_COM_OPEN (0x02)
class SMBOpen_Parameters(SMBCommand_Parameters):
structure = (
('DesiredAccess','<H=0'),
('SearchAttributes','<H=0'),
)
class SMBOpen_Data(Structure):
structure = (
('FileNameFormat','"\x04'),
('FileName','z'),
)
class SMBOpenResponse_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H=0'),
('FileAttributes','<H=0'),
('LastWriten','<L=0'),
('FileSize','<L=0'),
('GrantedAccess','<H=0'),
)
############# EXTENDED SECURITY CLASSES
class SMBExtended_Security_Parameters(Structure):
structure = (
('DialectIndex','<H'),
('SecurityMode','<B'),
('MaxMpxCount','<H'),
('MaxNumberVcs','<H'),
('MaxBufferSize','<L'),
('MaxRawSize','<L'),
('SessionKey','<L'),
('Capabilities','<L'),
('LowDateTime','<L'),
('HighDateTime','<L'),
('ServerTimeZone','<H'),
('ChallengeLength','<B'),
)
class SMBExtended_Security_Data(Structure):
structure = (
('ServerGUID','16s'),
('SecurityBlob',':'),
)
class SMBNTLMDialect_Parameters(Structure):
structure = (
('DialectIndex','<H'),
('SecurityMode','<B'),
('MaxMpxCount','<H'),
('MaxNumberVcs','<H'),
('MaxBufferSize','<L'),
('MaxRawSize','<L'),
('SessionKey','<L'),
('Capabilities','<L'),
('LowDateTime','<L'),
('HighDateTime','<L'),
('ServerTimeZone','<H'),
('ChallengeLength','<B'),
)
class SMBNTLMDialect_Data(Structure):
structure = (
('ChallengeLength','_-Challenge','self["ChallengeLength"]'),
('Challenge',':'),
('Payload',':'),
# For some reason, on an old Linux this field is not present; we have to check this out. There must be a flag stating this.
('DomainName','_'),
('ServerName','_'),
)
def __init__(self,data = None, alignment = 0):
Structure.__init__(self,data,alignment)
#self['ChallengeLength']=8
def fromString(self,data):
Structure.fromString(self,data)
self['DomainName'] = ''
self['ServerName'] = ''
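# Editorial note (an assumption based on how negotiation is normally handled):
# whether the negotiate response body is parsed as SMBExtended_Security_Data
# or as SMBNTLMDialect_Data depends on the CAP_EXTENDED_SECURITY bit in the
# Capabilities field of the parameter block above; the extended form carries
# a server GUID plus a SPNEGO/NTLMSSP security blob, while the plain form
# carries the 8-byte challenge followed by the domain and server names.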
class SMB:
# SMB Command Codes
SMB_COM_CREATE_DIRECTORY = 0x00
SMB_COM_DELETE_DIRECTORY = 0x01
SMB_COM_OPEN = 0x02
SMB_COM_CREATE = 0x03
SMB_COM_CLOSE = 0x04
SMB_COM_FLUSH = 0x05
SMB_COM_DELETE = 0x06
SMB_COM_RENAME = 0x07
SMB_COM_QUERY_INFORMATION = 0x08
SMB_COM_SET_INFORMATION = 0x09
SMB_COM_READ = 0x0A
SMB_COM_WRITE = 0x0B
SMB_COM_LOCK_BYTE_RANGE = 0x0C
SMB_COM_UNLOCK_BYTE_RANGE = 0x0D
SMB_COM_CREATE_TEMPORARY = 0x0E
SMB_COM_CREATE_NEW = 0x0F
SMB_COM_CHECK_DIRECTORY = 0x10
SMB_COM_PROCESS_EXIT = 0x11
SMB_COM_SEEK = 0x12
SMB_COM_LOCK_AND_READ = 0x13
SMB_COM_WRITE_AND_UNLOCK = 0x14
SMB_COM_READ_RAW = 0x1A
SMB_COM_READ_MPX = 0x1B
SMB_COM_READ_MPX_SECONDARY = 0x1C
SMB_COM_WRITE_RAW = 0x1D
SMB_COM_WRITE_MPX = 0x1E
SMB_COM_WRITE_MPX_SECONDARY = 0x1F
SMB_COM_WRITE_COMPLETE = 0x20
SMB_COM_QUERY_SERVER = 0x21
SMB_COM_SET_INFORMATION2 = 0x22
SMB_COM_QUERY_INFORMATION2 = 0x23
SMB_COM_LOCKING_ANDX = 0x24
SMB_COM_TRANSACTION = 0x25
SMB_COM_TRANSACTION_SECONDARY = 0x26
SMB_COM_IOCTL = 0x27
SMB_COM_IOCTL_SECONDARY = 0x28
SMB_COM_COPY = 0x29
SMB_COM_MOVE = 0x2A
SMB_COM_ECHO = 0x2B
SMB_COM_WRITE_AND_CLOSE = 0x2C
SMB_COM_OPEN_ANDX = 0x2D
SMB_COM_READ_ANDX = 0x2E
SMB_COM_WRITE_ANDX = 0x2F
SMB_COM_NEW_FILE_SIZE = 0x30
SMB_COM_CLOSE_AND_TREE_DISC = 0x31
SMB_COM_TRANSACTION2 = 0x32
SMB_COM_TRANSACTION2_SECONDARY = 0x33
SMB_COM_FIND_CLOSE2 = 0x34
SMB_COM_FIND_NOTIFY_CLOSE = 0x35
# Used by Xenix/Unix 0x60 - 0x6E
SMB_COM_TREE_CONNECT = 0x70
SMB_COM_TREE_DISCONNECT = 0x71
SMB_COM_NEGOTIATE = 0x72
SMB_COM_SESSION_SETUP_ANDX = 0x73
SMB_COM_LOGOFF_ANDX = 0x74
SMB_COM_TREE_CONNECT_ANDX = 0x75
SMB_COM_QUERY_INFORMATION_DISK = 0x80
SMB_COM_SEARCH = 0x81
SMB_COM_FIND = 0x82
SMB_COM_FIND_UNIQUE = 0x83
SMB_COM_FIND_CLOSE = 0x84
SMB_COM_NT_TRANSACT = 0xA0
SMB_COM_NT_TRANSACT_SECONDARY = 0xA1
SMB_COM_NT_CREATE_ANDX = 0xA2
SMB_COM_NT_CANCEL = 0xA4
SMB_COM_NT_RENAME = 0xA5
SMB_COM_OPEN_PRINT_FILE = 0xC0
SMB_COM_WRITE_PRINT_FILE = 0xC1
SMB_COM_CLOSE_PRINT_FILE = 0xC2
SMB_COM_GET_PRINT_QUEUE = 0xC3
SMB_COM_READ_BULK = 0xD8
SMB_COM_WRITE_BULK = 0xD9
SMB_COM_WRITE_BULK_DATA = 0xDA
# TRANSACT codes
TRANS_TRANSACT_NMPIPE = 0x26
# TRANSACT2 codes
TRANS2_FIND_FIRST2 = 0x0001
TRANS2_FIND_NEXT2 = 0x0002
TRANS2_QUERY_FS_INFORMATION = 0x0003
TRANS2_QUERY_PATH_INFORMATION = 0x0005
TRANS2_QUERY_FILE_INFORMATION = 0x0007
TRANS2_SET_FILE_INFORMATION = 0x0008
TRANS2_SET_PATH_INFORMATION = 0x0006
# Security Share Mode (Used internally by SMB class)
SECURITY_SHARE_MASK = 0x01
SECURITY_SHARE_SHARE = 0x00
SECURITY_SHARE_USER = 0x01
SECURITY_SIGNATURES_ENABLED = 0X04
SECURITY_SIGNATURES_REQUIRED = 0X08
# Security Auth Mode (Used internally by SMB class)
SECURITY_AUTH_MASK = 0x02
SECURITY_AUTH_ENCRYPTED = 0x02
SECURITY_AUTH_PLAINTEXT = 0x00
# Raw Mode Mask (Used internally by SMB class. Good for dialect up to and including LANMAN2.1)
RAW_READ_MASK = 0x01
RAW_WRITE_MASK = 0x02
# Capabilities Mask (Used internally by SMB class. Good for dialect NT LM 0.12)
CAP_RAW_MODE = 0x00000001
CAP_MPX_MODE = 0x0002
CAP_UNICODE = 0x0004
CAP_LARGE_FILES = 0x0008
CAP_EXTENDED_SECURITY = 0x80000000
CAP_USE_NT_ERRORS = 0x40
CAP_NT_SMBS = 0x10
CAP_LARGE_READX = 0x00004000
CAP_LARGE_WRITEX = 0x00008000
# Flags1 Mask
FLAGS1_LOCK_AND_READ_OK = 0x01
FLAGS1_PATHCASELESS = 0x08
FLAGS1_CANONICALIZED_PATHS = 0x10
FLAGS1_REPLY = 0x80
# Flags2 Mask
FLAGS2_LONG_NAMES = 0x0001
FLAGS2_EAS = 0x0002
FLAGS2_SMB_SECURITY_SIGNATURE = 0x0004
FLAGS2_IS_LONG_NAME = 0x0040
FLAGS2_DFS = 0x1000
FLAGS2_PAGING_IO = 0x2000
FLAGS2_NT_STATUS = 0x4000
FLAGS2_UNICODE = 0x8000
FLAGS2_COMPRESSED = 0x0008
FLAGS2_SMB_SECURITY_SIGNATURE_REQUIRED = 0x0010
FLAGS2_EXTENDED_SECURITY = 0x0800
# Dialect's Security Mode flags
NEGOTIATE_USER_SECURITY = 0x01
NEGOTIATE_ENCRYPT_PASSWORDS = 0x02
NEGOTIATE_SECURITY_SIGNATURE_ENABLE = 0x04
NEGOTIATE_SECURITY_SIGNATURE_REQUIRED = 0x08
    # Tree Connect AndX Response optionalSupport flags
SMB_SUPPORT_SEARCH_BITS = 0x01
SMB_SHARE_IS_IN_DFS = 0x02
def __init__(self, remote_name, remote_host, my_name = None, host_type = nmb.TYPE_SERVER, sess_port = 445, timeout=None, UDP = 0):
# The uid attribute will be set when the client calls the login() method
self._uid = 0
self.__server_name = ''
self.__server_os = ''
self.__server_lanman = ''
self.__server_domain = ''
self.__remote_name = string.upper(remote_name)
self.__remote_host = remote_host
self.__is_pathcaseless = 0
self.__isNTLMv2 = True
# Negotiate Protocol Result, used everywhere
# Could be extended or not, flags should be checked before
self._dialect_data = 0
self._dialect_parameters = 0
self._action = 0
self._sess = None
self.encrypt_passwords = True
self.tid = 0
self.fid = 0
# Signing stuff
self._SignSequenceNumber = 0
self._SigningSessionKey = ''
self._SigningChallengeResponse = ''
self._SignatureEnabled = False
self._SignatureVerificationEnabled = False
self._SignatureRequired = False
# Base flags
self.__flags1 = 0
self.__flags2 = 0
if timeout==None:
self.__timeout = 10
else:
self.__timeout = timeout
if not my_name:
my_name = socket.gethostname()
i = string.find(my_name, '.')
if i > -1:
my_name = my_name[:i]
# If port 445 and the name sent is *SMBSERVER we're setting the name to the IP. This is to help some old applications still believing
        # *SMBSERVER will work against modern OSes. If port is NETBIOS_SESSION_PORT the user better know about *SMBSERVER's limitations
if sess_port == 445 and remote_name == '*SMBSERVER':
self.__remote_name = remote_host
if UDP:
self._sess = nmb.NetBIOSUDPSession(my_name, remote_name, remote_host, host_type, sess_port, self.__timeout)
else:
self._sess = nmb.NetBIOSTCPSession(my_name, remote_name, remote_host, host_type, sess_port, self.__timeout)
# Initialize session values (_dialect_data and _dialect_parameters)
self.neg_session()
# Call login() without any authentication information to
        # set up a session if the remote server
# is in share mode.
if (self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SHARE_MASK) == SMB.SECURITY_SHARE_SHARE:
self.login('', '')
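    # Illustrative usage of this class (added for documentation; the host and
    # credentials below are placeholder assumptions, not part of the original code):
    #
    #   s = SMB('*SMBSERVER', '192.168.0.10')
    #   s.login('user', 'password', domain='WORKGROUP')
    #   tid = s.tree_connect_andx('\\\\*SMBSERVER\\IPC$')
    #   s.disconnect_tree(tid)
    #   s.logoff()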
def ntlm_supported(self):
return False
def get_remote_name(self):
return self.__remote_name
def get_remote_host(self):
return self.__remote_host
def get_flags(self):
return self.__flags1, self.__flags2
def set_flags(self, flags1=None, flags2=None):
if flags1 is not None:
self.__flags1 = flags1
if flags2 is not None:
self.__flags2 = flags2
def set_timeout(self, timeout):
self.__timeout = timeout
def get_timeout(self):
return self.__timeout
@contextmanager
def use_timeout(self, timeout):
        prev_timeout = self.get_timeout()
        self.set_timeout(timeout)
try:
yield
finally:
self.set_timeout(prev_timeout)
def get_session(self):
return self._sess
def get_tid(self):
return self.tid
def get_fid(self):
return self.fid
def isGuestSession(self):
return self._action & SMB_SETUP_GUEST
def doesSupportNTLMv2(self):
return self.__isNTLMv2
def __del__(self):
if self._sess:
self._sess.close()
def recvSMB(self):
r = self._sess.recv_packet(self.__timeout)
return NewSMBPacket(data = r.get_trailer())
def recv_packet(self):
r = self._sess.recv_packet(self.__timeout)
return SMBPacket(r.get_trailer())
def __decode_trans(self, params, data):
totparamcnt, totdatacnt, _, paramcnt, paramoffset, paramds, datacnt, dataoffset, datads, setupcnt = unpack('<HHHHHHHHHB', params[:19])
if paramcnt + paramds < totparamcnt or datacnt + datads < totdatacnt:
has_more = 1
else:
has_more = 0
paramoffset = paramoffset - 55 - setupcnt * 2
dataoffset = dataoffset - 55 - setupcnt * 2
return has_more, params[20:20 + setupcnt * 2], data[paramoffset:paramoffset + paramcnt], data[dataoffset:dataoffset + datacnt]
# TODO: Move this to NewSMBPacket, it belongs there
def signSMB(self, packet, signingSessionKey, signingChallengeResponse):
# This logic MUST be applied for messages sent in response to any of the higher-layer actions and in
# compliance with the message sequencing rules.
# * The client or server that sends the message MUST provide the 32-bit sequence number for this
# message, as specified in sections 3.2.4.1 and 3.3.4.1.
# * The SMB_FLAGS2_SMB_SECURITY_SIGNATURE flag in the header MUST be set.
# * To generate the signature, a 32-bit sequence number is copied into the
# least significant 32 bits of the SecuritySignature field and the remaining
# 4 bytes are set to 0x00.
# * The MD5 algorithm, as specified in [RFC1321], MUST be used to generate a hash of the SMB
# message from the start of the SMB Header, which is defined as follows.
# CALL MD5Init( md5context )
# CALL MD5Update( md5context, Connection.SigningSessionKey )
# CALL MD5Update( md5context, Connection.SigningChallengeResponse )
# CALL MD5Update( md5context, SMB message )
# CALL MD5Final( digest, md5context )
# SET signature TO the first 8 bytes of the digest
# The resulting 8-byte signature MUST be copied into the SecuritySignature field of the SMB Header,
# after which the message can be transmitted.
#print "seq(%d) signingSessionKey %r, signingChallengeResponse %r" % (self._SignSequenceNumber, signingSessionKey, signingChallengeResponse)
packet['SecurityFeatures'] = struct.pack('<q',self._SignSequenceNumber)
# Sign with the sequence
m = hashlib.md5()
m.update( signingSessionKey )
m.update( signingChallengeResponse )
m.update( str(packet) )
        # Replace the sequence number with the actual hash
packet['SecurityFeatures'] = m.digest()[:8]
if self._SignatureVerificationEnabled:
self._SignSequenceNumber +=1
else:
self._SignSequenceNumber +=2
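    # Note added for clarity: sendSMB() below calls signSMB() automatically once
    # _SignatureEnabled is True, so callers normally just provide the key material
    # (illustrative sketch, not from the original source):
    #
    #   self._SigningSessionKey = exportedSessionKey
    #   self._SignatureEnabled = True
    #   # every subsequent sendSMB() stamps 'SecurityFeatures' and advances the sequence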
def checkSignSMB(self, packet, signingSessionKey, signingChallengeResponse):
# Let's check
signature = packet['SecurityFeatures']
#print "Signature received: %r " % signature
self.signSMB(packet, signingSessionKey, signingChallengeResponse)
#print "Signature calculated: %r" % packet['SecurityFeatures']
if self._SignatureVerificationEnabled is not True:
self._SignSequenceNumber -= 1
return packet['SecurityFeatures'] == signature
def sendSMB(self,smb):
smb['Uid'] = self._uid
smb['Pid'] = os.getpid()
smb['Flags1'] |= self.__flags1
smb['Flags2'] |= self.__flags2
if self._SignatureEnabled:
smb['Flags2'] |= SMB.FLAGS2_SMB_SECURITY_SIGNATURE
self.signSMB(smb, self._SigningSessionKey, self._SigningChallengeResponse)
self._sess.send_packet(str(smb))
# Should be gone soon. Not used anymore within the library. DON'T use it!
# Use sendSMB instead (and build the packet with NewSMBPacket)
def send_smb(self,s):
s.set_uid(self._uid)
s.set_pid(os.getpid())
self._sess.send_packet(s.rawData())
def __send_smb_packet(self, cmd, flags, flags2, tid, mid, params = '', data = ''):
smb = NewSMBPacket()
smb['Flags1'] = flags
smb['Flags2'] = flags2
smb['Tid'] = tid
smb['Mid'] = mid
cmd = SMBCommand(cmd)
smb.addCommand(cmd)
cmd['Parameters'] = params
cmd['Data'] = data
self.sendSMB(smb)
def isValidAnswer(self, s, cmd):
while 1:
if s.rawData():
if s.get_command() == cmd:
if s.get_error_class() == 0x00 and s.get_error_code() == 0x00:
return 1
else:
raise SessionError, ( "SMB Library Error", s.get_error_class()+ (s.get_reserved() << 8), s.get_error_code() , s.get_flags2() & SMB.FLAGS2_NT_STATUS )
else:
break
return 0
def neg_session(self, extended_security = True):
smb = NewSMBPacket()
negSession = SMBCommand(SMB.SMB_COM_NEGOTIATE)
if extended_security == True:
smb['Flags2']=SMB.FLAGS2_EXTENDED_SECURITY
negSession['Data'] = '\x02NT LM 0.12\x00'
smb.addCommand(negSession)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_NEGOTIATE):
sessionResponse = SMBCommand(smb['Data'][0])
self._dialects_parameters = SMBNTLMDialect_Parameters(sessionResponse['Parameters'])
self._dialects_data = SMBNTLMDialect_Data()
self._dialects_data['ChallengeLength'] = self._dialects_parameters['ChallengeLength']
self._dialects_data.fromString(sessionResponse['Data'])
if self._dialects_parameters['Capabilities'] & SMB.CAP_EXTENDED_SECURITY:
# Whether we choose it or it is enforced by the server, we go for extended security
self._dialects_parameters = SMBExtended_Security_Parameters(sessionResponse['Parameters'])
self._dialects_data = SMBExtended_Security_Data(sessionResponse['Data'])
# Let's setup some variable for later use
if self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SIGNATURES_REQUIRED:
self._SignatureRequired = True
# Interestingly, the security Blob might be missing sometimes.
#spnego = SPNEGO_NegTokenInit(self._dialects_data['SecurityBlob'])
#for i in spnego['MechTypes']:
# print "Mech Found: %s" % MechTypes[i]
return 1
# If not, let's try the old way
else:
if self._dialects_data['ServerName'] is not None:
self.__server_name = self._dialects_data['ServerName']
if self._dialects_parameters['DialectIndex'] == 0xffff:
raise UnsupportedFeature,"Remote server does not know NT LM 0.12"
self.__is_pathcaseless = smb['Flags1'] & SMB.FLAGS1_PATHCASELESS
return 1
else:
return 0
def tree_connect(self, path, password = '', service = SERVICE_ANY):
print "[MS-CIFS] This is an original Core Protocol command.\nThis command has been deprecated.\nClient Implementations SHOULD use SMB_COM_TREE_CONNECT_ANDX"
# return 0x800
if password:
            # Password is only encrypted if the server passed us an "encryption key" during protocol dialect negotiation
if self._dialects_parameters['ChallengeLength'] > 0:
# this code is untested
password = self.get_ntlmv1_response(ntlm.compute_lmhash(password))
if not unicode_support:
if unicode_convert:
path = str(path)
else:
                raise Exception('SMB: Can\'t convert path from unicode!')
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
treeConnect = SMBCommand(SMB.SMB_COM_TREE_CONNECT)
treeConnect['Parameters'] = SMBTreeConnect_Parameters()
treeConnect['Data'] = SMBTreeConnect_Data()
treeConnect['Data']['Path'] = path.upper()
treeConnect['Data']['Password'] = password
treeConnect['Data']['Service'] = service
smb.addCommand(treeConnect)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TREE_CONNECT):
# XXX Here we are ignoring the rest of the response
return smb['Tid']
return smb['Tid']
def get_uid(self):
return self._uid
def set_uid(self, uid):
self._uid = uid
def tree_connect_andx(self, path, password = None, service = SERVICE_ANY, smb_packet=None):
if password:
            # Password is only encrypted if the server passed us an "encryption key" during protocol dialect negotiation
if self._dialects_parameters['ChallengeLength'] > 0:
# this code is untested
password = self.get_ntlmv1_response(ntlm.compute_lmhash(password))
else:
password = '\x00'
if not unicode_support:
if unicode_convert:
path = str(path)
else:
                raise Exception('SMB: Can\'t convert path from unicode!')
if smb_packet == None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
else:
smb = smb_packet
treeConnect = SMBCommand(SMB.SMB_COM_TREE_CONNECT_ANDX)
treeConnect['Parameters'] = SMBTreeConnectAndX_Parameters()
treeConnect['Data'] = SMBTreeConnectAndX_Data()
treeConnect['Parameters']['PasswordLength'] = len(password)
treeConnect['Data']['Password'] = password
treeConnect['Data']['Path'] = path.upper()
treeConnect['Data']['Service'] = service
smb.addCommand(treeConnect)
# filename = "\PIPE\epmapper"
# ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX)
# ntCreate['Parameters'] = SMBNtCreateAndX_Parameters()
# ntCreate['Data'] = SMBNtCreateAndX_Data()
# ntCreate['Parameters']['FileNameLength'] = len(filename)
# ntCreate['Parameters']['CreateFlags'] = 0
# ntCreate['Parameters']['AccessMask'] = 0x3
# ntCreate['Parameters']['CreateOptions'] = 0x0
# ntCreate['Data']['FileName'] = filename
# smb.addCommand(ntCreate)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TREE_CONNECT_ANDX):
# XXX Here we are ignoring the rest of the response
self.tid = smb['Tid']
return self.tid
self.tid = smb['Tid']
return self.tid
    # backwards compatibility
connect_tree = tree_connect_andx
def get_server_name(self):
#return self._dialects_data['ServerName']
return self.__server_name
def get_session_key(self):
return self._dialects_parameters['SessionKey']
def get_encryption_key(self):
if self._dialects_data.fields.has_key('Challenge'):
return self._dialects_data['Challenge']
else:
return None
def get_server_time(self):
timestamp = self._dialects_parameters['HighDateTime']
timestamp <<= 32
timestamp |= self._dialects_parameters['LowDateTime']
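        # Comment added for clarity: the value is a Windows FILETIME (100-nanosecond
        # intervals since 1601-01-01); subtract the 1601->1970 offset and scale to seconds.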
timestamp -= 116444736000000000
timestamp /= 10000000
d = datetime.datetime.utcfromtimestamp(timestamp)
return d.strftime("%a, %d %b %Y %H:%M:%S GMT")
def disconnect_tree(self, tid):
smb = NewSMBPacket()
smb['Tid'] = tid
smb.addCommand(SMBCommand(SMB.SMB_COM_TREE_DISCONNECT))
self.sendSMB(smb)
smb = self.recvSMB()
def open(self, tid, filename, open_mode, desired_access):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
openFile = SMBCommand(SMB.SMB_COM_OPEN)
openFile['Parameters'] = SMBOpen_Parameters()
openFile['Parameters']['DesiredAccess'] = desired_access
openFile['Parameters']['OpenMode'] = open_mode
openFile['Parameters']['SearchAttributes'] = ATTR_READONLY | ATTR_HIDDEN | ATTR_ARCHIVE
openFile['Data'] = SMBOpen_Data()
openFile['Data']['FileName'] = filename
smb.addCommand(openFile)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_OPEN):
# XXX Here we are ignoring the rest of the response
openFileResponse = SMBCommand(smb['Data'][0])
openFileParameters = SMBOpenResponse_Parameters(openFileResponse['Parameters'])
return (
openFileParameters['Fid'],
openFileParameters['FileAttributes'],
openFileParameters['LastWriten'],
openFileParameters['FileSize'],
openFileParameters['GrantedAccess'],
)
def open_andx(self, tid, filename, open_mode, desired_access):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
openFile = SMBCommand(SMB.SMB_COM_OPEN_ANDX)
openFile['Parameters'] = SMBOpenAndX_Parameters()
openFile['Parameters']['DesiredAccess'] = desired_access
openFile['Parameters']['OpenMode'] = open_mode
openFile['Parameters']['SearchAttributes'] = ATTR_READONLY | ATTR_HIDDEN | ATTR_ARCHIVE
openFile['Data'] = SMBOpenAndX_Data()
openFile['Data']['FileName'] = filename
smb.addCommand(openFile)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_OPEN_ANDX):
# XXX Here we are ignoring the rest of the response
openFileResponse = SMBCommand(smb['Data'][0])
openFileParameters = SMBOpenAndXResponse_Parameters(openFileResponse['Parameters'])
return (
openFileParameters['Fid'],
openFileParameters['FileAttributes'],
openFileParameters['LastWriten'],
openFileParameters['FileSize'],
openFileParameters['GrantedAccess'],
openFileParameters['FileType'],
openFileParameters['IPCState'],
openFileParameters['Action'],
openFileParameters['ServerFid'],
)
def close(self, tid, fid):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
closeFile = SMBCommand(SMB.SMB_COM_CLOSE)
closeFile['Parameters'] = SMBClose_Parameters()
closeFile['Parameters']['FID'] = fid
smb.addCommand(closeFile)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_CLOSE):
return 1
return 0
def send_trans(self, tid, setup, name, param, data, noAnswer = 0):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
transCommand = SMBCommand(SMB.SMB_COM_TRANSACTION)
transCommand['Parameters'] = SMBTransaction_Parameters()
transCommand['Data'] = SMBTransaction_Data()
transCommand['Parameters']['Setup'] = setup
transCommand['Parameters']['TotalParameterCount'] = len(param)
transCommand['Parameters']['TotalDataCount'] = len(data)
transCommand['Parameters']['ParameterCount'] = len(param)
transCommand['Parameters']['ParameterOffset'] = 32+3+28+len(setup)+len(name)
transCommand['Parameters']['DataCount'] = len(data)
transCommand['Parameters']['DataOffset'] = transCommand['Parameters']['ParameterOffset'] + len(param)
transCommand['Data']['Name'] = name
transCommand['Data']['Trans_Parameters'] = param
transCommand['Data']['Trans_Data'] = data
if noAnswer:
transCommand['Parameters']['Flags'] = TRANS_NO_RESPONSE
smb.addCommand(transCommand)
self.sendSMB(smb)
def trans2(self, tid, setup, name, param, data):
data_len = len(data)
name_len = len(name)
param_len = len(param)
setup_len = len(setup)
assert setup_len & 0x01 == 0
param_offset = name_len + setup_len + 63
data_offset = param_offset + param_len
self.__send_smb_packet(SMB.SMB_COM_TRANSACTION2, self.__is_pathcaseless, SMB.FLAGS2_LONG_NAMES, tid, 0, pack('<HHHHBBHLHHHHHBB', param_len, data_len, 1024, self._dialects_parameters['MaxBufferSize'], 0, 0, 0, 0, 0, param_len, param_offset, data_len, data_offset, setup_len / 2, 0) + setup, name + param + data)
def query_file_info(self, tid, fid):
self.trans2(tid, '\x07\x00', '\x00', pack('<HH', fid, 0x107), '')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_TRANSACTION2):
f1, f2 = unpack('<LL', s.get_buffer()[53:53+8])
return (f2 & 0xffffffffL) << 32 | f1
def __nonraw_retr_file(self, tid, fid, offset, datasize, callback):
if (self._dialects_parameters['Capabilities'] & SMB.CAP_LARGE_READX) and self._SignatureEnabled is False:
max_buf_size = 65000
else:
max_buf_size = self._dialects_parameters['MaxBufferSize'] & ~0x3ff # Read in multiple KB blocks
read_offset = offset
while read_offset < datasize:
data = self.read_andx(tid, fid, read_offset, max_buf_size)
callback(data)
read_offset += len(data)
def __raw_retr_file(self, tid, fid, offset, datasize, callback):
print "[MS-CIFS] This command was introduced in the CorePlus dialect, but is often listed as part of the LAN Manager 1.0 dialect.\nThis command has been deprecated.\nClients SHOULD use SMB_COM_READ_ANDX"
max_buf_size = self._dialects_parameters['MaxBufferSize'] & ~0x3ff # Write in multiple KB blocks
read_offset = offset
while read_offset < datasize:
data = self.read_raw(tid, fid, read_offset, 0xffff)
if not data:
# No data returned. Need to send SMB_COM_READ_ANDX to find out what is the error.
data = self.read_andx(tid, fid, read_offset, max_buf_size)
callback(data)
read_offset += len(data)
def __nonraw_stor_file(self, tid, fid, offset, datasize, callback):
if (self._dialects_parameters['Capabilities'] & SMB.CAP_LARGE_WRITEX) and self._SignatureEnabled is False:
max_buf_size = 65000
else:
max_buf_size = self._dialects_parameters['MaxBufferSize'] & ~0x3ff # Write in multiple KB blocks
write_offset = offset
while 1:
data = callback(max_buf_size)
if not data:
break
smb = self.write_andx(tid,fid,data, write_offset)
writeResponse = SMBCommand(smb['Data'][0])
writeResponseParameters = SMBWriteAndXResponse_Parameters(writeResponse['Parameters'])
write_offset += writeResponseParameters['Count']
def __raw_stor_file(self, tid, fid, offset, datasize, callback):
print "[MS-CIFS] This command was introduced in the CorePlus dialect, but is often listed as part of the LAN Manager 1.0 dialect.\nThis command has been deprecated.\nClients SHOULD use SMB_COM_WRITE_ANDX"
write_offset = offset
while 1:
max_raw_size = self._dialects_parameters['MaxRawSize']
            # Due to differing dialect interpretations of MaxRawSize, we're limiting it to 0xffff
if max_raw_size > 65535:
max_raw_size = 65535
read_data = callback(max_raw_size)
if not read_data:
break
read_len = len(read_data)
self.__send_smb_packet(SMB.SMB_COM_WRITE_RAW, 0, 0, tid, 0, pack('<HHHLLHLHH', fid, read_len, 0, write_offset, 0, 0, 0, 0, 59), '')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_WRITE_RAW):
self._sess.send_packet(read_data)
write_offset = write_offset + read_len
break
def get_server_domain(self):
return self.__server_domain
def get_server_os(self):
return self.__server_os
def set_server_os(self, os):
self.__server_os = os
def get_server_lanman(self):
return self.__server_lanman
def is_login_required(self):
# Login is required if share mode is user.
# Otherwise only public services or services in share mode
# are allowed.
return (self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SHARE_MASK) == SMB.SECURITY_SHARE_USER
def get_ntlmv1_response(self, key):
challenge = self._dialects_data['Challenge']
return ntlm.get_ntlmv1_response(key, challenge)
def login_extended(self, user, password, domain = '', lmhash = '', nthash = '', use_ntlmv2 = True ):
# Once everything's working we should join login methods into a single one
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_EXTENDED_SECURITY
# Are we required to sign SMB? If so we do it, if not we skip it
if self._SignatureRequired:
smb['Flags2'] |= SMB.FLAGS2_SMB_SECURITY_SIGNATURE
sessionSetup = SMBCommand(SMB.SMB_COM_SESSION_SETUP_ANDX)
sessionSetup['Parameters'] = SMBSessionSetupAndX_Extended_Parameters()
sessionSetup['Data'] = SMBSessionSetupAndX_Extended_Data()
sessionSetup['Parameters']['MaxBufferSize'] = 61440
sessionSetup['Parameters']['MaxMpxCount'] = 2
sessionSetup['Parameters']['VcNumber'] = 1
sessionSetup['Parameters']['SessionKey'] = 0
sessionSetup['Parameters']['Capabilities'] = SMB.CAP_EXTENDED_SECURITY | SMB.CAP_USE_NT_ERRORS | SMB.CAP_UNICODE | SMB.CAP_LARGE_READX | SMB.CAP_LARGE_WRITEX
# Let's build a NegTokenInit with the NTLMSSP
# TODO: In the future we should be able to choose different providers
blob = SPNEGO_NegTokenInit()
# NTLMSSP
blob['MechTypes'] = [TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']]
auth = ntlm.getNTLMSSPType1('',domain,self._SignatureRequired, use_ntlmv2 = use_ntlmv2)
blob['MechToken'] = str(auth)
sessionSetup['Parameters']['SecurityBlobLength'] = len(blob)
sessionSetup['Parameters'].getData()
sessionSetup['Data']['SecurityBlob'] = blob.getData()
# Fake Data here, don't want to get us fingerprinted
sessionSetup['Data']['NativeOS'] = 'Unix'
sessionSetup['Data']['NativeLanMan'] = 'Samba'
smb.addCommand(sessionSetup)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_SESSION_SETUP_ANDX):
# We will need to use this uid field for all future requests/responses
self._uid = smb['Uid']
# Now we have to extract the blob to continue the auth process
sessionResponse = SMBCommand(smb['Data'][0])
sessionParameters = SMBSessionSetupAndX_Extended_Response_Parameters(sessionResponse['Parameters'])
sessionData = SMBSessionSetupAndX_Extended_Response_Data(flags = smb['Flags2'])
sessionData['SecurityBlobLength'] = sessionParameters['SecurityBlobLength']
sessionData.fromString(sessionResponse['Data'])
respToken = SPNEGO_NegTokenResp(sessionData['SecurityBlob'])
# Let's parse some data and keep it to ourselves in case it is asked
ntlmChallenge = ntlm.NTLMAuthChallenge(respToken['ResponseToken'])
if ntlmChallenge['TargetInfoFields_len'] > 0:
infoFields = ntlmChallenge['TargetInfoFields']
av_pairs = ntlm.AV_PAIRS(ntlmChallenge['TargetInfoFields'][:ntlmChallenge['TargetInfoFields_len']])
if av_pairs[ntlm.NTLMSSP_AV_HOSTNAME] is not None:
try:
self.__server_name = av_pairs[ntlm.NTLMSSP_AV_HOSTNAME][1].decode('utf-16le')
except:
# For some reason, we couldn't decode Unicode here.. silently discard the operation
pass
if av_pairs[ntlm.NTLMSSP_AV_DOMAINNAME] is not None:
try:
if self.__server_name != av_pairs[ntlm.NTLMSSP_AV_DOMAINNAME][1].decode('utf-16le'):
self.__server_domain = av_pairs[ntlm.NTLMSSP_AV_DOMAINNAME][1].decode('utf-16le')
except:
# For some reason, we couldn't decode Unicode here.. silently discard the operation
pass
type3, exportedSessionKey = ntlm.getNTLMSSPType3(auth, respToken['ResponseToken'], user, password, domain, lmhash, nthash, use_ntlmv2 = use_ntlmv2)
if exportedSessionKey is not None:
self._SigningSessionKey = exportedSessionKey
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_EXTENDED_SECURITY #| SMB.FLAGS2_NT_STATUS
# Are we required to sign SMB? If so we do it, if not we skip it
if self._SignatureRequired:
smb['Flags2'] |= SMB.FLAGS2_SMB_SECURITY_SIGNATURE
respToken2 = SPNEGO_NegTokenResp()
respToken2['ResponseToken'] = str(type3)
# Reusing the previous structure
sessionSetup['Parameters']['SecurityBlobLength'] = len(respToken2)
sessionSetup['Data']['SecurityBlob'] = respToken2.getData()
# Storing some info for later use
self.__server_os = sessionData['NativeOS']
self.__server_lanman = sessionData['NativeLanMan']
smb.addCommand(sessionSetup)
self.sendSMB(smb)
smb = self.recvSMB()
self._uid = 0
if smb.isValidAnswer(SMB.SMB_COM_SESSION_SETUP_ANDX):
self._uid = smb['Uid']
sessionResponse = SMBCommand(smb['Data'][0])
sessionParameters = SMBSessionSetupAndXResponse_Parameters(sessionResponse['Parameters'])
sessionData = SMBSessionSetupAndXResponse_Data(flags = smb['Flags2'], data = sessionResponse['Data'])
self._action = sessionParameters['Action']
# If smb sign required, let's enable it for the rest of the connection
if self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SIGNATURES_REQUIRED:
self._SignSequenceNumber = 2
self._SignatureEnabled = True
# Set up the flags to be used from now on
self.__flags1 = SMB.FLAGS1_PATHCASELESS
self.__flags2 = SMB.FLAGS2_EXTENDED_SECURITY
return 1
else:
raise Exception('Error: Could not login successfully')
def login(self, user, password, domain = '', lmhash = '', nthash = ''):
# If we have hashes, normalize them
if ( lmhash != '' or nthash != ''):
if len(lmhash) % 2: lmhash = '0%s' % lmhash
if len(nthash) % 2: nthash = '0%s' % nthash
try: # just in case they were converted already
lmhash = a2b_hex(lmhash)
nthash = a2b_hex(nthash)
except:
pass
if self._dialects_parameters['Capabilities'] & SMB.CAP_EXTENDED_SECURITY:
try:
self.login_extended(user, password, domain, lmhash, nthash, use_ntlmv2 = True)
except:
# If the target OS is Windows 5.0 or Samba, let's try using NTLMv1
if (self.get_server_lanman().find('Windows 2000') != -1) or (self.get_server_lanman().find('Samba') != -1):
self.login_extended(user, password, domain, lmhash, nthash, use_ntlmv2 = False)
self.__isNTLMv2 = False
else:
raise
else:
self.login_standard(user, password, domain, lmhash, nthash)
self.__isNTLMv2 = False
def login_standard(self, user, password, domain = '', lmhash = '', nthash = ''):
# Only supports NTLMv1
# Password is only encrypted if the server passed us an "encryption key" during protocol dialect negotiation
if self._dialects_parameters['ChallengeLength'] > 0:
if lmhash != '' or nthash != '':
pwd_ansi = self.get_ntlmv1_response(lmhash)
pwd_unicode = self.get_ntlmv1_response(nthash)
elif password:
lmhash = ntlm.compute_lmhash(password)
nthash = ntlm.compute_nthash(password)
pwd_ansi = self.get_ntlmv1_response(lmhash)
pwd_unicode = self.get_ntlmv1_response(nthash)
else: # NULL SESSION
pwd_ansi = ''
pwd_unicode = ''
else:
pwd_ansi = password
pwd_unicode = ''
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
sessionSetup = SMBCommand(SMB.SMB_COM_SESSION_SETUP_ANDX)
sessionSetup['Parameters'] = SMBSessionSetupAndX_Parameters()
sessionSetup['Data'] = SMBSessionSetupAndX_Data()
sessionSetup['Parameters']['MaxBuffer'] = 61440
sessionSetup['Parameters']['MaxMpxCount'] = 2
sessionSetup['Parameters']['VCNumber'] = os.getpid()
sessionSetup['Parameters']['SessionKey'] = self._dialects_parameters['SessionKey']
sessionSetup['Parameters']['AnsiPwdLength'] = len(pwd_ansi)
sessionSetup['Parameters']['UnicodePwdLength'] = len(pwd_unicode)
sessionSetup['Parameters']['Capabilities'] = SMB.CAP_RAW_MODE | SMB.CAP_USE_NT_ERRORS | SMB.CAP_LARGE_READX | SMB.CAP_LARGE_WRITEX
sessionSetup['Data']['AnsiPwd'] = pwd_ansi
sessionSetup['Data']['UnicodePwd'] = pwd_unicode
sessionSetup['Data']['Account'] = str(user)
sessionSetup['Data']['PrimaryDomain'] = str(domain)
sessionSetup['Data']['NativeOS'] = str(os.name)
sessionSetup['Data']['NativeLanMan'] = 'pysmb'
smb.addCommand(sessionSetup)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_SESSION_SETUP_ANDX):
# We will need to use this uid field for all future requests/responses
self._uid = smb['Uid']
sessionResponse = SMBCommand(smb['Data'][0])
sessionParameters = SMBSessionSetupAndXResponse_Parameters(sessionResponse['Parameters'])
sessionData = SMBSessionSetupAndXResponse_Data(flags = smb['Flags2'], data = sessionResponse['Data'])
self._action = sessionParameters['Action']
# Still gotta figure out how to do this with no EXTENDED_SECURITY
if sessionParameters['Action'] & SMB_SETUP_USE_LANMAN_KEY == 0:
self._SigningChallengeResponse = sessionSetup['Data']['UnicodePwd']
self._SigningSessionKey = nthash
else:
self._SigningChallengeResponse = sessionSetup['Data']['AnsiPwd']
self._SigningSessionKey = lmhash
#self._SignSequenceNumber = 1
#self.checkSignSMB(smb, self._SigningSessionKey ,self._SigningChallengeResponse)
#self._SignatureEnabled = True
self.__server_os = sessionData['NativeOS']
self.__server_lanman = sessionData['NativeLanMan']
self.__server_domain = sessionData['PrimaryDomain']
# Set up the flags to be used from now on
self.__flags1 = SMB.FLAGS1_PATHCASELESS
self.__flags2 = 0
return 1
else: raise Exception('Error: Could not login successfully')
def waitNamedPipe(self, tid, pipe, noAnswer = 0):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
transCommand = SMBCommand(SMB.SMB_COM_TRANSACTION)
transCommand['Parameters'] = SMBTransaction_Parameters()
transCommand['Data'] = SMBTransaction_Data()
setup = '\x53\x00\x00\x00'
name = '\\PIPE%s\x00' % pipe
transCommand['Parameters']['Setup'] = setup
transCommand['Parameters']['TotalParameterCount'] = 0
transCommand['Parameters']['TotalDataCount'] = 0
transCommand['Parameters']['MaxParameterCount'] = 0
transCommand['Parameters']['MaxDataCount'] = 0
transCommand['Parameters']['Timeout'] = 5000
transCommand['Parameters']['ParameterCount'] = 0
transCommand['Parameters']['ParameterOffset'] = 32+3+28+len(setup)+len(name)
transCommand['Parameters']['DataCount'] = 0
transCommand['Parameters']['DataOffset'] = 0
transCommand['Data']['Name'] = name
transCommand['Data']['Trans_Parameters'] = ''
transCommand['Data']['Trans_Data'] = ''
if noAnswer:
transCommand['Parameters']['Flags'] = TRANS_NO_RESPONSE
smb.addCommand(transCommand)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TRANSACTION):
return 1
return 0
def read(self, tid, fid, offset=0, max_size = None, wait_answer=1):
if not max_size:
max_size = self._dialects_parameters['MaxBufferSize'] # Read in multiple KB blocks
        # max_size is not working: although it should, the server returns an error (More data avail)
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
read = SMBCommand(SMB.SMB_COM_READ)
read['Parameters'] = SMBRead_Parameters()
read['Parameters']['Fid'] = fid
read['Parameters']['Offset'] = offset
read['Parameters']['Count'] = max_size
smb.addCommand(read)
if wait_answer:
answer = ''
while 1:
self.sendSMB(smb)
ans = self.recvSMB()
if ans.isValidAnswer(SMB.SMB_COM_READ):
readResponse = SMBCommand(ans['Data'][0])
readParameters = SMBReadResponse_Parameters(readResponse['Parameters'])
readData = SMBReadResponse_Data(readResponse['Data'])
return readData['Data']
return None
def read_andx(self, tid, fid, offset=0, max_size = None, wait_answer=1, smb_packet=None):
if not max_size:
if (self._dialects_parameters['Capabilities'] & SMB.CAP_LARGE_READX) and self._SignatureEnabled is False:
max_size = 65000
else:
max_size = self._dialects_parameters['MaxBufferSize'] # Read in multiple KB blocks
        # max_size is not working: although it should, the server returns an error (More data avail)
if smb_packet == None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
readAndX = SMBCommand(SMB.SMB_COM_READ_ANDX)
readAndX['Parameters'] = SMBReadAndX_Parameters()
readAndX['Parameters']['Fid'] = fid
readAndX['Parameters']['Offset'] = offset
readAndX['Parameters']['MaxCount'] = max_size
smb.addCommand(readAndX)
else:
smb = smb_packet
if wait_answer:
answer = ''
while 1:
self.sendSMB(smb)
ans = self.recvSMB()
if ans.isValidAnswer(SMB.SMB_COM_READ_ANDX):
# XXX Here we are only using a few fields from the response
readAndXResponse = SMBCommand(ans['Data'][0])
readAndXParameters = SMBReadAndXResponse_Parameters(readAndXResponse['Parameters'])
offset = readAndXParameters['DataOffset']
count = readAndXParameters['DataCount']+0x10000*readAndXParameters['DataCount_Hi']
answer += str(ans)[offset:offset+count]
if not ans.isMoreData():
return answer
max_size = min(max_size, readAndXParameters['Remaining'])
readAndX['Parameters']['Offset'] += count # XXX Offset is not important (apparently)
else:
self.sendSMB(smb)
ans = self.recvSMB()
try:
if ans.isValidAnswer(SMB.SMB_COM_READ_ANDX):
return ans
else:
return None
except:
return ans
return None
def read_raw(self, tid, fid, offset=0, max_size = None, wait_answer=1):
if not max_size:
max_size = self._dialects_parameters['MaxBufferSize'] # Read in multiple KB blocks
        # max_size is not working: although it should, the server returns an error (More data avail)
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
readRaw = SMBCommand(SMB.SMB_COM_READ_RAW)
readRaw['Parameters'] = SMBReadRaw_Parameters()
readRaw['Parameters']['Fid'] = fid
readRaw['Parameters']['Offset'] = offset
readRaw['Parameters']['MaxCount'] = max_size
smb.addCommand(readRaw)
self.sendSMB(smb)
if wait_answer:
data = self._sess.recv_packet(self.__timeout).get_trailer()
if not data:
# If there is no data it means there was an error
data = self.read_andx(tid, fid, offset, max_size)
return data
return None
def write(self,tid,fid,data, offset = 0, wait_answer=1):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
write = SMBCommand(SMB.SMB_COM_WRITE)
smb.addCommand(write)
write['Parameters'] = SMBWrite_Parameters()
write['Data'] = SMBWrite_Data()
write['Parameters']['Fid'] = fid
write['Parameters']['Count'] = len(data)
write['Parameters']['Offset'] = offset
write['Parameters']['Remaining'] = len(data)
write['Data']['Data'] = data
self.sendSMB(smb)
if wait_answer:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_WRITE):
return smb
return None
def write_andx(self,tid,fid,data, offset = 0, wait_answer=1, write_pipe_mode = False, smb_packet=None):
if smb_packet == None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
writeAndX = SMBCommand(SMB.SMB_COM_WRITE_ANDX)
smb.addCommand(writeAndX)
writeAndX['Parameters'] = SMBWriteAndX_Parameters()
writeAndX['Parameters']['Fid'] = fid
writeAndX['Parameters']['Offset'] = offset
writeAndX['Parameters']['WriteMode'] = 8
writeAndX['Parameters']['Remaining'] = len(data)
writeAndX['Parameters']['DataLength'] = len(data)
writeAndX['Parameters']['DataOffset'] = len(smb) # this length already includes the parameter
writeAndX['Data'] = data
if write_pipe_mode is True:
# First of all we gotta know what the MaxBuffSize is
maxBuffSize = self._dialects_parameters['MaxBufferSize']
if len(data) > maxBuffSize:
chunks_size = maxBuffSize - 5
writeAndX['Parameters']['WriteMode'] = 0x0c
sendData = '\xff\xff' + data
totalLen = len(sendData)
writeAndX['Parameters']['DataLength'] = chunks_size
writeAndX['Parameters']['Remaining'] = totalLen-2
writeAndX['Data'] = sendData[:chunks_size]
self.sendSMB(smb)
if wait_answer:
smbResp = self.recvSMB()
smbResp.isValidAnswer(SMB.SMB_COM_WRITE_ANDX)
alreadySent = chunks_size
sendData = sendData[chunks_size:]
while alreadySent < totalLen:
writeAndX['Parameters']['WriteMode'] = 0x04
writeAndX['Parameters']['DataLength'] = len(sendData[:chunks_size])
writeAndX['Data'] = sendData[:chunks_size]
self.sendSMB(smb)
if wait_answer:
smbResp = self.recvSMB()
smbResp.isValidAnswer(SMB.SMB_COM_WRITE_ANDX)
alreadySent += writeAndX['Parameters']['DataLength']
sendData = sendData[chunks_size:]
return smbResp
else:
smb = smb_packet
self.sendSMB(smb)
if wait_answer:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_WRITE_ANDX):
return smb
return None
def write_raw(self,tid,fid,data, offset = 0, wait_answer=1):
print "[MS-CIFS] This command was introduced in the CorePlus dialect, but is often listed as part of the LAN Manager 1.0 dialect.\nThis command has been deprecated.\nClients SHOULD use SMB_COM_WRITE_ANDX"
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
writeRaw = SMBCommand(SMB.SMB_COM_WRITE_RAW)
smb.addCommand(writeRaw)
writeRaw['Parameters'] = SMBWriteRaw_Parameters()
writeRaw['Parameters']['Fid'] = fid
writeRaw['Parameters']['Offset'] = offset
writeRaw['Parameters']['Count'] = len(data)
writeRaw['Parameters']['DataLength'] = 0
writeRaw['Parameters']['DataOffset'] = 0
self.sendSMB(smb)
self._sess.send_packet(data)
if wait_answer:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_WRITE_RAW):
return smb
return None
def TransactNamedPipe(self, tid, fid, data = '', noAnswer = 0, waitAnswer = 1, offset = 0):
self.send_trans(tid,pack('<HH', 0x26, fid),'\\PIPE\\\x00','',data, noAnswer = noAnswer)
if noAnswer or not waitAnswer:
return
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TRANSACTION):
transResponse = SMBCommand(smb['Data'][0])
transParameters = SMBTransactionResponse_Parameters(transResponse['Parameters'])
return transResponse['Data'][-transParameters['TotalDataCount']:] # Remove Potential Prefix Padding
return None
def nt_create_andx(self,tid,filename, smb_packet=None, cmd = None):
if smb_packet == None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
else:
smb = smb_packet
if cmd == None:
ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX)
ntCreate['Parameters'] = SMBNtCreateAndX_Parameters()
ntCreate['Data'] = SMBNtCreateAndX_Data()
ntCreate['Parameters']['FileNameLength'] = len(filename)
ntCreate['Parameters']['CreateFlags'] = 0x16
ntCreate['Parameters']['AccessMask'] = 0x2019f
ntCreate['Parameters']['CreateOptions'] = 0x40
ntCreate['Data']['FileName'] = filename
else:
ntCreate = cmd
smb.addCommand(ntCreate)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_NT_CREATE_ANDX):
# XXX Here we are ignoring the rest of the response
ntCreateResponse = SMBCommand(smb['Data'][0])
ntCreateParameters = SMBNtCreateAndXResponse_Parameters(ntCreateResponse['Parameters'])
self.fid = ntCreateParameters['Fid']
return ntCreateParameters['Fid']
def logoff(self):
smb = NewSMBPacket()
logOff = SMBCommand(SMB.SMB_COM_LOGOFF_ANDX)
logOff['Parameters'] = SMBLogOffAndX()
smb.addCommand(logOff)
self.sendSMB(smb)
smb = self.recvSMB()
# Let's clear some fields so you can login again under the same session
self._uid = 0
def list_shared(self):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\IPC$')
buf = StringIO()
try:
self.send_trans(tid, '', '\\PIPE\\LANMAN\0', '\x00\x00WrLeh\0B13BWz\0\x01\x00\xe0\xff', '')
numentries = 0
share_list = [ ]
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_TRANSACTION):
has_more, _, transparam, transdata = self.__decode_trans(s.get_parameter_words(), s.get_buffer())
if not numentries:
status, data_offset, numentries = unpack('<HHH', transparam[:6])
buf.write(transdata)
if not has_more:
share_data = buf.getvalue()
offset = 0
for i in range(0, numentries):
name = share_data[offset:string.find(share_data, '\0', offset)]
type, commentoffset = unpack('<HH', share_data[offset + 14:offset + 18])
comment = share_data[commentoffset-data_offset:share_data.find('\0', commentoffset-data_offset)]
offset = offset + 20
share_list.append(SharedDevice(name, type, comment))
return share_list
finally:
buf.close()
self.disconnect_tree(tid)
def list_path(self, service, path = '*', password = None):
path = string.replace(path, '/', '\\')
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.trans2(tid, '\x01\x00', '\x00', '\x16\x00\x00\x02\x06\x00\x04\x01\x00\x00\x00\x00' + path + '\x00', '')
resume = False
files = [ ]
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_TRANSACTION2):
has_more, _, transparam, transdata = self.__decode_trans(s.get_parameter_words(), s.get_buffer())
# A fairly quick trans reassembly.
while has_more:
s2 = self.recv_packet()
if self.isValidAnswer(s2,SMB.SMB_COM_TRANSACTION2):
has_more, _, transparam2, transdata2 = self.__decode_trans(s2.get_parameter_words(), s2.get_buffer())
transdata += transdata2
transparam += transparam2
if not resume:
sid, searchcnt, eos, erroffset, lastnameoffset = unpack('<HHHHH', transparam)
else:
searchcnt, eos, erroffset, lastnameoffset = unpack('<HHHH', transparam)
offset = 0
data_len = len(transdata)
while offset < data_len:
nextentry, fileindex, lowct, highct, lowat, highat, lowmt, highmt, lowcht, hightcht, loweof, higheof, lowsz, highsz, attrib, longnamelen, easz, shortnamelen = unpack('<lL12LLlLB', transdata[offset:offset + 69])
files.append(SharedFile(highct << 32 | lowct, highat << 32 | lowat, highmt << 32 | lowmt, higheof << 32 | loweof, highsz << 32 | lowsz, attrib, transdata[offset + 70:offset + 70 + shortnamelen], transdata[offset + 94:offset + 94 + longnamelen]))
resume_filename = transdata[offset + 94:offset + 94 + longnamelen]
offset = offset + nextentry
if not nextentry:
break
if eos:
return files
else:
self.trans2(tid, '\x02\x00', '\x00', pack('<H', sid) + '\x56\x05\x04\x01\x00\x00\x00\x00\x06\x00' + resume_filename + '\x00', '')
resume = True
resume_filename = ''
finally:
self.disconnect_tree(tid)
def retr_file(self, service, filename, callback, mode = SMB_O_OPEN, offset = 0, password = None):
filename = string.replace(filename, '/', '\\')
fid = -1
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
fid, attrib, lastwritetime, datasize, grantedaccess, filetype, devicestate, action, serverfid = self.open_andx(tid, filename, mode, SMB_ACCESS_READ | SMB_SHARE_DENY_WRITE)
if not datasize:
datasize = self.query_file_info(tid, fid)
self.__nonraw_retr_file(tid, fid, offset, datasize, callback)
finally:
if fid >= 0:
self.close(tid, fid)
self.disconnect_tree(tid)
def stor_file(self, service, filename, callback, mode = SMB_O_CREAT | SMB_O_TRUNC, offset = 0, password = None):
filename = string.replace(filename, '/', '\\')
fid = -1
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
fid, attrib, lastwritetime, datasize, grantedaccess, filetype, devicestate, action, serverfid = self.open_andx(tid, filename, mode, SMB_ACCESS_WRITE | SMB_SHARE_DENY_WRITE)
self.__nonraw_stor_file(tid, fid, offset, datasize, callback)
finally:
if fid >= 0:
self.close(tid, fid)
self.disconnect_tree(tid)
def stor_file_nonraw(self, service, filename, callback, mode = SMB_O_CREAT | SMB_O_TRUNC, offset = 0, password = None):
filename = string.replace(filename, '/', '\\')
fid = -1
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
fid, attrib, lastwritetime, datasize, grantedaccess, filetype, devicestate, action, serverfid = self.open_andx(tid, filename, mode, SMB_ACCESS_WRITE | SMB_SHARE_DENY_WRITE)
self.__nonraw_stor_file(tid, fid, offset, datasize, callback)
finally:
if fid >= 0:
self.close(tid, fid)
self.disconnect_tree(tid)
def copy(self, src_service, src_path, dest_service, dest_path, callback = None, write_mode = SMB_O_CREAT | SMB_O_TRUNC, src_password = None, dest_password = None):
dest_path = string.replace(dest_path, '/', '\\')
src_path = string.replace(src_path, '/', '\\')
src_tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + src_service, src_password)
dest_tid = -1
try:
if src_service == dest_service:
dest_tid = src_tid
else:
dest_tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + dest_service, dest_password)
dest_fid = self.open_andx(dest_tid, dest_path, write_mode, SMB_ACCESS_WRITE | SMB_SHARE_DENY_WRITE)[0]
src_fid, _, _, src_datasize, _, _, _, _, _ = self.open_andx(src_tid, src_path, SMB_O_OPEN, SMB_ACCESS_READ | SMB_SHARE_DENY_WRITE)
if not src_datasize:
src_datasize = self.query_file_info(src_tid, src_fid)
if callback:
callback(0, src_datasize)
max_buf_size = (self._dialects_parameters['MaxBufferSize'] >> 10) << 10
read_offset = 0
write_offset = 0
while read_offset < src_datasize:
self.__send_smb_packet(SMB.SMB_COM_READ_ANDX, 0, 0, src_tid, 0, pack('<BBHHLHHLH', 0xff, 0, 0, src_fid, read_offset, max_buf_size, max_buf_size, 0, 0), '')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_READ_ANDX):
offset = unpack('<H', s.get_parameter_words()[2:4])[0]
data_len, dataoffset = unpack('<HH', s.get_parameter_words()[10+offset:14+offset])
d = s.get_buffer()
if data_len == len(d):
self.__send_smb_packet(SMB.SMB_COM_WRITE_ANDX, 0, 0, dest_tid, 0, pack('<BBHHLLHHHHH', 0xff, 0, 0, dest_fid, write_offset, 0, 0, 0, 0, data_len, 59), d)
else:
self.__send_smb_packet(SMB.SMB_COM_WRITE_ANDX, 0, 0, dest_tid, 0, pack('<BBHHLLHHHHH', 0xff, 0, 0, dest_fid, write_offset, 0, 0, 0, 0, data_len, 59), d[dataoffset - 59:dataoffset - 59 + data_len])
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_WRITE_ANDX):
data_len, dataoffset = unpack('<HH', s.get_parameter_words()[4:8])
break
read_offset = read_offset + data_len
if callback:
callback(read_offset, src_datasize)
break
finally:
self.disconnect_tree(src_tid)
if dest_tid > -1 and src_service != dest_service:
self.disconnect_tree(dest_tid)
def check_dir(self, service, path, password = None):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.__send_smb_packet(SMB.SMB_COM_CHECK_DIRECTORY, 0x08, 0, tid, 0, '', '\x04' + path + '\x00')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_CHECK_DIRECTORY):
return
finally:
self.disconnect_tree(tid)
def remove(self, service, path, password = None):
# Perform a list to ensure the path exists
self.list_path(service, path, password)
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.__send_smb_packet(SMB.SMB_COM_DELETE, 0x08, 0, tid, 0, pack('<H', ATTR_HIDDEN | ATTR_SYSTEM | ATTR_ARCHIVE), '\x04' + path + '\x00')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_DELETE):
return
finally:
self.disconnect_tree(tid)
def rmdir(self, service, path, password = None):
# Check that the directory exists
self.check_dir(service, path, password)
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.__send_smb_packet(SMB.SMB_COM_DELETE_DIRECTORY, 0x08, 0, tid, 0, '', '\x04' + path + '\x00')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_DELETE_DIRECTORY):
return
finally:
self.disconnect_tree(tid)
def mkdir(self, service, path, password = None):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
smb = NewSMBPacket()
smb['Tid'] = tid
createDir = SMBCommand(SMB.SMB_COM_CREATE_DIRECTORY)
createDir['Data'] = SMBCreateDirectory_Data()
createDir['Data']['DirectoryName'] = path
smb.addCommand(createDir)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_CREATE_DIRECTORY):
return 1
return 0
finally:
self.disconnect_tree(tid)
def rename(self, service, old_path, new_path, password = None):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
smb = NewSMBPacket()
smb['Tid'] = tid
            smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
renameCmd = SMBCommand(SMB.SMB_COM_RENAME)
renameCmd['Parameters'] = SMBRename_Parameters()
renameCmd['Parameters']['SearchAttributes'] = ATTR_SYSTEM | ATTR_HIDDEN | ATTR_DIRECTORY
renameCmd['Data'] = SMBRename_Data()
renameCmd['Data']['OldFileName'] = old_path
renameCmd['Data']['NewFileName'] = new_path
smb.addCommand(renameCmd)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_RENAME):
return 1
return 0
finally:
self.disconnect_tree(tid)
def get_socket(self):
return self._sess.get_socket()
ERRDOS = { 1: 'Invalid function',
2: 'File not found',
3: 'Invalid directory',
4: 'Too many open files',
5: 'Access denied',
6: 'Invalid file handle. Please file a bug report.',
7: 'Memory control blocks destroyed',
8: 'Out of memory',
9: 'Invalid memory block address',
10: 'Invalid environment',
11: 'Invalid format',
12: 'Invalid open mode',
13: 'Invalid data',
15: 'Invalid drive',
16: 'Attempt to remove server\'s current directory',
17: 'Not the same device',
18: 'No files found',
32: 'Sharing mode conflicts detected',
33: 'Lock request conflicts detected',
80: 'File already exists'
}
ERRSRV = { 1: 'Non-specific error',
2: 'Bad password',
4: 'Access denied',
5: 'Invalid tid. Please file a bug report.',
6: 'Invalid network name',
7: 'Invalid device',
49: 'Print queue full',
50: 'Print queue full',
51: 'EOF on print queue dump',
52: 'Invalid print file handle',
64: 'Command not recognized. Please file a bug report.',
65: 'Internal server error',
67: 'Invalid path',
69: 'Invalid access permissions',
71: 'Invalid attribute mode',
81: 'Server is paused',
82: 'Not receiving messages',
83: 'No room to buffer messages',
87: 'Too many remote user names',
88: 'Operation timeout',
89: 'Out of resources',
91: 'Invalid user handle. Please file a bug report.',
250: 'Temporarily unable to support raw mode for transfer',
251: 'Temporarily unable to support raw mode for transfer',
252: 'Continue in MPX mode',
65535: 'Unsupported function'
}
ERRHRD = { 19: 'Media is write-protected',
20: 'Unknown unit',
21: 'Drive not ready',
22: 'Unknown command',
23: 'CRC error',
24: 'Bad request',
25: 'Seek error',
26: 'Unknown media type',
27: 'Sector not found',
28: 'Printer out of paper',
29: 'Write fault',
30: 'Read fault',
31: 'General failure',
32: 'Open conflicts with an existing open',
33: 'Invalid lock request',
34: 'Wrong disk in drive',
35: 'FCBs not available',
36: 'Sharing buffer exceeded'
}
| apache-2.0 | -8,134,039,149,275,074,000 | 43.43501 | 319 | 0.581626 | false | 3.720197 | false | false | false |
sniperganso/python-manilaclient | manilaclient/base.py | 1 | 7387 | # Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
import contextlib
import hashlib
import os
from manilaclient import exceptions
from manilaclient.openstack.common import cliutils
from manilaclient import utils
# Python 2.4 compat
try:
all
except NameError:
def all(iterable):
return True not in (not x for x in iterable)
class Manager(utils.HookableMixin):
"""Manager for CRUD operations.
Managers interact with a particular type of API (shares, snapshots,
etc.) and provide CRUD operations for them.
"""
resource_class = None
def __init__(self, api):
self.api = api
@property
def api_version(self):
return self.api.api_version
def _list(self, url, response_key, obj_class=None, body=None):
resp = None
if body:
resp, body = self.api.client.post(url, body=body)
else:
resp, body = self.api.client.get(url)
if obj_class is None:
obj_class = self.resource_class
data = body[response_key]
# NOTE(ja): keystone returns values as list as {'values': [ ... ]}
# unlike other services which just return the list...
if isinstance(data, dict):
try:
data = data['values']
except KeyError:
pass
with self.completion_cache('human_id', obj_class, mode="w"):
with self.completion_cache('uuid', obj_class, mode="w"):
return [obj_class(self, res, loaded=True)
for res in data if res]
@contextlib.contextmanager
def completion_cache(self, cache_type, obj_class, mode):
"""Bash autocompletion items storage.
The completion cache store items that can be used for bash
autocompletion, like UUIDs or human-friendly IDs.
A resource listing will clear and repopulate the cache.
A resource create will append to the cache.
Delete is not handled because listings are assumed to be performed
often enough to keep the cache reasonably up-to-date.
"""
base_dir = cliutils.env('manilaclient_UUID_CACHE_DIR',
'MANILACLIENT_UUID_CACHE_DIR',
default="~/.manilaclient")
# NOTE(sirp): Keep separate UUID caches for each username + endpoint
# pair
username = cliutils.env('OS_USERNAME', 'MANILA_USERNAME')
url = cliutils.env('OS_URL', 'MANILA_URL')
uniqifier = hashlib.md5(username.encode('utf-8') +
url.encode('utf-8')).hexdigest()
cache_dir = os.path.expanduser(os.path.join(base_dir, uniqifier))
try:
os.makedirs(cache_dir, 0o755)
except OSError:
# NOTE(kiall): This is typically either permission denied while
# attempting to create the directory, or the directory
# already exists. Either way, don't fail.
pass
resource = obj_class.__name__.lower()
filename = "%s-%s-cache" % (resource, cache_type.replace('_', '-'))
path = os.path.join(cache_dir, filename)
cache_attr = "_%s_cache" % cache_type
try:
setattr(self, cache_attr, open(path, mode))
except IOError:
# NOTE(kiall): This is typically a permission denied while
# attempting to write the cache file.
pass
try:
yield
finally:
cache = getattr(self, cache_attr, None)
if cache:
cache.close()
delattr(self, cache_attr)
def write_to_completion_cache(self, cache_type, val):
cache = getattr(self, "_%s_cache" % cache_type, None)
if cache:
cache.write("%s\n" % val)
def _get(self, url, response_key=None):
resp, body = self.api.client.get(url)
if response_key:
return self.resource_class(self, body[response_key], loaded=True)
else:
return self.resource_class(self, body, loaded=True)
def _get_with_base_url(self, url, response_key=None):
resp, body = self.api.client.get_with_base_url(url)
if response_key:
return [self.resource_class(self, res, loaded=True)
for res in body[response_key] if res]
else:
return self.resource_class(self, body, loaded=True)
def _create(self, url, body, response_key, return_raw=False, **kwargs):
self.run_hooks('modify_body_for_create', body, **kwargs)
resp, body = self.api.client.post(url, body=body)
if return_raw:
return body[response_key]
with self.completion_cache('human_id', self.resource_class, mode="a"):
with self.completion_cache('uuid', self.resource_class, mode="a"):
return self.resource_class(self, body[response_key])
def _delete(self, url):
resp, body = self.api.client.delete(url)
def _update(self, url, body, response_key=None, **kwargs):
self.run_hooks('modify_body_for_update', body, **kwargs)
resp, body = self.api.client.put(url, body=body)
if body:
if response_key:
return self.resource_class(self, body[response_key])
else:
return self.resource_class(self, body)
class ManagerWithFind(Manager):
"""Like a `Manager`, but with additional `find()`/`findall()` methods."""
def find(self, **kwargs):
"""Find a single item with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
matches = self.findall(**kwargs)
num_matches = len(matches)
if num_matches == 0:
msg = "No %s matching %s." % (self.resource_class.__name__, kwargs)
raise exceptions.NotFound(404, msg)
elif num_matches > 1:
raise exceptions.NoUniqueMatch
else:
return matches[0]
def findall(self, **kwargs):
"""Find all items with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
found = []
searches = list(kwargs.items())
for obj in self.list():
try:
if all(getattr(obj, attr) == value
for (attr, value) in searches):
found.append(obj)
except AttributeError:
continue
return found
def list(self):
raise NotImplementedError
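# Illustrative sketch (not part of the original module): a concrete manager is
# expected to subclass ManagerWithFind, set resource_class, and implement
# list(). The ShareManager class and URL below are hypothetical and only show
# how _list()/find() are meant to be wired together.
#
#     class ShareManager(ManagerWithFind):
#         resource_class = Share          # some Resource-like class (assumed)
#
#         def list(self):
#             return self._list('/shares/detail', 'shares')
#
#     # manager.find(name='my-share') then loads the full listing and filters
#     # client-side, raising NotFound for 0 matches or NoUniqueMatch for >1.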
| apache-2.0 | 2,738,515,539,624,619,500 | 33.35814 | 79 | 0.590226 | false | 4.119911 | false | false | false |
pingali/dgit | dgitcore/plugins/validator.py | 1 | 1534 | #!/usr/bin/env python
import os, sys
import json
from collections import namedtuple
import requests
Key = namedtuple("Key", ["name","version"])
class ValidatorBase(object):
"""
This is the base class for all backends including
"""
def __init__(self, name, version, description, supported=[]):
"""
Parameters:
-----------
name: Name of the backend service e.g., s3
version: Version of this implementation
description: Text description of this service
supported: supported services with including name
For example, there may be multiple s3 implementations that
support different kinds of services.
"""
self.enable = 'y'
self.name = name
self.version = version
self.description = description
self.support = supported + [name]
self.initialize()
def __str__(self):
return self.name
def initialize(self):
"""
Called to initialize sessions, internal objects etc.
"""
return
def autooptions(self):
"""
Get default options
"""
return None
def evaluate(self, repo, files, rules):
"""
Evaluate the repo
returns: A list of dictionaries with:
target: relative path of the file
rules: rules file used
validator: name of the validator
status: OK/Success/Error
Message: Any additional information
"""
return []
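# Illustrative sketch (not part of the original module): a minimal plugin built
# on ValidatorBase. The rule logic is hypothetical; it only shows the expected
# shape of the dictionaries returned by evaluate().
#
#     class FilenameValidator(ValidatorBase):
#         def __init__(self):
#             super(FilenameValidator, self).__init__(
#                 name="filename-check", version="0.1",
#                 description="Flags tracked files with upper-case names")
#
#         def evaluate(self, repo, files, rules):
#             return [{'target': f, 'rules': rules, 'validator': self.name,
#                      'status': 'OK' if f == f.lower() else 'ERROR',
#                      'message': ''} for f in files]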
| isc | 8,900,136,766,115,214,000 | 24.566667 | 66 | 0.584094 | false | 5.07947 | false | false | false |
whatsthehubbub/rippleeffect | fabfile/base.py | 1 | 2942 | from fabric.api import *
from fabric.colors import cyan
from fabric.contrib import files
packages = (
'build-essential',
'git',
'mercurial',
'rsync',
'vim',
)
def install_base_packages():
sudo('apt-get update')
for package in packages:
sudo('apt-get install %s --assume-yes' % package)
def upgrade_system():
sudo('apt-get update')
sudo('apt-get dist-upgrade --assume-yes --quiet')
def create_deploy_user():
"creates deployment user"
username = 'deploy'
# create deploy user & home without password
if files.contains('/etc/passwd', username):
return
sudo('useradd %s --create-home --shell /bin/bash' % username)
# create authorized_keys & upload public key
sudo('mkdir -p /home/deploy/.ssh')
sudo('chmod 700 /home/deploy/.ssh')
pub_key = open(env.key_filename, 'rb').read()
files.append('/home/%s/.ssh/authorized_keys' % username, pub_key, use_sudo=True)
# update authorized_keys permissions
sudo('chmod 400 /home/%s/.ssh/authorized_keys' % username)
sudo('chown deploy:deploy /home/%s/.ssh -R' % username)
# create sudo password & add to sudoers
print(cyan('set sudo password for "%s" user' % username))
sudo('passwd %s' % username)
files.append('/etc/sudoers', '%s ALL=(ALL) ALL' % username, use_sudo=True)
def automate_security_updates():
"enable automatic installation of security updates"
sudo('apt-get install unattended-upgrades')
files.upload_template(
'apt/10periodic',
'/etc/apt/apt.conf.d/10periodic',
env,
template_dir='fabfile/templates',
use_sudo=True,
        mode=0o644,  # octal permission bits; a decimal 644 would set the wrong mode
)
# TODO: checkout apticron for email alerts
def install_rackspace_monitoring():
# add the rackspace apt repo to list
files.append("/etc/apt/sources.list.d/rackspace-monitoring-agent.list",
"deb http://stable.packages.cloudmonitoring.rackspace.com/ubuntu-12.04-x86_64 cloudmonitoring main",
use_sudo=True)
# install rackspace repo signing key
run('curl https://monitoring.api.rackspacecloud.com/pki/agent/linux.asc | apt-key add -')
# install the monitoring agent
run('apt-get update')
run('apt-get install rackspace-monitoring-agent')
# run setup
run('rackspace-monitoring-agent --setup')
def harden_sudoers():
"""
>> /etc/sudoers
root ALL=(ALL) ALL
deploy ALL=(ALL) ALL
"""
pass
def harden_ssh():
"""
>> /etc/ssh/sshd_config
PermitRootLogin no
PasswordAuthentication no
"""
run('service ssh restart')
def setup_firewall():
"""
ufw allow from {your-ip} to any port 22
ufw allow 80
ufw enable
"""
pass
def harden_server():
setup_firewall()
harden_ssh()
harden_sudoers()
def provision_base_server():
upgrade_system()
install_base_packages()
automate_security_updates()
create_deploy_user()
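# Usage sketch (assumption, not part of the original fabfile): these tasks are
# normally driven from the fab command line, with host names as placeholders:
#
#     fab -H root@new-server provision_base_server
#     fab -H deploy@new-server harden_server
#
# Note that harden_server still depends on the TODO bodies of setup_firewall,
# harden_ssh and harden_sudoers above.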
| mit | 6,000,494,274,286,775,000 | 24.807018 | 108 | 0.64208 | false | 3.557437 | false | false | false |
astrophysicist87/iEBE-Plumberg | EBE-Node/crank/SequentialEventDriver.py | 1 | 56172 | #! /usr/bin/env python
# This package performs sequential calculations of a given number of events,
# after reading parameters from ParameterDict.py. The most important control
# parameters are set in controlParameterList; for other parameters see
# allParameterLists. This package is intended to run in the background, thus
# only basic output is generated. When necessary, the functions provided for
# the single executables can be invoked individually for more flexibility.
# The main entry point is the sequentialEventDriverShell function.
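# Typical invocation (illustrative, assuming a ParameterDict.py overriding the
# defaults below is present in the working directory):
#
#     python SequentialEventDriver.py
#
# readInParameters() imports ParameterDict and updates whichever of the
# dictionaries named in allParameterLists it defines.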
from os import path, getcwd, remove, makedirs
from sys import stdout, exit
from shutil import move, copy, copytree, rmtree
from glob import glob
from subprocess import call
import numpy as np
import re
class ExecutionError(Exception): pass # used to signal my own exception
# set global default parameters
allParameterLists = [
'controlParameterList',
'initial_condition_control',
'superMCControl',
'superMCParameters',
'preEquilibriumControl',
'preEquilibriumParameters',
'hydroControl',
'hydroParameters',
'iSSControl',
'iSSParameters',
'iSControl',
'iSParameters',
'photonEmissionControl',
'photonEmissionParameters',
'osc2uControl',
'osc2uParameters',
'urqmdControl',
'urqmdParameters',
'binUtilitiesControl',
'binUtilitiesParameters',
'HoTCoffeehControl',
'HoTCoffeehParameters',
]
controlParameterList = {
'simulation_type' : 'hybrid', # 'hybrid' or 'hydro'
'niceness' : 10, # range from 0 to 19 for process priority, 0 for the highest priority
'numberOfEvents' : 10, # how many sequential calculations
'rootDir' : path.abspath('../'),
'resultDir' : path.abspath('../finalResults'), # final results will be saved here, absolute
'eventResultDirPattern' : 'event-%d', # %d->event_id, where event results are saved
'eventResultDir' : None, # used to pass event result folder from sequentialEventDriverShell to others
'combinedUrqmdFile' : 'urqmdCombined.txt', # urqmd from all events will be combined into this file
'buildCMD' : 'make build',
'cleanCMD' : 'make clean',
}
initial_condition_control = {
'centrality': '0-5%', # centrality bin
# centrality cut variable: total_entropy or Npart
'cut_type': 'total_entropy',
'initial_condition_type': 'superMC', # type of initial conditions
# file path for the pre-generated initial condition files
'pre-generated_initial_file_path': 'initial_conditions',
# name pattern for the initial condition files
'pre-generated_initial_file_pattern': 'sd_event_[0-9]*_block.dat',
'pre-generated_initial_file_read_in_mode': 2, # read in mode for VISH2+1
}
superMCControl = {
'mainDir' : 'superMC',
'dataDir' : 'data', # where initial conditions are stored, relative
'saveICFile' : True, # whether to save initial condition file
'dataFiles' : '*event_%d_*.dat', # data filenames
'initialFiles' : 'sd_event_*_block.dat', #initial density profile filenames
'numberOfEventsParameterName' : 'nev',
'executable' : 'superMC.e',
}
superMCParameters = {
'model_name' : 'MCGlb',
'which_mc_model' : 5,
'sub_model' : 1,
'Npmin' : 0,
'Npmax' : 1000,
'bmin' : 0,
'bmax' : 20,
'cutdSdy' : 1,
'cutdSdy_lowerBound' : 551.864,
'cutdSdy_upperBound' : 1000000.0,
'ecm' : 2760,
'Aproj' : 208,
'Atarg' : 208,
'proj_deformed' : 0,
'targ_deformed' : 0,
'finalFactor' : 56.763,
'use_ed' : 0,
'alpha' : 0.118,
'lambda' : 0.288,
'operation' : 1,
'include_NN_correlation' : 1,
'cc_fluctuation_model' : 6,
'cc_fluctuation_Gamma_theta' : 0.75,
'maxx' : 13.0, # grid size in x (fm)
'maxy' : 13.0, # grid size in y (fm)
'dx' : 0.1, # grid spacing in x (fm)
'dy' : 0.1, # grid spacing in y (fm)
'nev' : 1,
}
preEquilibriumControl = {
'mainDir' : 'fs',
'initialConditionDir' : 'data/events', # where initial conditions are stored
'initialConditionFile' : 'sd_event_1_block.dat', # IC filename
'resultDir' : 'data/result/event_1/%g', # pre-equilibrium results folder
'resultFiles' : '*', # results files
'executable' : 'lm.e',
}
preEquilibriumParameters = {
'event_mode' : 1,
'taumin' : 0.6,
'taumax' : 0.6,
'dtau' : 0.2,
}
hydroControl = {
'mainDir' : 'VISHNew',
'initialConditionDir' : 'Initial', # hydro initial condition folder, relative
'initialConditionFile' : 'InitialSd.dat', # IC filename
'resultDir' : 'results', # hydro results folder, relative
'resultFiles' : '*', # results files
'saveICFile' : True, # whether to save initial condition file
'saveResultGlobs' : ['*.h5','surface.dat', 'dec*.dat', 'ecc*.dat'],
# files match these globs will be saved
'executable' : 'VISHNew.e',
}
hydroParameters = {
'IINIT' : 2,
'IEOS' : 7,
'iEin' : 1,
'vis' : 0.08,
'Ivisflag' : 0,
'IvisBulkFlag' : 0, # flag for temperature dependence of bulk viscosity
'visbulknorm' : 0.0, # the overall normalization of the bulk viscosity
'IviscousEqsType' : 1, # type of evolution equations for viscous quantities
'iLS' : 130, # lattice points in the transverse plane
'dx' : 0.1, # lattice spacing in x
'dy' : 0.1, # lattice spacing in y
'T0' : 0.6, # tau_0
'dt' : 0.02, # dtau
'Edec' : 0.3, # 0.3->160 MeV, 0.18->120 MeV
'factor' : 1.0,
'IhydroJetoutput' : 1, # switch for output hydro evolution history into hdf5 file
'InitialURead' : 1, # switch to read in initial flow velocity and shear tensor
}
iSSControl = {
'mainDir' : 'iSS',
'operationDir' : 'results',
'saveResultGlobs' : ['*vn*.dat','OSCAR.DAT'], # files in the operation directory matching these globs will be saved
'OSCARFile' : 'OSCAR.DAT',
'executable' : 'iSS.e',
}
iSSParameters = {
'turn_on_bulk' : 0,
'include_deltaf_bulk' : 0,
'include_deltaf_shear' : 0,
'calculate_vn' : 0,
'MC_sampling' : 2,
'number_of_repeated_sampling' : 10,
'y_LB' : -2.5,
'y_RB' : 2.5,
}
iSControl = {
'mainDir' : 'iS',
'operationDir' : 'results',
'saveResultGlobs' : ['dN_ptdptdphidy.dat', '*_vndata.dat', 'v2data*'], # files in the operation directory matching these globs will be saved
'executables' : ('iS.e', 'resonance.e', 'iInteSp.e'),
'entryShell' : 'iS_withResonance.sh',
}
iSParameters = {}
photonEmissionControl = {
'mainDir' : 'photonEmission',
'operationDir' : 'results',
'saveResultGlobs' : ['*Sp*.dat', '*dTdtau*.dat'], # files in the operation directory matching these globs will be saved
'executable' : 'hydro_photonEmission.e',
}
photonEmissionParameters = {
'dx' : 0.5,
'dy' : 0.5,
'dTau' : 0.02,
'T_dec' : 0.120,
'tau_start' : 0.6,
'calHGIdFlag' : 0,
}
osc2uControl = {
'mainDir' : 'osc2u',
'outputFilename' : 'fort.14',
'saveOSCAR' : False, # whether to save OSCAR file
'executable' : 'osc2u.e',
}
osc2uParameters = {}
urqmdControl = {
'mainDir' : 'urqmd',
'controlFilename' : 'uqmd.burner',
'ICFilename' : 'OSCAR.input',
'outputFilename' : 'particle_list.dat',
'saveOutputFile' : True, # whether to save the output file
'executable' : 'urqmd.e',
'entryShell' : 'runqmd.sh',
'run_UrQMD' : False, # don't run UrQMD by default
}
urqmdParameters = {}
binUtilitiesControl = {
'mainDir' : 'binUtilities',
'operationDir' : 'results',
'saveResultGlobs' : ['*flow*.dat', 'pT_*.dat'], # files in the operation directory matching these globs will be saved
'executable' : 'urqmdBinShell.py',
}
binUtilitiesParameters = {}
EbeCollectorControl = {
'mainDir' : 'EbeCollector',
'executable_hybrid' : 'EbeCollectorShell_hydroWithUrQMD.py',
'executable_hydro' : 'EbeCollectorShell_pureHydro.py',
'executable_hydroEM' : 'EbeCollectorShell_HydroEM.py',
'executable_hydroEM_with_decaycocktail' : 'EbeCollectorShell_HydroEM_with_decaycocktail.py',
}
EbeCollectorParameters = {
'subfolderPattern' : '"event-(\d*)"',
'databaseFilename' : 'collected.db',
}
HoTCoffeehControl = {
'mainDir' : 'HoTCoffeeh',
'operationDir' : 'results',
'runHoTCoffeeh' : False,
'executables' : ('cfwr.e', 'svwr.e'),
'entryShell' : 'HoTCoffeeh.sh',
'saveResultGlobs' : ['all*dat', 'total*dat', 'correlfunct3D*.dat', \
'*spectra.dat', 'HBT*dat', 'resonance*h5', \
'target*h5', 'resonance_fraction.dat', 'chosen_resonances.dat'],
}
HoTCoffeehParameters = {
'grouping_particles' : 0,
'particle_diff_tolerance' : 0.00,
'use_plane_psi_order' : 0,
'ignore_long_lived_resonances' : 1,
'max_lifetime' : 100.0,
'include_delta_f' : 1,
'include_bulk_pi' : 1,
'n_order' : 4,
'tolerance' : 0.00,
'flag_negative_S' : 1,
'chosenParticlesMode' : 0,
'nKT' : 101,
'nKphi' : 48,
'KTmin' : 0.01,
'KTmax' : 1.01,
'SV_npT' : 15,
'SV_npphi' : 48,
'SV_resonanceThreshold' : 1.00,
'CF_npT' : 15,
'CF_npphi' : 36,
'CF_npY' : 21,
'CF_resonanceThreshold' : 0.60,
'use_lambda' : 1,
'use_log_fit' : 1,
'use_extrapolation' : 1,
'fit_with_projected_cfvals' : 1,
'flesh_out_cf' : 1,
'calculate_CF_mode' : 0,
'qtnpts' : 51,
'qxnpts' : 7,
'qynpts' : 7,
'qznpts' : 7,
'delta_qx' : 0.025,
'delta_qy' : 0.025,
'delta_qz' : 0.0125,
}
def readInParameters():
""" Overwrite default parameter lists with those in ParameterDict. """
try:
import ParameterDict
for aParameterList in allParameterLists:
if aParameterList in dir(ParameterDict):
exec("%s.update(ParameterDict.%s)" % (aParameterList, aParameterList))
except (IOError, SyntaxError):
raise ExecutionError("Errors trying to open/read the ParameterDict.py file!")
def sorted_nicely( l ):
""" Sorts the given iterable in the way that is expected.
Required arguments:
l -- The iterable to be sorted.
"""
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
return sorted(l, key = alphanum_key)
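# Example (illustrative): sorted_nicely(['event-10', 'event-2', 'event-1'])
# gives ['event-1', 'event-2', 'event-10'], whereas the plain built-in sorted()
# would place 'event-10' before 'event-2'.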
def translate_centrality_cut():
"""
translate the centrality boundaries to Npart, dS/dy, b values and update
the parameter lists for simulations
"""
cut_type = initial_condition_control['cut_type']
if cut_type not in ['total_entropy', 'Npart']:
print "invalid centrality cut type: ", cut_type
exit(1)
centrality_string = initial_condition_control['centrality']
centrality_lower_bound = float(centrality_string.split('-')[0])
centrality_upper_bound = float(
centrality_string.split('-')[1].split('%')[0])
    if superMCParameters['model_name'] == 'MCGlb':
        superMCParameters['which_mc_model'] = 5
        superMCParameters['sub_model'] = 1
        model_name = 'MCGlb'
    elif superMCParameters['model_name'] == 'MCKLN':
        superMCParameters['which_mc_model'] = 1
        superMCParameters['sub_model'] = 7
        model_name = 'MCKLN'
if superMCParameters['cc_fluctuation_model'] != 0:
multiplicity_fluctuation = 'withMultFluct'
else:
multiplicity_fluctuation = 'noMultFluct'
if superMCParameters['include_NN_correlation'] != 0:
NNcorrelation = 'withNNcorrelation'
else:
NNcorrelation = 'd0.9'
collision_energy = str(superMCParameters['ecm'])
Aproj = superMCParameters['Aproj']
Atrag = superMCParameters['Atarg']
nucleus_name_dict = {
208: 'Pb',
197: 'Au',
238: 'U',
63: 'Cu',
27: 'Al',
1: 'p',
2: 'd',
3: 'He',
}
if Aproj == Atrag: #symmetric collision
nucleus_name = nucleus_name_dict[Aproj]+nucleus_name_dict[Atrag]
else: # asymmetric collision
nucleus_name = (nucleus_name_dict[min(Aproj, Atrag)]
+ nucleus_name_dict[max(Aproj, Atrag)])
centrality_cut_file_name = (
'iebe_centralityCut_%s_%s_sigmaNN_gauss_%s_%s.dat'
% (cut_type, model_name + nucleus_name + collision_energy,
NNcorrelation, multiplicity_fluctuation)
)
try:
centrality_cut_file = np.loadtxt(
path.join(path.abspath('../centrality_cut_tables'),
centrality_cut_file_name))
except IOError:
print "Can not find the centrality cut table for the collision system"
print centrality_cut_file_name
exit(1)
lower_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_lower_bound+1e-30))
upper_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_upper_bound))
cut_value_upper = (
(centrality_cut_file[lower_idx-1, 1]
- centrality_cut_file[lower_idx, 1])
/(centrality_cut_file[lower_idx-1, 0]
- centrality_cut_file[lower_idx, 0])
*(centrality_lower_bound - centrality_cut_file[lower_idx-1, 0])
+ centrality_cut_file[lower_idx-1, 1]
)
cut_value_low = (
(centrality_cut_file[upper_idx-1, 1]
- centrality_cut_file[upper_idx, 1])
/(centrality_cut_file[upper_idx-1, 0]
- centrality_cut_file[upper_idx, 0])
*(centrality_upper_bound - centrality_cut_file[upper_idx-1, 0])
+ centrality_cut_file[upper_idx-1, 1]
)
if cut_type == 'total_entropy':
superMCParameters['cutdSdy'] = 1
npart_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
npart_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 4])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 5])
superMCParameters['cutdSdy_lowerBound'] = cut_value_low
superMCParameters['cutdSdy_upperBound'] = cut_value_upper
elif cut_type == 'Npart':
superMCParameters['cutdSdy'] = 0
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
npart_min = cut_value_low
npart_max = cut_value_upper
superMCParameters['Npmax'] = npart_max
superMCParameters['Npmin'] = npart_min
superMCParameters['bmax'] = b_max
superMCParameters['bmin'] = b_min
#print out information
print '-'*80
print('%s collisions at sqrt{s} = %s A GeV with %s initial conditions'
% (nucleus_name , collision_energy, model_name))
print("Centrality : %g - %g"
% (centrality_lower_bound, centrality_upper_bound) + r"%")
print 'centrality cut on ', cut_type
if cut_type == 'total_entropy':
print 'dS/dy :', cut_value_low, '-', cut_value_upper
print "Npart: ", npart_min, '-', npart_max
print "b: ", b_min, '-', b_max, ' fm'
print '-'*80
return
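# Worked example (illustrative numbers): for centrality '0-5%' with cut_type
# 'total_entropy', the code above linearly interpolates the dS/dy values at the
# 0% and 5% rows of the centrality table into cutdSdy_upperBound and
# cutdSdy_lowerBound, and widens Npmin/Npmax and bmin/bmax to cover all rows in
# that range before printing the summary.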
def get_initial_condition_list():
"""
return a list of initial condition file
"""
file_list = []
initial_type = initial_condition_control['initial_condition_type']
if initial_type == 'superMC':
nev = controlParameterList['numberOfEvents']
file_list = [afile for afile in generateSuperMCInitialConditions(nev)]
elif initial_type == 'pre-generated':
file_list = [
afile for afile in get_pre_generated_initial_conditions_list()]
file_list = sorted_nicely(file_list) # make sure files are in correct order
return(file_list)
def get_pre_generated_initial_conditions_list():
"""
Yield the pre-generated initial conditions absolute path
"""
# set directory strings
initial_condition_dirName = initial_condition_control['pre-generated_initial_file_path']
initial_condition_path = path.join(controlParameterList['rootDir'],
initial_condition_dirName)
print 'Initial conditions path:', initial_condition_path
#copytree( path.commonprefix(fileList), HoTCoffeehOperationDirectory )
# yield initial conditions
file_list = glob(path.join(initial_condition_path,
initial_condition_control['pre-generated_initial_file_pattern']))
for afile in file_list:
# then yield it
yield path.join(initial_condition_path, afile)
def generateSuperMCInitialConditions(numberOfEvents):
"""
    Generate initial conditions using superMC. It then yields the absolute
path for all the initial conditions.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
superMCDirectory = path.join(controlParameterList['rootDir'],
superMCControl['mainDir'])
superMCDataDirectory = path.join(superMCDirectory,
superMCControl['dataDir'])
superMCExecutable = superMCControl['executable']
# clean up the data subfolder for output
cleanUpFolder(superMCDataDirectory)
# check executable
checkExistenceOfExecutable(path.join(superMCDirectory, superMCExecutable))
# set "nev=#" in superMCParameters
superMCParameters[superMCControl['numberOfEventsParameterName']] = (
numberOfEvents)
# form assignment string
assignments = formAssignmentStringFromDict(superMCParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ superMCExecutable + assignments)
# execute!
run(executableString, cwd=superMCDirectory)
# yield initial conditions
file_list = glob(path.join(superMCDataDirectory,
superMCControl['initialFiles']))
for aFile in file_list:
# then yield it
yield path.join(superMCDataDirectory, aFile)
def hydroWithInitialCondition(aFile):
"""
Perform a single hydro calculation with the given absolute path to an
initial condition. Yield the result files.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
hydroDirectory = path.join(controlParameterList['rootDir'],
hydroControl['mainDir'])
hydroICDirectory = path.join(hydroDirectory,
hydroControl['initialConditionDir'])
hydroResultsDirectory = path.join(hydroDirectory,
hydroControl['resultDir'])
hydroExecutable = hydroControl['executable']
# check executable
checkExistenceOfExecutable(path.join(hydroDirectory, hydroExecutable))
# clean up initial and results folder
cleanUpFolder(hydroICDirectory)
cleanUpFolder(hydroResultsDirectory)
# check existence of the initial conditions
if not path.exists(aFile):
raise ExecutionError("Hydro initial condition file %s not found!"
% aFile)
# storing initial condition file
if hydroControl['saveICFile']:
copy(aFile, controlParameterList['eventResultDir'])
# move initial condition to the designated folder
move(aFile, path.join(hydroICDirectory,
hydroControl['initialConditionFile']))
# form assignment string
assignments = formAssignmentStringFromDict(hydroParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ hydroExecutable + assignments)
# execute!
run(executableString, cwd=hydroDirectory)
# yield result files
worthStoring = []
for aGlob in hydroControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(hydroResultsDirectory, aGlob)))
file_list = glob(path.join(hydroResultsDirectory,
hydroControl['resultFiles']))
for aFile in file_list:
# check if this file worth storing, then copy to event result folder
if aFile in worthStoring:
copy(aFile, controlParameterList['eventResultDir'])
# yield it
yield path.join(hydroResultsDirectory, aFile)
def hydro_with_pre_equilbirium(aFile):
"""
Perform a single pre-equilibrium evolution and hydro calculation with
the given absolute path to an initial condition. Yield the result
files.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
# pre-equilibrium model
pre_equilibrium_directory = path.join(
controlParameterList['rootDir'], preEquilibriumControl['mainDir'])
pre_equilibrium_ic_directory = path.join(
pre_equilibrium_directory, preEquilibriumControl['initialConditionDir']
)
pre_equilibrium_results_directory = path.join(
pre_equilibrium_directory, preEquilibriumControl['resultDir']
% preEquilibriumParameters['taumin']
)
pre_equilibrium_executable = preEquilibriumControl['executable']
# hydro model
hydroDirectory = path.join(controlParameterList['rootDir'],
hydroControl['mainDir'])
hydroICDirectory = path.join(hydroDirectory,
hydroControl['initialConditionDir'])
hydroResultsDirectory = path.join(hydroDirectory,
hydroControl['resultDir'])
hydroExecutable = hydroControl['executable']
# check executable
checkExistenceOfExecutable(path.join(pre_equilibrium_directory,
pre_equilibrium_executable))
checkExistenceOfExecutable(path.join(hydroDirectory, hydroExecutable))
# clean up initial and results folder
cleanUpFolder(pre_equilibrium_ic_directory)
cleanUpFolder(pre_equilibrium_results_directory)
cleanUpFolder(hydroICDirectory)
cleanUpFolder(hydroResultsDirectory)
# check existence of the initial conditions
if not path.exists(aFile):
raise ExecutionError("Hydro initial condition file %s not found!"
% aFile)
# storing initial condition file
if hydroControl['saveICFile']:
copy(aFile, controlParameterList['eventResultDir'])
# first move initial condition to the pre-equilibrium folder
move(aFile, path.join(pre_equilibrium_ic_directory,
preEquilibriumControl['initialConditionFile']))
# form assignment string
assignments = formAssignmentStringFromDict(preEquilibriumParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ pre_equilibrium_executable + assignments)
# execute!
run(executableString, cwd=pre_equilibrium_directory)
# then move pre-equilibrium results to hydro folder
for aFile in glob(path.join(pre_equilibrium_results_directory,
preEquilibriumControl['resultFiles'])):
file_name = aFile.split('/')[-1].split('kln')[0] + 'kln.dat'
move(aFile, path.join(hydroICDirectory, file_name))
# form assignment string
assignments = formAssignmentStringFromDict(hydroParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ hydroExecutable + assignments)
# execute!
run(executableString, cwd=hydroDirectory)
# yield result files
worthStoring = []
for aGlob in hydroControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(hydroResultsDirectory, aGlob)))
for aFile in glob(path.join(hydroResultsDirectory,
hydroControl['resultFiles'])):
# check if this file worth storing, then copy to event result folder
if aFile in worthStoring:
copy(aFile, controlParameterList['eventResultDir'])
# yield it
yield path.join(hydroResultsDirectory, aFile)
def iSSWithHydroResultFiles(fileList):
"""
Perform iSS calculation using the given list of hydro result files.
Return the path to the OSCAR file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSSDirectory = path.join(controlParameterList['rootDir'],
iSSControl['mainDir'])
iSSOperationDirectory = path.join(iSSDirectory, iSSControl['operationDir'])
iSSOSCARFilepath = path.join(iSSDirectory, iSSControl['OSCARFile'])
iSSExecutable = iSSControl['executable']
# check executable
checkExistenceOfExecutable(path.join(iSSDirectory, iSSExecutable))
# clean up operation folder
cleanUpFolder(iSSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSSOperationDirectory)
# move(aFile, iSSOperationDirectory)
# make sure all hadrons up to 2 GeV are calculated
#copy(path.join(iSSDirectory, 'EOS', 'chosen_particles_urqmd_v3.3+.dat'),
# path.join(iSSDirectory, 'EOS', 'chosen_particles.dat'))
# make sure to use the pdg table with tagged decay photons
copy(path.join(iSSDirectory, 'EOS', 'pdg-urqmd_v3.3+.dat'),
path.join(iSSDirectory, 'EOS', 'pdg.dat'))
# form assignment string
assignments = formAssignmentStringFromDict(iSSParameters)
# form executable string
executableString = (
"nice -n %d ./" % (ProcessNiceness) + iSSExecutable + assignments)
# execute!
run(executableString, cwd=iSSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
# return OSCAR file path
#print 'iSSOSCARFilepath =', iSSOSCARFilepath
return iSSOSCARFilepath
def iSWithResonancesWithHydroResultFiles(fileList):
"""
Perform iS calculation using the given list of hydro result files,
followed by resonance calculations and iInteSp calculations.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSDirectory = path.join(controlParameterList['rootDir'],
iSControl['mainDir'])
iSOperationDirectory = path.join(iSDirectory, iSControl['operationDir'])
iSExecutables = iSControl['executables']
iSExecutionEntry = iSControl['entryShell']
# check executable
checkExistenceOfExecutables(
[path.join(iSDirectory, aExe) for aExe in iSExecutables])
# clean up operation folder
cleanUpFolder(iSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSOperationDirectory)
# move(aFile, iSOperationDirectory)
copy(path.join(iSDirectory, 'EOS', 'chosen_particles_s95pv1.dat'),
path.join(iSDirectory, 'EOS', 'chosen_particles.dat'))
copy(path.join(iSDirectory, 'EOS', 'pdg-s95pv1_withDecayPhotons.dat'),
path.join(iSDirectory, 'EOS', 'pdg.dat'))
# execute!
run("nice -n %d bash ./" % (ProcessNiceness) + iSExecutionEntry,
cwd=iSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def write_RUN_CFWR_PBS(HCDirectory, assignments):
open(path.join(HCDirectory, "run_cfwr.pbs"), "w").write(
"""
#!/usr/bin/env bash
#PBS -l walltime=48:00:00
#PBS -l mem=8gb
#PBS -j oe
#PBS -S /bin/bash
cd %s
(
ulimit -n 1000
./cfwr.e %s
)
""" % (HCDirectory, assignments)
)
def doHBTWithHydroResultFiles(fileList):
"""
Perform HoTCoffeeh calculation.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
HoTCoffeehDirectory = path.join(controlParameterList['rootDir'],
HoTCoffeehControl['mainDir'])
HoTCoffeehOperationDirectory = path.join(HoTCoffeehDirectory, HoTCoffeehControl['operationDir'])
HoTCoffeehExecutables = HoTCoffeehControl['executables']
HoTCoffeehExecutionEntry = HoTCoffeehControl['entryShell']
print 'fileList =', fileList
# check executable
checkExistenceOfExecutables(
[path.join(HoTCoffeehDirectory, aExe) for aExe in HoTCoffeehExecutables])
# clean up operation folder
cleanUpFolder(HoTCoffeehOperationDirectory)
#rmtree(HoTCoffeehOperationDirectory)
#copytree( path.commonprefix(fileList), HoTCoffeehOperationDirectory )
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, HoTCoffeehOperationDirectory)
# move(aFile, HoTCoffeehOperationDirectory)
# form assignment string
assignments = formAssignmentStringFromDict(HoTCoffeehParameters)
#runSVWR = "true"
#runCFWR = "true"
#if HoTCoffeehExecutables[0]==0:
# runSVWR = "false"
#
#if HoTCoffeehExecutables[1]==0:
# runCFWR = "false"
# execute!
#print 'Running', "nice -n %d bash ./" % (ProcessNiceness) \
# + HoTCoffeehExecutionEntry + " " + runSVWR + " " + runCFWR + " " + assignments
#run("nice -n %d bash ./" % (ProcessNiceness) \
# + HoTCoffeehExecutionEntry + " " + runSVWR + " " + runCFWR + " " + assignments, \
# cwd=HoTCoffeehDirectory)
commandToExecute = "nice -n %d bash ./" % (ProcessNiceness) \
+ HoTCoffeehExecutionEntry + " true true " + assignments
print 'Running', commandToExecute
write_RUN_CFWR_PBS(HoTCoffeehDirectory, assignments)
run(commandToExecute, cwd=HoTCoffeehDirectory)
# save some of the important result files
worthStoring = []
for aGlob in HoTCoffeehControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(HoTCoffeehOperationDirectory, aGlob)))
for aFile in glob(path.join(HoTCoffeehOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def iSSeventplaneAngleWithHydroResultFiles(fileList):
"""
Perform iSS calculation using the given list of hydro result files.
Return the path to the OSCAR file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSSDirectory = path.join(controlParameterList['rootDir'],
iSSControl['mainDir'])
iSSOperationDirectory = path.join(iSSDirectory,
iSSControl['operationDir'])
hydroH5Filepath = path.join(iSSOperationDirectory, 'JetData.h5')
iSSExecutable = iSSControl['executable']
# check executable
checkExistenceOfExecutable(path.join(iSSDirectory, iSSExecutable))
# clean up operation folder
cleanUpFolder(iSSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSSOperationDirectory)
# move(aFile, iSSOperationDirectory)
copy(path.join(iSSDirectory, 'EOS', 'chosen_particles_urqmd_v3.3+.dat'),
path.join(iSSDirectory, 'EOS', 'chosen_particles.dat'))
copy(path.join(iSSDirectory, 'EOS', 'pdg-urqmd_v3.3+.dat'),
path.join(iSSDirectory, 'EOS', 'pdg.dat'))
# form assignment string
assignments = formAssignmentStringFromDict(iSSParameters)
# form executable string
executableString = (
"nice -n %d ./" % (ProcessNiceness) + iSSExecutable + assignments)
# execute!
run(executableString, cwd=iSSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
# return hydro h5 file path
return (hydroH5Filepath,)
def iSWithResonancesWithdecayPhotonWithHydroResultFiles(fileList):
"""
Perform iS calculation using the given list of hydro result files,
followed by resonance calculations and iInteSp calculations with decay
photons.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSDirectory = path.join(controlParameterList['rootDir'],
iSControl['mainDir'])
iSOperationDirectory = path.join(iSDirectory, iSControl['operationDir'])
hydroH5Filepath = path.join(iSOperationDirectory, 'JetData.h5')
iSExecutables = iSControl['executables']
iSExecutionEntry = iSControl['entryShell']
# check executable
checkExistenceOfExecutables(
[path.join(iSDirectory, aExe) for aExe in iSExecutables])
# clean up operation folder
cleanUpFolder(iSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSOperationDirectory)
simulationType = controlParameterList['simulation_type']
if simulationType == "hydroEM_with_decaycocktail_with_urqmd":
# make sure all hadrons up to 2 GeV are calculated
copy(path.join(iSDirectory, 'EOS', 'chosen_particles_urqmd_v3.3+.dat'),
path.join(iSDirectory, 'EOS', 'chosen_particles.dat'))
# make sure to use the pdg table with tagged decay photons
copy(path.join(iSDirectory, 'EOS',
'pdg-urqmd_v3.3+_withDecayPhotons.dat'),
path.join(iSDirectory, 'EOS', 'pdg.dat'))
else:
# make sure all hadrons up to 2 GeV are calculated
copy(path.join(iSDirectory, 'EOS', 'chosen_particles_s95pv1.dat'),
path.join(iSDirectory, 'EOS', 'chosen_particles.dat'))
# make sure to use the pdg table with tagged decay photons
copy(path.join(iSDirectory, 'EOS', 'pdg-s95pv1_withDecayPhotons.dat'),
path.join(iSDirectory, 'EOS', 'pdg.dat'))
# execute!
run("nice -n %d bash ./" % (ProcessNiceness) + iSExecutionEntry,
cwd=iSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
# return hydro h5 file path
return (hydroH5Filepath,)
def photonEmissionWithHydroResultFiles(fileList):
"""
Perform thermal photon calculation using the given list of hydro
result files.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
photonEmDirectory = path.join(controlParameterList['rootDir'],
photonEmissionControl['mainDir'])
photonEmOperationDirectory = path.join(
photonEmDirectory, photonEmissionControl['operationDir'])
photonEmExecutable = photonEmissionControl['executable']
# check executable
checkExistenceOfExecutable(path.join(photonEmDirectory, photonEmExecutable))
# clean up results folder
cleanUpFolder(photonEmOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, photonEmOperationDirectory)
# move(aFile, photonEmOperationDirectory)
# form assignment string
assignments = formAssignmentStringFromDict(photonEmissionParameters)
# form executable string
executableString = (
"nice -n %d ./" % (ProcessNiceness) + photonEmExecutable + assignments)
# execute!
run(executableString, cwd=photonEmDirectory)
# save some of the important result files
worthStoring = []
for aGlob in photonEmissionControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(photonEmOperationDirectory, aGlob)))
for aFile in glob(path.join(photonEmOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def osc2uFromOSCARFile(OSCARFilePath):
"""
Execute osc2u program using the given path to the OSCAR file. Return the
path to the output file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
osc2uDirectory = path.join(controlParameterList['rootDir'], osc2uControl['mainDir'])
osc2uOutputFilePath = path.join(osc2uDirectory, osc2uControl['outputFilename'])
osc2uExecutable = osc2uControl['executable']
# check executable
checkExistenceOfExecutable(path.join(osc2uDirectory, osc2uExecutable))
# remove output file if already exists
if path.exists(osc2uOutputFilePath):
remove(osc2uOutputFilePath)
# check existence of the OSCAR file then execute
if path.exists(OSCARFilePath):
run("nice -n %d ./" % (ProcessNiceness) + osc2uExecutable + " < " + OSCARFilePath, cwd=osc2uDirectory)
# save OSCAR file
if osc2uControl['saveOSCAR']:
move(OSCARFilePath, controlParameterList['eventResultDir'])
# return the output file path
return osc2uOutputFilePath
def urqmdFromOsc2uOutputFile(osc2uFilePath):
"""
Perform urqmd using osc2u output file. Return the path to the output
file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
urqmdDirectory = path.join(controlParameterList['rootDir'], urqmdControl['mainDir'])
urqmdOutputFilePath = path.join(urqmdDirectory, urqmdControl['outputFilename'])
urqmdExecutable = urqmdControl['executable']
urqmdExecutionEntry = urqmdControl['entryShell']
# check executable
checkExistenceOfExecutable(path.join(urqmdDirectory, urqmdExecutable))
# remove output file if already exists
if path.exists(urqmdOutputFilePath):
remove(urqmdOutputFilePath)
# clean up IC
urqmdIC = path.join(urqmdDirectory, urqmdControl['ICFilename'])
if path.exists(urqmdIC):
remove(urqmdIC)
# check existence of the osc2u output, move it then execute urqmd
if path.exists(osc2uFilePath):
move(osc2uFilePath, urqmdIC)
run("nice -n %d bash ./" % (ProcessNiceness) + urqmdExecutionEntry, cwd=urqmdDirectory)
# save output file
if urqmdControl['saveOutputFile']:
copy(urqmdOutputFilePath, controlParameterList['eventResultDir'])
# return the output file path
return urqmdOutputFilePath
def binUrqmdResultFiles(urqmdOutputFile):
"""
Bin the output from URQMD to generate flows etc.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
binUDirectory = path.join(controlParameterList['rootDir'], binUtilitiesControl['mainDir'])
binUOperationDirectory = path.join(binUDirectory, binUtilitiesControl['operationDir'])
binUExecutable = binUtilitiesControl['executable']
# clean up operation folder
cleanUpFolder(binUOperationDirectory)
# check existence urqmd output file
if not path.exists(urqmdOutputFile):
raise ExecutionError("URQMD output file %s not found!" % urqmdOutputFile)
# form executable string
executableString = "nice -n %d python ./" % (ProcessNiceness) + binUExecutable + " " + urqmdOutputFile
# execute!
run(executableString, cwd=binUDirectory)
# save some of the important result files
worthStoring = []
for aGlob in binUtilitiesControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(binUOperationDirectory, aGlob)))
for aFile in glob(path.join(binUOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def collectEbeResultsToDatabaseFrom(folder):
"""
    Collect the most commonly used results from subfolders that contain hydro
results into a database, including ecc and flow etc.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
collectorDirectory = path.join(controlParameterList['rootDir'],
EbeCollectorControl['mainDir'])
# for executable string
simulationType = controlParameterList['simulation_type']
if simulationType == 'hybrid':
collectorExecutable = EbeCollectorControl['executable_hybrid']
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %g %s %s" % (
folder, 1.0/(iSSParameters['number_of_repeated_sampling']
*(iSSParameters["y_RB"] - iSSParameters["y_LB"])),
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydro':
collectorExecutable = EbeCollectorControl['executable_hydro']
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM':
collectorExecutable = EbeCollectorControl['executable_hydroEM']
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM_with_decaycocktail':
collectorExecutable = (
EbeCollectorControl['executable_hydroEM_with_decaycocktail'])
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM_with_decaycocktail_with_urqmd':
collectorExecutable = (
EbeCollectorControl['executable_hydroEM_with_decaycocktail'])
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable +
" %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM_preEquilibrium':
collectorExecutable = (
EbeCollectorControl['executable_hydroEM_with_decaycocktail'])
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
# execute
run(executableString, cwd=collectorDirectory)
def formAssignmentStringFromDict(aDict):
"""
Generate a parameter-equals-value string from the given dictionary. The
generated string has a leading blank.
"""
result = ""
for aParameter in aDict.keys():
result += " {}={}".format(aParameter, aDict[aParameter])
return result
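# Example (illustrative): formAssignmentStringFromDict({'vis': 0.08, 'iLS': 130})
# returns " vis=0.08 iLS=130" (note the leading blank; key order follows the
# dict), ready to be appended to the "./executable" command strings built in
# this script.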
def cleanUpFolder(aDir):
""" Delete all data files in the given directory. """
if path.exists(aDir):
try:
run("rm -rf *", cwd=aDir, echo=False)
except OSError:
pass # very likely the the folder is already empty
else:
makedirs(aDir)
def checkExistenceOfExecutable(executableFilename):
""" Check the existence of the executable file, and compile if not. """
if not path.exists(executableFilename):
# build then clean
exec_path, exec_filename = path.split(executableFilename)
run("make", cwd=exec_path)
# if still cannot find the executable
if not path.exists(executableFilename):
raise ExecutionError(
"Cannot generate executable %s!" % executableFilename)
def checkExistenceOfExecutables(executableFilenames):
"""
    Check the existence of the executable files, and compile them if missing.
Will call the checkExistenceOfExecutable function.
"""
for executableFilename in executableFilenames:
checkExistenceOfExecutable(executableFilename)
def run(command, cwd=getcwd(), echo=True):
""" Invoke a command from terminal and wait for it to stop. """
if echo:
print("-"*80)
print("In "+cwd)
print("Executing command: "+command)
print("-"*80)
stdout.flush()
return call(command, shell=True, cwd=cwd)
def sequentialEventDriverShell():
"""
    Perform sequential calculations for a given number of events.
    Parameters are read from the dictionaries listed in allParameterLists.
"""
try:
# read parameters
readInParameters()
translate_centrality_cut()
# create result folder
resultDir = controlParameterList['resultDir']
print('resultDir =', resultDir)
if path.exists(resultDir):
rmtree(resultDir)
makedirs(resultDir)
# get simulation type
simulationType = controlParameterList['simulation_type']
event_id = 0
# generate initial conditions then loop over initial conditions
initial_condition_list = get_initial_condition_list()
print('initial_condition_list =', initial_condition_list)
nev = len(initial_condition_list)
# print current progress to terminal
stdout.write("PROGRESS: %d events out of %d finished.\n"
% (event_id, nev))
stdout.flush()
#print initial_condition_list
# loop over initial conditions
for aInitialConditionFile in initial_condition_list:
event_id += 1
eventResultDir = path.join(resultDir,
controlParameterList['eventResultDirPattern'] % event_id)
controlParameterList['eventResultDir'] = eventResultDir
if path.exists(eventResultDir):
rmtree(eventResultDir)
makedirs(eventResultDir)
# print current progress to terminal
print("Starting event %d..." % event_id)
initial_type = initial_condition_control['initial_condition_type']
if initial_type == 'superMC': # initial conditions from superMC
if superMCControl['saveICFile']:
initial_id = int(
aInitialConditionFile.split('/')[-1].split('_')[2])
superMCDataDirectory = path.join(
controlParameterList['rootDir'],
superMCControl['mainDir'], superMCControl['dataDir'])
file_list = glob(path.join(superMCDataDirectory,
superMCControl['dataFiles'] % initial_id))
for aFile in file_list:
copy(aFile, controlParameterList['eventResultDir'])
elif initial_type == 'pre-generated':
# initial conditions from pre-generated files
copy(aInitialConditionFile, controlParameterList['eventResultDir'])
print 'Associating ' + aInitialConditionFile + ' with event ' + str(event_id)
print controlParameterList['rootDir']
if simulationType == 'hydroEM_preEquilibrium':
# perform hydro calculations with pre-equilibrium evolution
# and get a list of all the result filenames
hydroResultFiles = [aFile for aFile in
hydro_with_pre_equilbirium(aInitialConditionFile)]
else:
# perform hydro calculations and get a list of all the result
# filenames
hydroResultFiles = [aFile for aFile in hydroWithInitialCondition(aInitialConditionFile)]
print controlParameterList['rootDir']
print(controlParameterList['rootDir'])
print('simulationType =', simulationType)
print('HoTCoffeehControl[runHoTCoffeeh] =', HoTCoffeehControl['runHoTCoffeeh'] )
if simulationType != 'hybrid' and HoTCoffeehControl['runHoTCoffeeh']:
print('Doing HBT!')
doHBTWithHydroResultFiles(hydroResultFiles)
# fork simulation type here
if simulationType == 'hybrid':
# perform iSS calculation and return the path to the OSCAR file
OSCARFilePath = iSSWithHydroResultFiles(hydroResultFiles)
if urqmdControl['run_UrQMD']:
# perform osc2u
osc2uOutputFilePath = osc2uFromOSCARFile(OSCARFilePath)
# now urqmd
urqmdOutputFilePath = urqmdFromOsc2uOutputFile(
osc2uOutputFilePath)
# copy and concatenate final results from all hydro events
# into one file
combinedUrqmdFile = path.join(
controlParameterList['resultDir'],
controlParameterList['combinedUrqmdFile'])
open(combinedUrqmdFile, 'a').writelines(
open(urqmdOutputFilePath).readlines())
# bin the combined result file to get flows
binUrqmdResultFiles(urqmdOutputFilePath)
# delete the huge final UrQMD combined file
remove(urqmdOutputFilePath)
elif simulationType == 'hydro':
# perform iS calculation and resonance decays
print controlParameterList['rootDir']
iSWithResonancesWithHydroResultFiles(hydroResultFiles)
print controlParameterList['rootDir']
elif simulationType == 'hydroEM':
h5file = iSSeventplaneAngleWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
elif simulationType == 'hydroEM_with_decaycocktail':
h5file = iSWithResonancesWithdecayPhotonWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
elif simulationType == 'hydroEM_preEquilibrium':
# perform iS calculation and resonance decays
h5file = iSWithResonancesWithdecayPhotonWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
elif simulationType == 'hydroEM_with_decaycocktail_with_urqmd':
h5file = iSWithResonancesWithdecayPhotonWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
# perform iSS calculation and return the path to the OSCAR file
OSCARFilePath = iSSWithHydroResultFiles(hydroResultFiles)
# perform osc2u
osc2uOutputFilePath = osc2uFromOSCARFile(OSCARFilePath)
# now urqmd
urqmdOutputFilePath = urqmdFromOsc2uOutputFile(
osc2uOutputFilePath)
tarfile_name = (
controlParameterList['eventResultDir'].split('/')[-1])
call("tar -cf %s.tar %s" % (tarfile_name, tarfile_name),
shell=True, cwd=resultDir)
call("rm -fr %s" % (tarfile_name,), shell=True, cwd=resultDir)
# print current progress to terminal
stdout.write("PROGRESS: %d events out of %d finished.\n"
% (event_id, nev))
stdout.flush()
# collect mostly used data into a database
#collectEbeResultsToDatabaseFrom(resultDir)
except ExecutionError as e:
print("Errors encountered during execution, aborting.")
raise
finally:
print("Thank you for using. Zhi Qiu, 2013-02")
if __name__ == "__main__":
sequentialEventDriverShell()
| gpl-3.0 | 9,157,743,708,320,808,000 | 39.852364 | 148 | 0.612387 | false | 3.789005 | false | false | false |
adragomir/hiyapyco | examples/hiyapyco_example.py | 2 | 1868 | #! /usr/bin/env python
import sys
import os
import logging
import argparse
basepath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.dirname(basepath))
import hiyapyco
class LoggingAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
# print '%r %r %r' % (namespace, values, option_string)
logger = logging.getLogger()
logger.setLevel(values)
setattr(namespace, self.dest, values)
logger = logging.getLogger()
logging.basicConfig(
level=logging.WARN,
format='%(levelname)s\t[%(name)s] %(funcName)s: %(message)s'
)
parser = argparse.ArgumentParser()
parser.add_argument(
'-l', '--loglevel',
help='set loglevel',
type=str,
choices=[k for k in logging._levelNames.keys() if isinstance(k, str)],
action=LoggingAction
)
parser.add_argument(
'-y', '--usedefaultyamlloader', dest='usedefaultyamlloader',
action='store_true', default=False, help='yaml file(s) to parse'
)
parser.add_argument('-f', '--file', type=str, nargs='+', help='yaml file(s) to parse')
args = parser.parse_args()
if args.loglevel is None:
logging.disable(logging.CRITICAL)
# FIXME: in fact this should be the job of argparse
if args.file is None or len(args.file) == 0:
raise Exception('please provide at least one yaml file!')
for mergemethod in hiyapyco.METHODS.keys():
print('='*10, 'method=', mergemethod, '='*10)
conf = hiyapyco.load(
*args.file,
method=hiyapyco.METHODS[mergemethod],
interpolate=True,
failonmissingfiles=True,
usedefaultyamlloader=args.usedefaultyamlloader
)
print(conf)
print('-'*10, 'YAML', '-'*10)
print(hiyapyco.dump(conf))
if len(args.file) < 2:
break
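# Example invocation (illustrative file names):
#
#     python hiyapyco_example.py -f base.yaml override.yaml -l DEBUG
#
# With two or more input files, the merge result is printed once for every
# method in hiyapyco.METHODS.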
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 smartindent nu
| lgpl-3.0 | 7,087,518,255,125,991,000 | 28.1875 | 86 | 0.66167 | false | 3.35368 | false | false | false |
tslocum/PyIB | manage.py | 1 | 29062 | import _mysql
import os
import cgi
from database import *
from settings import Settings
from framework import *
from formatting import *
from template import *
from post import *
def manage(self, path_split):
page = ''
validated = False
administrator = False
skiptemplate = False
try:
if self.formdata['pyib_username'] and self.formdata['pyib_password']:
password = getMD5(self.formdata['pyib_password'])
valid_account = FetchOne("SELECT * FROM `staff` WHERE `username` = '" + _mysql.escape_string(self.formdata['pyib_username']) + "' AND `password` = '" + _mysql.escape_string(password) + "' LIMIT 1")
if valid_account:
setCookie(self, 'pyib_manage', self.formdata['pyib_username'] + ':' + valid_account['password'], domain='THIS')
setCookie(self, 'pyib_staff', 'yes')
UpdateDb('DELETE FROM `logs` WHERE `timestamp` < ' + str(timestamp() - 604800)) # one week
else:
page += 'Incorrect username/password.<hr>'
except:
pass
try:
manage_cookie = self._cookies['pyib_manage'].value
if manage_cookie != '':
username, password = manage_cookie.split(':')
staff_account = FetchOne("SELECT * FROM `staff` WHERE `username` = '" + _mysql.escape_string(username) + "' AND `password` = '" + _mysql.escape_string(password) + "' LIMIT 1")
if staff_account:
validated = True
if staff_account['rights'] == '0' or staff_account['rights'] == '1':
administrator = True
UpdateDb('UPDATE `staff` SET `lastactive` = ' + str(timestamp()) + ' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
except:
pass
if not validated:
page += """<div style="text-align: center;">
<form action=""" + '"' + Settings.CGI_URL + """manage" method="post">
<label for="username">Username</label> <input type="text" name="pyib_username"><br>
<label for="password">Password</label> <input type="password" name="pyib_password"><br>
<label for="submit"> </label> <input type="submit" name="submit" value="Log in">
</form>"""
else:
if len(path_split) > 2:
if path_split[2] == 'rebuild':
if not administrator:
return
try:
board_dir = path_split[3]
except:
board_dir = ''
if board_dir == '':
page += 'Please click on the board you wish to rebuild:<br><br><a href="' + Settings.CGI_URL + 'manage/rebuild/!ALL">Rebuild all boards</b></a><br>'
page += boardlist('rebuild')
else:
if board_dir == '!ALL':
t1 = time.time()
boards = FetchAll('SELECT `dir` FROM `boards`')
for board in boards:
board = setBoard(board['dir'])
regenerateBoard()
page += 'Rebuilt all boards in ' + timeTaken(t1, time.time()) + ' seconds'
logAction(staff_account['username'], 'Rebuilt all boards')
else:
t1 = time.time()
board = setBoard(board_dir)
regenerateBoard()
page += 'Rebuilt /' + board['dir'] + '/ in ' + timeTaken(t1, time.time()) + ' seconds'
logAction(staff_account['username'], 'Rebuilt /' + board['dir'] + '/')
elif path_split[2] == 'rebuildnameblocks':
board_dir = ''
try:
board_dir = path_split[3]
except:
pass
if board_dir == '':
try:
board_dir = self.formdata['dir']
except:
pass
if board_dir != '':
t1 = time.time()
board = setBoard(board_dir)
posts = FetchAll('SELECT `id`, `name`, `tripcode`, `email`, `timestamp` FROM `posts` WHERE `boardid` = ' + board['id'])
for post in posts:
nameblock = nameBlock(post['name'], post['tripcode'], post['email'], formatTimestamp(post['timestamp']))
UpdateDb('UPDATE `posts` SET `nameblock` = \'' + _mysql.escape_string(nameblock) + '\' WHERE `id` = ' + post['id'] + ' AND `boardid` = ' + board['id'] + ' LIMIT 1')
page += 'Rebuilt name blocks for /' + board['dir'] + '/ in ' + timeTaken(t1, time.time()) + ' seconds'
logAction(staff_account['username'], 'Rebuilt /' + board['dir'] + '/')
elif path_split[2] == 'modbrowse':
board_dir = ''
thread_id = 0
try:
board_dir = path_split[3]
thread_id = path_split[4]
except:
pass
if board_dir == '':
try:
board_dir = self.formdata['dir']
thread_id = self.formdata['postid']
except:
pass
if board_dir == '':
page += """<div style="text-align: center;">
<form action=""" + '"' + Settings.CGI_URL + """manage/modbrowse" method="post">
<label for="dir">Board</label> <select name="dir">"""
boards = FetchAll('SELECT * FROM `boards` ORDER BY `dir`')
for board in boards:
page += '<option value="' + board['dir'] + '">/' + board['dir'] + '/ - ' + board['name'] + '</option>'
page += '</select><br>' + \
'<label for="postid">Thread ID</label> <input type="text" name="postid"><br>' \
'<label for="submit"> </label> <input type="submit" name="submit" value="Modbrowse">' \
'</form>'
else:
skiptemplate = True
Settings._.MODBROWSE = True
board = setBoard(board_dir)
self.output += threadPage(thread_id)
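        # Staff account management (super administrators only): list, add, edit and delete staff accounts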
elif path_split[2] == 'staff':
if staff_account['rights'] != '0':
return
action_taken = False
if len(path_split) > 3:
if path_split[3] == 'add' or path_split[3] == 'edit':
member = None
member_username = ''
member_rights = '2'
if path_split[3] == 'edit':
if len(path_split) > 4:
member = FetchOne('SELECT * FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
if member:
member_username = member['username']
member_rights = member['rights']
action = 'edit/' + member['id']
try:
if self.formdata['username'] != '':
if self.formdata['rights'] in ['0', '1', '2']:
action_taken = True
if not ':' in self.formdata['username']:
UpdateDb("UPDATE `staff` SET `username` = '" + _mysql.escape_string(self.formdata['username']) + "', `rights` = " + self.formdata['rights'] + " WHERE `id` = " + member['id'] + " LIMIT 1")
page += 'Staff member updated.'
logAction(staff_account['username'], 'Updated staff account for ' + self.formdata['username'])
else:
page += 'The character : can not be used in usernames.'
except:
pass
else:
action = 'add'
try:
if self.formdata['username'] != '' and self.formdata['password'] != '':
username_taken = FetchOne('SELECT * FROM `staff` WHERE `username` = \'' + _mysql.escape_string(self.formdata['username']) + '\' LIMIT 1')
if not username_taken:
if self.formdata['rights'] in ['0', '1', '2']:
action_taken = True
if not ':' in self.formdata['username']:
password = getMD5(self.formdata['password'])
InsertDb("INSERT INTO `staff` (`username`, `password`, `added`, `rights`) VALUES ('" + _mysql.escape_string(self.formdata['username']) + "', '" + _mysql.escape_string(password) + "', " + str(timestamp()) + ", " + self.formdata['rights'] + ")")
page += 'Staff member added.'
logAction(staff_account['username'], 'Added staff account for ' + self.formdata['username'])
else:
page += 'The character : can not be used in usernames.'
else:
action_taken = True
page += 'That username is already in use.'
except:
pass
if not action_taken:
action_taken = True
page += '<form action="' + Settings.CGI_URL + 'manage/staff/' + action + '" method="post">' + \
'<label for="username">Username</label> <input type="text" name="username" value="' + member_username + '"><br>'
if not member:
page += '<label for="password">Password</label> <input type="password" name="password"><br>'
page += '<label for="rights">Rights</label> <select name="rights"><option value="2"'
if member_rights == '2':
page += ' selected'
page += '>Moderator</option><option value="1"'
if member_rights == '1':
page += ' selected'
page += '>Administrator</option><option value="0"'
if member_rights == '0':
page += ' selected'
page += '>Super administrator</option></select><br>' + \
'<label for="submit"> </label> <input type="submit" name="submit" value="'
if path_split[3] == 'add':
page += 'Add'
else:
page += 'Edit'
page += '">' + \
'</form>'
elif path_split[3] == 'delete':
action_taken = True
page += '<a href="' + Settings.CGI_URL + 'manage/staff/delete_confirmed/' + path_split[4] + '">Click here to confirm the deletion of that staff member</a>'
elif path_split[3] == 'delete_confirmed':
try:
action_taken = True
member = FetchOne('SELECT `username` FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
if member:
UpdateDb('DELETE FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
page += 'Staff member deleted.'
logAction(staff_account['username'], 'Deleted staff account for ' + member['username'])
else:
page += 'Unable to locate a staff account with that ID.'
except:
pass
if not action_taken:
page += '<a href="' + Settings.CGI_URL + 'manage/staff/add">Add new</a><br>' + \
'<table border="1"><tr><th>ID</th><th>Username</th><th>Rights</th><th>Last Active</th><th> </th></tr>'
staff = FetchAll('SELECT * FROM `staff` ORDER BY `rights`')
for member in staff:
page += '<tr><td>' + member['id'] + '</td><td>' + member['username'] + '</td><td>'
if member['rights'] == '0':
page += 'Super administrator'
elif member['rights'] == '1':
page += 'Administrator'
elif member['rights'] == '2':
page += 'Moderator'
page += '</td><td>'
if member['lastactive'] != '0':
page += formatTimestamp(member['lastactive'])
else:
page += 'Never'
page += '</td><td><a href="' + Settings.CGI_URL + 'manage/staff/edit/' + member['id'] + '">edit</a> <a href="' + Settings.CGI_URL + '/manage/staff/delete/' + member['id'] + '">delete</a></td></tr>'
page += '</table>'
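        # Delete a single post, update the affected thread or front pages, and optionally continue to the ban form for the poster's IP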
elif path_split[2] == 'delete':
do_ban = False
try:
if self.formdata['ban'] == 'true':
do_ban = True
except:
pass
board = setBoard(path_split[3])
post = FetchOne('SELECT `parentid`, `ip` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
if not post:
page += 'Unable to locate a post with that ID.'
else:
deletePost(path_split[4])
if post['parentid'] != '0':
threadUpdated(post['parentid'])
else:
regenerateFrontPages()
page += 'Post successfully deleted.'
logAction(staff_account['username'], 'Deleted post /' + path_split[3] + '/' + path_split[4])
if do_ban:
page += '<br>Redirecting to ban page...<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage/ban/' + post['ip'] + '">'
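        # Place or edit a ban for an IP address: choose the boards it applies to, an expiry time, a reason and a staff note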
elif path_split[2] == 'ban':
if len(path_split) > 4:
board = setBoard(path_split[3])
post = FetchOne('SELECT `ip` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
if not post:
page += 'Unable to locate a post with that ID.'
else:
page += '<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage/ban/' + post['ip'] + '">'
else:
if path_split[3] == '':
try:
ip = self.formdata['ip']
except:
ip = ''
else:
ip = path_split[3]
if ip != '':
try:
reason = self.formdata['reason']
except:
reason = None
if reason is not None:
ban = FetchOne('SELECT `ip` FROM `bans` WHERE `ip` = \'' + _mysql.escape_string(ip) + '\' AND `where` = \'\' AND `until` = 0 LIMIT 1')
if not ban:
if self.formdata['seconds'] != '0':
until = str(timestamp() + int(self.formdata['seconds']))
else:
until = '0'
where = ''
if 'board_all' not in self.formdata.keys():
where = []
boards = FetchAll('SELECT `dir` FROM `boards`')
for board in boards:
keyname = 'board_' + board['dir']
if keyname in self.formdata.keys():
if self.formdata[keyname] == "1":
where.append(board['dir'])
if len(where) > 0:
where = pickle.dumps(where)
else:
self.error("You must select where the ban shall be placed")
return
if 'edit' in self.formdata.keys():
UpdateDb("DELETE FROM `bans` WHERE `id` = '" + _mysql.escape_string(self.formdata['edit']) + "' LIMIT 1")
InsertDb("INSERT INTO `bans` (`ip`, `where`, `added`, `until`, `staff`, `reason`, `note`) VALUES ('" + _mysql.escape_string(ip) + "', '" + _mysql.escape_string(where) + "', " + str(timestamp()) + ", " + until + ", '" + _mysql.escape_string(staff_account['username']) + "', '" + _mysql.escape_string(self.formdata['reason']) + "', '" + _mysql.escape_string(self.formdata['note']) + "')")
if 'edit' in self.formdata.keys():
page += 'Ban successfully edited.'
action = 'Edited ban for ' + ip
else:
page += 'Ban successfully placed.'
action = 'Banned ' + ip
if until != '0':
action += ' until ' + formatTimestamp(until)
else:
action += ' permanently'
logAction(staff_account['username'], action)
else:
page += 'There is already a global, permanent ban in place for that IP.'
else:
startvalues = {'where': [],
'reason': '',
'note': '',
'seconds': '0'}
edit_id = 0
if 'edit' in self.formdata.keys():
edit_id = self.formdata['edit']
ban = FetchOne("SELECT * FROM `bans` WHERE `id` = '" + _mysql.escape_string(edit_id) + "'")
if ban:
if ban['where'] == '':
where = ''
else:
where = pickle.loads(ban['where'])
if ban['until'] == '0':
until = 0
else:
until = int(ban['until']) - timestamp()
startvalues = {'where': where,
'reason': ban['reason'],
'note': ban['note'],
'seconds': str(until)}
else:
edit_id = 0
page += '<form action="' + Settings.CGI_URL + 'manage/ban/' + ip + '" name="banform" method="post">' + \
'<label>Board(s)</label> <ul>' + \
'<li><input type="checkbox" name="board_all" value="1"'
if startvalues['where'] == '':
page += ' checked'
page += '> <b>All boards</b><br><i>or</i></li>'
boards = FetchAll('SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
for board in boards:
page += '<li><input type="checkbox" name="board_' + board['dir'] + '" value="1"'
if board['dir'] in startvalues['where']:
page += ' checked'
page += '> ' + board['name'] + '</li>'
page += '</ul>'
if edit_id > 0:
page += '<input type="hidden" name="edit" value="' + edit_id + '">'
page += '<label for="reason">Reason</label> <input type="text" name="reason" value="' + startvalues['reason'] + '"><br>' + \
'<label for="note">Staff note</label> <input type="text" name="note" value="' + startvalues['note'] + '"><br>' + \
'<label for="seconds">Expire in #Seconds</label> <input type="text" name="seconds" value="' + startvalues['seconds'] + '"> <a href="#" onclick="document.banform.seconds.value=\'0\';return false;">no expiration</a> <a href="#" onclick="document.banform.seconds.value=\'3600\';return false;">1hr</a> <a href="#" onclick="document.banform.seconds.value=\'604800\';return false;">1w</a> <a href="#" onclick="document.banform.seconds.value=\'1209600\';return false;">2w</a> <a href="#" onclick="document.banform.seconds.value=\'2592000\';return false;">30d</a> <a href="#" onclick="document.banform.seconds.value=\'31536000\';return false;">1yr</a><br>' + \
'<label for="submit"> </label> <input type="submit" value="Place Ban">' + \
'</form>'
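        # List all current bans, with links to edit or delete each one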
elif path_split[2] == 'bans':
if len(path_split) > 4:
if path_split[3] == 'delete':
ip = FetchOne('SELECT `ip` FROM `bans` WHERE `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1', 0)[0]
if ip != '':
UpdateDb('DELETE FROM `bans` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
page += 'Ban successfully deleted.'
logAction(staff_account['username'], 'Deleted ban for ' + ip)
else:
page += 'There was a problem while deleting that ban. It may have already been removed, or recently expired.'
bans = FetchAll('SELECT * FROM `bans` ORDER BY `added` DESC')
page += '<form action="' + Settings.CGI_URL + 'manage/ban/" name="banform" method="post">' + \
'<label for="ip">IP address</label> <input type="text" name="ip"><br>' + \
'<label for="submit"> </label> <input type="submit" value="Proceed to ban form">' + \
'</form><br>'
if bans:
page += '<table border="1"><tr><th>IP Address</th><th>Boards</th><th>Added</th><th>Expires</th><th>Placed by</th><th>Reason</th><th>Staff note</th><th> </th></tr>'
for ban in bans:
page += '<tr><td>' + ban['ip'] + '</td><td>'
if ban['where'] == '':
page += 'All boards'
else:
where = pickle.loads(ban['where'])
if len(where) > 1:
page += '/' + '/, /'.join(where) + '/'
else:
page += '/' + where[0] + '/'
page += '</td><td>' + formatTimestamp(ban['added']) + '</td><td>'
if ban['until'] == '0':
page += 'Does not expire'
else:
page += formatTimestamp(ban['until'])
page += '</td><td>' + ban['staff'] + '</td><td>' + escapeHTML(ban['reason']) + '</td><td>' + ban['note'] + '</td><td><a href="' + Settings.CGI_URL + 'manage/ban/' + ban['ip'] + '?edit=' + ban['id'] + '">edit</a> <a href="' + Settings.CGI_URL + 'manage/bans/delete/' + ban['id'] + '">delete</a></td></tr>'
page += '</table>'
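        # Let the logged-in staff member change their own password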
elif path_split[2] == 'changepassword':
form_submitted = False
try:
if self.formdata['oldpassword'] != '' and self.formdata['newpassword'] != '' and self.formdata['newpassword2'] != '':
form_submitted = True
except:
pass
if form_submitted:
if getMD5(self.formdata['oldpassword']) == staff_account['password']:
if self.formdata['newpassword'] == self.formdata['newpassword2']:
UpdateDb('UPDATE `staff` SET `password` = \'' + getMD5(self.formdata['newpassword']) + '\' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
page += 'Password successfully changed. Please log out and log back in.'
else:
page += 'Passwords did not match.'
else:
page += 'Current password incorrect.'
else:
page += '<form action="' + Settings.CGI_URL + 'manage/changepassword" method="post">' + \
'<label for="oldpassword">Current password</label> <input type="password" name="oldpassword"><br>' + \
'<label for="newpassword">New password</label> <input type="password" name="newpassword"><br>' + \
'<label for="newpassword2">New password (confirm)</label> <input type="password" name="newpassword2"><br>' + \
'<label for="submit"> </label> <input type="submit" value="Change Password">' + \
'</form>'
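        # Per-board options editor (administrators only): name, anonymous name, forced anonymity, subject field and extra HTML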
elif path_split[2] == 'board':
if not administrator:
return
if len(path_split) > 3:
board = setBoard(path_split[3])
form_submitted = False
try:
if self.formdata['name'] != '':
form_submitted = True
except:
pass
if form_submitted:
if self.formdata['name'] != board['name']:
UpdateDb('UPDATE `boards` SET `name` = \'' + _mysql.escape_string(self.formdata['name']) + '\' WHERE `id` = ' + board['id'] + ' LIMIT 1')
board['settings']['anonymous'] = self.formdata['anonymous']
if self.formdata['forced_anonymous'] == '0':
board['settings']['forced_anonymous'] = False
else:
board['settings']['forced_anonymous'] = True
if self.formdata['disable_subject'] == '0':
board['settings']['disable_subject'] = False
else:
board['settings']['disable_subject'] = True
board['settings']['postarea_extra_html_top'] = self.formdata['postarea_extra_html_top']
updateBoardSettings()
page += 'Board options successfully updated.'
else:
page += '<form action="' + Settings.CGI_URL + 'manage/board/' + board['dir'] + '" method="post">' + \
'<label for="name">Name</label> <input type="text" name="name" value="' + board['name'] + '"><br>' + \
'<label for="anonymous">Anonymous</label> <input type="text" name="anonymous" value="' + board['settings']['anonymous'] + '"><br>' + \
'<label for="forced_anonymous">Forced anonymous</label> <input type="radio" name="forced_anonymous" value="0"'
if not board['settings']['forced_anonymous']:
page += ' checked'
page += '>No <input type="radio" name="forced_anonymous" value="1"'
if board['settings']['forced_anonymous']:
page += ' checked'
page += '>Yes<br>' + \
'<label for="disable_subject">Disable subject</label> <input type="radio" name="disable_subject" value="0"'
if not board['settings']['disable_subject']:
page += ' checked'
page += '>No <input type="radio" name="disable_subject" value="1"'
if board['settings']['disable_subject']:
page += ' checked'
page += '>Yes<br>' + \
'<label for="postarea_extra_html_top">HTML to include above posting area</label> <textarea name="postarea_extra_html_top" rows="10" cols="80">' + board['settings']['postarea_extra_html_top'] + '</textarea><br>' + \
'<label for="submit"> </label> <input type="submit" value="Update Options">' + \
'</form>'
else:
page += 'Click a board to view/change its options:' + boardlist('board')
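        # Create a new board (administrators only): make its directories, database row and .htaccess file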
elif path_split[2] == 'addboard':
if not administrator:
return
action_taken = False
board_dir = ''
try:
if self.formdata['name'] != '':
board_dir = self.formdata['dir']
except:
pass
if board_dir != '':
action_taken = True
board_exists = FetchOne('SELECT * FROM `boards` WHERE `dir` = \'' + _mysql.escape_string(board_dir) + '\' LIMIT 1')
if not board_exists:
os.mkdir(Settings.ROOT_DIR + board_dir)
os.mkdir(Settings.ROOT_DIR + board_dir + '/res')
os.mkdir(Settings.ROOT_DIR + board_dir + '/src')
os.mkdir(Settings.ROOT_DIR + board_dir + '/thumb')
if os.path.exists(Settings.ROOT_DIR + board_dir) and os.path.isdir(Settings.ROOT_DIR + board_dir):
UpdateDb('INSERT INTO `boards` (`dir`, `name`) VALUES (\'' + _mysql.escape_string(board_dir) + '\', \'' + _mysql.escape_string(self.formdata['name']) + '\')')
board = setBoard(board_dir)
f = open(Settings.ROOT_DIR + board['dir'] + '/.htaccess', 'w')
try:
f.write('DirectoryIndex index.html')
finally:
f.close()
regenerateFrontPages()
page += 'Board added'
logAction(staff_account['username'], 'Added board /' + board['dir'] + '/')
else:
page += 'There was a problem while making the directories'
else:
page += 'There is already a board with that directory'
if not action_taken:
page += '<form action="' + Settings.CGI_URL + 'manage/addboard" method="post">' + \
'<label for="dir">Directory</label> <input type="text" name="dir"><br>' + \
'<label for="name">Name</label> <input type="text" name="name"><br>' + \
'<label for="submit"> </label> <input type="submit" name="submit" value="Add board">' + \
'</form>'
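        # Show the staff action log (super administrators only)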
elif path_split[2] == 'logs':
if staff_account['rights'] != '0':
return
page += '<table border="1"><tr><th>Date</th><th>Staff Account</th><th>Action</th></tr>'
logs = FetchAll('SELECT * FROM `logs` ORDER BY `timestamp` DESC')
for log in logs:
page += '<tr><td>' + formatTimestamp(log['timestamp']) + '</td><td>' + log['staff'] + '</td><td>' + log['action'] + '</td></tr>'
page += '</table>'
elif path_split[2] == 'logout':
page += 'Logging out...<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage">'
setCookie(self, 'pyib_manage', '', domain='THIS')
setCookie(self, 'pyib_staff', '')
else:
page += "I'll think of something to put on the manage home."
if not skiptemplate:
template_values = {
'title': 'Manage',
'validated': validated,
'page': page,
'navbar': False,
}
if validated:
template_values.update({
'username': staff_account['username'],
'rights': staff_account['rights'],
'administrator': administrator,
'added': formatTimestamp(staff_account['added']),
})
self.output += renderTemplate('manage.html', template_values)
def logAction(staff, action):
InsertDb("INSERT INTO `logs` (`timestamp`, `staff`, `action`) VALUES (" + str(timestamp()) + ", '" + _mysql.escape_string(staff) + "\', \'" + _mysql.escape_string(action) + "\')")
def boardlist(action):
page = ''
boards = FetchAll('SELECT * FROM `boards` ORDER BY `dir`')
for board in boards:
page += '<br><a href="' + Settings.CGI_URL + 'manage/' + action + '/' + board['dir'] + '">/' + board['dir'] + '/ - ' + board['name'] + '</a>'
return page
| gpl-3.0 | -6,305,240,722,364,262,000 | 49.280277 | 691 | 0.498142 | false | 3.987104 | false | false | false |
SeanBeseler/data-structures | src/graph1.py | 1 | 3741 | class Graph1(object):
def __init__(self, itt=0):
"""init the graph object"""
self.graph = {}
def add_node(self, val):
"""adds a node to the graph"""
if val in self.graph:
            raise ValueError('graph already has the node')
self.graph[val] = []
def add_edge(self, val1, val2):
"""adds edges to the graph"""
if val1 not in self.graph:
self.add_node(val1)
if val2 not in self.graph:
self.add_node(val2)
        if val2 in self.graph[val1]:
            # remove the existing edge first so it is not duplicated
            self.graph[val1].remove(val2)
        self.graph[val1].append(val2)
def del_node(self, val):
"""del node from graph and edges that are from or to node"""
if val in self.graph:
del self.graph[val]
for key in self.graph:
if val in self.graph[key]:
self.graph[key].remove(val)
else:
            raise ValueError('graph does not have the node')
def del_edge(self, val1, val2):
"""del an edge"""
if val1 not in self.graph or val2 not in self.graph:
raise ValueError('graph does not have one of the nodes')
if val2 not in self.graph[val1]:
raise ValueError('graph does not have edge')
self.graph[val1].remove(val2)
def has_node(self, val):
"""check to see if graph has node"""
if val in self.graph:
return True
return False
def edges(self):
"""ouputs all neighbors of val"""
pair = ()
output = []
for key in self.graph:
for neigh in self.graph[key]:
pair = ()
pair = pair + (key, neigh)
output.append(pair)
return output
def adjacent(self, val1, val2):
"""Check to see if val1 is adjacent to val2"""
if val1 not in self.graph or val2 not in self.graph:
raise ValueError('graph does not have one of the nodes')
if val2 in self.graph[val1]:
return True
return False
def neighbors(self, val):
"""Outputs all neighbors of val"""
output = []
for neigh in self.graph[val]:
output.append(neigh)
return output
def nodes(self):
"""Returns a list of all nodes in graphs"""
output = []
for key in self.graph:
output.append(key)
return output
    def depth_first_traversal(self, val1, output=None):
        """Retrieve nodes ordered by a depth-first traversal starting at val1."""
        if val1 not in self.graph:
            raise ValueError('This node is not in the graph')
        neighbors = self.graph[val1]
        # avoid a shared mutable default; start a fresh list on the top-level call
        if output is None:
            output = []
        if val1 not in output:
            output.append(val1)
for x in range(len(neighbors)):
if neighbors[x] not in output:
output.append(neighbors[x])
output = self.depth_first_traversal(neighbors[x], output)
return output
def breadth_first_traversal(self, val):
"""Retrieves nodes ordered by a breadth search criteria"""
output = []
done = False
if val not in self.graph:
raise ValueError('This node is not in the graph')
output.append(val)
iterator = 0
while not done:
neighbors = self.graph[val]
sample_size = len(output)
for x in range(len(neighbors)):
if neighbors[x] not in output:
output.append(neighbors[x])
if sample_size == len(output) and iterator >= len(output) - 1:
done = True
else:
iterator += 1
val = output[iterator]
return output
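# Example usage (illustrative sketch, not part of the original module):
#
#     g = Graph1()
#     g.add_edge('a', 'b')
#     g.add_edge('b', 'c')
#     g.depth_first_traversal('a')    # ['a', 'b', 'c']
#     g.breadth_first_traversal('a')  # ['a', 'b', 'c']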
| mit | 3,894,893,344,389,052,000 | 32.401786 | 74 | 0.535953 | false | 4.179888 | false | false | false |
Eksmo/calibre | src/calibre/gui2/convert/fb2_input_ui.py | 1 | 1301 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/gugu/w/calibre/src/calibre/gui2/convert/fb2_input.ui'
#
# Created: Thu Jul 19 23:32:30 2012
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(400, 300)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
spacerItem = QtGui.QSpacerItem(20, 213, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.gridLayout.addItem(spacerItem, 1, 0, 1, 1)
self.opt_no_inline_fb2_toc = QtGui.QCheckBox(Form)
self.opt_no_inline_fb2_toc.setObjectName(_fromUtf8("opt_no_inline_fb2_toc"))
self.gridLayout.addWidget(self.opt_no_inline_fb2_toc, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_("Form"))
self.opt_no_inline_fb2_toc.setText(_("Do not insert a &Table of Contents at the beginning of the book."))
| gpl-3.0 | 478,254,432,766,602,400 | 36.171429 | 113 | 0.683321 | false | 3.293671 | false | false | false |
piksels-and-lines-orchestra/inkscape | share/extensions/jessyInk_uninstall.py | 8 | 5652 | #!/usr/bin/env python
# Copyright 2008, 2009 Hannes Hochreiner
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
# These lines are only needed if you don't put the script directly into
# the installation directory
import sys
# Unix
sys.path.append('/usr/share/inkscape/extensions')
# OS X
sys.path.append('/Applications/Inkscape.app/Contents/Resources/extensions')
# Windows
sys.path.append('C:\Program Files\Inkscape\share\extensions')
# We will use the inkex module with the predefined Effect base class.
import inkex
def propStrToList(str):
list = []
propList = str.split(";")
for prop in propList:
if not (len(prop) == 0):
list.append(prop.strip())
return list
def listToPropStr(list):
str = ""
for prop in list:
str += " " + prop + ";"
return str[1:]
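# For example (illustrative): propStrToList("a: 1; b: 2;") returns ["a: 1", "b: 2"],
# and listToPropStr(["a: 1", "b: 2"]) returns "a: 1; b: 2;".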
class JessyInk_Uninstall(inkex.Effect):
def __init__(self):
# Call the base class constructor.
inkex.Effect.__init__(self)
self.OptionParser.add_option('--tab', action = 'store', type = 'string', dest = 'what')
self.OptionParser.add_option('--remove_script', action = 'store', type = 'inkbool', dest = 'remove_script', default = True)
self.OptionParser.add_option('--remove_effects', action = 'store', type = 'inkbool', dest = 'remove_effects', default = True)
self.OptionParser.add_option('--remove_masterSlide', action = 'store', type = 'inkbool', dest = 'remove_masterSlide', default = True)
self.OptionParser.add_option('--remove_transitions', action = 'store', type = 'inkbool', dest = 'remove_transitions', default = True)
self.OptionParser.add_option('--remove_autoTexts', action = 'store', type = 'inkbool', dest = 'remove_autoTexts', default = True)
self.OptionParser.add_option('--remove_views', action = 'store', type = 'inkbool', dest = 'remove_views', default = True)
inkex.NSS[u"jessyink"] = u"https://launchpad.net/jessyink"
def effect(self):
# Remove script, if so desired.
if self.options.remove_script:
# Find and delete script node.
for node in self.document.xpath("//svg:script[@id='JessyInk']", namespaces=inkex.NSS):
node.getparent().remove(node)
# Remove "jessyInkInit()" in the "onload" attribute, if present.
if self.document.getroot().get("onload"):
propList = propStrToList(self.document.getroot().get("onload"))
else:
propList = []
for prop in propList:
if prop == "jessyInkInit()":
propList.remove("jessyInkInit()")
if len(propList) > 0:
self.document.getroot().set("onload", listToPropStr(propList))
else:
if self.document.getroot().get("onload"):
del self.document.getroot().attrib["onload"]
# Remove effect attributes, if so desired.
if self.options.remove_effects:
for node in self.document.xpath("//*[@jessyink:effectIn]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}effectIn"]
for node in self.document.xpath("//*[@jessyink:effectOut]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}effectOut"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_effectIn]", namespaces=inkex.NSS):
del node.attrib["jessyInk_effectIn"]
for node in self.document.xpath("//*[@jessyInk_effectOut]", namespaces=inkex.NSS):
del node.attrib["jessyInk_effectOut"]
# Remove master slide assignment, if so desired.
if self.options.remove_masterSlide:
for node in self.document.xpath("//*[@jessyink:masterSlide]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}masterSlide"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_masterSlide]", namespaces=inkex.NSS):
del node.attrib["jessyInk_masterSlide"]
# Remove transitions, if so desired.
if self.options.remove_transitions:
for node in self.document.xpath("//*[@jessyink:transitionIn]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}transitionIn"]
for node in self.document.xpath("//*[@jessyink:transitionOut]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}transitionOut"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_transitionIn]", namespaces=inkex.NSS):
del node.attrib["jessyInk_transitionIn"]
for node in self.document.xpath("//*[@jessyInk_transitionOut]", namespaces=inkex.NSS):
del node.attrib["jessyInk_transitionOut"]
# Remove auto texts, if so desired.
if self.options.remove_autoTexts:
for node in self.document.xpath("//*[@jessyink:autoText]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}autoText"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_autoText]", namespaces=inkex.NSS):
del node.attrib["jessyInk_autoText"]
# Remove views, if so desired.
if self.options.remove_views:
for node in self.document.xpath("//*[@jessyink:view]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}view"]
# Create effect instance.
effect = JessyInk_Uninstall()
effect.affect()
| gpl-2.0 | -4,624,478,952,183,116,000 | 40.255474 | 135 | 0.699575 | false | 3.098684 | false | false | false |
FCP-INDI/C-PAC | CPAC/sca/sca.py | 1 | 26697 | from nipype.interfaces.afni import preprocess
from CPAC.pipeline import nipype_pipeline_engine as pe
import nipype.algorithms.rapidart as ra
import nipype.interfaces.afni as afni
import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as nio
import nipype.interfaces.utility as util
from CPAC.sca.utils import *
from CPAC.utils.utils import extract_one_d
from CPAC.utils.datasource import resample_func_roi, \
create_roi_mask_dataflow, create_spatial_map_dataflow
from CPAC.timeseries.timeseries_analysis import get_roi_timeseries, \
get_spatial_map_timeseries
def create_sca(name_sca='sca'):
"""
    Map of the correlations of the Region of Interest (seed in native or MNI space) with the rest of the brain voxels.
The map is normalized to contain Z-scores, mapped in standard space and treated with spatial smoothing.
Parameters
----------
name_sca : a string
Name of the SCA workflow
Returns
-------
sca_workflow : workflow
Seed Based Correlation Analysis Workflow
Notes
-----
`Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/sca/sca.py>`_
Workflow Inputs::
inputspec.rest_res_filt : string (existing nifti file)
            Band-passed image with global signal, white matter, CSF and
            motion regression. Recommended bandpass filter (0.001, 0.1).
inputspec.timeseries_one_d : string (existing nifti file)
1D 3dTcorr1D compatible timeseries file. 1D file can be timeseries
from a mask or from a parcellation containing ROIs
Workflow Outputs::
outputspec.correlation_file : string (nifti file)
Correlations of the functional file and the input time series
outputspec.Z_score : string (nifti file)
Fisher Z transformed correlations of the seed
SCA Workflow Procedure:
1. Compute pearson correlation between input timeseries 1D file and input functional file
Use 3dTcorr1D to compute that. Input timeseries can be a 1D file containing parcellation ROI's
or a 3D mask
    2. Compute the Fisher Z score of the correlation computed in the step above. If a mask is provided,
       a single Z score file is returned; otherwise z-scores for all ROIs are returned as a list of
       nifti files
.. exec::
from CPAC.sca import create_sca
wf = create_sca()
wf.write_graph(
graph2use='orig',
dotfilename='./images/generated/sca.dot'
)
Workflow:
.. image:: ../../images/generated/sca.png
:width: 500
Detailed Workflow:
.. image:: ../../images/generated/sca_detailed.png
:width: 500
Examples
--------
>>> sca_w = create_sca("sca_wf")
>>> sca_w.inputs.inputspec.functional_file = '/home/data/subject/func/rest_bandpassed.nii.gz'
>>> sca_w.inputs.inputspec.timeseries_one_d = '/home/data/subject/func/ts.1D'
>>> sca_w.run() # doctest: +SKIP
"""
from CPAC.utils.utils import get_roi_num_list
sca = pe.Workflow(name=name_sca)
inputNode = pe.Node(util.IdentityInterface(fields=['timeseries_one_d',
'functional_file',]),
name='inputspec')
outputNode = pe.Node(util.IdentityInterface(fields=[
'correlation_stack',
'correlation_files',
'Z_score',
]),
name='outputspec')
# 2. Compute voxel-wise correlation with Seed Timeseries
corr = pe.Node(interface=preprocess.TCorr1D(),
name='3dTCorr1D', mem_gb=3.0)
corr.inputs.pearson = True
corr.inputs.outputtype = 'NIFTI_GZ'
sca.connect(inputNode, 'timeseries_one_d',
corr, 'y_1d')
sca.connect(inputNode, 'functional_file',
corr, 'xset')
# Transform the sub-bricks into volumes
try:
concat = pe.Node(interface=preprocess.TCat(), name='3dTCat')
except AttributeError:
from nipype.interfaces.afni import utils as afni_utils
concat = pe.Node(interface=afni_utils.TCat(), name='3dTCat')
concat.inputs.outputtype = 'NIFTI_GZ'
# also write out volumes as individual files
#split = pe.Node(interface=fsl.Split(), name='split_raw_volumes_sca')
#split.inputs.dimension = 't'
#split.inputs.out_base_name = 'sca_'
#get_roi_num_list = pe.Node(util.Function(input_names=['timeseries_file',
# 'prefix'],
# output_names=['roi_list'],
# function=get_roi_num_list),
# name='get_roi_num_list')
#get_roi_num_list.inputs.prefix = "sca"
#sca.connect(inputNode, 'timeseries_one_d', get_roi_num_list,
# 'timeseries_file')
#rename_rois = pe.MapNode(interface=util.Rename(), name='output_rois',
# iterfield=['in_file', 'format_string'])
#rename_rois.inputs.keep_ext = True
#sca.connect(split, 'out_files', rename_rois, 'in_file')
#sca.connect(get_roi_num_list, 'roi_list', rename_rois, 'format_string')
sca.connect(corr, 'out_file', concat, 'in_files')
#sca.connect(concat, 'out_file', split, 'in_file')
sca.connect(concat, 'out_file',
outputNode, 'correlation_stack')
#sca.connect(rename_rois, 'out_file', outputNode,
# 'correlation_files')
return sca
def create_temporal_reg(wflow_name='temporal_reg', which='SR'):
"""
Temporal multiple regression workflow
Provides a spatial map of parameter estimates corresponding to each
provided timeseries in a timeseries.txt file as regressors
Parameters
----------
wflow_name : a string
Name of the temporal regression workflow
which: a string
SR: Spatial Regression, RT: ROI Timeseries
NOTE: If you set (which = 'RT'), the output of this workflow will be
renamed based on the header information provided in the
timeseries.txt file.
If you run the temporal regression workflow manually, don\'t set
(which = 'RT') unless you provide a timeseries.txt file with a header
containing the names of the timeseries.
Returns
-------
wflow : workflow
temporal multiple regression Workflow
Notes
-----
`Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/sca/sca.py>`_
Workflow Inputs::
inputspec.subject_rest : string (existing nifti file)
            Band-passed image with global signal, white matter, CSF and
            motion regression. Recommended bandpass filter (0.001, 0.1).
inputspec.subject_timeseries : string (existing txt file)
text file containing the timeseries to be regressed on the subjects
functional file
timeseries are organized by columns, timepoints by rows
inputspec.subject_mask : string (existing nifti file)
path to subject functional mask
inputspec.demean : Boolean
control whether to demean model and data
inputspec.normalize : Boolean
control whether to normalize the input timeseries to unit standard deviation
Workflow Outputs::
outputspec.temp_reg_map : string (nifti file)
GLM parameter estimate image for each timeseries in the input file
outputspec.temp_reg_map_zstat : string (nifti file)
Normalized version of the GLM parameter estimates
Temporal Regression Workflow Procedure:
Enter all timeseries into a general linear model and regress these
timeseries to the subjects functional file to get spatial maps of voxels
showing activation patterns related to those in the timeseries.
.. exec::
from CPAC.sca import create_temporal_reg
wf = create_temporal_reg()
wf.write_graph(
graph2use='orig',
dotfilename='./images/generated/create_temporal_regression.dot'
)
Workflow:
.. image:: ../../images/generated/create_temporal_regression.png
:width: 500
Detailed Workflow:
.. image:: ../../images/generated/create_temporal_regression_detailed.png
:width: 500
References
----------
`http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/DualRegression/UserGuide <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/DualRegression/UserGuide>`_
Examples
--------
>>> tr_wf = create_temporal_reg('temporal regression')
>>> tr_wf.inputs.inputspec.subject_rest = '/home/data/subject/func/rest_bandpassed.nii.gz'
>>> tr_wf.inputs.inputspec.subject_timeseries = '/home/data/subject/func/timeseries.txt'
>>> tr_wf.inputs.inputspec.subject_mask = '/home/data/spatialmaps/spatial_map.nii.gz'
>>> tr_wf.inputs.inputspec.demean = True
>>> tr_wf.inputs.inputspec.normalize = True
>>> tr_wf.run() # doctest: +SKIP
"""
wflow = pe.Workflow(name=wflow_name)
inputNode = pe.Node(util.IdentityInterface
(fields=['subject_rest',
'subject_timeseries',
'subject_mask',
'demean',
'normalize']),
name='inputspec')
outputNode = pe.Node(util.IdentityInterface
(fields=['temp_reg_map',
'temp_reg_map_files',
'temp_reg_map_z',
'temp_reg_map_z_files']),
name='outputspec')
check_timeseries = pe.Node(util.Function(input_names=['in_file'],
output_names=['out_file'],
function=check_ts),
name='check_timeseries')
wflow.connect(inputNode, 'subject_timeseries',
check_timeseries, 'in_file')
temporalReg = pe.Node(interface=fsl.GLM(), name='temporal_regression',
mem_gb=4.0)
temporalReg.inputs.out_file = 'temp_reg_map.nii.gz'
temporalReg.inputs.out_z_name = 'temp_reg_map_z.nii.gz'
wflow.connect(inputNode, 'subject_rest', temporalReg, 'in_file')
wflow.connect(check_timeseries, 'out_file', temporalReg, 'design')
wflow.connect(inputNode, 'demean', temporalReg, 'demean')
wflow.connect(inputNode, 'normalize', temporalReg, 'des_norm')
wflow.connect(inputNode, 'subject_mask', temporalReg, 'mask')
wflow.connect(temporalReg, 'out_file', outputNode, 'temp_reg_map')
wflow.connect(temporalReg, 'out_z', outputNode, 'temp_reg_map_z')
'''
split = pe.Node(interface=fsl.Split(), name='split_raw_volumes')
split.inputs.dimension = 't'
split.inputs.out_base_name = 'temp_reg_map_'
wflow.connect(temporalReg, 'out_file', split, 'in_file')
split_zstat = pe.Node(interface=fsl.Split(), name='split_zstat_volumes')
split_zstat.inputs.dimension = 't'
split_zstat.inputs.out_base_name = 'temp_reg_map_z_'
wflow.connect(temporalReg, 'out_z',
split_zstat, 'in_file')
if which == 'SR':
wflow.connect(split, 'out_files',
outputNode, 'temp_reg_map_files')
wflow.connect(split_zstat, 'out_files',
outputNode, 'temp_reg_map_z_files')
elif which == 'RT':
map_roi_imports = ['import os', 'import numpy as np']
# get roi order and send to output node for raw outputs
get_roi_order = pe.Node(util.Function(input_names=['maps',
'timeseries'],
output_names=['labels',
'maps'],
function=map_to_roi,
imports=map_roi_imports),
name='get_roi_order')
wflow.connect(split, 'out_files', get_roi_order, 'maps')
wflow.connect(inputNode, 'subject_timeseries',
get_roi_order, 'timeseries')
rename_maps = pe.MapNode(interface=util.Rename(),
name='rename_maps',
iterfield=['in_file',
'format_string'])
rename_maps.inputs.keep_ext = True
wflow.connect(get_roi_order, 'labels', rename_maps, 'format_string')
wflow.connect(get_roi_order, 'maps', rename_maps, 'in_file')
wflow.connect(rename_maps, 'out_file',
outputNode, 'temp_reg_map_files')
# get roi order and send to output node for z-stat outputs
get_roi_order_zstat = pe.Node(util.Function(input_names=['maps',
'timeseries'],
output_names=['labels',
'maps'],
function=map_to_roi,
imports=map_roi_imports),
name='get_roi_order_zstat')
wflow.connect(split_zstat, 'out_files', get_roi_order_zstat, 'maps')
wflow.connect(inputNode, 'subject_timeseries',
get_roi_order_zstat, 'timeseries')
rename_maps_zstat = pe.MapNode(interface=util.Rename(),
name='rename_maps_zstat',
iterfield=['in_file',
'format_string'])
rename_maps_zstat.inputs.keep_ext = True
wflow.connect(get_roi_order_zstat, 'labels',
rename_maps_zstat, 'format_string')
wflow.connect(get_roi_order_zstat, 'maps',
rename_maps_zstat, 'in_file')
wflow.connect(rename_maps_zstat, 'out_file',
outputNode, 'temp_reg_map_z_files')
'''
return wflow
def SCA_AVG(wf, cfg, strat_pool, pipe_num, opt=None):
'''Run Seed-Based Correlation Analysis.
Node Block:
{"name": "SCA_AVG",
"config": ["seed_based_correlation_analysis"],
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": [["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"]],
"outputs": ["desc-MeanSCA_timeseries",
"desc-MeanSCA_correlations",
"atlas_name"]}
'''
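    # Overview: resample so the functional image and the ROI atlas share a grid,
    # extract the mean time series of each ROI, then correlate each ROI time
    # series with every voxel of the template-space functional image.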
# same workflow, except to run TSE and send it to the resource
# pool so that it will not get sent to SCA
resample_functional_roi_for_sca = pe.Node(
util.Function(input_names=['in_func',
'in_roi',
'realignment',
'identity_matrix'],
output_names=['out_func', 'out_roi'],
function=resample_func_roi,
as_module=True),
name=f'resample_functional_roi_for_sca_{pipe_num}')
resample_functional_roi_for_sca.inputs.realignment = \
cfg.timeseries_extraction['realignment']
resample_functional_roi_for_sca.inputs.identity_matrix = \
cfg.registration_workflows['functional_registration'][
'func_registration_to_template']['FNIRT_pipelines']['identity_matrix']
roi_dataflow_for_sca = create_roi_mask_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['Avg'],
f'roi_dataflow_for_sca_{pipe_num}'
)
roi_dataflow_for_sca.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)
roi_timeseries_for_sca = get_roi_timeseries(
f'roi_timeseries_for_sca_{pipe_num}')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
# resample the input functional file to roi
wf.connect(node, out,
resample_functional_roi_for_sca, 'in_func')
wf.connect(roi_dataflow_for_sca, 'outputspec.out_file',
resample_functional_roi_for_sca, 'in_roi')
# connect it to the roi_timeseries
wf.connect(resample_functional_roi_for_sca, 'out_roi',
roi_timeseries_for_sca, 'input_roi.roi')
wf.connect(resample_functional_roi_for_sca, 'out_func',
roi_timeseries_for_sca, 'inputspec.rest')
sca_roi = create_sca(f'sca_roi_{pipe_num}')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out, sca_roi, 'inputspec.functional_file')
wf.connect(roi_timeseries_for_sca, 'outputspec.roi_csv',
#('outputspec.roi_outputs', extract_one_d),
sca_roi, 'inputspec.timeseries_one_d')
outputs = {
'desc-MeanSCA_timeseries':
(roi_timeseries_for_sca, 'outputspec.roi_csv'),
#('outputspec.roi_outputs',
# extract_one_d)),
'desc-MeanSCA_correlations':
(sca_roi, 'outputspec.correlation_stack'),
'atlas_name': (roi_dataflow_for_sca, 'outputspec.out_name')
}
return (wf, outputs)
def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
'''Run Dual Regression - spatial regression and then temporal regression.
Node Block:
{"name": "dual_regression",
"config": ["seed_based_correlation_analysis"],
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": [["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"],
"space-template_desc-bold_mask"],
"outputs": ["desc-DualReg_correlations",
"desc-DualReg_statmap",
"atlas_name"]}
'''
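    # Dual regression: spatially regress the group-level spatial maps against the
    # functional data to get subject-specific time series, then temporally regress
    # those time series back onto the data to get subject-level spatial maps.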
resample_spatial_map_to_native_space_for_dr = pe.Node(
interface=fsl.FLIRT(),
name=f'resample_spatial_map_to_native_space_for_DR_{pipe_num}'
)
resample_spatial_map_to_native_space_for_dr.inputs.set(
interp='nearestneighbour',
apply_xfm=True,
in_matrix_file=
cfg.registration_workflows['functional_registration'][
'func_registration_to_template']['FNIRT_pipelines'][
'identity_matrix']
)
spatial_map_dataflow_for_dr = create_spatial_map_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['DualReg'],
f'spatial_map_dataflow_for_DR_{pipe_num}'
)
spatial_map_dataflow_for_dr.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)
spatial_map_timeseries_for_dr = get_spatial_map_timeseries(
f'spatial_map_timeseries_for_DR_{pipe_num}'
)
spatial_map_timeseries_for_dr.inputs.inputspec.demean = True
# resample the input functional file and functional mask
# to spatial map
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out,
resample_spatial_map_to_native_space_for_dr, 'reference')
wf.connect(node, out,
spatial_map_timeseries_for_dr, 'inputspec.subject_rest')
wf.connect(spatial_map_dataflow_for_dr, 'select_spatial_map.out_file',
resample_spatial_map_to_native_space_for_dr, 'in_file')
# connect it to the spatial_map_timeseries
wf.connect(resample_spatial_map_to_native_space_for_dr, 'out_file',
spatial_map_timeseries_for_dr, 'inputspec.spatial_map'
)
dr_temp_reg = create_temporal_reg(f'temporal_regression_{pipe_num}')
dr_temp_reg.inputs.inputspec.normalize = \
cfg.seed_based_correlation_analysis['norm_timeseries_for_DR']
dr_temp_reg.inputs.inputspec.demean = True
wf.connect(spatial_map_timeseries_for_dr, 'outputspec.subject_timeseries',
dr_temp_reg, 'inputspec.subject_timeseries')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out, dr_temp_reg, 'inputspec.subject_rest')
node, out = strat_pool.get_data("space-template_desc-bold_mask")
wf.connect(node, out, dr_temp_reg, 'inputspec.subject_mask')
outputs = {
'desc-DualReg_correlations':
(dr_temp_reg, 'outputspec.temp_reg_map'),
'desc-DualReg_statmap':
(dr_temp_reg, 'outputspec.temp_reg_map_z'),
'atlas_name':
(spatial_map_dataflow_for_dr, 'select_spatial_map.out_name')
}
return (wf, outputs)
def multiple_regression(wf, cfg, strat_pool, pipe_num, opt=None):
'''Run Multiple Regression.
Node Block:
{"name": "multiple_regression",
"config": ["seed_based_correlation_analysis"],
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": [["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"],
"space-template_desc-bold_mask"],
"outputs": ["desc-MultReg_correlations",
"desc-MultReg_statmap",
"atlas_name"]}
'''
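    # Multiple regression: extract the ROI time series and enter them together as
    # regressors in a single temporal regression against the functional data.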
# same workflow, except to run TSE and send it to the resource
# pool so that it will not get sent to SCA
resample_functional_roi_for_multreg = pe.Node(
util.Function(input_names=['in_func',
'in_roi',
'realignment',
'identity_matrix'],
output_names=['out_func',
'out_roi'],
function=resample_func_roi,
as_module=True),
name=f'resample_functional_roi_for_multreg_{pipe_num}')
resample_functional_roi_for_multreg.inputs.realignment = \
cfg.timeseries_extraction['realignment']
resample_functional_roi_for_multreg.inputs.identity_matrix = \
cfg.registration_workflows['functional_registration'][
'func_registration_to_template']['FNIRT_pipelines']['identity_matrix']
roi_dataflow_for_multreg = create_roi_mask_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['MultReg'],
f'roi_dataflow_for_mult_reg_{pipe_num}')
roi_dataflow_for_multreg.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)
roi_timeseries_for_multreg = get_roi_timeseries(
f'roi_timeseries_for_mult_reg_{pipe_num}')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
# resample the input functional file to roi
wf.connect(node, out, resample_functional_roi_for_multreg, 'in_func')
wf.connect(roi_dataflow_for_multreg,
'outputspec.out_file',
resample_functional_roi_for_multreg,
'in_roi')
# connect it to the roi_timeseries
wf.connect(resample_functional_roi_for_multreg,
'out_roi',
roi_timeseries_for_multreg,
'input_roi.roi')
wf.connect(resample_functional_roi_for_multreg,
'out_func',
roi_timeseries_for_multreg,
'inputspec.rest')
sc_temp_reg = create_temporal_reg(
f'temporal_regression_sca_{pipe_num}',
which='RT')
sc_temp_reg.inputs.inputspec.normalize = \
cfg.seed_based_correlation_analysis['norm_timeseries_for_DR']
sc_temp_reg.inputs.inputspec.demean = True
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out, sc_temp_reg, 'inputspec.subject_rest')
wf.connect(roi_timeseries_for_multreg, 'outputspec.roi_csv',
#('outputspec.roi_outputs', extract_one_d),
sc_temp_reg, 'inputspec.subject_timeseries')
node, out = strat_pool.get_data('space-template_desc-bold_mask')
wf.connect(node, out, sc_temp_reg, 'inputspec.subject_mask')
outputs = {
'desc-MultReg_correlations':
(sc_temp_reg, 'outputspec.temp_reg_map'),
'desc-MultReg_statmap':
(sc_temp_reg, 'outputspec.temp_reg_map_z'),
'atlas_name': (roi_dataflow_for_multreg, 'outputspec.out_name')
}
return (wf, outputs)
| bsd-3-clause | 7,963,554,521,188,729,000 | 38.492604 | 134 | 0.568041 | false | 3.92834 | false | false | false |