""" Tests for the clustering computation """
from nose.tools import *
import itertools
from shapely.geometry import Polygon
import marble as mb
#
# Synthetic data for tests
#
def grid():
""" Areal units arranged in a grid """
au = [i*3+j for i,j in itertools.product(range(3), repeat=2)]
    units = {a: Polygon([(a%3, a//3),
                         (a%3, 1+a//3),
                         (1+a%3, 1+a//3),
                         (1+a%3, a//3)]) for a in au}
return units
def checkerboard_city():
city = {0: {"A":100, "B":1},
1: {"A":1, "B":100},
2: {"A":100, "B":1},
3: {"A":1, "B":100},
4: {"A":100, "B":1},
5: {"A":1, "B":100},
6: {"A":100, "B":1},
7: {"A":1, "B":100},
8: {"A":100, "B":1}}
return city
def clustered_city():
city = {0: {"A":100, "B":1},
1: {"A":100, "B":1},
2: {"A":1, "B":100},
3: {"A":100, "B":1},
4: {"A":1, "B":100},
5: {"A":1, "B":100},
6: {"A":100, "B":1},
7: {"A":1, "B":100},
8: {"A":1, "B":100}}
return city
#
# Perform tests
#
class TestClustering(object):
def test_clustering_checkerboard(self):
units = grid()
city = checkerboard_city()
c = mb.clustering(city, units)
assert c["A"] == 0.0
assert c["B"] == 0.0
    def test_clustering_clustered(self):
units = grid()
city = clustered_city()
c = mb.clustering(city, units)
assert c["A"] == 1.0
assert c["B"] == 1.0
|
23  1, 2, 1, 291  | In him "all things were created, in heaven and on earth...
24  1, 2, 1, 291  | ...all things were created through him and for him."
52  1, 2, 1, 358  | ...is it that is about to be created, that enjoys such honour?
82  3, 1, 1, 1701 | God, that man has been created "in the image and likeness"...
93  3, 2, 2, 2331 | ...and communion." "God created man in his own image..."
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Copied and adapted from the DistUtilsExtra project
# Created by Sebastian Heinlein and Martin Pitt
# Copyright Canonical Ltd.
# Modified by Kai Willadsen for the Meld project
# Copyright (C) 2013-2014 Kai Willadsen <[email protected]>
import distutils.cmd
import distutils.command.build
import distutils.command.build_py
import distutils.command.install
import distutils.command.install_data
import distutils.dir_util
import distutils.dist
import glob
import os.path
import platform
import sys
from distutils.log import info
try:
import distro
except ImportError:
python_version = tuple(int(x) for x in platform.python_version_tuple())
if python_version >= (3, 8):
print(
'Missing build requirement "distro" Python module; '
'install paths may be incorrect', file=sys.stderr)
def has_help(self):
return "build_help" in self.distribution.cmdclass and os.name != 'nt'
def has_icons(self):
return "build_icons" in self.distribution.cmdclass
def has_i18n(self):
return "build_i18n" in self.distribution.cmdclass and os.name != 'nt'
def has_data(self):
return "build_data" in self.distribution.cmdclass
distutils.command.build.build.sub_commands.extend([
("build_i18n", has_i18n),
("build_icons", has_icons),
("build_help", has_help),
("build_data", has_data),
])
class MeldDistribution(distutils.dist.Distribution):
global_options = distutils.dist.Distribution.global_options + [
("no-update-icon-cache", None, "Don't run gtk-update-icon-cache"),
("no-compile-schemas", None, "Don't compile gsettings schemas"),
]
def __init__(self, *args, **kwargs):
self.no_update_icon_cache = False
self.no_compile_schemas = False
super().__init__(*args, **kwargs)
class build_data(distutils.cmd.Command):
gschemas = [
('share/glib-2.0/schemas', ['data/org.gnome.meld.gschema.xml'])
]
frozen_gschemas = [
('share/meld', ['data/gschemas.compiled']),
]
# FIXME: This is way too much hard coding, but I really hope
# it also doesn't last that long.
resource_source = "meld/resources/meld.gresource.xml"
resource_target = "org.gnome.meld.gresource"
def initialize_options(self):
pass
def finalize_options(self):
pass
def get_data_files(self):
data_files = []
build_path = os.path.join('build', 'data')
if not os.path.exists(build_path):
os.makedirs(build_path)
info("compiling gresources")
resource_dir = os.path.dirname(self.resource_source)
target = os.path.join(build_path, self.resource_target)
self.spawn([
"glib-compile-resources",
"--target={}".format(target),
"--sourcedir={}".format(resource_dir),
self.resource_source,
])
data_files.append(('share/meld', [target]))
if os.name == 'nt':
gschemas = self.frozen_gschemas
else:
gschemas = self.gschemas
data_files.extend(gschemas)
return data_files
def run(self):
data_files = self.distribution.data_files
data_files.extend(self.get_data_files())
class build_help(distutils.cmd.Command):
help_dir = 'help'
def initialize_options(self):
pass
def finalize_options(self):
pass
def get_data_files(self):
data_files = []
name = self.distribution.metadata.name
if "LINGUAS" in os.environ:
self.selected_languages = os.environ["LINGUAS"].split()
else:
self.selected_languages = [
d for d in os.listdir(self.help_dir) if os.path.isdir(d)
]
if 'C' not in self.selected_languages:
self.selected_languages.append('C')
self.C_PAGES = glob.glob(os.path.join(self.help_dir, 'C', '*.page'))
self.C_EXTRA = glob.glob(os.path.join(self.help_dir, 'C', '*.xml'))
for lang in self.selected_languages:
source_path = os.path.join(self.help_dir, lang)
if not os.path.exists(source_path):
continue
build_path = os.path.join('build', self.help_dir, lang)
if not os.path.exists(build_path):
os.makedirs(build_path)
if lang != 'C':
po_file = os.path.join(source_path, lang + '.po')
mo_file = os.path.join(build_path, lang + '.mo')
msgfmt = ['msgfmt', po_file, '-o', mo_file]
self.spawn(msgfmt)
for page in self.C_PAGES:
itstool = [
'itstool', '-m', mo_file, '-o', build_path, page]
self.spawn(itstool)
for extra in self.C_EXTRA:
extra_path = os.path.join(
build_path, os.path.basename(extra))
if os.path.exists(extra_path):
os.unlink(extra_path)
os.symlink(os.path.relpath(extra, source_path), extra_path)
else:
distutils.dir_util.copy_tree(source_path, build_path)
xml_files = glob.glob('%s/*.xml' % build_path)
mallard_files = glob.glob('%s/*.page' % build_path)
path_help = os.path.join('share', 'help', lang, name)
path_figures = os.path.join(path_help, 'figures')
data_files.append((path_help, xml_files + mallard_files))
figures = glob.glob('%s/figures/*.png' % build_path)
if figures:
data_files.append((path_figures, figures))
return data_files
def run(self):
data_files = self.distribution.data_files
data_files.extend(self.get_data_files())
self.check_help()
def check_help(self):
for lang in self.selected_languages:
build_path = os.path.join('build', self.help_dir, lang)
if not os.path.exists(build_path):
continue
pages = [os.path.basename(p) for p in self.C_PAGES]
for page in pages:
page_path = os.path.join(build_path, page)
if not os.path.exists(page_path):
info("skipping missing file %s", page_path)
continue
lint = ['xmllint', '--noout', '--noent', '--path', build_path,
'--xinclude', page_path]
self.spawn(lint)
class build_icons(distutils.cmd.Command):
icon_dir = os.path.join("data", "icons")
target = "share/icons"
frozen_target = "share/meld/icons"
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
target_dir = self.frozen_target if os.name == 'nt' else self.target
data_files = self.distribution.data_files
for theme in glob.glob(os.path.join(self.icon_dir, "*")):
for size in glob.glob(os.path.join(theme, "*")):
for category in glob.glob(os.path.join(size, "*")):
icons = (glob.glob(os.path.join(category, "*.png")) +
glob.glob(os.path.join(category, "*.svg")))
icons = [
icon for icon in icons if not os.path.islink(icon)]
if not icons:
continue
data_files.append(("%s/%s/%s/%s" %
(target_dir,
os.path.basename(theme),
os.path.basename(size),
os.path.basename(category)),
icons))
class build_i18n(distutils.cmd.Command):
bug_contact = None
domain = "meld"
po_dir = "po"
merge_po = False
# FIXME: It's ridiculous to specify these here, but I know of no other
# way except magically extracting them from self.distribution.data_files
desktop_files = [('share/applications', glob.glob("data/*.desktop.in"))]
xml_files = [
('share/metainfo', glob.glob("data/*.appdata.xml.in")),
('share/mime/packages', glob.glob("data/mime/*.xml.in"))
]
schemas_files = []
key_files = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def _rebuild_po(self):
# If there is a po/LINGUAS file, or the LINGUAS environment variable
# is set, only compile the languages listed there.
selected_languages = None
linguas_file = os.path.join(self.po_dir, "LINGUAS")
if "LINGUAS" in os.environ:
selected_languages = os.environ["LINGUAS"].split()
elif os.path.isfile(linguas_file):
selected_languages = open(linguas_file).read().split()
# If we're on Windows, assume we're building frozen and make a bunch
# of insane assumptions.
if os.name == 'nt':
msgfmt = "C:\\Python27\\Tools\\i18n\\msgfmt"
else:
msgfmt = "msgfmt"
# Update po(t) files and print a report
# We have to change the working dir to the po dir for intltool
cmd = [
"intltool-update",
(self.merge_po and "-r" or "-p"), "-g", self.domain
]
wd = os.getcwd()
os.chdir(self.po_dir)
self.spawn(cmd)
os.chdir(wd)
max_po_mtime = 0
for po_file in glob.glob("%s/*.po" % self.po_dir):
lang = os.path.basename(po_file[:-3])
if selected_languages and lang not in selected_languages:
continue
mo_dir = os.path.join("build", "mo", lang, "LC_MESSAGES")
mo_file = os.path.join(mo_dir, "%s.mo" % self.domain)
if not os.path.exists(mo_dir):
os.makedirs(mo_dir)
cmd = [msgfmt, po_file, "-o", mo_file]
po_mtime = os.path.getmtime(po_file)
mo_mtime = (
os.path.exists(mo_file) and os.path.getmtime(mo_file) or 0)
if po_mtime > max_po_mtime:
max_po_mtime = po_mtime
if po_mtime > mo_mtime:
self.spawn(cmd)
targetpath = os.path.join("share/locale", lang, "LC_MESSAGES")
self.distribution.data_files.append((targetpath, (mo_file,)))
self.max_po_mtime = max_po_mtime
def run(self):
if self.bug_contact is not None:
os.environ["XGETTEXT_ARGS"] = "--msgid-bugs-address=%s " % \
self.bug_contact
self._rebuild_po()
intltool_switches = [
(self.xml_files, "-x"),
(self.desktop_files, "-d"),
(self.schemas_files, "-s"),
(self.key_files, "-k"),
]
for file_set, switch in intltool_switches:
for target, files in file_set:
build_target = os.path.join("build", target)
if not os.path.exists(build_target):
os.makedirs(build_target)
files_merged = []
for file in files:
file_merged = os.path.basename(file)
if file_merged.endswith(".in"):
file_merged = file_merged[:-3]
file_merged = os.path.join(build_target, file_merged)
cmd = ["intltool-merge", switch, self.po_dir, file,
file_merged]
mtime_merged = (os.path.exists(file_merged) and
os.path.getmtime(file_merged) or 0)
mtime_file = os.path.getmtime(file)
if (mtime_merged < self.max_po_mtime or
mtime_merged < mtime_file):
# Only build if output is older than input (.po,.in)
self.spawn(cmd)
files_merged.append(file_merged)
self.distribution.data_files.append((target, files_merged))
class build_py(distutils.command.build_py.build_py):
"""Insert real package installation locations into conf module
Adapted from gottengeography
"""
data_line = 'DATADIR = "%s"'
locale_line = 'LOCALEDIR = "%s"'
def build_module(self, module, module_file, package):
if module_file == 'meld/conf.py':
with open(module_file) as f:
contents = f.read()
try:
options = self.distribution.get_option_dict('install')
prefix = options['prefix'][1]
except KeyError as e:
print(e)
prefix = sys.prefix
datadir = os.path.join(prefix, 'share', 'meld')
localedir = os.path.join(prefix, 'share', 'locale')
start, end = 0, 0
lines = contents.splitlines()
for i, line in enumerate(lines):
if line.startswith('# START'):
start = i
elif line.startswith('# END'):
end = i
if start and end:
lines[start:end + 1] = [
self.data_line % datadir,
self.locale_line % localedir,
]
module_file = module_file + "-installed"
contents = "\n".join(lines)
with open(module_file, 'w') as f:
f.write(contents)
distutils.command.build_py.build_py.build_module(
self, module, module_file, package)
class install(distutils.command.install.install):
def finalize_options(self):
special_cases = ('debian', 'ubuntu', 'linuxmint')
if platform.system() == 'Linux':
# linux_distribution has been removed in Python 3.8; we require
# the third-party distro package for future handling.
try:
distribution = platform.linux_distribution()[0].lower()
except AttributeError:
try:
distribution = distro.id()
except NameError:
distribution = 'unknown'
if distribution in special_cases:
# Maintain an explicit install-layout, but use deb by default
specified_layout = getattr(self, 'install_layout', None)
self.install_layout = specified_layout or 'deb'
distutils.command.install.install.finalize_options(self)
class install_data(distutils.command.install_data.install_data):
def run(self):
distutils.command.install_data.install_data.run(self)
if not self.distribution.no_update_icon_cache:
# TODO: Generalise to non-hicolor icon themes
info("running gtk-update-icon-cache")
icon_path = os.path.join(self.install_dir, "share/icons/hicolor")
self.spawn(["gtk-update-icon-cache", "-q", "-t", icon_path])
if not self.distribution.no_compile_schemas:
info("compiling gsettings schemas")
gschema_path = build_data.gschemas[0][0]
gschema_install = os.path.join(self.install_dir, gschema_path)
self.spawn(["glib-compile-schemas", gschema_install])
|
BabyBarista and the Art of War, published by Harry Potter's very own Bloomsbury Publishing, is officially released today. It is a legal comedy aimed at lawyers and non-lawyers alike, and has been described by broadcaster Jeremy Vine as "well-drawn, smartly plotted and laugh out loud" and by author Boris Starling as "sharp, acerbic, and almost illegally funny". Reviews can be found here, and you can order a copy at a heavily discounted £7.19 (incl. p&p) from Amazon here. The book launch will be on Wednesday 5th August from 6pm-11pm at the Old Bank of England Pub, 194 Fleet Street, Holborn, London EC4A. It is open house, so please come along, bring your friends and say 'hello'. It promises to be a great evening. Further details can be found here. Signed copies will also be available at a discount at the Fleet Street, High Holborn and Ludgate Circus branches of Waterstones from 5th August.
good review in The Times today, BB. Well done.
Dear Baby B, rather belated congratulations on your book launch. I’d have been there to share the fun, except for the small matter of being on the other side of the world.
|
#SBaaS
from .stage02_isotopomer_measuredData_io import stage02_isotopomer_measuredData_io
from SBaaS_isotopomer.stage01_isotopomer_averages_query import stage01_isotopomer_averages_query
from SBaaS_physiology.stage01_physiology_rates_query import stage01_physiology_rates_query
from SBaaS_LIMS.lims_msMethod_query import lims_msMethod_query
#SBaaS
from .stage02_isotopomer_measuredData_postgresql_models import *
from genomeScale_MFA.MFA_utilities import MFA_utilities
#resources
import re
from math import sqrt
class stage02_isotopomer_measuredData_execute(stage02_isotopomer_measuredData_io,
stage01_isotopomer_averages_query,
lims_msMethod_query,
stage01_physiology_rates_query):
def execute_makeMeasuredFragments(self,experiment_id_I, sample_name_abbreviations_I = [], time_points_I = [], scan_types_I = [], met_ids_I = []):
'''Collect and format MS data from data_stage01_isotopomer_averagesNormSum for fluxomics simulation'''
mfautilities = MFA_utilities();
# get experiment information:
met_id_conv_dict = {'Hexose_Pool_fru_glc-D':'glc-D',
'Pool_2pg_3pg':'3pg',
'23dpg':'13dpg'};
data_O = [];
experiment_stdev = [];
# get sample names and sample name abbreviations
if sample_name_abbreviations_I:
sample_abbreviations = sample_name_abbreviations_I;
st = 'Unknown';
sample_types_lst = [];
sample_types_lst.extend([st for i in range(len(sample_abbreviations))]);
else:
sample_abbreviations = [];
sample_types = ['Unknown'];
sample_types_lst = [];
for st in sample_types:
sample_abbreviations_tmp = [];
sample_abbreviations_tmp = self.get_sampleNameAbbreviations_experimentIDAndSampleType_dataStage01AveragesNormSum(experiment_id_I,st);
sample_abbreviations.extend(sample_abbreviations_tmp);
sample_types_lst.extend([st for i in range(len(sample_abbreviations_tmp))]);
for sna_cnt,sna in enumerate(sample_abbreviations):
print('Collecting experimental MS data for sample name abbreviation ' + sna);
# get time points
if time_points_I:
time_points = time_points_I;
else:
time_points = [];
time_points = self.get_timePoint_experimentIDAndSampleNameAbbreviation_dataStage01AveragesNormSum(experiment_id_I,sna);
for tp in time_points:
print('Collecting experimental MS data for time-point ' + str(tp));
# get the scan_types
if scan_types_I:
scan_types = [];
scan_types_tmp = [];
scan_types_tmp = self.get_scanTypes_experimentIDAndTimePointAndSampleAbbreviationsAndSampleType_dataStage01AveragesNormSum(experiment_id_I,tp,sna,sample_types_lst[sna_cnt]);
scan_types = [st for st in scan_types_tmp if st in scan_types_I];
else:
scan_types = [];
scan_types = self.get_scanTypes_experimentIDAndTimePointAndSampleAbbreviationsAndSampleType_dataStage01AveragesNormSum(experiment_id_I,tp,sna,sample_types_lst[sna_cnt]);
for scan_type in scan_types:
print('Collecting experimental MS data for scan type ' + scan_type)
# met_ids
if not met_ids_I:
met_ids = [];
met_ids = self.get_metIDs_experimentIDAndSampleAbbreviationAndTimePointAndSampleTypeAndScanType_dataStage01AveragesNormSum( \
experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type);
else:
met_ids = met_ids_I;
if not(met_ids): continue #no component information was found
for met in met_ids:
print('Collecting experimental MS data for metabolite ' + met);
# format the metabolite
if met in list(met_id_conv_dict.keys()):
met_formatted = met_id_conv_dict[met];
else: met_formatted = met;
met_formatted = re.sub('-','_DASH_',met_formatted)
met_formatted = re.sub('[(]','_LPARANTHES_',met_formatted)
met_formatted = re.sub('[)]','_RPARANTHES_',met_formatted)
# fragments
fragment_formulas = [];
fragment_formulas = self.get_fragmentFormula_experimentIDAndSampleAbbreviationAndTimePointAndSampleTypeAndScanTypeAndMetID_dataStage01AveragesNormSum( \
experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type,met);
# frag c map
frag_cmap = {};
frag_cmap = self.get_precursorFormulaAndProductFormulaAndCMapsAndPositions_metID(met,'-','tuning');
for frag in fragment_formulas:
# data
data_mat = [];
data_mat_cv = [];
data_mat_n = [];
data_mat, data_mat_cv, data_mat_n = self.get_spectrum_experimentIDAndSampleAbbreviationAndTimePointAndSampleTypeAndScanTypeAndMetIDAndFragmentFormula_dataStage01AveragesNormSum( \
experiment_id_I,sna,tp,sample_types_lst[sna_cnt],scan_type,met,frag);
# combine into a structure
positions,elements = [],[];
positions,elements = mfautilities.convert_fragmentAndElements2PositionAndElements(frag_cmap[frag]['fragment'],frag_cmap[frag]['fragment_elements']);
#fragname = met_formatted+'_c'+'_'+ re.sub('[-+]','',frag);
fragname = met_formatted+'_c'+'_'+ re.sub('[-+]','',frag)+'_'+scan_type;
data_names = [];
data_stdev = [];
data_stderr = [];
for i,d in enumerate(data_mat):
stdev = 0.0;
stderr = 0.0;
if data_mat_cv[i]:
if data_mat_n[i]==1:
stdev = 0.05;
else:
stdev = data_mat[i]*data_mat_cv[i]/100;
stderr = stdev/sqrt(data_mat_n[i]);
data_names.append(fragname+str(i));
data_stdev.append(stdev);
data_stderr.append(stderr);
experiment_stdev.append(stdev);
data_tmp = {'experiment_id':experiment_id_I,
'sample_name_abbreviation':sna,
'sample_type':sample_types_lst[sna_cnt],
'time_point':tp,
'met_id':met_formatted+'_c',
'fragment_id':fragname,
'fragment_formula':frag,
'intensity_normalized_average':data_mat,
'intensity_normalized_cv':data_mat_cv,
'intensity_normalized_stdev':data_stdev,
'intensity_normalized_n':data_mat_n,
'intensity_normalized_units':'normSum',
'met_elements':elements,
'met_atompositions':positions};
data_O.append(data_tmp);
#add data to the database
row = [];
row = data_stage02_isotopomer_measuredFragments(
experiment_id_I,
sna,
tp,
met_formatted+'_c',
fragname,
frag,
data_mat,
data_mat_cv,
data_stdev,
'normSum',
scan_type,
elements,
positions,
True,
None);
self.session.add(row);
self.session.commit();
def execute_addMeasuredFluxes(self,experiment_id_I, ko_list={}, flux_dict={}, model_ids_I=[], sample_name_abbreviations_I=[]):
'''Add flux data for physiological simulation'''
#Input:
#flux_dict = {};
#flux_dict['iJO1366'] = {};
#flux_dict['iJO1366'] = {};
#flux_dict['iJO1366']['sna'] = {};
#flux_dict['iJO1366']['sna']['Ec_biomass_iJO1366_WT_53p95M'] = {'ave':None,'stdev':None,'units':'mmol*gDCW-1*hr-1','lb':0.704*0.9,'ub':0.704*1.1};
#flux_dict['iJO1366']['sna']['EX_ac_LPAREN_e_RPAREN_'] = {'ave':None,'stdev':None,'units':'mmol*gDCW-1*hr-1','lb':2.13*0.9,'ub':2.13*1.1};
#flux_dict['iJO1366']['sna']['EX_o2_LPAREN_e_RPAREN__reverse'] = {'ave':None,'units':'mmol*gDCW-1*hr-1','stdev':None,'lb':0,'ub':16};
#flux_dict['iJO1366']['sna']['EX_glc_LPAREN_e_RPAREN_'] = {'ave':None,'stdev':None,'units':'mmol*gDCW-1*hr-1','lb':-7.4*1.1,'ub':-7.4*0.9};
data_O = [];
# get the model ids:
if model_ids_I:
model_ids = model_ids_I;
else:
model_ids = [];
model_ids = self.get_modelID_experimentID_dataStage02IsotopomerSimulation(experiment_id_I);
for model_id in model_ids:
# get sample names and sample name abbreviations
if sample_name_abbreviations_I:
sample_name_abbreviations = sample_name_abbreviations_I;
else:
sample_name_abbreviations = [];
sample_name_abbreviations = self.get_sampleNameAbbreviations_experimentIDAndModelID_dataStage02IsotopomerSimulation(experiment_id_I,model_id);
for sna_cnt,sna in enumerate(sample_name_abbreviations):
print('Adding experimental fluxes for sample name abbreviation ' + sna);
if flux_dict:
for k,v in flux_dict[model_id][sna].items():
# record the data
data_tmp = {'experiment_id':experiment_id_I,
'model_id':model_id,
'sample_name_abbreviation':sna,
'rxn_id':k,
'flux_average':v['ave'],
'flux_stdev':v['stdev'],
'flux_lb':v['lb'],
'flux_ub':v['ub'],
'flux_units':v['units'],
'used_':True,
'comment_':None}
data_O.append(data_tmp);
##add data to the database
#row = [];
#row = data_stage02_isotopomer_measuredFluxes(
# experiment_id_I,
# model_id,
# sna,
# k,
# v['ave'],
# v['stdev'],
# v['lb'],
# v['ub'],
# v['units'],
# True,
# None);
#self.session.add(row);
if ko_list:
for k in ko_list[model_id][sna]:
# record the data
data_tmp = {'experiment_id':experiment_id_I,
'model_id':model_id,
'sample_name_abbreviation':sna,
'rxn_id':k,
'flux_average':0.0,
'flux_stdev':0.0,
'flux_lb':0.0,
'flux_ub':0.0,
'flux_units':'mmol*gDCW-1*hr-1',
'used_':True,
'comment_':None}
data_O.append(data_tmp);
##add data to the database
#row = [];
#row = data_stage02_isotopomer_measuredFluxes(
# experiment_id_I,
# model_id,
# sna,
# k,
# 0.0,
# 0.0,
# 0.0,
# 0.0,
# 'mmol*gDCW-1*hr-1',
# True,
# None);
#self.session.add(row);
# add data to the DB
self.add_data_stage02_isotopomer_measuredFluxes(data_O);
#self.session.commit();
def execute_makeMeasuredFluxes(self,experiment_id_I, metID2RxnID_I = {}, sample_name_abbreviations_I = [], met_ids_I = [],snaIsotopomer2snaPhysiology_I={},
correct_EX_glc_LPAREN_e_RPAREN_I = True):
        '''Collect and format flux data from data_stage01_physiology_ratesAverages for fluxomics simulation
INPUT:
metID2RxnID_I = e.g. {'glc-D':{'model_id':'140407_iDM2014','rxn_id':'EX_glc_LPAREN_e_RPAREN_'},
'ac':{'model_id':'140407_iDM2014','rxn_id':'EX_ac_LPAREN_e_RPAREN_'},
'succ':{'model_id':'140407_iDM2014','rxn_id':'EX_succ_LPAREN_e_RPAREN_'},
'lac-L':{'model_id':'140407_iDM2014','rxn_id':'EX_lac_DASH_L_LPAREN_e_RPAREN_'},
'biomass':{'model_id':'140407_iDM2014','rxn_id':'Ec_biomass_iJO1366_WT_53p95M'}};
snaIsotopomer2snaPhysiology_I = {'OxicEvo04Ecoli13CGlc':'OxicEvo04EcoliGlc',
'OxicEvo04gndEcoli13CGlc':'OxicEvo04gndEcoliGlc',
'OxicEvo04pgiEcoli13CGlc':'OxicEvo04pgiEcoliGlc',
'OxicEvo04sdhCBEcoli13CGlc':'OxicEvo04sdhCBEcoliGlc',
'OxicEvo04tpiAEcoli13CGlc':'OxicEvo04tpiAEcoliGlc'}
TODO:
Need to implement a way to detect the direction of the reaction,
and change direction of the rate accordingly
'''
data_O = [];
# get sample names and sample name abbreviations
if sample_name_abbreviations_I:
sample_name_abbreviations = sample_name_abbreviations_I;
else:
sample_name_abbreviations = [];
            sample_name_abbreviations = self.get_sampleNameAbbreviations_experimentID_dataStage02IsotopomerSimulation(experiment_id_I);
for sna in sample_name_abbreviations:
print('Collecting experimental fluxes for sample name abbreviation ' + sna);
query_sna = sna;
if snaIsotopomer2snaPhysiology_I: query_sna = snaIsotopomer2snaPhysiology_I[sna];
# get met_ids
if not met_ids_I:
met_ids = [];
met_ids = self.get_metID_experimentIDAndSampleNameAbbreviation_dataStage01PhysiologyRatesAverages(experiment_id_I,query_sna);
else:
met_ids = met_ids_I;
if not(met_ids): continue #no component information was found
for met in met_ids:
print('Collecting experimental fluxes for metabolite ' + met);
# get rateData
slope_average, intercept_average, rate_average, rate_lb, rate_ub, rate_units, rate_var = None,None,None,None,None,None,None;
slope_average, intercept_average, rate_average, rate_lb, rate_ub, rate_units, rate_var = self.get_rateData_experimentIDAndSampleNameAbbreviationAndMetID_dataStage01PhysiologyRatesAverages(experiment_id_I,query_sna,met);
rate_stdev = sqrt(rate_var);
model_id = metID2RxnID_I[met]['model_id'];
rxn_id = metID2RxnID_I[met]['rxn_id'];
# correct for glucose uptake
if rxn_id == 'EX_glc_LPAREN_e_RPAREN_' and correct_EX_glc_LPAREN_e_RPAREN_I:
rate_lb_tmp,rate_ub_tmp = rate_lb,rate_ub;
rate_lb = min([abs(x) for x in [rate_lb_tmp,rate_ub_tmp]]);
rate_ub = max([abs(x) for x in [rate_lb_tmp,rate_ub_tmp]]);
rate_average = abs(rate_average);
# record the data
data_tmp = {'experiment_id':experiment_id_I,
'model_id':model_id,
'sample_name_abbreviation':sna,
'rxn_id':rxn_id,
'flux_average':rate_average,
'flux_stdev':rate_stdev,
'flux_lb':rate_lb,
'flux_ub':rate_ub,
'flux_units':rate_units,
'used_':True,
'comment_':None}
data_O.append(data_tmp);
##add data to the database
#row = [];
#row = data_stage02_isotopomer_measuredFluxes(
# experiment_id_I,
# model_id,
# sna,
# rxn_id,
# rate_average,
# rate_stdev,
# rate_lb,
# rate_ub,
# rate_units,
# True,
# None);
#self.session.add(row);
#add data to the DB
self.add_data_stage02_isotopomer_measuredFluxes(data_O);
#self.session.commit();
|
Diageo in spat with Bacardi over rum | City A.M.
DRINKS giant Diageo has accused rival Bacardi of trying to drive its Captain Morgan rum production out of the United States in order to protect its own rum subsidies from the Puerto Rican government.
The company claimed Bacardi is leading a “hidden campaign” to hound Diageo’s Captain Morgan rum production out of the US Virgin Islands (USVI), in a move that would “destroy” the territory’s economy.
Diageo has a deal with the USVI involving a new rum distillation facility where it will produce Captain Morgan rum for at least 30 years, starting from 2012 when its contract with Puerto Rican rum producer Destileria Serralles runs out.
Last week, the National Puerto Rican Coalition (NPRC) said that Diageo would get $2.7bn (£1.75bn) in US taxpayer-funded subsidies for the move.
But Diageo vice president Guy Smith is understood to have claimed that Bacardi was “using a campaign of misinformation to get Congress to retroactively overturn its US Virgin Islands initiative”.
Smith said Bacardi officials and lobbyists have visited US congressional leaders and Puerto Rico officials to discuss opposing its move.
He also said that the NPRC is urging the Hispanic community in Puerto Rico and throughout the United States to boycott Diageo goods.
But Bacardi dismissed the claims in a statement.
“This isn’t about where Diageo receives a free distillery, but about the proper use of federal tax dollars,” said spokeswoman Patricia Neal.
|
# coding: utf-8
"""
Unit tests for the string utilities in pymatgen.util.string_utils.
"""

from __future__ import division, unicode_literals
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Aug 26, 2012"
import unittest
from pymatgen.util.string_utils import generate_latex_table, str_delimited, \
str_aligned, formula_double_format, latexify, latexify_spacegroup
class FuncTest(unittest.TestCase):
def test_latexify(self):
self.assertEqual(latexify("Li3Fe2(PO4)3"),
"Li$_{3}$Fe$_{2}$(PO$_{4}$)$_{3}$")
self.assertEqual(latexify("Li0.2Na0.8Cl"),
"Li$_{0.2}$Na$_{0.8}$Cl")
def test_latexify_spacegroup(self):
        self.assertEqual(latexify_spacegroup("Fd-3m"), r"Fd$\overline{3}$m")
self.assertEqual(latexify_spacegroup("P2_1/c"), "P2$_{1}$/c")
def test_str_aligned_delimited(self):
data = [["a", "bb"], ["ccc", "dddd"]]
ans = """ a bb
ccc dddd"""
self.assertEqual(str_aligned(data), ans)
self.assertEqual(str_aligned(data, header=["X", "Y"]),
' X Y\n----------\n a bb\nccc dddd')
self.assertEqual(str_delimited(data), 'a\tbb\nccc\tdddd')
def test_generate_latex_table(self):
data = [["a", "bb"], ["ccc", "dddd"]]
self.assertEqual(generate_latex_table(data), '\\begin{table}[H]\n\\caption{Caption}\n\\label{Label}\n\\begin{tabular*}{\\textwidth}{@{\\extracolsep{\\fill}}cc}\n\\hline\na & bb\\\\\nccc & dddd\\\\\n\\hline\n\\end{tabular*}\n\\end{table}')
def test_formula_double_format(self):
self.assertEqual(formula_double_format(1.00), "")
self.assertEqual(formula_double_format(2.00), "2")
self.assertEqual(formula_double_format(2.10), "2.1")
self.assertEqual(formula_double_format(2.10000000002), "2.1")
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
The Shark Reef Cafe is a full service restaurant open from 9:00am to 5:00pm, serving both breakfast and lunch.
The menu includes breakfast burritos, pancakes, salads, burgers and sandwiches. Catering services available.
|
import sys
import csv
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import ACCEPTED
class Command(BaseCommand):
help = ("List speakers and associated tickets. By default, only lists"
" speakers for accepted talk, but this can be overriden by"
" the --all option")
option_list = BaseCommand.option_list + tuple([
make_option('--all', action="store_true", default=False,
help='List speakers and tickets (for all talks)'),
])
def _speaker_tickets(self, options):
people = get_user_model().objects.filter(
talks__isnull=False).distinct()
csv_file = csv.writer(sys.stdout)
for person in people:
# We query talks to filter out the speakers from ordinary
# accounts
if options['all']:
titles = [x.title for x in person.talks.all()]
else:
titles = [x.title for x in
person.talks.filter(status=ACCEPTED)]
if not titles:
continue
tickets = person.ticket.all()
if tickets:
ticket = u'%d' % tickets[0].barcode
else:
ticket = u'NO TICKET PURCHASED'
row = [x.encode("utf-8") for x in (person.get_full_name(),
person.email,
ticket)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._speaker_tickets(options)
|
Why do I have to pay for works in advance if they cost more than the next month's rent?
We are members of the Association of Residential Letting Agents. We subscribe to the highest standards when it comes to handling client money.
One of these standards is that we cannot accept any liability on the landlord's behalf unless we expect funds to cover that liability within its agreed credit term.
With the exception of a few suppliers (including big discount/high street chains who demand payment on receipt), most of our qualified contractors issue an invoice shortly after a job has been completed. This invoice is addressed to the landlord and has a credit term of 30 days. Any unpaid invoice creates a liability in the landlord client account.
Provided that the next month's rent, less our management fee, covers the value of any expected invoice with a credit facility, we can instruct the work, since we expect the liability to be paid within the agreed term. For example, if the monthly rent is £1,000 and our management fee is £100, the remaining £900 comfortably covers a £300 repair invoice, so no advance payment is needed. However, in the very unlikely case that we have any grounds to believe the rent will not be received on time, or if there are already other invoices that bring the total due above the expected available funds, we would request payment in advance.
We need to bring to the landlord's attention that should the rent not be paid within the expected time, he/she would have to transfer the funds to pay the outstanding invoice (to cover his/her liability), whilst we pursue payment of the outstanding rent (to cover the tenant's liability).
|
from .syscall_full import FullSystemCalls, AlarmTemplate
from .elements import Statement, Comment
from .elements import CodeTemplate, Include, VariableDefinition, \
Block, Statement, Comment, Function, Hook, DataObject, DataObjectArray
from generator.tools import unwrap_seq
from generator.analysis.AtomicBasicBlock import E,S
from generator.analysis.SystemSemantic import SystemState
from generator.analysis import Subtask
import logging
class FSMSystemCalls(FullSystemCalls):
def __init__(self, use_pla = False):
super(FSMSystemCalls, self).__init__()
self.alarms = FSMAlarmTemplate
if use_pla:
self.fsm_template = PLA_FSMTemplate
else:
self.fsm_template = SimpleFSMTemplate
def generate_system_code(self):
self.generator.source_file.include("syscall.h")
# Grab the finite state machine
self.fsm = self.system_graph.get_pass("fsm").fsm
# Use the fsm template
self.generator.source_file.include("reschedule-ast.h")
self.generator.source_file.include("os/scheduler/task.h")
self.impl = self.fsm_template(self)
self.impl.add_transition_table()
self.generator.source_file.declarations.append(self.impl.expand())
self.generator.source_file.include("os/alarm.h")
self.generator.source_file.include("os/util/redundant.h")
self.generator.source_file.declarations.append(self.alarms(self).expand())
def StartOS(self, block):
block.unused_parameter(0)
for subtask in self.system_graph.real_subtasks:
            # Reset the stack pointer for all tasks
self.call_function(block,
self.task_desc(subtask) + ".tcb.reset",
"void", [])
# Call the StartupHook
self.call_function(block, "CALL_HOOK", "void", ["StartupHook"])
# Bootstrap: Do the initial syscall
dispatch_func = Function("__OS_StartOS_dispatch", "void", ["int"], extern_c = True)
self.generator.source_file.function_manager.add(dispatch_func)
# Initial SystemCall
for ev in self.fsm.events:
if ev.name.isA(S.StartOS):
self.fsm_schedule(ev.name, block, dispatch_func)
break
self.call_function(block, "arch::syscall", "void",
[dispatch_func.function_name])
self.call_function(block, "Machine::unreachable", "void", [])
# Forward fsm_schedule and fsm_event
def fsm_event(self, *args, **kwargs):
self.impl.fsm_event(*args, **kwargs)
def fsm_schedule(self, *args, **kwargs):
self.impl.fsm_schedule(*args, **kwargs)
def iret(self, *args, **kwargs):
self.impl.fsm_iret(*args, **kwargs)
def kickoff(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
if not syscall.subtask.conf.is_isr:
self.arch_rules.kickoff(syscall, userspace)
def TerminateTask(self, syscall, userspace, kernelspace):
self.call_function(kernelspace, self.task_desc(syscall.subtask) + ".tcb.reset",
"void", [])
self.fsm_schedule(syscall, userspace, kernelspace)
ChainTask = TerminateTask
ActivateTask = fsm_schedule
WaitEvent = fsm_schedule
ClearEvent = fsm_schedule
SetEvent = fsm_schedule
GetResource = fsm_schedule
ReleaseResource = fsm_schedule
def ASTSchedule(self, function):
pass
def AdvanceCounter(self, abb, userspace, kernelspace):
raise NotImplementedError
################################################################
# These system calls are only enhanced by the FSM step function
################################################################
# Do not overwrite: SetRelAlarm
# Do not overwrite: GetAlarm
# Do not overwrite: CancelAlarm
# Do not overwrite: DisableAllInterrupts
# Do not overwrite: SuspendAllInterrupts
# Do not overwrite: SuspendOSInterrupts
# Do not overwrite: EnableAllInterrupts
# Do not overwrite: ResumeAllInterrupts
# Do not overwrite: ResumeOSInterrupts
# Do not overwrite: AcquireCheckedObject
# Do not overwrite: ReleaseCheckedObject
def SetRelAlarm(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.SetRelAlarm(self, syscall, userspace, kernelspace)
def GetAlarm(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.GetAlarm(self, syscall, userspace, kernelspace)
def CancelAlarm(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.CancelAlarm(self, syscall, userspace, kernelspace)
def DisableAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.DisableAllInterrupts(self, syscall, userspace, kernelspace)
def SuspendAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.SuspendAllInterrupts(self, syscall, userspace, kernelspace)
def SuspendOSInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.SuspendOSInterrupts(self, syscall, userspace, kernelspace)
def EnableAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.EnableAllInterrupts(self, syscall, userspace, kernelspace)
def ResumeAllInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.ResumeAllInterrupts(self, syscall, userspace, kernelspace)
def ResumeOSInterrupts(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.ResumeOSInterrupts(self, syscall, userspace, kernelspace)
def AcquireCheckedObject(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.AcquireCheckedObject(self, syscall, userspace, kernelspace)
def ReleaseCheckedObject(self, syscall, userspace, kernelspace):
self.fsm_event(syscall, userspace, kernelspace)
FullSystemCalls.ReleaseCheckedObject(self, syscall, userspace, kernelspace)
def do_assertions(self, block, assertions):
"""We do not support assertions for a FSM kernel"""
logging.error("Assertions are not implemented for the FSM coder")
class SimpleFSMTemplate(CodeTemplate):
def __init__(self, syscall_fsm):
CodeTemplate.__init__(self, syscall_fsm.generator, "os/fsm/simple-fsm.h.in")
self.syscall_fsm = syscall_fsm
self.system_graph = self.generator.system_graph
self.syscall_fsm.generator.source_file.include("os/fsm/simple-fsm.h")
self.fsm = self.syscall_fsm.fsm
def add_transition_table(self):
self.syscall_map = {}
# Rename action labels to their task id
def action_rename(action):
task_id = action.impl.task_id
if task_id == None:
task_id = 255
return task_id
self.fsm.rename(actions = action_rename)
# Generate the transition table
for event in self.fsm.events:
self.syscall_map[event.name] = event
# Do not generate a transition table, if there is only one
# transition.
if len(event.transitions) == 1:
event.impl.transition_table = None
continue
table = DataObjectArray("os::fsm::SimpleFSM::Transition",
"fsm_table_" + event.name.generated_function_name(),
str(len(event.transitions)))
table.static_initializer = []
for t in event.transitions:
table.static_initializer\
.append("{%d, %d, %d}" % (t.source, t.target, t.action))
event.impl.transition_table = table
self.syscall_fsm.generator.source_file.data_manager\
.add(table, namespace = ('os', 'fsm'))
def fsm_event(self, syscall, userspace, kernelspace):
if not syscall in self.syscall_map:
return
event = self.syscall_map[syscall]
if event.impl.transition_table:
transition_table = event.impl.transition_table.name
transition_length = str(len(event.transitions))
# kernelspace.add(Statement('kout << "%s" << endl' % syscall.path()))
task = self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.event",
"SimpleFSM::task_t", [transition_table, transition_length])
else:
followup_state = event.impl.followup_state = event.transitions[0].target
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.set_state",
"void", [str(followup_state)])
task = event.transitions[0].action
return task
def fsm_schedule(self, syscall, userspace, kernelspace):
if not syscall in self.syscall_map:
return
task = self.fsm_event(syscall, userspace, kernelspace)
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [task.name])
def fsm_iret(self, syscall, userspace, kernelspace):
if not syscall in self.syscall_map:
return
task = self.fsm_event(syscall, userspace, kernelspace)
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [task.name])
################################################################
# Used in Template Code
################################################################
def subtask_desc(self, snippet, args):
return self._subtask.impl.task_descriptor.name
def subtask_id(self, snippet, args):
return str(self._subtask.impl.task_id)
def foreach_subtask_sorted(self, snippet, args):
body = args[0]
ret = []
for subtask in sorted(self.system_graph.real_subtasks, key=lambda s: s.impl.task_id):
self._subtask = subtask
ret.append(self.expand_snippet(body))
return ret
class PLA_FSMTemplate(CodeTemplate):
def __init__(self, syscall_fsm):
CodeTemplate.__init__(self, syscall_fsm.generator, "os/fsm/pla-fsm.h.in")
self.syscall_fsm = syscall_fsm
self.system_graph = self.generator.system_graph
self.logic = self.system_graph.get_pass("LogicMinimizer")
self.fsm = self.logic.fsm
def add_transition_table(self):
# Truth table is generated in pla-fsm.h
return
def fsm_event(self, syscall, userspace, kernelspace):
event = None
for ev in self.fsm.events:
if self.fsm.event_mapping[ev.name] == syscall:
event = ev
break
if not event:
return # No Dispatch
# kernelspace.add(Statement('kout << "%s" << endl' % syscall.path()))
task = self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.event",
"unsigned", [str(int(event.name, 2))])
return task
def fsm_schedule(self, syscall, userspace, kernelspace):
task = self.fsm_event(syscall, userspace, kernelspace)
if not task:
return
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.dispatch",
"void", [task.name])
def fsm_iret(self, syscall, userspace, kernelspace):
task = self.fsm_event(syscall, userspace, kernelspace)
if not task:
return
if type(task) == int:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [str(task)])
else:
self.syscall_fsm.call_function(kernelspace, "os::fsm::fsm_engine.iret",
"void", [task.name])
################################################################
# Used in Template Code
################################################################
def truth_table(self, *args):
# Generate the transition table
initializer = []
for (mask, pattern, output_state, output_action) in self.logic.truth_table:
initializer.append("{{{0}, {1}, {2}, {3}}}".format(
int(mask, 2),
int(pattern, 2),
int(output_state, 2),
int(output_action, 2)))
return "{" + (", ".join(initializer)) + "}"
def mask_pattern_len(self, *args):
return str(self.logic.event_len + self.logic.state_len)
def truth_table_entries(self, *args):
return str(len(self.logic.truth_table))
def initial_state(self, *args):
return str(int(self.fsm.initial_state,2))
def dispatch_table(self, *args):
mapping = {}
for k, subtask in self.fsm.action_mapping.items():
mapping[int(k, 2)] = subtask
if not 0 in mapping:
mapping[0] = None
self.NO_DISPATCH = 0
initializer = []
for k,subtask in sorted(mapping.items(), key = lambda x:x[0]):
if not subtask or subtask.conf.is_isr:
initializer.append("0 /* NO_DISPATCH */")
elif subtask == self.system_graph.idle_subtask:
initializer.append("0 /* IDLE */")
self.IDLE = k
else:
initializer.append("&" +subtask.impl.task_descriptor.name)
if not hasattr(self, "IDLE"):
self.IDLE = len(mapping) + 100
return ", ".join(initializer)
class FSMAlarmTemplate(AlarmTemplate):
def __init__(self, rules):
AlarmTemplate.__init__(self, rules)
|
Please note that this article requires a basic understanding of HTML and WordPress. If you're not sure what to do, we advise you to avoid tampering with your website's code.
Occasionally we like to embark on a little like-frenzy trip down our Facebook Newsfeed. We just like content – from images to articles and individual comments – without really reading further into the story.
We may happen to stumble on a particular article with an interesting title and a nice picture – but still, not worth visiting or reading. Now, as creators of that article, we can control what the user will see once they like, comment on or otherwise interact with our post, without leaving their Newsfeed. Much like the Related Posts section on our blog, Facebook's related articles work in a similar way.
Today we will implement OpenGraph tags for Related Content on our WordPress articles.
Control what #Facebook shows as related content with OpenGraph.
Acquaint yourself with OpenGraph for WordPress.
This time we will be adding the see_also meta tag to our article. It contains an array<url> of links to related content, which you can define. The links can point to specific articles, that is, static content. Make sure the tag is contained within an Article object type, otherwise this doesn't seem to work.
WordPress is a powerful content management tool, and changing the code every so often can become tedious. In this case, we can call a WP loop of related content within our tag and define our parameters from there. Just in case, take a look at WordPress's documentation on how loops and queries work.
You need to make sure your code goes in a single.php template, or, if your header.php outputs your meta tags, use WordPress's conditional tags instead. Otherwise this will not work, unless you change the conditions.
Essentially, every time a Facebook user interacts with our shared article on their Newsfeed, our see_also property will trigger an action to display our related permalinks (previews of articles) underneath the post, from the same category as our original shared article (see the sketch below).
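Below is a minimal illustrative sketch (in Python rather than PHP, purely to show the shape of the output): given a list of related-article URLs – hypothetical example.com links here – it renders the see_also meta tags that would go in the page <head>. The og:see_also property name follows this article's description; check it against the current OpenGraph documentation for your setup.

related_urls = [
    "https://example.com/related-article-1",  # hypothetical URLs
    "https://example.com/related-article-2",
]

def see_also_tags(urls):
    # One <meta> tag per related URL, matching the article's description of
    # an array<url> of links inside the Article object type.
    return "\n".join(
        '<meta property="og:see_also" content="%s" />' % url for url in urls
    )

print(see_also_tags(related_urls))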
Would you like to add anything to this story? Share your insight with us in the comments below.
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FeedbackRecordDTO(Model):
"""Active learning feedback record.
:param user_id: Unique identifier for the user.
:type user_id: str
:param user_question: The suggested question being provided as feedback.
:type user_question: str
:param qna_id: The qnaId for which the suggested question is provided as
feedback.
:type qna_id: int
"""
_validation = {
'user_question': {'max_length': 1000},
}
_attribute_map = {
'user_id': {'key': 'userId', 'type': 'str'},
'user_question': {'key': 'userQuestion', 'type': 'str'},
'qna_id': {'key': 'qnaId', 'type': 'int'},
}
def __init__(self, **kwargs):
super(FeedbackRecordDTO, self).__init__(**kwargs)
self.user_id = kwargs.get('user_id', None)
self.user_question = kwargs.get('user_question', None)
self.qna_id = kwargs.get('qna_id', None)
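# A minimal usage sketch (not part of the generated library): the model is
# constructed via keyword arguments matching the kwargs handled in __init__
# above; the values here are hypothetical.
if __name__ == '__main__':
    record = FeedbackRecordDTO(
        user_id='user-123',
        user_question='How do I reset my password?',
        qna_id=42,
    )
    print(record.user_id, record.qna_id)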
|
This is a small 12″ x 9″ pastel study of a golden retriever on the beach.
The work has been done on to Fisher 400 Art Pastel Paper. The technique involves building layers of watercolour onto the surface and then working over the top in pastels.
The work will be sent unframed in a heavy duty roll. Don't be worried that the work is sent unframed. This means that delivery is free, it arrives safely and you can choose your own frame. You do not even have to handle the painting. I will provide detailed instructions. All you have to do is take the roll to your local framer, choose your own frame and he will do the rest. If you require further information regarding this painting please ask, I will respond quickly.
The framed image is shown to give an idea of what the painting could look like – but if the work is collected from the gallery, the frame it is shown in here is FREE.
The Beaconsfield Gallery, Flockton Moor, Wakefield, WF4 4BP. Please call if you have any enquiries: +44 (0)1924 840 687.
|
# A very simple perceptron for classifying american sign language letters
import signdata
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Flatten, Dropout, Conv2D, MaxPooling2D, Reshape
from keras.utils import np_utils
import wandb
from wandb.keras import WandbCallback
# logging code
run = wandb.init()
config = run.config
config.loss = "mae"
config.optimizer = "adam"
config.epochs = 10
# load data
(X_test, y_test) = signdata.load_test_data()
(X_train, y_train) = signdata.load_train_data()
img_width = X_test.shape[1]
img_height = X_test.shape[2]
# one hot encode outputs
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_train.shape[1]
# create model
model = Sequential()
model.add(Flatten())
model.add(Dense(num_classes, activation="softmax"))
model.compile(loss=config.loss, optimizer=config.optimizer,
metrics=['accuracy'])
# Fit the model
model.fit(X_train, y_train, epochs=config.epochs, validation_data=(X_test, y_test),
          callbacks=[WandbCallback(data_type="image", labels=signdata.letters)])
|
A step closer in NY?
Shed Craft Distillery permit in WA state - USA?
The law and the reality - how is distilling law enforced?
Legal "Loophole" for Distilling "at home"
I'm a hobby distiller and proud of it.
I had to take a break for a while due to too much talk.
Best State in the U.S.A. to start a craft distillery ?
|
def extractDreamsOfJianghu(item):
    """
    Parser for 'Dreams of Jianghu' release posts: maps the known series
    tags below to their series names and builds a release message for them.
    """
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
bad = ['pictures']
if any([(tmp in item['tags']) for tmp in bad]):
return None
tagmap = [
('TBVW', 'To Be A Virtuous Wife', 'translated'),
('WC', 'World of Cultivation', 'translated'),
('8TT', 'Eight Treasure Trousseau', 'translated'),
('4.6', '4.6 Billion Years Symphony of Evolution', 'translated'),
('Zuo Mo', 'World of Cultivation', 'translated'),
('lpj', 'Like Pearl and Jade', 'translated'),
('ZX', 'Zhui Xu', 'translated'),
('AUW', 'An Unyielding Wind', 'translated'),
('ADND', 'Ascending, Do Not Disturb', 'translated'),
('sd', 'Sword Dynasty', 'translated'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
The Grammys can recruit anyone to open the show. They picked Camila Cabello. Don’t know her? You should.
Last year #GrammysSoMale and Grammy president Neil Portnow saying “Women need to step up” dominated conversation. For the 61st Awards, Academy voters highlighted deserving women, artists of color, and passionate, progressive music resulting in a show (mostly) worth watching.
Looking at the lineup you could predict what would be amazing and what would be lousy (beer bong burnout Post Malone and a flat Red Hot Chili Peppers). Stuff you need to dig up on YouTube today: Best Rap Album champ Cardi B grinding on a grand piano so hard Madonna might have blushed on “Money,” Dolly Parton with half a dozen superstars doing everything from “Jolene” to “After the Gold Rush,” and Brandi Carlile performing “The Joke” like the folk ballad, country barn burner, gospel hymn and rock epic it is.
In keeping with the substance-and-style theme: early in the night, Childish Gambino’s “This Is America” – a song and video that examined the ugly intersections of race, racism, violence, capitalism, art, and entertainment – won best music video and best rap/sung performance.
Many more of the biggest, best and most deserving names in music took home golden gramophones. Maybe the most nuanced and visceral songwriter working today, Carlile nabbed best American roots performance, American roots song and Americana album. Right behind her, Kacey Musgraves won huge with best country album, country song and country solo performance awards. Lady Gaga went home with best pop solo performance, pop duo/group performance and song written for visual media.
|
#!/usr/bin/python
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Uses llvm tool pnacl-bccompress to add abbreviations into the input file.
# It runs pnacl-bccompress multiple times, using a hill-climbing solution
# to try and find a good local minima for file size.
from __future__ import print_function
from driver_env import env
from driver_log import Log
import driver_tools
import pathtools
import os
import shutil
EXTRA_ENV = {
'INPUTS': '',
'OUTPUT': '',
'MAX_ATTEMPTS': '25',
'RETRIES': '3',
'SUFFIX': '-c',
'VERBOSE': '0',
}
PrepPatterns = [
( ('-o','(.*)'), "env.set('OUTPUT', pathtools.normalize($0))"),
( '--max-attempts=(.*)', "env.set('MAX_ATTEMPTS', $0)"),
( '--retries=(.*)', "env.set('RETRIES', $0)"),
( ('--suffix','(.*)'), "env.set('SUFFIX', $0)"),
( '--verbose', "env.set('VERBOSE', '1')"),
( '-v', "env.set('VERBOSE', '1')"),
( '(-.*)', driver_tools.UnrecognizedOption),
( '(.*)', "env.append('INPUTS', pathtools.normalize($0))"),
]
def Compress(f_input, f_output):
""" Hill climb to smallest file.
This code calls pnacl-compress multiple times to attempt to
compress the file. That tool works by adding abbreviations that
have a good likelyhood of shrinking the bitcode file. Unfortunately,
any time abbreviations are added to PNaCl bitcode files, they can
get larger because the addition of more abbreviations will require
more bits to save abbreviation indices, resulting in the file
actually increasing in size.
To mitigate this, this driver hill climbs assuming that there
may be local minima that are the best solution. Hence, anytime
a local minima is reached, an additional number of attempts
to see if we can find a smaller bitcode file, which implies
you are moving closer to another local minima.
"""
verbose = env.getbool('VERBOSE')
# Number of times we will continue to retry after finding local
# minimum file size.
# max_retry_count: The maximum number of retries.
# retry_count: The number of retries left before we give up.
max_retry_count = int(env.getone('RETRIES'))
retry_count = max_retry_count
if max_retry_count < 1:
Log.Fatal("RETRIES must be >= 1")
# The suffix to append to the input file, to generate intermediate files.
# test_suffix: The prefix of the suffix.
  # test_index: The index of the current test file (appended to test_suffix).
test_suffix = env.getone('SUFFIX')
test_index = 1
# The maximum number of times we will attempt to compress the file before
# giving up.
max_attempts = int(env.getone('MAX_ATTEMPTS'))
if max_attempts < 1:
Log.Fatal("MAX_ATTEMPTS must be >= 1")
# The result of the last attempt to compress a file.
# last_file: The name of the file
# last_size: The number of bytes in last_file.
# last_saved: True if we did not remove last_file.
last_file = f_input
last_size = pathtools.getsize(f_input)
last_saved = True
# Keeps track of the current best compressed file.
current_smallest_file = last_file
current_smallest_size = last_size
while max_attempts > 0 and retry_count > 0:
next_file = f_input + test_suffix + str(test_index)
if verbose:
print("Compressing %s: %s bytes" % (last_file, last_size))
driver_tools.Run('"${PNACL_COMPRESS}" ' + last_file + ' -o ' + next_file)
next_size = pathtools.getsize(next_file)
if not last_saved:
os.remove(last_file)
if next_size < current_smallest_size:
old_file = current_smallest_file
current_smallest_file = next_file
current_smallest_size = next_size
if (f_input != old_file):
os.remove(old_file)
retry_count = max_retry_count
next_saved = True
else:
next_saved = False
retry_count -= 1
last_file = next_file
last_size = next_size
last_saved = next_saved
max_attempts -= 1
test_index += 1
# Install results.
if verbose:
print("Compressed %s: %s bytes" % (last_file, last_size))
print("Best %s: %s bytes" % (current_smallest_file,
current_smallest_size))
if not last_saved:
os.remove(last_file)
if (f_input == f_output):
if (f_input == current_smallest_file): return
    # On Windows, os.rename/shutil.move raise an error when the destination
    # already exists, and f_input does exist here, so move it aside first.
f_temp = f_input + test_suffix + "0"
shutil.move(f_input, f_temp)
shutil.move(current_smallest_file, f_input)
os.remove(f_temp)
elif f_input == current_smallest_file:
shutil.copyfile(current_smallest_file, f_output)
else:
shutil.move(current_smallest_file, f_output)
def main(argv):
env.update(EXTRA_ENV)
driver_tools.ParseArgs(argv, PrepPatterns)
inputs = env.get('INPUTS')
output = env.getone('OUTPUT')
for path in inputs + [output]:
driver_tools.CheckPathLength(path)
if len(inputs) != 1:
Log.Fatal('Can only have one input')
f_input = inputs[0]
# Allow in-place file changes if output isn't specified.
if output != '':
f_output = output
else:
f_output = f_input
Compress(f_input, f_output)
return 0
def get_help(unused_argv):
script = env.getone('SCRIPT_NAME')
return """Usage: %s <options> in-file
This tool compresses a pnacl bitcode (PEXE) file. It does so by
generating a series of intermediate files. Each file represents
an attempt to compress the previous file in the series. Uses
hill-climbing to find the smallest file to use, and sets the
output file to the best found case.
The options are:
-h --help Display this output
-o <file> Place the output into <file>. Otherwise, the
input file is modified in-place.
--max-attempts=N Maximum number of attempts to reduce file size.
--retries=N Number of additional attempts to try after
                    a local minimum is found before quitting.
--suffix XX Create intermediate compressed files by adding
suffix XXN (where N is a number).
-v --verbose Show generated intermediate files and corresponding
sizes.
""" % script
|
We foster long-lasting partnerships with our patients through our commitment to providing them with only the best in healthcare solutions. We strive to meet the requirements of all family members, and we work with each client on an individual basis to determine his or her needs.
On a case-by-case basis, we find the solutions that will best suit our patients' health-related, financial, and social demands. All our employees are committed to and believe in providing an open, supportive, and safe work environment in order to ensure the ongoing success of our patients' health and well-being – and that of our practice.
|
'''
USB definitions and enumerations from the USB spec.
'''
# USB.py
#
# Contains definition of USB class, which is just a container for a bunch of
# constants/enums associated with the USB protocol.
#
# TODO: would be nice if this module could re-export the other USB* classes so
# one need import only USB to get all the functionality
class DescriptorType(object):
device = 0x01
configuration = 0x02
string = 0x03
interface = 0x04
endpoint = 0x05
device_qualifier = 0x06
other_speed_configuration = 0x07
interface_power = 0x08
bos = 0x0f
device_capability = 0x10
hid = 0x21
report = 0x22
cs_interface = 0x24
cs_endpoint = 0x25
hub = 0x29
class USB(object):
feature_endpoint_halt = 0
feature_device_remote_wakeup = 1
feature_test_mode = 2
# while this holds for HID, it may not be a correct model for the USB
# ecosystem at large
if_class_to_desc_type = {
0x03: DescriptorType.hid,
0x0b: DescriptorType.hid
}
class State(object):
detached = 0
attached = 1
powered = 2
default = 3
address = 4
configured = 5
suspended = 6
class Request(object):
direction_host_to_device = 0
direction_device_to_host = 1
type_standard = 0
type_class = 1
type_vendor = 2
recipient_device = 0
recipient_interface = 1
recipient_endpoint = 2
recipient_other = 3
def interface_class_to_descriptor_type(interface_class):
return USB.if_class_to_desc_type.get(interface_class, None)
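# Example (hypothetical usage): resolve the class-specific descriptor type
# for a HID interface; classes without a known mapping yield None.
#
#   interface_class_to_descriptor_type(0x03)  # -> DescriptorType.hid (0x21)
#   interface_class_to_descriptor_type(0xff)  # -> None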
|
The Luminor Due is the thinnest and most versatile of the manufacturer’s creations. Panerai presents the Luminor Due 3 Days Automatic models with a stainless steel or red gold case, in sizes of 42 mm and 38 mm in diameter. The new Luminor Due 3 Days Automatic watches are fitted with the OP XXXIV Manufacture calibre, with automatic winding and a power reserve of three days.
|
from django.db import models
from django.http import Http404
from sigma_core.models.user import User
class GroupMember(models.Model):
"""
Modelize a membership relation between an User and a Group.
"""
class Meta:
# TODO: Make a primary key once Django supports it
unique_together = (("user", "group"),)
user = models.ForeignKey('User', related_name='memberships')
group = models.ForeignKey('Group', related_name='memberships')
created = models.DateTimeField(auto_now_add=True)
join_date = models.DateField(blank=True, null=True)
leave_date = models.DateField(blank=True, null=True)
    # If is_super_administrator is True then is_administrator must also be
    # True; administrators must have all the rights below.
is_administrator = models.BooleanField(default=False)
is_super_administrator = models.BooleanField(default=False)
can_invite = models.BooleanField(default=False)
can_be_contacted = models.BooleanField(default=False)
can_publish = models.BooleanField(default=False)
can_kick = models.BooleanField(default=False)
is_accepted = models.BooleanField(default=False)
can_modify_group_infos = models.BooleanField(default=False)
# Related fields:
# - values (model GroupMemberValue)
def __str__(self):
return "User \"%s\" in Group \"%s\"" % (self.user.__str__(), self.group.__str__())
# Perms for admin site
def has_perm(self, perm, obj=None): # pragma: no cover
return True
def has_module_perms(self, app_label): # pragma: no cover
return True
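# Example (hypothetical query): list the accepted administrators of a group.
#
#   GroupMember.objects.filter(group=some_group,
#                              is_administrator=True,
#                              is_accepted=True)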
|
Book Description: Lady Elanna Valtai is fiercely devoted to the King who raised her like a daughter. But when he dies under mysterious circumstances, Elanna is accused of his murder and must flee for her life.
Review: First off, thank you to the publisher and Goodreads for providing me this book through a give away! I also read a portion of it through an e-book ARC provided by NetGalley. You know, cuz I need to be able to read the book at ANY GIVEN MOMENT and thus need copies available in every format.
Anywho! On to the review! Beyond the beautiful cover (yes, I do judge a book by its cover when it suits me, thank you very much), I was instantly intrigued after reading the story synopsis. It sounded like an appealing mix of political intrigue, manners and etiquette, and, of course, magic. And while it was all of those things, there were a few stumbling blocks along the way.
First off, the political intrigue. It became very clear early in the book that the author was drawing inspiration from the Jacobite rebellion between Scotland and England to create the history and heart of the conflict in her story. There are two countries occupying an island nation; one has been overthrown in recent history, but still hopes to put its own chosen leader on the throne and regain independence for its portion of the country. Obviously, there’s much more to it than this, but at its core, it’s fairly straightforward. I was very pleased with this portion of the story. It was interesting finding similar threads to real history sprinkled within this fantasy novel, especially when those threads diverged from the path with which we are familiar.
Bates clearly had a lot of world building she was trying to pack into this novel. Beyond these tie-ins to the Jacobite rebellion, there’s a complicated history that goes back centuries before it, involving not only these two nations, but another powerful nation that had conquered the entire region at one point and then retreated again. Detailed histories like this make a story interesting, but they also present a challenge to authors. All too often books end up with large info-dumps presenting all of these details, which no one loves. But here, we saw the opposite side of the coin. I was a good 150 pages into this story and was still trying to work out the timeline of who conquered whom, when, and why. At a certain point, it was so frustrating that I simply gave up trying to understand. I hesitate to recommend more info-dumping, but in circumstances like this, it’s probably a better option than sprinkling in details throughout a long-ish book where much of the plot revolves around the political implications of this history and readers end up just confused.
I did love the magical set up that was brought into the story. Sure there was the cool magic that Elanna was able to create, but the more interesting part was, again, the detailed framework and history behind her power. Not only are her powers needed for the rebellion, but the symbol that she represents as a corner of the tri-part governing force that traditionally ruled the land is highly motivating to the people.
I had mixed feelings with regards to Elanna herself. Her history (the stolen child of a failed rebel leader being held to keep the other side in check) is one that sets her up to have many conflicting feelings and views of those around her. Things like family, friendship, and even national loyalty are all tied together in knots. She feels abandoned by one family, guilty for developing attachments to her captors, questions everyone’s motives around her, questions her own loyalties. Much of this was very interesting and created a rich character arc for her to travel. Unfortunately, all too often she would perform complete 180s on a dime with very little explanation for why she changed her mind. She hates her father! She’ll join her father in this rebellion! Also, while the stress and frustration that would arise from her situation is understandable, at times she read as very unlikable and immature. I never could quite decide how I felt about her. Ultimately, I think I was more invested in the story that she was living than in her as a character on her own.
So there are my thoughts! To be summed up, I was very conflicted with this book. It had true moments of brilliance with a unique and complicated history, both political and magical, and the main character also had flashes of greatness. But I was also all too often confused by the same history and frustrated with Elanna herself. I would still likely recommend this book to readers who enjoy historical “fantasy of manners” type books based on its strengths. Want to judge for yourself? Enter our giveaway to receive an ARC of this book!
Rating 6: Had so many things going on (complicated history, complicated characters) that it didn’t quite manage to fully flesh it all out.
Find “The Waking Land” at your library using WorldCat!
And my TBR list gets bigger. Gotta pick this up. Very interesting plot.
|
import os, json
'''
This file is part of gitiscool.
gitiscool is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
gitiscool is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with gitiscool. If not, see <http://www.gnu.org/licenses/>.
'''
class Repo:
def __init__(self):
self.id = None
self.owner = None #GitUser
self.main_repo = False
self.name = None
self.forks = []
def __str__(self):
output = "id: %s\nname: %s\nowner: %s" % (str(self.id), self.name, self.owner.username)
return output
def add_fork(self, fork):
self.forks.append(fork)
class GitUser:
def __init__(self):
self.id = None
self.username = None
self.avatar_url = None
self.email = None
class Student(GitUser):
    def __init__(self):
        GitUser.__init__(self)  # initialize the inherited GitUser fields too
        self.repo = None
class Problem:
def __init__(self):
self.number = None
self.description = None
self.committed_score = None
self.first_solution_score = None
self.datetime = None
class Solution:
def __init__(self):
self.problem = None
self.student = None
self.is_first_solution = False
self.datetime = None
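# Example (hypothetical usage): build a repository with one fork.
#
#   owner = GitUser(); owner.username = "alice"
#   repo = Repo(); repo.id = 1; repo.name = "gitiscool"; repo.owner = owner
#   fork = Repo(); fork.id = 2; fork.name = "gitiscool"; fork.owner = owner
#   repo.add_fork(fork)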
|
The movie subscription service has contributed $26 million to films competing in the best picture race and other top categories.
MoviePass said Thursday it has exceeded 2 million subscribers, who pay $9.95 a month to see as many films as they want.
The news came one day after MoviePass made headlines when it issued a press release saying it is driving people to see Oscar-nominated films they might otherwise skip. As a result, the movie subscription service said it has contributed $128.7 million in box-office revenue to nominated films, a hefty sum.
Several hours later, however, MoviePass CEO Mitch Lowe issued a second statement clarifying the statistic. He explained that the $128 million figure includes the "halo effect," i.e., tickets purchased by consumers accompanying MoviePass subscribers to the cinema, as well as tickets bought by customers who were referred by MoviePass patrons.
In terms of revenue directly coming from MoviePass, however, the share the company has contributed to Oscar films is closer to $26 million, according to data provided by MoviePass. That excludes MoviePass revenue for such titles as Star Wars: The Last Jedi, The Greatest Showman, Blade Runner 2049 and Wonder, since none of those films are nominated in top categories, such as best picture, best director or best actor. In other words, Oscar nominations aren't necessarily driving people to see those movies, as is the case with such specialty players as best picture contenders The Shape of Water or Three Billboards Outside Ebbing, Missouri, among others. If the tally includes all the films that received nominations, MoviePass said its share is $48 million.
To date, Guillermo del Toro's The Shape of Water has earned $46.3 million domestically; MoviePass has kicked in $3.6 million. Three Billboards' current total is $42.8 million; MoviePass' share is roughly $3 million.
The company's customer base has grown exponentially since August — when it lowered the monthly price to $9.95 — from 20,000 subscribers to 1 million in four months. By the end of January, it had 1.5 million subs, before clearing 2 million in recent days.
Offering 30 admissions for the price of one each month has attracted skepticism, however, since MoviePass pays full price for the tickets its members use (except at a few theater partners). AMC, with 8,000-plus screens in the U.S., has sought legal counsel in its quest to opt out of the service, and Adam Aron, its CEO, said in November that AMC has "no intention of sharing any — I repeat, any — of our admissions revenue or our concessions" with MoviePass.
MoviePass is a majority-owned subsidiary of Helios and Matheson Analytics.
|
# Initialization
import Functions     # Function definitions
import Names         # Random names
import Vars          # Game variables
import AllCountries  # List of real countries
import sys           # System library
import random        # Random number generator

# Interactive initialization
Vars.is_lose = random.choice([True, False])  # Whether the player will win or lose
print("")
print("Revolution v1.0")
print("-----")
print("Введите своё имя: ", end='')
Vars.MyPlayer.name = input()  # Player name
print("Введите свой возраст (от 14 до 50 лет): ", end='')
age_temp = int(input())
if age_temp < 14:
print("Маленький ещё страной управлять!")
sys.exit()
elif age_temp > 50:
print("Староват уже.")
sys.exit()
Vars.MyPlayer.age = int(age_temp)  # Player age
Vars.MyCountry = random.choice(AllCountries.allcountries)  # Player's country
print("Ваша страна - ", Vars.MyCountry.name)
Vars.MyPlayer.place = Vars.MyCountry.capital  # Player's location
print("Введите количество ботов: ", end='')
bots = int(input())  # Number of bots
for j in range(bots):  # Add the bots
    Vars.countries.append(random.choice(AllCountries.allcountries))
for q in range(5):  # "Appoint" the ministers
    Vars.MyCountry.min_now[q] = Names.random_name()
Functions.gen_gamemap()  # Generate the game map

# Game loop
while 1:
    # Print the main information
print("")
print("Год:", Vars.year)
print("Ваш возраст:", Vars.MyPlayer.age)
print("Ваша популярность:", Vars.MyPlayer.popular)
print("Денег в казне:", Vars.MyCountry.money, "руб.")
print("Население страны:", Vars.MyCountry.population, "чел.")
print("Личных денег:", Vars.MyPlayer.money, "руб.")
print("Вы находитесь в:", Vars.MyPlayer.place)
print("Новости:", Vars.news)
print("-----")
print("Для помощи напишите 'помощь' (без кавычек)")
print("Введите команду: ", end='')
    # Read and process a command
    command = input()  # Command input
if command == "конец хода":
Functions.next_year()
elif command == "министры":
print("")
print("Кабинет министров:")
print("Премьер-министр:", Vars.MyCountry.min_now[0], "| Уровень:", Vars.MyCountry.min_stats[0])
print("Министр внутренних дел:", Vars.MyCountry.min_now[1], "| Уровень:", Vars.MyCountry.min_stats[1])
print("Министр финансов:", Vars.MyCountry.min_now[2], "| Уровень:", Vars.MyCountry.min_stats[2])
print("Министр иностранных дел:", Vars.MyCountry.min_now[3], "| Уровень:", Vars.MyCountry.min_stats[3])
print("Министр народного просвещения:", Vars.MyCountry.min_now[4], "| Уровень:", Vars.MyCountry.min_stats[4])
elif command == "сменить министра":
Functions.change_min()
elif command == "выступить":
for x in range(10):
print(Names.random_phrase(), '.')
elif command == "выход":
sys.exit()
elif command == "помощь":
print('помощь, выход, конец хода, министры, сменить министра, выступить, тайл, карта')
elif command == "карта":
Functions.draw_gamemap()
elif command == "тайл":
print("Введите строку: ", end='')
y = int(input())
print("Введите столбец: ", end='')
x = int(input())
tile = Vars.gamemap[Functions.xy_to_index(x, y)]
print("Страна: " + str(tile.Country.name))
print("Защита: " + str(tile.Defence))
print("Армия: " + str(tile.Army))
elif command == "перебросить":
print("Введите строку (откуда): ", end='')
y1 = int(input())
print("Введите столбец (откуда): ", end='')
x1 = int(input())
print("Введите строку (куда): ", end='')
y2 = int(input())
print("Введите столбец (куда) ", end='')
x2 = int(input())
print("Сколько войск перебросить: ", end='')
n = int(input())
Functions.move(Functions.xy_to_index(x1, y1), Functions.xy_to_index(x2, y2), n)
elif command == "атаковать":
print("Введите строку (откуда): ", end='')
y1 = int(input())
print("Введите столбец (откуда): ", end='')
x1 = int(input())
print("Введите строку (куда): ", end='')
y2 = int(input())
print("Введите столбец (куда) ", end='')
x2 = int(input())
print("Сколько человек послать: ", end='')
Functions.attack(Functions.xy_to_index(x1, y1), Functions.xy_to_index(x2, y2), n)
else:
print("Нет такой команды!")
|
import os
import logging
import glob
import bot
class Kaloot:
def __init__(self, args):
self.nicknames = self.parse_nicknames(args)
def parse_nicknames(self, args):
nickname_files = None
nickname_path = '%s/nicknames' % os.path.dirname(__file__)
if args.nicknames is None:
nickname_files = glob.glob('%s/*.txt' % nickname_path)
elif args.nicknames != 'None':
nickname_files = []
for filename in args.nicknames.split(','):
if os.path.isfile(filename):
nickname_files.append(filename)
else:
nickname_file = '%s/%s.txt' % (nickname_path, filename)
if os.path.isfile(nickname_file):
nickname_files.append(nickname_file)
if nickname_files:
return self.fetch_nicknames(nickname_files)
@staticmethod
def fetch_nicknames(nickname_files):
nicknames = set()
for filename in nickname_files:
try:
with open(filename) as f:
nicknames |= set([
nickname.strip() for nickname in f.readlines() if len(nickname.strip()) > 0
])
            except FileNotFoundError:
                logging.error('Nickname file %s not found' % filename)
return list(nicknames)
class RandomKaloot(Kaloot):
def __init__(self, args):
super(RandomKaloot, self).__init__(args)
threads = []
for i in range(args.n):
arguments = {
'index': i,
'game_id': args.game,
'prefix': args.prefix,
'delay': args.delay
}
if self.nicknames is not None and i < len(self.nicknames):
arguments['nickname'] = self.nicknames[i]
threads.append(bot.RandomBot(**arguments))
threads[i].start()
for i in range(args.n):
threads[i].join()
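# Example (hypothetical invocation, assuming an argparse front-end supplies
# these options): flood game 123456 with 5 bots using the bundled nickname
# lists.
#
#   python kaloot.py --game 123456 -n 5 --prefix bot --delay 0.5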
|
It seems that automatic outbound NAT rule generation is not working as expected.
It only works when I select manual outbound NAT rule generation and create my own rules.
Tested with fresh installations of OPNsense 18.7.6 and 18.7.7.
Is anyone else facing the same problem?
Can you define "not working" a bit more for us?
And what does the automatic rule look like, and what is the manual rule?
After I create a manual outbound NAT rule, my hosts get internet access.
The expected result is attached.
So you are saying automatic outbound rules aren't created for your LAN anymore since a version 18.7.x? Do you know which particular version for reference?
Yes, the automatic outbound rules aren't created for the LAN.
Well, I have tested 18.7.6 and 18.7.7, and neither works.
I have an 18.7.3 installation that is working.
It seems the problem appears after 18.7.5.
We have tested 18.7.6 and 18.7.7 and both show the same behavior, but 18.7.5 does not.
I can confirm this issue exists in 19.1.x; specifically if you follow the guidance on interfaces page regarding gateway selection for WAN, automatic outbound NAT rules are not created, but manual ones work; if you select the default (only) gateway automatic outbound NAT rules work as expected.
The firewall on which this was diagnosed worked in an initial test bench situation; the only difference will have been WAN changed to manual from DHCP (and a gateway added) and additional LAN/SSN network interfaces added (but not yet in use).
Can't rule out there is still an issue left, but would you mind going through the history and letting me know what differs in your case?
I can confirm this issue exists in 19.1.4. I have updated a v18.7.10 today to v19.1.4 and have no outbound NAT automatic rules and must create it manually.
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
AUTHOR
Written by Vincent MAILLOL (modified by Gautier Sarah)
BUGS
[email protected]
COPYRIGHT
Copyright © 2011 DAVEM, 2014 AGAP. Licence GPLv3+: GNU
GPL version 3 or higher <http://gnu.org/licenses/gpl.html>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/> or
write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import sys, os
sys.path.append("")
from davem_fastq import Fastq_read, Fastq_file
import argparse
from itertools import izip
from bisect import bisect_left
if sys.version_info[0] == 2:
if sys.version_info[1] < 6:
msg = "ERROR: Python should be in version 2.6 or higher"
sys.stderr.write("%s\n\n" % msg)
sys.exit(1)
class FastqFileType( object ) :
"""
Fabrique Fastq_file
"""
def __init__( self, mode ) :
self.mode = mode
def __call__( self, path_name) :
return Fastq_file( path_name, self.mode )
class Selector( object ) :
"""
Abstract class to look for a line in a table_adaptator.
table_adaptator is like :
[ (adaptator-1, output-file-A1, output-file-B1),
(adaptator-2, output-file-A2, output-file-B2),
...
(adaptator-N, output-file-AN, output-file-BN)
]
In single end mode, table adaptator only have one output file by tuple
You must implement methods __single_select and __paired_select.
"""
def __init__(self, table_adaptator, single_end) :
"""
If single_end is True, a call to monSelector.select( sequence )
will execute the method _single_select_ otherwise, the call will be
monSelector.select( sequence-1, sequence-2 ) and will execute the
method _paired_select_
"""
self.table_adaptator = table_adaptator
if single_end :
self.select = self._single_select
else :
self.select = self._paired_select
def _single_select( self, sequence ) :
"""
Look for a line in table_adaptator with only one sequence
"""
raise NotImplementedError
def _paired_select( self, sequence_1, sequence_2 ) :
"""
Look for a line in table_adaptator with two sequences
"""
raise NotImplementedError
class Levenshtein_selector( Selector ) :
table_adaptator = None
single_end = False
rate = 0
def __init__( self, table_adaptator, single_end, rate ) :
if not isinstance( rate, float ) :
raise ValueError( "rate argument must be a float not %s" % type( rate ) )
Selector.__init__( self, table_adaptator, single_end)
self.rate = rate
def _single_select( self, sequence) :
from Levenshtein import ratio
distances = []
for (adaptator, output_file) in self.table_adaptator :
dist = ratio( adaptator, sequence[ : len( adaptator ) ] )
if dist == 1.0 :
return (adaptator, output_file)
distances.append( dist )
max_dist = max( distances )
if max_dist >= self.rate and distances.count( max_dist ) == 1 :
return self.table_adaptator[ distances.index( max_dist ) ]
return None
def _paired_select( self, sequence_1, sequence_2) :
from Levenshtein import ratio
distances_1 = []
distances_2 = []
for line in self.table_adaptator :
adaptator = line[ 0 ]
dist_1 = ratio( adaptator, sequence_1[ : len( adaptator ) ] )
dist_2 = ratio( adaptator, sequence_2[ : len( adaptator ) ] )
distances_1.append( dist_1 )
distances_2.append( dist_2 )
max_dist_1 = max( distances_1 )
max_dist_2 = max( distances_2 )
if max_dist_1 > max_dist_2 :
if max_dist_1 >= self.rate and distances_1.count( max_dist_1 ) == 1 :
return self.table_adaptator[ distances_1.index( max_dist_1 ) ]
elif max_dist_1 < max_dist_2 :
if max_dist_2 >= self.rate and distances_2.count( max_dist_2 ) == 1 :
return self.table_adaptator[ distances_2.index( max_dist_2 ) ]
else :
if max_dist_1 >= self.rate :
if distances_1.count( max_dist_1 ) == 1 :
index_1 = distances_1.index( max_dist_1 )
index_2 = distances_2.index( max_dist_2 )
if index_1 == index_2 :
return self.table_adaptator[ index_1 ]
elif distances_2.count( max_dist_2 ) == 1 :
index_1 = distances_1.index( max_dist_1 )
index_2 = distances_2.index( max_dist_2 )
if index_1 == index_2 :
return self.table_adaptator[ distances_2.index( max_dist_2 ) ]
return None
class LevenshteinAllSelector( Levenshtein_selector ) :
"""
Same as Levenshtein_selector except that in paired-end, both members
of the pair must be above or equal to the min ratio and adaptators of
both members must be identical
"""
def _paired_select( self, sequence_1, sequence_2) :
from Levenshtein import ratio
distances_1 = []
distances_2 = []
for line in self.table_adaptator :
adaptator = line[ 0 ]
dist_1 = ratio( adaptator, sequence_1[ : len( adaptator ) ] )
dist_2 = ratio( adaptator, sequence_2[ : len( adaptator ) ] )
distances_1.append( dist_1 )
distances_2.append( dist_2 )
max_dist_1 = max( distances_1 )
max_dist_2 = max( distances_2 )
if ( max_dist_1 >= self.rate and max_dist_2 >= self.rate
and distances_1.count( max_dist_1 ) == distances_2.count( max_dist_2 ) == 1 ) :
adapt_1 = self.table_adaptator[ distances_1.index( max_dist_1 ) ]
adapt_2 = self.table_adaptator[ distances_2.index( max_dist_2 ) ]
if adapt_1 == adapt_2 :
return adapt_1
else :
return None
class Std_selector( Selector ):
"""
Dichotomic search in list_adaptator
table_adaptator
If provided index is empty, return None
"""
def _paired_select( self, sequence_1, sequence_2):
l1 = self._single_select( sequence_1 )
l2 = self._single_select( sequence_2 )
if l1 is None :
return l2
if l2 is None :
return l1
if l1 == l2 :
return l1
return None
def _single_select( self, sequence):
a = 0
b = len( self.table_adaptator ) -1
if b == -1 :
return None
while a <= b :
m = ( a + b ) // 2
adaptator = self.table_adaptator[ m ][ 0 ]
start_seq = sequence[ : len( adaptator ) ]
if adaptator > start_seq :
b = m - 1
elif adaptator < start_seq :
a = m + 1
else :
return self.table_adaptator[ m ]
if adaptator == sequence[ : len( adaptator ) ] :
return self.table_adaptator[ m ]
return None
def get_adapt_counter( opened_adapt_file ) :
"""
Return a hash where keys are the adaptators
and values are initialized with [ name_tag, 0 ]
"""
d = {}
opened_adapt_file.seek(0)
for line in opened_adapt_file :
if not line.isspace() :
try :
adapt, name_tag = line.split()
except ValueError :
print >> sys.stderr, "File '%s' is malformed." % opened_adapt_file.name
exit( 1 )
d[ adapt ] = [ name_tag, 0 ]
return d
def get_maximal_annalogie( file_adapt ) :
"""
Compute maximal levenshtein between all adaptators
"""
from Levenshtein import ratio
adaptators = []
for line in file_adapt :
if line :
(adapt, name) = line.split()
if adapt != "*" :
adaptators.append( adapt )
ratio_max = 0.0
for i, adapt in enumerate( adaptators ) :
for adapt2 in adaptators[i+1:] :
ratio_max = max( ratio_max,ratio( adapt, adapt2 ) )
return ratio_max
def get_output_files( opened_adapt_file, prefix, paired_end=True ) :
"""
Create output files and put them in a list:
if paired_end is True, twa files by adaptator are created
[ (adaptator, output_file_1, output_file_2 ), ... ]
otherwise only one
[ (adaptator, output_file ), ... ]
The function return the files table and the Trash files
Two trash files for paired-end, one for single-end
( table, (trash-file, ) )
"""
ada_files = []
default = None
cache_name_file_by_adapt = {}
for line in opened_adapt_file :
if not line.isspace() :
try :
adapt, suffix_file = line.split()
except ValueError :
print >> sys.stderr, "File '%s' is malformed." % opened_adapt_file.name
exit( 1 )
if paired_end :
if line[0] == '*' :
default = ( Fastq_file( "%s-%s_1.fastq" % (prefix, suffix_file), "w" ),
Fastq_file( "%s-%s_2.fastq" % (prefix, suffix_file), "w" ), )
else :
if suffix_file in cache_name_file_by_adapt :
f1, f2 = cache_name_file_by_adapt[ suffix_file ]
ada_files.append( ( adapt, f1, f2 ) )
else :
f1 = Fastq_file( "%s-%s_1.fastq" % (prefix, suffix_file), "w" )
f2 = Fastq_file( "%s-%s_2.fastq" % (prefix, suffix_file), "w" )
ada_files.append( (adapt, f1, f2) )
cache_name_file_by_adapt[ suffix_file ] = (f1, f2)
else :
# TODO Make cache system for single mode.
if line[0] == '*' :
default = ( Fastq_file( "%s-%s.fastq" % (prefix, suffix_file), "w" ) , )
else :
if suffix_file in cache_name_file_by_adapt :
f1 = cache_name_file_by_adapt[ suffix_file ]
ada_files.append( ( adapt, f1 ) )
else:
f1 = Fastq_file( "%s-%s.fastq" % (prefix, suffix_file), "w" )
ada_files.append( ( adapt, f1 ) )
cache_name_file_by_adapt[ suffix_file ] = ( f1 )
if default is None :
print >> sys.stderr, "File '%s' doesn't have a line with the joker tag *.\nAdd a line '* tag_name_for_trash'." % opened_adapt_file.name
sys.exit(1)
ada_files.sort()
return ada_files, default
def parse_user_argument() :
"""
Recover user argument
"""
parser = argparse.ArgumentParser( description="demultiplex fastq_file(s)" )
parser.add_argument( '-V', '--version', action='version', help="Print the version and license",
version="%(prog)s 1.1\nCopyright (C) 2011 DAVEM, 2014 AGAP\nGPL3+\nWritten by Vincent Maillol" )
parser.add_argument( '-v', '--verbose', dest="verbose", action='store_true',
help="Be verbose" )
parser.add_argument( 'file_adapt', metavar="FILE_ADAPT", nargs=1, type=argparse.FileType('r'),
help="Format is one line by adaptor, such as: adaptor_1<tab>id_sample_1, etc. Last line should be like *<tab>name_trash")
parser.add_argument( '-f', '--fastq_1', dest="fastq_1", type=FastqFileType( "r" ), action='store',
help="For a single-end file or the first paired-end file" )
parser.add_argument( '-F', '--fastq_2', dest="fastq_2", type=FastqFileType( "r" ), action='store', default=None,
help="For the 2nd paired-end file" )
parser.add_argument( '-p', '--output_prefix', dest="output_prefix", default="", action='store',
help="Output files have name PREFIX-ADAPTOR.fastq" )
parser.add_argument( '-l', '--levenshtein', dest="levenshtein", action='store', type=float, default=None,
help="Use a Levenshtein distance to demultiplex" )
parser.add_argument( '-a', '--analogy', dest="analogy", action='store_true',
help="Compute the maximal Levenshtein ratio between adaptors" )
parser.add_argument( '--all', dest="all", action='store_true',
help="If this option is used with option levenshtein in paired-end, both members should be higher than the ratio and each should be close to one adaptor. If option levenshtein is not used, this option is not used either." )
user_args = parser.parse_args()
user_args.file_adapt = user_args.file_adapt[0]
user_args.single_end = user_args.fastq_2 is None
return user_args
def main() :
user_args = parse_user_argument()
if user_args.analogy :
print "Maximal Levenshtein ratio between adaptors is %f" % get_maximal_annalogie( user_args.file_adapt )
sys.exit(0)
output_files_by_adapt, defaults_files = get_output_files( user_args.file_adapt,
user_args.output_prefix,
not user_args.single_end )
nb_reads_writen = get_adapt_counter( user_args.file_adapt )
user_args.file_adapt.close()
if user_args.levenshtein :
if user_args.all :
select_output_file = LevenshteinAllSelector( output_files_by_adapt,
user_args.single_end,
user_args.levenshtein )
else :
select_output_file = Levenshtein_selector( output_files_by_adapt,
user_args.single_end,
user_args.levenshtein )
else :
select_output_file = Std_selector( output_files_by_adapt,
user_args.single_end )
if user_args.single_end :
print "single end"
default_file = defaults_files[0]
for str_read in user_args.fastq_1 :
read = Fastq_read( str_read )
adapt_and_line = select_output_file.select( read.seq )
if adapt_and_line is None :
if user_args.verbose :
print "Read '%s' start with %s... and go to *" % (read.name, read.seq[ : 14 ])
default_file.write( "%s" % str( read ) )
nb_reads_writen[ '*' ][ 1 ] += 1
else :
(adapt, output_file) = adapt_and_line
if user_args.verbose :
print "Read '%s' start with %s... and go to %s" % (read.name, read.seq[ : len( adapt ) ], adapt)
read.cut_start( len( adapt ) )
output_file.write( "%s" % str( read ) )
nb_reads_writen[ adapt ][ 1 ] += 1
user_args.fastq_1.close()
for adapt, output_file in output_files_by_adapt :
if not output_file.closed:
output_file.write("")
output_file.close()
if not default_file.closed:
default_file.write("")
default_file.close()
else :
print "paired-end"
(default_file_1, default_file_2) = defaults_files
for str_read_1, str_read_2 in izip( user_args.fastq_1, user_args.fastq_2 ) :
read_1 = Fastq_read( str_read_1 )
read_2 = Fastq_read( str_read_2 )
adapt_and_line = select_output_file.select( read_1.seq, read_2.seq )
if adapt_and_line is None :
default_file_1.write( "%s" % str( read_1 ) )
default_file_2.write( "%s" % str( read_2 ) )
nb_reads_writen[ '*' ][1] += 1
else :
(adapt, output_file_1, output_file_2 ) = adapt_and_line
read_1.cut_start( len( adapt ) )
read_2.cut_start( len( adapt ) )
output_file_1.write( "%s" % str( read_1 ) )
output_file_2.write( "%s" % str( read_2 ) )
nb_reads_writen[ adapt ][1] += 1
user_args.fastq_1.close()
user_args.fastq_2.close()
for adapt, file_1, file_2 in output_files_by_adapt :
if not file_1.closed:
file_1.write("")
file_1.close()
if not file_2.closed:
file_2.write("")
file_2.close()
if not default_file_1.closed:
default_file_1.write("")
default_file_1.close()
if not default_file_2.closed:
default_file_2.write("")
default_file_2.close()
# show stat.
for nb_reads_by_name in nb_reads_writen.values() :
print "%s %d reads" % tuple( nb_reads_by_name )
if __name__ == '__main__':
main()
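# Example adaptor file (hypothetical contents for FILE_ADAPT; columns are
# tab-separated, and the final '*' line names the trash output):
#
#   ACGTAC	sample_1
#   TGCATG	sample_2
#   *	undetermined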
|
Channelplay's Mystery Shopping services help you objectively measure the quality of your customers' experience at retail stores, restaurants and service locations such as branches and service centers. Our Retail Audit services help you measure compliance with policies and SOPs, and our Stock Audit services help you measure and reconcile inventory with your records. We use cutting-edge technology to ensure accuracy and reliability of data collection, and our highly experienced and trained secret shoppers and auditors help find deep insights to help you improve your business.
From Apple to McDonald's, the world's most successful brands scale through their ability to standardize. New York or New Delhi, Manchester or Mysore - walk into one of their stores, and you can expect the same experience and quality of service.
Ensuring standardization takes a relentless focus on measurement of customer experience, and mystery shopping is a key tool in the kit.
A lot of money is spent building beautiful stores, restaurants and branches. Brands who have franchise partners and retail networks invest in fixtures, furniture & signage, demo units and branded displays.
All this expense can not only go to waste, but can be counterproductive if your locations and infrastructure are not well maintained. Our retail audit service ensures that doesn't happen.
An accurate picture of stock in the supply chain is important to ensure correct accounting, manage channel incentivization, maintain freshness and quality and minimize shrinkage through loss or theft.
Channelplay's stock audit service provides accurate and reliable stock and inventory measurement across the country.
Why We're The Leading Mystery Shopping & Audit Company in India.
Some of the world's most successful companies rely on us to execute their shopper marketing initiatives. We leverage that experience to design and execute better mystery shopping programs.
Our cutting edge technology platform ensures that shoppers that are right for a project are deployed, that information collection is reliable and real-time, and that analytics are rich and insightful.
We currently deliver over 8,000 mystery shops and audits across 300 cities every month, using a network of freelance mystery shoppers spanning every socio-economic classification and a team of full-time, highly trained auditors.
We use rigorous shopper selection processes, train and certify shoppers on every project before they start work, and verify each response received for accuracy and insightfulness before reporting to clients.
|
#!/usr/bin/python
# Combat Simulator Project
# Copyright (C) 2002-2005 The Combat Simulator Project
# http://csp.sourceforge.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Combat Simulator Project : Utils script
"""
import csp.cspsim
class SlotProxy(csp.cspsim.Slot):
def __init__(self, method):
csp.cspsim.Slot.__init__(self)
self.method = method
def notify(self, data):
self.method()
class SlotManager:
def __init__(self):
self.slots = []
def connectToClickSignal(self, control, method):
signal = control.getClickSignal()
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
def connectToCheckedChangedSignal(self, control, method):
signal = control.getCheckedChangedSignal()
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
def connectToSelectedItemChangedSignal(self, control, method):
signal = control.getSelectedItemChangedSignal()
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
def connectToInputInterfaceAction(self, cspsim, action, method):
gameScreen = cspsim.getCurrentScreen()
interfaceAdapter = gameScreen.getInputInterfaceWfAdapter()
signal = interfaceAdapter.registerActionSignal(action)
slot = SlotProxy(method)
signal.connect(slot)
self.slots.append(slot)
class ListBoxManager:
def addListToControl(self, listbox, texts):
for text in texts:
listBoxItem = csp.cspsim.ListBoxItem()
listBoxItem.setText(text)
listbox.addItem(listBoxItem)
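# Example (hypothetical usage): connect a UI button's click signal to a
# plain Python callback via a SlotManager.
#
#   slots = SlotManager()
#   slots.connectToClickSignal(quit_button, onQuitClicked)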
|
The Ship on the Shore has a tall reputation as a great purveyor of seafood delights. Situated on The Shore at Leith, it has a lot of local competition in one of the city’s finest restaurant districts, but has carved out a niche that ensures it’s always busy.
The setting is heavy on wood and gold-framed artworks, with nautical chart wallpaper and stained glass above the bar. It’s essentially an old-style pub, but the fare on offer is a cut above that. It’s a speciality seafood place, with quite an array of fishy delights to tempt your palate.
On today’s quest, I was joined by Iain and Aileen, who are soon to be married. I’ll be doing a reading at their wedding, in June. This is equal parts an honour (for me) and a terrifying prospect (for those attending the wedding).
I arrived a little early, so was able to secure a table. Iain and Aileen soon arrived and we assessed our options from their extensive lunch and daytime menu. Some of the seafood platters being sampled by our fellow diners, including a fruits de mer “royale” for the princely sum of £85, looked incredibly tempting. I succumbed and ordered a more moderately priced platter to follow a bowl of cullen skink.
My dining companions opted for beer battered haddock and chips to follow a shared portion of fishcakes.
The cullen skink was rather underwhelming. It was good and chunky, but just a bit flat. Service was also a trifle muted. There was nothing particularly wrong with it, but it felt rather stand-offish.
My main course, thankfully, was a completely different matter. It was a truly dazzling array of seafood delights, with each item prepared simply and with expert accuracy.
Featured items included prawns, clams, oysters, mussels, smoked salmon, herring, crayfish, and Arbroath smokie, all of which were truly excellent.
I’m glad to have finally sampled the Ship on the Shore’s seafood goodness. As a place to send seafood lovers, I think this would be top of my list of recommendations. The place didn’t quite provide me with as satisfying an overall experience as it might have, but there’s no denying that the quality of the produce on offer is quite exceptional.
|
#!/usr/bin/env python3
import json
import base64
import binascii
import csv
from datetime import datetime
with open ( 'data.json' ) as data_file:
data = json.load ( data_file )
lenData = len ( data["points"] )
hexData = []
for i in range(0, lenData):
    hexData.append(binascii.b2a_hex(binascii.a2b_base64(data["points"][i]["data"])))
led = []
pressure = []
temperature = []
altitude = []
battery = []
latitude = []
longitude = []
elevation = []
time = []
delta = []
for i in range ( 0, lenData ):
led.append(i)
pressure.append(i)
temperature.append(i)
altitude.append(i)
battery.append(i)
latitude.append(i)
longitude.append(i)
elevation.append(i)
time.append(i)
led[i] = int(hexData[i][:2], 16)
pressure[i] = int(hexData[i][2:-26], 16) * 10.0
temperature[i] = int(hexData[i][6:-22], 16) / 100.0
altitude[i] = int(hexData[i][10:-18], 16) / 10.0
battery[i] = (int(hexData[i][14:-16], 16) / 255.0)
latitude[i] = hexData[i][16:-10]
if int(latitude[i],16) & 0x800000:
latitude[i] = ( ( int(latitude[i],16) & 0x00FFFFFF ) + 1 ) * -90.0 / 0x800000;
else:
latitude[i] = int(latitude[i],16) * 90.0 / 0x7FFFFF;
longitude[i] = hexData[i][22:-4]
if int(longitude[i],16) & 0x800000:
longitude[i] = ( ( int(longitude[i],16) & 0x00FFFFFF ) + 1 ) * -180.0 / 0x800000;
else:
longitude[i] = int(longitude[i],16) * 180.0 / 0x7FFFFF;
elevation[i] = hexData[i][28:]
time[i] = datetime.strptime(data["points"][i]["time"][11:][:8], '%H:%M:%S')
startTime = min(time)
for i in range ( 0, lenData ):
delta.append(i)
delta[i] = time[i] - startTime
print ( led[0] )
print ( pressure[0] )
print ( temperature[0] )
print ( altitude[0] )
print ( battery[0] )
print ( latitude[0] )
print ( longitude[0] )
print ( elevation[0] )
print ( time[0])
print ( hexData[0] )
print ( hexData[lenData - 1])
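# The latitude/longitude fields are 24-bit two's-complement values scaled to
# +/-90 and +/-180 degrees respectively; a sketch of the decode rule used in
# the loop above (decode_coord is a hypothetical helper name):
#
#   def decode_coord(hex_str, full_scale):
#       raw = int(hex_str, 16)
#       if raw & 0x800000:  # sign bit set -> negative value
#           return ((raw & 0x00FFFFFF) + 1) * -full_scale / 0x800000
#       return raw * full_scale / 0x7FFFFF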
# Note: in Python 3 the csv module needs a text-mode file opened with
# newline=''; also avoid shadowing the csv module with the writer object.
with open('data.csv', 'w', newline='') as csvfile:
    writer = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
    writer.writerow(['delta', 'time', 'node_eui', 'gateway_eui', 'led', 'pressure', 'temperature', 'altitude', 'battery', 'latitude', 'longitude', 'elevation'])
    for i in range(0, lenData):
        writer.writerow([delta[i], time[i], data["points"][i]["node_eui"], data["points"][i]["gateway_eui"], led[i], pressure[i], temperature[i], altitude[i], battery[i], latitude[i], longitude[i], elevation[i]])
# print ("second time")
#
#
# with open('map.csv', 'wb') as csvfile:
# csv = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
# # csv.writerow(['latitude', 'longitude'])
# #
# # for i in range ( 0, lenData ):
# # csv.writerow([latitude[i], longitude[i]])
# csv.writerow(['latitude', 'longitude'])
#
# for i in range ( 0, lenData ):
# csv.writerow([latitude[i], longitude[i]])
# time = [h , m, s]
# for i in range ( 0, lenData ):
# time.append(i)
# time0h = int(data["points"][0]["time"][11:-14])
# time0m = int(data["points"][0]["time"][14:-11])
# time0s = int(data["points"][0]["time"][17:-8])
#time1 = data["points"][10]["time"][11:-8]
|
Great, cheap, low CAS timing, and reliable, couldn't ask for more from DDR3!
I wish the PCB was black, but it looks good and performs well.
Cheap, and it works. I don't have to say much about it. While playing even the most intense games I never go over 9GB, and my typical maximum is under 7GB. A better 8GB kit would have done just as well and would have been cheaper, but I'm not dwelling on it. Although the best range for my needs would have been a 10-12GB setup, that wasn't widely available or cost-effective when I was ordering the parts.
Mushkin RAM works. But I'd order even Crucial with no heat sink instead if I absolutely had to cut the budget for whatever reason.
|
"""
Django settings for tchelper project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+%m9a62-i($(xc4ok(3y5!1!=dvydl$n5p$+$85%3g_((un=e@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = (os.environ.get('DJANGO_DEBUG', 'on') == 'on')
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = ['*']
INTERNAL_IPS = ('127.0.0.1',)
# Application definition
INSTALLED_APPS = (
# 'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# 3rd party
'rest_framework',
# user apps
'frontpage',
'api',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tchelper.urls'
WSGI_APPLICATION = 'tchelper.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
import dj_database_url
DATABASES = {
'default': dj_database_url.config(default='sqlite:///db.sqlite3')
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles/')
# Email settings
EMAIL_BACKEND = 'api.utils.MailgunEmailBackEnd'
EMAIL_BASE_URL = os.environ.get('EMAIL_BASE_URL', '')
MAILGUN_KEY = os.environ.get('MAILGUN_KEY', '')
MAILGUN_URL = os.environ.get('MAILGUN_URL', '')
DEFAULT_FROM_EMAIL = os.environ.get('DEFAULT_FROM_EMAIL', '')
# Rest Frameworks
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',)
}
# Account settings
LOGIN_URL = 'login'
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
AUTH_PROFILE_MODULE = 'api.UserProfile'
SECURE_SSL_REDIRECT = True
APPEND_SLASH = False
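# Example (assumed deployment workflow): the settings above read their
# configuration from the environment, so a production launch might look like:
#
#   DJANGO_DEBUG=off DATABASE_URL=postgres://user:pass@host/db \
#       MAILGUN_KEY=... gunicorn tchelper.wsgi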
|
Multi-discipline engineering capacity providing a one-stop integrated design service.
Shear Force Engineering has been providing professional civil and structural engineering and design solutions in WA for 25 years and offers a complete service with the capacity to support the client through all stages of the project.
Request a no obligation meeting and quotation.
Copyright © 2019. SHEAR FORCE ENGINEERING. All rights reserved.
|
""" The ConnectomeViewer wrapper for a cfflib object """
# Copyright (C) 2009-2010, Ecole Polytechnique Federale de Lausanne (EPFL) and
# University Hospital Center and University of Lausanne (UNIL-CHUV)
#
# Modified BSD License
# Standard library imports
import os
# Enthought library imports
from traits.api import HasTraits, Str, Bool, CBool, Any, Dict, implements, \
List, Instance, DelegatesTo, Property
from traitsui.api import View, Item, auto_close_message, message
# ConnectomeViewer imports
import cfflib
try:
from nibabel.nifti1 import intent_codes
except ImportError:
print("Please install Nibabel >= 1.1.0")
# Logging import
import logging
logger = logging.getLogger('root.'+__name__)
class CSurfaceDarray(HasTraits):
""" The implementation of the Connectome Surface data array """
def __init__(self, darray, **traits):
super(CSurfaceDarray, self).__init__(**traits)
self.data = darray
        if self.data.meta is not None:
getdict = self.data.get_metadata()
prim = ''
if getdict.has_key('AnatomicalStructurePrimary'):
prim = getdict['AnatomicalStructurePrimary']
sec = ''
if getdict.has_key('AnatomicalStructureSecondary'):
sec = getdict['AnatomicalStructureSecondary']
# name resolution
if prim == '':
if sec == '':
dname = 'Data arrays (%s)' % str(intent_codes.label[self.data.intent])
else:
dname = '%s (%s)' % (sec, str(intent_codes.label[self.data.intent]))
else:
if sec == '':
dname = '%s (%s)' % (prim, str(intent_codes.label[self.data.intent]))
else:
dname = '%s / %s (%s)' % (prim, sec, str(intent_codes.label[self.data.intent]))
else:
dname = 'Data arrays (%s)' % str(intent_codes.label[self.data.intent])
self.dname = dname
# attach it to parent?
|
© Article author(s) (or their employer(s) unless otherwise stated in the text of the article) 2017. Introduction The global prevalence of obesity has risen significantly in recent decades. There is a pressing need to identify effective interventions to treat established obesity that can be delivered at scale. The aim of the Doctor Referral of Overweight People to a Low-Energy Treatment (DROPLET) study is to determine the clinical effectiveness, feasibility and acceptability of referral to a low-energy total diet replacement programme compared with usual weight management interventions in primary care. Methods and analysis The DROPLET trial is a randomised controlled trial comparing a low-energy total diet replacement programme with usual weight management interventions delivered in primary care. Eligible patients will be recruited through primary care registers and randomised to receive a behavioural support programme delivered by their practice nurse or a referral to a commercial provider offering an initial 810 kcal/d low-energy total diet replacement programme for 8 weeks, followed by gradual food reintroduction, along with weekly behavioural support for 24 weeks. The primary outcome is weight change at 12 months. The secondary outcomes are weight change at 3 and 6 months, the proportion of participants achieving 5% and 10% weight loss at 12 months, and change in fat mass, haemoglobin A1c, low-density lipoprotein cholesterol and systolic and diastolic blood pressure at 12 months. Data will be analysed on the basis of intention to treat. Qualitative interviews on a subsample of patients and healthcare providers will assess their experiences of the weight loss programmes and identify factors affecting acceptability and adherence. Ethics and dissemination This study has been reviewed and approved by the National Health Service Health Research Authority (HRA) Research Ethics Committee (Ref: SC/15/0337). The trial findings will be disseminated to academic and health professionals through presentations at meetings and peer-reviewed journals and to the public through the media. If the intervention is effective, the results will be communicated to policymakers and commissioners of weight management services.
|
#!/usr/bin/python3
import sys
from modgrammar import *
grammar_whitespace_mode = 'optional'
class Number (Grammar):
grammar = (OPTIONAL('-'), WORD('0-9'), OPTIONAL('.', WORD('0-9')))
def value(self):
return float(self.string)
class ParenExpr (Grammar):
grammar = (L('('), REF('Expr'), L(')'))
def value(self):
return self[1].value()
class P0Term (Grammar):
grammar = (ParenExpr | Number)
def value(self):
return self[0].value()
class P0Expr (Grammar):
grammar = (P0Term, ONE_OR_MORE(L('/'), P0Term))
def value(self):
value = self[0].value()
for e in self[1]:
value /= e[1].value()
return value
class P1Term (Grammar):
grammar = (P0Expr | ParenExpr | Number)
def value(self):
return self[0].value()
class P1Expr (Grammar):
grammar = (P1Term, ONE_OR_MORE(L('*'), P1Term))
def value(self):
value = self[0].value()
for e in self[1]:
value *= e[1].value()
return value
class P2Term (Grammar):
grammar = (P0Expr | P1Expr | ParenExpr | Number)
def value(self):
return self[0].value()
class P2Expr (Grammar):
grammar = (P2Term, ONE_OR_MORE(L('+') | L('-'), P2Term))
def value(self):
value = self[0].value()
for e in self[1]:
if e[0].string == '+':
value += e[1].value()
else:
value -= e[1].value()
return value
class Expr (Grammar):
grammar = (P2Expr | P1Expr | P0Expr | ParenExpr | Number)
def value(self):
return self[0].value()
if __name__ == '__main__':
parser = Expr.parser()
result = parser.parse_text(sys.argv[1], eof=True)
remainder = parser.remainder()
print("Parsed Text: {}".format(result))
print("Unparsed Text: {}".format(remainder))
print("Value: {}".format(result.value()))
|
The message of this passage is something Jesus says a bunch of times in a bunch of different ways: BE BOLD IN PRAYER. Ask, seek, knock — each one ups the intensity level.
Is the point that God responds to aggression, that we need to shout to get God's attention? No, the point is that our prayers, well at least my prayers, are often tepid, timid and half-hearted. Pray as if you mean it. Pray as if God were real and something might happen.
One Sunday a woman named Irma showed up at church. When it was time to pray, we asked God to comfort Sid who was getting over gall bladder surgery and hoped to be playing golf again soon. We asked God to be with the homeless people and all people in need.
Then Irma got to her feet and started in. She told God it was rough out there but that she was trying to stay on the way. She said that this week she had come "this close" to picking up a gun and using it. She wanted to, but she knew that wasn't right, she knew that wasn't what Jesus wanted. Irma called on Jesus to give her the strength to do the right thing. She asked Jesus to "hook her up with the weapons of spiritual warfare," because she was up against some seriously nasty demons. She prayed for a victory over Satan. And at the end, Irma murmured, "in the precious name of Jesus," several times, then sat down.
Not a sound until some hitherto bored teenager sat up and said, "Wow . . . like Amen to that."
After church, at the door, one of the first people out said to me, "Your services have gotten more interesting. I think I'll come again next Sunday."
I thought, "Lord, that Prayer of Invocation where I asked you to break in among us, I didn't mean . . ."
Grant us the courage to pray boldly, honestly, passionately dear God, trusting that you can do more than we think or imagine. Amen.
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing instances.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from horizon import tables
from horizon import workflows
from trove_dashboard import api
from .tabs import InstanceDetailTabs
from .tables import InstancesTable
from .workflows import LaunchInstance
LOG = logging.getLogger(__name__)
class IndexView(tables.DataTableView):
table_class = InstancesTable
template_name = 'project/databases/index.html'
def has_more_data(self, table):
return self._more
def get_data(self):
marker = self.request.GET.get(
InstancesTable._meta.pagination_param, None)
# Gather our instances
try:
instances = api.trove.instance_list(self.request, marker=marker)
LOG.info(msg=_("Obtaining instances at %s class")
% repr(IndexView.__class__))
self._more = False
except Exception:
self._more = False
instances = []
LOG.critical("Http 500. Internal server error. "
"Unable to retrieve instances.")
return instances
# Gather our flavors and correlate our instances to them
if instances:
try:
flavors = api.trove.flavor_list(self.request)
LOG.info(msg=_("Obtaining flavor list from nova at %s class")
% repr(IndexView.__class__))
except Exception:
flavors = []
LOG.critical(msg=_("Nova exception while obtaining "
"flavor list at %s class")
% repr(IndexView.__class__))
exceptions.handle(self.request, ignore=True)
full_flavors = SortedDict([(str(flavor.id), flavor)
for flavor in flavors])
# Loop through instances to get flavor info.
for instance in instances:
try:
flavor_id = instance.flavor["id"]
if flavor_id in full_flavors:
instance.full_flavor = full_flavors[flavor_id]
else:
# If the flavor_id is not in full_flavors list,
# get it via nova api.trove.
instance.full_flavor = api.trove.flavor_get(
self.request, flavor_id)
except Exception:
msg = _('Unable to retrieve instance size information')
LOG.critical(msg + (_(" at %s class")
% repr(IndexView.__class__)))
exceptions.handle(self.request, msg)
return instances
class LaunchInstanceView(workflows.WorkflowView):
workflow_class = LaunchInstance
template_name = "project/databases/launch.html"
def get_initial(self):
initial = super(LaunchInstanceView, self).get_initial()
initial['project_id'] = self.request.user.tenant_id
initial['user_id'] = self.request.user.id
return initial
class DetailView(tabs.TabbedTableView):
tab_group_class = InstanceDetailTabs
template_name = 'project/databases/detail.html'
def get_context_data(self, **kwargs):
context = super(DetailView, self).get_context_data(**kwargs)
context["instance"] = self.get_data()
return context
def get_data(self):
if not hasattr(self, "_instance"):
try:
instance_id = self.kwargs['instance_id']
instance = api.trove.instance_get(self.request, instance_id)
LOG.info(msg=_("Obtaining instance for detailed view "
"at %s class" % repr(DetailView.__class__)))
instance.full_flavor = api.trove.flavor_get(
self.request, instance.flavor["id"])
except Exception:
redirect = reverse('horizon:project:databases:index')
LOG.critical(msg=_("Exception while obtaining instance"
" for detailed view at %s class")
% repr(DetailView.__class__))
exceptions.handle(self.request,
_('Unable to retrieve details for '
'instance "%s".') % instance_id,
redirect=redirect)
self._instance = instance
return self._instance
def get_tabs(self, request, *args, **kwargs):
instance = self.get_data()
return self.tab_group_class(request, instance=instance, **kwargs)
|
Tomorrow night, Wednesday, June 24, at 7:00 p.m. there will be a city-wide prayer meeting at the Metropolitan AME church located at 1101 East 10th Street.
Along with several other pastors in the city, I will be helping to lead concerted prayers for the unity of our city in support of the African Methodist Episcopal congregation in Charleston that suffered violence a week ago when nine people were gunned down during a prayer gathering.
Would you please consider altering your plans at this late hour and join us?
The third through fifth graders are collecting donations for the ABC Soup Kitchen. If you would like to be part of this donation drive, please bring your non-perishable food items, travel-sized toiletries, toothbrushes and (brand new) white socks to Hope by July 19 and leave them in the donation baskets in the foyer or coffee shop. The kids will take it from there.
If you have children ages 4-11, they are welcome to join us in Hope’s front yard next Monday through Thursday, June 22-25, for our Backyard Bible Club. A team of teens from Hill Country Bible Church will be leading the club each day. I guarantee it will be great! Club starts at 10am each day. Text Matt Ryniker and let him know how many kids you are bringing.
Starting Tuesday June 23rd, a Bible reading class called Scripture Together will meet 7-8PM upstairs in the Den. The first book will be the Gospel According to Mark. See promo video below.
|
# coding: utf-8
# NumPy supplies the zeros() function and the `double` dtype used below.
from numpy import zeros, double
#$ header legendre(int)
def legendre(p):
k = p + 1
x = zeros(k, double)
w = zeros(k, double)
if p == 1:
x[0] = -0.577350269189625765
x[1] = 0.577350269189625765
w[0] = 1.0
w[1] = 1.0
elif p == 2:
x[0] = -0.774596669241483377
x[1] = 0.0
x[2] = 0.774596669241483377
w[0] = 0.55555555555555556
w[1] = 0.888888888888888889
w[2] = 0.55555555555555556
elif p == 3:
x[0] = -0.861136311594052575
x[1] = -0.339981043584856265
x[2] = 0.339981043584856265
x[3] = 0.861136311594052575
w[0] = 0.347854845137453853
w[1] = 0.65214515486254615
w[2] = 0.65214515486254614
w[3] = 0.34785484513745386
return x,w
#$ header make_knots(int, int)
def make_knots(n,p):
n_elements = n-p
m = n+p+1
knots = zeros(m, double)
for i in range(0, p+1):
knots[i] = 0.0
for i in range(p+1, n):
j = i-p
knots[i] = j / n_elements
for i in range(n, n+p+1):
knots[i] = 1.0
return knots
#$ header make_greville(double [:], int, int)
def make_greville(knots, n, p):
greville = zeros(n, double)
for i in range(0, n):
s = 0.0
for j in range(i+1, i+p+1):
s = s + knots[j]
greville[i] = s / p
return greville
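# Worked example (an illustrative note, not in the original script): with
# n = 4 and p = 2, make_knots gives [0, 0, 0, 0.5, 1, 1, 1] and make_greville
# then yields the abscissae [0.0, 0.25, 0.75, 1.0].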
#$ header f_scalar(double, double)
def f_scalar(x, y):
f = x * y
return f
#$ header f_vector(double, double)
def f_vector(x, y):
f1 = x*y
f2 = x*y
return f1, f2
#$ header integrate_edge(int, int, double, double [:], double [:], double, double, int)
def integrate_edge(component, axis, y, us, ws, x_min, x_max, p):
r = 0.0
d = x_max - x_min
for j in range(0, p+1):
u = us[j]
w = ws[j]
x = x_min + d * u
w = 0.5 * d * w
if axis==0:
f1, f2 = f_vector(x, y)
else:
f1, f2 = f_vector(y, x)
if component == 0:
f = f1
else:
f = f2
r = r + f * w
return r
#$ header interpolate_V_0(double [:], double [:], int, int, int, int)
def interpolate_V_0(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
nu1 = n_elements_u+1
nv1 = n_elements_v+1
r = zeros(nu1*nv1, double)
i = 0
for i_u in range(0, n_elements_u+1):
for i_v in range(0, n_elements_v+1):
r[i] = f_scalar(t_u[i_u], t_v[i_v])
i = i + 1
return r
#$ header interpolate_V_1(double [:], double [:], int, int, int, int)
def interpolate_V_1(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
us, wus = legendre(p_u)
vs, wvs = legendre(p_v)
us = us + 1.0
us = 0.5 * us
vs = vs + 1.0
vs = 0.5 * vs
nu1 = n_elements_u
nv1 = n_elements_v+1
nu2 = n_elements_u+1
nv2 = n_elements_v
r_0 = zeros((nu1, nv1), double)
r_1 = zeros((nu2, nv2), double)
component = 0
axis = 0
for i_u in range(0, n_elements_u):
x_min = t_u[i_u]
x_max = t_u[i_u+1]
for i_v in range(0, n_elements_v+1):
y = t_v[i_v]
r_0[i_u, i_v] = integrate_edge(component, axis, y, us, wus, x_min, x_max, p_u)
component = 1
axis = 1
for i_u in range(0, n_elements_u+1):
y = t_u[i_u]
for i_v in range(0, n_elements_v):
x_min = t_v[i_v]
x_max = t_v[i_v+1]
r_1[i_u, i_v] = integrate_edge(component, axis, y, vs, wvs, x_min, x_max, p_v)
m = nu1 * nv1 + nu2 * nv2
r = zeros(m, double)
i = 0
for i_u in range(0, nu1):
for i_v in range(0, nv1):
r[i] = r_0[i_u, i_v]
i = i + 1
for i_u in range(0, nu2):
for i_v in range(0, nv2):
r[i] = r_1[i_u, i_v]
i = i + 1
return r
#$ header interpolate_V_2(double [:], double [:], int, int, int, int)
def interpolate_V_2(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
us, wus = legendre(p_u)
vs, wvs = legendre(p_v)
us = us + 1.0
us = 0.5 * us
vs = vs + 1.0
vs = 0.5 * vs
nu1 = n_elements_u+1
nv1 = n_elements_v
nu2 = n_elements_u
nv2 = n_elements_v+1
r_0 = zeros((nu1, nv1), double)
r_1 = zeros((nu2, nv2), double)
component = 0
axis = 1
for i_u in range(0, n_elements_u+1):
y = t_u[i_u]
for i_v in range(0, n_elements_v):
x_min = t_v[i_v]
x_max = t_v[i_v+1]
r_0[i_u, i_v] = integrate_edge(component, axis, y, vs, wvs, x_min, x_max, p_v)
component = 1
axis = 0
for i_u in range(0, n_elements_u):
x_min = t_u[i_u]
x_max = t_u[i_u+1]
for i_v in range(0, n_elements_v+1):
y = t_v[i_v]
r_1[i_u, i_v] = integrate_edge(component, axis, y, us, wus, x_min, x_max, p_u)
m = nu1 * nv1 + nu2 * nv2
r = zeros(m, double)
i = 0
for i_u in range(0, nu1):
for i_v in range(0, nv1):
r[i] = r_0[i_u, i_v]
i = i + 1
for i_u in range(0, nu2):
for i_v in range(0, nv2):
r[i] = r_1[i_u, i_v]
i = i + 1
return r
#$ header interpolate_V_3(double [:], double [:], int, int, int, int)
def interpolate_V_3(t_u, t_v, n_u, n_v, p_u, p_v):
n_elements_u = n_u-p_u
n_elements_v = n_v-p_v
us, wus = legendre(p_u)
vs, wvs = legendre(p_v)
us = us + 1.0
us = 0.5 * us
vs = vs + 1.0
vs = 0.5 * vs
r = zeros(n_elements_u*n_elements_v, double)
i = 0
for i_u in range(0, n_elements_u):
x_min = t_u[i_u]
x_max = t_u[i_u+1]
dx = x_max - x_min
for i_v in range(0, n_elements_v):
y_min = t_v[i_v]
y_max = t_v[i_v+1]
dy = y_max - y_min
contribution = 0.0
for j_u in range(0, p_u+1):
x = x_min + dx * us[j_u]
for j_v in range(0, p_v+1):
y = y_min + dy * vs[j_v]
w = wus[j_u] * wvs[j_v]
# Jacobian of the map from [-1, 1]^2 onto the cell: (dx / 2) * (dy / 2).
w = 0.25 * dx * dy * w
f = f_scalar(x,y)
contribution = contribution + w * f
r[i] = contribution
i = i + 1
return r
n_elements_u = 2
n_elements_v = 2
p_u = 2
p_v = 2
n_u = p_u + n_elements_u
n_v = p_v + n_elements_v
knots_u = make_knots(n_u, p_u)
knots_v = make_knots(n_v, p_v)
greville_u = make_greville(knots_u, n_u, p_u)
greville_v = make_greville(knots_v, n_v, p_v)
#print("knots_u = ", knots_u)
#print("knots_v = ", knots_v)
#print("greville_u = ", greville_u)
#print("greville_v = ", greville_v)
r_0 = interpolate_V_0(greville_u, greville_v, n_u, n_v, p_u, p_v)
r_1 = interpolate_V_1(greville_u, greville_v, n_u, n_v, p_u, p_v)
r_2 = interpolate_V_2(greville_u, greville_v, n_u, n_v, p_u, p_v)
r_3 = interpolate_V_3(greville_u, greville_v, n_u, n_v, p_u, p_v)
print("r_0 = ", r_0)
print("r_1 = ", r_1)
print("r_2 = ", r_2)
print("r_3 = ", r_3)
|
Usage: It is recommended to be used by inhalation (externally, as steam) or by adding it to fixed (seed) oils for massage on the skin.
Its Latin name is Jasminum grandiflorum; it is a fragrant, white-flowered climbing plant that grows in Southeast Asia and in Mediterranean climates.
In our country it is grown around Alanya. A kilogram of jasmine oil is obtained from eight million jasmine flowers. The oil is extracted by distillation. Jasmine oil is widely used in the perfume industry because of its pleasant scent.
It relaxes the muscles, reduces stress and lifts the spirits in cases of mental strain. As an aromatic oil it is used for emotional complaints of all kinds: stress, tiredness, excessive sensitivity and absent-mindedness.
It is effective against perspiration and the emotional complaints that accompany menopause, and it helps with all stomach aches and cramps. Jasmine aromatic oil is applied as a massage, bath, steam, poultice or compress.
Better results are obtained when jasmine oil is mixed with other oils for body massage; it has an excellent relaxing effect on the skin. It is recommended for dry and sensitive skin.
|
# This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Widgets to edit a list of items in a flexible way.
"""
from __future__ import unicode_literals
from PyQt4.QtCore import pyqtSignal
from PyQt4.QtGui import (
QFileDialog, QGridLayout, QListWidget, QListWidgetItem, QPushButton, QWidget)
import app
import icons
class ListEdit(QWidget):
"""A widget to edit a list of items (e.g. a list of directories)."""
# emitted when anything changed in the listbox.
changed = pyqtSignal()
def __init__(self, *args, **kwargs):
QWidget.__init__(self, *args, **kwargs)
layout = QGridLayout(self)
self.setLayout(layout)
self.addButton = QPushButton(icons.get('list-add'), '')
self.editButton = QPushButton(icons.get('document-edit'), '')
self.removeButton = QPushButton(icons.get('list-remove'), '')
self.listBox = QListWidget()
layout.setContentsMargins(1, 1, 1, 1)
layout.setSpacing(0)
layout.addWidget(self.listBox, 0, 0, 8, 1)
layout.addWidget(self.addButton, 0, 1)
layout.addWidget(self.editButton, 1, 1)
layout.addWidget(self.removeButton, 2, 1)
@self.addButton.clicked.connect
def addClicked():
item = self.createItem()
if self.openEditor(item):
self.addItem(item)
@self.editButton.clicked.connect
def editClicked():
item = self.listBox.currentItem()
item and self.editItem(item)
@self.removeButton.clicked.connect
def removeClicked():
item = self.listBox.currentItem()
if item:
self.removeItem(item)
@self.listBox.itemDoubleClicked.connect
def itemDoubleClicked(item):
item and self.editItem(item)
self.listBox.model().layoutChanged.connect(self.changed)
def updateSelection():
selected = bool(self.listBox.currentItem())
self.editButton.setEnabled(selected)
self.removeButton.setEnabled(selected)
self.changed.connect(updateSelection)
self.listBox.itemSelectionChanged.connect(updateSelection)
updateSelection()
app.translateUI(self)
def translateUI(self):
self.addButton.setText(_("&Add..."))
self.editButton.setText(_("&Edit..."))
self.removeButton.setText(_("&Remove"))
def createItem(self):
return QListWidgetItem()
def addItem(self, item):
self.listBox.addItem(item)
self.itemChanged(item)
self.changed.emit()
def removeItem(self, item):
self.listBox.takeItem(self.listBox.row(item))
self.changed.emit()
def editItem(self, item):
if self.openEditor(item):
self.itemChanged(item)
self.changed.emit()
def setCurrentItem(self, item):
self.listBox.setCurrentItem(item)
def setCurrentRow(self, row):
self.listBox.setCurrentRow(row)
def openEditor(self, item):
"""Opens an editor (dialog) for the item.
Returns True if the dialog was accepted and the item edited.
Returns False if the dialog was cancelled (the item must be left
unedited).
"""
pass
def itemChanged(self, item):
"""Called after an item has been added or edited.
Re-implement to do something at this moment if needed, e.g. alter the
text or display of other items.
"""
pass
def setValue(self, strings):
"""Sets the listbox to a list of strings."""
self.listBox.clear()
self.listBox.addItems(strings)
self.changed.emit()
def value(self):
"""Returns the list of paths in the listbox."""
return [self.listBox.item(i).text()
for i in range(self.listBox.count())]
def setItems(self, items):
"""Sets the listbox to a list of items."""
self.listBox.clear()
for item in items:
self.listBox.addItem(item)
self.itemChanged(item)
self.changed.emit()
def items(self):
"""Returns the list of items in the listbox."""
return [self.listBox.item(i)
for i in range(self.listBox.count())]
def clear(self):
"""Clears the listbox."""
self.listBox.clear()
self.changed.emit()
class FilePathEdit(ListEdit):
"""
A widget to edit a list of directories (e.g. a file path).
"""
def __init__(self, *args, **kwargs):
super(FilePathEdit, self).__init__(*args, **kwargs)
def fileDialog(self):
"""The QFileDialog this widget is using."""
try:
return self._filedialog
except AttributeError:
self._filedialog = d = QFileDialog(self)
d.setFileMode(QFileDialog.Directory)
return d
def openEditor(self, item):
"""Asks the user for an (existing) directory."""
directory = item.text()
dlg = self.fileDialog()
dlg.selectFile(directory)
if dlg.exec_():
item.setText(dlg.selectedFiles()[0])
return True
return False
|
Carbon monoxide gas leaks, electrical fires, flooded basements, and frozen pipes are all possibilities in a Sandy residence. The in-house professional monitoring staff that supports Vivint Smart Home security systems consistently provides industry-leading response times to ensure that emergency situations get resolved quickly.
Smart home products and components are no longer just concepts for homes of the future. Now that home security and automation features are a reality, it's easy to imagine how newly constructed homes can have them incorporated right into their design, but the truth is that you don't need to leave a home that you have already invested in just to enjoy all the benefits that a smart home can offer. Thanks to our commitment to convenience and affordability, it's easier than ever to upgrade your home to a smart home. We provide a free one-on-one consultation so that you get the smart home of your dreams, customizing your system to fit the needs of your household. Plus, we will send our Vivint Smart Home Pros directly to your Sandy house to fully install and integrate your smart home package at no additional cost (regularly $199). Contact us today to schedule an appointment and explore just how remarkable Vivint Smart Home can be for you!
|
import sys
def digit(n, num):
return num%(10**(n+1))//10**n
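# For example, digit(1, 294) == 9: the tens digit of 294.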
class Interpreter(object):
def __init__(self, program):
self._reg = [0 for r in range(10)]
self._mem = [0]*1000
self._pc = 0
self._instructions = [
self.i000, self.i100, self.i200,\
self.i300, self.i400, self.i500,\
self.i600, self.i700, self.i800, self.i900]
for n, instruction in enumerate(program):
self._mem[n] = instruction
self._icounter = 0 # executed instructions count
def i100(self, op1, op0):
self._pc = None
def i200(self, op1, op0):
self._reg[op1] = op0
self._pc += 1
def i300(self, op1, op0):
self._reg[op1] = (self._reg[op1]+op0)%1000
self._pc += 1
def i400(self, op1, op0):
self._reg[op1] = (self._reg[op1]*op0)%1000
self._pc += 1
def i500(self, op1, op0):
self._reg[op1] = self._reg[op0]
self._pc += 1
def i600(self, op1, op0):
self._reg[op1] = (self._reg[op1]+self._reg[op0])%1000
self._pc += 1
def i700(self, op1, op0):
self._reg[op1] = (self._reg[op1]*self._reg[op0])%1000
self._pc += 1
def i800(self, op1, op0):
self._reg[op1] = self._mem[self._reg[op0]]
self._pc += 1
def i900(self, op1, op0):
self._mem[self._reg[op0]] = self._reg[op1]
self._pc += 1
def i000(self, op1, op0):
if not self._reg[op0]:
self._pc += 1
else:
self._pc = self._reg[op1]
def decode_execute(self, ins):
family, op1, op0 = digit(2, ins), digit(1, ins), digit(0, ins)
self._instructions[family](op1, op0)
def run(self):
while self._pc is not None:
ins = self._mem[self._pc]
self.decode_execute(ins)
self._icounter +=1
return self._icounter
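# A minimal self-check (illustrative, not part of the original): the program
# [100] is a single HALT instruction, so exactly one instruction executes:
# assert Interpreter([100]).run() == 1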
def load_num():
line = sys.stdin.readline()
if line in ('', '\n'):
return None
else:
return int(line)
def load_prog():
prog = []
while True:
instruction = load_num()
if instruction is None:
break
prog.append(instruction)
return prog
if __name__ == '__main__':
# Number of programs
nprog = load_num()
# Discard empty line
sys.stdin.readline()
for n in range(nprog):
prog = load_prog()
inter = Interpreter(prog)
print(inter.run())
if n+1 < nprog:
print('')
|
"""The module contains the method for detection with using linear regression.
This module contains method for classification the perfomance change between two profiles
according to computed metrics and models from these profiles, based on the linear regression.
"""
import scipy.stats as stats
import perun.utils as utils
import perun.check.general_detection as detect
import perun.check.fast_check as fast_check
def linear_regression(baseline_profile, target_profile, **_):
"""Temporary function, which call the general function and subsequently returns the
information about performance changes to calling function.
:param dict baseline_profile: base against which we are checking the degradation
:param dict target_profile: profile corresponding to the checked minor version
:param dict _: unification with other detection methods (unused in this method)
:returns: tuple (degradation result, degradation location, degradation rate, confidence)
"""
return detect.general_detection(
baseline_profile, target_profile, detect.ClassificationMethod.LinearRegression
)
def exec_linear_regression(
uid, baseline_x_pts, lin_abs_error, threshold, linear_diff_b1,
baseline_model, target_model, baseline_profile
):
"""Function executes the classification of performance change between two profiles with using
function from scipy module, concretely linear regression and regression analysis. If that fails
classification using linear regression, so it will be used regression analysis to the result of
absolute error. The absolute error is regressed in the all approach used in this method. This
error is calculated from the linear models from both profiles.
:param str uid: uid for which we are computing the linear regression
:param np_array baseline_x_pts: values of the independent variables from both profiles
:param np_array lin_abs_error: the value absolute error computed from the linear models obtained
from both profiles
:param integer threshold: the appropriate value for distinction individual state of detection
:param integer linear_diff_b1: difference coefficients b1 from both linear models
:param ModelRecord baseline_model: the best model from the baseline profile
:param ModelRecord target_model: the best model from the target profile
:param dict baseline_profile: baseline against which we are checking the degradation
:returns: string (classification of the change)
"""
# executing the linear regression
diff_b0 = target_model.b0 - baseline_model.b0
gradient, intercept, r_value, _, _ = stats.linregress(baseline_x_pts, lin_abs_error)
# check the first two types of change
change_type = ''
if baseline_model.type == 'linear' or baseline_model.type == 'constant':
if utils.abs_in_absolute_range(gradient, threshold) \
and utils.abs_in_relative_range(diff_b0, intercept, 0.05) \
and abs(diff_b0 - intercept) < 1e-12:
change_type = 'constant'
elif utils.abs_in_relative_range(linear_diff_b1, gradient, 0.3) \
and r_value**2 > 0.95:
change_type = 'linear'
else:
if utils.abs_in_absolute_range(gradient, threshold) \
and utils.abs_in_relative_range(diff_b0, intercept, 0.05):
change_type = 'constant'
elif utils.abs_in_relative_range(linear_diff_b1, gradient, 0.3) \
and r_value**2 > 0.95:
change_type = 'linear'
std_err_profile = fast_check.exec_fast_check(
uid, baseline_profile, baseline_x_pts, lin_abs_error
)
# obtaining the models (linear and quadratic) from the new regressed profile
quad_err_model = detect.get_filtered_best_models_of(
std_err_profile, group='param', model_filter=detect.create_filter_by_model('quadratic')
)
linear_err_model = detect.get_filtered_best_models_of(
std_err_profile, group='param', model_filter=detect.create_filter_by_model('linear')
)
# check the last quadratic type of change
if quad_err_model[uid].r_square > 0.90 \
and abs(quad_err_model[uid].r_square - linear_err_model[uid].r_square) > 0.01:
change_type = 'quadratic'
# We did not classify the change
if not change_type:
std_err_model = detect.get_filtered_best_models_of(std_err_profile, group='param')
change_type = std_err_model[uid].type
return change_type
|
Vue Academy - Learn Vue.js in Canada now!
Contact Us for Corporate On-Site Training!
Vue Academy is a 2-day instructor-led classroom training in Canada!
Learn the core concepts of coding a real business app with Vue.js from an industry expert in an immersive hands-on workshop.
Bring your laptop! An important part of the training is dedicated to building apps together and testing things in real time (an interactive learning class).
This immersive instructor-led workshop covers all the core concepts and guides you to build a full Vue.js data-centric business application, including project setup, code structure, style guide, using the Vue CLI, data binding, querying a REST API, routing and state management with Vuex. Vue Academy is the ideal starting point to understand and use Vue effectively now. You’ll experience several demos and "code along" activities and be provided with the code throughout the workshop, as you learn how to build apps using Vue, with plenty of interactivity with the instructor in the room.
We plan to visit every major Canadian city over the year, contact us if you want Vue Academy in your city soon!
Have a group of at least 6 and room for more? Host this workshop at your company and get discounts!
Be notified of new courses and dates!
We will only contact you regarding our Vue Workshops!
We have plenty of rebate options that you could benefit from.
Bring your coworkers! Register 3 or more people to get 10% off. We also do Corporate On-Site Training, contact us.
What is Vue Academy ?
This is a 2-day classroom training for professional web developers, available as a public course or as a corporate on-site training.
Are you offering online courses as well ?
No, we do not believe in the efficiency of online courses for training programmers. Our courseware is delivered live as a "bootcamp style" workshop with plenty of interactivity with the instructor in the room.
Which version of Vue are you using ?
We are *always* teaching on the latest version of Vue. We constantly update the training material to follow the new Vue versions.
Can we customize the content ?
Public classes are strictly following the outline as seen above. But we can definitely tailor the content for corporate on-site training to fit your team's preferences and expectations.
Is it for students ?
What if I don't have a laptop ?
Is it ok to bring a MAC, or a PC ?
What is the setup required ?
A detailed setup document is sent to attendees one week prior to the training. Basically you'll have to install Node.js and the latest version of the Vue CLI.
Which code editor or IDE will we use ?
Can we host this workshop at our office ?
Of course! If you provide a room with WiFi, projector/TV, classroom setup and big enough to accept external attendees as well, you'll get a great discount! Contact us to schedule a session now.
© 2018-2019 Vue Academy by LDEX.
|
from __future__ import absolute_import
from . import build, dependencies, docker, push
class Shipwright(object):
def __init__(self, source_control, docker_client, tags):
self.source_control = source_control
self.docker_client = docker_client
self.tags = tags
def targets(self):
return self.source_control.targets()
def build(self, build_targets):
targets = dependencies.eval(build_targets, self.targets())
this_ref_str = self.source_control.this_ref_str()
return self._build(this_ref_str, targets)
def _build(self, this_ref_str, targets):
# what needs building
for evt in build.do_build(self.docker_client, this_ref_str, targets):
yield evt
# now that we're built and tagged all the images.
# (either during the process of building or forwarding the tag)
# tag all containers with the human readable tags.
tags = self.source_control.default_tags() + self.tags + [this_ref_str]
for image in targets:
for tag in tags:
yield docker.tag_container(
self.docker_client,
image,
tag,
)
def push(self, build_targets, no_build=False):
"""
Pushes the latest images to the repository.
"""
targets = dependencies.eval(build_targets, self.targets())
this_ref_str = self.source_control.this_ref_str()
if not no_build:
for evt in self._build(this_ref_str, targets):
yield evt
tags = self.source_control.default_tags() + self.tags + [this_ref_str]
names_and_tags = []
for image in targets:
for tag in tags:
names_and_tags.append((image.name, tag))
for evt in push.do_push(self.docker_client, names_and_tags):
yield evt
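# Typical driving loop (a sketch; `sc` and `client` stand for objects that
# satisfy the source_control and docker_client interfaces used above):
#   sw = Shipwright(sc, client, tags=['latest'])
#   for evt in sw.build(build_targets):
#       print(evt)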
|
What drives people from all around the world to Las Vegas? I was asking myself this question for a long time. Surprisingly, despite my adventurous nature and longing for exploring, I never had a burning desire to visit the gambling capital of the world. The fact that I live about 4 hours away from Las Vegas makes it even worse. So I can say that I am guilty of not exploring “my backyard”. After postponing this trip for years, I finally put aside all excuses and decided to visit the home of 15 of the largest hotels on the planet. And I am glad I did! I was especially happy to explore the 5 prettiest hotel lobbies in Las Vegas.
I should admit, Las Vegas is quite an interesting city. The main point of the city is the Strip. This stretch of Las Vegas Boulevard, a little over 4 miles long, is known worldwide for its concentration of hotels and casinos. If you are a gambler, you know that you are in the right place. The casinos are everywhere. Slot machines, table games… Your choices are unlimited! I am not a gambler and wasn’t impressed by all this “casino business”, but what really got my attention were the hotels themselves. To be precise, the hotel lobbies!.. They have surely been designed to wow their guests. Each lobby is unique in its own way. Some of them are traditionally elegant, some are glamorous, while others are so spectacular that you don’t even feel like you are in a hotel lobby. Without further ado, here is a list of my 5 favorite hotel lobbies in Las Vegas.
This was the first hotel we explored. The lobby looked amazing. I literally couldn’t stop taking pictures. As soon as you walk into the hotel, you find yourself in a beautiful garden with trees covered in sparkling lights and decorated with shining balls. The trees are professionally lined up on both sides of a walkway to the grand hotel and casino. However, the spring theme of the hotel would not be complete without the stunning carousel made of flowers and the mosaic floor tiles shaped like flowers and butterflies.
The second place on the list of the prettiest hotel lobbies in Las Vegas belongs to the Venetian. If you like Italy, and especially Venice, this is the place for you. At some point you even forget that you are walking through a hotel lobby, all thanks to the design of the hotel, which was inspired by Venice itself. No wonder the guests and visitors of the hotel are completely captivated by the beautiful architectural replicas of Venetian landmarks. The painted ceiling that looks like a real sky and the canal surrounded by shops and restaurants create the atmosphere of an Italian street or shopping plaza. And of course, who can stay away from a gondola ride with a singing boatman?
The first thing that comes to my mind when I think of the Bellagio Las Vegas is its famous musical fountains. Every thirty minutes (every fifteen in the evening) there is a fountain show: a synchronized water and light display accompanied by music. Bellagio is also known for “Fiori Di Como”, the ceiling glass sculpture created by Dale Chihuly. This masterpiece, which consists of 2,000 hand-blown glass blossoms, attracts 15,000–20,000 people daily to one of the prettiest hotel lobbies in Las Vegas.
Just in front of the Bellagio is another hotel that is worth mentioning here. Paris Las Vegas, as its name suggests, was inspired by Paris, the capital of France. The hotel, with a half-size Eiffel Tower, a two-thirds size Arc De Triomphe and a replica of La Fontaine des Mers, effortlessly transports its guests and visitors to the beautiful streets of Paris with an endless number of shops and restaurants. Take your time and stroll down these “streets” of another beautiful hotel lobby in Las Vegas. You won’t regret it!
There are a few things that make the Palazzo stand out among the numerous fabulous hotel lobbies in Las Vegas. First, the Palazzo, like the Venetian, takes its inspiration from exquisite Italy. Second, the lobbies of the Palazzo are decorated seasonally. The mesmerizing floral installations change their colors to match current holidays or the seasonal flower bloom. You will also be amazed by the stunning displays in front of the Waterfall Atrium, inspired by Venetian women and their lavish dresses.
Wow, I’ve been to Las Vegas a while ago and I really wish I visited the Wynn Las Vegas!
It is really worth visiting. Next time you are in Las Vegas, try to make it. You won’t regret it.
|
import logging
import subprocess as sp
import sys
import tempfile
_log = logging.getLogger('flactools')
def encode_wav_to_mp3(wav_filename, output_filename, lame_options=['-V', '0']):
cmd = ['lame'] + lame_options + ['-S'] # Enforce no progress bar, etc
cmd += [wav_filename, output_filename]
_log.info(' '.join(cmd))
stdout, stderr = sp.Popen(cmd,
stdout=sp.PIPE,
stderr=sp.PIPE).communicate()
# stdout does not have anything
_log.debug(stderr.decode('utf-8').strip())
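# Existence check: opening the file raises if lame produced no output.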
with open(output_filename, 'rb'):
pass
_log.info('Finished encoding to MP3')
def escape_quotes_for_cue(path):
return path.replace('"', r'\"')
def merge_audio(list_of_files):
merged_wav_output = tempfile.mkstemp(prefix='flactools.util.merge_audio__',
suffix='__.wav')[1]
sox_cmd = ['sox'] + list_of_files + [merged_wav_output]
_log.info(' '.join(sox_cmd))
p = sp.Popen(sox_cmd)
p.wait()
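# Existence check: opening the file raises if sox produced no output.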
with open(merged_wav_output, 'rb'):
pass
return merged_wav_output
def get_logger(level=logging.ERROR, channel=None):
logger = logging.getLogger('flactools')
log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
formatter = logging.Formatter(log_format)
if not channel:
channel = logging.StreamHandler(sys.stderr)
logger.setLevel(level)
channel.setLevel(level)
channel.setFormatter(formatter)
logger.addHandler(channel)
return logger
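# Example usage (a sketch; assumes the `lame` and `sox` command-line tools are
# installed and on PATH):
#   get_logger(logging.INFO)
#   merged = merge_audio(['part1.wav', 'part2.wav'])
#   encode_wav_to_mp3(merged, 'album.mp3')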
|
'''OpenGL extension ARB.enhanced_layouts
This module customises the behaviour of the
OpenGL.raw.GL.ARB.enhanced_layouts to provide a more
Python-friendly API
Overview (from the spec)
This extension adds the following functionality to layout qualifiers,
including broadening the API where this functionality is reflected.
The following are added:
1) Use compile-time constant expressions. E.g.,
const int start = 6;
layout(location = start + 2) int vec4 v;
2) Specify explicit byte offsets within a uniform or shader storage block.
For example, if you want two vec4 variables "batman" and "robin" to
appear at byte offsets 0 and 64 in your block, you can say:
uniform Block {
layout(offset = 0) vec4 batman;
layout(offset = 64) vec4 robin;
};
3) Force alignment within a uniform or shader storage block. The previous
example could also be expressed:
uniform Block {
vec4 batman;
layout(align = 64) vec4 robin;
};
This says the member 'robin' must start at the next address that is a
multiple of 64. It allows constructing the same layout in C and in GLSL
without inventing explicit offsets.
Explicit offsets and aligned offsets can be combined:
uniform Block {
vec4 batman;
layout(offset = 44, align = 8) vec4 robin;
};
would make 'robin' be at the first 8-byte aligned address, starting at
44, which is 48. This is more useful when using the *align* at
the block level, which will apply to all members.
4) Specify component numbers to more fully utilize the vec4-slot interfaces
between shader outputs and shader inputs.
For example, you could fit the following
- an array of 32 vec3
- a single float
into the space of 32 vec4 slots using the following code:
// consume X/Y/Z components of 32 vectors
layout(location = 0) in vec3 batman[32];
// consumes W component of first vector
layout(location = 0, component = 3) in float robin;
Further, an array of vec3 and an array of float can be stored
interleaved, using the following.
// consumes W component of 32 vectors
layout(location = 0, component = 3) in float robin[32];
// consume X/Y/Z components of 32 vectors
layout(location = 0) in vec3 batman[32];
5) Specify transform/feedback buffers, locations, and widths. For example:
layout(xfb_buffer = 0, xfb_offset = 0) out vec3 var1;
layout(xfb_buffer = 0, xfb_offset = 24) out vec3 var2;
layout(xfb_buffer = 1, xfb_offset = 0) out vec4 var3;
The second line above says to write var2 out to byte offset 24 of
transform/feedback buffer 0. (When doing this, outputs are only
captured when xfb_offset is used.)
To specify the total number of bytes per entry in a buffer:
layout(xfb_buffer = 1, xfb_stride = 32) out;
This is necessary if, say, var3 above, which uses bytes 0-11,
does not fully fill the buffer, which in this case takes 32 bytes.
Use of this feature effectively eliminates the need to use previously
existing API commands to describe the transform feedback layout.
6) Allow locations on input and output blocks for SSO interface matching.
For example:
layout(location = 4) in block {
vec4 batman; // gets location 4
vec4 robin; // gets location 5
layout(location = 7) vec4 joker; // gets location 7
vec4 riddler; // location 8
};
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/enhanced_layouts.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.ARB.enhanced_layouts import *
from OpenGL.raw.GL.ARB.enhanced_layouts import _EXTENSION_NAME
def glInitEnhancedLayoutsARB():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
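# Example usage (a sketch; requires a current OpenGL context to be bound):
#   if glInitEnhancedLayoutsARB():
#       ...  # the layout qualifiers described above are supported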
|
1.) For the love of Bath and Bodyworks, take a shower already. New moms and teen boys seem to be the only social groups who brag about how long it’s been since they’ve showered. We even take it as a point of pride — we must love our babies so much if we refuse to leave them even for five minutes. I’m giving you permission to shower. Actually, more than that — I’m asking nicely if you would please shower. You’ll feel better afterward. You’ll smell better afterward. It can change your whole day.
2.) Take a walk. With or without baby. You just need a little fresh air, and the exercise from a light walk can wake you up and reinvigorate you and change your entire frame of mind. Sometimes you just need a little change of scenery.
3.) Pray, meditate, sit still. Just for five minutes. Without folding laundry or anything. It helps to just be for a few minutes before pushing on with the day.
4.) Get a cup of coffee. Or tea. Or hot chocolate. One of my big luxuries is, on occasion, running into Starbucks for five minutes and sitting all by myself. I like getting out of the house, but if that’s not possible, even a cup of coffee on the back deck, or in a garden, or on your couch, can be all you need to reset for the day.
Make sure you check out the Ultimate Guide to Baby’s First Year, for more great advice and articles! And enter the giveaway for a ton of really awesome baby stuff!
Great tips! As I’ve discovered in the last weeks just a few minutes for myself, even if it is just to take a nice shower, can do wonders in improving the outlook on life 🙂 Spending over an hour by myself at the hairdresser’s the other day felt like a luxury.
|
#Copyright (C) 2011 Gabriel Gregori Manzano
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from string import capwords
from constants import VM_STATUS, TRANSFER_STAGE
from instructions import OpCodes
from assemblyloader import AssemblyLoader
from interpretererror import InterpreterError
class Interpreter:
"""Interprets an op code and executes the appropriate instruction."""
def __init__(self, vm):
#Access to the data structures of the vm is needed.
self.vm = vm
self.systemStack = vm.stack
self.callStack = vm.callStack
#Record if the last instruction modified the vm's PC.
self.modifiedPC = False
#Create a dictionary with the opCode as key and its processing method.
self.methods = {}
for attribute in dir(OpCodes):
if not attribute.startswith("__"):
opCode = getattr(OpCodes, attribute)
methodName = "execute"
methodName += capwords(attribute, '_').replace('_', '')
self.methods[opCode] = methodName
def raiseError(self, msg):
"""Raise an error to handle it in the main process."""
#Flag the failure on the vm itself so the main process can see it.
self.vm.status = VM_STATUS.FAILED
raise InterpreterError("{}".format(msg))
def modifyPC(self, newPC):
"""Modify the vm's PC and set it as modified for the interpreter."""
self.vm.PC = newPC
self.modifiedPC = True
def preprocess(self):
"""Execute all the code inside the preprocessing code section."""
for instr in self.vm.preprocessCode: self.execute(instr)
def execute(self, instr):
"""Execute a instruction, modifying the vm accordingly."""
opCode = instr[0]
methodName = self.methods[opCode]
if not hasattr(self, methodName):
self.raiseError("Can't find processing method {} for instruction {}"
.format(methodName,
AssemblyLoader.reversedOpCodes[instr[0]]))
else:
method = getattr(self, methodName)
method(instr)
#If the last instruction didn't modify the PC, point it to the next
#instruction. In the other case, keep the modified PC.
if not self.modifiedPC: self.vm.PC += 1
else: self.modifiedPC = False
def getNOperands(self, n):
"""Get n operands from the stack and return them reversed."""
ops = []
for i in range(n): ops.insert(0, self.systemStack.pop())
return ops
def getOperands(self, instr):
"""Get the operands of instr from the stack and return them reversed."""
numOps = int(instr[1])
ops = []
for i in range(numOps): ops.insert(0, self.systemStack.pop())
return ops
def getCase(self, string):
"""Get the case of a string, defaulting to capitals."""
isFirstUpper = string[0].isupper()
isUpper = string.isupper()
#If it's a 1-length string and is upper, capitalize it.
if isUpper and len(string) == 1: return "Aa"
elif isFirstUpper and not isUpper: return "Aa"
elif isUpper: return "AA"
else: return "aa"
def getSourceLexicalUnit(self, pos):
"""Get a word from the source side for every transfer stage."""
if self.vm.transferStage == TRANSFER_STAGE.CHUNKER:
return self.vm.words[self.vm.currentWords[pos - 1]].source
elif self.vm.transferStage == TRANSFER_STAGE.INTERCHUNK:
return self.vm.words[self.vm.currentWords[pos - 1]].chunk
else:
word = self.vm.words[self.vm.currentWords[0]]
#If it's a macro, get the position passed as a parameter.
if len(self.vm.currentWords) > 1: pos = self.vm.currentWords[pos]
if pos == 0: return word.chunk
else: return word.content[pos - 1]
def getTargetLexicalUnit(self, pos):
"""Get a word from the target side only for the chunker stage."""
return self.vm.words[self.vm.currentWords[pos - 1]].target
def executeAddtrie(self, instr):
#Append N number of patterns.
pattern = []
numberOfPatterns = self.systemStack.pop()
while numberOfPatterns > 0:
pattern.insert(0, self.systemStack.pop().replace("\"", ''))
numberOfPatterns -= 1
#Add the pattern with the rule number to the trie.
ruleNumber = instr[1]
self.vm.trie.addPattern(pattern, ruleNumber)
def executeAnd(self, instr):
#Get all the operands.
ops = self.getOperands(instr)
#Return false (0) if any operand is false.
for op in ops:
if op == 0:
self.systemStack.push(0)
return
#Else, return true (1).
self.systemStack.push(1)
def executeOr(self, instr):
#Get all the operands.
ops = self.getOperands(instr)
#Return true (1) if any operand is true.
for op in ops:
if op == 1:
self.systemStack.push(1)
return
#Else, return false (0).
self.systemStack.push(0)
def executeNot(self, instr):
op1 = self.systemStack.pop()
if op1 == 0: self.systemStack.push(1)
elif op1 == 1: self.systemStack.push(0)
def executeAppend(self, instr):
ops = self.getOperands(instr)
string = ""
for op in ops: string += op
varName = self.systemStack.pop()
self.vm.variables[varName] = self.vm.variables[varName] + string
def executeBeginsWith(self, instr):
prefixes = self.systemStack.pop()
word = self.systemStack.pop()
for prefix in prefixes.split("|"):
if word.startswith(prefix):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeBeginsWithIg(self, instr):
prefixes = self.systemStack.pop()
word = self.systemStack.pop().lower()
for prefix in prefixes.split("|"):
if word.startswith(prefix.lower()):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeCall(self, instr):
#Save current PC to return later when the macro ends.
self.callStack.saveCurrentPC()
#Get the words passed as argument to the macro.
ops = self.getNOperands(self.systemStack.pop())
words = []
#For the postchunk append the index of the only current word and then
#append all the parameters.
if self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
words.append(self.vm.currentWords[0])
for op in ops: words.append(op)
#For the rest, just append the index of the current words.
else:
for op in ops: words.append(self.vm.currentWords[op - 1])
#Create an entry in the call stack with the macro called.
macroNumber = int(instr[1])
self.callStack.push("macros", macroNumber, words)
#Tell the interpreter that the PC has been modified, so it does not increment it to the next instruction.
self.modifyPC(self.vm.PC)
def executeRet(self, instr):
#Restore the last code section and its PC.
self.callStack.pop()
def executeClip(self, instr):
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
if len(instr) > 1: linkTo = str(instr[1].replace('"', ''))
else: linkTo = None
lemmaAndTags = lu.attrs['lem'] + lu.attrs['tags']
self.handleClipInstruction(parts, lu, lemmaAndTags, linkTo)
def executeClipsl(self, instr):
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
if len(instr) > 1: linkTo = str(instr[1].replace('"', ''))
else: linkTo = None
self.handleClipInstruction(parts, lu, lu.lu, linkTo)
def executeCliptl(self, instr):
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getTargetLexicalUnit(pos)
if len(instr) > 1: linkTo = str(instr[1].replace('"', ''))
else: linkTo = None
self.handleClipInstruction(parts, lu, lu.lu, linkTo)
def handleClipInstruction(self, parts, lu, lemmaAndTags, linkTo):
if linkTo is None and parts in ("lem", "lemh", "lemq", "tags", "chcontent"):
try:
self.systemStack.push(lu.attrs[parts])
except KeyError:
self.systemStack.push("")
return
elif linkTo is None and parts == "whole":
self.systemStack.push(lu.lu)
return
else:
longestMatch = ""
for part in parts.split('|'):
if part in lemmaAndTags:
if linkTo:
self.systemStack.push(linkTo)
return
else:
if len(part) > len(longestMatch): longestMatch = part
if longestMatch:
self.systemStack.push(longestMatch)
return
#If the lu doesn't have the part needed, return "".
self.systemStack.push("")
def executeCmp(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1 == op2: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeCmpi(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1.lower() == op2.lower(): self.systemStack.push(1)
else: self.systemStack.push(0)
def executeCmpSubstr(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1 in op2: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeCmpiSubstr(self, instr):
op1 = self.systemStack.pop()
op2 = self.systemStack.pop()
if op1.lower() in op2.lower(): self.systemStack.push(1)
else: self.systemStack.push(0)
def executeIn(self, instr):
options = self.systemStack.pop().split('|')
value = self.systemStack.pop()
if value in options: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeInig(self, instr):
options = [w.lower() for w in self.systemStack.pop().split('|')]
value = self.systemStack.pop().lower()
if value in options: self.systemStack.push(1)
else: self.systemStack.push(0)
def executeConcat(self, instr):
ops = self.getOperands(instr)
string = ""
for op in ops: string += op
self.systemStack.push(string)
def executeChunk(self, instr):
ops = self.getOperands(instr)
#If there is only one operand it's the full content of the chunk.
if len(ops) == 1:
chunk = '^' + ops[0] + '$'
else:
name = ops[0]
tags = ops[1]
chunk = '^' + name + tags
if len(ops) > 2:
#Only output enclosing {} in the chunker, in the interchunk the
#'chcontent' will already have the {}.
if self.vm.transferStage == TRANSFER_STAGE.CHUNKER: chunk += '{'
for op in ops[2:]: chunk += op
if self.vm.transferStage == TRANSFER_STAGE.CHUNKER: chunk += '}'
chunk += '$'
self.systemStack.push(chunk)
def executeEndsWith(self, instr):
suffixes = self.systemStack.pop()
word = self.systemStack.pop()
for suffix in suffixes.split("|"):
if word.endswith(suffix):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeEndsWithIg(self, instr):
suffixes = self.systemStack.pop()
word = self.systemStack.pop().lower()
for suffix in suffixes.split("|"):
if word.endswith(suffix.lower()):
self.systemStack.push(1)
return
self.systemStack.push(0)
def executeJmp(self, instr):
jumpTo = int(instr[1])
self.modifyPC(jumpTo)
def executeJz(self, instr):
condition = self.systemStack.pop()
if condition == 0:
jumpTo = int(instr[1])
self.modifyPC(jumpTo)
def executeJnz(self, instr):
condition = self.systemStack.pop()
if condition != 0:
jumpTo = int(instr[1])
self.modifyPC(jumpTo)
def executeLu(self, instr):
ops = self.getOperands(instr)
lu = "^"
for op in ops: lu += op
lu += "$"
#If the lu is empty, only the ^$, then push an empty string.
if len(lu) == 2: self.systemStack.push("")
else: self.systemStack.push(lu)
def executeLuCount(self, instr):
chunk = self.vm.words[self.vm.currentWords[0]]
self.systemStack.push(len(chunk.content))
def executeMlu(self, instr):
ops = self.getOperands(instr)
#Append the lexical units, removing its ^...$
mlu = "^" + ops[0][1:-1]
for op in ops[1:]: mlu += "+" + op[1:-1]
mlu += "$"
self.systemStack.push(mlu)
def executeCaseOf(self, instr):
value = self.systemStack.pop()
case = self.getCase(value)
self.systemStack.push(case)
def executeGetCaseFrom(self, instr):
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
lem = lu.attrs['lem']
case = self.getCase(lem)
self.systemStack.push(case)
def executeModifyCase(self, instr):
case = self.systemStack.pop()
container = self.systemStack.pop()
if container != "":
if case == "aa": container = container.lower()
elif case == "Aa": container = container[0].upper() + container[1:]
elif case == "AA": container = container.upper()
self.systemStack.push(container)
def executeOut(self, instr):
ops = self.getOperands(instr)
out = ""
for op in ops: out += op
self.vm.writeOutput(out)
def executePush(self, instr):
#If it's a string, push it without quotes.
if '"' in instr[1]: self.systemStack.push(instr[1].replace('"', ''))
#Push strings containing numbers as int.
elif instr[1].isnumeric(): self.systemStack.push(int(instr[1]))
#If it's a variable reference, eval it and push the value.
else:
varName = instr[1]
try:
self.systemStack.push(self.vm.variables[varName])
except:
self.vm.variables[varName] = ""
self.systemStack.push("")
def executePushbl(self, instr):
self.systemStack.push(" ")
def executePushsb(self, instr):
#The position is relative to the current word(s), so we have to get the
#actual one. For the postchunk, the relative is the actual one because
#each chunk stores the blanks in their content.
relativePos = int(instr[1])
try:
if self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
word = self.vm.words[self.vm.currentWords[0]]
self.systemStack.push(word.blanks[relativePos])
else:
actualPos = relativePos + self.vm.currentWords[0]
self.systemStack.push(self.vm.superblanks[actualPos])
except:
self.systemStack.push("")
def executeStorecl(self, instr):
value = self.systemStack.pop()
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
lemmaAndTags = lu.attrs['lem'] + lu.attrs['tags']
self.handleStoreClipInstruction(parts, lu, lemmaAndTags, value)
def executeStoresl(self, instr):
value = self.systemStack.pop()
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getSourceLexicalUnit(pos)
self.handleStoreClipInstruction(parts, lu, lu.lu, value)
def executeStoretl(self, instr):
value = self.systemStack.pop()
parts = self.systemStack.pop()
pos = self.systemStack.pop()
lu = self.getTargetLexicalUnit(pos)
self.handleStoreClipInstruction(parts, lu, lu.lu, value)
def handleStoreClipInstruction(self, parts, lu, lemmaAndTags, value):
oldLu = lu.lu
change = False
if parts in ('lem', 'lemh', 'lemq', 'tags'):
lu.modifyAttr(parts, value)
change = True
elif parts == 'chcontent':
lu.modifyAttr(parts, value)
if self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
#If we are in the postchunk stage and change the chunk content
#we need to parse it again, so we can use it as lexical units.
chunkWord = self.vm.words[self.vm.currentWords[0]]
chunkWord.parseChunkContent()
elif parts == 'whole':
lu.modifyAttr(parts, value)
change = True
else:
longestMatch = ""
for part in parts.split('|'):
if part in lemmaAndTags:
if len(part) > len(longestMatch): longestMatch = part
if longestMatch:
lu.modifyTag(longestMatch, value)
change = True
if change and self.vm.transferStage == TRANSFER_STAGE.POSTCHUNK:
#Update the chunk content when changing a lu inside the chunk.
chunkWord = self.vm.words[self.vm.currentWords[0]]
chunkWord.updateChunkContent(oldLu, lu.lu)
def executeStorev(self, instr):
value = self.systemStack.pop()
varName = self.systemStack.pop()
self.vm.variables[varName] = value
|
If there is one thing to be said about NeNe Leakes, it’s that she doesn’t hold back, not even from the other Real Housewives franchise stars.
She went on to express some not-so-friendly words about Real Housewives of New Jersey’s Melissa Gorga, saying that she should show her sister-in-law more respect.
Now the ladies are firing back.
In a recent interview with InTouch, Glanville responded to the New Normal actress. "I feel like if NeNe is going to make statements about me, she should spend more than five minutes with me. She should get to know me before judging me,” she said.
|
# -*- coding: utf-8 -*-
"""
Created on 14 Apr 2014
@author: Kimon Tsitsikas
Copyright © 2013-2014 Kimon Tsitsikas, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License version 2 as published by the Free
Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
"""
from __future__ import division
from concurrent.futures._base import CancelledError, CANCELLED, FINISHED, \
RUNNING
import logging
import math
import numpy
from odemis import model
from odemis.acq.align import coordinates, autofocus
from odemis.acq.align.autofocus import AcquireNoBackground, MTD_EXHAUSTIVE
from odemis.dataio import tiff
from odemis.util import executeAsyncTask
from odemis.util.spot import FindCenterCoordinates, GridPoints, MaximaFind, EstimateLatticeConstant
from odemis.util.transform import AffineTransform
import os
from scipy.spatial import cKDTree as KDTree
import threading
import time
ROUGH_MOVE = 1 # Number of max steps to reach the center in rough move
FINE_MOVE = 10 # Number of max steps to reach the center in fine move
FOV_MARGIN = 250 # pixels
# Type of move in order to center the spot
STAGE_MOVE = "Stage move"
BEAM_SHIFT = "Beam shift"
OBJECTIVE_MOVE = "Objective lens move"
def MeasureSNR(image):
# Estimate noise
bl = image.metadata.get(model.MD_BASELINE, 0)
if image.max() < bl * 2:
return 0 # nothing looks like signal
sdn = numpy.std(image[image < (bl * 2)])
ms = numpy.mean(image[image >= (bl * 2)]) - bl
# Guarantee no negative snr
if ms <= 0 or sdn <= 0:
return 0
snr = ms / sdn
return snr
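# Hedged usage sketch for MeasureSNR, not called by the alignment code: a
# model.DataArray is a numpy array with a .metadata dict, so a synthetic image
# with a known baseline can exercise the Rose-criterion logic. All the numbers
# below are made up for illustration.
def _measure_snr_demo():
    img = numpy.random.normal(100, 5, (256, 256))  # noise around the baseline
    img[120:130, 120:130] += 5000  # a bright square standing in for the spot
    da = model.DataArray(img, metadata={model.MD_BASELINE: 100})
    return MeasureSNR(da)  # far above 5 for such a strong signal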
def AlignSpot(ccd, stage, escan, focus, type=OBJECTIVE_MOVE, dfbkg=None, rng_f=None, logpath=None):
"""
Wrapper for DoAlignSpot. It provides the ability to check the progress of
spot mode procedure or even cancel it.
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
focus (model.Actuator): The optical focus
type (string): Type of move in order to align
dfbkg (model.DataFlow): dataflow of se- or bs- detector for background
subtraction
rng_f (tuple of floats): range to apply Autofocus on if needed
    returns (model.ProgressiveFuture): Progress of DoAlignSpot,
      whose result() will return:
        (float): Final distance to the center (m)
        (2 floats): vector to the spot from the center (m, m)
"""
# Create ProgressiveFuture and update its state to RUNNING
est_start = time.time() + 0.1
f = model.ProgressiveFuture(start=est_start,
end=est_start + estimateAlignmentTime(ccd.exposureTime.value))
f._task_state = RUNNING
# Task to run
f.task_canceller = _CancelAlignSpot
f._alignment_lock = threading.Lock()
f._done = threading.Event()
# Create autofocus and centerspot module
f._autofocusf = model.InstantaneousFuture()
f._centerspotf = model.InstantaneousFuture()
# Run in separate thread
executeAsyncTask(f, _DoAlignSpot,
args=(f, ccd, stage, escan, focus, type, dfbkg, rng_f, logpath))
return f
def _DoAlignSpot(future, ccd, stage, escan, focus, type, dfbkg, rng_f, logpath):
"""
Adjusts settings until we have a clear and well focused optical spot image,
detects the spot and manipulates the stage so as to move the spot center to
the optical image center. If no spot alignment is achieved an exception is
raised.
future (model.ProgressiveFuture): Progressive future provided by the wrapper
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
focus (model.Actuator): The optical focus
type (string): Type of move in order to align
dfbkg (model.DataFlow): dataflow of se- or bs- detector
rng_f (tuple of floats): range to apply Autofocus on if needed
    returns (float): Final distance to the center (m)
    (2 floats): vector to the spot from the center (m, m)
raises:
CancelledError() if cancelled
IOError
"""
init_binning = ccd.binning.value
init_et = ccd.exposureTime.value
init_cres = ccd.resolution.value
init_scale = escan.scale.value
init_eres = escan.resolution.value
    # TODO: allow to pass the precision as argument. As for the Delphi, we don't
    # need such accuracy on the alignment (as it's just for twin stage calibration).
# TODO: take logpath as argument, to store images later on
logging.debug("Starting Spot alignment...")
try:
if future._task_state == CANCELLED:
raise CancelledError()
# Configure CCD and set ebeam to spot mode
logging.debug("Configure CCD and set ebeam to spot mode...")
_set_blanker(escan, False)
ccd.binning.value = ccd.binning.clip((2, 2))
ccd.resolution.value = ccd.resolution.range[1]
ccd.exposureTime.value = 0.3
escan.scale.value = (1, 1)
escan.resolution.value = (1, 1)
if future._task_state == CANCELLED:
raise CancelledError()
logging.debug("Adjust exposure time...")
if dfbkg is None:
# Long exposure time to compensate for no background subtraction
ccd.exposureTime.value = 1.1
else:
# TODO: all this code to decide whether to pick exposure 0.3 or 1.5?
# => KISS! Use always 1s... or allow up to 5s?
# Estimate noise and adjust exposure time based on "Rose criterion"
image = AcquireNoBackground(ccd, dfbkg)
snr = MeasureSNR(image)
while snr < 5 and ccd.exposureTime.value < 1.5:
ccd.exposureTime.value = ccd.exposureTime.value + 0.2
image = AcquireNoBackground(ccd, dfbkg)
snr = MeasureSNR(image)
logging.debug("Using exposure time of %g s", ccd.exposureTime.value)
if logpath:
tiff.export(os.path.join(logpath, "align_spot_init.tiff"), [image])
hqet = ccd.exposureTime.value # exposure time for high-quality (binning == 1x1)
if ccd.binning.value == (2, 2):
hqet *= 4 # To compensate for smaller binning
logging.debug("Trying to find spot...")
for i in range(3):
if future._task_state == CANCELLED:
raise CancelledError()
if i == 0:
future._centerspotf = CenterSpot(ccd, stage, escan, ROUGH_MOVE, type, dfbkg)
dist, vector = future._centerspotf.result()
elif i == 1:
logging.debug("Spot not found, auto-focusing...")
try:
# When Autofocus set binning 8 if possible, and use exhaustive
# method to be sure not to miss the spot.
ccd.binning.value = ccd.binning.clip((8, 8))
future._autofocusf = autofocus.AutoFocus(ccd, None, focus, dfbkg, rng_focus=rng_f, method=MTD_EXHAUSTIVE)
lens_pos, fm_level = future._autofocusf.result()
# Update progress of the future
future.set_progress(end=time.time() +
estimateAlignmentTime(hqet, dist, 1))
except IOError as ex:
logging.error("Autofocus on spot image failed: %s", ex)
raise IOError('Spot alignment failure. AutoFocus failed.')
logging.debug("Trying again to find spot...")
future._centerspotf = CenterSpot(ccd, stage, escan, ROUGH_MOVE, type, dfbkg)
dist, vector = future._centerspotf.result()
elif i == 2:
if dfbkg is not None:
# In some case background subtraction goes wrong, and makes
# things worse, so try without.
logging.debug("Trying again to find spot, without background subtraction...")
dfbkg = None
future._centerspotf = CenterSpot(ccd, stage, escan, ROUGH_MOVE, type, dfbkg)
dist, vector = future._centerspotf.result()
if dist is not None:
if logpath:
image = AcquireNoBackground(ccd, dfbkg)
tiff.export(os.path.join(logpath, "align_spot_found.tiff"), [image])
break
else:
raise IOError('Spot alignment failure. Spot not found')
ccd.binning.value = (1, 1)
ccd.exposureTime.value = ccd.exposureTime.clip(hqet)
# Update progress of the future
future.set_progress(end=time.time() +
estimateAlignmentTime(hqet, dist, 1))
logging.debug("After rough alignment, spot center is at %s m", vector)
# Limit FoV to save time
logging.debug("Cropping FoV...")
CropFoV(ccd, dfbkg)
if future._task_state == CANCELLED:
raise CancelledError()
# Update progress of the future
future.set_progress(end=time.time() +
estimateAlignmentTime(hqet, dist, 0))
# Center spot
if future._task_state == CANCELLED:
raise CancelledError()
logging.debug("Aligning spot...")
# No need to be so precise with a stage move (eg, on the DELPHI), as the
# stage is quite imprecise anyway and the alignment is further adjusted
# using the beam shift (later).
mx_steps = FINE_MOVE if type != STAGE_MOVE else ROUGH_MOVE
future._centerspotf = CenterSpot(ccd, stage, escan, mx_steps, type, dfbkg, logpath)
dist, vector = future._centerspotf.result()
if dist is None:
raise IOError('Spot alignment failure. Cannot reach the center.')
logging.info("After fine alignment, spot center is at %s m", vector)
return dist, vector
finally:
ccd.binning.value = init_binning
ccd.exposureTime.value = init_et
ccd.resolution.value = init_cres
escan.scale.value = init_scale
escan.resolution.value = init_eres
_set_blanker(escan, True)
with future._alignment_lock:
future._done.set()
if future._task_state == CANCELLED:
raise CancelledError()
future._task_state = FINISHED
def _CancelAlignSpot(future):
"""
Canceller of _DoAlignSpot task.
"""
logging.debug("Cancelling spot alignment...")
with future._alignment_lock:
if future._task_state == FINISHED:
return False
future._task_state = CANCELLED
future._autofocusf.cancel()
future._centerspotf.cancel()
logging.debug("Spot alignment cancelled.")
# Do not return until we are really done (modulo 10 seconds timeout)
future._done.wait(10)
return True
def estimateAlignmentTime(et, dist=None, n_autofocus=2):
"""
Estimates spot alignment procedure duration
et (float): exposure time #s
dist (float): distance from center #m
n_autofocus (int): number of autofocus procedures
returns (float): process estimated time #s
"""
return estimateCenterTime(et, dist) + n_autofocus * autofocus.estimateAutoFocusTime(et) # s
def _set_blanker(escan, active):
"""
    Set the blanker to the given state iff the blanker doesn't support "automatic"
mode (ie, None).
escan (ebeam scanner)
active (bool): True = blanking = no ebeam
"""
try:
if (model.hasVA(escan, "blanker")
                and None not in escan.blanker.choices
):
# Note: we assume that this is blocking, until the e-beam is
# ready to acquire an image.
escan.blanker.value = active
except Exception:
logging.exception("Failed to set the blanker to %s", active)
def FindSpot(image, sensitivity_limit=100):
"""
This function detects the spot and calculates and returns the coordinates of
its center. The algorithms for spot detection and center calculation are
similar to the ones that are used in Fine alignment.
image (model.DataArray): Optical image
sensitivity_limit (int): Limit of sensitivity in spot detection
returns (tuple of floats): Position of the spot center in px (from the
left-top corner of the image), possibly with sub-pixel resolution.
raises:
LookupError() if spot was not found
"""
subimages, subimage_coordinates = coordinates.DivideInNeighborhoods(image, (1, 1), 20, sensitivity_limit)
if not subimages:
raise LookupError("No spot detected")
spot_coordinates = [FindCenterCoordinates(i) for i in subimages]
optical_coordinates = coordinates.ReconstructCoordinates(subimage_coordinates, spot_coordinates)
# Too many spots detected
if len(optical_coordinates) > 10:
logging.info("Found %d potential spots on image with data %s -> %s",
len(optical_coordinates), image.min(), image.max())
raise LookupError("Too many spots detected")
# Pick the brightest one
max_intensity = 0
max_pos = optical_coordinates[0]
for i in optical_coordinates:
x, y = int(round(i[1])), int(round(i[0]))
if image[x, y] >= max_intensity:
max_pos = i
max_intensity = image[x, y]
return max_pos
def FindGridSpots(image, repetition):
"""
Find the coordinates of a grid of spots in an image. And find the
corresponding transformation to transform a grid centered around the origin
to the spots in an image.
Parameters
----------
image : array like
Data array containing the greyscale image.
repetition : tuple of ints
Number of expected spots in (X, Y).
Returns
-------
spot_coordinates : array like
A 2D array of shape (N, 2) containing the coordinates of the spots,
        with respect to the top left of the image.
translation : tuple of two floats
Translation from the origin to the center of the grid in image space,
origin is top left of the image. Primary axis points right and the
secondary axis points down.
scaling : tuple of two floats
Scaling factors for primary and secondary axis.
rotation : float
Rotation in image space, positive rotation is clockwise.
shear : float
Horizontal shear factor. A positive shear factor transforms a coordinate
in the positive x direction parallel to the x axis.
"""
# Find the center coordinates of the spots in the image.
spot_positions = MaximaFind(image, repetition[0] * repetition[1])
    if len(spot_positions) < repetition[0] * repetition[1]:
        logging.warning('Not enough spots found, returning only the found spots.')
        # Return the same number of values as the success path below.
        return spot_positions, None, None, None, None
# Estimate the two most common (orthogonal) directions in the grid of spots, defined in the image coordinate system.
lattice_constants = EstimateLatticeConstant(spot_positions)
# Each row in the lattice_constants array corresponds to one direction. By transposing the array the direction
# vectors are on the columns of the array. This allows us to directly use them as a transformation matrix.
transformation_matrix = numpy.transpose(lattice_constants)
# Translation is the mean of the spots, which is the distance from the origin to the center of the grid of spots.
translation = numpy.mean(spot_positions, axis=0)
transform_to_spot_positions = AffineTransform(matrix=transformation_matrix, translation=translation)
# Iterative closest point algorithm - single iteration, to fit a grid to the found spot positions
grid = GridPoints(*repetition)
spot_grid = transform_to_spot_positions(grid)
tree = KDTree(spot_positions)
dd, ii = tree.query(spot_grid, k=1)
# Sort the original spot positions by mapping them to the order of the GridPoints.
pos_sorted = spot_positions[ii.ravel(), :]
# Find the transformation from a grid centered around the origin to the sorted positions.
transformation = AffineTransform.from_pointset(grid, pos_sorted)
spot_coordinates = transformation(grid)
return spot_coordinates, translation, transformation.scale, transformation.rotation, transformation.shear
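# Hedged usage sketch (shapes only; a real image is needed for MaximaFind):
#   spots, translation, scale, rotation, shear = FindGridSpots(image, (8, 8))
# gives the fitted (64, 2) grid plus the affine parameters; when fewer maxima
# than repetition[0] * repetition[1] are found, the found positions come back
# with None for the remaining values instead.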
def CropFoV(ccd, dfbkg=None):
"""
Limit the ccd FoV to just contain the spot, in order to save some time
on AutoFocus process.
ccd (model.DigitalCamera): The CCD
"""
image = AcquireNoBackground(ccd, dfbkg)
center_pxs = ((image.shape[1] / 2),
(image.shape[0] / 2))
try:
spot_pxs = FindSpot(image)
except LookupError:
logging.warning("Couldn't locate spot when cropping CCD image, will use whole FoV")
ccd.binning.value = (1, 1)
ccd.resolution.value = ccd.resolution.range[1]
return
tab_pxs = [a - b for a, b in zip(spot_pxs, center_pxs)]
max_dim = int(max(abs(tab_pxs[0]), abs(tab_pxs[1])))
range_x = (ccd.resolution.range[0][0], ccd.resolution.range[1][0])
range_y = (ccd.resolution.range[0][1], ccd.resolution.range[1][1])
ccd.resolution.value = (sorted((range_x[0], 2 * max_dim + FOV_MARGIN, range_x[1]))[1],
sorted((range_y[0], 2 * max_dim + FOV_MARGIN, range_y[1]))[1])
ccd.binning.value = (1, 1)
def CenterSpot(ccd, stage, escan, mx_steps, type=OBJECTIVE_MOVE, dfbkg=None, logpath=None):
"""
Wrapper for _DoCenterSpot.
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
mx_steps (int): Maximum number of steps to reach the center
type (*_MOVE or BEAM_SHIFT): Type of move in order to align
dfbkg (model.DataFlow or None): If provided, will be used to start/stop
the e-beam emission (it must be the dataflow of se- or bs-detector) in
order to do background subtraction. If None, no background subtraction is
performed.
returns (model.ProgressiveFuture): Progress of _DoCenterSpot,
whose result() will return:
(float): Final distance to the center #m
(2 floats): vector to the spot from the center (m, m)
"""
# Create ProgressiveFuture and update its state to RUNNING
est_start = time.time() + 0.1
f = model.ProgressiveFuture(start=est_start,
end=est_start + estimateCenterTime(ccd.exposureTime.value))
f._spot_center_state = RUNNING
f.task_canceller = _CancelCenterSpot
f._center_lock = threading.Lock()
# Run in separate thread
executeAsyncTask(f, _DoCenterSpot,
args=(f, ccd, stage, escan, mx_steps, type, dfbkg, logpath))
return f
def _DoCenterSpot(future, ccd, stage, escan, mx_steps, type, dfbkg, logpath):
"""
Iteratively acquires an optical image, finds the coordinates of the spot
(center) and moves the stage to this position. Repeats until the found
coordinates are at the center of the optical image or a maximum number of
steps is reached.
future (model.ProgressiveFuture): Progressive future provided by the wrapper
ccd (model.DigitalCamera): The CCD
stage (model.Actuator): The stage
escan (model.Emitter): The e-beam scanner
mx_steps (int): Maximum number of steps to reach the center
type (*_MOVE or BEAM_SHIFT): Type of move in order to align
dfbkg (model.DataFlow or None): If provided, will be used to start/stop
    the e-beam emission (it must be the dataflow of se- or bs-detector) in
order to do background subtraction. If None, no background subtraction is
performed.
returns (float or None): Final distance to the center (m)
(2 floats): vector to the spot from the center (m, m)
raises:
CancelledError() if cancelled
"""
try:
logging.debug("Aligning spot...")
steps = 0
# Stop once spot is found on the center of the optical image
dist = None
while True:
if future._spot_center_state == CANCELLED:
raise CancelledError()
# Wait to make sure no previous spot is detected
image = AcquireNoBackground(ccd, dfbkg)
if logpath:
tiff.export(os.path.join(logpath, "center_spot_%d.tiff" % (steps,)), [image])
try:
spot_pxs = FindSpot(image)
except LookupError:
return None, None
# Center of optical image
pixelSize = image.metadata[model.MD_PIXEL_SIZE]
center_pxs = (image.shape[1] / 2, image.shape[0] / 2)
# Epsilon distance below which the lens is considered centered. The worse of:
# * 1.5 pixels (because the CCD resolution cannot give us better)
# * 1 µm (because that's the best resolution of our actuators)
err_mrg = max(1.5 * pixelSize[0], 1e-06) # m
tab_pxs = [a - b for a, b in zip(spot_pxs, center_pxs)]
tab = (tab_pxs[0] * pixelSize[0], tab_pxs[1] * pixelSize[1])
logging.debug("Found spot @ %s px", spot_pxs)
# Stop if spot near the center or max number of steps is reached
dist = math.hypot(*tab)
if steps >= mx_steps or dist <= err_mrg:
break
# Move to the found spot
if type == OBJECTIVE_MOVE:
f = stage.moveRel({"x": tab[0], "y":-tab[1]})
f.result()
elif type == STAGE_MOVE:
f = stage.moveRel({"x":-tab[0], "y": tab[1]})
f.result()
else:
escan.translation.value = (-tab_pxs[0], -tab_pxs[1])
steps += 1
# Update progress of the future
future.set_progress(end=time.time() +
estimateCenterTime(ccd.exposureTime.value, dist))
return dist, tab
finally:
with future._center_lock:
if future._spot_center_state == CANCELLED:
raise CancelledError()
future._spot_center_state = FINISHED
def _CancelCenterSpot(future):
"""
Canceller of _DoCenterSpot task.
"""
logging.debug("Cancelling spot center...")
with future._center_lock:
if future._spot_center_state == FINISHED:
return False
future._spot_center_state = CANCELLED
logging.debug("Spot center cancelled.")
return True
def estimateCenterTime(et, dist=None):
"""
Estimates duration of reaching the center
"""
if dist is None:
steps = FINE_MOVE
else:
err_mrg = 1e-06
steps = math.log(dist / err_mrg) / math.log(2)
steps = min(steps, FINE_MOVE)
return steps * (et + 2) # s
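# Worked example with illustrative numbers: starting 1 mm from the centre with
# the 1 um error margin above, log2(1e-3 / 1e-6) ~ 10 halving steps are needed,
# which is already the FINE_MOVE cap, so the estimate is FINE_MOVE * (et + 2) s.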
|
Description Image shows a spring house in a field of tall grass. The house has wood siding and a shingled roof, and a well is visible outside of the house, under an overhang of the roof, to the left of the photograph. There are trees in the background and tree branches hang just above the house, appearing in the top left corner of the photograph. Image taken in section 1R of the Blue Ridge Parkway, between mileposts 155.4-165.3, in Floyd County, Va. Image was taken by Kenneth McCarter in July of 1939.
|
"""
Filesystem related methods
"""
import os
import yaml
from . import command as subprocess
from . import util
GIT = ['git']
REQCONF_FILE = '.reqconfig'
def read_file(path, ref=None):
"""
Read file from filesystem or git tree
"""
def _load_file_from_fs():
"""
Read and parse file from filesystem
"""
with open(path) as file_:
return file_.read()
def _load_file_from_git():
"""
Load file from git tree
"""
blob_sha1 = subprocess.get_output(
GIT + ['ls-tree', ref, path]
).split()[2]
return subprocess.get_output(
GIT + ['cat-file', 'blob', blob_sha1]
)
return _load_file_from_git() if ref else _load_file_from_fs()
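# Hedged usage sketch (path and ref are illustrative):
#   read_file('requirements.yaml')                # from the working tree
#   read_file('requirements.yaml', ref='HEAD~1')  # from that git tree instead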
def load_yamlfile(reqfile, ref=None, multiple=False):
"""
Load requirement file
"""
data = read_file(reqfile, ref)
    # safe_load avoids arbitrary object construction from untrusted files
    return yaml.safe_load_all(data) if multiple else yaml.safe_load(data)
def reqroot():
"""
Get .req-dir
"""
def _find_reqroot():
"""
Find reqroot
"""
for dir_ in util.walkup(os.getcwd()):
if os.path.exists(os.path.join(dir_, REQCONF_FILE)):
return dir_
raise Exception("Not inside req directory")
cwd = os.getcwd()
if cwd not in reqroot.cache:
reqroot.cache[cwd] = _find_reqroot()
return reqroot.cache[cwd]
reqroot.cache = {}
|
If you're a sole proprietor, you might believe running a business is a solo endeavor. But as you grow and expand, you'll learn this is not the case. There are certain aspects of a business many leaders need outside help with managing — such as accounting, legal processes or human resources — because they might not have the necessary skill, or they might simply lack the time.
When you're entrusting such important parts of your business to a third party, it's absolutely critical to find the right fit. To help local business owners find and evaluate vendors in the Dallas area, we asked a panel of Forbes Dallas Council members how they recommend finding trusted partners. Their best answers are below.
Members share their best advice for finding reliable business vendors in Dallas.
1. Seek recommendations from your peers.
2. Tap into the networks of people with shared experiences.
3. Ask your social media connections.
4. Do your research after receiving personal referrals.
As CFOs, we are continually helping our clients find new bookkeepers and accountants. Start with a personal referral, then do your due diligence. With a bookkeeper, this means calling the referral's accountant and seeing if they like the work product. Interview at least three vendors, and always go with the one you like best. You have to work with them, so you need to have a sense of connection. - Brooke Lively, Cathedral Capital, Inc.
5. Use Facebook's 'Recommendations' feature.
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from google.auth import credentials as auth_credentials
from google.cloud import aiplatform
# [START aiplatform_sdk_init_sample]
def init_sample(
project: Optional[str] = None,
location: Optional[str] = None,
experiment: Optional[str] = None,
staging_bucket: Optional[str] = None,
credentials: Optional[auth_credentials.Credentials] = None,
encryption_spec_key_name: Optional[str] = None,
):
aiplatform.init(
project=project,
location=location,
experiment=experiment,
staging_bucket=staging_bucket,
credentials=credentials,
encryption_spec_key_name=encryption_spec_key_name,
)
# [END aiplatform_sdk_init_sample]
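# Hedged usage sketch -- every value below is a placeholder, not a real
# resource; aiplatform.init() treats all of these parameters as optional:
# init_sample(
#     project="my-project",
#     location="us-central1",
#     staging_bucket="gs://my-staging-bucket",
# )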
|
It’s time the Government ended the confusion around casual employment agreements, Labour’s Workplace Relations and Safety spokesperson Iain Lees-Galloway says.
“Bay of Plenty District Health Board has been ordered to pay $7,500 to an employee who was inappropriately employed on a casual agreement. The determination of the Employment Relations Authority makes it clear that Wendy Rahiri was in fact a permanent part-time employee.
The lowest inflation since last century combined with rising unemployment and turbulent global markets is making a farce of monetary policy, says Labour’s Finance spokesperson Grant Robertson.
“New Zealand’s monetary policy is outdated and not fit to handle significant changes in the economy. This year is off to a volatile start already with markets plunging across the globe and major investor uncertainty. It’s important that we have up-to-date tools to handle it.
Global dairy prices have fallen for the second time in a row and with whole milk powder down 23 per cent since October, the worrying start to the year continues, says Labour’s Finance spokesperson Grant Robertson.
“The global economy has had a volatile start to 2016, with markets plunging, declining confidence in China’s economy and commodity prices on the slide. New Zealand isn’t insulated from this, especially with our over-reliance on dairy.
In the face of recent turmoil in the financial markets and a forecast increase in unemployment, National must work with business to keep up the positive economic confidence from the end of last year, says Labour’s Economic Development spokesperson David Clark.
“The optimism coming out of the December quarter surveys is good to see. It’s vital that employers have the enthusiasm to hire in the new year after a difficult 2015 and to counter Treasury’s predictions that unemployment will rise to 6.5% in the first half of the year.
Labour Finance Spokesperson and Chair of the Future of Work Commission Grant Robertson will travel to Paris this week to attend the OECD Future of Work Forum and to London where he will also undertake meetings and visits.
“We are at the halfway mark in terms of Labour’s Future of Work Commission, and this conference is a great opportunity to check in on the latest thinking in this area and test that against our work.
Plans to privatise workplace mediation services in provincial New Zealand have been met with dismay from both unions and employers, says Labour’s Workplace Relations and Safety Spokesperson Iain Lees-Galloway.
“In another attempt by the National Party to privatise core Government services, MBIE has quietly begun consultation on their plans to outsource mediation services and close their regional offices in Palmerston North, Napier and Dunedin. It is driven by ideology rather than evidence and the response from those who most often use the service has been uniformly negative.
New Zealanders are worse off with economic growth per person going backwards over the first nine months of 2015, says Labour's Finance Spokesperson Grant Robertson.
“Per capita GDP still remains lower now than it was at the end of 2014. This shows many Kiwis are not getting ahead under this Government. It is why employment is falling and unemployment is rising.
|
''' http.py
Copyright 2008 Corey Tabaka
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from bot import MessageHandler, BotCommandHandler
from admin import trusted
from threaded import threaded
import re, urllib
_reURL = re.compile('.*?((?:(https?)://|(?=www\.))[\w:#@%/;$()~_?\+-=\\\.&]+).*?', re.I)
_reTitle = re.compile('<title>(.*?)</title>', re.I | re.M)
_enabled = True
@BotCommandHandler('http')
@trusted
def _http(context):
'''Usage: http [enable|disable]\nEnables or disables URL titles; no param returns state'''
m = re.match('\s*(enable|disable)\s*', context.args or '', re.I)
if m:
op, = m.groups()
global _enabled
_enabled = op.lower() == 'enable'
elif not (context.args or ''):
context.reply('http titles %s' % ['DISABLED', 'ENABLED'][_enabled])
else:
context.reply('Usage: http [enable|disable]')
@MessageHandler
@threaded
def _handler(context):
m = _reURL.match(context.message)
if _enabled and m:
address, proto = m.groups()
proto = (proto or '').lower()
if not proto:
address = 'http://' + address
        if proto in ('http', 'https', ''):  # '' is a bare www. link, already prefixed above
fin = urllib.urlopen(address)
if fin.headers.gettype() == 'text/html':
title = ' '.join(_reTitle.findall(fin.read(4096))).strip()
fin.close()
if title:
context.reply('Title: ' + title)
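# Hedged sketch of the extraction regexes on canned text (no network needed):
#   _reURL.match('see www.example.com for details').groups()
#     -> ('www.example.com', None), so 'http://' gets prepended above
#   _reTitle.findall('<head><title>Example Domain</title></head>')
#     -> ['Example Domain']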
|
You may be wondering what the Right to Manage is, or what impact it will have on you. We have set out some information to questions that we are frequently asked about the Right to Manage. If you have a question that is not answered with the below information please feel free to contact us.
The Right to Manage is a statutory right for owners of leasehold flats to manage their block or estate. It allows the Property Owners to appoint a Managing Agent of their choosing or manage the property themselves if they so wish.
Right to Manage is the little brother of Enfranchisement, which is the statutory right of Property Owners to purchase the Freehold of their block. Enfranchisement has been around longer than Right to Manage, and while it would be a better option for Property Owners to purchase the Freehold, it became apparent that this was not an option for all Property Owners. This gave rise to the Right to Manage, which allows Property Owners to have control of the management of their property without having to go to the expense of purchasing the Freehold.
Why is the Right To Manage necessary?
There may be a number of reasons why exercising the Right to Manage is necessary. Usually it is because the property owners are unhappy with the current management arrangements; exercising the Right to Manage provides a means of exercising oversight and control over the current Managing Agent, or of appointing a Managing Agent of their choosing.
Often Managing Agents are appointed by the developer before a site is complete. The Managing Agent will continue to manage the property even though the developer no longer has any interest in the scheme and may have even sold their interest to an investment fund. In these circumstances, exercising the Right to Manage is usually the most appropriate way for property owners to have an involvement in the management of the property.
It may also be that the property owners would like to manage the property themselves. There does not have to be any dissatisfaction with the current management arrangements for property owners to exercise the Right to Manage.
What's the Process of Acquiring the Right to Manage?
The process for obtaining the Right to Manage is set out in the relevant legislation. There is an overview of the process below and we will undertake all the steps necessary to acquire the Right to Manage where the Property Owners have decided to do so.
The first, and possibly most important, part of the Right to Manage process is obtaining the agreement of the majority of Property Owners. Once there is an indication that there is sufficient interest, a company will be incorporated as a vehicle to acquire the right. All those that agree to acquire the Right to Manage will become members of this company, which is how a say in the management is achieved.
The company is limited by guarantee and therefore there is no financial liability associated with being a member of this company. Property Owners may become Directors of the company which will allow a greater deal of involvement in the management of the property.
The next stage is to serve a formal notice on the Freeholder. The Freeholder has the option to allow the right to be acquired or to deny the right. The ability to deny the right is very limited, and usually the Freeholder will either allow the right or not respond, which would mean the right to acquire is obtained.
A Managing Agent will only be permitted to contest the Right to Manage where they are named in the Lease. Again, their ability to contest the right is very limited.
Once the above steps are complete, the Right to Manage will be acquired and the Right to Manage Company will take over the management from a given date.
How much work will I have to put in?
Very little! Property Owners are only required to agree to acquiring the Right to Manage in order to gain a sufficient majority. Usually there will be a small number of individuals who are driving the process forward. Once the sufficient majority has been acquired we will undertake the necessary formalities at no cost to the Property Owners.
Property Owners can have as little or as much input into the running of the property as they wish. Property Owners are able to become Directors of the company that will be set up to acquire the Right to Manage. This will allow Property Owners to be more involved in the day to day management of the property.
|
# Copyright (C) 2006-2008, Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import signal
import subprocess
import sys
import time
from optparse import OptionParser
from gettext import gettext as _
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
from sugar3 import env
ERROR_NO_DISPLAY = 30
ERROR_NO_SERVER = 31
default_dimensions = (800, 600)
def _run_xephyr(display, dpi, dimensions, fullscreen):
cmd = ['Xephyr']
cmd.append(':%d' % display)
cmd.append('-ac')
cmd += ['-title', _('Sugar in a window')]
screen_size = (Gdk.Screen.width(), Gdk.Screen.height())
if (not dimensions) and (fullscreen is None) and \
(screen_size <= default_dimensions):
# no forced settings, screen too small => fit screen
fullscreen = True
elif not dimensions:
# screen is big enough or user has en/disabled fullscreen manually
# => use default size (will get ignored for fullscreen)
dimensions = '%dx%d' % default_dimensions
if not dpi:
dpi = Gtk.Settings.get_default().get_property('gtk-xft-dpi') / 1024
if fullscreen:
cmd.append('-fullscreen')
if dimensions:
cmd.append('-screen')
cmd.append(dimensions)
if dpi:
cmd.append('-dpi')
cmd.append('%d' % dpi)
cmd.append('-noreset')
try:
pipe = subprocess.Popen(cmd)
    except OSError as exc:
sys.stderr.write('Error executing server: %s\n' % (exc, ))
return None
return pipe
def _check_server(display):
result = subprocess.call(['xdpyinfo', '-display', ':%d' % display],
stdout=open(os.devnull, 'w'),
stderr=open(os.devnull, 'w'))
return result == 0
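# Hedged note: xdpyinfo exits 0 when an X server answers on that display, so
# _check_server doubles as a probe for free displays (before starting Xephyr)
# and for readiness (after starting it, in the retry loop below).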
def _kill_pipe(pipe):
"""Terminate and wait for child process."""
try:
os.kill(pipe.pid, signal.SIGTERM)
except OSError:
pass
pipe.wait()
def _start_xephyr(dpi, dimensions, fullscreen):
for display in range(30, 40):
if not _check_server(display):
pipe = _run_xephyr(display, dpi, dimensions, fullscreen)
if pipe is None:
return None, None
for i_ in range(10):
if _check_server(display):
return pipe, display
time.sleep(0.1)
_kill_pipe(pipe)
return None, None
def _start_window_manager():
cmd = ['metacity']
cmd.extend(['--no-force-fullscreen'])
GObject.spawn_async(cmd, flags=GObject.SPAWN_SEARCH_PATH)
def _setup_env(display, scaling, emulator_pid):
# We need to remove the environment related to gnome-keyring-daemon,
# so a new instance of gnome-keyring-daemon can be started and
# registered properly.
for variable in ['GPG_AGENT_INFO', 'SSH_AUTH_SOCK',
'GNOME_KEYRING_CONTROL', 'GNOME_KEYRING_PID']:
if variable in os.environ:
del os.environ[variable]
os.environ['SUGAR_EMULATOR'] = 'yes'
os.environ['GABBLE_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'telepathy-gabble.log')
os.environ['SALUT_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'telepathy-salut.log')
os.environ['MC_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'mission-control.log')
os.environ['STREAM_ENGINE_LOGFILE'] = os.path.join(
env.get_profile_path(), 'logs', 'telepathy-stream-engine.log')
os.environ['DISPLAY'] = ':%d' % (display)
os.environ['SUGAR_EMULATOR_PID'] = emulator_pid
os.environ['MC_ACCOUNT_DIR'] = os.path.join(
env.get_profile_path(), 'accounts')
if scaling:
os.environ['SUGAR_SCALING'] = scaling
def main():
"""Script-level operations"""
parser = OptionParser()
parser.add_option('-d', '--dpi', dest='dpi', type='int',
help='Emulator dpi')
parser.add_option('-s', '--scaling', dest='scaling',
help='Sugar scaling in %')
parser.add_option('-i', '--dimensions', dest='dimensions',
help='Emulator dimensions (ex. 1200x900)')
parser.add_option('-f', '--fullscreen', dest='fullscreen',
action='store_true', default=None,
help='Run emulator in fullscreen mode')
parser.add_option('-F', '--no-fullscreen', dest='fullscreen',
action='store_false',
help='Do not run emulator in fullscreen mode')
(options, args) = parser.parse_args()
if not os.environ.get('DISPLAY'):
sys.stderr.write('DISPLAY not set, cannot connect to host X server.\n')
return ERROR_NO_DISPLAY
server, display = _start_xephyr(options.dpi, options.dimensions,
options.fullscreen)
if server is None:
sys.stderr.write('Failed to start server. Please check output above'
' for any error message.\n')
return ERROR_NO_SERVER
_setup_env(display, options.scaling, str(server.pid))
command = ['dbus-launch', '--exit-with-session']
if not args:
command.append('sugar')
else:
_start_window_manager()
if args[0].endswith('.py'):
command.append('python')
command.append(args[0])
subprocess.call(command)
_kill_pipe(server)
|
It is interesting that these words come right on the heels of the tragic suicides of well known men and women like Kate Spade (fashion designer) and Anthony Bourdain (celebrity chef).
Darkness is often a trap that leads people to death and destruction, but the Lord has come to give abundant life and to light up the places where darkness has held many captive. People who have been trapped in darkness will suddenly see a great light; they will feel the wind of His Spirit blowing away the darkness and resurrecting their life. Lands (cities, regions, nations) will also experience a wind that comes to blow away the darkness that the enemy has brought upon it and God will breathe resurrection life into His original intent upon the land.
The spirit of death that dwells in dark places is being uncovered and blown away. The enemy has been whispering lies of torment to bring people to a place of utter hopelessness and despair, leading to an agreement with death. But the Lord has been raising up those who will stand in the gap through deep intercession and “groanings too deep for words” as conduits of resurrection life and light into the dark places. The prophets are prophesying life, freedom, light, and reformation and the revivalists are going out and bringing deliverance into the byways and highways.
This is the time of a great shift…in lives and in nations. A shift from darkness to light, from death to life. It is a battle that is being won on our knees, through our voices and upon the feet that bring good news.
Then He said to me, “Prophesy to the breath, prophesy, son of man, and say to the breath, ‘Thus says the Lord GOD, “Come from the four winds, O breath, and breathe on these slain, that they come to life.”’” So I prophesied as He commanded me, and the breath came into them, and they came to life and stood on their feet, an exceedingly great army. Then He said to me, “Son of man, these bones are the whole house of Israel; behold, they say, ‘Our bones are dried up and our hope has perished. We are completely cut off.’”
Prophesy, release your intercession, groan and watch the wind of His Spirit blow away the darkness. It is time and it is upon us!
|
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Layout to monitor crosswind flight status."""
from makani.control import system_params
from makani.gs.monitor import monitor_params
from makani.gs.monitor2.apps.layout import base
from makani.gs.monitor2.apps.plugins import common
from makani.gs.monitor2.apps.plugins.indicators import control
from makani.gs.monitor2.apps.plugins.indicators import ground_station
from makani.gs.monitor2.apps.plugins.indicators import motor
MONITOR_PARAMS = monitor_params.GetMonitorParams().contents
SYSTEM_PARAMS = system_params.GetSystemParams().contents
class CrosswindLayout(base.BaseLayout):
"""The crosswind layout."""
_NAME = 'Crosswind'
_DESIRED_VIEW_COLS = 12
_ORDER_HORIZONTALLY = False
# Derived class should set the _MODE.
_MODE = '<unset>'
def Initialize(self):
self._AddIndicators('Indicators', [
control.FlightPlanIndicator(),
control.FlightModeIndicator(self._MODE),
control.FlightModeGatesIndicator(self._MODE),
control.ControlTimeIndicator(self._MODE),
control.LoopCountIndicator(self._MODE),
control.TetherSphereDeviationIndicator(self._MODE),
control.CrosswindPlaybookIndicator(),
control.AlphaErrorIndicator(),
control.BetaErrorIndicator(),
control.AirspeedErrorIndicator(),
], properties={'cols': 2})
self._AddIndicators('Altitude', [
control.AltitudeChart(
self._MODE, panel_ratio=0.26, aspect_ratio=1.8, num_yticks=5,
ylim=[-20, 500]),
ground_station.DetwistErrorChart(
num_yticks=5, aspect_ratio=1.5),
], properties={'cols': 2})
self._AddIndicators('', [
motor.StackBusPowerChart(
self._MODE, 'Gen. Wing Power', num_yticks=5,
aspect_ratio=2.5, ylim=[-1000, 1000]),
ground_station.WindIndicator(),
], properties={'cols': 3})
widget_kwargs = {
'panel_ratio': 0.17,
'aspect_ratio': 7.5,
'num_yticks': 7,
}
max_tension_kn = round(MONITOR_PARAMS.tether.proof_load / 1e3)
self._AddIndicators('Charts', [
control.TensionChart(self._MODE, ylim=[0.0, max_tension_kn],
**widget_kwargs),
control.AeroAnglesChart(self._MODE, ylim=[-10, 10], **widget_kwargs),
control.AirSpeedChart(self._MODE, ylim=[0, 80], **widget_kwargs),
control.BodyRatesChart(self._MODE, ylim=(-15.0, 15.0),
angles=['Pitch', 'Roll'], **widget_kwargs),
control.CrosswindDeltasChart(
self._MODE, ylim=[-15, 15], **widget_kwargs),
], properties={'cols': 8})
self._AddBreak()
self._AddIndicators('Flight Circle', [
# TODO: Use full comms mode currently, because target
# path radius is not in TetherDown yet.
control.CrosswindCircleWindow(common.FULL_COMMS_MODE),
], properties={'cols': 2})
self._AddIndicators('Aero angles', [
control.AeroAnglesXYPlot(self._MODE),
], properties={'cols': 2})
self._AddIndicators('Trans-in Trajectory', [
control.TransInTrajectoryChart(self._MODE),
], properties={'cols': 2})
self._AddIndicators('Crosswind Stats', [
control.LowBoundLoopAltitudeIndicator(self._MODE),
], properties={'cols': 2})
|
Nunes’ announcement came as the House Ethics Committee released a statement disclosing that the committee was investigating public allegations that Nunes made unauthorized disclosure of classified documents.
Nunes was not pushed by House Republican leadership to recuse himself, but it was not helpful to have other Republicans, notably Sen. John McCain (R-AZ) and Sen. Lindsey Graham (R-SC) join Democrats in questioning his motives and competence.
Nunes’ actions further demonstrated the need for an independent inquiry into Russia’s actions during the campaign, Schiff said.
The recusal is temporary; the chairman intends to resume full control of the committee as soon as the House Ethics Committee resolves the charges made by left-wing groups, the source said.
The source said Nunes hopes the Ethics Committee will work on his case during the Easter recess so the investigation can be wrapped up as soon as possible.
Capitol Hill conservatives are now watching to see if Nunes will also recuse himself from the probe into former national security adviser Susan Rice.
|
from __future__ import print_function, division
import matplotlib
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import Net, RealApplianceSource, BLSTMLayer, DimshuffleLayer
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import crossentropy, mse
from lasagne.init import Uniform, Normal
from lasagne.layers import LSTMLayer, DenseLayer, Conv1DLayer, ReshapeLayer, FeaturePoolLayer
from lasagne.updates import nesterov_momentum
from functools import partial
import os
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment
from neuralnilm.net import TrainingError
import __main__
from copy import deepcopy
from math import sqrt
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 250
GRADIENT_STEPS = 100
"""
e233
based on e131c but with:
* lag=32
* pool
e234
* init final layer and conv layer
235
no lag
236
should be exactly as 131c: no pool, no lag, no init for final and conv layer
237
putting the pool back
238
seems pooling hurts us! disable pooling.
enable lag = 32
239
BLSTM
lag = 20
240
LSTM not BLSTM
various lags
241
output is prediction
252
attempt to predict fdiff 1 sample ahead. Unfair?
253
regurgitate fdiff from 1 sample ago
254
lag of 10 time steps
255
lag of 5 time steps
257
slowly increasing lag
258
output is different appliances diff
259
start off just trying to regurgitate diff of aggregate
then swap to disaggregation (to diff)
"""
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television',
'dish washer',
['washer dryer', 'washing machine']
],
max_appliance_powers=[2500]*5,
on_power_thresholds=[5] * 5,
max_input_power=5900,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=1500,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
# skip_probability=0.0,
n_seq_per_batch=50,
# subsample_target=5,
include_diff=True,
include_power=False,
clip_appliance_power=True,
target_is_prediction=True,
lag=1,
target_is_diff=True
)
def change_learning_rate(net, epoch, learning_rate):
net.updates = partial(nesterov_momentum, learning_rate=learning_rate)
net.compile()
def change_lag(net, epoch, lag):
net.source.lag = lag
net.compile()
from theano.ifelse import ifelse
import theano.tensor as T
THRESHOLD = 0
def scaled_cost(x, t):
sq_error = (x - t) ** 2
def mask_and_mean_sq_error(mask):
masked_sq_error = sq_error[mask.nonzero()]
mean = masked_sq_error.mean()
mean = ifelse(T.isnan(mean), 0.0, mean)
return mean
above_thresh_mean = mask_and_mean_sq_error(t > THRESHOLD)
below_thresh_mean = mask_and_mean_sq_error(t <= THRESHOLD)
return (above_thresh_mean + below_thresh_mean) / 2.0
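# Hedged numpy sketch of scaled_cost for checking values outside Theano; it
# mirrors the two-sided masked mean above and treats an empty mask as 0, like
# the T.isnan() guard does.
def _scaled_cost_demo(x, t):
    import numpy as np
    x, t = np.asarray(x, dtype=float), np.asarray(t, dtype=float)
    sq_error = (x - t) ** 2
    def masked_mean(mask):
        vals = sq_error[mask]
        return vals.mean() if vals.size else 0.0
    return (masked_mean(t > THRESHOLD) + masked_mean(t <= THRESHOLD)) / 2.0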
def new_source(net, epoch):
source_dict_copy = deepcopy(source_dict)
source_dict_copy['target_is_prediction'] = False
net.source = RealApplianceSource(**source_dict_copy)
net.generate_validation_data_and_set_shapes()
net.loss_function = scaled_cost
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
loss_function=mse,
updates=partial(nesterov_momentum, learning_rate=0.1),
layers_config=[
{
'type': LSTMLayer,
'num_units': 50,
'gradient_steps': GRADIENT_STEPS,
'peepholes': False,
'W_in_to_cell': Normal(std=1.)
}
],
layer_changes={
1001: {
'remove_from': -3,
'callback': new_source,
'new_layers': [
{
'type': DenseLayer,
'num_units': 5,
'nonlinearity': None,
'W': Normal(std=(1/sqrt(50)))
}
]
}
}
)
def exp_x(name):
global source
    source = RealApplianceSource(**source_dict)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
net_dict_copy['layers_config'].append(
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': None,
'W': Normal(std=(1/sqrt(50)))
}
)
net = Net(**net_dict_copy)
return net
def main():
experiment = 'a'
full_exp_name = NAME + experiment
path = os.path.join(PATH, full_exp_name)
print("***********************************")
print("Preparing", full_exp_name, "...")
try:
net = exp_x(full_exp_name)
run_experiment(net, path, epochs=None)
except KeyboardInterrupt:
return
except TrainingError as exception:
print("EXCEPTION:", exception)
except Exception as exception:
print("EXCEPTION:", exception)
if __name__ == "__main__":
main()
|
You’ve discovered JOCO and are super keen to stock our products. Now take a moment to complete your details on the form below. We will do our very best to contact you as quickly as possible. Thanks for your interest. Chat soon!
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-19 17:19
from __future__ import unicode_literals
import django.core.files.storage
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('world', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='world',
name='description',
),
migrations.AddField(
model_name='world',
name='rating',
field=models.PositiveIntegerField(default=80, verbose_name='Rating'),
),
migrations.AlterField(
model_name='world',
name='image',
field=models.ImageField(storage=django.core.files.storage.FileSystemStorage(base_url='/web/worlds/__attach/', location='../media/worlds/__attach/'), upload_to='', verbose_name='Image'),
),
migrations.AlterField(
model_name='world',
name='slug',
field=models.SlugField(unique=True, verbose_name='Slug'),
),
migrations.AlterField(
model_name='world',
name='title',
field=models.CharField(max_length=255, verbose_name='Title'),
),
]
|
Title - What is the Conference about?
The Ageing of Regional Victoria: Problem or Opportunity?
Supermarkets – Scourge or Saviour?
How does film-induced tourism affect a country town?
MY TOWN - A town responding to the challenge of change.
|
#The purpose of this program is to quickly evaluate and test different DFT routines in Python for comparison to the slightly more *ahem* arcane implementation of FFTW
import scipy
import math
import numpy as np
import matplotlib.pyplot as pyplot
size = 20 #Array size for functions
def main():
f = []
print np.arange(size)
for i in np.arange(size):
        #f.append(5) #Constant function
#f.append(math.sin(2*math.pi*i/size)) #Single-frequency sine wave
f.append(math.sin(2*math.pi*i/size) + math.sin(10*math.pi*i/size)) #Multiple sine waves
#pyplot.plot(2*math.pi*np.arange(size)/size, f)
pyplot.plot(np.arange(size), f)
pyplot.show()
npf = np.array(f)
print npf
npf_fft = np.fft.fft(npf)
print npf_fft
#pyplot.plot(2*math.pi*np.arange(size)/size, np.imag(npf_fft), 'b')
#pyplot.plot(2*math.pi*np.arange(size)/size, np.real(npf_fft), 'r')
#pyplot.plot(2*math.pi*np.arange(size)/size, np.abs(npf_fft), 'k')
pyplot.plot(np.arange(size), np.imag(npf_fft), 'b')
pyplot.plot(np.arange(size), np.real(npf_fft), 'r')
pyplot.plot(np.arange(size), np.abs(npf_fft), 'k')
pyplot.show()
npf_fft_ifft = np.fft.ifft(npf_fft)
print npf_fft_ifft
#pyplot.plot(2*math.pi*np.arange(size)/size, np.real(npf), 'b')
#pyplot.plot(2*math.pi*np.arange(size)/size, np.real(npf_fft_ifft), 'r')
pyplot.plot(np.arange(size), np.real(npf), 'b')
pyplot.plot(np.arange(size), np.real(npf_fft_ifft), 'r')
pyplot.show()
if __name__ == '__main__':
main()
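# Hedged note: to label the spectrum with physical frequencies instead of bin
# indices, numpy provides fftfreq, e.g. (inside main, where npf_fft exists):
#   freqs = np.fft.fftfreq(size)          # cycles per sample
#   pyplot.plot(freqs, np.abs(npf_fft))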
|
Ausbert The Senator Of The was born about 0536 in Of, Old Saxony, , Germany. Ausbert The Senator Of The's father is Ferreolus, Duke Of MOSELLE and his mother is Outeria, Duchess Of MOSELLE. His paternal grandparents are Sigimberus I Bishop Of AUVERGNE and Miss TONANTIUS. He was an only child. He died about 0570.
|
# Copyright (c) 2003-2012 CORE Security Technologies)
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# $Id: smb.py 602 2012-07-12 16:22:04Z [email protected] $
#
# Copyright (C) 2001 Michael Teo <[email protected]>
# smb.py - SMB/CIFS library
#
# This software is provided 'as-is', without any express or implied warranty.
# In no event will the author be held liable for any damages arising from the
# use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
#
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
#
# 3. This notice cannot be removed or altered from any source distribution.
#
# Altered source done by Alberto Solino
# Todo:
# [ ] Try [SMB]transport fragmentation using Transact requests
# [ ] Try other methods of doing write (write_raw, transact2, write, write_and_unlock, write_and_close, write_mpx)
# [-] Try replacements for SMB_COM_NT_CREATE_ANDX (CREATE, T_TRANSACT_CREATE, OPEN_ANDX work)
# [x] Fix forceWriteAndx, which needs to send a RecvRequest, because recv() will not send it
# [x] Fix Recv() when using RecvAndx and the answer comes split in several packets
# [ ] Try [SMB]transport fragmentation with overlapping segments
# [ ] Try [SMB]transport fragmentation with out of order segments
# [x] Do chained AndX requests
# [ ] Transform the rest of the calls to structure
# [ ] Implement TRANS/TRANS2 reassembly for list_path
import os, sys, socket, string, re, select, errno
import nmb
import types
from binascii import a2b_hex
import ntlm
import random
import datetime, time
from random import randint
from struct import *
from dcerpc import samr
import struct
from structure import Structure
from contextlib import contextmanager
# For signing
import hashlib
unicode_support = 0
unicode_convert = 1
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Shared Device Type
SHARED_DISK = 0x00
SHARED_DISK_HIDDEN = 0x80000000
SHARED_PRINT_QUEUE = 0x01
SHARED_DEVICE = 0x02
SHARED_IPC = 0x03
# Extended attributes mask
ATTR_ARCHIVE = 0x020
ATTR_COMPRESSED = 0x800
ATTR_NORMAL = 0x080
ATTR_HIDDEN = 0x002
ATTR_READONLY = 0x001
ATTR_TEMPORARY = 0x100
ATTR_DIRECTORY = 0x010
ATTR_SYSTEM = 0x004
# Service Type
SERVICE_DISK = 'A:'
SERVICE_PRINTER = 'LPT1:'
SERVICE_IPC = 'IPC'
SERVICE_COMM = 'COMM'
SERVICE_ANY = '?????'
# Server Type (Can be used to mask with SMBMachine.get_type() or SMBDomain.get_type())
SV_TYPE_WORKSTATION = 0x00000001
SV_TYPE_SERVER = 0x00000002
SV_TYPE_SQLSERVER = 0x00000004
SV_TYPE_DOMAIN_CTRL = 0x00000008
SV_TYPE_DOMAIN_BAKCTRL = 0x00000010
SV_TYPE_TIME_SOURCE = 0x00000020
SV_TYPE_AFP = 0x00000040
SV_TYPE_NOVELL = 0x00000080
SV_TYPE_DOMAIN_MEMBER = 0x00000100
SV_TYPE_PRINTQ_SERVER = 0x00000200
SV_TYPE_DIALIN_SERVER = 0x00000400
SV_TYPE_XENIX_SERVER = 0x00000800
SV_TYPE_NT = 0x00001000
SV_TYPE_WFW = 0x00002000
SV_TYPE_SERVER_NT = 0x00004000
SV_TYPE_POTENTIAL_BROWSER = 0x00010000
SV_TYPE_BACKUP_BROWSER = 0x00020000
SV_TYPE_MASTER_BROWSER = 0x00040000
SV_TYPE_DOMAIN_MASTER = 0x00080000
SV_TYPE_LOCAL_LIST_ONLY = 0x40000000
SV_TYPE_DOMAIN_ENUM = 0x80000000
# Options values for SMB.stor_file and SMB.retr_file
SMB_O_CREAT = 0x10 # Create the file if file does not exists. Otherwise, operation fails.
SMB_O_EXCL = 0x00 # When used with SMB_O_CREAT, operation fails if file exists. Cannot be used with SMB_O_OPEN.
SMB_O_OPEN = 0x01 # Open the file if the file exists
SMB_O_TRUNC = 0x02 # Truncate the file if the file exists
# Share Access Mode
SMB_SHARE_COMPAT = 0x00
SMB_SHARE_DENY_EXCL = 0x10
SMB_SHARE_DENY_WRITE = 0x20
SMB_SHARE_DENY_READEXEC = 0x30
SMB_SHARE_DENY_NONE = 0x40
SMB_ACCESS_READ = 0x00
SMB_ACCESS_WRITE = 0x01
SMB_ACCESS_READWRITE = 0x02
SMB_ACCESS_EXEC = 0x03
TRANS_DISCONNECT_TID = 1
TRANS_NO_RESPONSE = 2
STATUS_SUCCESS = 0x00000000
STATUS_LOGON_FAILURE = 0xC000006D
STATUS_LOGON_TYPE_NOT_GRANTED = 0xC000015B
MAX_TFRAG_SIZE = 5840
EVASION_NONE = 0
EVASION_LOW = 1
EVASION_HIGH = 2
EVASION_MAX = 3
RPC_X_BAD_STUB_DATA = 0x6F7
# SMB_FILE_ATTRIBUTES
SMB_FILE_ATTRIBUTE_NORMAL = 0x0000
SMB_FILE_ATTRIBUTE_READONLY = 0x0001
SMB_FILE_ATTRIBUTE_HIDDEN = 0x0002
SMB_FILE_ATTRIBUTE_SYSTEM = 0x0004
SMB_FILE_ATTRIBUTE_VOLUME = 0x0008
SMB_FILE_ATTRIBUTE_DIRECTORY = 0x0010
SMB_FILE_ATTRIBUTE_DIRECORY = SMB_FILE_ATTRIBUTE_DIRECTORY # legacy misspelling kept as an alias for backwards compatibility
SMB_FILE_ATTRIBUTE_ARCHIVE = 0x0020
SMB_SEARCH_ATTRIBUTE_READONLY = 0x0100
SMB_SEARCH_ATTRIBUTE_HIDDEN = 0x0200
SMB_SEARCH_ATTRIBUTE_SYSTEM = 0x0400
SMB_SEARCH_ATTRIBUTE_DIRECTORY = 0x1000
SMB_SEARCH_ATTRIBUTE_ARCHIVE = 0x2000
# Session SetupAndX Action flags
SMB_SETUP_GUEST = 0x01
SMB_SETUP_USE_LANMAN_KEY = 0x02
# QUERY_INFORMATION levels
SMB_INFO_ALLOCATION = 0x0001
SMB_INFO_VOLUME = 0x0002
SMB_QUERY_FS_VOLUME_INFO = 0x0102
SMB_QUERY_FS_SIZE_INFO = 0x0103
SMB_QUERY_FILE_EA_INFO = 0x0103
SMB_QUERY_FS_DEVICE_INFO = 0x0104
SMB_QUERY_FS_ATTRIBUTE_INFO = 0x0105
SMB_QUERY_FILE_BASIC_INFO = 0x0101
SMB_QUERY_FILE_STANDARD_INFO = 0x0102
SMB_QUERY_FILE_ALL_INFO = 0x0107
# SET_INFORMATION levels
SMB_SET_FILE_DISPOSITION_INFO = 0x0102
SMB_SET_FILE_BASIC_INFO = 0x0101
SMB_SET_FILE_END_OF_FILE_INFO = 0x0104
# File System Attributes
FILE_CASE_SENSITIVE_SEARCH = 0x00000001
FILE_CASE_PRESERVED_NAMES = 0x00000002
FILE_UNICODE_ON_DISK = 0x00000004
FILE_PERSISTENT_ACLS = 0x00000008
FILE_FILE_COMPRESSION = 0x00000010
FILE_VOLUME_IS_COMPRESSED = 0x00008000
# FIND_FIRST2 flags and levels
SMB_FIND_CLOSE_AFTER_REQUEST = 0x0001
SMB_FIND_CLOSE_AT_EOS = 0x0002
SMB_FIND_RETURN_RESUME_KEYS = 0x0004
SMB_FIND_CONTINUE_FROM_LAST = 0x0008
SMB_FIND_WITH_BACKUP_INTENT = 0x0010
FILE_DIRECTORY_FILE = 0x00000001
FILE_DELETE_ON_CLOSE = 0x00001000
FILE_NON_DIRECTORY_FILE = 0x00000040
SMB_FIND_INFO_STANDARD = 0x0001
SMB_FIND_FILE_DIRECTORY_INFO = 0x0101
SMB_FIND_FILE_FULL_DIRECTORY_INFO = 0x0102
SMB_FIND_FILE_NAMES_INFO = 0x0103
SMB_FIND_FILE_BOTH_DIRECTORY_INFO = 0x0104
SMB_FIND_FILE_ID_FULL_DIRECTORY_INFO = 0x0105
SMB_FIND_FILE_ID_BOTH_DIRECTORY_INFO = 0x0106
# DesiredAccess flags
FILE_READ_DATA = 0x00000001
FILE_WRITE_DATA = 0x00000002
FILE_APPEND_DATA = 0x00000004
FILE_EXECUTE = 0x00000020
MAXIMUM_ALLOWED = 0x02000000
GENERIC_ALL = 0x10000000
GENERIC_EXECUTE = 0x20000000
GENERIC_WRITE = 0x40000000
GENERIC_READ = 0x80000000
# ShareAccess flags
FILE_SHARE_NONE = 0x00000000
FILE_SHARE_READ = 0x00000001
FILE_SHARE_WRITE = 0x00000002
FILE_SHARE_DELETE = 0x00000004
# CreateDisposition flags
FILE_SUPERSEDE = 0x00000000
FILE_OPEN = 0x00000001
FILE_CREATE = 0x00000002
FILE_OPEN_IF = 0x00000003
FILE_OVERWRITE = 0x00000004
FILE_OVERWRITE_IF = 0x00000005
############### GSS Stuff ################
GSS_API_SPNEGO_UUID = '\x2b\x06\x01\x05\x05\x02'
ASN1_SEQUENCE = 0x30
ASN1_AID = 0x60
ASN1_OID = 0x06
ASN1_OCTET_STRING = 0x04
ASN1_MECH_TYPE = 0xa0
ASN1_MECH_TOKEN = 0xa2
ASN1_SUPPORTED_MECH = 0xa1
ASN1_RESPONSE_TOKEN = 0xa2
ASN1_ENUMERATED = 0x0a
MechTypes = {
'+\x06\x01\x04\x01\x827\x02\x02\x1e': 'SNMPv2-SMI::enterprises.311.2.2.30',
'+\x06\x01\x04\x01\x827\x02\x02\n': 'NTLMSSP - Microsoft NTLM Security Support Provider',
'*\x86H\x82\xf7\x12\x01\x02\x02': 'MS KRB5 - Microsoft Kerberos 5',
'*\x86H\x86\xf7\x12\x01\x02\x02': 'KRB5 - Kerberos 5',
'*\x86H\x86\xf7\x12\x01\x02\x02\x03': 'KRB5 - Kerberos 5 - User to User'
}
TypesMech = dict((v,k) for k, v in MechTypes.iteritems())
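# Illustrative lookups (interpreter session): MechTypes maps raw DER-encoded
# OIDs to friendly names, and TypesMech is the inverse, which is handy when
# building a NegTokenInit by mechanism name.
#
#   >>> ntlm_oid = TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']
#   >>> MechTypes[ntlm_oid]
#   'NTLMSSP - Microsoft NTLM Security Support Provider'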
def asn1encode(data = ''):
if len(data) >= 0 and len(data) <= 0x7F:
res = pack('B', len(data)) + data
elif len(data) >= 0x80 and len(data) <= 0xFF:
res = pack('BB', 0x81, len(data)) + data
elif len(data) >= 0x100 and len(data) <= 0xFFFF:
res = pack('!BH', 0x82, len(data)) + data
elif len(data) >= 0x10000 and len(data) <= 0xffffff:
res = pack('!BBH', 0x83, len(data) >> 16, len(data) & 0xFFFF) + data
elif len(data) >= 0x1000000 and len(data) <= 0xffffffff:
res = pack('!BL', 0x84, len(data)) + data
else:
raise Exception('Error in asn1encode')
return str(res)
def asn1decode(data = ''):
len1 = unpack('B', data[:1])[0]
data = data[1:]
if len1 == 0x81:
pad = calcsize('B')
len2 = unpack('B',data[:pad])[0]
data = data[pad:]
ans = data[:len2]
elif len1 == 0x82:
pad = calcsize('H')
len2 = unpack('!H', data[:pad])[0]
data = data[pad:]
ans = data[:len2]
elif len1 == 0x83:
pad = calcsize('B') + calcsize('!H')
len2, len3 = unpack('!BH', data[:pad])
data = data[pad:]
        ans = data[:(len2 << 16) + len3]
elif len1 == 0x84:
pad = calcsize('!L')
len2 = unpack('!L', data[:pad])[0]
data = data[pad:]
ans = data[:len2]
    else:
        # Single length octet: string <= 0x7F
        pad = 0
        ans = data[:len1]
return ans, len(ans)+pad+1
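# Illustrative round-trip for the two most common BER length forms
# (values follow directly from the encoder above):
#
#   >>> asn1encode('abc')               # <= 0x7F bytes: single length octet
#   '\x03abc'
#   >>> blob = asn1encode('A' * 0x200)  # 0x100-0xFFFF bytes: 0x82 + 2-byte length
#   >>> blob[:3]
#   '\x82\x02\x00'
#   >>> data, consumed = asn1decode(blob)
#   >>> data == 'A' * 0x200 and consumed == len(blob)
#   True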
class GSSAPI():
# Generic GSSAPI Header Format
def __init__(self, data = None):
self.fields = {}
self['UUID'] = GSS_API_SPNEGO_UUID
if data:
self.fromString(data)
def __setitem__(self,key,value):
self.fields[key] = value
def __getitem__(self, key):
return self.fields[key]
def __delitem__(self, key):
del self.fields[key]
def __len__(self):
return len(self.getData())
    def __str__(self):
        return self.getData()
def fromString(self, data = None):
# Manual parse of the GSSAPI Header Format
# It should be something like
# AID = 0x60 TAG, BER Length
# OID = 0x06 TAG
# GSSAPI OID
# UUID data (BER Encoded)
# Payload
next_byte = unpack('B',data[:1])[0]
if next_byte != ASN1_AID:
raise Exception('Unknown AID=%x' % next_byte)
data = data[1:]
decode_data, total_bytes = asn1decode(data)
# Now we should have a OID tag
next_byte = unpack('B',decode_data[:1])[0]
if next_byte != ASN1_OID:
raise Exception('OID tag not found %x' % next_byte)
decode_data = decode_data[1:]
# Now the OID contents, should be SPNEGO UUID
uuid, total_bytes = asn1decode(decode_data)
self['OID'] = uuid
# the rest should be the data
self['Payload'] = decode_data[total_bytes:]
def dump(self):
for i in self.fields.keys():
print "%s: {%r}" % (i,self[i])
def getData(self):
ans = pack('B',ASN1_AID)
ans += asn1encode(
pack('B',ASN1_OID) +
asn1encode(self['UUID']) +
self['Payload'] )
return ans
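# Illustrative GSSAPI header round-trip: getData() wraps the 'UUID' set in
# __init__ (the SPNEGO OID by default) together with the raw 'Payload', and
# fromString() parses the OID back out under the 'OID' key.
#
#   >>> token = GSSAPI()
#   >>> token['Payload'] = '\x00'        # placeholder payload
#   >>> parsed = GSSAPI(token.getData())
#   >>> parsed['OID'] == GSS_API_SPNEGO_UUID
#   True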
class SPNEGO_NegTokenResp():
# http://tools.ietf.org/html/rfc4178#page-9
# NegTokenResp ::= SEQUENCE {
# negState [0] ENUMERATED {
# accept-completed (0),
# accept-incomplete (1),
# reject (2),
# request-mic (3)
# } OPTIONAL,
# -- REQUIRED in the first reply from the target
# supportedMech [1] MechType OPTIONAL,
# -- present only in the first reply from the target
# responseToken [2] OCTET STRING OPTIONAL,
# mechListMIC [3] OCTET STRING OPTIONAL,
# ...
# }
# This structure is not prepended by a GSS generic header!
SPNEGO_NEG_TOKEN_RESP = 0xa1
SPNEGO_NEG_TOKEN_TARG = 0xa0
def __init__(self, data = None):
self.fields = {}
if data:
self.fromString(data)
def __setitem__(self,key,value):
self.fields[key] = value
def __getitem__(self, key):
return self.fields[key]
def __delitem__(self, key):
del self.fields[key]
def __len__(self):
return len(self.getData())
    def __str__(self):
        return self.getData()
    def fromString(self, data = None):
payload = data
next_byte = unpack('B', payload[:1])[0]
if next_byte != SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_RESP:
raise Exception('NegTokenResp not found %x' % next_byte)
payload = payload[1:]
decode_data, total_bytes = asn1decode(payload)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SEQUENCE:
raise Exception('SEQUENCE tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
next_byte = unpack('B',decode_data[:1])[0]
if next_byte != ASN1_MECH_TYPE:
# MechType not found, could be an AUTH answer
if next_byte != ASN1_RESPONSE_TOKEN:
raise Exception('MechType/ResponseToken tag not found %x' % next_byte)
else:
decode_data2 = decode_data[1:]
decode_data2, total_bytes = asn1decode(decode_data2)
next_byte = unpack('B', decode_data2[:1])[0]
if next_byte != ASN1_ENUMERATED:
raise Exception('Enumerated tag not found %x' % next_byte)
decode_data2 = decode_data2[1:]
            item, total_bytes2 = asn1decode(decode_data2)
self['NegResult'] = item
decode_data = decode_data[1:]
decode_data = decode_data[total_bytes:]
# Do we have more data?
if len(decode_data) == 0:
return
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SUPPORTED_MECH:
if next_byte != ASN1_RESPONSE_TOKEN:
raise Exception('Supported Mech/ResponseToken tag not found %x' % next_byte)
else:
decode_data2 = decode_data[1:]
decode_data2, total_bytes = asn1decode(decode_data2)
next_byte = unpack('B', decode_data2[:1])[0]
if next_byte != ASN1_OID:
raise Exception('OID tag not found %x' % next_byte)
decode_data2 = decode_data2[1:]
item, total_bytes2 = asn1decode(decode_data2)
            self['SupportedMech'] = item
decode_data = decode_data[1:]
decode_data = decode_data[total_bytes:]
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_RESPONSE_TOKEN:
raise Exception('Response token tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_OCTET_STRING:
raise Exception('Octet string token tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
self['ResponseToken'] = decode_data
def dump(self):
for i in self.fields.keys():
print "%s: {%r}" % (i,self[i])
def getData(self):
ans = pack('B',SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_RESP)
if self.fields.has_key('NegResult') and self.fields.has_key('SupportedMech'):
# Server resp
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B',SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_TARG) +
asn1encode(
pack('B',ASN1_ENUMERATED) +
asn1encode( self['NegResult'] )) +
pack('B',ASN1_SUPPORTED_MECH) +
asn1encode(
pack('B',ASN1_OID) +
asn1encode(self['SupportedMech'])) +
pack('B',ASN1_RESPONSE_TOKEN ) +
asn1encode(
pack('B', ASN1_OCTET_STRING) + asn1encode(self['ResponseToken']))))
elif self.fields.has_key('NegResult'):
# Server resp
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B', SPNEGO_NegTokenResp.SPNEGO_NEG_TOKEN_TARG) +
asn1encode(
pack('B',ASN1_ENUMERATED) +
asn1encode( self['NegResult'] ))))
else:
# Client resp
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B', ASN1_RESPONSE_TOKEN) +
asn1encode(
pack('B', ASN1_OCTET_STRING) + asn1encode(self['ResponseToken']))))
return ans
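# Illustrative client-side use: an NTLMSSP AUTHENTICATE blob travels inside
# the responseToken field of a NegTokenResp. 'ntlm_auth_blob' below is a
# hypothetical placeholder for a token built elsewhere (e.g. via the ntlm
# module imported at the top of this file).
#
#   >>> resp = SPNEGO_NegTokenResp()
#   >>> resp['ResponseToken'] = ntlm_auth_blob   # hypothetical variable
#   >>> blob = resp.getData()                    # ready for a SessionSetupAndX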
class SPNEGO_NegTokenInit(GSSAPI):
# http://tools.ietf.org/html/rfc4178#page-8
    # NegTokenInit ::= SEQUENCE {
# mechTypes [0] MechTypeList,
# reqFlags [1] ContextFlags OPTIONAL,
# mechToken [2] OCTET STRING OPTIONAL,
# mechListMIC [3] OCTET STRING OPTIONAL,
# }
SPNEGO_NEG_TOKEN_INIT = 0xa0
    def fromString(self, data = None):
GSSAPI.fromString(self, data)
payload = self['Payload']
next_byte = unpack('B', payload[:1])[0]
if next_byte != SPNEGO_NegTokenInit.SPNEGO_NEG_TOKEN_INIT:
raise Exception('NegTokenInit not found %x' % next_byte)
payload = payload[1:]
decode_data, total_bytes = asn1decode(payload)
# Now we should have a SEQUENCE Tag
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SEQUENCE:
raise Exception('SEQUENCE tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes2 = asn1decode(decode_data)
next_byte = unpack('B',decode_data[:1])[0]
if next_byte != ASN1_MECH_TYPE:
raise Exception('MechType tag not found %x' % next_byte)
decode_data = decode_data[1:]
remaining_data = decode_data
decode_data, total_bytes3 = asn1decode(decode_data)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_SEQUENCE:
raise Exception('SEQUENCE tag not found %x' % next_byte)
decode_data = decode_data[1:]
decode_data, total_bytes4 = asn1decode(decode_data)
# And finally we should have the MechTypes
self['MechTypes'] = []
i = 1
while decode_data:
next_byte = unpack('B', decode_data[:1])[0]
if next_byte != ASN1_OID:
# Not a valid OID, there must be something else we won't unpack
break
decode_data = decode_data[1:]
item, total_bytes = asn1decode(decode_data)
self['MechTypes'].append(item)
decode_data = decode_data[total_bytes:]
# Do we have MechTokens as well?
decode_data = remaining_data[total_bytes3:]
if len(decode_data) > 0:
next_byte = unpack('B', decode_data[:1])[0]
if next_byte == ASN1_MECH_TOKEN:
# We have tokens in here!
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
next_byte = unpack('B', decode_data[:1])[0]
if next_byte == ASN1_OCTET_STRING:
decode_data = decode_data[1:]
decode_data, total_bytes = asn1decode(decode_data)
self['MechToken'] = decode_data
def getData(self):
mechTypes = ''
for i in self['MechTypes']:
mechTypes += pack('B', ASN1_OID)
mechTypes += asn1encode(i)
mechToken = ''
# Do we have tokens to send?
if self.fields.has_key('MechToken'):
mechToken = pack('B', ASN1_MECH_TOKEN) + asn1encode(
pack('B', ASN1_OCTET_STRING) + asn1encode(
self['MechToken']))
ans = pack('B',SPNEGO_NegTokenInit.SPNEGO_NEG_TOKEN_INIT)
ans += asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(
pack('B', ASN1_MECH_TYPE) +
asn1encode(
pack('B', ASN1_SEQUENCE) +
asn1encode(mechTypes)) + mechToken ))
self['Payload'] = ans
return GSSAPI.getData(self)
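# Illustrative use: advertise NTLMSSP as the only mechanism and embed an
# NTLMSSP NEGOTIATE token. 'negotiate_blob' is a hypothetical placeholder
# for a token built elsewhere.
#
#   >>> init = SPNEGO_NegTokenInit()
#   >>> init['MechTypes'] = [TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']]
#   >>> init['MechToken'] = negotiate_blob       # hypothetical variable
#   >>> blob = init.getData()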
def strerror(errclass, errcode):
if errclass == 0x01:
return 'OS error', ERRDOS.get(errcode, 'Unknown error')
elif errclass == 0x02:
return 'Server error', ERRSRV.get(errcode, 'Unknown error')
elif errclass == 0x03:
return 'Hardware error', ERRHRD.get(errcode, 'Unknown error')
# This is not a standard error class for SMB
#elif errclass == 0x80:
# return 'Browse error', ERRBROWSE.get(errcode, 'Unknown error')
elif errclass == 0xff:
return 'Bad command', 'Bad command. Please file bug report'
else:
return 'Unknown error', 'Unknown error'
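# Illustrative use (assumes the ERRDOS/ERRSRV/ERRHRD message tables this
# function consults are defined elsewhere in this module):
#
#   >>> cls_name, msg = strerror(0x02, 2)   # ERRSRV class, ERRbadpw
#   >>> cls_name
#   'Server error'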
# Raised when an error has occurred during a session
class SessionError(Exception):
# SMB X/Open error codes for the ERRDOS error class
ERRsuccess = 0
ERRbadfunc = 1
ERRbadfile = 2
ERRbadpath = 3
ERRnofids = 4
ERRnoaccess = 5
ERRbadfid = 6
ERRbadmcb = 7
ERRnomem = 8
ERRbadmem = 9
ERRbadenv = 10
ERRbadaccess = 12
ERRbaddata = 13
ERRres = 14
ERRbaddrive = 15
ERRremcd = 16
ERRdiffdevice = 17
ERRnofiles = 18
ERRgeneral = 31
ERRbadshare = 32
ERRlock = 33
ERRunsup = 50
ERRnetnamedel = 64
ERRnosuchshare = 67
ERRfilexists = 80
ERRinvalidparam = 87
ERRcannotopen = 110
ERRinsufficientbuffer = 122
ERRinvalidname = 123
ERRunknownlevel = 124
ERRnotlocked = 158
ERRrename = 183
ERRbadpipe = 230
ERRpipebusy = 231
ERRpipeclosing = 232
ERRnotconnected = 233
ERRmoredata = 234
ERRnomoreitems = 259
ERRbaddirectory = 267
ERReasnotsupported = 282
ERRlogonfailure = 1326
ERRbuftoosmall = 2123
ERRunknownipc = 2142
ERRnosuchprintjob = 2151
ERRinvgroup = 2455
# here's a special one from observing NT
ERRnoipc = 66
# These errors seem to be only returned by the NT printer driver system
ERRdriveralreadyinstalled = 1795
ERRunknownprinterport = 1796
ERRunknownprinterdriver = 1797
ERRunknownprintprocessor = 1798
ERRinvalidseparatorfile = 1799
ERRinvalidjobpriority = 1800
ERRinvalidprintername = 1801
ERRprinteralreadyexists = 1802
ERRinvalidprintercommand = 1803
ERRinvaliddatatype = 1804
ERRinvalidenvironment = 1805
ERRunknownprintmonitor = 3000
ERRprinterdriverinuse = 3001
ERRspoolfilenotfound = 3002
ERRnostartdoc = 3003
ERRnoaddjob = 3004
ERRprintprocessoralreadyinstalled = 3005
ERRprintmonitoralreadyinstalled = 3006
ERRinvalidprintmonitor = 3007
ERRprintmonitorinuse = 3008
ERRprinterhasjobsqueued = 3009
# Error codes for the ERRSRV class
ERRerror = 1
ERRbadpw = 2
ERRbadtype = 3
ERRaccess = 4
ERRinvnid = 5
ERRinvnetname = 6
ERRinvdevice = 7
ERRqfull = 49
ERRqtoobig = 50
ERRinvpfid = 52
ERRsmbcmd = 64
ERRsrverror = 65
ERRfilespecs = 67
ERRbadlink = 68
ERRbadpermits = 69
ERRbadpid = 70
ERRsetattrmode = 71
ERRpaused = 81
ERRmsgoff = 82
ERRnoroom = 83
ERRrmuns = 87
ERRtimeout = 88
ERRnoresource = 89
ERRtoomanyuids = 90
ERRbaduid = 91
ERRuseMPX = 250
ERRuseSTD = 251
ERRcontMPX = 252
ERRbadPW = None
ERRnosupport = 0
ERRunknownsmb = 22
# Error codes for the ERRHRD class
ERRnowrite = 19
ERRbadunit = 20
ERRnotready = 21
ERRbadcmd = 22
ERRdata = 23
ERRbadreq = 24
ERRseek = 25
ERRbadmedia = 26
ERRbadsector = 27
ERRnopaper = 28
ERRwrite = 29
ERRread = 30
ERRgeneral = 31
ERRwrongdisk = 34
ERRFCBunavail = 35
ERRsharebufexc = 36
ERRdiskfull = 39
hard_msgs = {
19: ("ERRnowrite", "Attempt to write on write-protected diskette."),
20: ("ERRbadunit", "Unknown unit."),
21: ("ERRnotready", "Drive not ready."),
22: ("ERRbadcmd", "Unknown command."),
23: ("ERRdata", "Data error (CRC)."),
24: ("ERRbadreq", "Bad request structure length."),
25: ("ERRseek", "Seek error."),
26: ("ERRbadmedia", "Unknown media type."),
27: ("ERRbadsector", "Sector not found."),
28: ("ERRnopaper", "Printer out of paper."),
29: ("ERRwrite", "Write fault."),
30: ("ERRread", "Read fault."),
31: ("ERRgeneral", "General failure."),
32: ("ERRbadshare", "An open conflicts with an existing open."),
    33: ("ERRlock", "A Lock request conflicted with an existing lock or specified an invalid mode, or an Unlock request attempted to remove a lock held by another process."),
34: ("ERRwrongdisk", "The wrong disk was found in a drive."),
35: ("ERRFCBUnavail", "No FCBs are available to process request."),
36: ("ERRsharebufexc", "A sharing buffer has been exceeded.")
}
nt_msgs = {
0x0000: ("NT_STATUS_OK","The operation completed successfully."),
0x0001: ("NT_STATUS_UNSUCCESSFUL","A device attached to the system is not functioning."),
0x0002: ("NT_STATUS_NOT_IMPLEMENTED","Incorrect function."),
0x0003: ("NT_STATUS_INVALID_INFO_CLASS","The parameter is incorrect."),
0x0004: ("NT_STATUS_INFO_LENGTH_MISMATCH","The program issued a command but the command length is incorrect."),
0x0005: ("NT_STATUS_ACCESS_VIOLATION","Invalid access to memory location."),
0x0006: ("NT_STATUS_IN_PAGE_ERROR","Error performing inpage operation."),
0x0007: ("NT_STATUS_PAGEFILE_QUOTA","Insufficient quota to complete the requested service."),
0x0008: ("NT_STATUS_INVALID_HANDLE","The handle is invalid."),
0x0009: ("NT_STATUS_BAD_INITIAL_STACK","Recursion too deep, stack overflowed."),
0x000a: ("NT_STATUS_BAD_INITIAL_PC","Not a valid Windows NT application."),
0x000b: ("NT_STATUS_INVALID_CID","The parameter is incorrect."),
0x000c: ("NT_STATUS_TIMER_NOT_CANCELED","NT_STATUS_TIMER_NOT_CANCELED"),
0x000d: ("NT_STATUS_INVALID_PARAMETER","The parameter is incorrect."),
0x000e: ("NT_STATUS_NO_SUCH_DEVICE","The system cannot find the file specified."),
0x000f: ("NT_STATUS_NO_SUCH_FILE","The system cannot find the file specified."),
0x0010: ("NT_STATUS_INVALID_DEVICE_REQUEST","Incorrect function."),
0x0011: ("NT_STATUS_END_OF_FILE","Reached end of file."),
0x0012: ("NT_STATUS_WRONG_VOLUME","The wrong diskette is in the drive. Insert %2 (Volume Serial Number: %3) into drive %1."),
0x0013: ("NT_STATUS_NO_MEDIA_IN_DEVICE","The device is not ready."),
0x0014: ("NT_STATUS_UNRECOGNIZED_MEDIA","The disk media is not recognized. It may not be formatted."),
0x0015: ("NT_STATUS_NONEXISTENT_SECTOR","The drive cannot find the sector requested."),
0x0016: ("NT_STATUS_MORE_PROCESSING_REQUIRED","More data is available."),
0x0017: ("NT_STATUS_NO_MEMORY","Not enough storage is available to process this command."),
0x0018: ("NT_STATUS_CONFLICTING_ADDRESSES","Attempt to access invalid address."),
0x0019: ("NT_STATUS_NOT_MAPPED_VIEW","Attempt to access invalid address."),
0x001a: ("NT_STATUS_UNABLE_TO_FREE_VM","The parameter is incorrect."),
0x001b: ("NT_STATUS_UNABLE_TO_DELETE_SECTION","The parameter is incorrect."),
0x001c: ("NT_STATUS_INVALID_SYSTEM_SERVICE","Incorrect function."),
0x001d: ("NT_STATUS_ILLEGAL_INSTRUCTION","NT_STATUS_ILLEGAL_INSTRUCTION"),
0x001e: ("NT_STATUS_INVALID_LOCK_SEQUENCE","Access is denied."),
0x001f: ("NT_STATUS_INVALID_VIEW_SIZE","Access is denied."),
0x0020: ("NT_STATUS_INVALID_FILE_FOR_SECTION","Not a valid Windows NT application."),
0x0021: ("NT_STATUS_ALREADY_COMMITTED","Access is denied."),
0x0022: ("NT_STATUS_ACCESS_DENIED","Access is denied."),
0x0023: ("NT_STATUS_BUFFER_TOO_SMALL","The data area passed to a system call is too small."),
0x0024: ("NT_STATUS_OBJECT_TYPE_MISMATCH","The handle is invalid."),
0x0025: ("NT_STATUS_NONCONTINUABLE_EXCEPTION","NT_STATUS_NONCONTINUABLE_EXCEPTION"),
0x0026: ("NT_STATUS_INVALID_DISPOSITION","NT_STATUS_INVALID_DISPOSITION"),
0x0027: ("NT_STATUS_UNWIND","NT_STATUS_UNWIND"),
0x0028: ("NT_STATUS_BAD_STACK","NT_STATUS_BAD_STACK"),
0x0029: ("NT_STATUS_INVALID_UNWIND_TARGET","NT_STATUS_INVALID_UNWIND_TARGET"),
0x002a: ("NT_STATUS_NOT_LOCKED","The segment is already unlocked."),
0x002b: ("NT_STATUS_PARITY_ERROR","NT_STATUS_PARITY_ERROR"),
0x002c: ("NT_STATUS_UNABLE_TO_DECOMMIT_VM","Attempt to access invalid address."),
0x002d: ("NT_STATUS_NOT_COMMITTED","Attempt to access invalid address."),
0x002e: ("NT_STATUS_INVALID_PORT_ATTRIBUTES","NT_STATUS_INVALID_PORT_ATTRIBUTES"),
0x002f: ("NT_STATUS_PORT_MESSAGE_TOO_LONG","NT_STATUS_PORT_MESSAGE_TOO_LONG"),
0x0030: ("NT_STATUS_INVALID_PARAMETER_MIX","The parameter is incorrect."),
0x0031: ("NT_STATUS_INVALID_QUOTA_LOWER","NT_STATUS_INVALID_QUOTA_LOWER"),
0x0032: ("NT_STATUS_DISK_CORRUPT_ERROR","The disk structure is corrupt and non-readable."),
0x0033: ("NT_STATUS_OBJECT_NAME_INVALID","The filename, directory name, or volume label syntax is incorrect."),
0x0034: ("NT_STATUS_OBJECT_NAME_NOT_FOUND","The system cannot find the file specified."),
0x0035: ("NT_STATUS_OBJECT_NAME_COLLISION","Cannot create a file when that file already exists."),
0x0036: ("NT_STATUS_HANDLE_NOT_WAITABLE","NT_STATUS_HANDLE_NOT_WAITABLE"),
0x0037: ("NT_STATUS_PORT_DISCONNECTED","The handle is invalid."),
0x0038: ("NT_STATUS_DEVICE_ALREADY_ATTACHED","NT_STATUS_DEVICE_ALREADY_ATTACHED"),
0x0039: ("NT_STATUS_OBJECT_PATH_INVALID","The specified path is invalid."),
0x003a: ("NT_STATUS_OBJECT_PATH_NOT_FOUND","The system cannot find the path specified."),
0x003b: ("NT_STATUS_OBJECT_PATH_SYNTAX_BAD","The specified path is invalid."),
0x003c: ("NT_STATUS_DATA_OVERRUN","The request could not be performed because of an I/O device error."),
0x003d: ("NT_STATUS_DATA_LATE_ERROR","The request could not be performed because of an I/O device error."),
0x003e: ("NT_STATUS_DATA_ERROR","Data error (cyclic redundancy check)"),
0x003f: ("NT_STATUS_CRC_ERROR","Data error (cyclic redundancy check)"),
0x0040: ("NT_STATUS_SECTION_TOO_BIG","Not enough storage is available to process this command."),
0x0041: ("NT_STATUS_PORT_CONNECTION_REFUSED","Access is denied."),
0x0042: ("NT_STATUS_INVALID_PORT_HANDLE","The handle is invalid."),
0x0043: ("NT_STATUS_SHARING_VIOLATION","The process cannot access the file because it is being used by another process."),
0x0044: ("NT_STATUS_QUOTA_EXCEEDED","Not enough quota is available to process this command."),
0x0045: ("NT_STATUS_INVALID_PAGE_PROTECTION","The parameter is incorrect."),
0x0046: ("NT_STATUS_MUTANT_NOT_OWNED","Attempt to release mutex not owned by caller."),
0x0047: ("NT_STATUS_SEMAPHORE_LIMIT_EXCEEDED","Too many posts were made to a semaphore."),
0x0048: ("NT_STATUS_PORT_ALREADY_SET","The parameter is incorrect."),
0x0049: ("NT_STATUS_SECTION_NOT_IMAGE","The parameter is incorrect."),
0x004a: ("NT_STATUS_SUSPEND_COUNT_EXCEEDED","The recipient process has refused the signal."),
0x004b: ("NT_STATUS_THREAD_IS_TERMINATING","Access is denied."),
0x004c: ("NT_STATUS_BAD_WORKING_SET_LIMIT","The parameter is incorrect."),
0x004d: ("NT_STATUS_INCOMPATIBLE_FILE_MAP","The parameter is incorrect."),
0x004e: ("NT_STATUS_SECTION_PROTECTION","The parameter is incorrect."),
0x004f: ("NT_STATUS_EAS_NOT_SUPPORTED","NT_STATUS_EAS_NOT_SUPPORTED"),
0x0050: ("NT_STATUS_EA_TOO_LARGE","The extended attributes are inconsistent."),
0x0051: ("NT_STATUS_NONEXISTENT_EA_ENTRY","The file or directory is corrupt and non-readable."),
0x0052: ("NT_STATUS_NO_EAS_ON_FILE","The file or directory is corrupt and non-readable."),
0x0053: ("NT_STATUS_EA_CORRUPT_ERROR","The file or directory is corrupt and non-readable."),
0x0054: ("NT_STATUS_FILE_LOCK_CONFLICT","The process cannot access the file because another process has locked a portion of the file."),
0x0055: ("NT_STATUS_LOCK_NOT_GRANTED","The process cannot access the file because another process has locked a portion of the file."),
0x0056: ("NT_STATUS_DELETE_PENDING","Access is denied."),
0x0057: ("NT_STATUS_CTL_FILE_NOT_SUPPORTED","The network request is not supported."),
0x0058: ("NT_STATUS_UNKNOWN_REVISION","The revision level is unknown."),
0x0059: ("NT_STATUS_REVISION_MISMATCH","Indicates two revision levels are incompatible."),
0x005a: ("NT_STATUS_INVALID_OWNER","This security ID may not be assigned as the owner of this object."),
0x005b: ("NT_STATUS_INVALID_PRIMARY_GROUP","This security ID may not be assigned as the primary group of an object."),
0x005c: ("NT_STATUS_NO_IMPERSONATION_TOKEN","An attempt has been made to operate on an impersonation token by a thread that is not currently impersonating a client."),
0x005d: ("NT_STATUS_CANT_DISABLE_MANDATORY","The group may not be disabled."),
0x005e: ("NT_STATUS_NO_LOGON_SERVERS","There are currently no logon servers available to service the logon request."),
0x005f: ("NT_STATUS_NO_SUCH_LOGON_SESSION","A specified logon session does not exist. It may already have been terminated."),
0x0060: ("NT_STATUS_NO_SUCH_PRIVILEGE","A specified privilege does not exist."),
0x0061: ("NT_STATUS_PRIVILEGE_NOT_HELD","A required privilege is not held by the client."),
0x0062: ("NT_STATUS_INVALID_ACCOUNT_NAME","The name provided is not a properly formed account name."),
0x0063: ("NT_STATUS_USER_EXISTS","The specified user already exists."),
0x0064: ("NT_STATUS_NO_SUCH_USER","The specified user does not exist."),
0x0065: ("NT_STATUS_GROUP_EXISTS","The specified group already exists."),
0x0066: ("NT_STATUS_NO_SUCH_GROUP","The specified group does not exist."),
0x0067: ("NT_STATUS_MEMBER_IN_GROUP","Either the specified user account is already a member of the specified group, or the specified group cannot be deleted because it contains a member."),
0x0068: ("NT_STATUS_MEMBER_NOT_IN_GROUP","The specified user account is not a member of the specified group account."),
0x0069: ("NT_STATUS_LAST_ADMIN","The last remaining administration account cannot be disabled or deleted."),
0x006a: ("NT_STATUS_WRONG_PASSWORD","The specified network password is not correct."),
0x006b: ("NT_STATUS_ILL_FORMED_PASSWORD","Unable to update the password. The value provided for the new password contains values that are not allowed in passwords."),
0x006c: ("NT_STATUS_PASSWORD_RESTRICTION","Unable to update the password because a password update rule has been violated."),
0x006d: ("NT_STATUS_LOGON_FAILURE","Logon failure: unknown user name or bad password."),
0x006e: ("NT_STATUS_ACCOUNT_RESTRICTION","Logon failure: user account restriction."),
0x006f: ("NT_STATUS_INVALID_LOGON_HOURS","Logon failure: account logon time restriction violation."),
0x0070: ("NT_STATUS_INVALID_WORKSTATION","Logon failure: user not allowed to log on to this computer."),
0x0071: ("NT_STATUS_PASSWORD_EXPIRED","Logon failure: the specified account password has expired."),
0x0072: ("NT_STATUS_ACCOUNT_DISABLED","Logon failure: account currently disabled."),
0x0073: ("NT_STATUS_NONE_MAPPED","No mapping between account names and security IDs was done."),
0x0074: ("NT_STATUS_TOO_MANY_LUIDS_REQUESTED","Too many local user identifiers (LUIDs) were requested at one time."),
0x0075: ("NT_STATUS_LUIDS_EXHAUSTED","No more local user identifiers (LUIDs) are available."),
0x0076: ("NT_STATUS_INVALID_SUB_AUTHORITY","The subauthority part of a security ID is invalid for this particular use."),
0x0077: ("NT_STATUS_INVALID_ACL","The access control list (ACL) structure is invalid."),
0x0078: ("NT_STATUS_INVALID_SID","The security ID structure is invalid."),
0x0079: ("NT_STATUS_INVALID_SECURITY_DESCR","The security descriptor structure is invalid."),
0x007a: ("NT_STATUS_PROCEDURE_NOT_FOUND","The specified procedure could not be found."),
0x007b: ("NT_STATUS_INVALID_IMAGE_FORMAT","%1 is not a valid Windows NT application."),
0x007c: ("NT_STATUS_NO_TOKEN","An attempt was made to reference a token that does not exist."),
0x007d: ("NT_STATUS_BAD_INHERITANCE_ACL","The inherited access control list (ACL) or access control entry (ACE) could not be built."),
0x007e: ("NT_STATUS_RANGE_NOT_LOCKED","The segment is already unlocked."),
0x007f: ("NT_STATUS_DISK_FULL","There is not enough space on the disk."),
0x0080: ("NT_STATUS_SERVER_DISABLED","The server is currently disabled."),
0x0081: ("NT_STATUS_SERVER_NOT_DISABLED","The server is currently enabled."),
0x0082: ("NT_STATUS_TOO_MANY_GUIDS_REQUESTED","The name limit for the local computer network adapter card was exceeded."),
0x0083: ("NT_STATUS_GUIDS_EXHAUSTED","No more data is available."),
0x0084: ("NT_STATUS_INVALID_ID_AUTHORITY","The value provided was an invalid value for an identifier authority."),
0x0085: ("NT_STATUS_AGENTS_EXHAUSTED","No more data is available."),
0x0086: ("NT_STATUS_INVALID_VOLUME_LABEL","The volume label you entered exceeds the label character limit of the target file system."),
0x0087: ("NT_STATUS_SECTION_NOT_EXTENDED","Not enough storage is available to complete this operation."),
0x0088: ("NT_STATUS_NOT_MAPPED_DATA","Attempt to access invalid address."),
0x0089: ("NT_STATUS_RESOURCE_DATA_NOT_FOUND","The specified image file did not contain a resource section."),
0x008a: ("NT_STATUS_RESOURCE_TYPE_NOT_FOUND","The specified resource type can not be found in the image file."),
0x008b: ("NT_STATUS_RESOURCE_NAME_NOT_FOUND","The specified resource name can not be found in the image file."),
0x008c: ("NT_STATUS_ARRAY_BOUNDS_EXCEEDED","NT_STATUS_ARRAY_BOUNDS_EXCEEDED"),
0x008d: ("NT_STATUS_FLOAT_DENORMAL_OPERAND","NT_STATUS_FLOAT_DENORMAL_OPERAND"),
0x008e: ("NT_STATUS_FLOAT_DIVIDE_BY_ZERO","NT_STATUS_FLOAT_DIVIDE_BY_ZERO"),
0x008f: ("NT_STATUS_FLOAT_INEXACT_RESULT","NT_STATUS_FLOAT_INEXACT_RESULT"),
0x0090: ("NT_STATUS_FLOAT_INVALID_OPERATION","NT_STATUS_FLOAT_INVALID_OPERATION"),
0x0091: ("NT_STATUS_FLOAT_OVERFLOW","NT_STATUS_FLOAT_OVERFLOW"),
0x0092: ("NT_STATUS_FLOAT_STACK_CHECK","NT_STATUS_FLOAT_STACK_CHECK"),
0x0093: ("NT_STATUS_FLOAT_UNDERFLOW","NT_STATUS_FLOAT_UNDERFLOW"),
0x0094: ("NT_STATUS_INTEGER_DIVIDE_BY_ZERO","NT_STATUS_INTEGER_DIVIDE_BY_ZERO"),
0x0095: ("NT_STATUS_INTEGER_OVERFLOW","Arithmetic result exceeded 32 bits."),
0x0096: ("NT_STATUS_PRIVILEGED_INSTRUCTION","NT_STATUS_PRIVILEGED_INSTRUCTION"),
0x0097: ("NT_STATUS_TOO_MANY_PAGING_FILES","Not enough storage is available to process this command."),
0x0098: ("NT_STATUS_FILE_INVALID","The volume for a file has been externally altered such that the opened file is no longer valid."),
0x0099: ("NT_STATUS_ALLOTTED_SPACE_EXCEEDED","No more memory is available for security information updates."),
0x009a: ("NT_STATUS_INSUFFICIENT_RESOURCES","Insufficient system resources exist to complete the requested service."),
0x009b: ("NT_STATUS_DFS_EXIT_PATH_FOUND","The system cannot find the path specified."),
0x009c: ("NT_STATUS_DEVICE_DATA_ERROR","Data error (cyclic redundancy check)"),
0x009d: ("NT_STATUS_DEVICE_NOT_CONNECTED","The device is not ready."),
0x009e: ("NT_STATUS_DEVICE_POWER_FAILURE","The device is not ready."),
0x009f: ("NT_STATUS_FREE_VM_NOT_AT_BASE","Attempt to access invalid address."),
0x00a0: ("NT_STATUS_MEMORY_NOT_ALLOCATED","Attempt to access invalid address."),
0x00a1: ("NT_STATUS_WORKING_SET_QUOTA","Insufficient quota to complete the requested service."),
0x00a2: ("NT_STATUS_MEDIA_WRITE_PROTECTED","The media is write protected."),
0x00a3: ("NT_STATUS_DEVICE_NOT_READY","The device is not ready."),
0x00a4: ("NT_STATUS_INVALID_GROUP_ATTRIBUTES","The specified attributes are invalid, or incompatible with the attributes for the group as a whole."),
0x00a5: ("NT_STATUS_BAD_IMPERSONATION_LEVEL","Either a required impersonation level was not provided, or the provided impersonation level is invalid."),
0x00a6: ("NT_STATUS_CANT_OPEN_ANONYMOUS","Cannot open an anonymous level security token."),
0x00a7: ("NT_STATUS_BAD_VALIDATION_CLASS","The validation information class requested was invalid."),
0x00a8: ("NT_STATUS_BAD_TOKEN_TYPE","The type of the token is inappropriate for its attempted use."),
0x00a9: ("NT_STATUS_BAD_MASTER_BOOT_RECORD","NT_STATUS_BAD_MASTER_BOOT_RECORD"),
0x00aa: ("NT_STATUS_INSTRUCTION_MISALIGNMENT","NT_STATUS_INSTRUCTION_MISALIGNMENT"),
0x00ab: ("NT_STATUS_INSTANCE_NOT_AVAILABLE","All pipe instances are busy."),
0x00ac: ("NT_STATUS_PIPE_NOT_AVAILABLE","All pipe instances are busy."),
0x00ad: ("NT_STATUS_INVALID_PIPE_STATE","The pipe state is invalid."),
0x00ae: ("NT_STATUS_PIPE_BUSY","All pipe instances are busy."),
0x00af: ("NT_STATUS_ILLEGAL_FUNCTION","Incorrect function."),
0x00b0: ("NT_STATUS_PIPE_DISCONNECTED","No process is on the other end of the pipe."),
0x00b1: ("NT_STATUS_PIPE_CLOSING","The pipe is being closed."),
0x00b2: ("NT_STATUS_PIPE_CONNECTED","There is a process on other end of the pipe."),
0x00b3: ("NT_STATUS_PIPE_LISTENING","Waiting for a process to open the other end of the pipe."),
0x00b4: ("NT_STATUS_INVALID_READ_MODE","The pipe state is invalid."),
0x00b5: ("NT_STATUS_IO_TIMEOUT","The semaphore timeout period has expired."),
0x00b6: ("NT_STATUS_FILE_FORCED_CLOSED","Reached end of file."),
0x00b7: ("NT_STATUS_PROFILING_NOT_STARTED","NT_STATUS_PROFILING_NOT_STARTED"),
0x00b8: ("NT_STATUS_PROFILING_NOT_STOPPED","NT_STATUS_PROFILING_NOT_STOPPED"),
0x00b9: ("NT_STATUS_COULD_NOT_INTERPRET","NT_STATUS_COULD_NOT_INTERPRET"),
0x00ba: ("NT_STATUS_FILE_IS_A_DIRECTORY","Access is denied."),
0x00bb: ("NT_STATUS_NOT_SUPPORTED","The network request is not supported."),
0x00bc: ("NT_STATUS_REMOTE_NOT_LISTENING","The remote computer is not available."),
0x00bd: ("NT_STATUS_DUPLICATE_NAME","A duplicate name exists on the network."),
0x00be: ("NT_STATUS_BAD_NETWORK_PATH","The network path was not found."),
0x00bf: ("NT_STATUS_NETWORK_BUSY","The network is busy."),
0x00c0: ("NT_STATUS_DEVICE_DOES_NOT_EXIST","The specified network resource or device is no longer available."),
0x00c1: ("NT_STATUS_TOO_MANY_COMMANDS","The network BIOS command limit has been reached."),
0x00c2: ("NT_STATUS_ADAPTER_HARDWARE_ERROR","A network adapter hardware error occurred."),
0x00c3: ("NT_STATUS_INVALID_NETWORK_RESPONSE","The specified server cannot perform the requested operation."),
0x00c4: ("NT_STATUS_UNEXPECTED_NETWORK_ERROR","An unexpected network error occurred."),
0x00c5: ("NT_STATUS_BAD_REMOTE_ADAPTER","The remote adapter is not compatible."),
0x00c6: ("NT_STATUS_PRINT_QUEUE_FULL","The printer queue is full."),
0x00c7: ("NT_STATUS_NO_SPOOL_SPACE","Space to store the file waiting to be printed is not available on the server."),
0x00c8: ("NT_STATUS_PRINT_CANCELLED","Your file waiting to be printed was deleted."),
0x00c9: ("NT_STATUS_NETWORK_NAME_DELETED","The specified network name is no longer available."),
0x00ca: ("NT_STATUS_NETWORK_ACCESS_DENIED","Network access is denied."),
0x00cb: ("NT_STATUS_BAD_DEVICE_TYPE","The network resource type is not correct."),
0x00cc: ("NT_STATUS_BAD_NETWORK_NAME","The network name cannot be found."),
0x00cd: ("NT_STATUS_TOO_MANY_NAMES","The name limit for the local computer network adapter card was exceeded."),
0x00ce: ("NT_STATUS_TOO_MANY_SESSIONS","The network BIOS session limit was exceeded."),
0x00cf: ("NT_STATUS_SHARING_PAUSED","The remote server has been paused or is in the process of being started."),
0x00d0: ("NT_STATUS_REQUEST_NOT_ACCEPTED","No more connections can be made to this remote computer at this time because there are already as many connections as the computer can accept."),
0x00d1: ("NT_STATUS_REDIRECTOR_PAUSED","The specified printer or disk device has been paused."),
0x00d2: ("NT_STATUS_NET_WRITE_FAULT","A write fault occurred on the network."),
0x00d3: ("NT_STATUS_PROFILING_AT_LIMIT","NT_STATUS_PROFILING_AT_LIMIT"),
0x00d4: ("NT_STATUS_NOT_SAME_DEVICE","The system cannot move the file to a different disk drive."),
0x00d5: ("NT_STATUS_FILE_RENAMED","NT_STATUS_FILE_RENAMED"),
0x00d6: ("NT_STATUS_VIRTUAL_CIRCUIT_CLOSED","The session was cancelled."),
0x00d7: ("NT_STATUS_NO_SECURITY_ON_OBJECT","Unable to perform a security operation on an object which has no associated security."),
0x00d8: ("NT_STATUS_CANT_WAIT","NT_STATUS_CANT_WAIT"),
0x00d9: ("NT_STATUS_PIPE_EMPTY","The pipe is being closed."),
0x00da: ("NT_STATUS_CANT_ACCESS_DOMAIN_INFO","Indicates a Windows NT Server could not be contacted or that objects within the domain are protected such that necessary information could not be retrieved."),
0x00db: ("NT_STATUS_CANT_TERMINATE_SELF","NT_STATUS_CANT_TERMINATE_SELF"),
0x00dc: ("NT_STATUS_INVALID_SERVER_STATE","The security account manager (SAM) or local security authority (LSA) server was in the wrong state to perform the security operation."),
0x00dd: ("NT_STATUS_INVALID_DOMAIN_STATE","The domain was in the wrong state to perform the security operation."),
0x00de: ("NT_STATUS_INVALID_DOMAIN_ROLE","This operation is only allowed for the Primary Domain Controller of the domain."),
0x00df: ("NT_STATUS_NO_SUCH_DOMAIN","The specified domain did not exist."),
0x00e0: ("NT_STATUS_DOMAIN_EXISTS","The specified domain already exists."),
0x00e1: ("NT_STATUS_DOMAIN_LIMIT_EXCEEDED","An attempt was made to exceed the limit on the number of domains per server."),
0x00e2: ("NT_STATUS_OPLOCK_NOT_GRANTED","NT_STATUS_OPLOCK_NOT_GRANTED"),
0x00e3: ("NT_STATUS_INVALID_OPLOCK_PROTOCOL","NT_STATUS_INVALID_OPLOCK_PROTOCOL"),
0x00e4: ("NT_STATUS_INTERNAL_DB_CORRUPTION","Unable to complete the requested operation because of either a catastrophic media failure or a data structure corruption on the disk."),
0x00e5: ("NT_STATUS_INTERNAL_ERROR","The security account database contains an internal inconsistency."),
0x00e6: ("NT_STATUS_GENERIC_NOT_MAPPED","Generic access types were contained in an access mask which should already be mapped to non-generic types."),
0x00e7: ("NT_STATUS_BAD_DESCRIPTOR_FORMAT","A security descriptor is not in the right format (absolute or self-relative)."),
0x00e8: ("NT_STATUS_INVALID_USER_BUFFER","The supplied user buffer is not valid for the requested operation."),
0x00e9: ("NT_STATUS_UNEXPECTED_IO_ERROR","NT_STATUS_UNEXPECTED_IO_ERROR"),
0x00ea: ("NT_STATUS_UNEXPECTED_MM_CREATE_ERR","NT_STATUS_UNEXPECTED_MM_CREATE_ERR"),
0x00eb: ("NT_STATUS_UNEXPECTED_MM_MAP_ERROR","NT_STATUS_UNEXPECTED_MM_MAP_ERROR"),
0x00ec: ("NT_STATUS_UNEXPECTED_MM_EXTEND_ERR","NT_STATUS_UNEXPECTED_MM_EXTEND_ERR"),
0x00ed: ("NT_STATUS_NOT_LOGON_PROCESS","The requested action is restricted for use by logon processes only. The calling process has not registered as a logon process."),
0x00ee: ("NT_STATUS_LOGON_SESSION_EXISTS","Cannot start a new logon session with an ID that is already in use."),
0x00ef: ("NT_STATUS_INVALID_PARAMETER_1","The parameter is incorrect."),
0x00f0: ("NT_STATUS_INVALID_PARAMETER_2","The parameter is incorrect."),
0x00f1: ("NT_STATUS_INVALID_PARAMETER_3","The parameter is incorrect."),
0x00f2: ("NT_STATUS_INVALID_PARAMETER_4","The parameter is incorrect."),
0x00f3: ("NT_STATUS_INVALID_PARAMETER_5","The parameter is incorrect."),
0x00f4: ("NT_STATUS_INVALID_PARAMETER_6","The parameter is incorrect."),
0x00f5: ("NT_STATUS_INVALID_PARAMETER_7","The parameter is incorrect."),
0x00f6: ("NT_STATUS_INVALID_PARAMETER_8","The parameter is incorrect."),
0x00f7: ("NT_STATUS_INVALID_PARAMETER_9","The parameter is incorrect."),
0x00f8: ("NT_STATUS_INVALID_PARAMETER_10","The parameter is incorrect."),
0x00f9: ("NT_STATUS_INVALID_PARAMETER_11","The parameter is incorrect."),
0x00fa: ("NT_STATUS_INVALID_PARAMETER_12","The parameter is incorrect."),
0x00fb: ("NT_STATUS_REDIRECTOR_NOT_STARTED","The system cannot find the path specified."),
0x00fc: ("NT_STATUS_REDIRECTOR_STARTED","NT_STATUS_REDIRECTOR_STARTED"),
0x00fd: ("NT_STATUS_STACK_OVERFLOW","Recursion too deep, stack overflowed."),
0x00fe: ("NT_STATUS_NO_SUCH_PACKAGE","A specified authentication package is unknown."),
0x00ff: ("NT_STATUS_BAD_FUNCTION_TABLE","NT_STATUS_BAD_FUNCTION_TABLE"),
0x0101: ("NT_STATUS_DIRECTORY_NOT_EMPTY","The directory is not empty."),
0x0102: ("NT_STATUS_FILE_CORRUPT_ERROR","The file or directory is corrupt and non-readable."),
0x0103: ("NT_STATUS_NOT_A_DIRECTORY","The directory name is invalid."),
0x0104: ("NT_STATUS_BAD_LOGON_SESSION_STATE","The logon session is not in a state that is consistent with the requested operation."),
0x0105: ("NT_STATUS_LOGON_SESSION_COLLISION","The logon session ID is already in use."),
0x0106: ("NT_STATUS_NAME_TOO_LONG","The filename or extension is too long."),
0x0107: ("NT_STATUS_FILES_OPEN","NT_STATUS_FILES_OPEN"),
0x0108: ("NT_STATUS_CONNECTION_IN_USE","The device is being accessed by an active process."),
0x0109: ("NT_STATUS_MESSAGE_NOT_FOUND","NT_STATUS_MESSAGE_NOT_FOUND"),
0x010a: ("NT_STATUS_PROCESS_IS_TERMINATING","Access is denied."),
0x010b: ("NT_STATUS_INVALID_LOGON_TYPE","A logon request contained an invalid logon type value."),
0x010c: ("NT_STATUS_NO_GUID_TRANSLATION","NT_STATUS_NO_GUID_TRANSLATION"),
0x010d: ("NT_STATUS_CANNOT_IMPERSONATE","Unable to impersonate via a named pipe until data has been read from that pipe."),
0x010e: ("NT_STATUS_IMAGE_ALREADY_LOADED","An instance of the service is already running."),
0x010f: ("NT_STATUS_ABIOS_NOT_PRESENT","NT_STATUS_ABIOS_NOT_PRESENT"),
0x0110: ("NT_STATUS_ABIOS_LID_NOT_EXIST","NT_STATUS_ABIOS_LID_NOT_EXIST"),
0x0111: ("NT_STATUS_ABIOS_LID_ALREADY_OWNED","NT_STATUS_ABIOS_LID_ALREADY_OWNED"),
0x0112: ("NT_STATUS_ABIOS_NOT_LID_OWNER","NT_STATUS_ABIOS_NOT_LID_OWNER"),
0x0113: ("NT_STATUS_ABIOS_INVALID_COMMAND","NT_STATUS_ABIOS_INVALID_COMMAND"),
0x0114: ("NT_STATUS_ABIOS_INVALID_LID","NT_STATUS_ABIOS_INVALID_LID"),
0x0115: ("NT_STATUS_ABIOS_SELECTOR_NOT_AVAILABLE","NT_STATUS_ABIOS_SELECTOR_NOT_AVAILABLE"),
0x0116: ("NT_STATUS_ABIOS_INVALID_SELECTOR","NT_STATUS_ABIOS_INVALID_SELECTOR"),
0x0117: ("NT_STATUS_NO_LDT","NT_STATUS_NO_LDT"),
0x0118: ("NT_STATUS_INVALID_LDT_SIZE","NT_STATUS_INVALID_LDT_SIZE"),
0x0119: ("NT_STATUS_INVALID_LDT_OFFSET","NT_STATUS_INVALID_LDT_OFFSET"),
0x011a: ("NT_STATUS_INVALID_LDT_DESCRIPTOR","NT_STATUS_INVALID_LDT_DESCRIPTOR"),
0x011b: ("NT_STATUS_INVALID_IMAGE_NE_FORMAT","%1 is not a valid Windows NT application."),
0x011c: ("NT_STATUS_RXACT_INVALID_STATE","The transaction state of a Registry subtree is incompatible with the requested operation."),
0x011d: ("NT_STATUS_RXACT_COMMIT_FAILURE","An internal security database corruption has been encountered."),
0x011e: ("NT_STATUS_MAPPED_FILE_SIZE_ZERO","The volume for a file has been externally altered such that the opened file is no longer valid."),
0x011f: ("NT_STATUS_TOO_MANY_OPENED_FILES","The system cannot open the file."),
0x0120: ("NT_STATUS_CANCELLED","The I/O operation has been aborted because of either a thread exit or an application request."),
0x0121: ("NT_STATUS_CANNOT_DELETE","Access is denied."),
0x0122: ("NT_STATUS_INVALID_COMPUTER_NAME","The format of the specified computer name is invalid."),
0x0123: ("NT_STATUS_FILE_DELETED","Access is denied."),
0x0124: ("NT_STATUS_SPECIAL_ACCOUNT","Cannot perform this operation on built-in accounts."),
0x0125: ("NT_STATUS_SPECIAL_GROUP","Cannot perform this operation on this built-in special group."),
0x0126: ("NT_STATUS_SPECIAL_USER","Cannot perform this operation on this built-in special user."),
0x0127: ("NT_STATUS_MEMBERS_PRIMARY_GROUP","The user cannot be removed from a group because the group is currently the user's primary group."),
0x0128: ("NT_STATUS_FILE_CLOSED","The handle is invalid."),
0x0129: ("NT_STATUS_TOO_MANY_THREADS","NT_STATUS_TOO_MANY_THREADS"),
0x012a: ("NT_STATUS_THREAD_NOT_IN_PROCESS","NT_STATUS_THREAD_NOT_IN_PROCESS"),
0x012b: ("NT_STATUS_TOKEN_ALREADY_IN_USE","The token is already in use as a primary token."),
0x012c: ("NT_STATUS_PAGEFILE_QUOTA_EXCEEDED","NT_STATUS_PAGEFILE_QUOTA_EXCEEDED"),
0x012d: ("NT_STATUS_COMMITMENT_LIMIT","The paging file is too small for this operation to complete."),
0x012e: ("NT_STATUS_INVALID_IMAGE_LE_FORMAT","%1 is not a valid Windows NT application."),
0x012f: ("NT_STATUS_INVALID_IMAGE_NOT_MZ","%1 is not a valid Windows NT application."),
0x0130: ("NT_STATUS_INVALID_IMAGE_PROTECT","%1 is not a valid Windows NT application."),
0x0131: ("NT_STATUS_INVALID_IMAGE_WIN_16","%1 is not a valid Windows NT application."),
0x0132: ("NT_STATUS_LOGON_SERVER_CONFLICT","NT_STATUS_LOGON_SERVER_CONFLICT"),
0x0133: ("NT_STATUS_TIME_DIFFERENCE_AT_DC","NT_STATUS_TIME_DIFFERENCE_AT_DC"),
0x0134: ("NT_STATUS_SYNCHRONIZATION_REQUIRED","NT_STATUS_SYNCHRONIZATION_REQUIRED"),
0x0135: ("NT_STATUS_DLL_NOT_FOUND","The specified module could not be found."),
0x0136: ("NT_STATUS_OPEN_FAILED","NT_STATUS_OPEN_FAILED"),
0x0137: ("NT_STATUS_IO_PRIVILEGE_FAILED","NT_STATUS_IO_PRIVILEGE_FAILED"),
0x0138: ("NT_STATUS_ORDINAL_NOT_FOUND","The operating system cannot run %1."),
0x0139: ("NT_STATUS_ENTRYPOINT_NOT_FOUND","The specified procedure could not be found."),
0x013a: ("NT_STATUS_CONTROL_C_EXIT","NT_STATUS_CONTROL_C_EXIT"),
0x013b: ("NT_STATUS_LOCAL_DISCONNECT","The specified network name is no longer available."),
0x013c: ("NT_STATUS_REMOTE_DISCONNECT","The specified network name is no longer available."),
0x013d: ("NT_STATUS_REMOTE_RESOURCES","The remote computer is not available."),
0x013e: ("NT_STATUS_LINK_FAILED","An unexpected network error occurred."),
0x013f: ("NT_STATUS_LINK_TIMEOUT","An unexpected network error occurred."),
0x0140: ("NT_STATUS_INVALID_CONNECTION","An unexpected network error occurred."),
0x0141: ("NT_STATUS_INVALID_ADDRESS","An unexpected network error occurred."),
0x0142: ("NT_STATUS_DLL_INIT_FAILED","A dynamic link library (DLL) initialization routine failed."),
0x0143: ("NT_STATUS_MISSING_SYSTEMFILE","NT_STATUS_MISSING_SYSTEMFILE"),
0x0144: ("NT_STATUS_UNHANDLED_EXCEPTION","NT_STATUS_UNHANDLED_EXCEPTION"),
0x0145: ("NT_STATUS_APP_INIT_FAILURE","NT_STATUS_APP_INIT_FAILURE"),
0x0146: ("NT_STATUS_PAGEFILE_CREATE_FAILED","NT_STATUS_PAGEFILE_CREATE_FAILED"),
0x0147: ("NT_STATUS_NO_PAGEFILE","NT_STATUS_NO_PAGEFILE"),
0x0148: ("NT_STATUS_INVALID_LEVEL","The system call level is not correct."),
0x0149: ("NT_STATUS_WRONG_PASSWORD_CORE","The specified network password is not correct."),
0x014a: ("NT_STATUS_ILLEGAL_FLOAT_CONTEXT","NT_STATUS_ILLEGAL_FLOAT_CONTEXT"),
0x014b: ("NT_STATUS_PIPE_BROKEN","The pipe has been ended."),
0x014c: ("NT_STATUS_REGISTRY_CORRUPT","The configuration registry database is corrupt."),
0x014d: ("NT_STATUS_REGISTRY_IO_FAILED","An I/O operation initiated by the Registry failed unrecoverably. The Registry could not read in, or write out, or flush, one of the files that contain the system's image of the Registry."),
0x014e: ("NT_STATUS_NO_EVENT_PAIR","NT_STATUS_NO_EVENT_PAIR"),
0x014f: ("NT_STATUS_UNRECOGNIZED_VOLUME","The volume does not contain a recognized file system. Please make sure that all required file system drivers are loaded and that the volume is not corrupt."),
0x0150: ("NT_STATUS_SERIAL_NO_DEVICE_INITED","No serial device was successfully initialized. The serial driver will unload."),
0x0151: ("NT_STATUS_NO_SUCH_ALIAS","The specified local group does not exist."),
0x0152: ("NT_STATUS_MEMBER_NOT_IN_ALIAS","The specified account name is not a member of the local group."),
0x0153: ("NT_STATUS_MEMBER_IN_ALIAS","The specified account name is already a member of the local group."),
0x0154: ("NT_STATUS_ALIAS_EXISTS","The specified local group already exists."),
0x0155: ("NT_STATUS_LOGON_NOT_GRANTED","Logon failure: the user has not been granted the requested logon type at this computer."),
0x0156: ("NT_STATUS_TOO_MANY_SECRETS","The maximum number of secrets that may be stored in a single system has been exceeded."),
0x0157: ("NT_STATUS_SECRET_TOO_LONG","The length of a secret exceeds the maximum length allowed."),
0x0158: ("NT_STATUS_INTERNAL_DB_ERROR","The local security authority database contains an internal inconsistency."),
0x0159: ("NT_STATUS_FULLSCREEN_MODE","The requested operation cannot be performed in full-screen mode."),
0x015a: ("NT_STATUS_TOO_MANY_CONTEXT_IDS","During a logon attempt, the user's security context accumulated too many security IDs."),
0x015b: ("NT_STATUS_LOGON_TYPE_NOT_GRANTED","Logon failure: the user has not been granted the requested logon type at this computer."),
0x015c: ("NT_STATUS_NOT_REGISTRY_FILE","The system has attempted to load or restore a file into the Registry, but the specified file is not in a Registry file format."),
0x015d: ("NT_STATUS_NT_CROSS_ENCRYPTION_REQUIRED","A cross-encrypted password is necessary to change a user password."),
0x015e: ("NT_STATUS_DOMAIN_CTRLR_CONFIG_ERROR","NT_STATUS_DOMAIN_CTRLR_CONFIG_ERROR"),
0x015f: ("NT_STATUS_FT_MISSING_MEMBER","The request could not be performed because of an I/O device error."),
0x0160: ("NT_STATUS_ILL_FORMED_SERVICE_ENTRY","NT_STATUS_ILL_FORMED_SERVICE_ENTRY"),
0x0161: ("NT_STATUS_ILLEGAL_CHARACTER","NT_STATUS_ILLEGAL_CHARACTER"),
0x0162: ("NT_STATUS_UNMAPPABLE_CHARACTER","No mapping for the Unicode character exists in the target multi-byte code page."),
0x0163: ("NT_STATUS_UNDEFINED_CHARACTER","NT_STATUS_UNDEFINED_CHARACTER"),
0x0164: ("NT_STATUS_FLOPPY_VOLUME","NT_STATUS_FLOPPY_VOLUME"),
0x0165: ("NT_STATUS_FLOPPY_ID_MARK_NOT_FOUND","No ID address mark was found on the floppy disk."),
0x0166: ("NT_STATUS_FLOPPY_WRONG_CYLINDER","Mismatch between the floppy disk sector ID field and the floppy disk controller track address."),
0x0167: ("NT_STATUS_FLOPPY_UNKNOWN_ERROR","The floppy disk controller reported an error that is not recognized by the floppy disk driver."),
0x0168: ("NT_STATUS_FLOPPY_BAD_REGISTERS","The floppy disk controller returned inconsistent results in its registers."),
0x0169: ("NT_STATUS_DISK_RECALIBRATE_FAILED","While accessing the hard disk, a recalibrate operation failed, even after retries."),
0x016a: ("NT_STATUS_DISK_OPERATION_FAILED","While accessing the hard disk, a disk operation failed even after retries."),
0x016b: ("NT_STATUS_DISK_RESET_FAILED","While accessing the hard disk, a disk controller reset was needed, but even that failed."),
0x016c: ("NT_STATUS_SHARED_IRQ_BUSY","Unable to open a device that was sharing an interrupt request (IRQ) with other devices. At least one other device that uses that IRQ was already opened."),
0x016d: ("NT_STATUS_FT_ORPHANING","The request could not be performed because of an I/O device error."),
0x0172: ("NT_STATUS_PARTITION_FAILURE","Tape could not be partitioned."),
0x0173: ("NT_STATUS_INVALID_BLOCK_LENGTH","When accessing a new tape of a multivolume partition, the current blocksize is incorrect."),
0x0174: ("NT_STATUS_DEVICE_NOT_PARTITIONED","Tape partition information could not be found when loading a tape."),
0x0175: ("NT_STATUS_UNABLE_TO_LOCK_MEDIA","Unable to lock the media eject mechanism."),
0x0176: ("NT_STATUS_UNABLE_TO_UNLOAD_MEDIA","Unable to unload the media."),
0x0177: ("NT_STATUS_EOM_OVERFLOW","Physical end of tape encountered."),
0x0178: ("NT_STATUS_NO_MEDIA","No media in drive."),
0x017a: ("NT_STATUS_NO_SUCH_MEMBER","A new member could not be added to a local group because the member does not exist."),
0x017b: ("NT_STATUS_INVALID_MEMBER","A new member could not be added to a local group because the member has the wrong account type."),
0x017c: ("NT_STATUS_KEY_DELETED","Illegal operation attempted on a Registry key which has been marked for deletion."),
0x017d: ("NT_STATUS_NO_LOG_SPACE","System could not allocate the required space in a Registry log."),
0x017e: ("NT_STATUS_TOO_MANY_SIDS","Too many security IDs have been specified."),
0x017f: ("NT_STATUS_LM_CROSS_ENCRYPTION_REQUIRED","A cross-encrypted password is necessary to change this user password."),
0x0180: ("NT_STATUS_KEY_HAS_CHILDREN","Cannot create a symbolic link in a Registry key that already has subkeys or values."),
0x0181: ("NT_STATUS_CHILD_MUST_BE_VOLATILE","Cannot create a stable subkey under a volatile parent key."),
0x0182: ("NT_STATUS_DEVICE_CONFIGURATION_ERROR","The parameter is incorrect."),
0x0183: ("NT_STATUS_DRIVER_INTERNAL_ERROR","The request could not be performed because of an I/O device error."),
0x0184: ("NT_STATUS_INVALID_DEVICE_STATE","The device does not recognize the command."),
0x0185: ("NT_STATUS_IO_DEVICE_ERROR","The request could not be performed because of an I/O device error."),
0x0186: ("NT_STATUS_DEVICE_PROTOCOL_ERROR","The request could not be performed because of an I/O device error."),
0x0187: ("NT_STATUS_BACKUP_CONTROLLER","NT_STATUS_BACKUP_CONTROLLER"),
0x0188: ("NT_STATUS_LOG_FILE_FULL","The event log file is full."),
0x0189: ("NT_STATUS_TOO_LATE","The media is write protected."),
0x018a: ("NT_STATUS_NO_TRUST_LSA_SECRET","The workstation does not have a trust secret."),
0x018b: ("NT_STATUS_NO_TRUST_SAM_ACCOUNT","The SAM database on the Windows NT Server does not have a computer account for this workstation trust relationship."),
0x018c: ("NT_STATUS_TRUSTED_DOMAIN_FAILURE","The trust relationship between the primary domain and the trusted domain failed."),
0x018d: ("NT_STATUS_TRUSTED_RELATIONSHIP_FAILURE","The trust relationship between this workstation and the primary domain failed."),
0x018e: ("NT_STATUS_EVENTLOG_FILE_CORRUPT","The event log file is corrupt."),
0x018f: ("NT_STATUS_EVENTLOG_CANT_START","No event log file could be opened, so the event logging service did not start."),
0x0190: ("NT_STATUS_TRUST_FAILURE","The network logon failed."),
0x0191: ("NT_STATUS_MUTANT_LIMIT_EXCEEDED","NT_STATUS_MUTANT_LIMIT_EXCEEDED"),
0x0192: ("NT_STATUS_NETLOGON_NOT_STARTED","An attempt was made to logon, but the network logon service was not started."),
0x0193: ("NT_STATUS_ACCOUNT_EXPIRED","The user's account has expired."),
0x0194: ("NT_STATUS_POSSIBLE_DEADLOCK","A potential deadlock condition has been detected."),
0x0195: ("NT_STATUS_NETWORK_CREDENTIAL_CONFLICT","The credentials supplied conflict with an existing set of credentials."),
0x0196: ("NT_STATUS_REMOTE_SESSION_LIMIT","An attempt was made to establish a session to a network server, but there are already too many sessions established to that server."),
0x0197: ("NT_STATUS_EVENTLOG_FILE_CHANGED","The event log file has changed between reads."),
0x0198: ("NT_STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT","The account used is an interdomain trust account. Use your global user account or local user account to access this server."),
0x0199: ("NT_STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT","The account used is a Computer Account. Use your global user account or local user account to access this server."),
    0x019a: ("NT_STATUS_NOLOGON_SERVER_TRUST_ACCOUNT","The account used is a server trust account. Use your global user account or local user account to access this server."),
0x019b: ("NT_STATUS_DOMAIN_TRUST_INCONSISTENT","The name or security ID (SID) of the domain specified is inconsistent with the trust information for that domain."),
0x019c: ("NT_STATUS_FS_DRIVER_REQUIRED","NT_STATUS_FS_DRIVER_REQUIRED"),
0x0202: ("NT_STATUS_NO_USER_SESSION_KEY","There is no user session key for the specified logon session."),
0x0203: ("NT_STATUS_USER_SESSION_DELETED","An unexpected network error occurred."),
0x0204: ("NT_STATUS_RESOURCE_LANG_NOT_FOUND","The specified resource language ID cannot be found in the image file."),
0x0205: ("NT_STATUS_INSUFF_SERVER_RESOURCES","Not enough server storage is available to process this command."),
0x0206: ("NT_STATUS_INVALID_BUFFER_SIZE","The supplied user buffer is not valid for the requested operation."),
0x0207: ("NT_STATUS_INVALID_ADDRESS_COMPONENT","The format of the specified network name is invalid."),
0x0208: ("NT_STATUS_INVALID_ADDRESS_WILDCARD","The format of the specified network name is invalid."),
0x0209: ("NT_STATUS_TOO_MANY_ADDRESSES","The name limit for the local computer network adapter card was exceeded."),
0x020a: ("NT_STATUS_ADDRESS_ALREADY_EXISTS","A duplicate name exists on the network."),
0x020b: ("NT_STATUS_ADDRESS_CLOSED","The specified network name is no longer available."),
0x020c: ("NT_STATUS_CONNECTION_DISCONNECTED","The specified network name is no longer available."),
0x020d: ("NT_STATUS_CONNECTION_RESET","The specified network name is no longer available."),
0x020e: ("NT_STATUS_TOO_MANY_NODES","The name limit for the local computer network adapter card was exceeded."),
0x020f: ("NT_STATUS_TRANSACTION_ABORTED","An unexpected network error occurred."),
0x0210: ("NT_STATUS_TRANSACTION_TIMED_OUT","An unexpected network error occurred."),
0x0211: ("NT_STATUS_TRANSACTION_NO_RELEASE","An unexpected network error occurred."),
0x0212: ("NT_STATUS_TRANSACTION_NO_MATCH","An unexpected network error occurred."),
0x0213: ("NT_STATUS_TRANSACTION_RESPONDED","An unexpected network error occurred."),
0x0214: ("NT_STATUS_TRANSACTION_INVALID_ID","An unexpected network error occurred."),
0x0215: ("NT_STATUS_TRANSACTION_INVALID_TYPE","An unexpected network error occurred."),
0x0216: ("NT_STATUS_NOT_SERVER_SESSION","The network request is not supported."),
0x0217: ("NT_STATUS_NOT_CLIENT_SESSION","The network request is not supported."),
0x0218: ("NT_STATUS_CANNOT_LOAD_REGISTRY_FILE","NT_STATUS_CANNOT_LOAD_REGISTRY_FILE"),
0x0219: ("NT_STATUS_DEBUG_ATTACH_FAILED","NT_STATUS_DEBUG_ATTACH_FAILED"),
0x021a: ("NT_STATUS_SYSTEM_PROCESS_TERMINATED","NT_STATUS_SYSTEM_PROCESS_TERMINATED"),
0x021b: ("NT_STATUS_DATA_NOT_ACCEPTED","NT_STATUS_DATA_NOT_ACCEPTED"),
0x021c: ("NT_STATUS_NO_BROWSER_SERVERS_FOUND","The list of servers for this workgroup is not currently available"),
0x021d: ("NT_STATUS_VDM_HARD_ERROR","NT_STATUS_VDM_HARD_ERROR"),
0x021e: ("NT_STATUS_DRIVER_CANCEL_TIMEOUT","NT_STATUS_DRIVER_CANCEL_TIMEOUT"),
0x021f: ("NT_STATUS_REPLY_MESSAGE_MISMATCH","NT_STATUS_REPLY_MESSAGE_MISMATCH"),
0x0220: ("NT_STATUS_MAPPED_ALIGNMENT","The base address or the file offset specified does not have the proper alignment."),
0x0221: ("NT_STATUS_IMAGE_CHECKSUM_MISMATCH","%1 is not a valid Windows NT application."),
0x0222: ("NT_STATUS_LOST_WRITEBEHIND_DATA","NT_STATUS_LOST_WRITEBEHIND_DATA"),
0x0223: ("NT_STATUS_CLIENT_SERVER_PARAMETERS_INVALID","NT_STATUS_CLIENT_SERVER_PARAMETERS_INVALID"),
0x0224: ("NT_STATUS_PASSWORD_MUST_CHANGE","The user must change his password before he logs on the first time."),
0x0225: ("NT_STATUS_NOT_FOUND","NT_STATUS_NOT_FOUND"),
0x0226: ("NT_STATUS_NOT_TINY_STREAM","NT_STATUS_NOT_TINY_STREAM"),
0x0227: ("NT_STATUS_RECOVERY_FAILURE","NT_STATUS_RECOVERY_FAILURE"),
0x0228: ("NT_STATUS_STACK_OVERFLOW_READ","NT_STATUS_STACK_OVERFLOW_READ"),
0x0229: ("NT_STATUS_FAIL_CHECK","NT_STATUS_FAIL_CHECK"),
0x022a: ("NT_STATUS_DUPLICATE_OBJECTID","NT_STATUS_DUPLICATE_OBJECTID"),
0x022b: ("NT_STATUS_OBJECTID_EXISTS","NT_STATUS_OBJECTID_EXISTS"),
0x022c: ("NT_STATUS_CONVERT_TO_LARGE","NT_STATUS_CONVERT_TO_LARGE"),
0x022d: ("NT_STATUS_RETRY","NT_STATUS_RETRY"),
0x022e: ("NT_STATUS_FOUND_OUT_OF_SCOPE","NT_STATUS_FOUND_OUT_OF_SCOPE"),
0x022f: ("NT_STATUS_ALLOCATE_BUCKET","NT_STATUS_ALLOCATE_BUCKET"),
0x0230: ("NT_STATUS_PROPSET_NOT_FOUND","NT_STATUS_PROPSET_NOT_FOUND"),
0x0231: ("NT_STATUS_MARSHALL_OVERFLOW","NT_STATUS_MARSHALL_OVERFLOW"),
0x0232: ("NT_STATUS_INVALID_VARIANT","NT_STATUS_INVALID_VARIANT"),
0x0233: ("NT_STATUS_DOMAIN_CONTROLLER_NOT_FOUND","Could not find the domain controller for this domain."),
0x0234: ("NT_STATUS_ACCOUNT_LOCKED_OUT","The referenced account is currently locked out and may not be logged on to."),
0x0235: ("NT_STATUS_HANDLE_NOT_CLOSABLE","The handle is invalid."),
0x0236: ("NT_STATUS_CONNECTION_REFUSED","The remote system refused the network connection."),
0x0237: ("NT_STATUS_GRACEFUL_DISCONNECT","The network connection was gracefully closed."),
0x0238: ("NT_STATUS_ADDRESS_ALREADY_ASSOCIATED","The network transport endpoint already has an address associated with it."),
0x0239: ("NT_STATUS_ADDRESS_NOT_ASSOCIATED","An address has not yet been associated with the network endpoint."),
0x023a: ("NT_STATUS_CONNECTION_INVALID","An operation was attempted on a non-existent network connection."),
0x023b: ("NT_STATUS_CONNECTION_ACTIVE","An invalid operation was attempted on an active network connection."),
0x023c: ("NT_STATUS_NETWORK_UNREACHABLE","The remote network is not reachable by the transport."),
0x023d: ("NT_STATUS_HOST_UNREACHABLE","The remote system is not reachable by the transport."),
0x023e: ("NT_STATUS_PROTOCOL_UNREACHABLE","The remote system does not support the transport protocol."),
0x023f: ("NT_STATUS_PORT_UNREACHABLE","No service is operating at the destination network endpoint on the remote system."),
0x0240: ("NT_STATUS_REQUEST_ABORTED","The request was aborted."),
0x0241: ("NT_STATUS_CONNECTION_ABORTED","The network connection was aborted by the local system."),
0x0242: ("NT_STATUS_BAD_COMPRESSION_BUFFER","NT_STATUS_BAD_COMPRESSION_BUFFER"),
0x0243: ("NT_STATUS_USER_MAPPED_FILE","The requested operation cannot be performed on a file with a user mapped section open."),
0x0244: ("NT_STATUS_AUDIT_FAILED","NT_STATUS_AUDIT_FAILED"),
0x0245: ("NT_STATUS_TIMER_RESOLUTION_NOT_SET","NT_STATUS_TIMER_RESOLUTION_NOT_SET"),
0x0246: ("NT_STATUS_CONNECTION_COUNT_LIMIT","A connection to the server could not be made because the limit on the number of concurrent connections for this account has been reached."),
0x0247: ("NT_STATUS_LOGIN_TIME_RESTRICTION","Attempting to login during an unauthorized time of day for this account."),
0x0248: ("NT_STATUS_LOGIN_WKSTA_RESTRICTION","The account is not authorized to login from this station."),
0x0249: ("NT_STATUS_IMAGE_MP_UP_MISMATCH","%1 is not a valid Windows NT application."),
0x0250: ("NT_STATUS_INSUFFICIENT_LOGON_INFO","NT_STATUS_INSUFFICIENT_LOGON_INFO"),
0x0251: ("NT_STATUS_BAD_DLL_ENTRYPOINT","NT_STATUS_BAD_DLL_ENTRYPOINT"),
0x0252: ("NT_STATUS_BAD_SERVICE_ENTRYPOINT","NT_STATUS_BAD_SERVICE_ENTRYPOINT"),
0x0253: ("NT_STATUS_LPC_REPLY_LOST","The security account database contains an internal inconsistency."),
0x0254: ("NT_STATUS_IP_ADDRESS_CONFLICT1","NT_STATUS_IP_ADDRESS_CONFLICT1"),
0x0255: ("NT_STATUS_IP_ADDRESS_CONFLICT2","NT_STATUS_IP_ADDRESS_CONFLICT2"),
0x0256: ("NT_STATUS_REGISTRY_QUOTA_LIMIT","NT_STATUS_REGISTRY_QUOTA_LIMIT"),
0x0257: ("NT_STATUS_PATH_NOT_COVERED","The remote system is not reachable by the transport."),
0x0258: ("NT_STATUS_NO_CALLBACK_ACTIVE","NT_STATUS_NO_CALLBACK_ACTIVE"),
0x0259: ("NT_STATUS_LICENSE_QUOTA_EXCEEDED","The service being accessed is licensed for a particular number of connections. No more connections can be made to the service at this time because there are already as many connections as the service can accept."),
0x025a: ("NT_STATUS_PWD_TOO_SHORT","NT_STATUS_PWD_TOO_SHORT"),
0x025b: ("NT_STATUS_PWD_TOO_RECENT","NT_STATUS_PWD_TOO_RECENT"),
0x025c: ("NT_STATUS_PWD_HISTORY_CONFLICT","NT_STATUS_PWD_HISTORY_CONFLICT"),
0x025e: ("NT_STATUS_PLUGPLAY_NO_DEVICE","The specified service is disabled and cannot be started."),
0x025f: ("NT_STATUS_UNSUPPORTED_COMPRESSION","NT_STATUS_UNSUPPORTED_COMPRESSION"),
0x0260: ("NT_STATUS_INVALID_HW_PROFILE","NT_STATUS_INVALID_HW_PROFILE"),
0x0261: ("NT_STATUS_INVALID_PLUGPLAY_DEVICE_PATH","NT_STATUS_INVALID_PLUGPLAY_DEVICE_PATH"),
0x0262: ("NT_STATUS_DRIVER_ORDINAL_NOT_FOUND","The operating system cannot run %1."),
0x0263: ("NT_STATUS_DRIVER_ENTRYPOINT_NOT_FOUND","The specified procedure could not be found."),
0x0264: ("NT_STATUS_RESOURCE_NOT_OWNED","Attempt to release mutex not owned by caller."),
0x0265: ("NT_STATUS_TOO_MANY_LINKS","An attempt was made to create more links on a file than the file system supports."),
0x0266: ("NT_STATUS_QUOTA_LIST_INCONSISTENT","NT_STATUS_QUOTA_LIST_INCONSISTENT"),
0x0267: ("NT_STATUS_FILE_IS_OFFLINE","NT_STATUS_FILE_IS_OFFLINE"),
0x0275: ("NT_STATUS_NOT_A_REPARSE_POINT","NT_STATUS_NOT_A_REPARSE_POINT"),
0x0EDE: ("NT_STATUS_NO_SUCH_JOB","NT_STATUS_NO_SUCH_JOB"),
}
dos_msgs = {
ERRbadfunc: ("ERRbadfunc", "Invalid function."),
ERRbadfile: ("ERRbadfile", "File not found."),
ERRbadpath: ("ERRbadpath", "Directory invalid."),
ERRnofids: ("ERRnofids", "No file descriptors available"),
ERRnoaccess: ("ERRnoaccess", "Access denied."),
ERRbadfid: ("ERRbadfid", "Invalid file handle."),
ERRbadmcb: ("ERRbadmcb", "Memory control blocks destroyed."),
ERRnomem: ("ERRnomem", "Insufficient server memory to perform the requested function."),
ERRbadmem: ("ERRbadmem", "Invalid memory block address."),
ERRbadenv: ("ERRbadenv", "Invalid environment."),
11: ("ERRbadformat", "Invalid format."),
ERRbadaccess: ("ERRbadaccess", "Invalid open mode."),
ERRbaddata: ("ERRbaddata", "Invalid data."),
ERRres: ("ERRres", "reserved."),
ERRbaddrive: ("ERRbaddrive", "Invalid drive specified."),
ERRremcd: ("ERRremcd", "A Delete Directory request attempted to remove the server's current directory."),
ERRdiffdevice: ("ERRdiffdevice", "Not same device."),
ERRnofiles: ("ERRnofiles", "A File Search command can find no more files matching the specified criteria."),
ERRbadshare: ("ERRbadshare", "The sharing mode specified for an Open conflicts with existing FIDs on the file."),
ERRlock: ("ERRlock", "A Lock request conflicted with an existing lock or specified an invalid mode, or an Unlock requested attempted to remove a lock held by another process."),
ERRunsup: ("ERRunsup", "The operation is unsupported"),
ERRnosuchshare: ("ERRnosuchshare", "You specified an invalid share name"),
ERRfilexists: ("ERRfilexists", "The file named in a Create Directory, Make New File or Link request already exists."),
ERRinvalidname: ("ERRinvalidname", "Invalid name"),
ERRbadpipe: ("ERRbadpipe", "Pipe invalid."),
ERRpipebusy: ("ERRpipebusy", "All instances of the requested pipe are busy."),
ERRpipeclosing: ("ERRpipeclosing", "Pipe close in progress."),
ERRnotconnected: ("ERRnotconnected", "No process on other end of pipe."),
ERRmoredata: ("ERRmoredata", "There is more data to be returned."),
ERRinvgroup: ("ERRinvgroup", "Invalid workgroup (try the -W option)"),
ERRlogonfailure: ("ERRlogonfailure", "Logon failure"),
ERRdiskfull: ("ERRdiskfull", "Disk full"),
ERRgeneral: ("ERRgeneral", "General failure"),
ERRunknownlevel: ("ERRunknownlevel", "Unknown info level")
}
server_msgs = {
1: ("ERRerror", "Non-specific error code."),
2: ("ERRbadpw", "Bad password - name/password pair in a Tree Connect or Session Setup are invalid."),
3: ("ERRbadtype", "reserved."),
4: ("ERRaccess", "The requester does not have the necessary access rights within the specified context for the requested function. The context is defined by the TID or the UID."),
5: ("ERRinvnid", "The tree ID (TID) specified in a command was invalid."),
6: ("ERRinvnetname", "Invalid network name in tree connect."),
7: ("ERRinvdevice", "Invalid device - printer request made to non-printer connection or non-printer request made to printer connection."),
49: ("ERRqfull", "Print queue full (files) -- returned by open print file."),
50: ("ERRqtoobig", "Print queue full -- no space."),
51: ("ERRqeof", "EOF on print queue dump."),
52: ("ERRinvpfid", "Invalid print file FID."),
64: ("ERRsmbcmd", "The server did not recognize the command received."),
65: ("ERRsrverror","The server encountered an internal error, e.g., system file unavailable."),
67: ("ERRfilespecs", "The file handle (FID) and pathname parameters contained an invalid combination of values."),
68: ("ERRreserved", "reserved."),
69: ("ERRbadpermits", "The access permissions specified for a file or directory are not a valid combination. The server cannot set the requested attribute."),
70: ("ERRreserved", "reserved."),
71: ("ERRsetattrmode", "The attribute mode in the Set File Attribute request is invalid."),
81: ("ERRpaused", "Server is paused."),
82: ("ERRmsgoff", "Not receiving messages."),
83: ("ERRnoroom", "No room to buffer message."),
87: ("ERRrmuns", "Too many remote user names."),
88: ("ERRtimeout", "Operation timed out."),
89: ("ERRnoresource", "No resources currently available for request."),
90: ("ERRtoomanyuids", "Too many UIDs active on this session."),
91: ("ERRbaduid", "The UID is not known as a valid ID on this session."),
250: ("ERRusempx","Temp unable to support Raw, use MPX mode."),
251: ("ERRusestd","Temp unable to support Raw, use standard read/write."),
252: ("ERRcontmpx", "Continue in MPX mode."),
253: ("ERRreserved", "reserved."),
254: ("ERRreserved", "reserved."),
0xFFFF: ("ERRnosupport", "Function not supported.")
}
    # Error classes
ERRDOS = 0x1
error_classes = { 0: ("SUCCESS", {}),
ERRDOS: ("ERRDOS", dos_msgs),
0x02: ("ERRSRV",server_msgs),
0x03: ("ERRHRD",hard_msgs),
0x04: ("ERRXOS", {} ),
0xE1: ("ERRRMX1", {} ),
0xE2: ("ERRRMX2", {} ),
0xE3: ("ERRRMX3", {} ),
0xC000: ("ERRNT", nt_msgs),
0xFF: ("ERRCMD", {} ) }
    def __init__( self, error_string, error_class, error_code, nt_status = 0):
        Exception.__init__(self, error_string)
        self._args = error_string
if nt_status:
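            # With FLAGS2_NT_STATUS set, the 32-bit status arrives split across
            # the DOS-era class/code header fields, so the two are swapped here:
            # the high word (0xC000) indexes error_classes and the low word
            # indexes nt_msgs.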
self.error_class = error_code
self.error_code = error_class
else:
self.error_class = error_class
self.error_code = error_code
def get_error_class( self ):
return self.error_class
def get_error_code( self ):
return self.error_code
def __str__( self ):
error_class = SessionError.error_classes.get( self.error_class, None )
if not error_class:
error_code_str = self.error_code
error_class_str = self.error_class
else:
error_class_str = error_class[0]
error_code = error_class[1].get( self.error_code, None )
if not error_code:
error_code_str = self.error_code
else:
                error_code_str = '%s(%s)' % error_code
return 'SMB SessionError: class: %s, code: %s' % (error_class_str, error_code_str)
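    # A decoding sketch (illustrative only): an NT status of 0xC0000234 (account
    # locked out) arrives split as class 0x0234 / code 0xC000, and the swap in
    # __init__ maps it back for lookup in error_classes/nt_msgs:
    #
    #   e = SessionError('SMB Library Error', 0x0234, 0xC000, 1)
    #   str(e)   # -> 'SMB SessionError: class: ERRNT, code:
    #            #     NT_STATUS_ACCOUNT_LOCKED_OUT(The referenced account is ...)'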
# Raised when a feature present or required in the protocol is not
# currently supported by pysmb
class UnsupportedFeature(Exception): pass
# Contains information about an SMB shared device/service
class SharedDevice:
def __init__(self, name, type, comment):
self.__name = name
self.__type = type
self.__comment = comment
def get_name(self):
return self.__name
def get_type(self):
return self.__type
def get_comment(self):
return self.__comment
def __repr__(self):
return '<SharedDevice instance: name=' + self.__name + ', type=' + str(self.__type) + ', comment="' + self.__comment + '">'
# Contains information about the shared file/directory
class SharedFile:
def __init__(self, ctime, atime, mtime, filesize, allocsize, attribs, shortname, longname):
self.__ctime = ctime
self.__atime = atime
self.__mtime = mtime
self.__filesize = filesize
self.__allocsize = allocsize
self.__attribs = attribs
try:
self.__shortname = shortname[:string.index(shortname, '\0')]
except ValueError:
self.__shortname = shortname
try:
self.__longname = longname[:string.index(longname, '\0')]
except ValueError:
self.__longname = longname
def get_ctime(self):
return self.__ctime
def get_ctime_epoch(self):
return self.__convert_smbtime(self.__ctime)
def get_mtime(self):
return self.__mtime
def get_mtime_epoch(self):
return self.__convert_smbtime(self.__mtime)
def get_atime(self):
return self.__atime
def get_atime_epoch(self):
return self.__convert_smbtime(self.__atime)
def get_filesize(self):
return self.__filesize
def get_allocsize(self):
return self.__allocsize
def get_attributes(self):
return self.__attribs
def is_archive(self):
return self.__attribs & ATTR_ARCHIVE
def is_compressed(self):
return self.__attribs & ATTR_COMPRESSED
def is_normal(self):
return self.__attribs & ATTR_NORMAL
def is_hidden(self):
return self.__attribs & ATTR_HIDDEN
def is_readonly(self):
return self.__attribs & ATTR_READONLY
def is_temporary(self):
return self.__attribs & ATTR_TEMPORARY
def is_directory(self):
return self.__attribs & ATTR_DIRECTORY
def is_system(self):
return self.__attribs & ATTR_SYSTEM
def get_shortname(self):
return self.__shortname
def get_longname(self):
return self.__longname
def __repr__(self):
return '<SharedFile instance: shortname="' + self.__shortname + '", longname="' + self.__longname + '", filesize=' + str(self.__filesize) + '>'
def __convert_smbtime(self, t):
x = t >> 32
y = t & 0xffffffffL
geo_cal_offset = 11644473600.0 # = 369.0 * 365.25 * 24 * 60 * 60 - (3.0 * 24 * 60 * 60 + 6.0 * 60 * 60)
return ((x * 4.0 * (1 << 30) + (y & 0xfff00000L)) * 1.0e-7 - geo_cal_offset)
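# A sanity sketch for SharedFile.__convert_smbtime (illustrative, not part of the
# API): FILETIME counts 100ns ticks since 1601-01-01, so the Unix epoch falls at
# 116444736000000000 ticks, i.e. 116444736000000000 * 1.0e-7 == 11644473600.0,
# exactly the geo_cal_offset subtracted above:
#
#   sf = SharedFile(116444736000000000, 0, 0, 0, 0, 0, 'A\0', 'a.txt\0')
#   sf.get_ctime_epoch()   # ~0.0, i.e. 1970-01-01 (low bits are masked, so approximate)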
# Contains information about an SMB machine
class SMBMachine:
def __init__(self, nbname, type, comment):
self.__nbname = nbname
self.__type = type
self.__comment = comment
def __repr__(self):
return '<SMBMachine instance: nbname="' + self.__nbname + '", type=' + hex(self.__type) + ', comment="' + self.__comment + '">'
class SMBDomain:
def __init__(self, nbgroup, type, master_browser):
self.__nbgroup = nbgroup
self.__type = type
self.__master_browser = master_browser
def __repr__(self):
return '<SMBDomain instance: nbgroup="' + self.__nbgroup + '", type=' + hex(self.__type) + ', master browser="' + self.__master_browser + '">'
# Represents an SMB packet
class NewSMBPacket(Structure):
structure = (
('Signature', '"\xffSMB'),
('Command','B=0'),
('ErrorClass','B=0'),
('_reserved','B=0'),
('ErrorCode','<H=0'),
('Flags1','B=0'),
('Flags2','<H=0'),
('PIDHigh','<H=0'),
('SecurityFeatures','8s=""'),
('Reserved','<H=0'),
('Tid','<H=0xffff'),
('Pid','<H=0'),
('Uid','<H=0'),
('Mid','<H=0'),
('Data','*:'),
)
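    # Field formats above follow impacket's Structure mini-language (rough
    # summary; the Structure class is authoritative): '<B'/'<H'/'<L'/'<q' are
    # little-endian packed integers, 'z' a NUL-terminated string, ':' raw bytes,
    # a leading '"' a fixed byte constant, '=' a default value, and '_-Name' a
    # length computed for field Name.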
def __init__(self, **kargs):
Structure.__init__(self, **kargs)
        if 'Flags2' not in self.fields:
            self['Flags2'] = 0
        if 'Flags1' not in self.fields:
            self['Flags1'] = 0
        if 'data' not in kargs:
            self['Data'] = []
def addCommand(self, command):
if len(self['Data']) == 0:
self['Command'] = command.command
else:
self['Data'][-1]['Parameters']['AndXCommand'] = command.command
self['Data'][-1]['Parameters']['AndXOffset'] = len(self)
self['Data'].append(command)
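        # Chaining sketch (illustrative): the first addCommand() sets the
        # packet's Command; each later one patches the previous command's AndX
        # header, so the previous command's Parameters must be an
        # SMBAndXCommand_Parameters subclass with its required fields filled in:
        #
        #   pkt = NewSMBPacket()
        #   pkt.addCommand(setup_cmd)   # hypothetical SESSION_SETUP_ANDX command
        #   pkt.addCommand(tconx_cmd)   # links via setup_cmd's AndXCommand/AndXOffset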
def isMoreData(self):
return (self['Command'] in [SMB.SMB_COM_TRANSACTION, SMB.SMB_COM_READ_ANDX, SMB.SMB_COM_READ_RAW] and
self['ErrorClass'] == 1 and self['ErrorCode'] == SessionError.ERRmoredata)
def isMoreProcessingRequired(self):
return self['ErrorClass'] == 0x16 and self['ErrorCode'] == 0xc000
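        # A packing sketch (illustrative): the 32-bit NT status overlays the
        # little-endian ErrorClass/_reserved/ErrorCode header fields, so
        # STATUS_MORE_PROCESSING_REQUIRED (0xC0000016) is seen as:
        #
        #   import struct
        #   struct.unpack('<BBH', struct.pack('<L', 0xC0000016))
        #   # -> (0x16, 0x00, 0xC000) == (ErrorClass, _reserved, ErrorCode)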
def isValidAnswer(self, cmd):
# this was inside a loop reading more from the net (with recv_packet(None))
if self['Command'] == cmd:
if (self['ErrorClass'] == 0x00 and
self['ErrorCode'] == 0x00):
return 1
elif self.isMoreData():
return 1
elif self.isMoreProcessingRequired():
return 1
raise SessionError, ("SMB Library Error", self['ErrorClass'] + (self['_reserved'] << 8), self['ErrorCode'], self['Flags2'] & SMB.FLAGS2_NT_STATUS)
else:
raise UnsupportedFeature, ("Unexpected answer from server: Got %d, Expected %d" % (self['Command'], cmd))
class SMBPacket:
def __init__(self,data = ''):
# The uid attribute will be set when the client calls the login() method
self._command = 0x0
        self._error_class = 0x0
        self._reserved = 0x0
        self._error_code = 0x0
self._flags = 0x0
self._flags2 = 0x0
self._pad = '\0' * 12
self._tid = 0x0
self._pid = 0x0
self._uid = 0x0
self._mid = 0x0
self._wordcount = 0x0
self._parameter_words = ''
self._bytecount = 0x0
self._buffer = ''
if data != '':
self._command = ord(data[4])
self._error_class = ord(data[5])
self._reserved = ord(data[6])
self._error_code = unpack('<H',data[7:9])[0]
self._flags = ord(data[9])
self._flags2 = unpack('<H',data[10:12])[0]
self._tid = unpack('<H',data[24:26])[0]
self._pid = unpack('<H',data[26:28])[0]
self._uid = unpack('<H',data[28:30])[0]
self._mid = unpack('<H',data[30:32])[0]
self._wordcount = ord(data[32])
self._parameter_words = data[33:33+self._wordcount*2]
self._bytecount = ord(data[33+self._wordcount*2])
self._buffer = data[35+self._wordcount*2:]
def set_command(self,command):
self._command = command
def set_error_class(self, error_class):
self._error_class = error_class
def set_error_code(self,error_code):
self._error_code = error_code
def set_flags(self,flags):
self._flags = flags
def set_flags2(self, flags2):
self._flags2 = flags2
def set_pad(self, pad):
self._pad = pad
def set_tid(self,tid):
self._tid = tid
def set_pid(self,pid):
self._pid = pid
def set_uid(self,uid):
self._uid = uid
def set_mid(self,mid):
self._mid = mid
def set_parameter_words(self,param):
self._parameter_words = param
self._wordcount = len(param)/2
def set_buffer(self,buffer):
if type(buffer) is types.UnicodeType:
raise Exception('SMBPacket: Invalid buffer. Received unicode')
self._buffer = buffer
self._bytecount = len(buffer)
def get_command(self):
return self._command
def get_error_class(self):
return self._error_class
def get_error_code(self):
return self._error_code
def get_reserved(self):
return self._reserved
def get_flags(self):
return self._flags
def get_flags2(self):
return self._flags2
def get_pad(self):
return self._pad
def get_tid(self):
return self._tid
def get_pid(self):
return self._pid
def get_uid(self):
return self._uid
def get_mid(self):
return self._mid
def get_parameter_words(self):
return self._parameter_words
def get_wordcount(self):
return self._wordcount
def get_bytecount(self):
return self._bytecount
def get_buffer(self):
return self._buffer
def rawData(self):
data = pack('<4sBBBHBH12sHHHHB','\xffSMB',self._command,self._error_class,0,self._error_code,self._flags,
self._flags2,self._pad,self._tid, self._pid, self._uid, self._mid, self._wordcount) + self._parameter_words + pack('<H',self._bytecount) + self._buffer
return data
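# A round-trip sketch (illustrative): an SMBPacket can re-parse its own raw form.
#
#   p = SMBPacket()
#   p.set_command(SMB.SMB_COM_NEGOTIATE)
#   SMBPacket(p.rawData()).get_command()   # -> 0x72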
class SMBCommand(Structure):
structure = (
('WordCount', 'B=len(Parameters)/2'),
('_ParametersLength','_-Parameters','WordCount*2'),
('Parameters',':'), # default set by constructor
('ByteCount','<H-Data'),
('Data',':'), # default set by constructor
)
def __init__(self, commandOrData = None, data = None, **kargs):
        if isinstance(commandOrData, int):
self.command = commandOrData
else:
data = data or commandOrData
Structure.__init__(self, data = data, **kargs)
if data is None:
self['Parameters'] = ''
self['Data'] = ''
class AsciiOrUnicodeStructure(Structure):
def __init__(self, flags = 0, **kargs):
if flags & SMB.FLAGS2_UNICODE:
self.structure = self.UnicodeStructure
else:
self.structure = self.AsciiStructure
return Structure.__init__(self, **kargs)
class SMBCommand_Parameters(Structure):
pass
class SMBAndXCommand_Parameters(Structure):
commonHdr = (
('AndXCommand','B=0xff'),
('_reserved','B=0'),
('AndXOffset','<H=0'),
)
structure = ( # default structure, overriden by subclasses
('Data',':=""'),
)
############# TRANSACTIONS RELATED
# TRANS2_QUERY_FS_INFORMATION
# QUERY_FS Information Levels
# SMB_QUERY_FS_ATTRIBUTE_INFO
class SMBQueryFsAttributeInfo(Structure):
structure = (
('FileSystemAttributes','<L'),
('MaxFilenNameLengthInBytes','<L'),
('LengthOfFileSystemName','<L-FileSystemName'),
('FileSystemName',':'),
)
class SMBQueryFsInfoVolume(Structure):
structure = (
('ulVolSerialNbr','<L=0xABCDEFAA'),
('cCharCount','<B-VolumeLabel'),
('VolumeLabel','z'),
)
# SMB_QUERY_FS_SIZE_INFO
class SMBQueryFsSizeInfo(Structure):
structure = (
('TotalAllocationUnits','<q=148529400'),
('TotalFreeAllocationUnits','<q=14851044'),
('SectorsPerAllocationUnit','<L=2'),
('BytesPerSector','<L=512'),
)
# SMB_QUERY_FS_VOLUME_INFO
class SMBQueryFsVolumeInfo(Structure):
structure = (
('VolumeCreationTime','<q'),
('SerialNumber','<L=0xABCDEFAA'),
('VolumeLabelSize','<L=len(VolumeLabel)/2'),
('Reserved','<H=0'),
('VolumeLabel',':')
)
# SMB_FIND_FILE_BOTH_DIRECTORY_INFO level
class SMBFindFileBothDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=0'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L=0'),
#('ShortNameLength','<B-ShortName','len(ShortName)'),
('ShortNameLength','<B=0'),
('Reserved','<B=0'),
('ShortName','24s'),
('FileName',':'),
)
# SMB_FIND_FILE_ID_FULL_DIRECTORY_INFO level
class SMBFindFileIdFullDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=0'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L=0'),
#('ShortNameLength','<B-ShortName','len(ShortName)'),
('FileID','<q=0'),
('FileName',':'),
)
# SMB_FIND_FILE_ID_BOTH_DIRECTORY_INFO level
class SMBFindFileIdBothDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=0'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L=0'),
#('ShortNameLength','<B-ShortName','len(ShortName)'),
('ShortNameLength','<B=0'),
('Reserved','<B=0'),
('ShortName','24s'),
('Reserved','<H=0'),
('FileID','<q=0'),
('FileName',':'),
)
# SMB_FIND_FILE_DIRECTORY_INFO level
class SMBFindFileDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=1'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('FileName','z'),
)
# SMB_FIND_FILE_NAMES_INFO level
class SMBFindFileNamesInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('FileName','z'),
)
# SMB_FIND_FILE_FULL_DIRECTORY_INFO level
class SMBFindFileFullDirectoryInfo(Structure):
structure = (
('NextEntryOffset','<L=0'),
('FileIndex','<L=0'),
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('EndOfFile','<q=0'),
('AllocationSize','<q=1'),
('ExtFileAttributes','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('EaSize','<L'),
('FileName','z'),
)
# SMB_FIND_INFO_STANDARD level
class SMBFindInfoStandard(Structure):
structure = (
('ResumeKey','<L=0xff'),
('CreationDate','<H=0'),
('CreationTime','<H=0'),
('LastAccessDate','<H=0'),
('LastAccessTime','<H=0'),
('LastWriteDate','<H=0'),
('LastWriteTime','<H=0'),
('EaSize','<L'),
('AllocationSize','<L=1'),
('ExtFileAttributes','<H=0'),
('FileNameLength','<B-FileName','len(FileName)'),
('FileName','z'),
)
# SET_FILE_INFORMATION structures
# SMB_SET_FILE_DISPOSITION_INFO
class SMBSetFileDispositionInfo(Structure):
structure = (
('DeletePending','<B'),
)
# SMB_SET_FILE_BASIC_INFO
class SMBSetFileBasicInfo(Structure):
structure = (
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('ChangeTime','<q'),
('ExtFileAttributes','<H'),
('Reserved','<L'),
)
# SMB_SET_FILE_END_OF_FILE_INFO
class SMBSetFileEndOfFileInfo(Structure):
structure = (
('EndOfFile','<q'),
)
# TRANS2_FIND_NEXT2
class SMBFindNext2_Parameters(Structure):
structure = (
('SID','<H'),
('SearchCount','<H'),
('InformationLevel','<H'),
('ResumeKey','<L'),
('Flags','<H'),
('FileName','z'),
)
class SMBFindNext2Response_Parameters(Structure):
structure = (
('SearchCount','<H'),
('EndOfSearch','<H=1'),
('EaErrorOffset','<H=0'),
('LastNameOffset','<H=0'),
)
class SMBFindNext2_Data(Structure):
structure = (
('GetExtendedAttributesListLength','_-GetExtendedAttributesList', 'self["GetExtendedAttributesListLength"]'),
('GetExtendedAttributesList',':'),
)
# TRANS2_FIND_FIRST2
class SMBFindFirst2Response_Parameters(Structure):
structure = (
('SID','<H'),
('SearchCount','<H'),
('EndOfSearch','<H=1'),
('EaErrorOffset','<H=0'),
('LastNameOffset','<H=0'),
)
class SMBFindFirst2_Parameters(Structure):
structure = (
('SearchAttributes','<H'),
('SearchCount','<H'),
('Flags','<H'),
('InformationLevel','<H'),
('SearchStorageType','<L'),
('FileName','z'),
)
class SMBFindFirst2_Data(Structure):
structure = (
('GetExtendedAttributesListLength','_-GetExtendedAttributesList', 'self["GetExtendedAttributesListLength"]'),
('GetExtendedAttributesList',':'),
)
# TRANS2_SET_PATH_INFORMATION
class SMBSetPathInformation_Parameters(Structure):
structure = (
('InformationLevel','<H'),
('Reserved','<L'),
('FileName','z'),
)
class SMBSetPathInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
# TRANS2_SET_FILE_INFORMATION
class SMBSetFileInformation_Parameters(Structure):
structure = (
('FID','<H'),
('InformationLevel','<H'),
('Reserved','<H'),
)
class SMBSetFileInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
# TRANS2_QUERY_FILE_INFORMATION
class SMBQueryFileInformation_Parameters(Structure):
structure = (
('FID','<H'),
('InformationLevel','<H'),
)
class SMBQueryFileInformation_Data(Structure):
structure = (
('GetExtendedAttributeList',':'),
)
class SMBQueryFileInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
# TRANS2_QUERY_PATH_INFORMATION
class SMBQueryPathInformationResponse_Parameters(Structure):
structure = (
('EaErrorOffset','<H=0'),
)
class SMBQueryPathInformation_Parameters(Structure):
structure = (
('InformationLevel','<H'),
('Reserved','<L=0'),
('FileName','z'),
)
class SMBQueryPathInformation_Data(Structure):
structure = (
('GetExtendedAttributeList',':'),
)
# SMB_QUERY_FILE_EA_INFO
class SMBQueryFileEaInfo(Structure):
structure = (
('EaSize','<L=0'),
)
# SMB_QUERY_FILE_BASIC_INFO
class SMBQueryFileBasicInfo(Structure):
structure = (
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('ExtFileAttributes','<L'),
#('Reserved','<L=0'),
)
# SMB_QUERY_FILE_STANDARD_INFO
class SMBQueryFileStandardInfo(Structure):
structure = (
('AllocationSize','<q'),
('EndOfFile','<q'),
('NumberOfLinks','<L=0'),
('DeletePending','<B=0'),
('Directory','<B'),
)
# SMB_QUERY_FILE_ALL_INFO
class SMBQueryFileAllInfo(Structure):
structure = (
('CreationTime','<q'),
('LastAccessTime','<q'),
('LastWriteTime','<q'),
('LastChangeTime','<q'),
('ExtFileAttributes','<L'),
('Reserved','<L=0'),
('AllocationSize','<q'),
('EndOfFile','<q'),
('NumberOfLinks','<L=0'),
('DeletePending','<B=0'),
('Directory','<B'),
('Reserved','<H=0'),
('EaSize','<L=0'),
('FileNameLength','<L-FileName','len(FileName)'),
('FileName','z'),
)
# \PIPE\LANMAN NetShareEnum
class SMBNetShareEnum(Structure):
structure = (
('RAPOpcode','<H=0'),
('ParamDesc','z'),
('DataDesc','z'),
('InfoLevel','<H'),
('ReceiveBufferSize','<H'),
)
class SMBNetShareEnumResponse(Structure):
structure = (
('Status','<H=0'),
('Convert','<H=0'),
('EntriesReturned','<H'),
('EntriesAvailable','<H'),
)
class NetShareInfo1(Structure):
structure = (
('NetworkName','13s'),
('Pad','<B=0'),
('Type','<H=0'),
('RemarkOffsetLow','<H=0'),
('RemarkOffsetHigh','<H=0'),
)
# \PIPE\LANMAN NetServerGetInfo
class SMBNetServerGetInfoResponse(Structure):
structure = (
('Status','<H=0'),
('Convert','<H=0'),
('TotalBytesAvailable','<H'),
)
class SMBNetServerInfo1(Structure):
# Level 1 Response
structure = (
('ServerName','16s'),
('MajorVersion','B=5'),
('MinorVersion','B=0'),
('ServerType','<L=3'),
('ServerCommentLow','<H=0'),
('ServerCommentHigh','<H=0'),
)
# \PIPE\LANMAN NetShareGetInfo
class SMBNetShareGetInfo(Structure):
structure = (
('RAPOpcode','<H=0'),
('ParamDesc','z'),
('DataDesc','z'),
('ShareName','z'),
('InfoLevel','<H'),
('ReceiveBufferSize','<H'),
)
class SMBNetShareGetInfoResponse(Structure):
structure = (
('Status','<H=0'),
('Convert','<H=0'),
('TotalBytesAvailable','<H'),
)
############# Security Features
class SecurityFeatures(Structure):
structure = (
('Key','<L=0'),
('CID','<H=0'),
('SequenceNumber','<H=0'),
)
############# SMB_COM_QUERY_INFORMATION2 (0x23)
class SMBQueryInformation2_Parameters(Structure):
structure = (
('Fid','<H'),
)
class SMBQueryInformation2Response_Parameters(Structure):
structure = (
('CreateDate','<H'),
('CreationTime','<H'),
('LastAccessDate','<H'),
('LastAccessTime','<H'),
('LastWriteDate','<H'),
('LastWriteTime','<H'),
('FileDataSize','<L'),
('FileAllocationSize','<L'),
('FileAttributes','<L'),
)
############# SMB_COM_SESSION_SETUP_ANDX (0x73)
class SMBSessionSetupAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('MaxBuffer','<H'),
('MaxMpxCount','<H'),
('VCNumber','<H'),
('SessionKey','<L'),
('AnsiPwdLength','<H'),
('UnicodePwdLength','<H'),
('_reserved','<L=0'),
('Capabilities','<L'),
)
class SMBSessionSetupAndX_Extended_Parameters(SMBAndXCommand_Parameters):
structure = (
('MaxBufferSize','<H'),
('MaxMpxCount','<H'),
('VcNumber','<H'),
('SessionKey','<L'),
('SecurityBlobLength','<H'),
('Reserved','<L=0'),
('Capabilities','<L'),
)
class SMBSessionSetupAndX_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('AnsiPwdLength','_-AnsiPwd','self["AnsiPwdLength"]'),
('UnicodePwdLength','_-UnicodePwd','self["UnicodePwdLength"]'),
('AnsiPwd',':=""'),
('UnicodePwd',':=""'),
('Account','z=""'),
('PrimaryDomain','z=""'),
('NativeOS','z=""'),
('NativeLanMan','z=""'),
)
UnicodeStructure = (
('AnsiPwdLength','_-AnsiPwd','self["AnsiPwdLength"]'),
('UnicodePwdLength','_-UnicodePwd','self["UnicodePwdLength"]'),
('AnsiPwd',':=""'),
('UnicodePwd',':=""'),
('Account','u=""'),
('PrimaryDomain','u=""'),
('NativeOS','u=""'),
('NativeLanMan','u=""'),
)
class SMBSessionSetupAndX_Extended_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','z=""'),
('NativeLanMan','z=""'),
)
UnicodeStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','u=""'),
('NativeLanMan','u=""'),
)
class SMBSessionSetupAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Action','<H'),
)
class SMBSessionSetupAndX_Extended_Response_Parameters(SMBAndXCommand_Parameters):
structure = (
('Action','<H=0'),
('SecurityBlobLength','<H'),
)
class SMBSessionSetupAndXResponse_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('NativeOS','z=""'),
('NativeLanMan','z=""'),
('PrimaryDomain','z=""'),
)
UnicodeStructure = (
('NativeOS','u=""'),
('NativeLanMan','u=""'),
('PrimaryDomain','u=""'),
)
class SMBSessionSetupAndX_Extended_Response_Data(AsciiOrUnicodeStructure):
AsciiStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','z=""'),
('NativeLanMan','z=""'),
)
UnicodeStructure = (
('SecurityBlobLength','_-SecurityBlob','self["SecurityBlobLength"]'),
('SecurityBlob',':'),
('NativeOS','u=""'),
('NativeLanMan','u=""'),
)
############# SMB_COM_TREE_CONNECT (0x70)
class SMBTreeConnect_Parameters(SMBCommand_Parameters):
structure = (
)
class SMBTreeConnect_Data(SMBCommand_Parameters):
structure = (
('PathFormat','"\x04'),
('Path','z'),
('PasswordFormat','"\x04'),
('Password','z'),
('ServiceFormat','"\x04'),
('Service','z'),
)
############# SMB_COM_TREE_CONNECT_ANDX (0x75)
class SMBTreeConnectAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Flags','<H=0'),
('PasswordLength','<H'),
)
class SMBTreeConnectAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('OptionalSupport','<H=0'),
)
class SMBTreeConnectAndXExtendedResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('OptionalSupport','<H=1'),
('MaximalShareAccessRights','<L=0x1fffff'),
('GuestMaximalShareAccessRights','<L=0x1fffff'),
)
class SMBTreeConnectAndX_Data(Structure):
structure = (
('_PasswordLength','_-Password','self["_PasswordLength"]'),
('Password',':'),
('Path','z'),
('Service','z'),
)
class SMBTreeConnectAndXResponse_Data(Structure):
structure = (
('Service','z'),
('PadLen','_-Pad','self["PadLen"]'),
('Pad',':=""'),
('NativeFileSystem','z'),
)
############# SMB_COM_NT_CREATE_ANDX (0xA2)
class SMBNtCreateAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('_reserved', 'B=0'),
('FileNameLength','<H'), # NameLength
('CreateFlags','<L'), # Flags
('RootFid','<L=0'), # RootDirectoryFID
('AccessMask','<L'), # DesiredAccess
('AllocationSizeLo','<L=0'), # AllocationSize
('AllocationSizeHi','<L=0'),
('FileAttributes','<L=0'), # ExtFileAttributes
('ShareAccess','<L=3'), #
('Disposition','<L=1'), # CreateDisposition
('CreateOptions','<L'), # CreateOptions
('Impersonation','<L=2'),
('SecurityFlags','B=3'),
)
class SMBNtCreateAndXResponse_Parameters(SMBAndXCommand_Parameters):
# XXX Is there a memory leak in the response for NTCreate (where the Data section would be) in Win 2000, Win XP, and Win 2003?
structure = (
('OplockLevel', 'B=0'),
('Fid','<H'),
('CreateAction','<L'),
('CreateTime','<q=0'),
('LastAccessTime','<q=0'),
('LastWriteTime','<q=0'),
('LastChangeTime','<q=0'),
('FileAttributes','<L=0x80'),
('AllocationSize','<q=0'),
('EndOfFile','<q=0'),
('FileType','<H=0'),
('IPCState','<H=0'),
('IsDirectory','B'),
)
class SMBNtCreateAndXExtendedResponse_Parameters(SMBAndXCommand_Parameters):
# [MS-SMB] Extended response description
structure = (
('OplockLevel', 'B=0'),
('Fid','<H'),
('CreateAction','<L'),
('CreateTime','<q=0'),
('LastAccessTime','<q=0'),
('LastWriteTime','<q=0'),
('LastChangeTime','<q=0'),
('FileAttributes','<L=0x80'),
('AllocationSize','<q=0'),
('EndOfFile','<q=0'),
('FileType','<H=0'),
('IPCState','<H=0'),
('IsDirectory','B'),
('VolumeGUID','16s'),
('FileIdLow','<L=0'),
('FileIdHigh','<L=0'),
('MaximalAccessRights','<L=0x12019b'),
('GuestMaximalAccessRights','<L=0x120089'),
)
class SMBNtCreateAndX_Data(Structure):
structure = (
('FileName','z'),
)
############# SMB_COM_OPEN_ANDX (0x2D)
class SMBOpenAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Flags','<H=0'),
('DesiredAccess','<H=0'),
('SearchAttributes','<H=0'),
('FileAttributes','<H=0'),
('CreationTime','<L=0'),
('OpenMode','<H=1'), # SMB_O_OPEN = 1
('AllocationSize','<L=0'),
('Reserved','8s=""'),
)
class SMBOpenAndX_Data(SMBNtCreateAndX_Data):
pass
class SMBOpenAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Fid','<H=0'),
('FileAttributes','<H=0'),
('LastWriten','<L=0'),
('FileSize','<L=0'),
('GrantedAccess','<H=0'),
('FileType','<H=0'),
('IPCState','<H=0'),
('Action','<H=0'),
('ServerFid','<L=0'),
('_reserved','<H=0'),
)
############# SMB_COM_WRITE (0x0B)
class SMBWrite_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Count','<H'),
('Offset','<L'),
('Remaining','<H'),
)
class SMBWriteResponse_Parameters(SMBCommand_Parameters):
structure = (
('Count','<H'),
)
class SMBWrite_Data(Structure):
structure = (
('BufferFormat','<B=1'),
('DataLength','<H-Data'),
('Data',':'),
)
############# SMB_COM_WRITE_ANDX (0x2F)
class SMBWriteAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('_reserved','<L=0xff'),
('WriteMode','<H=8'),
('Remaining','<H'),
('DataLength_Hi','<H=0'),
('DataLength','<H'),
('DataOffset','<H=0'),
('HighOffset','<L=0'),
)
class SMBWriteAndX_Data(Structure):
structure = (
('Pad','<B=0'),
('DataLength','_-Data','self["DataLength"]'),
('Data',':'),
)
class SMBWriteAndX_Parameters2(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('_reserved','<L=0xff'),
('WriteMode','<H=8'),
('Remaining','<H'),
('DataLength_Hi','<H=0'),
('DataLength','<H'),
('DataOffset','<H=0'),
)
class SMBWriteAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Count','<H'),
('Available','<H'),
('Reserved','<L=0'),
)
############# SMB_COM_WRITE_RAW (0x1D)
class SMBWriteRaw_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Count','<H'),
('_reserved','<H=0'),
('Offset','<L'),
('Timeout','<L=0'),
('WriteMode','<H=0'),
('_reserved2','<L=0'),
('DataLength','<H'),
('DataOffset','<H=0'),
)
############# SMB_COM_READ (0x0A)
class SMBRead_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Count','<H'),
('Offset','<L'),
('Remaining','<H=Count'),
)
class SMBReadResponse_Parameters(Structure):
structure = (
('Count','<H=0'),
('_reserved','"\0\0\0\0\0\0\0\0'),
)
class SMBReadResponse_Data(Structure):
structure = (
('BufferFormat','<B=0x1'),
('DataLength','<H-Data'),
('Data',':'),
)
############# SMB_COM_READ_RAW (0x1A)
class SMBReadRaw_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('MaxCount','<H'),
('MinCount','<H=MaxCount'),
('Timeout','<L=0'),
('_reserved','<H=0'),
)
############# SMB_COM_NT_TRANSACT (0xA0)
class SMBNTTransaction_Parameters(SMBCommand_Parameters):
structure = (
('MaxSetupCount','<B=0'),
('Reserved1','<H=0'),
('TotalParameterCount','<L'),
('TotalDataCount','<L'),
('MaxParameterCount','<L=1024'),
('MaxDataCount','<L=65504'),
('ParameterCount','<L'),
('ParameterOffset','<L'),
('DataCount','<L'),
('DataOffset','<L'),
('SetupCount','<B=len(Setup)/2'),
('Function','<H=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBNTTransactionResponse_Parameters(SMBCommand_Parameters):
structure = (
('Reserved1','"\0\0\0'),
('TotalParameterCount','<L'),
('TotalDataCount','<L'),
('ParameterCount','<L'),
('ParameterOffset','<L'),
('ParameterDisplacement','<L=0'),
('DataCount','<L'),
('DataOffset','<L'),
('DataDisplacement','<L=0'),
('SetupCount','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBNTTransaction_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('NT_Trans_ParametersLength','_-NT_Trans_Parameters','self["NT_Trans_ParametersLength"]'),
('NT_Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('NT_Trans_DataLength','_-NT_Trans_Data','self["NT_Trans_DataLength"]'),
('NT_Trans_Data',':'),
)
class SMBNTTransactionResponse_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
############# SMB_COM_TRANSACTION2_SECONDARY (0x33)
class SMBTransaction2Secondary_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('DataCount','<H'),
('DataOffset','<H'),
('DataDisplacement','<H=0'),
('FID','<H'),
)
class SMBTransaction2Secondary_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
############# SMB_COM_TRANSACTION2 (0x32)
class SMBTransaction2_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('MaxParameterCount','<H=1024'),
('MaxDataCount','<H=65504'),
('MaxSetupCount','<B=0'),
('Reserved1','<B=0'),
('Flags','<H=0'),
('Timeout','<L=0'),
('Reserved2','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('DataCount','<H'),
('DataOffset','<H'),
('SetupCount','<B=len(Setup)/2'),
('Reserved3','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBTransaction2Response_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('Reserved1','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('ParameterDisplacement','<H=0'),
('DataCount','<H'),
('DataOffset','<H'),
('DataDisplacement','<H=0'),
('SetupCount','<B=0'),
('Reserved2','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBTransaction2_Data(Structure):
structure = (
# ('NameLength','_-Name','1'),
# ('Name',':'),
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
class SMBTransaction2Response_Data(Structure):
structure = (
('Pad1Length','_-Pad1','self["Pad1Length"]'),
('Pad1',':'),
('Trans_ParametersLength','_-Trans_Parameters','self["Trans_ParametersLength"]'),
('Trans_Parameters',':'),
('Pad2Length','_-Pad2','self["Pad2Length"]'),
('Pad2',':'),
('Trans_DataLength','_-Trans_Data','self["Trans_DataLength"]'),
('Trans_Data',':'),
)
############# SMB_COM_QUERY_INFORMATION (0x08)
class SMBQueryInformation_Data(Structure):
structure = (
('BufferFormat','B=4'),
('FileName','z'),
)
class SMBQueryInformationResponse_Parameters(Structure):
structure = (
('FileAttributes','<H'),
('LastWriteTime','<L'),
('FileSize','<L'),
('Reserved','"0123456789'),
)
############# SMB_COM_TRANSACTION (0x25)
class SMBTransaction_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('MaxParameterCount','<H=1024'),
('MaxDataCount','<H=65504'),
('MaxSetupCount','<B=0'),
('Reserved1','<B=0'),
('Flags','<H=0'),
('Timeout','<L=0'),
('Reserved2','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('DataCount','<H'),
('DataOffset','<H'),
('SetupCount','<B=len(Setup)/2'),
('Reserved3','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
class SMBTransactionResponse_Parameters(SMBCommand_Parameters):
structure = (
('TotalParameterCount','<H'),
('TotalDataCount','<H'),
('Reserved1','<H=0'),
('ParameterCount','<H'),
('ParameterOffset','<H'),
('ParameterDisplacement','<H=0'),
('DataCount','<H'),
('DataOffset','<H'),
('DataDisplacement','<H=0'),
('SetupCount','<B'),
('Reserved2','<B=0'),
('SetupLength','_-Setup','SetupCount*2'),
('Setup',':'),
)
# TODO: We should merge these two, but that will require fixing
# the instances where these structures are used on the client side
class SMBTransaction_SData(Structure):
structure = (
('Name','z'),
('Trans_ParametersLength','_-Trans_Parameters'),
('Trans_Parameters',':'),
('Trans_DataLength','_-Trans_Data'),
('Trans_Data',':'),
)
class SMBTransaction_Data(Structure):
structure = (
('NameLength','_-Name'),
('Name',':'),
('Trans_ParametersLength','_-Trans_Parameters'),
('Trans_Parameters',':'),
('Trans_DataLength','_-Trans_Data'),
('Trans_Data',':'),
)
class SMBTransactionResponse_Data(Structure):
structure = (
('Trans_ParametersLength','_-Trans_Parameters'),
('Trans_Parameters',':'),
('Trans_DataLength','_-Trans_Data'),
('Trans_Data',':'),
)
############# SMB_COM_READ_ANDX (0x2E)
class SMBReadAndX_Parameters(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('MaxCount','<H'),
('MinCount','<H=MaxCount'),
('_reserved','<L=0xffffffff'),
('Remaining','<H=MaxCount'),
('HighOffset','<L=0'),
)
class SMBReadAndX_Parameters2(SMBAndXCommand_Parameters):
structure = (
('Fid','<H'),
('Offset','<L'),
('MaxCount','<H'),
('MinCount','<H=MaxCount'),
('_reserved','<L=0xffffffff'),
('Remaining','<H=MaxCount'),
)
class SMBReadAndXResponse_Parameters(SMBAndXCommand_Parameters):
structure = (
('Remaining','<H=0'),
('DataMode','<H=0'),
('_reserved','<H=0'),
('DataCount','<H'),
('DataOffset','<H'),
('DataCount_Hi','<L'),
('_reserved2','"\0\0\0\0\0\0'),
)
############# SMB_COM_ECHO (0x2B)
class SMBEcho_Data(Structure):
structure = (
('Data',':'),
)
class SMBEcho_Parameters(Structure):
structure = (
('EchoCount','<H'),
)
class SMBEchoResponse_Data(Structure):
structure = (
('Data',':'),
)
class SMBEchoResponse_Parameters(Structure):
structure = (
('SequenceNumber','<H=1'),
)
############# SMB_COM_QUERY_INFORMATION_DISK (0x80)
class SMBQueryInformationDiskResponse_Parameters(Structure):
structure = (
('TotalUnits','<H'),
('BlocksPerUnit','<H'),
('BlockSize','<H'),
('FreeUnits','<H'),
('Reserved','<H=0'),
)
############# SMB_COM_LOGOFF_ANDX (0x74)
class SMBLogOffAndX(SMBAndXCommand_Parameters):
    structure = ()
############# SMB_COM_CLOSE (0x04)
class SMBClose_Parameters(SMBCommand_Parameters):
structure = (
('FID','<H'),
('Time','<L=0'),
)
############# SMB_COM_CREATE_DIRECTORY (0x00)
class SMBCreateDirectory_Data(Structure):
structure = (
('BufferFormat','<B=4'),
('DirectoryName','z'),
)
############# SMB_COM_DELETE (0x06)
class SMBDelete_Data(Structure):
structure = (
('BufferFormat','<B=4'),
('FileName','z'),
)
class SMBDelete_Parameters(Structure):
structure = (
('SearchAttributes','<H'),
)
############# SMB_COM_DELETE_DIRECTORY (0x01)
class SMBDeleteDirectory_Data(Structure):
structure = (
('BufferFormat','<B=4'),
('DirectoryName','z'),
)
############# SMB_COM_RENAME (0x07)
class SMBRename_Parameters(SMBCommand_Parameters):
structure = (
('SearchAttributes','<H'),
)
class SMBRename_Data(Structure):
structure = (
('BufferFormat1','<B=4'),
('OldFileName','z'),
('BufferFormat2','<B=4'),
('NewFileName','z'),
)
############# SMB_COM_OPEN (0x02)
class SMBOpen_Parameters(SMBCommand_Parameters):
structure = (
('DesiredAccess','<H=0'),
('SearchAttributes','<H=0'),
)
class SMBOpen_Data(Structure):
structure = (
('FileNameFormat','"\x04'),
('FileName','z'),
)
class SMBOpenResponse_Parameters(SMBCommand_Parameters):
structure = (
('Fid','<H=0'),
('FileAttributes','<H=0'),
('LastWriten','<L=0'),
('FileSize','<L=0'),
('GrantedAccess','<H=0'),
)
############# EXTENDED SECURITY CLASSES
class SMBExtended_Security_Parameters(Structure):
structure = (
('DialectIndex','<H'),
('SecurityMode','<B'),
('MaxMpxCount','<H'),
('MaxNumberVcs','<H'),
('MaxBufferSize','<L'),
('MaxRawSize','<L'),
('SessionKey','<L'),
('Capabilities','<L'),
('LowDateTime','<L'),
('HighDateTime','<L'),
('ServerTimeZone','<H'),
('ChallengeLength','<B'),
)
class SMBExtended_Security_Data(Structure):
structure = (
('ServerGUID','16s'),
('SecurityBlob',':'),
)
class SMBNTLMDialect_Parameters(Structure):
structure = (
('DialectIndex','<H'),
('SecurityMode','<B'),
('MaxMpxCount','<H'),
('MaxNumberVcs','<H'),
('MaxBufferSize','<L'),
('MaxRawSize','<L'),
('SessionKey','<L'),
('Capabilities','<L'),
('LowDateTime','<L'),
('HighDateTime','<L'),
('ServerTimeZone','<H'),
('ChallengeLength','<B'),
)
class SMBNTLMDialect_Data(Structure):
structure = (
('ChallengeLength','_-Challenge','self["ChallengeLength"]'),
('Challenge',':'),
('Payload',':'),
        # For some reason these fields are not present on some old Linux servers;
        # we have to check this out. There is probably a flag stating whether they
        # are included.
('DomainName','_'),
('ServerName','_'),
)
def __init__(self,data = None, alignment = 0):
Structure.__init__(self,data,alignment)
#self['ChallengeLength']=8
def fromString(self,data):
Structure.fromString(self,data)
self['DomainName'] = ''
self['ServerName'] = ''
class SMB:
# SMB Command Codes
SMB_COM_CREATE_DIRECTORY = 0x00
SMB_COM_DELETE_DIRECTORY = 0x01
SMB_COM_OPEN = 0x02
SMB_COM_CREATE = 0x03
SMB_COM_CLOSE = 0x04
SMB_COM_FLUSH = 0x05
SMB_COM_DELETE = 0x06
SMB_COM_RENAME = 0x07
SMB_COM_QUERY_INFORMATION = 0x08
SMB_COM_SET_INFORMATION = 0x09
SMB_COM_READ = 0x0A
SMB_COM_WRITE = 0x0B
SMB_COM_LOCK_BYTE_RANGE = 0x0C
SMB_COM_UNLOCK_BYTE_RANGE = 0x0D
SMB_COM_CREATE_TEMPORARY = 0x0E
SMB_COM_CREATE_NEW = 0x0F
SMB_COM_CHECK_DIRECTORY = 0x10
SMB_COM_PROCESS_EXIT = 0x11
SMB_COM_SEEK = 0x12
SMB_COM_LOCK_AND_READ = 0x13
SMB_COM_WRITE_AND_UNLOCK = 0x14
SMB_COM_READ_RAW = 0x1A
SMB_COM_READ_MPX = 0x1B
SMB_COM_READ_MPX_SECONDARY = 0x1C
SMB_COM_WRITE_RAW = 0x1D
SMB_COM_WRITE_MPX = 0x1E
SMB_COM_WRITE_MPX_SECONDARY = 0x1F
SMB_COM_WRITE_COMPLETE = 0x20
SMB_COM_QUERY_SERVER = 0x21
SMB_COM_SET_INFORMATION2 = 0x22
SMB_COM_QUERY_INFORMATION2 = 0x23
SMB_COM_LOCKING_ANDX = 0x24
SMB_COM_TRANSACTION = 0x25
SMB_COM_TRANSACTION_SECONDARY = 0x26
SMB_COM_IOCTL = 0x27
SMB_COM_IOCTL_SECONDARY = 0x28
SMB_COM_COPY = 0x29
SMB_COM_MOVE = 0x2A
SMB_COM_ECHO = 0x2B
SMB_COM_WRITE_AND_CLOSE = 0x2C
SMB_COM_OPEN_ANDX = 0x2D
SMB_COM_READ_ANDX = 0x2E
SMB_COM_WRITE_ANDX = 0x2F
SMB_COM_NEW_FILE_SIZE = 0x30
SMB_COM_CLOSE_AND_TREE_DISC = 0x31
SMB_COM_TRANSACTION2 = 0x32
SMB_COM_TRANSACTION2_SECONDARY = 0x33
SMB_COM_FIND_CLOSE2 = 0x34
SMB_COM_FIND_NOTIFY_CLOSE = 0x35
# Used by Xenix/Unix 0x60 - 0x6E
SMB_COM_TREE_CONNECT = 0x70
SMB_COM_TREE_DISCONNECT = 0x71
SMB_COM_NEGOTIATE = 0x72
SMB_COM_SESSION_SETUP_ANDX = 0x73
SMB_COM_LOGOFF_ANDX = 0x74
SMB_COM_TREE_CONNECT_ANDX = 0x75
SMB_COM_QUERY_INFORMATION_DISK = 0x80
SMB_COM_SEARCH = 0x81
SMB_COM_FIND = 0x82
SMB_COM_FIND_UNIQUE = 0x83
SMB_COM_FIND_CLOSE = 0x84
SMB_COM_NT_TRANSACT = 0xA0
SMB_COM_NT_TRANSACT_SECONDARY = 0xA1
SMB_COM_NT_CREATE_ANDX = 0xA2
SMB_COM_NT_CANCEL = 0xA4
SMB_COM_NT_RENAME = 0xA5
SMB_COM_OPEN_PRINT_FILE = 0xC0
SMB_COM_WRITE_PRINT_FILE = 0xC1
SMB_COM_CLOSE_PRINT_FILE = 0xC2
SMB_COM_GET_PRINT_QUEUE = 0xC3
SMB_COM_READ_BULK = 0xD8
SMB_COM_WRITE_BULK = 0xD9
SMB_COM_WRITE_BULK_DATA = 0xDA
# TRANSACT codes
TRANS_TRANSACT_NMPIPE = 0x26
# TRANSACT2 codes
TRANS2_FIND_FIRST2 = 0x0001
TRANS2_FIND_NEXT2 = 0x0002
TRANS2_QUERY_FS_INFORMATION = 0x0003
    TRANS2_QUERY_PATH_INFORMATION = 0x0005
    TRANS2_SET_PATH_INFORMATION = 0x0006
    TRANS2_QUERY_FILE_INFORMATION = 0x0007
    TRANS2_SET_FILE_INFORMATION = 0x0008
# Security Share Mode (Used internally by SMB class)
SECURITY_SHARE_MASK = 0x01
SECURITY_SHARE_SHARE = 0x00
SECURITY_SHARE_USER = 0x01
    SECURITY_SIGNATURES_ENABLED = 0x04
    SECURITY_SIGNATURES_REQUIRED = 0x08
# Security Auth Mode (Used internally by SMB class)
SECURITY_AUTH_MASK = 0x02
SECURITY_AUTH_ENCRYPTED = 0x02
SECURITY_AUTH_PLAINTEXT = 0x00
# Raw Mode Mask (Used internally by SMB class. Good for dialect up to and including LANMAN2.1)
RAW_READ_MASK = 0x01
RAW_WRITE_MASK = 0x02
# Capabilities Mask (Used internally by SMB class. Good for dialect NT LM 0.12)
    CAP_RAW_MODE = 0x00000001
    CAP_MPX_MODE = 0x00000002
    CAP_UNICODE = 0x00000004
    CAP_LARGE_FILES = 0x00000008
    CAP_EXTENDED_SECURITY = 0x80000000
    CAP_USE_NT_ERRORS = 0x00000040
    CAP_NT_SMBS = 0x00000010
    CAP_LARGE_READX = 0x00004000
    CAP_LARGE_WRITEX = 0x00008000
# Flags1 Mask
FLAGS1_LOCK_AND_READ_OK = 0x01
FLAGS1_PATHCASELESS = 0x08
FLAGS1_CANONICALIZED_PATHS = 0x10
FLAGS1_REPLY = 0x80
# Flags2 Mask
FLAGS2_LONG_NAMES = 0x0001
FLAGS2_EAS = 0x0002
FLAGS2_SMB_SECURITY_SIGNATURE = 0x0004
FLAGS2_IS_LONG_NAME = 0x0040
FLAGS2_DFS = 0x1000
FLAGS2_PAGING_IO = 0x2000
FLAGS2_NT_STATUS = 0x4000
FLAGS2_UNICODE = 0x8000
FLAGS2_COMPRESSED = 0x0008
FLAGS2_SMB_SECURITY_SIGNATURE_REQUIRED = 0x0010
FLAGS2_EXTENDED_SECURITY = 0x0800
# Dialect's Security Mode flags
NEGOTIATE_USER_SECURITY = 0x01
NEGOTIATE_ENCRYPT_PASSWORDS = 0x02
NEGOTIATE_SECURITY_SIGNATURE_ENABLE = 0x04
NEGOTIATE_SECURITY_SIGNATURE_REQUIRED = 0x08
    # Tree Connect AndX Response OptionalSupport flags
SMB_SUPPORT_SEARCH_BITS = 0x01
SMB_SHARE_IS_IN_DFS = 0x02
def __init__(self, remote_name, remote_host, my_name = None, host_type = nmb.TYPE_SERVER, sess_port = 445, timeout=None, UDP = 0):
# The uid attribute will be set when the client calls the login() method
self._uid = 0
self.__server_name = ''
self.__server_os = ''
self.__server_lanman = ''
self.__server_domain = ''
self.__remote_name = string.upper(remote_name)
self.__remote_host = remote_host
self.__is_pathcaseless = 0
self.__isNTLMv2 = True
# Negotiate Protocol Result, used everywhere
# Could be extended or not, flags should be checked before
        self._dialects_data = 0
        self._dialects_parameters = 0
self._action = 0
self._sess = None
self.encrypt_passwords = True
self.tid = 0
self.fid = 0
# Signing stuff
self._SignSequenceNumber = 0
self._SigningSessionKey = ''
self._SigningChallengeResponse = ''
self._SignatureEnabled = False
self._SignatureVerificationEnabled = False
self._SignatureRequired = False
# Base flags
self.__flags1 = 0
self.__flags2 = 0
        if timeout is None:
            self.__timeout = 10
        else:
            self.__timeout = timeout
if not my_name:
my_name = socket.gethostname()
i = string.find(my_name, '.')
if i > -1:
my_name = my_name[:i]
        # If port 445 and the name sent is *SMBSERVER, we're setting the name to the IP.
        # This is to help some old applications that still believe *SMBSERVER will work
        # against modern OSes. If the port is NETBIOS_SESSION_PORT the user had better
        # know about *SMBSERVER's limitations.
if sess_port == 445 and remote_name == '*SMBSERVER':
self.__remote_name = remote_host
if UDP:
self._sess = nmb.NetBIOSUDPSession(my_name, remote_name, remote_host, host_type, sess_port, self.__timeout)
else:
self._sess = nmb.NetBIOSTCPSession(my_name, remote_name, remote_host, host_type, sess_port, self.__timeout)
# Initialize session values (_dialect_data and _dialect_parameters)
self.neg_session()
        # Call login() without any authentication information to set up
        # a session if the remote server is in share mode.
if (self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SHARE_MASK) == SMB.SECURITY_SHARE_SHARE:
self.login('', '')
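    # Connection sketch (illustrative, assumed host values): the constructor
    # resolves the name, opens the NetBIOS session and negotiates the dialect,
    # so the instance is ready for login() once it returns:
    #
    #   s = SMB('*SMBSERVER', '192.168.1.1')   # on port 445 the name is replaced by the IP
    #   s.login('user', 'password')            # hypothetical credentials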
def ntlm_supported(self):
return False
def get_remote_name(self):
return self.__remote_name
def get_remote_host(self):
return self.__remote_host
def get_flags(self):
return self.__flags1, self.__flags2
def set_flags(self, flags1=None, flags2=None):
if flags1 is not None:
self.__flags1 = flags1
if flags2 is not None:
self.__flags2 = flags2
    def set_timeout(self, timeout):
        prev_timeout = self.__timeout
        self.__timeout = timeout
        return prev_timeout
def get_timeout(self):
return self.__timeout
@contextmanager
def use_timeout(self, timeout):
prev_timeout = self.set_timeout(timeout)
try:
yield
finally:
self.set_timeout(prev_timeout)
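    # Usage sketch (illustrative): temporarily raise the timeout for a slow
    # call; the previous value is restored on exit, even on error:
    #
    #   with smb_connection.use_timeout(120):
    #       smb_connection.neg_session()
    #
    # where smb_connection is an established SMB instance (hypothetical name).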
def get_session(self):
return self._sess
def get_tid(self):
return self.tid
def get_fid(self):
return self.fid
def isGuestSession(self):
return self._action & SMB_SETUP_GUEST
def doesSupportNTLMv2(self):
return self.__isNTLMv2
def __del__(self):
if self._sess:
self._sess.close()
def recvSMB(self):
r = self._sess.recv_packet(self.__timeout)
return NewSMBPacket(data = r.get_trailer())
def recv_packet(self):
r = self._sess.recv_packet(self.__timeout)
return SMBPacket(r.get_trailer())
def __decode_trans(self, params, data):
totparamcnt, totdatacnt, _, paramcnt, paramoffset, paramds, datacnt, dataoffset, datads, setupcnt = unpack('<HHHHHHHHHB', params[:19])
if paramcnt + paramds < totparamcnt or datacnt + datads < totdatacnt:
has_more = 1
else:
has_more = 0
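        # Parameter/data offsets in the response are relative to the start of
        # the SMB header: 32 header bytes + 1 word-count byte + 20 fixed
        # parameter bytes (the 19 decoded above plus one reserved byte) + 2
        # byte-count bytes = 55, plus the variable setup words, hence the
        # adjustment below.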
paramoffset = paramoffset - 55 - setupcnt * 2
dataoffset = dataoffset - 55 - setupcnt * 2
return has_more, params[20:20 + setupcnt * 2], data[paramoffset:paramoffset + paramcnt], data[dataoffset:dataoffset + datacnt]
# TODO: Move this to NewSMBPacket, it belongs there
def signSMB(self, packet, signingSessionKey, signingChallengeResponse):
# This logic MUST be applied for messages sent in response to any of the higher-layer actions and in
# compliance with the message sequencing rules.
# * The client or server that sends the message MUST provide the 32-bit sequence number for this
# message, as specified in sections 3.2.4.1 and 3.3.4.1.
# * The SMB_FLAGS2_SMB_SECURITY_SIGNATURE flag in the header MUST be set.
# * To generate the signature, a 32-bit sequence number is copied into the
# least significant 32 bits of the SecuritySignature field and the remaining
# 4 bytes are set to 0x00.
# * The MD5 algorithm, as specified in [RFC1321], MUST be used to generate a hash of the SMB
# message from the start of the SMB Header, which is defined as follows.
# CALL MD5Init( md5context )
# CALL MD5Update( md5context, Connection.SigningSessionKey )
# CALL MD5Update( md5context, Connection.SigningChallengeResponse )
# CALL MD5Update( md5context, SMB message )
# CALL MD5Final( digest, md5context )
# SET signature TO the first 8 bytes of the digest
# The resulting 8-byte signature MUST be copied into the SecuritySignature field of the SMB Header,
# after which the message can be transmitted.
#print "seq(%d) signingSessionKey %r, signingChallengeResponse %r" % (self._SignSequenceNumber, signingSessionKey, signingChallengeResponse)
packet['SecurityFeatures'] = struct.pack('<q',self._SignSequenceNumber)
# Sign with the sequence
m = hashlib.md5()
m.update( signingSessionKey )
m.update( signingChallengeResponse )
m.update( str(packet) )
# Replace sequence with actual hash
packet['SecurityFeatures'] = m.digest()[:8]
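# Advance the sequence number: when response verification is enabled the
# verified response consumes the next number itself (via checkSignSMB), so
# we advance by one; otherwise we also skip over the number the unverified
# response would have used.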
if self._SignatureVerificationEnabled:
self._SignSequenceNumber +=1
else:
self._SignSequenceNumber +=2
def checkSignSMB(self, packet, signingSessionKey, signingChallengeResponse):
# Let's check
signature = packet['SecurityFeatures']
#print "Signature received: %r " % signature
self.signSMB(packet, signingSessionKey, signingChallengeResponse)
#print "Signature calculated: %r" % packet['SecurityFeatures']
if self._SignatureVerificationEnabled is not True:
self._SignSequenceNumber -= 1
return packet['SecurityFeatures'] == signature
def sendSMB(self,smb):
smb['Uid'] = self._uid
smb['Pid'] = os.getpid()
smb['Flags1'] |= self.__flags1
smb['Flags2'] |= self.__flags2
if self._SignatureEnabled:
smb['Flags2'] |= SMB.FLAGS2_SMB_SECURITY_SIGNATURE
self.signSMB(smb, self._SigningSessionKey, self._SigningChallengeResponse)
self._sess.send_packet(str(smb))
# Should be gone soon. Not used anymore within the library. DON'T use it!
# Use sendSMB instead (and build the packet with NewSMBPacket)
def send_smb(self,s):
s.set_uid(self._uid)
s.set_pid(os.getpid())
self._sess.send_packet(s.rawData())
def __send_smb_packet(self, cmd, flags, flags2, tid, mid, params = '', data = ''):
smb = NewSMBPacket()
smb['Flags1'] = flags
smb['Flags2'] = flags2
smb['Tid'] = tid
smb['Mid'] = mid
cmd = SMBCommand(cmd)
smb.addCommand(cmd)
cmd['Parameters'] = params
cmd['Data'] = data
self.sendSMB(smb)
def isValidAnswer(self, s, cmd):
while 1:
if s.rawData():
if s.get_command() == cmd:
if s.get_error_class() == 0x00 and s.get_error_code() == 0x00:
return 1
else:
raise SessionError, ( "SMB Library Error", s.get_error_class()+ (s.get_reserved() << 8), s.get_error_code() , s.get_flags2() & SMB.FLAGS2_NT_STATUS )
else:
break
return 0
def neg_session(self, extended_security = True):
smb = NewSMBPacket()
negSession = SMBCommand(SMB.SMB_COM_NEGOTIATE)
if extended_security:
smb['Flags2']=SMB.FLAGS2_EXTENDED_SECURITY
negSession['Data'] = '\x02NT LM 0.12\x00'
smb.addCommand(negSession)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_NEGOTIATE):
sessionResponse = SMBCommand(smb['Data'][0])
self._dialects_parameters = SMBNTLMDialect_Parameters(sessionResponse['Parameters'])
self._dialects_data = SMBNTLMDialect_Data()
self._dialects_data['ChallengeLength'] = self._dialects_parameters['ChallengeLength']
self._dialects_data.fromString(sessionResponse['Data'])
if self._dialects_parameters['Capabilities'] & SMB.CAP_EXTENDED_SECURITY:
# Whether we choose it or it is enforced by the server, we go for extended security
self._dialects_parameters = SMBExtended_Security_Parameters(sessionResponse['Parameters'])
self._dialects_data = SMBExtended_Security_Data(sessionResponse['Data'])
# Let's setup some variable for later use
if self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SIGNATURES_REQUIRED:
self._SignatureRequired = True
# Interestingly, the security Blob might be missing sometimes.
#spnego = SPNEGO_NegTokenInit(self._dialects_data['SecurityBlob'])
#for i in spnego['MechTypes']:
# print "Mech Found: %s" % MechTypes[i]
return 1
# If not, let's try the old way
else:
if self._dialects_data['ServerName'] is not None:
self.__server_name = self._dialects_data['ServerName']
if self._dialects_parameters['DialectIndex'] == 0xffff:
raise UnsupportedFeature,"Remote server does not know NT LM 0.12"
self.__is_pathcaseless = smb['Flags1'] & SMB.FLAGS1_PATHCASELESS
return 1
else:
return 0
def tree_connect(self, path, password = '', service = SERVICE_ANY):
print "[MS-CIFS] This is an original Core Protocol command.\nThis command has been deprecated.\nClient Implementations SHOULD use SMB_COM_TREE_CONNECT_ANDX"
# return 0x800
if password:
# Password is only encrypted if the server passed us an "encryption key" during protocol dialect negotiation
if self._dialects_parameters['ChallengeLength'] > 0:
# this code is untested
password = self.get_ntlmv1_response(ntlm.compute_lmhash(password))
if not unicode_support:
if unicode_convert:
path = str(path)
else:
raise Exception('SMB: Can\'t convert path from unicode!')
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
treeConnect = SMBCommand(SMB.SMB_COM_TREE_CONNECT)
treeConnect['Parameters'] = SMBTreeConnect_Parameters()
treeConnect['Data'] = SMBTreeConnect_Data()
treeConnect['Data']['Path'] = path.upper()
treeConnect['Data']['Password'] = password
treeConnect['Data']['Service'] = service
smb.addCommand(treeConnect)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TREE_CONNECT):
# XXX Here we are ignoring the rest of the response
return smb['Tid']
return smb['Tid']
def get_uid(self):
return self._uid
def set_uid(self, uid):
self._uid = uid
def tree_connect_andx(self, path, password = None, service = SERVICE_ANY, smb_packet=None):
if password:
# Password is only encrypted if the server passed us an "encryption key" during protocol dialect negotiation
if self._dialects_parameters['ChallengeLength'] > 0:
# this code is untested
password = self.get_ntlmv1_response(ntlm.compute_lmhash(password))
else:
password = '\x00'
if not unicode_support:
if unicode_convert:
path = str(path)
else:
raise Exception('SMB: Can\'t convert path from unicode!')
if smb_packet is None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
else:
smb = smb_packet
treeConnect = SMBCommand(SMB.SMB_COM_TREE_CONNECT_ANDX)
treeConnect['Parameters'] = SMBTreeConnectAndX_Parameters()
treeConnect['Data'] = SMBTreeConnectAndX_Data()
treeConnect['Parameters']['PasswordLength'] = len(password)
treeConnect['Data']['Password'] = password
treeConnect['Data']['Path'] = path.upper()
treeConnect['Data']['Service'] = service
smb.addCommand(treeConnect)
# filename = "\PIPE\epmapper"
# ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX)
# ntCreate['Parameters'] = SMBNtCreateAndX_Parameters()
# ntCreate['Data'] = SMBNtCreateAndX_Data()
# ntCreate['Parameters']['FileNameLength'] = len(filename)
# ntCreate['Parameters']['CreateFlags'] = 0
# ntCreate['Parameters']['AccessMask'] = 0x3
# ntCreate['Parameters']['CreateOptions'] = 0x0
# ntCreate['Data']['FileName'] = filename
# smb.addCommand(ntCreate)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TREE_CONNECT_ANDX):
# XXX Here we are ignoring the rest of the response
self.tid = smb['Tid']
return self.tid
self.tid = smb['Tid']
return self.tid
# backwards compatibility
connect_tree = tree_connect_andx
def get_server_name(self):
#return self._dialects_data['ServerName']
return self.__server_name
def get_session_key(self):
return self._dialects_parameters['SessionKey']
def get_encryption_key(self):
if self._dialects_data.fields.has_key('Challenge'):
return self._dialects_data['Challenge']
else:
return None
def get_server_time(self):
timestamp = self._dialects_parameters['HighDateTime']
timestamp <<= 32
timestamp |= self._dialects_parameters['LowDateTime']
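# 116444736000000000 is the number of 100ns FILETIME intervals between
# the Windows epoch (1601-01-01) and the Unix epoch (1970-01-01).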
timestamp -= 116444736000000000
timestamp /= 10000000
d = datetime.datetime.utcfromtimestamp(timestamp)
return d.strftime("%a, %d %b %Y %H:%M:%S GMT")
def disconnect_tree(self, tid):
smb = NewSMBPacket()
smb['Tid'] = tid
smb.addCommand(SMBCommand(SMB.SMB_COM_TREE_DISCONNECT))
self.sendSMB(smb)
smb = self.recvSMB()
def open(self, tid, filename, open_mode, desired_access):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
openFile = SMBCommand(SMB.SMB_COM_OPEN)
openFile['Parameters'] = SMBOpen_Parameters()
openFile['Parameters']['DesiredAccess'] = desired_access
openFile['Parameters']['OpenMode'] = open_mode
openFile['Parameters']['SearchAttributes'] = ATTR_READONLY | ATTR_HIDDEN | ATTR_ARCHIVE
openFile['Data'] = SMBOpen_Data()
openFile['Data']['FileName'] = filename
smb.addCommand(openFile)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_OPEN):
# XXX Here we are ignoring the rest of the response
openFileResponse = SMBCommand(smb['Data'][0])
openFileParameters = SMBOpenResponse_Parameters(openFileResponse['Parameters'])
return (
openFileParameters['Fid'],
openFileParameters['FileAttributes'],
openFileParameters['LastWriten'],
openFileParameters['FileSize'],
openFileParameters['GrantedAccess'],
)
def open_andx(self, tid, filename, open_mode, desired_access):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
openFile = SMBCommand(SMB.SMB_COM_OPEN_ANDX)
openFile['Parameters'] = SMBOpenAndX_Parameters()
openFile['Parameters']['DesiredAccess'] = desired_access
openFile['Parameters']['OpenMode'] = open_mode
openFile['Parameters']['SearchAttributes'] = ATTR_READONLY | ATTR_HIDDEN | ATTR_ARCHIVE
openFile['Data'] = SMBOpenAndX_Data()
openFile['Data']['FileName'] = filename
smb.addCommand(openFile)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_OPEN_ANDX):
# XXX Here we are ignoring the rest of the response
openFileResponse = SMBCommand(smb['Data'][0])
openFileParameters = SMBOpenAndXResponse_Parameters(openFileResponse['Parameters'])
return (
openFileParameters['Fid'],
openFileParameters['FileAttributes'],
openFileParameters['LastWriten'],
openFileParameters['FileSize'],
openFileParameters['GrantedAccess'],
openFileParameters['FileType'],
openFileParameters['IPCState'],
openFileParameters['Action'],
openFileParameters['ServerFid'],
)
def close(self, tid, fid):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
closeFile = SMBCommand(SMB.SMB_COM_CLOSE)
closeFile['Parameters'] = SMBClose_Parameters()
closeFile['Parameters']['FID'] = fid
smb.addCommand(closeFile)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_CLOSE):
return 1
return 0
def send_trans(self, tid, setup, name, param, data, noAnswer = 0):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
transCommand = SMBCommand(SMB.SMB_COM_TRANSACTION)
transCommand['Parameters'] = SMBTransaction_Parameters()
transCommand['Data'] = SMBTransaction_Data()
transCommand['Parameters']['Setup'] = setup
transCommand['Parameters']['TotalParameterCount'] = len(param)
transCommand['Parameters']['TotalDataCount'] = len(data)
transCommand['Parameters']['ParameterCount'] = len(param)
transCommand['Parameters']['ParameterOffset'] = 32+3+28+len(setup)+len(name)
transCommand['Parameters']['DataCount'] = len(data)
transCommand['Parameters']['DataOffset'] = transCommand['Parameters']['ParameterOffset'] + len(param)
transCommand['Data']['Name'] = name
transCommand['Data']['Trans_Parameters'] = param
transCommand['Data']['Trans_Data'] = data
if noAnswer:
transCommand['Parameters']['Flags'] = TRANS_NO_RESPONSE
smb.addCommand(transCommand)
self.sendSMB(smb)
def trans2(self, tid, setup, name, param, data):
data_len = len(data)
name_len = len(name)
param_len = len(param)
setup_len = len(setup)
assert setup_len & 0x01 == 0
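# 63 = SMB header (32) + word count byte (1) + 14 fixed parameter words (28) + byte count field (2)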
param_offset = name_len + setup_len + 63
data_offset = param_offset + param_len
self.__send_smb_packet(SMB.SMB_COM_TRANSACTION2, self.__is_pathcaseless, SMB.FLAGS2_LONG_NAMES, tid, 0, pack('<HHHHBBHLHHHHHBB', param_len, data_len, 1024, self._dialects_parameters['MaxBufferSize'], 0, 0, 0, 0, 0, param_len, param_offset, data_len, data_offset, setup_len / 2, 0) + setup, name + param + data)
def query_file_info(self, tid, fid):
self.trans2(tid, '\x07\x00', '\x00', pack('<HH', fid, 0x107), '')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_TRANSACTION2):
f1, f2 = unpack('<LL', s.get_buffer()[53:53+8])
return (f2 & 0xffffffffL) << 32 | f1
def __nonraw_retr_file(self, tid, fid, offset, datasize, callback):
if (self._dialects_parameters['Capabilities'] & SMB.CAP_LARGE_READX) and self._SignatureEnabled is False:
max_buf_size = 65000
else:
max_buf_size = self._dialects_parameters['MaxBufferSize'] & ~0x3ff # Read in multiple KB blocks
read_offset = offset
while read_offset < datasize:
data = self.read_andx(tid, fid, read_offset, max_buf_size)
callback(data)
read_offset += len(data)
def __raw_retr_file(self, tid, fid, offset, datasize, callback):
print "[MS-CIFS] This command was introduced in the CorePlus dialect, but is often listed as part of the LAN Manager 1.0 dialect.\nThis command has been deprecated.\nClients SHOULD use SMB_COM_READ_ANDX"
max_buf_size = self._dialects_parameters['MaxBufferSize'] & ~0x3ff # Read in multiple KB blocks
read_offset = offset
while read_offset < datasize:
data = self.read_raw(tid, fid, read_offset, 0xffff)
if not data:
# No data returned. Need to send SMB_COM_READ_ANDX to find out what is the error.
data = self.read_andx(tid, fid, read_offset, max_buf_size)
callback(data)
read_offset += len(data)
def __nonraw_stor_file(self, tid, fid, offset, datasize, callback):
if (self._dialects_parameters['Capabilities'] & SMB.CAP_LARGE_WRITEX) and self._SignatureEnabled is False:
max_buf_size = 65000
else:
max_buf_size = self._dialects_parameters['MaxBufferSize'] & ~0x3ff # Write in multiple KB blocks
write_offset = offset
while 1:
data = callback(max_buf_size)
if not data:
break
smb = self.write_andx(tid,fid,data, write_offset)
writeResponse = SMBCommand(smb['Data'][0])
writeResponseParameters = SMBWriteAndXResponse_Parameters(writeResponse['Parameters'])
write_offset += writeResponseParameters['Count']
def __raw_stor_file(self, tid, fid, offset, datasize, callback):
print "[MS-CIFS] This command was introduced in the CorePlus dialect, but is often listed as part of the LAN Manager 1.0 dialect.\nThis command has been deprecated.\nClients SHOULD use SMB_COM_WRITE_ANDX"
write_offset = offset
while 1:
max_raw_size = self._dialects_parameters['MaxRawSize']
# Due to different dialects interpretation of MaxRawSize, we're limiting it to 0xffff
if max_raw_size > 65535:
max_raw_size = 65535
read_data = callback(max_raw_size)
if not read_data:
break
read_len = len(read_data)
self.__send_smb_packet(SMB.SMB_COM_WRITE_RAW, 0, 0, tid, 0, pack('<HHHLLHLHH', fid, read_len, 0, write_offset, 0, 0, 0, 0, 59), '')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_WRITE_RAW):
self._sess.send_packet(read_data)
write_offset = write_offset + read_len
break
def get_server_domain(self):
return self.__server_domain
def get_server_os(self):
return self.__server_os
def set_server_os(self, os):
self.__server_os = os
def get_server_lanman(self):
return self.__server_lanman
def is_login_required(self):
# Login is required if share mode is user.
# Otherwise only public services or services in share mode
# are allowed.
return (self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SHARE_MASK) == SMB.SECURITY_SHARE_USER
def get_ntlmv1_response(self, key):
challenge = self._dialects_data['Challenge']
return ntlm.get_ntlmv1_response(key, challenge)
def login_extended(self, user, password, domain = '', lmhash = '', nthash = '', use_ntlmv2 = True ):
# Once everything's working we should join login methods into a single one
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_EXTENDED_SECURITY
# Are we required to sign SMB? If so we do it, if not we skip it
if self._SignatureRequired:
smb['Flags2'] |= SMB.FLAGS2_SMB_SECURITY_SIGNATURE
sessionSetup = SMBCommand(SMB.SMB_COM_SESSION_SETUP_ANDX)
sessionSetup['Parameters'] = SMBSessionSetupAndX_Extended_Parameters()
sessionSetup['Data'] = SMBSessionSetupAndX_Extended_Data()
sessionSetup['Parameters']['MaxBufferSize'] = 61440
sessionSetup['Parameters']['MaxMpxCount'] = 2
sessionSetup['Parameters']['VcNumber'] = 1
sessionSetup['Parameters']['SessionKey'] = 0
sessionSetup['Parameters']['Capabilities'] = SMB.CAP_EXTENDED_SECURITY | SMB.CAP_USE_NT_ERRORS | SMB.CAP_UNICODE | SMB.CAP_LARGE_READX | SMB.CAP_LARGE_WRITEX
# Let's build a NegTokenInit with the NTLMSSP
# TODO: In the future we should be able to choose different providers
blob = SPNEGO_NegTokenInit()
# NTLMSSP
blob['MechTypes'] = [TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']]
auth = ntlm.getNTLMSSPType1('',domain,self._SignatureRequired, use_ntlmv2 = use_ntlmv2)
blob['MechToken'] = str(auth)
sessionSetup['Parameters']['SecurityBlobLength'] = len(blob)
sessionSetup['Parameters'].getData()
sessionSetup['Data']['SecurityBlob'] = blob.getData()
# Fake Data here, don't want to get us fingerprinted
sessionSetup['Data']['NativeOS'] = 'Unix'
sessionSetup['Data']['NativeLanMan'] = 'Samba'
smb.addCommand(sessionSetup)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_SESSION_SETUP_ANDX):
# We will need to use this uid field for all future requests/responses
self._uid = smb['Uid']
# Now we have to extract the blob to continue the auth process
sessionResponse = SMBCommand(smb['Data'][0])
sessionParameters = SMBSessionSetupAndX_Extended_Response_Parameters(sessionResponse['Parameters'])
sessionData = SMBSessionSetupAndX_Extended_Response_Data(flags = smb['Flags2'])
sessionData['SecurityBlobLength'] = sessionParameters['SecurityBlobLength']
sessionData.fromString(sessionResponse['Data'])
respToken = SPNEGO_NegTokenResp(sessionData['SecurityBlob'])
# Let's parse some data and keep it to ourselves in case it is asked
ntlmChallenge = ntlm.NTLMAuthChallenge(respToken['ResponseToken'])
if ntlmChallenge['TargetInfoFields_len'] > 0:
infoFields = ntlmChallenge['TargetInfoFields']
av_pairs = ntlm.AV_PAIRS(ntlmChallenge['TargetInfoFields'][:ntlmChallenge['TargetInfoFields_len']])
if av_pairs[ntlm.NTLMSSP_AV_HOSTNAME] is not None:
try:
self.__server_name = av_pairs[ntlm.NTLMSSP_AV_HOSTNAME][1].decode('utf-16le')
except:
# For some reason, we couldn't decode Unicode here.. silently discard the operation
pass
if av_pairs[ntlm.NTLMSSP_AV_DOMAINNAME] is not None:
try:
if self.__server_name != av_pairs[ntlm.NTLMSSP_AV_DOMAINNAME][1].decode('utf-16le'):
self.__server_domain = av_pairs[ntlm.NTLMSSP_AV_DOMAINNAME][1].decode('utf-16le')
except:
# For some reason, we couldn't decode Unicode here.. silently discard the operation
pass
type3, exportedSessionKey = ntlm.getNTLMSSPType3(auth, respToken['ResponseToken'], user, password, domain, lmhash, nthash, use_ntlmv2 = use_ntlmv2)
if exportedSessionKey is not None:
self._SigningSessionKey = exportedSessionKey
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_EXTENDED_SECURITY #| SMB.FLAGS2_NT_STATUS
# Are we required to sign SMB? If so we do it, if not we skip it
if self._SignatureRequired:
smb['Flags2'] |= SMB.FLAGS2_SMB_SECURITY_SIGNATURE
respToken2 = SPNEGO_NegTokenResp()
respToken2['ResponseToken'] = str(type3)
# Reusing the previous structure
sessionSetup['Parameters']['SecurityBlobLength'] = len(respToken2)
sessionSetup['Data']['SecurityBlob'] = respToken2.getData()
# Storing some info for later use
self.__server_os = sessionData['NativeOS']
self.__server_lanman = sessionData['NativeLanMan']
smb.addCommand(sessionSetup)
self.sendSMB(smb)
smb = self.recvSMB()
self._uid = 0
if smb.isValidAnswer(SMB.SMB_COM_SESSION_SETUP_ANDX):
self._uid = smb['Uid']
sessionResponse = SMBCommand(smb['Data'][0])
sessionParameters = SMBSessionSetupAndXResponse_Parameters(sessionResponse['Parameters'])
sessionData = SMBSessionSetupAndXResponse_Data(flags = smb['Flags2'], data = sessionResponse['Data'])
self._action = sessionParameters['Action']
# If smb sign required, let's enable it for the rest of the connection
if self._dialects_parameters['SecurityMode'] & SMB.SECURITY_SIGNATURES_REQUIRED:
self._SignSequenceNumber = 2
self._SignatureEnabled = True
# Set up the flags to be used from now on
self.__flags1 = SMB.FLAGS1_PATHCASELESS
self.__flags2 = SMB.FLAGS2_EXTENDED_SECURITY
return 1
else:
raise Exception('Error: Could not login successfully')
def login(self, user, password, domain = '', lmhash = '', nthash = ''):
# If we have hashes, normalize them
if ( lmhash != '' or nthash != ''):
if len(lmhash) % 2: lmhash = '0%s' % lmhash
if len(nthash) % 2: nthash = '0%s' % nthash
try: # just in case they were converted already
lmhash = a2b_hex(lmhash)
nthash = a2b_hex(nthash)
except:
pass
if self._dialects_parameters['Capabilities'] & SMB.CAP_EXTENDED_SECURITY:
try:
self.login_extended(user, password, domain, lmhash, nthash, use_ntlmv2 = True)
except:
# If the target OS is Windows 5.0 or Samba, let's try using NTLMv1
if (self.get_server_lanman().find('Windows 2000') != -1) or (self.get_server_lanman().find('Samba') != -1):
self.login_extended(user, password, domain, lmhash, nthash, use_ntlmv2 = False)
self.__isNTLMv2 = False
else:
raise
else:
self.login_standard(user, password, domain, lmhash, nthash)
self.__isNTLMv2 = False
def login_standard(self, user, password, domain = '', lmhash = '', nthash = ''):
# Only supports NTLMv1
# Password is only encrypted if the server passed us an "encryption key" during protocol dialect negotiation
if self._dialects_parameters['ChallengeLength'] > 0:
if lmhash != '' or nthash != '':
pwd_ansi = self.get_ntlmv1_response(lmhash)
pwd_unicode = self.get_ntlmv1_response(nthash)
elif password:
lmhash = ntlm.compute_lmhash(password)
nthash = ntlm.compute_nthash(password)
pwd_ansi = self.get_ntlmv1_response(lmhash)
pwd_unicode = self.get_ntlmv1_response(nthash)
else: # NULL SESSION
pwd_ansi = ''
pwd_unicode = ''
else:
pwd_ansi = password
pwd_unicode = ''
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
sessionSetup = SMBCommand(SMB.SMB_COM_SESSION_SETUP_ANDX)
sessionSetup['Parameters'] = SMBSessionSetupAndX_Parameters()
sessionSetup['Data'] = SMBSessionSetupAndX_Data()
sessionSetup['Parameters']['MaxBuffer'] = 61440
sessionSetup['Parameters']['MaxMpxCount'] = 2
sessionSetup['Parameters']['VCNumber'] = os.getpid()
sessionSetup['Parameters']['SessionKey'] = self._dialects_parameters['SessionKey']
sessionSetup['Parameters']['AnsiPwdLength'] = len(pwd_ansi)
sessionSetup['Parameters']['UnicodePwdLength'] = len(pwd_unicode)
sessionSetup['Parameters']['Capabilities'] = SMB.CAP_RAW_MODE | SMB.CAP_USE_NT_ERRORS | SMB.CAP_LARGE_READX | SMB.CAP_LARGE_WRITEX
sessionSetup['Data']['AnsiPwd'] = pwd_ansi
sessionSetup['Data']['UnicodePwd'] = pwd_unicode
sessionSetup['Data']['Account'] = str(user)
sessionSetup['Data']['PrimaryDomain'] = str(domain)
sessionSetup['Data']['NativeOS'] = str(os.name)
sessionSetup['Data']['NativeLanMan'] = 'pysmb'
smb.addCommand(sessionSetup)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_SESSION_SETUP_ANDX):
# We will need to use this uid field for all future requests/responses
self._uid = smb['Uid']
sessionResponse = SMBCommand(smb['Data'][0])
sessionParameters = SMBSessionSetupAndXResponse_Parameters(sessionResponse['Parameters'])
sessionData = SMBSessionSetupAndXResponse_Data(flags = smb['Flags2'], data = sessionResponse['Data'])
self._action = sessionParameters['Action']
# Still gotta figure out how to do this with no EXTENDED_SECURITY
if sessionParameters['Action'] & SMB_SETUP_USE_LANMAN_KEY == 0:
self._SigningChallengeResponse = sessionSetup['Data']['UnicodePwd']
self._SigningSessionKey = nthash
else:
self._SigningChallengeResponse = sessionSetup['Data']['AnsiPwd']
self._SigningSessionKey = lmhash
#self._SignSequenceNumber = 1
#self.checkSignSMB(smb, self._SigningSessionKey ,self._SigningChallengeResponse)
#self._SignatureEnabled = True
self.__server_os = sessionData['NativeOS']
self.__server_lanman = sessionData['NativeLanMan']
self.__server_domain = sessionData['PrimaryDomain']
# Set up the flags to be used from now on
self.__flags1 = SMB.FLAGS1_PATHCASELESS
self.__flags2 = 0
return 1
else: raise Exception('Error: Could not login successfully')
def waitNamedPipe(self, tid, pipe, noAnswer = 0):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
transCommand = SMBCommand(SMB.SMB_COM_TRANSACTION)
transCommand['Parameters'] = SMBTransaction_Parameters()
transCommand['Data'] = SMBTransaction_Data()
setup = '\x53\x00\x00\x00'
name = '\\PIPE%s\x00' % pipe
transCommand['Parameters']['Setup'] = setup
transCommand['Parameters']['TotalParameterCount'] = 0
transCommand['Parameters']['TotalDataCount'] = 0
transCommand['Parameters']['MaxParameterCount'] = 0
transCommand['Parameters']['MaxDataCount'] = 0
transCommand['Parameters']['Timeout'] = 5000
transCommand['Parameters']['ParameterCount'] = 0
transCommand['Parameters']['ParameterOffset'] = 32+3+28+len(setup)+len(name)
transCommand['Parameters']['DataCount'] = 0
transCommand['Parameters']['DataOffset'] = 0
transCommand['Data']['Name'] = name
transCommand['Data']['Trans_Parameters'] = ''
transCommand['Data']['Trans_Data'] = ''
if noAnswer:
transCommand['Parameters']['Flags'] = TRANS_NO_RESPONSE
smb.addCommand(transCommand)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TRANSACTION):
return 1
return 0
def read(self, tid, fid, offset=0, max_size = None, wait_answer=1):
if not max_size:
max_size = self._dialects_parameters['MaxBufferSize'] # Read in multiple KB blocks
# Note: larger max_size values do not work here; the server returns an error (more data available)
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
read = SMBCommand(SMB.SMB_COM_READ)
read['Parameters'] = SMBRead_Parameters()
read['Parameters']['Fid'] = fid
read['Parameters']['Offset'] = offset
read['Parameters']['Count'] = max_size
smb.addCommand(read)
if wait_answer:
answer = ''
while 1:
self.sendSMB(smb)
ans = self.recvSMB()
if ans.isValidAnswer(SMB.SMB_COM_READ):
readResponse = SMBCommand(ans['Data'][0])
readParameters = SMBReadResponse_Parameters(readResponse['Parameters'])
readData = SMBReadResponse_Data(readResponse['Data'])
return readData['Data']
return None
def read_andx(self, tid, fid, offset=0, max_size = None, wait_answer=1, smb_packet=None):
if not max_size:
if (self._dialects_parameters['Capabilities'] & SMB.CAP_LARGE_READX) and self._SignatureEnabled is False:
max_size = 65000
else:
max_size = self._dialects_parameters['MaxBufferSize'] # Read in multiple KB blocks
# Note: larger max_size values do not work here; the server returns an error (more data available)
if smb_packet is None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
readAndX = SMBCommand(SMB.SMB_COM_READ_ANDX)
readAndX['Parameters'] = SMBReadAndX_Parameters()
readAndX['Parameters']['Fid'] = fid
readAndX['Parameters']['Offset'] = offset
readAndX['Parameters']['MaxCount'] = max_size
smb.addCommand(readAndX)
else:
smb = smb_packet
if wait_answer:
answer = ''
while 1:
self.sendSMB(smb)
ans = self.recvSMB()
if ans.isValidAnswer(SMB.SMB_COM_READ_ANDX):
# XXX Here we are only using a few fields from the response
readAndXResponse = SMBCommand(ans['Data'][0])
readAndXParameters = SMBReadAndXResponse_Parameters(readAndXResponse['Parameters'])
offset = readAndXParameters['DataOffset']
count = readAndXParameters['DataCount']+0x10000*readAndXParameters['DataCount_Hi']
answer += str(ans)[offset:offset+count]
if not ans.isMoreData():
return answer
max_size = min(max_size, readAndXParameters['Remaining'])
readAndX['Parameters']['Offset'] += count # XXX Offset is not important (apparently)
else:
self.sendSMB(smb)
ans = self.recvSMB()
try:
if ans.isValidAnswer(SMB.SMB_COM_READ_ANDX):
return ans
else:
return None
except:
return ans
return None
def read_raw(self, tid, fid, offset=0, max_size = None, wait_answer=1):
if not max_size:
max_size = self._dialects_parameters['MaxBufferSize'] # Read in multiple KB blocks
# Note: larger max_size values do not work here; the server returns an error (more data available)
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
readRaw = SMBCommand(SMB.SMB_COM_READ_RAW)
readRaw['Parameters'] = SMBReadRaw_Parameters()
readRaw['Parameters']['Fid'] = fid
readRaw['Parameters']['Offset'] = offset
readRaw['Parameters']['MaxCount'] = max_size
smb.addCommand(readRaw)
self.sendSMB(smb)
if wait_answer:
data = self._sess.recv_packet(self.__timeout).get_trailer()
if not data:
# If there is no data it means there was an error
data = self.read_andx(tid, fid, offset, max_size)
return data
return None
def write(self,tid,fid,data, offset = 0, wait_answer=1):
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
write = SMBCommand(SMB.SMB_COM_WRITE)
smb.addCommand(write)
write['Parameters'] = SMBWrite_Parameters()
write['Data'] = SMBWrite_Data()
write['Parameters']['Fid'] = fid
write['Parameters']['Count'] = len(data)
write['Parameters']['Offset'] = offset
write['Parameters']['Remaining'] = len(data)
write['Data']['Data'] = data
self.sendSMB(smb)
if wait_answer:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_WRITE):
return smb
return None
def write_andx(self,tid,fid,data, offset = 0, wait_answer=1, write_pipe_mode = False, smb_packet=None):
if smb_packet is None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
writeAndX = SMBCommand(SMB.SMB_COM_WRITE_ANDX)
smb.addCommand(writeAndX)
writeAndX['Parameters'] = SMBWriteAndX_Parameters()
writeAndX['Parameters']['Fid'] = fid
writeAndX['Parameters']['Offset'] = offset
writeAndX['Parameters']['WriteMode'] = 8
writeAndX['Parameters']['Remaining'] = len(data)
writeAndX['Parameters']['DataLength'] = len(data)
writeAndX['Parameters']['DataOffset'] = len(smb) # this length already includes the parameter
writeAndX['Data'] = data
if write_pipe_mode is True:
# First of all we gotta know what the MaxBuffSize is
maxBuffSize = self._dialects_parameters['MaxBufferSize']
if len(data) > maxBuffSize:
chunks_size = maxBuffSize - 5
writeAndX['Parameters']['WriteMode'] = 0x0c
sendData = '\xff\xff' + data
totalLen = len(sendData)
smbResp = None # Guard the return value in case wait_answer is false
writeAndX['Parameters']['DataLength'] = chunks_size
writeAndX['Parameters']['Remaining'] = totalLen-2
writeAndX['Data'] = sendData[:chunks_size]
self.sendSMB(smb)
if wait_answer:
smbResp = self.recvSMB()
smbResp.isValidAnswer(SMB.SMB_COM_WRITE_ANDX)
alreadySent = chunks_size
sendData = sendData[chunks_size:]
while alreadySent < totalLen:
writeAndX['Parameters']['WriteMode'] = 0x04
writeAndX['Parameters']['DataLength'] = len(sendData[:chunks_size])
writeAndX['Data'] = sendData[:chunks_size]
self.sendSMB(smb)
if wait_answer:
smbResp = self.recvSMB()
smbResp.isValidAnswer(SMB.SMB_COM_WRITE_ANDX)
alreadySent += writeAndX['Parameters']['DataLength']
sendData = sendData[chunks_size:]
return smbResp
else:
smb = smb_packet
self.sendSMB(smb)
if wait_answer:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_WRITE_ANDX):
return smb
return None
def write_raw(self,tid,fid,data, offset = 0, wait_answer=1):
print "[MS-CIFS] This command was introduced in the CorePlus dialect, but is often listed as part of the LAN Manager 1.0 dialect.\nThis command has been deprecated.\nClients SHOULD use SMB_COM_WRITE_ANDX"
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = 0
smb['Tid'] = tid
writeRaw = SMBCommand(SMB.SMB_COM_WRITE_RAW)
smb.addCommand(writeRaw)
writeRaw['Parameters'] = SMBWriteRaw_Parameters()
writeRaw['Parameters']['Fid'] = fid
writeRaw['Parameters']['Offset'] = offset
writeRaw['Parameters']['Count'] = len(data)
writeRaw['Parameters']['DataLength'] = 0
writeRaw['Parameters']['DataOffset'] = 0
self.sendSMB(smb)
self._sess.send_packet(data)
if wait_answer:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_WRITE_RAW):
return smb
return None
def TransactNamedPipe(self, tid, fid, data = '', noAnswer = 0, waitAnswer = 1, offset = 0):
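# Setup words: subcommand 0x26 (TransactNmPipe) followed by the pipe's Fid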
self.send_trans(tid,pack('<HH', 0x26, fid),'\\PIPE\\\x00','',data, noAnswer = noAnswer)
if noAnswer or not waitAnswer:
return
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_TRANSACTION):
transResponse = SMBCommand(smb['Data'][0])
transParameters = SMBTransactionResponse_Parameters(transResponse['Parameters'])
return transResponse['Data'][-transParameters['TotalDataCount']:] # Remove Potential Prefix Padding
return None
def nt_create_andx(self,tid,filename, smb_packet=None, cmd = None):
if smb_packet is None:
smb = NewSMBPacket()
smb['Flags1'] = SMB.FLAGS1_CANONICALIZED_PATHS | SMB.FLAGS1_PATHCASELESS
smb['Flags2'] = SMB.FLAGS2_LONG_NAMES
smb['Tid'] = tid
else:
smb = smb_packet
if cmd is None:
ntCreate = SMBCommand(SMB.SMB_COM_NT_CREATE_ANDX)
ntCreate['Parameters'] = SMBNtCreateAndX_Parameters()
ntCreate['Data'] = SMBNtCreateAndX_Data()
ntCreate['Parameters']['FileNameLength'] = len(filename)
ntCreate['Parameters']['CreateFlags'] = 0x16
ntCreate['Parameters']['AccessMask'] = 0x2019f
ntCreate['Parameters']['CreateOptions'] = 0x40
ntCreate['Data']['FileName'] = filename
else:
ntCreate = cmd
smb.addCommand(ntCreate)
self.sendSMB(smb)
while 1:
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_NT_CREATE_ANDX):
# XXX Here we are ignoring the rest of the response
ntCreateResponse = SMBCommand(smb['Data'][0])
ntCreateParameters = SMBNtCreateAndXResponse_Parameters(ntCreateResponse['Parameters'])
self.fid = ntCreateParameters['Fid']
return ntCreateParameters['Fid']
def logoff(self):
smb = NewSMBPacket()
logOff = SMBCommand(SMB.SMB_COM_LOGOFF_ANDX)
logOff['Parameters'] = SMBLogOffAndX()
smb.addCommand(logOff)
self.sendSMB(smb)
smb = self.recvSMB()
# Let's clear some fields so you can login again under the same session
self._uid = 0
def list_shared(self):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\IPC$')
buf = StringIO()
try:
self.send_trans(tid, '', '\\PIPE\\LANMAN\0', '\x00\x00WrLeh\0B13BWz\0\x01\x00\xe0\xff', '')
numentries = 0
share_list = [ ]
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_TRANSACTION):
has_more, _, transparam, transdata = self.__decode_trans(s.get_parameter_words(), s.get_buffer())
if not numentries:
status, data_offset, numentries = unpack('<HHH', transparam[:6])
buf.write(transdata)
if not has_more:
share_data = buf.getvalue()
offset = 0
for i in range(0, numentries):
name = share_data[offset:string.find(share_data, '\0', offset)]
type, commentoffset = unpack('<HH', share_data[offset + 14:offset + 18])
comment = share_data[commentoffset-data_offset:share_data.find('\0', commentoffset-data_offset)]
offset = offset + 20
share_list.append(SharedDevice(name, type, comment))
return share_list
finally:
buf.close()
self.disconnect_tree(tid)
def list_path(self, service, path = '*', password = None):
path = string.replace(path, '/', '\\')
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.trans2(tid, '\x01\x00', '\x00', '\x16\x00\x00\x02\x06\x00\x04\x01\x00\x00\x00\x00' + path + '\x00', '')
resume = False
files = [ ]
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_TRANSACTION2):
has_more, _, transparam, transdata = self.__decode_trans(s.get_parameter_words(), s.get_buffer())
# A fairly quick trans reassembly.
while has_more:
s2 = self.recv_packet()
if self.isValidAnswer(s2,SMB.SMB_COM_TRANSACTION2):
has_more, _, transparam2, transdata2 = self.__decode_trans(s2.get_parameter_words(), s2.get_buffer())
transdata += transdata2
transparam += transparam2
if not resume:
sid, searchcnt, eos, erroffset, lastnameoffset = unpack('<HHHHH', transparam)
else:
searchcnt, eos, erroffset, lastnameoffset = unpack('<HHHH', transparam)
offset = 0
data_len = len(transdata)
while offset < data_len:
nextentry, fileindex, lowct, highct, lowat, highat, lowmt, highmt, lowcht, highcht, loweof, higheof, lowsz, highsz, attrib, longnamelen, easz, shortnamelen = unpack('<lL12LLlLB', transdata[offset:offset + 69])
files.append(SharedFile(highct << 32 | lowct, highat << 32 | lowat, highmt << 32 | lowmt, higheof << 32 | loweof, highsz << 32 | lowsz, attrib, transdata[offset + 70:offset + 70 + shortnamelen], transdata[offset + 94:offset + 94 + longnamelen]))
resume_filename = transdata[offset + 94:offset + 94 + longnamelen]
offset = offset + nextentry
if not nextentry:
break
if eos:
return files
else:
self.trans2(tid, '\x02\x00', '\x00', pack('<H', sid) + '\x56\x05\x04\x01\x00\x00\x00\x00\x06\x00' + resume_filename + '\x00', '')
resume = True
resume_filename = ''
finally:
self.disconnect_tree(tid)
def retr_file(self, service, filename, callback, mode = SMB_O_OPEN, offset = 0, password = None):
filename = string.replace(filename, '/', '\\')
fid = -1
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
fid, attrib, lastwritetime, datasize, grantedaccess, filetype, devicestate, action, serverfid = self.open_andx(tid, filename, mode, SMB_ACCESS_READ | SMB_SHARE_DENY_WRITE)
if not datasize:
datasize = self.query_file_info(tid, fid)
self.__nonraw_retr_file(tid, fid, offset, datasize, callback)
finally:
if fid >= 0:
self.close(tid, fid)
self.disconnect_tree(tid)
def stor_file(self, service, filename, callback, mode = SMB_O_CREAT | SMB_O_TRUNC, offset = 0, password = None):
filename = string.replace(filename, '/', '\\')
fid = -1
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
fid, attrib, lastwritetime, datasize, grantedaccess, filetype, devicestate, action, serverfid = self.open_andx(tid, filename, mode, SMB_ACCESS_WRITE | SMB_SHARE_DENY_WRITE)
self.__nonraw_stor_file(tid, fid, offset, datasize, callback)
finally:
if fid >= 0:
self.close(tid, fid)
self.disconnect_tree(tid)
def stor_file_nonraw(self, service, filename, callback, mode = SMB_O_CREAT | SMB_O_TRUNC, offset = 0, password = None):
filename = string.replace(filename, '/', '\\')
fid = -1
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
fid, attrib, lastwritetime, datasize, grantedaccess, filetype, devicestate, action, serverfid = self.open_andx(tid, filename, mode, SMB_ACCESS_WRITE | SMB_SHARE_DENY_WRITE)
self.__nonraw_stor_file(tid, fid, offset, datasize, callback)
finally:
if fid >= 0:
self.close(tid, fid)
self.disconnect_tree(tid)
def copy(self, src_service, src_path, dest_service, dest_path, callback = None, write_mode = SMB_O_CREAT | SMB_O_TRUNC, src_password = None, dest_password = None):
dest_path = string.replace(dest_path, '/', '\\')
src_path = string.replace(src_path, '/', '\\')
src_tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + src_service, src_password)
dest_tid = -1
try:
if src_service == dest_service:
dest_tid = src_tid
else:
dest_tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + dest_service, dest_password)
dest_fid = self.open_andx(dest_tid, dest_path, write_mode, SMB_ACCESS_WRITE | SMB_SHARE_DENY_WRITE)[0]
src_fid, _, _, src_datasize, _, _, _, _, _ = self.open_andx(src_tid, src_path, SMB_O_OPEN, SMB_ACCESS_READ | SMB_SHARE_DENY_WRITE)
if not src_datasize:
src_datasize = self.query_file_info(src_tid, src_fid)
if callback:
callback(0, src_datasize)
max_buf_size = (self._dialects_parameters['MaxBufferSize'] >> 10) << 10
read_offset = 0
write_offset = 0
while read_offset < src_datasize:
self.__send_smb_packet(SMB.SMB_COM_READ_ANDX, 0, 0, src_tid, 0, pack('<BBHHLHHLH', 0xff, 0, 0, src_fid, read_offset, max_buf_size, max_buf_size, 0, 0), '')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_READ_ANDX):
offset = unpack('<H', s.get_parameter_words()[2:4])[0]
data_len, dataoffset = unpack('<HH', s.get_parameter_words()[10+offset:14+offset])
d = s.get_buffer()
if data_len == len(d):
self.__send_smb_packet(SMB.SMB_COM_WRITE_ANDX, 0, 0, dest_tid, 0, pack('<BBHHLLHHHHH', 0xff, 0, 0, dest_fid, write_offset, 0, 0, 0, 0, data_len, 59), d)
else:
self.__send_smb_packet(SMB.SMB_COM_WRITE_ANDX, 0, 0, dest_tid, 0, pack('<BBHHLLHHHHH', 0xff, 0, 0, dest_fid, write_offset, 0, 0, 0, 0, data_len, 59), d[dataoffset - 59:dataoffset - 59 + data_len])
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_WRITE_ANDX):
data_len, dataoffset = unpack('<HH', s.get_parameter_words()[4:8])
break
read_offset = read_offset + data_len
if callback:
callback(read_offset, src_datasize)
break
finally:
self.disconnect_tree(src_tid)
if dest_tid > -1 and src_service != dest_service:
self.disconnect_tree(dest_tid)
def check_dir(self, service, path, password = None):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.__send_smb_packet(SMB.SMB_COM_CHECK_DIRECTORY, 0x08, 0, tid, 0, '', '\x04' + path + '\x00')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_CHECK_DIRECTORY):
return
finally:
self.disconnect_tree(tid)
def remove(self, service, path, password = None):
# Perform a list to ensure the path exists
self.list_path(service, path, password)
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.__send_smb_packet(SMB.SMB_COM_DELETE, 0x08, 0, tid, 0, pack('<H', ATTR_HIDDEN | ATTR_SYSTEM | ATTR_ARCHIVE), '\x04' + path + '\x00')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_DELETE):
return
finally:
self.disconnect_tree(tid)
def rmdir(self, service, path, password = None):
# Check that the directory exists
self.check_dir(service, path, password)
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
self.__send_smb_packet(SMB.SMB_COM_DELETE_DIRECTORY, 0x08, 0, tid, 0, '', '\x04' + path + '\x00')
while 1:
s = self.recv_packet()
if self.isValidAnswer(s,SMB.SMB_COM_DELETE_DIRECTORY):
return
finally:
self.disconnect_tree(tid)
def mkdir(self, service, path, password = None):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
smb = NewSMBPacket()
smb['Tid'] = tid
createDir = SMBCommand(SMB.SMB_COM_CREATE_DIRECTORY)
createDir['Data'] = SMBCreateDirectory_Data()
createDir['Data']['DirectoryName'] = path
smb.addCommand(createDir)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_CREATE_DIRECTORY):
return 1
return 0
finally:
self.disconnect_tree(tid)
def rename(self, service, old_path, new_path, password = None):
tid = self.tree_connect_andx('\\\\' + self.__remote_name + '\\' + service, password)
try:
smb = NewSMBPacket()
smb['Tid'] = tid
smb['Flags1'] = SMB.FLAGS1_PATHCASELESS
renameCmd = SMBCommand(SMB.SMB_COM_RENAME)
renameCmd['Parameters'] = SMBRename_Parameters()
renameCmd['Parameters']['SearchAttributes'] = ATTR_SYSTEM | ATTR_HIDDEN | ATTR_DIRECTORY
renameCmd['Data'] = SMBRename_Data()
renameCmd['Data']['OldFileName'] = old_path
renameCmd['Data']['NewFileName'] = new_path
smb.addCommand(renameCmd)
self.sendSMB(smb)
smb = self.recvSMB()
if smb.isValidAnswer(SMB.SMB_COM_RENAME):
return 1
return 0
finally:
self.disconnect_tree(tid)
def get_socket(self):
return self._sess.get_socket()
ERRDOS = { 1: 'Invalid function',
2: 'File not found',
3: 'Invalid directory',
4: 'Too many open files',
5: 'Access denied',
6: 'Invalid file handle. Please file a bug report.',
7: 'Memory control blocks destroyed',
8: 'Out of memory',
9: 'Invalid memory block address',
10: 'Invalid environment',
11: 'Invalid format',
12: 'Invalid open mode',
13: 'Invalid data',
15: 'Invalid drive',
16: 'Attempt to remove server\'s current directory',
17: 'Not the same device',
18: 'No files found',
32: 'Sharing mode conflicts detected',
33: 'Lock request conflicts detected',
80: 'File already exists'
}
ERRSRV = { 1: 'Non-specific error',
2: 'Bad password',
4: 'Access denied',
5: 'Invalid tid. Please file a bug report.',
6: 'Invalid network name',
7: 'Invalid device',
49: 'Print queue full',
50: 'Print queue full',
51: 'EOF on print queue dump',
52: 'Invalid print file handle',
64: 'Command not recognized. Please file a bug report.',
65: 'Internal server error',
67: 'Invalid path',
69: 'Invalid access permissions',
71: 'Invalid attribute mode',
81: 'Server is paused',
82: 'Not receiving messages',
83: 'No room to buffer messages',
87: 'Too many remote user names',
88: 'Operation timeout',
89: 'Out of resources',
91: 'Invalid user handle. Please file a bug report.',
250: 'Temporarily unable to support raw mode for transfer',
251: 'Temporarily unable to support raw mode for transfer',
252: 'Continue in MPX mode',
65535: 'Unsupported function'
}
ERRHRD = { 19: 'Media is write-protected',
20: 'Unknown unit',
21: 'Drive not ready',
22: 'Unknown command',
23: 'CRC error',
24: 'Bad request',
25: 'Seek error',
26: 'Unknown media type',
27: 'Sector not found',
28: 'Printer out of paper',
29: 'Write fault',
30: 'Read fault',
31: 'General failure',
32: 'Open conflicts with an existing open',
33: 'Invalid lock request',
34: 'Wrong disk in drive',
35: 'FCBs not available',
36: 'Sharing buffer exceeded'
}
|
This matter is before the Court on the defendant's Motion for Judgment on the Pleadings and/or Summary Judgment, filed herein on June 13, 1997. This is an action to determine the dischargeability of a debt pursuant to 11 U.S.C. § 523(a)(5) or § 523(a)(15). This Court has jurisdiction of this matter pursuant to 28 U.S.C. § 1334(b); it is a core proceeding pursuant to 28 U.S.C. § 157(b)(2)(I).
After the filing of the defendant's Motion for Judgment on the Pleadings and/or Summary Judgment, as set out above, the plaintiff responded by filing his Memorandum in Opposition to Defendant's Motion for Judgment on the Pleadings or for Summary Judgment. The Court conducted a hearing on the Motion and the response and the matter was taken under consideration.
The defendant points out that the debt is owed to the plaintiff's mother and not to plaintiff personally, and contends that it cannot be considered under § 523(a)(15) unless he can establish that a new obligation in his favor was created by the above-referenced Agreed Order. The defendant cites this Court's opinion in In re Owens, 191 B.R. 669 (Bkrtcy.E.D.Ky. 1996), in support of her position.
The first indebtedness, wherein the debtor was required to make payments ... on the loan for the Jeep driven by the plaintiff, contains the significant language '...and hold the Petitioner harmless from ...' with respect to this indebtedness. It appears that this 'hold harmless' language is significant in that it appears to be language which creates a new indebtedness, from respondent/defendant to petitioner/plaintiff in this particular case .... as contemplated by the statute. The Court concludes that the Jeep indebtedness is a debt 'incurred by the debtor in the course of a divorce or separation or in connection with a separation agreement' contemplated by § 523(a)(15) and, unless the defendant is successful in his assertion of the affirmative defenses set forth in subparagraph (A) or subparagraph (B), would be a non-dischargeable debt.
With respect to the debts set forth in paragraph (11) of the Agreement, the remaining debts of which are the notes ... totaling $5,650.57, the hold harmless language found in paragraph (5) is conspicuously absent. The absence of the hold harmless language or any other language in this paragraph which could be construed to make a new debt from respondent/defendant to petitioner/plaintiff, leads to the conclusion that this is not a debt 'incurred by the debtor in the course of the ... separation agreement'. In re Stegall, 188 B.R. 597 (Bkrtcy.W.D.Mo. 1995). Therefore, the debts described in paragraph (11), to the extent that they remain owing, are not made non-dischargeable by the language of § 523(a)(15).
[p]rior to the divorce the debtor, his wife, and her parents were all liable to [the creditor]. The property settlement agreement and decree did not change that. If, by contrast, the agreement had provided that debtor would indemnify and hold plaintiff harmless to the extent she made payments to [the creditor], a new obligation might have been incurred. And, to the extent plaintiff actually made such payments, section 523(a)(15) might have come into play. Section 523(a)(15) is not applicable, however, in this case because neither the property settlement agreement nor the decree created a debt not otherwise in existence.
In re Stegall, 188 B.R. at 598.
In the case at bar, the same reasoning obtains. The debt to the plaintiff's mother existed before the parties' divorce, memorialized by a promissory note apparently entered into sometime in 1985. It was addressed in the Agreed Order which assigned responsibility for repayment of specified amounts by the plaintiff and the defendant. Neither one agreed to indemnify or hold the other harmless in regard to liability for his or her portion of the debt. Pursuant to the terms of the Agreed Order, the plaintiff and defendant were liable to his mother just as they had been before their divorce. Dividing the total due between the two of them did not change the fact that the plaintiff's mother could pursue one or the other upon any failure to pay.
The plaintiff counters with the argument that the presence or absence of hold harmless language is insignificant, and that § 523(a)(15) "has the effect of making all divorce related obligations subject to a presumption of non-dischargeability." In re Armstrong, 205 B.R. 386, 391 (Bkrtcy.W.D.Tenn. 1996). The Marital Dissolution Agreement before the court in Armstrong required the parties to hold each other harmless for the debts each had assumed, however, and this Court can only conclude that these new debts are the "divorce related obligations" to which the Armstrong court refers. This case does not, therefore, support the plaintiff's position. In fairness to the plaintiff, however, the Armstrong court cites as authority two cases which clearly reach a different conclusion than this Court reached in Owens. In re Cleveland, 198 B.R. 394 (Bkrtcy.N.D.Ga. 1996); In re Schmitt, 197 B.R. 312 (Bkrtcy.W.D.Ark. 1996). For reasons set out below, this Court disagrees with those holdings.
The plaintiff goes on to argue that Kentucky law, as set out in KRS 403.180(5), accords the terms of separation agreements the status of new and enforceable obligations without hold harmless or indemnity language. That statute, enacted by the Kentucky legislature many years prior to Congress' enactment of § 523(a)(15), provides that "[t]erms of the agreement set forth in the decree are enforceable by all remedies available for enforcement of a judgment, including contempt, and are enforceable as contract terms." The plaintiff cites In re Carlisle, 205 B.R. 812 (Bkrtcy.W.D.La. 1997), in support of this position.
The performance under consideration in the Carlisle case was the debtor's agreement to fund a Chapter 13 plan which was intended to save marital property from foreclosure. The court went on to say that pursuant to KRS 403.180(5), the debtor's spouse could receive a damage award for the debtor's failure to perform his obligations to her. These obligations, the court opined, had been created in the course of the divorce and property settlement, thus implicating § 523(a)(15). The court did not agree with this Court that the absence of hold harmless language was significant. The Carlisle court's interpretation of the Kentucky statute would make all debts allocated to the debtor spouse in a divorce proceeding non-dischargeable under § 523(a)(15) unless the debtor could succeed in asserting either of the two affirmative defenses set out in that section. That result appears to be contrary to the legislative history of this section enacted as part of Public Law No. 103-394.
The fact that one spouse has a state remedy for the other spouse's failure to perform under a property settlement agreement does not change the fact that a debt for which a debtor has agreed to be responsible is still a debt owed to a creditor; and that absent hold harmless or similar language creating a new debt from the debtor to his spouse, the relationships among the debtor, his spouse, and the creditor as concerns the debt remain the same.
This Court must therefore agree with the defendant that, consistent with its holding in Owens, supra, the debt being considered herein is dischargeable as not being within the purview of a debt contemplated by 11 U.S.C. § 523(a)(15). The defendant has carried her burden of demonstrating that there is no genuine issue as to any material fact and that she is entitled to judgment as a matter of law. An order in conformity with this opinion will be entered separately.
|
# Copyright 2010 Jacob Kaplan-Moss
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base utilities to build API operation managers and objects on top of.
"""
import contextlib
import hashlib
import os
from manilaclient import exceptions
from manilaclient.openstack.common import cliutils
from manilaclient import utils
# Python 2.4 compat
try:
all
except NameError:
def all(iterable):
return True not in (not x for x in iterable)
class Manager(utils.HookableMixin):
"""Manager for CRUD operations.
Managers interact with a particular type of API (shares, snapshots,
etc.) and provide CRUD operations for them.
"""
resource_class = None
def __init__(self, api):
self.api = api
@property
def api_version(self):
return self.api.api_version
def _list(self, url, response_key, obj_class=None, body=None):
resp = None
if body:
resp, body = self.api.client.post(url, body=body)
else:
resp, body = self.api.client.get(url)
if obj_class is None:
obj_class = self.resource_class
data = body[response_key]
# NOTE(ja): keystone returns values as list as {'values': [ ... ]}
# unlike other services which just return the list...
if isinstance(data, dict):
try:
data = data['values']
except KeyError:
pass
with self.completion_cache('human_id', obj_class, mode="w"):
with self.completion_cache('uuid', obj_class, mode="w"):
return [obj_class(self, res, loaded=True)
for res in data if res]
@contextlib.contextmanager
def completion_cache(self, cache_type, obj_class, mode):
"""Bash autocompletion items storage.
The completion cache stores items that can be used for bash
autocompletion, like UUIDs or human-friendly IDs.
A resource listing will clear and repopulate the cache.
A resource create will append to the cache.
Delete is not handled because listings are assumed to be performed
often enough to keep the cache reasonably up-to-date.
"""
base_dir = cliutils.env('manilaclient_UUID_CACHE_DIR',
'MANILACLIENT_UUID_CACHE_DIR',
default="~/.manilaclient")
# NOTE(sirp): Keep separate UUID caches for each username + endpoint
# pair
username = cliutils.env('OS_USERNAME', 'MANILA_USERNAME')
url = cliutils.env('OS_URL', 'MANILA_URL')
uniqifier = hashlib.md5(username.encode('utf-8') +
url.encode('utf-8')).hexdigest()
cache_dir = os.path.expanduser(os.path.join(base_dir, uniqifier))
try:
os.makedirs(cache_dir, 0o755)
except OSError:
# NOTE(kiall): This is typically either permission denied while
# attempting to create the directory, or the directory
# already exists. Either way, don't fail.
pass
resource = obj_class.__name__.lower()
filename = "%s-%s-cache" % (resource, cache_type.replace('_', '-'))
path = os.path.join(cache_dir, filename)
cache_attr = "_%s_cache" % cache_type
try:
setattr(self, cache_attr, open(path, mode))
except IOError:
# NOTE(kiall): This is typically a permission denied while
# attempting to write the cache file.
pass
try:
yield
finally:
cache = getattr(self, cache_attr, None)
if cache:
cache.close()
delattr(self, cache_attr)
def write_to_completion_cache(self, cache_type, val):
cache = getattr(self, "_%s_cache" % cache_type, None)
if cache:
cache.write("%s\n" % val)
def _get(self, url, response_key=None):
resp, body = self.api.client.get(url)
if response_key:
return self.resource_class(self, body[response_key], loaded=True)
else:
return self.resource_class(self, body, loaded=True)
def _get_with_base_url(self, url, response_key=None):
resp, body = self.api.client.get_with_base_url(url)
if response_key:
return [self.resource_class(self, res, loaded=True)
for res in body[response_key] if res]
else:
return self.resource_class(self, body, loaded=True)
def _create(self, url, body, response_key, return_raw=False, **kwargs):
self.run_hooks('modify_body_for_create', body, **kwargs)
resp, body = self.api.client.post(url, body=body)
if return_raw:
return body[response_key]
with self.completion_cache('human_id', self.resource_class, mode="a"):
with self.completion_cache('uuid', self.resource_class, mode="a"):
return self.resource_class(self, body[response_key])
def _delete(self, url):
resp, body = self.api.client.delete(url)
def _update(self, url, body, response_key=None, **kwargs):
self.run_hooks('modify_body_for_update', body, **kwargs)
resp, body = self.api.client.put(url, body=body)
if body:
if response_key:
return self.resource_class(self, body[response_key])
else:
return self.resource_class(self, body)
class ManagerWithFind(Manager):
"""Like a `Manager`, but with additional `find()`/`findall()` methods."""
def find(self, **kwargs):
"""Find a single item with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
matches = self.findall(**kwargs)
num_matches = len(matches)
if num_matches == 0:
msg = "No %s matching %s." % (self.resource_class.__name__, kwargs)
raise exceptions.NotFound(404, msg)
elif num_matches > 1:
raise exceptions.NoUniqueMatch
else:
return matches[0]
def findall(self, **kwargs):
"""Find all items with attributes matching ``**kwargs``.
This isn't very efficient: it loads the entire list then filters on
the Python side.
"""
found = []
searches = list(kwargs.items())
for obj in self.list():
try:
if all(getattr(obj, attr) == value
for (attr, value) in searches):
found.append(obj)
except AttributeError:
continue
return found
def list(self):
raise NotImplementedError
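# ---------------------------------------------------------------------------
# Usage sketch (hypothetical, not part of manilaclient): a concrete manager
# only needs to set `resource_class` and map its endpoints onto the
# _list/_get/_delete helpers above. All names below are illustrative.
class ExampleResource(object):
    def __init__(self, manager, info, loaded=False):
        self.manager = manager
        self.__dict__.update(info)
class ExampleManager(ManagerWithFind):
    resource_class = ExampleResource
    def list(self):
        # GET /examples -> {"examples": [...]}
        return self._list('/examples', 'examples')
    def get(self, resource_id):
        # GET /examples/<id> -> {"example": {...}}
        return self._get('/examples/%s' % resource_id, 'example')
    def delete(self, resource_id):
        self._delete('/examples/%s' % resource_id)
# With list() implemented, find()/findall() from ManagerWithFind work as
# well, e.g. manager.find(name='foo'); listings also repopulate the bash
# completion caches via completion_cache().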
|
Compare the best business VoIP providers in Thida, AR side by side on VoipReview. Find and compare top-rated providers across price, features, ratings, and user-submitted reviews to find the best deal on a business VoIP service. Start comparing below to see how much you could save on your monthly phone bill.
Voice over Internet Protocol (VoIP), is a technology which transmits voice calls as data over a network, offering an alternative to traditional landlines for making and receiving phone calls. Using your broadband connection, Business VoIP plans in Thida, AR offer reliable phone systems integrated with features that enhance business productivity and reduce operating costs when compared to traditional landline services.
VoIP services in Thida, AR are offered in residential and business plans. While a residential plan may be less expensive, it typically provides only basic inbound and outbound calling. A business plan will include advanced calling features essential to professional and efficient call management, such as PBX systems, call routing, hold music and automated line attendants.
VoIP is quickly becoming the standard phone system for successful businesses, as it offers superior call quality, increased efficiency and lower costs when compared to a landline. Prices vary widely among VoIP providers in Thida, AR, so it is important to compare prices and features.
How Should I Examine Business VoIP Plans in Thida, AR?
It's fast and easy to review businesses and the plans they provide based upon their recurring rates, plan options, and client ratings. Our research tables on VoipReview let you assess multiple Thida business VoIP providers at once. Browse through plan advantages, expenses, rankings, and other facts to get an idea of the best business VoIP carriers.
The stars presented on the chart are the averaged rankings obtained from our customer-submitted evaluations. To peruse evaluations for one particular carrier, go to the 'reviews' hyperlink to be forwarded to its information page. Scoping out customer-submitted evaluations is a valuable technique to get the details on VoIP carriers in Thida, AR.
What Kind of Features Can I Expect From a Business VoIP Plan in Thida, AR?
Generally, you will find that business VoIP plans offer many included features that would come at an additional charge with a standard PSTN service in Thida, AR. Examples of VoIP plan features include: call waiting, caller ID, find me/follow me, call forwarding, number portability, virtual extensions, hold music and voicemail.
How Does Business VoIP in Thida, AR Differ From a Traditional Landline Phone?
Business VoIP in Thida, AR provides users with superior call quality, flexible communications options, and more included features. We understand that many business owners are hesitant about switching to VoIP because they are comfortable with PSTN. However, the latest improvements in VoIP technology have virtually eliminated many of the issues that were associated with Voice over Internet Protocol in the beginning. These days, it is entirely possible to get a VoIP system that outperforms a landline phone for a much lower monthly cost.
Before deciding on a particular provider, we strongly encourage researching all Thida, AR VoIP companies and plans. We also recommend checking each provider's website to view disclaimers, rates, and additional fees, information that may not be displayed on our website. Here at VoipReview, we do our best to provide up-to-date information, but providers often change rates and plan offerings without warning, so we cannot give a 100% guarantee.
|
#!/usr/bin/env python
import os, sys
import json
from collections import namedtuple
import requests
Key = namedtuple("Key", ["name","version"])
class ValidatorBase(object):
"""
    This is the base class for all validator backends.
"""
def __init__(self, name, version, description, supported=[]):
"""
Parameters:
-----------
name: Name of the backend service e.g., s3
version: Version of this implementation
description: Text description of this service
        supported: names of additional services this backend supports;
                   the backend's own name is appended automatically.
For example, there may be multiple s3 implementations that
support different kinds of services.
"""
self.enable = 'y'
self.name = name
self.version = version
self.description = description
self.support = supported + [name]
self.initialize()
def __str__(self):
return self.name
def initialize(self):
"""
Called to initialize sessions, internal objects etc.
"""
return
def autooptions(self):
"""
Get default options
"""
return None
def evaluate(self, repo, files, rules):
"""
Evaluate the repo
returns: A list of dictionaries with:
target: relative path of the file
rules: rules file used
validator: name of the validator
status: OK/Success/Error
            message: any additional information
"""
return []
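# Minimal sketch of a concrete validator built on ValidatorBase; the
# world-writable check below is made up purely for illustration.
class FilePermissionValidator(ValidatorBase):
    """Toy validator that flags world-writable files."""
    def __init__(self):
        super(FilePermissionValidator, self).__init__(
            name="permissions",
            version="0.1",
            description="Check that files are not world-writable")
    def evaluate(self, repo, files, rules):
        results = []
        for f in files:
            mode = os.stat(f).st_mode
            ok = not (mode & 0o002)  # world-writable bit
            results.append({
                'target': f,
                'rules': rules,
                'validator': self.name,
                'status': 'OK' if ok else 'Error',
                'message': '' if ok else 'file is world-writable',
            })
        return results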
|
1997 Infiniti Qx4 Factory Service Repair Manual - AUTEX Engine Knock Detonation Sensor KS79 KS79T 22060-30P00 compatible w/Nissan 200SX SE-R 2.0L l4 240SX 300ZX Altima D21 Frontier Maxima/Maxima SE NX, Mercury Villager, Infiniti G20 I30 J30 Q45 QX4. Save $3,879 on a 2006 INFINITI M45. Search pre-owned 2006 INFINITI M45 listings to find the best local deals. CarGurus analyzes over 6 million cars daily.. Motor Trend reviews the 1997 Ford F-250 where consumers can find detailed information on specs, fuel economy, transmission and safety. Find local 1997 Ford F-250 prices online..
The Dodge Ram 1500 was equipped with the A518 (46RE) or the 545RFE transmission. The late-model Ram 1500 also had the option of the 6-speed 65RFE. They aren't without their problems, though, so let's look at some of the most common Dodge Ram 1500 transmission problems, look at cost estimates, and figure out what you can do about them. Become a friend of www.drivetrain.com. Register on our Facebook page and qualify for a $10.00 discount on your next order over $100.00! We are one of a few web sites that publish prices. El Club de Diagramas is where technicians exchange and share diagrams, service manuals, and any technical information files useful for electronics repairs.
Manufacturer's report date: 04/12/13. Component: air bags. Summary: Nissan is recalling certain model year 2001-2003 Maxima, Pathfinder, and Infiniti I35; model year 2002-2003 Infiniti QX4; and model year 2003 Infiniti FX35 and FX45 and possibly certain Sentra vehicles to address a safety defect in the passenger side frontal air bag, which may produce excessive internal pressure causing the inflator to rupture. Recall ID #06E022000 - Service Brakes, Hydraulic. Consequence: loss of brake fluid can lead to a decrease in brake line pressure and an increase in stopping distance, which can result in a vehicle crash. The award-winning experts at Mobile Edge in Lehighton, PA dispel many of the most common myths surrounding remote car starters in today's complex vehicles.
Welcome to P0420.com, the place dedicated to that DTC (diagnostic trouble code) P0420.The P0420 code is a very popular one and that's why there's a site dedicated to that specific trouble code..
|
from fabric.api import *
from fabric.colors import cyan
from fabric.contrib import files
packages = (
'build-essential',
'git',
'mercurial',
'rsync',
'vim',
)
def install_base_packages():
sudo('apt-get update')
for package in packages:
sudo('apt-get install %s --assume-yes' % package)
def upgrade_system():
sudo('apt-get update')
sudo('apt-get dist-upgrade --assume-yes --quiet')
def create_deploy_user():
"creates deployment user"
username = 'deploy'
# create deploy user & home without password
if files.contains('/etc/passwd', username):
return
sudo('useradd %s --create-home --shell /bin/bash' % username)
# create authorized_keys & upload public key
sudo('mkdir -p /home/deploy/.ssh')
sudo('chmod 700 /home/deploy/.ssh')
pub_key = open(env.key_filename, 'rb').read()
files.append('/home/%s/.ssh/authorized_keys' % username, pub_key, use_sudo=True)
# update authorized_keys permissions
sudo('chmod 400 /home/%s/.ssh/authorized_keys' % username)
sudo('chown deploy:deploy /home/%s/.ssh -R' % username)
# create sudo password & add to sudoers
print(cyan('set sudo password for "%s" user' % username))
sudo('passwd %s' % username)
files.append('/etc/sudoers', '%s ALL=(ALL) ALL' % username, use_sudo=True)
def automate_security_updates():
"enable automatic installation of security updates"
    sudo('apt-get install unattended-upgrades --assume-yes')
files.upload_template(
'apt/10periodic',
'/etc/apt/apt.conf.d/10periodic',
env,
template_dir='fabfile/templates',
use_sudo=True,
        mode=0o644,
)
# TODO: checkout apticron for email alerts
def install_rackspace_monitoring():
# add the rackspace apt repo to list
files.append("/etc/apt/sources.list.d/rackspace-monitoring-agent.list",
"deb http://stable.packages.cloudmonitoring.rackspace.com/ubuntu-12.04-x86_64 cloudmonitoring main",
use_sudo=True)
    # install rackspace repo signing key (apt-key needs root)
    sudo('curl https://monitoring.api.rackspacecloud.com/pki/agent/linux.asc | apt-key add -')
    # install the monitoring agent
    sudo('apt-get update')
    sudo('apt-get install rackspace-monitoring-agent --assume-yes')
    # run setup
    sudo('rackspace-monitoring-agent --setup')
def harden_sudoers():
    """
    >> /etc/sudoers
    root ALL=(ALL) ALL
    deploy ALL=(ALL) ALL
    """
    # files.append only adds a line if it is not already present
    files.append('/etc/sudoers', 'root ALL=(ALL) ALL', use_sudo=True)
    files.append('/etc/sudoers', 'deploy ALL=(ALL) ALL', use_sudo=True)
def harden_ssh():
    """
    >> /etc/ssh/sshd_config
    PermitRootLogin no
    PasswordAuthentication no
    """
    # rewrite the sshd_config directives in place (assumes the stock
    # Ubuntu config, where these directives already exist)
    files.sed('/etc/ssh/sshd_config', '^#?PermitRootLogin .*',
              'PermitRootLogin no', use_sudo=True)
    files.sed('/etc/ssh/sshd_config', '^#?PasswordAuthentication .*',
              'PasswordAuthentication no', use_sudo=True)
    sudo('service ssh restart')
def setup_firewall():
    """
    ufw allow from {your-ip} to any port 22
    ufw allow 80
    ufw enable
    """
    # NOTE: this opens ssh to the world; prefer restricting port 22 to
    # your own IP as sketched in the docstring above
    sudo('ufw allow 22')
    sudo('ufw allow 80')
    sudo('ufw --force enable')
def harden_server():
setup_firewall()
harden_ssh()
harden_sudoers()
def provision_base_server():
upgrade_system()
install_base_packages()
automate_security_updates()
create_deploy_user()
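# Usage sketch: these tasks are normally driven from the fab CLI, e.g.
# (host and key path are placeholders)
#
#   fab -H root@your-server -i ~/.ssh/id_rsa provision_base_server
#   fab -H root@your-server harden_server
#
# or composed into a one-shot task:
def provision_and_harden():
    "provision the base server, then apply the hardening steps (sketch)"
    provision_base_server()
    harden_server()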
|
Tomorrow is the #lillyfortarget launch and tonight my friends and I are celebrating my birthday + the lilly launch with our own #27lillydresses party! Anyone else celebrating the launch with a party?
Aren’t these the most gorgeous flowers my husband sent yesterday?
|
#! /usr/bin/env python
# This package performs sequential calculations of a given number of events,
# after reading parameters from ParameterDict.py. The most important control
# parameters are set in controlParameterList; for other parameters see
# allParameterLists. This package is intended to run in the background, so
# only basic output is generated. When necessary, other functions given in the
# package for single executables can be invoked individually for more
# flexibility.
# The main entry is the sequentialEventDriverShell function.
from os import path, getcwd, remove, makedirs
from sys import stdout, exit
from shutil import move, copy, copytree, rmtree
from glob import glob
from subprocess import call
import numpy as np
import re
class ExecutionError(Exception): pass # used to signal my own exception
# set global default parameters
allParameterLists = [
'controlParameterList',
'initial_condition_control',
'superMCControl',
'superMCParameters',
'preEquilibriumControl',
'preEquilibriumParameters',
'hydroControl',
'hydroParameters',
'iSSControl',
'iSSParameters',
'iSControl',
'iSParameters',
'photonEmissionControl',
'photonEmissionParameters',
'osc2uControl',
'osc2uParameters',
'urqmdControl',
'urqmdParameters',
'binUtilitiesControl',
'binUtilitiesParameters',
'HoTCoffeehControl',
'HoTCoffeehParameters',
    'EbeCollectorControl',
    'EbeCollectorParameters',
]
controlParameterList = {
'simulation_type' : 'hybrid', # 'hybrid' or 'hydro'
'niceness' : 10, # range from 0 to 19 for process priority, 0 for the highest priority
'numberOfEvents' : 10, # how many sequential calculations
'rootDir' : path.abspath('../'),
'resultDir' : path.abspath('../finalResults'), # final results will be saved here, absolute
'eventResultDirPattern' : 'event-%d', # %d->event_id, where event results are saved
'eventResultDir' : None, # used to pass event result folder from sequentialEventDriverShell to others
'combinedUrqmdFile' : 'urqmdCombined.txt', # urqmd from all events will be combined into this file
'buildCMD' : 'make build',
'cleanCMD' : 'make clean',
}
initial_condition_control = {
'centrality': '0-5%', # centrality bin
# centrality cut variable: total_entropy or Npart
'cut_type': 'total_entropy',
'initial_condition_type': 'superMC', # type of initial conditions
# file path for the pre-generated initial condition files
'pre-generated_initial_file_path': 'initial_conditions',
# name pattern for the initial condition files
'pre-generated_initial_file_pattern': 'sd_event_[0-9]*_block.dat',
'pre-generated_initial_file_read_in_mode': 2, # read in mode for VISH2+1
}
superMCControl = {
'mainDir' : 'superMC',
'dataDir' : 'data', # where initial conditions are stored, relative
'saveICFile' : True, # whether to save initial condition file
'dataFiles' : '*event_%d_*.dat', # data filenames
'initialFiles' : 'sd_event_*_block.dat', #initial density profile filenames
'numberOfEventsParameterName' : 'nev',
'executable' : 'superMC.e',
}
superMCParameters = {
'model_name' : 'MCGlb',
'which_mc_model' : 5,
'sub_model' : 1,
'Npmin' : 0,
'Npmax' : 1000,
'bmin' : 0,
'bmax' : 20,
'cutdSdy' : 1,
'cutdSdy_lowerBound' : 551.864,
'cutdSdy_upperBound' : 1000000.0,
'ecm' : 2760,
'Aproj' : 208,
'Atarg' : 208,
'proj_deformed' : 0,
'targ_deformed' : 0,
'finalFactor' : 56.763,
'use_ed' : 0,
'alpha' : 0.118,
'lambda' : 0.288,
'operation' : 1,
'include_NN_correlation' : 1,
'cc_fluctuation_model' : 6,
'cc_fluctuation_Gamma_theta' : 0.75,
'maxx' : 13.0, # grid size in x (fm)
'maxy' : 13.0, # grid size in y (fm)
'dx' : 0.1, # grid spacing in x (fm)
'dy' : 0.1, # grid spacing in y (fm)
'nev' : 1,
}
preEquilibriumControl = {
'mainDir' : 'fs',
'initialConditionDir' : 'data/events', # where initial conditions are stored
'initialConditionFile' : 'sd_event_1_block.dat', # IC filename
'resultDir' : 'data/result/event_1/%g', # pre-equilibrium results folder
'resultFiles' : '*', # results files
'executable' : 'lm.e',
}
preEquilibriumParameters = {
'event_mode' : 1,
'taumin' : 0.6,
'taumax' : 0.6,
'dtau' : 0.2,
}
hydroControl = {
'mainDir' : 'VISHNew',
'initialConditionDir' : 'Initial', # hydro initial condition folder, relative
'initialConditionFile' : 'InitialSd.dat', # IC filename
'resultDir' : 'results', # hydro results folder, relative
'resultFiles' : '*', # results files
'saveICFile' : True, # whether to save initial condition file
'saveResultGlobs' : ['*.h5','surface.dat', 'dec*.dat', 'ecc*.dat'],
# files match these globs will be saved
'executable' : 'VISHNew.e',
}
hydroParameters = {
'IINIT' : 2,
'IEOS' : 7,
'iEin' : 1,
'vis' : 0.08,
'Ivisflag' : 0,
'IvisBulkFlag' : 0, # flag for temperature dependence of bulk viscosity
'visbulknorm' : 0.0, # the overall normalization of the bulk viscosity
'IviscousEqsType' : 1, # type of evolution equations for viscous quantities
'iLS' : 130, # lattice points in the transverse plane
'dx' : 0.1, # lattice spacing in x
'dy' : 0.1, # lattice spacing in y
'T0' : 0.6, # tau_0
'dt' : 0.02, # dtau
'Edec' : 0.3, # 0.3->160 MeV, 0.18->120 MeV
'factor' : 1.0,
'IhydroJetoutput' : 1, # switch for output hydro evolution history into hdf5 file
'InitialURead' : 1, # switch to read in initial flow velocity and shear tensor
}
iSSControl = {
'mainDir' : 'iSS',
'operationDir' : 'results',
'saveResultGlobs' : ['*vn*.dat','OSCAR.DAT'], # files in the operation directory matching these globs will be saved
'OSCARFile' : 'OSCAR.DAT',
'executable' : 'iSS.e',
}
iSSParameters = {
'turn_on_bulk' : 0,
'include_deltaf_bulk' : 0,
'include_deltaf_shear' : 0,
'calculate_vn' : 0,
'MC_sampling' : 2,
'number_of_repeated_sampling' : 10,
'y_LB' : -2.5,
'y_RB' : 2.5,
}
iSControl = {
'mainDir' : 'iS',
'operationDir' : 'results',
'saveResultGlobs' : ['dN_ptdptdphidy.dat', '*_vndata.dat', 'v2data*'], # files in the operation directory matching these globs will be saved
'executables' : ('iS.e', 'resonance.e', 'iInteSp.e'),
'entryShell' : 'iS_withResonance.sh',
}
iSParameters = {}
photonEmissionControl = {
'mainDir' : 'photonEmission',
'operationDir' : 'results',
'saveResultGlobs' : ['*Sp*.dat', '*dTdtau*.dat'], # files in the operation directory matching these globs will be saved
'executable' : 'hydro_photonEmission.e',
}
photonEmissionParameters = {
'dx' : 0.5,
'dy' : 0.5,
'dTau' : 0.02,
'T_dec' : 0.120,
'tau_start' : 0.6,
'calHGIdFlag' : 0,
}
osc2uControl = {
'mainDir' : 'osc2u',
'outputFilename' : 'fort.14',
'saveOSCAR' : False, # whether to save OSCAR file
'executable' : 'osc2u.e',
}
osc2uParameters = {}
urqmdControl = {
'mainDir' : 'urqmd',
'controlFilename' : 'uqmd.burner',
'ICFilename' : 'OSCAR.input',
'outputFilename' : 'particle_list.dat',
'saveOutputFile' : True, # whether to save the output file
'executable' : 'urqmd.e',
'entryShell' : 'runqmd.sh',
'run_UrQMD' : False, # don't run UrQMD by default
}
urqmdParameters = {}
binUtilitiesControl = {
'mainDir' : 'binUtilities',
'operationDir' : 'results',
'saveResultGlobs' : ['*flow*.dat', 'pT_*.dat'], # files in the operation directory matching these globs will be saved
'executable' : 'urqmdBinShell.py',
}
binUtilitiesParameters = {}
EbeCollectorControl = {
'mainDir' : 'EbeCollector',
'executable_hybrid' : 'EbeCollectorShell_hydroWithUrQMD.py',
'executable_hydro' : 'EbeCollectorShell_pureHydro.py',
'executable_hydroEM' : 'EbeCollectorShell_HydroEM.py',
'executable_hydroEM_with_decaycocktail' : 'EbeCollectorShell_HydroEM_with_decaycocktail.py',
}
EbeCollectorParameters = {
'subfolderPattern' : '"event-(\d*)"',
'databaseFilename' : 'collected.db',
}
HoTCoffeehControl = {
'mainDir' : 'HoTCoffeeh',
'operationDir' : 'results',
'runHoTCoffeeh' : False,
'executables' : ('cfwr.e', 'svwr.e'),
'entryShell' : 'HoTCoffeeh.sh',
'saveResultGlobs' : ['all*dat', 'total*dat', 'correlfunct3D*.dat', \
'*spectra.dat', 'HBT*dat', 'resonance*h5', \
'target*h5', 'resonance_fraction.dat', 'chosen_resonances.dat'],
}
HoTCoffeehParameters = {
'grouping_particles' : 0,
'particle_diff_tolerance' : 0.00,
'use_plane_psi_order' : 0,
'ignore_long_lived_resonances' : 1,
'max_lifetime' : 100.0,
'include_delta_f' : 1,
'include_bulk_pi' : 1,
'n_order' : 4,
'tolerance' : 0.00,
'flag_negative_S' : 1,
'chosenParticlesMode' : 0,
'nKT' : 101,
'nKphi' : 48,
'KTmin' : 0.01,
'KTmax' : 1.01,
'SV_npT' : 15,
'SV_npphi' : 48,
'SV_resonanceThreshold' : 1.00,
'CF_npT' : 15,
'CF_npphi' : 36,
'CF_npY' : 21,
'CF_resonanceThreshold' : 0.60,
'use_lambda' : 1,
'use_log_fit' : 1,
'use_extrapolation' : 1,
'fit_with_projected_cfvals' : 1,
'flesh_out_cf' : 1,
'calculate_CF_mode' : 0,
'qtnpts' : 51,
'qxnpts' : 7,
'qynpts' : 7,
'qznpts' : 7,
'delta_qx' : 0.025,
'delta_qy' : 0.025,
'delta_qz' : 0.0125,
}
def readInParameters():
""" Overwrite default parameter lists with those in ParameterDict. """
try:
import ParameterDict
for aParameterList in allParameterLists:
if aParameterList in dir(ParameterDict):
exec("%s.update(ParameterDict.%s)" % (aParameterList, aParameterList))
    except (ImportError, IOError, SyntaxError):
raise ExecutionError("Errors trying to open/read the ParameterDict.py file!")
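# For illustration, a ParameterDict.py that overrides a few defaults could
# look like the following; partial dictionaries are fine because the driver
# merges them with .update(). The values shown are arbitrary examples.
#
#   controlParameterList = {
#       'simulation_type'   : 'hydro',
#       'numberOfEvents'    : 2,
#   }
#   superMCParameters = {
#       'ecm'               : 200,
#       'Aproj'             : 197,
#       'Atarg'             : 197,
#   }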
def sorted_nicely( l ):
""" Sorts the given iterable in the way that is expected.
Required arguments:
l -- The iterable to be sorted.
"""
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
return sorted(l, key = alphanum_key)
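# example: sorted_nicely(['event-10', 'event-2', 'event-1'])
# returns ['event-1', 'event-2', 'event-10'] (plain sorted() would put
# 'event-10' before 'event-2')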
def translate_centrality_cut():
"""
translate the centrality boundaries to Npart, dS/dy, b values and update
the parameter lists for simulations
"""
cut_type = initial_condition_control['cut_type']
if cut_type not in ['total_entropy', 'Npart']:
print "invalid centrality cut type: ", cut_type
exit(1)
centrality_string = initial_condition_control['centrality']
centrality_lower_bound = float(centrality_string.split('-')[0])
centrality_upper_bound = float(
centrality_string.split('-')[1].split('%')[0])
    if superMCParameters['model_name'] == 'MCGlb':
        superMCParameters['which_mc_model'] = 5
        superMCParameters['sub_model'] = 1
        model_name = 'MCGlb'
    elif superMCParameters['model_name'] == 'MCKLN':
        superMCParameters['which_mc_model'] = 1
        superMCParameters['sub_model'] = 7
        model_name = 'MCKLN'
if superMCParameters['cc_fluctuation_model'] != 0:
multiplicity_fluctuation = 'withMultFluct'
else:
multiplicity_fluctuation = 'noMultFluct'
if superMCParameters['include_NN_correlation'] != 0:
NNcorrelation = 'withNNcorrelation'
else:
NNcorrelation = 'd0.9'
collision_energy = str(superMCParameters['ecm'])
Aproj = superMCParameters['Aproj']
    Atarg = superMCParameters['Atarg']
nucleus_name_dict = {
208: 'Pb',
197: 'Au',
238: 'U',
63: 'Cu',
27: 'Al',
1: 'p',
2: 'd',
3: 'He',
}
    if Aproj == Atarg: #symmetric collision
        nucleus_name = nucleus_name_dict[Aproj]+nucleus_name_dict[Atarg]
    else: # asymmetric collision
        nucleus_name = (nucleus_name_dict[min(Aproj, Atarg)]
                        + nucleus_name_dict[max(Aproj, Atarg)])
centrality_cut_file_name = (
'iebe_centralityCut_%s_%s_sigmaNN_gauss_%s_%s.dat'
% (cut_type, model_name + nucleus_name + collision_energy,
NNcorrelation, multiplicity_fluctuation)
)
try:
centrality_cut_file = np.loadtxt(
path.join(path.abspath('../centrality_cut_tables'),
centrality_cut_file_name))
except IOError:
print "Can not find the centrality cut table for the collision system"
print centrality_cut_file_name
exit(1)
lower_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_lower_bound+1e-30))
upper_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_upper_bound))
cut_value_upper = (
(centrality_cut_file[lower_idx-1, 1]
- centrality_cut_file[lower_idx, 1])
/(centrality_cut_file[lower_idx-1, 0]
- centrality_cut_file[lower_idx, 0])
*(centrality_lower_bound - centrality_cut_file[lower_idx-1, 0])
+ centrality_cut_file[lower_idx-1, 1]
)
cut_value_low = (
(centrality_cut_file[upper_idx-1, 1]
- centrality_cut_file[upper_idx, 1])
/(centrality_cut_file[upper_idx-1, 0]
- centrality_cut_file[upper_idx, 0])
*(centrality_upper_bound - centrality_cut_file[upper_idx-1, 0])
+ centrality_cut_file[upper_idx-1, 1]
)
if cut_type == 'total_entropy':
superMCParameters['cutdSdy'] = 1
npart_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
npart_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 4])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 5])
superMCParameters['cutdSdy_lowerBound'] = cut_value_low
superMCParameters['cutdSdy_upperBound'] = cut_value_upper
elif cut_type == 'Npart':
superMCParameters['cutdSdy'] = 0
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
npart_min = cut_value_low
npart_max = cut_value_upper
superMCParameters['Npmax'] = npart_max
superMCParameters['Npmin'] = npart_min
superMCParameters['bmax'] = b_max
superMCParameters['bmin'] = b_min
#print out information
print '-'*80
print('%s collisions at sqrt{s} = %s A GeV with %s initial conditions'
% (nucleus_name , collision_energy, model_name))
print("Centrality : %g - %g"
% (centrality_lower_bound, centrality_upper_bound) + r"%")
print 'centrality cut on ', cut_type
if cut_type == 'total_entropy':
print 'dS/dy :', cut_value_low, '-', cut_value_upper
print "Npart: ", npart_min, '-', npart_max
print "b: ", b_min, '-', b_max, ' fm'
print '-'*80
return
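# Toy illustration of the linear interpolation performed above (values are
# made up; first column is centrality %, second the cut variable):
#   table = np.array([[0., 1000.], [5., 800.], [10., 650.]])
#   idx   = table[:, 0].searchsorted(7.5)                             # -> 2
#   frac  = (7.5 - table[idx-1, 0])/(table[idx, 0] - table[idx-1, 0])
#   cut   = table[idx-1, 1] + frac*(table[idx, 1] - table[idx-1, 1])  # -> 725.0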
def get_initial_condition_list():
"""
    Return a list of initial condition files.
"""
file_list = []
initial_type = initial_condition_control['initial_condition_type']
if initial_type == 'superMC':
nev = controlParameterList['numberOfEvents']
file_list = [afile for afile in generateSuperMCInitialConditions(nev)]
elif initial_type == 'pre-generated':
file_list = [
afile for afile in get_pre_generated_initial_conditions_list()]
file_list = sorted_nicely(file_list) # make sure files are in correct order
return(file_list)
def get_pre_generated_initial_conditions_list():
"""
    Yield the absolute paths of the pre-generated initial condition files.
"""
# set directory strings
initial_condition_dirName = initial_condition_control['pre-generated_initial_file_path']
initial_condition_path = path.join(controlParameterList['rootDir'],
initial_condition_dirName)
print 'Initial conditions path:', initial_condition_path
# yield initial conditions
file_list = glob(path.join(initial_condition_path,
initial_condition_control['pre-generated_initial_file_pattern']))
for afile in file_list:
# then yield it
yield path.join(initial_condition_path, afile)
def generateSuperMCInitialConditions(numberOfEvents):
"""
    Generate initial conditions using superMC, then yield the absolute
    path of each initial condition file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
superMCDirectory = path.join(controlParameterList['rootDir'],
superMCControl['mainDir'])
superMCDataDirectory = path.join(superMCDirectory,
superMCControl['dataDir'])
superMCExecutable = superMCControl['executable']
# clean up the data subfolder for output
cleanUpFolder(superMCDataDirectory)
# check executable
checkExistenceOfExecutable(path.join(superMCDirectory, superMCExecutable))
# set "nev=#" in superMCParameters
superMCParameters[superMCControl['numberOfEventsParameterName']] = (
numberOfEvents)
# form assignment string
assignments = formAssignmentStringFromDict(superMCParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ superMCExecutable + assignments)
# execute!
run(executableString, cwd=superMCDirectory)
# yield initial conditions
file_list = glob(path.join(superMCDataDirectory,
superMCControl['initialFiles']))
for aFile in file_list:
# then yield it
yield path.join(superMCDataDirectory, aFile)
def hydroWithInitialCondition(aFile):
"""
Perform a single hydro calculation with the given absolute path to an
initial condition. Yield the result files.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
hydroDirectory = path.join(controlParameterList['rootDir'],
hydroControl['mainDir'])
hydroICDirectory = path.join(hydroDirectory,
hydroControl['initialConditionDir'])
hydroResultsDirectory = path.join(hydroDirectory,
hydroControl['resultDir'])
hydroExecutable = hydroControl['executable']
# check executable
checkExistenceOfExecutable(path.join(hydroDirectory, hydroExecutable))
# clean up initial and results folder
cleanUpFolder(hydroICDirectory)
cleanUpFolder(hydroResultsDirectory)
# check existence of the initial conditions
if not path.exists(aFile):
raise ExecutionError("Hydro initial condition file %s not found!"
% aFile)
# storing initial condition file
if hydroControl['saveICFile']:
copy(aFile, controlParameterList['eventResultDir'])
# move initial condition to the designated folder
move(aFile, path.join(hydroICDirectory,
hydroControl['initialConditionFile']))
# form assignment string
assignments = formAssignmentStringFromDict(hydroParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ hydroExecutable + assignments)
# execute!
run(executableString, cwd=hydroDirectory)
# yield result files
worthStoring = []
for aGlob in hydroControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(hydroResultsDirectory, aGlob)))
file_list = glob(path.join(hydroResultsDirectory,
hydroControl['resultFiles']))
for aFile in file_list:
# check if this file worth storing, then copy to event result folder
if aFile in worthStoring:
copy(aFile, controlParameterList['eventResultDir'])
# yield it
yield path.join(hydroResultsDirectory, aFile)
def hydro_with_pre_equilibrium(aFile):
"""
Perform a single pre-equilibrium evolution and hydro calculation with
the given absolute path to an initial condition. Yield the result
files.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
# pre-equilibrium model
pre_equilibrium_directory = path.join(
controlParameterList['rootDir'], preEquilibriumControl['mainDir'])
pre_equilibrium_ic_directory = path.join(
pre_equilibrium_directory, preEquilibriumControl['initialConditionDir']
)
pre_equilibrium_results_directory = path.join(
pre_equilibrium_directory, preEquilibriumControl['resultDir']
% preEquilibriumParameters['taumin']
)
pre_equilibrium_executable = preEquilibriumControl['executable']
# hydro model
hydroDirectory = path.join(controlParameterList['rootDir'],
hydroControl['mainDir'])
hydroICDirectory = path.join(hydroDirectory,
hydroControl['initialConditionDir'])
hydroResultsDirectory = path.join(hydroDirectory,
hydroControl['resultDir'])
hydroExecutable = hydroControl['executable']
# check executable
checkExistenceOfExecutable(path.join(pre_equilibrium_directory,
pre_equilibrium_executable))
checkExistenceOfExecutable(path.join(hydroDirectory, hydroExecutable))
# clean up initial and results folder
cleanUpFolder(pre_equilibrium_ic_directory)
cleanUpFolder(pre_equilibrium_results_directory)
cleanUpFolder(hydroICDirectory)
cleanUpFolder(hydroResultsDirectory)
# check existence of the initial conditions
if not path.exists(aFile):
raise ExecutionError("Hydro initial condition file %s not found!"
% aFile)
# storing initial condition file
if hydroControl['saveICFile']:
copy(aFile, controlParameterList['eventResultDir'])
# first move initial condition to the pre-equilibrium folder
move(aFile, path.join(pre_equilibrium_ic_directory,
preEquilibriumControl['initialConditionFile']))
# form assignment string
assignments = formAssignmentStringFromDict(preEquilibriumParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ pre_equilibrium_executable + assignments)
# execute!
run(executableString, cwd=pre_equilibrium_directory)
# then move pre-equilibrium results to hydro folder
for aFile in glob(path.join(pre_equilibrium_results_directory,
preEquilibriumControl['resultFiles'])):
file_name = aFile.split('/')[-1].split('kln')[0] + 'kln.dat'
move(aFile, path.join(hydroICDirectory, file_name))
# form assignment string
assignments = formAssignmentStringFromDict(hydroParameters)
# form executable string
executableString = ("nice -n %d ./" % (ProcessNiceness)
+ hydroExecutable + assignments)
# execute!
run(executableString, cwd=hydroDirectory)
# yield result files
worthStoring = []
for aGlob in hydroControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(hydroResultsDirectory, aGlob)))
for aFile in glob(path.join(hydroResultsDirectory,
hydroControl['resultFiles'])):
# check if this file worth storing, then copy to event result folder
if aFile in worthStoring:
copy(aFile, controlParameterList['eventResultDir'])
# yield it
yield path.join(hydroResultsDirectory, aFile)
def iSSWithHydroResultFiles(fileList):
"""
Perform iSS calculation using the given list of hydro result files.
Return the path to the OSCAR file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSSDirectory = path.join(controlParameterList['rootDir'],
iSSControl['mainDir'])
iSSOperationDirectory = path.join(iSSDirectory, iSSControl['operationDir'])
iSSOSCARFilepath = path.join(iSSDirectory, iSSControl['OSCARFile'])
iSSExecutable = iSSControl['executable']
# check executable
checkExistenceOfExecutable(path.join(iSSDirectory, iSSExecutable))
# clean up operation folder
cleanUpFolder(iSSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSSOperationDirectory)
# move(aFile, iSSOperationDirectory)
# make sure all hadrons up to 2 GeV are calculated
#copy(path.join(iSSDirectory, 'EOS', 'chosen_particles_urqmd_v3.3+.dat'),
# path.join(iSSDirectory, 'EOS', 'chosen_particles.dat'))
# make sure to use the pdg table with tagged decay photons
copy(path.join(iSSDirectory, 'EOS', 'pdg-urqmd_v3.3+.dat'),
path.join(iSSDirectory, 'EOS', 'pdg.dat'))
# form assignment string
assignments = formAssignmentStringFromDict(iSSParameters)
# form executable string
executableString = (
"nice -n %d ./" % (ProcessNiceness) + iSSExecutable + assignments)
# execute!
run(executableString, cwd=iSSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
# return OSCAR file path
#print 'iSSOSCARFilepath =', iSSOSCARFilepath
return iSSOSCARFilepath
def iSWithResonancesWithHydroResultFiles(fileList):
"""
Perform iS calculation using the given list of hydro result files,
followed by resonance calculations and iInteSp calculations.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSDirectory = path.join(controlParameterList['rootDir'],
iSControl['mainDir'])
iSOperationDirectory = path.join(iSDirectory, iSControl['operationDir'])
iSExecutables = iSControl['executables']
iSExecutionEntry = iSControl['entryShell']
# check executable
checkExistenceOfExecutables(
[path.join(iSDirectory, aExe) for aExe in iSExecutables])
# clean up operation folder
cleanUpFolder(iSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSOperationDirectory)
# move(aFile, iSOperationDirectory)
copy(path.join(iSDirectory, 'EOS', 'chosen_particles_s95pv1.dat'),
path.join(iSDirectory, 'EOS', 'chosen_particles.dat'))
copy(path.join(iSDirectory, 'EOS', 'pdg-s95pv1_withDecayPhotons.dat'),
path.join(iSDirectory, 'EOS', 'pdg.dat'))
# execute!
run("nice -n %d bash ./" % (ProcessNiceness) + iSExecutionEntry,
cwd=iSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def write_RUN_CFWR_PBS(HCDirectory, assignments):
    # the script body is left-aligned so the generated PBS file does not
    # inherit this function's indentation (and the shebang stays on line 1)
    open(path.join(HCDirectory, "run_cfwr.pbs"), "w").write(
"""#!/usr/bin/env bash
#PBS -l walltime=48:00:00
#PBS -l mem=8gb
#PBS -j oe
#PBS -S /bin/bash
cd %s
(
    ulimit -n 1000
    ./cfwr.e %s
)
""" % (HCDirectory, assignments)
    )
def doHBTWithHydroResultFiles(fileList):
"""
Perform HoTCoffeeh calculation.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
HoTCoffeehDirectory = path.join(controlParameterList['rootDir'],
HoTCoffeehControl['mainDir'])
HoTCoffeehOperationDirectory = path.join(HoTCoffeehDirectory, HoTCoffeehControl['operationDir'])
HoTCoffeehExecutables = HoTCoffeehControl['executables']
HoTCoffeehExecutionEntry = HoTCoffeehControl['entryShell']
print 'fileList =', fileList
# check executable
checkExistenceOfExecutables(
[path.join(HoTCoffeehDirectory, aExe) for aExe in HoTCoffeehExecutables])
# clean up operation folder
cleanUpFolder(HoTCoffeehOperationDirectory)
#rmtree(HoTCoffeehOperationDirectory)
#copytree( path.commonprefix(fileList), HoTCoffeehOperationDirectory )
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, HoTCoffeehOperationDirectory)
# move(aFile, HoTCoffeehOperationDirectory)
# form assignment string
assignments = formAssignmentStringFromDict(HoTCoffeehParameters)
#runSVWR = "true"
#runCFWR = "true"
#if HoTCoffeehExecutables[0]==0:
# runSVWR = "false"
#
#if HoTCoffeehExecutables[1]==0:
# runCFWR = "false"
# execute!
#print 'Running', "nice -n %d bash ./" % (ProcessNiceness) \
# + HoTCoffeehExecutionEntry + " " + runSVWR + " " + runCFWR + " " + assignments
#run("nice -n %d bash ./" % (ProcessNiceness) \
# + HoTCoffeehExecutionEntry + " " + runSVWR + " " + runCFWR + " " + assignments, \
# cwd=HoTCoffeehDirectory)
commandToExecute = "nice -n %d bash ./" % (ProcessNiceness) \
+ HoTCoffeehExecutionEntry + " true true " + assignments
print 'Running', commandToExecute
write_RUN_CFWR_PBS(HoTCoffeehDirectory, assignments)
run(commandToExecute, cwd=HoTCoffeehDirectory)
# save some of the important result files
worthStoring = []
for aGlob in HoTCoffeehControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(HoTCoffeehOperationDirectory, aGlob)))
for aFile in glob(path.join(HoTCoffeehOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def iSSeventplaneAngleWithHydroResultFiles(fileList):
"""
Perform iSS calculation using the given list of hydro result files.
Return the path to the OSCAR file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSSDirectory = path.join(controlParameterList['rootDir'],
iSSControl['mainDir'])
iSSOperationDirectory = path.join(iSSDirectory,
iSSControl['operationDir'])
hydroH5Filepath = path.join(iSSOperationDirectory, 'JetData.h5')
iSSExecutable = iSSControl['executable']
# check executable
checkExistenceOfExecutable(path.join(iSSDirectory, iSSExecutable))
# clean up operation folder
cleanUpFolder(iSSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSSOperationDirectory)
# move(aFile, iSSOperationDirectory)
copy(path.join(iSSDirectory, 'EOS', 'chosen_particles_urqmd_v3.3+.dat'),
path.join(iSSDirectory, 'EOS', 'chosen_particles.dat'))
copy(path.join(iSSDirectory, 'EOS', 'pdg-urqmd_v3.3+.dat'),
path.join(iSSDirectory, 'EOS', 'pdg.dat'))
# form assignment string
assignments = formAssignmentStringFromDict(iSSParameters)
# form executable string
executableString = (
"nice -n %d ./" % (ProcessNiceness) + iSSExecutable + assignments)
# execute!
run(executableString, cwd=iSSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
# return hydro h5 file path
return (hydroH5Filepath,)
def iSWithResonancesWithdecayPhotonWithHydroResultFiles(fileList):
"""
Perform iS calculation using the given list of hydro result files,
followed by resonance calculations and iInteSp calculations with decay
photons.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSDirectory = path.join(controlParameterList['rootDir'],
iSControl['mainDir'])
iSOperationDirectory = path.join(iSDirectory, iSControl['operationDir'])
hydroH5Filepath = path.join(iSOperationDirectory, 'JetData.h5')
iSExecutables = iSControl['executables']
iSExecutionEntry = iSControl['entryShell']
# check executable
checkExistenceOfExecutables(
[path.join(iSDirectory, aExe) for aExe in iSExecutables])
# clean up operation folder
cleanUpFolder(iSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, iSOperationDirectory)
simulationType = controlParameterList['simulation_type']
if simulationType == "hydroEM_with_decaycocktail_with_urqmd":
# make sure all hadrons up to 2 GeV are calculated
copy(path.join(iSDirectory, 'EOS', 'chosen_particles_urqmd_v3.3+.dat'),
path.join(iSDirectory, 'EOS', 'chosen_particles.dat'))
# make sure to use the pdg table with tagged decay photons
copy(path.join(iSDirectory, 'EOS',
'pdg-urqmd_v3.3+_withDecayPhotons.dat'),
path.join(iSDirectory, 'EOS', 'pdg.dat'))
else:
# make sure all hadrons up to 2 GeV are calculated
copy(path.join(iSDirectory, 'EOS', 'chosen_particles_s95pv1.dat'),
path.join(iSDirectory, 'EOS', 'chosen_particles.dat'))
# make sure to use the pdg table with tagged decay photons
copy(path.join(iSDirectory, 'EOS', 'pdg-s95pv1_withDecayPhotons.dat'),
path.join(iSDirectory, 'EOS', 'pdg.dat'))
# execute!
run("nice -n %d bash ./" % (ProcessNiceness) + iSExecutionEntry,
cwd=iSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
# return hydro h5 file path
return (hydroH5Filepath,)
def photonEmissionWithHydroResultFiles(fileList):
"""
Perform thermal photon calculation using the given list of hydro
result files.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
photonEmDirectory = path.join(controlParameterList['rootDir'],
photonEmissionControl['mainDir'])
photonEmOperationDirectory = path.join(
photonEmDirectory, photonEmissionControl['operationDir'])
photonEmExecutable = photonEmissionControl['executable']
# check executable
checkExistenceOfExecutable(path.join(photonEmDirectory, photonEmExecutable))
# clean up results folder
cleanUpFolder(photonEmOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
copy(aFile, photonEmOperationDirectory)
# move(aFile, photonEmOperationDirectory)
# form assignment string
assignments = formAssignmentStringFromDict(photonEmissionParameters)
# form executable string
executableString = (
"nice -n %d ./" % (ProcessNiceness) + photonEmExecutable + assignments)
# execute!
run(executableString, cwd=photonEmDirectory)
# save some of the important result files
worthStoring = []
for aGlob in photonEmissionControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(photonEmOperationDirectory, aGlob)))
for aFile in glob(path.join(photonEmOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def osc2uFromOSCARFile(OSCARFilePath):
"""
Execute osc2u program using the given path to the OSCAR file. Return the
path to the output file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
osc2uDirectory = path.join(controlParameterList['rootDir'], osc2uControl['mainDir'])
osc2uOutputFilePath = path.join(osc2uDirectory, osc2uControl['outputFilename'])
osc2uExecutable = osc2uControl['executable']
# check executable
checkExistenceOfExecutable(path.join(osc2uDirectory, osc2uExecutable))
# remove output file if already exists
if path.exists(osc2uOutputFilePath):
remove(osc2uOutputFilePath)
# check existence of the OSCAR file then execute
if path.exists(OSCARFilePath):
run("nice -n %d ./" % (ProcessNiceness) + osc2uExecutable + " < " + OSCARFilePath, cwd=osc2uDirectory)
# save OSCAR file
if osc2uControl['saveOSCAR']:
move(OSCARFilePath, controlParameterList['eventResultDir'])
# return the output file path
return osc2uOutputFilePath
def urqmdFromOsc2uOutputFile(osc2uFilePath):
"""
Perform urqmd using osc2u output file. Return the path to the output
file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
urqmdDirectory = path.join(controlParameterList['rootDir'], urqmdControl['mainDir'])
urqmdOutputFilePath = path.join(urqmdDirectory, urqmdControl['outputFilename'])
urqmdExecutable = urqmdControl['executable']
urqmdExecutionEntry = urqmdControl['entryShell']
# check executable
checkExistenceOfExecutable(path.join(urqmdDirectory, urqmdExecutable))
# remove output file if already exists
if path.exists(urqmdOutputFilePath):
remove(urqmdOutputFilePath)
# clean up IC
urqmdIC = path.join(urqmdDirectory, urqmdControl['ICFilename'])
if path.exists(urqmdIC):
remove(urqmdIC)
# check existence of the osc2u output, move it then execute urqmd
if path.exists(osc2uFilePath):
move(osc2uFilePath, urqmdIC)
run("nice -n %d bash ./" % (ProcessNiceness) + urqmdExecutionEntry, cwd=urqmdDirectory)
# save output file
if urqmdControl['saveOutputFile']:
copy(urqmdOutputFilePath, controlParameterList['eventResultDir'])
# return the output file path
return urqmdOutputFilePath
def binUrqmdResultFiles(urqmdOutputFile):
"""
Bin the output from URQMD to generate flows etc.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
binUDirectory = path.join(controlParameterList['rootDir'], binUtilitiesControl['mainDir'])
binUOperationDirectory = path.join(binUDirectory, binUtilitiesControl['operationDir'])
binUExecutable = binUtilitiesControl['executable']
# clean up operation folder
cleanUpFolder(binUOperationDirectory)
# check existence urqmd output file
if not path.exists(urqmdOutputFile):
raise ExecutionError("URQMD output file %s not found!" % urqmdOutputFile)
# form executable string
executableString = "nice -n %d python ./" % (ProcessNiceness) + binUExecutable + " " + urqmdOutputFile
# execute!
run(executableString, cwd=binUDirectory)
# save some of the important result files
worthStoring = []
for aGlob in binUtilitiesControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(binUOperationDirectory, aGlob)))
for aFile in glob(path.join(binUOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def collectEbeResultsToDatabaseFrom(folder):
"""
Collect the mostly used results from subfolders that contain hydro
results into a database, including ecc and flow etc.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
collectorDirectory = path.join(controlParameterList['rootDir'],
EbeCollectorControl['mainDir'])
# for executable string
simulationType = controlParameterList['simulation_type']
if simulationType == 'hybrid':
collectorExecutable = EbeCollectorControl['executable_hybrid']
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %g %s %s" % (
folder, 1.0/(iSSParameters['number_of_repeated_sampling']
*(iSSParameters["y_RB"] - iSSParameters["y_LB"])),
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydro':
collectorExecutable = EbeCollectorControl['executable_hydro']
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM':
collectorExecutable = EbeCollectorControl['executable_hydroEM']
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM_with_decaycocktail':
collectorExecutable = (
EbeCollectorControl['executable_hydroEM_with_decaycocktail'])
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM_with_decaycocktail_with_urqmd':
collectorExecutable = (
EbeCollectorControl['executable_hydroEM_with_decaycocktail'])
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable +
" %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
elif simulationType == 'hydroEM_preEquilibrium':
collectorExecutable = (
EbeCollectorControl['executable_hydroEM_with_decaycocktail'])
executableString = (
"nice -n %d python ./" % (ProcessNiceness) + collectorExecutable
+ " %s %s %s" % (folder,
EbeCollectorParameters['subfolderPattern'],
EbeCollectorParameters['databaseFilename']))
# execute
run(executableString, cwd=collectorDirectory)
def formAssignmentStringFromDict(aDict):
"""
Generate a parameter-equals-value string from the given dictionary. The
generated string has a leading blank.
"""
result = ""
for aParameter in aDict.keys():
result += " {}={}".format(aParameter, aDict[aParameter])
return result
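# example: formAssignmentStringFromDict({'ecm': 2760, 'nev': 1}) returns
# ' ecm=2760 nev=1' (key order is not guaranteed for a plain dict)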
def cleanUpFolder(aDir):
""" Delete all data files in the given directory. """
if path.exists(aDir):
try:
run("rm -rf *", cwd=aDir, echo=False)
except OSError:
            pass # very likely the folder is already empty
else:
makedirs(aDir)
def checkExistenceOfExecutable(executableFilename):
""" Check the existence of the executable file, and compile if not. """
if not path.exists(executableFilename):
# build then clean
exec_path, exec_filename = path.split(executableFilename)
run("make", cwd=exec_path)
# if still cannot find the executable
if not path.exists(executableFilename):
raise ExecutionError(
"Cannot generate executable %s!" % executableFilename)
def checkExistenceOfExecutables(executableFilenames):
"""
Check the existences of the executable files, and compile them if not.
Will call the checkExistenceOfExecutable function.
"""
for executableFilename in executableFilenames:
checkExistenceOfExecutable(executableFilename)
def run(command, cwd=None, echo=True):
    """ Invoke a command from terminal and wait for it to stop. """
    if cwd is None:
        cwd = getcwd()
if echo:
print("-"*80)
print("In "+cwd)
print("Executing command: "+command)
print("-"*80)
stdout.flush()
return call(command, shell=True, cwd=cwd)
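# The launch pattern used throughout this driver, in miniature (the
# executable name and path below are placeholders):
#   assignments = formAssignmentStringFromDict({'nev': 1})
#   run("nice -n %d ./%s%s" % (10, "superMC.e", assignments),
#       cwd="/path/to/superMC")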
def sequentialEventDriverShell():
"""
Perform a sequential calculations for a given number of events.
Parameters are read from dictionaries given by allParameterList.
"""
try:
# read parameters
readInParameters()
translate_centrality_cut()
# create result folder
resultDir = controlParameterList['resultDir']
        print 'resultDir =', resultDir
if path.exists(resultDir):
rmtree(resultDir)
makedirs(resultDir)
# get simulation type
simulationType = controlParameterList['simulation_type']
event_id = 0
# generate initial conditions then loop over initial conditions
initial_condition_list = get_initial_condition_list()
        print 'initial_condition_list =', initial_condition_list
nev = len(initial_condition_list)
# print current progress to terminal
stdout.write("PROGRESS: %d events out of %d finished.\n"
% (event_id, nev))
stdout.flush()
#print initial_condition_list
# loop over initial conditions
for aInitialConditionFile in initial_condition_list:
event_id += 1
eventResultDir = path.join(resultDir,
controlParameterList['eventResultDirPattern'] % event_id)
controlParameterList['eventResultDir'] = eventResultDir
if path.exists(eventResultDir):
rmtree(eventResultDir)
makedirs(eventResultDir)
# print current progress to terminal
print("Starting event %d..." % event_id)
initial_type = initial_condition_control['initial_condition_type']
if initial_type == 'superMC': # initial conditions from superMC
if superMCControl['saveICFile']:
initial_id = int(
aInitialConditionFile.split('/')[-1].split('_')[2])
superMCDataDirectory = path.join(
controlParameterList['rootDir'],
superMCControl['mainDir'], superMCControl['dataDir'])
file_list = glob(path.join(superMCDataDirectory,
superMCControl['dataFiles'] % initial_id))
for aFile in file_list:
copy(aFile, controlParameterList['eventResultDir'])
elif initial_type == 'pre-generated':
# initial conditions from pre-generated files
copy(aInitialConditionFile, controlParameterList['eventResultDir'])
print 'Associating ' + aInitialConditionFile + ' with event ' + str(event_id)
print controlParameterList['rootDir']
if simulationType == 'hydroEM_preEquilibrium':
# perform hydro calculations with pre-equilibrium evolution
# and get a list of all the result filenames
hydroResultFiles = [aFile for aFile in
                    hydro_with_pre_equilibrium(aInitialConditionFile)]
else:
# perform hydro calculations and get a list of all the result
# filenames
hydroResultFiles = [aFile for aFile in hydroWithInitialCondition(aInitialConditionFile)]
            print controlParameterList['rootDir']
            print 'simulationType =', simulationType
            print 'HoTCoffeehControl[runHoTCoffeeh] =', HoTCoffeehControl['runHoTCoffeeh']
if simulationType != 'hybrid' and HoTCoffeehControl['runHoTCoffeeh']:
print('Doing HBT!')
doHBTWithHydroResultFiles(hydroResultFiles)
# fork simulation type here
if simulationType == 'hybrid':
# perform iSS calculation and return the path to the OSCAR file
OSCARFilePath = iSSWithHydroResultFiles(hydroResultFiles)
if urqmdControl['run_UrQMD']:
# perform osc2u
osc2uOutputFilePath = osc2uFromOSCARFile(OSCARFilePath)
# now urqmd
urqmdOutputFilePath = urqmdFromOsc2uOutputFile(
osc2uOutputFilePath)
# copy and concatenate final results from all hydro events
# into one file
combinedUrqmdFile = path.join(
controlParameterList['resultDir'],
controlParameterList['combinedUrqmdFile'])
open(combinedUrqmdFile, 'a').writelines(
open(urqmdOutputFilePath).readlines())
# bin the combined result file to get flows
binUrqmdResultFiles(urqmdOutputFilePath)
# delete the huge final UrQMD combined file
remove(urqmdOutputFilePath)
elif simulationType == 'hydro':
# perform iS calculation and resonance decays
print controlParameterList['rootDir']
iSWithResonancesWithHydroResultFiles(hydroResultFiles)
print controlParameterList['rootDir']
elif simulationType == 'hydroEM':
h5file = iSSeventplaneAngleWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
elif simulationType == 'hydroEM_with_decaycocktail':
h5file = iSWithResonancesWithdecayPhotonWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
elif simulationType == 'hydroEM_preEquilibrium':
# perform iS calculation and resonance decays
h5file = iSWithResonancesWithdecayPhotonWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
elif simulationType == 'hydroEM_with_decaycocktail_with_urqmd':
h5file = iSWithResonancesWithdecayPhotonWithHydroResultFiles(
hydroResultFiles)
# perform EM radiation calculation
photonEmissionWithHydroResultFiles(h5file)
# perform iSS calculation and return the path to the OSCAR file
OSCARFilePath = iSSWithHydroResultFiles(hydroResultFiles)
# perform osc2u
osc2uOutputFilePath = osc2uFromOSCARFile(OSCARFilePath)
# now urqmd
urqmdOutputFilePath = urqmdFromOsc2uOutputFile(
osc2uOutputFilePath)
tarfile_name = (
controlParameterList['eventResultDir'].split('/')[-1])
call("tar -cf %s.tar %s" % (tarfile_name, tarfile_name),
shell=True, cwd=resultDir)
call("rm -fr %s" % (tarfile_name,), shell=True, cwd=resultDir)
# print current progress to terminal
stdout.write("PROGRESS: %d events out of %d finished.\n"
% (event_id, nev))
stdout.flush()
# collect mostly used data into a database
#collectEbeResultsToDatabaseFrom(resultDir)
except ExecutionError as e:
print("Errors encountered during execution, aborting.")
raise
finally:
print("Thank you for using. Zhi Qiu, 2013-02")
if __name__ == "__main__":
sequentialEventDriverShell()
|
We are honored to have been chosen by the SourceForge Community to be “Project of the Month” for October 2016.
For our October “Community Choice” Project of the Month, the community elected Nagios Core, a powerful network monitoring application offering enterprise-class host, server, application, and network monitoring tools. Designed to be fast, flexible, and rock-solid stable, Nagios runs on *NIX hosts and can monitor Windows, Linux/Unix/BSD, Netware, and network devices.
|
#! /usr/bin/env python
import sys
import os
import logging
import argparse
basepath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.dirname(basepath))
import hiyapyco
class LoggingAction(argparse.Action):
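    """argparse action that applies the chosen level to the root logger as soon as the option is parsed."""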
def __call__(self, parser, namespace, values, option_string=None):
# print '%r %r %r' % (namespace, values, option_string)
logger = logging.getLogger()
logger.setLevel(values)
setattr(namespace, self.dest, values)
logger = logging.getLogger()
logging.basicConfig(
level=logging.WARN,
format='%(levelname)s\t[%(name)s] %(funcName)s: %(message)s'
)
parser = argparse.ArgumentParser()
parser.add_argument(
'-l', '--loglevel',
help='set loglevel',
type=str,
# logging._levelNames existed only in Python 2; list the standard level names explicitly
choices=['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'WARN', 'ERROR', 'CRITICAL'],
action=LoggingAction
)
parser.add_argument(
'-y', '--usedefaultyamlloader', dest='usedefaultyamlloader',
action='store_true', default=False,
help='use the default yaml loader instead of the hiyapyco loader'
)
parser.add_argument('-f', '--file', type=str, nargs='+', help='yaml file(s) to parse')
args = parser.parse_args()
if args.loglevel is None:
logging.disable(logging.CRITICAL)
# FIXME: in fact this should be the job of argparse
if args.file is None or len(args.file) == 0:
raise Exception('please provide at least one yaml file!')
for mergemethod in hiyapyco.METHODS.keys():
print('='*10, 'method=', mergemethod, '='*10)
conf = hiyapyco.load(
*args.file,
method=hiyapyco.METHODS[mergemethod],
interpolate=True,
failonmissingfiles=True,
usedefaultyamlloader=args.usedefaultyamlloader
)
print(conf)
print('-'*10, 'YAML', '-'*10)
print(hiyapyco.dump(conf))
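# with a single input file every merge method yields the same result, so one pass is enough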
if len(args.file) < 2:
break
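# Example invocation (hypothetical file names; later files override earlier ones):
#   python testscript.py -l INFO -f base.yaml override.yaml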
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 smartindent nu
|
A Stroll Down Memory Lane | SoHo Memory Project.
Every once in a while, I like to dip into our SoHo Stories folder to gather and share some of the wonderful memories of old SoHo readers have submitted to The SoHo Memory Project (to share your SoHo story, click on the "Share Your SoHo Stories" link above). It’s fun to see where there are overlaps or if there are themes that run through them. In this batch, a lot of people remember finding useful things on the street and while dumpster diving. And the loading docks, everyone remembers them. Quite a few also mention Fanelli’s and other SoHo gathering spots. I especially like the very funny story about the chickens!
Amber, who was in SoHo from the 1970s through the 1990s, misses a lot about old SoHo: The stuff you would find in the dumpsters. My mother found a spool of gold and silver card stock, and I would make full-body head dresses out of it and wear them around the neighborhood and to pizza at that place with a garden on 6th Ave that became a Duane Reade. The carrot cake at Food, that spicy pepper smell around Broome or Grand. The constant creative activity – the neighborhood was so sparsely populated, and it was just "us" and the guy at Zelf tool rental and the nice people at Fanelli's. The sound of the trucks, the Neon Gallery, and the Broken Kilometer, and of course the Duane Hansons.
Richard (b. 1946) lived on Grand Street from 1975-1990: Sohozat, DeRoma's, Broome Street Bar, Magoos, The Cupping Room, The Performing Garage, The Canal Street Flea Market, O.K. Harris Art Gallery, Lucky Strike, Watts Happen Inn, Fanelli's, Vesuvio Bakery, The Spring Street Bar, Smokestacks Lightning, The Nancy Whiskey Pub, Leo Castelli Gallery, The Earth Room, The Ear Inn, cobblestone streets, blackouts and blizzards. Searching for wood on the streets in January of 1976 to bring home and burn in my pot-bellied stove. Being able to make art and then display it in the window of my studio. The Bells of the Church of Saint Alphonsus. Hornblower Antiques. Hanging out on the stoop of my studio and talking to the old long-time Italian immigrant neighbors. The sound of the Grand Street bus going East. The sunlight coming through the front windows of my studio.
Lucien (b. 1966) grew up in SoHo: Playing at playground east of Silver Towers while my parents climbed the fence into the field at the NE corner to play softball. Judson Health Clinic. Mary's Candy store on Thompson. Dumpster diving! Running along Greene or Wooster along the tops of loading docks and other building structures playing "don't touch the ground" with my brother. Sword fighting with cardboard tubes left over from bolts of fabric. Climbing in and on the bread delivery trucks at Wooster? and Prince.
Nicholas (b. 1967) was also a kid in old SoHo: My mother sent me to the bodega on West Broadway and Prince street to get her beer and cigarettes when I was 9. One of the guys who worked at the bodega sent me home to get a note from my mother. When I returned with the note he put the 6-pack in a paper bag and walked me half way home before handing me the bag. I think he was nervous about breaking the law. That kind of thing would be impossible today.
Kaleb (b. 1968), who lived in SoHo from 1977-1991, remembers: I woke up one morning to the sound of chickens. I thought I was dreaming. I got up and looked out my window. A truck carrying cages of chickens heading to the slaughter house on Mulberry and Prince had taken the corner on Lafayette too fast and cages of chickens spilled across the street, many cracking open. Dozens of chickens were wandering around dazed, clucking, confused.
Sybil (b. 1954) lived in SoHo from 1977-1999: During my first summer there, 1977, there was the big Blackout in the Northeast. I remember sitting on my fire escape around three in the morning and seeing more stars than I'd ever seen in NYC. Mike Fanelli, asking the local tradesmen/artists, "are you working", and not charging folks if they were out of work. Then, there was the guy who, around 10 or 11 at night, every so often, would come riding across Prince Street on a bicycle, from the Bowery, towards Soho, singing opera at the top of his lungs, with his dog running alongside him. I could hear him from blocks away, before he appeared outside my window. If I was in bed, I'd get up to run to the front room window, to see him. It made me feel joyful to hear and see him.
Vered (b. 1947) has lived in SoHo for close to 50 years: Meeting talented people from all over the world and from places in the United States that I had never heard of. They came, every year, the best and the brightest from rural, agricultural and cosmopolitan places and they all ended up here trying to build old lofts into studios and to make themselves famous. Andy Warhol walked and hung out among us, Henry Miller too, Blondie sang at Arturos, Phillip Glass bought my piano when I needed rent money.
Thornton (b. 1936) who has also been in SoHo for half a century remembers: The buildings, the architecture that is so compelling both inside and out. It was a soulful place filled with artist of diverse background, drawn here from every part of the US and abroad to try and make art of every kind; jazz, poetry, sculpture, dance, painting, and photography etc. The energy was amazing and unique because we were here in one area while the rest of the city for the most part ignored us.
Thanks to all of you who submitted SoHo memories. Please keep adding to our collection (click on "Share Your Stories" above). Memory is ephemeral, so let's catch as many as we can before they slip away!
|
import _mysql
import os
import cgi
import pickle  # used to serialize per-board ban lists
import time    # used for the rebuild timers
from database import *
from settings import Settings
from framework import *
from formatting import *
from template import *
from post import *
def manage(self, path_split):
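    """Handle the staff interface: log-in, cookie validation, and dispatch of the manage/* sub-pages (rebuild, staff, bans, boards, logs, ...)."""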
page = ''
validated = False
administrator = False
skiptemplate = False
try:
if self.formdata['pyib_username'] and self.formdata['pyib_password']:
password = getMD5(self.formdata['pyib_password'])
valid_account = FetchOne("SELECT * FROM `staff` WHERE `username` = '" + _mysql.escape_string(self.formdata['pyib_username']) + "' AND `password` = '" + _mysql.escape_string(password) + "' LIMIT 1")
if valid_account:
setCookie(self, 'pyib_manage', self.formdata['pyib_username'] + ':' + valid_account['password'], domain='THIS')
setCookie(self, 'pyib_staff', 'yes')
UpdateDb('DELETE FROM `logs` WHERE `timestamp` < ' + str(timestamp() - 604800)) # one week
else:
page += 'Incorrect username/password.<hr>'
except:
pass
try:
manage_cookie = self._cookies['pyib_manage'].value
if manage_cookie != '':
username, password = manage_cookie.split(':')
staff_account = FetchOne("SELECT * FROM `staff` WHERE `username` = '" + _mysql.escape_string(username) + "' AND `password` = '" + _mysql.escape_string(password) + "' LIMIT 1")
if staff_account:
validated = True
if staff_account['rights'] == '0' or staff_account['rights'] == '1':
administrator = True
UpdateDb('UPDATE `staff` SET `lastactive` = ' + str(timestamp()) + ' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
except:
pass
if not validated:
page += """<div style="text-align: center;">
<form action=""" + '"' + Settings.CGI_URL + """manage" method="post">
<label for="username">Username</label> <input type="text" name="pyib_username"><br>
<label for="password">Password</label> <input type="password" name="pyib_password"><br>
<label for="submit"> </label> <input type="submit" name="submit" value="Log in">
</form>"""
else:
if len(path_split) > 2:
if path_split[2] == 'rebuild':
if not administrator:
return
try:
board_dir = path_split[3]
except:
board_dir = ''
if board_dir == '':
page += 'Please click on the board you wish to rebuild:<br><br><a href="' + Settings.CGI_URL + 'manage/rebuild/!ALL"><b>Rebuild all boards</b></a><br>'
page += boardlist('rebuild')
else:
if board_dir == '!ALL':
t1 = time.time()
boards = FetchAll('SELECT `dir` FROM `boards`')
for board in boards:
board = setBoard(board['dir'])
regenerateBoard()
page += 'Rebuilt all boards in ' + timeTaken(t1, time.time()) + ' seconds'
logAction(staff_account['username'], 'Rebuilt all boards')
else:
t1 = time.time()
board = setBoard(board_dir)
regenerateBoard()
page += 'Rebuilt /' + board['dir'] + '/ in ' + timeTaken(t1, time.time()) + ' seconds'
logAction(staff_account['username'], 'Rebuilt /' + board['dir'] + '/')
elif path_split[2] == 'rebuildnameblocks':
board_dir = ''
try:
board_dir = path_split[3]
except:
pass
if board_dir == '':
try:
board_dir = self.formdata['dir']
except:
pass
if board_dir != '':
t1 = time.time()
board = setBoard(board_dir)
posts = FetchAll('SELECT `id`, `name`, `tripcode`, `email`, `timestamp` FROM `posts` WHERE `boardid` = ' + board['id'])
for post in posts:
nameblock = nameBlock(post['name'], post['tripcode'], post['email'], formatTimestamp(post['timestamp']))
UpdateDb('UPDATE `posts` SET `nameblock` = \'' + _mysql.escape_string(nameblock) + '\' WHERE `id` = ' + post['id'] + ' AND `boardid` = ' + board['id'] + ' LIMIT 1')
page += 'Rebuilt name blocks for /' + board['dir'] + '/ in ' + timeTaken(t1, time.time()) + ' seconds'
logAction(staff_account['username'], 'Rebuilt /' + board['dir'] + '/')
elif path_split[2] == 'modbrowse':
board_dir = ''
thread_id = 0
try:
board_dir = path_split[3]
thread_id = path_split[4]
except:
pass
if board_dir == '':
try:
board_dir = self.formdata['dir']
thread_id = self.formdata['postid']
except:
pass
if board_dir == '':
page += """<div style="text-align: center;">
<form action=""" + '"' + Settings.CGI_URL + """manage/modbrowse" method="post">
<label for="dir">Board</label> <select name="dir">"""
boards = FetchAll('SELECT * FROM `boards` ORDER BY `dir`')
for board in boards:
page += '<option value="' + board['dir'] + '">/' + board['dir'] + '/ - ' + board['name'] + '</option>'
page += '</select><br>' + \
'<label for="postid">Thread ID</label> <input type="text" name="postid"><br>' \
'<label for="submit"> </label> <input type="submit" name="submit" value="Modbrowse">' \
'</form></div>'
else:
skiptemplate = True
Settings._.MODBROWSE = True
board = setBoard(board_dir)
self.output += threadPage(thread_id)
elif path_split[2] == 'staff':
if staff_account['rights'] != '0':
return
action_taken = False
if len(path_split) > 3:
if path_split[3] == 'add' or path_split[3] == 'edit':
member = None
member_username = ''
member_rights = '2'
action = 'add'  # fall back to the add form if an edit target is missing
if path_split[3] == 'edit':
if len(path_split) > 4:
member = FetchOne('SELECT * FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
if member:
member_username = member['username']
member_rights = member['rights']
action = 'edit/' + member['id']
try:
if self.formdata['username'] != '':
if self.formdata['rights'] in ['0', '1', '2']:
action_taken = True
if not ':' in self.formdata['username']:
UpdateDb("UPDATE `staff` SET `username` = '" + _mysql.escape_string(self.formdata['username']) + "', `rights` = " + self.formdata['rights'] + " WHERE `id` = " + member['id'] + " LIMIT 1")
page += 'Staff member updated.'
logAction(staff_account['username'], 'Updated staff account for ' + self.formdata['username'])
else:
page += 'The character : can not be used in usernames.'
except:
pass
else:
action = 'add'
try:
if self.formdata['username'] != '' and self.formdata['password'] != '':
username_taken = FetchOne('SELECT * FROM `staff` WHERE `username` = \'' + _mysql.escape_string(self.formdata['username']) + '\' LIMIT 1')
if not username_taken:
if self.formdata['rights'] in ['0', '1', '2']:
action_taken = True
if not ':' in self.formdata['username']:
password = getMD5(self.formdata['password'])
InsertDb("INSERT INTO `staff` (`username`, `password`, `added`, `rights`) VALUES ('" + _mysql.escape_string(self.formdata['username']) + "', '" + _mysql.escape_string(password) + "', " + str(timestamp()) + ", " + self.formdata['rights'] + ")")
page += 'Staff member added.'
logAction(staff_account['username'], 'Added staff account for ' + self.formdata['username'])
else:
page += 'The character : can not be used in usernames.'
else:
action_taken = True
page += 'That username is already in use.'
except:
pass
if not action_taken:
action_taken = True
page += '<form action="' + Settings.CGI_URL + 'manage/staff/' + action + '" method="post">' + \
'<label for="username">Username</label> <input type="text" name="username" value="' + member_username + '"><br>'
if not member:
page += '<label for="password">Password</label> <input type="password" name="password"><br>'
page += '<label for="rights">Rights</label> <select name="rights"><option value="2"'
if member_rights == '2':
page += ' selected'
page += '>Moderator</option><option value="1"'
if member_rights == '1':
page += ' selected'
page += '>Administrator</option><option value="0"'
if member_rights == '0':
page += ' selected'
page += '>Super administrator</option></select><br>' + \
'<label for="submit"> </label> <input type="submit" name="submit" value="'
if path_split[3] == 'add':
page += 'Add'
else:
page += 'Edit'
page += '">' + \
'</form>'
elif path_split[3] == 'delete':
action_taken = True
page += '<a href="' + Settings.CGI_URL + 'manage/staff/delete_confirmed/' + path_split[4] + '">Click here to confirm the deletion of that staff member</a>'
elif path_split[3] == 'delete_confirmed':
try:
action_taken = True
member = FetchOne('SELECT `username` FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
if member:
UpdateDb('DELETE FROM `staff` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
page += 'Staff member deleted.'
logAction(staff_account['username'], 'Deleted staff account for ' + member['username'])
else:
page += 'Unable to locate a staff account with that ID.'
except:
pass
if not action_taken:
page += '<a href="' + Settings.CGI_URL + 'manage/staff/add">Add new</a><br>' + \
'<table border="1"><tr><th>ID</th><th>Username</th><th>Rights</th><th>Last Active</th><th> </th></tr>'
staff = FetchAll('SELECT * FROM `staff` ORDER BY `rights`')
for member in staff:
page += '<tr><td>' + member['id'] + '</td><td>' + member['username'] + '</td><td>'
if member['rights'] == '0':
page += 'Super administrator'
elif member['rights'] == '1':
page += 'Administrator'
elif member['rights'] == '2':
page += 'Moderator'
page += '</td><td>'
if member['lastactive'] != '0':
page += formatTimestamp(member['lastactive'])
else:
page += 'Never'
page += '</td><td><a href="' + Settings.CGI_URL + 'manage/staff/edit/' + member['id'] + '">edit</a> <a href="' + Settings.CGI_URL + 'manage/staff/delete/' + member['id'] + '">delete</a></td></tr>'
page += '</table>'
elif path_split[2] == 'delete':
do_ban = False
try:
if self.formdata['ban'] == 'true':
do_ban = True
except:
pass
board = setBoard(path_split[3])
post = FetchOne('SELECT `parentid`, `ip` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
if not post:
page += 'Unable to locate a post with that ID.'
else:
deletePost(path_split[4])
if post['parentid'] != '0':
threadUpdated(post['parentid'])
else:
regenerateFrontPages()
page += 'Post successfully deleted.'
logAction(staff_account['username'], 'Deleted post /' + path_split[3] + '/' + path_split[4])
if do_ban:
page += '<br>Redirecting to ban page...<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage/ban/' + post['ip'] + '">'
elif path_split[2] == 'ban':
if len(path_split) > 4:
board = setBoard(path_split[3])
post = FetchOne('SELECT `ip` FROM `posts` WHERE `boardid` = ' + board['id'] + ' AND `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1')
if not post:
page += 'Unable to locate a post with that ID.'
else:
page += '<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage/ban/' + post['ip'] + '">'
else:
if path_split[3] == '':
try:
ip = self.formdata['ip']
except:
ip = ''
else:
ip = path_split[3]
if ip != '':
try:
reason = self.formdata['reason']
except:
reason = None
if reason is not None:
ban = FetchOne('SELECT `ip` FROM `bans` WHERE `ip` = \'' + _mysql.escape_string(ip) + '\' AND `where` = \'\' AND `until` = 0 LIMIT 1')
if not ban:
if self.formdata['seconds'] != '0':
until = str(timestamp() + int(self.formdata['seconds']))
else:
until = '0'
where = ''
if 'board_all' not in self.formdata.keys():
where = []
boards = FetchAll('SELECT `dir` FROM `boards`')
for board in boards:
keyname = 'board_' + board['dir']
if keyname in self.formdata.keys():
if self.formdata[keyname] == "1":
where.append(board['dir'])
if len(where) > 0:
where = pickle.dumps(where)
else:
self.error("You must select where the ban shall be placed")
return
if 'edit' in self.formdata.keys():
UpdateDb("DELETE FROM `bans` WHERE `id` = '" + _mysql.escape_string(self.formdata['edit']) + "' LIMIT 1")
InsertDb("INSERT INTO `bans` (`ip`, `where`, `added`, `until`, `staff`, `reason`, `note`) VALUES ('" + _mysql.escape_string(ip) + "', '" + _mysql.escape_string(where) + "', " + str(timestamp()) + ", " + until + ", '" + _mysql.escape_string(staff_account['username']) + "', '" + _mysql.escape_string(self.formdata['reason']) + "', '" + _mysql.escape_string(self.formdata['note']) + "')")
if 'edit' in self.formdata.keys():
page += 'Ban successfully edited.'
action = 'Edited ban for ' + ip
else:
page += 'Ban successfully placed.'
action = 'Banned ' + ip
if until != '0':
action += ' until ' + formatTimestamp(until)
else:
action += ' permanently'
logAction(staff_account['username'], action)
else:
page += 'There is already a global, permanent ban in place for that IP.'
else:
startvalues = {'where': [],
'reason': '',
'note': '',
'seconds': '0'}
edit_id = 0
if 'edit' in self.formdata.keys():
edit_id = self.formdata['edit']
ban = FetchOne("SELECT * FROM `bans` WHERE `id` = '" + _mysql.escape_string(edit_id) + "'")
if ban:
if ban['where'] == '':
where = ''
else:
where = pickle.loads(ban['where'])
if ban['until'] == '0':
until = 0
else:
until = int(ban['until']) - timestamp()
startvalues = {'where': where,
'reason': ban['reason'],
'note': ban['note'],
'seconds': str(until)}
else:
edit_id = 0
page += '<form action="' + Settings.CGI_URL + 'manage/ban/' + ip + '" name="banform" method="post">' + \
'<label>Board(s)</label> <ul>' + \
'<li><input type="checkbox" name="board_all" value="1"'
if startvalues['where'] == '':
page += ' checked'
page += '> <b>All boards</b><br><i>or</i></li>'
boards = FetchAll('SELECT `name`, `dir` FROM `boards` ORDER BY `dir`')
for board in boards:
page += '<li><input type="checkbox" name="board_' + board['dir'] + '" value="1"'
if board['dir'] in startvalues['where']:
page += ' checked'
page += '> ' + board['name'] + '</li>'
page += '</ul>'
if edit_id > 0:
page += '<input type="hidden" name="edit" value="' + edit_id + '">'
page += '<label for="reason">Reason</label> <input type="text" name="reason" value="' + startvalues['reason'] + '"><br>' + \
'<label for="note">Staff note</label> <input type="text" name="note" value="' + startvalues['note'] + '"><br>' + \
'<label for="seconds">Expire in #Seconds</label> <input type="text" name="seconds" value="' + startvalues['seconds'] + '"> <a href="#" onclick="document.banform.seconds.value=\'0\';return false;">no expiration</a> <a href="#" onclick="document.banform.seconds.value=\'3600\';return false;">1hr</a> <a href="#" onclick="document.banform.seconds.value=\'604800\';return false;">1w</a> <a href="#" onclick="document.banform.seconds.value=\'1209600\';return false;">2w</a> <a href="#" onclick="document.banform.seconds.value=\'2592000\';return false;">30d</a> <a href="#" onclick="document.banform.seconds.value=\'31536000\';return false;">1yr</a><br>' + \
'<label for="submit"> </label> <input type="submit" value="Place Ban">' + \
'</form>'
elif path_split[2] == 'bans':
if len(path_split) > 4:
if path_split[3] == 'delete':
ip = FetchOne('SELECT `ip` FROM `bans` WHERE `id` = \'' + _mysql.escape_string(path_split[4]) + '\' LIMIT 1', 0)[0]
if ip != '':
UpdateDb('DELETE FROM `bans` WHERE `id` = ' + _mysql.escape_string(path_split[4]) + ' LIMIT 1')
page += 'Ban successfully deleted.'
logAction(staff_account['username'], 'Deleted ban for ' + ip)
else:
page += 'There was a problem while deleting that ban. It may have already been removed, or recently expired.'
bans = FetchAll('SELECT * FROM `bans` ORDER BY `added` DESC')
page += '<form action="' + Settings.CGI_URL + 'manage/ban/" name="banform" method="post">' + \
'<label for="ip">IP address</label> <input type="text" name="ip"><br>' + \
'<label for="submit"> </label> <input type="submit" value="Proceed to ban form">' + \
'</form><br>'
if bans:
page += '<table border="1"><tr><th>IP Address</th><th>Boards</th><th>Added</th><th>Expires</th><th>Placed by</th><th>Reason</th><th>Staff note</th><th> </th></tr>'
for ban in bans:
page += '<tr><td>' + ban['ip'] + '</td><td>'
if ban['where'] == '':
page += 'All boards'
else:
where = pickle.loads(ban['where'])
if len(where) > 1:
page += '/' + '/, /'.join(where) + '/'
else:
page += '/' + where[0] + '/'
page += '</td><td>' + formatTimestamp(ban['added']) + '</td><td>'
if ban['until'] == '0':
page += 'Does not expire'
else:
page += formatTimestamp(ban['until'])
page += '</td><td>' + ban['staff'] + '</td><td>' + escapeHTML(ban['reason']) + '</td><td>' + ban['note'] + '</td><td><a href="' + Settings.CGI_URL + 'manage/ban/' + ban['ip'] + '?edit=' + ban['id'] + '">edit</a> <a href="' + Settings.CGI_URL + 'manage/bans/delete/' + ban['id'] + '">delete</a></td></tr>'
page += '</table>'
elif path_split[2] == 'changepassword':
form_submitted = False
try:
if self.formdata['oldpassword'] != '' and self.formdata['newpassword'] != '' and self.formdata['newpassword2'] != '':
form_submitted = True
except:
pass
if form_submitted:
if getMD5(self.formdata['oldpassword']) == staff_account['password']:
if self.formdata['newpassword'] == self.formdata['newpassword2']:
UpdateDb('UPDATE `staff` SET `password` = \'' + getMD5(self.formdata['newpassword']) + '\' WHERE `id` = ' + staff_account['id'] + ' LIMIT 1')
page += 'Password successfully changed. Please log out and log back in.'
else:
page += 'Passwords did not match.'
else:
page += 'Current password incorrect.'
else:
page += '<form action="' + Settings.CGI_URL + 'manage/changepassword" method="post">' + \
'<label for="oldpassword">Current password</label> <input type="password" name="oldpassword"><br>' + \
'<label for="newpassword">New password</label> <input type="password" name="newpassword"><br>' + \
'<label for="newpassword2">New password (confirm)</label> <input type="password" name="newpassword2"><br>' + \
'<label for="submit"> </label> <input type="submit" value="Change Password">' + \
'</form>'
elif path_split[2] == 'board':
if not administrator:
return
if len(path_split) > 3:
board = setBoard(path_split[3])
form_submitted = False
try:
if self.formdata['name'] != '':
form_submitted = True
except:
pass
if form_submitted:
if self.formdata['name'] != board['name']:
UpdateDb('UPDATE `boards` SET `name` = \'' + _mysql.escape_string(self.formdata['name']) + '\' WHERE `id` = ' + board['id'] + ' LIMIT 1')
board['settings']['anonymous'] = self.formdata['anonymous']
if self.formdata['forced_anonymous'] == '0':
board['settings']['forced_anonymous'] = False
else:
board['settings']['forced_anonymous'] = True
if self.formdata['disable_subject'] == '0':
board['settings']['disable_subject'] = False
else:
board['settings']['disable_subject'] = True
board['settings']['postarea_extra_html_top'] = self.formdata['postarea_extra_html_top']
updateBoardSettings()
page += 'Board options successfully updated.'
else:
page += '<form action="' + Settings.CGI_URL + 'manage/board/' + board['dir'] + '" method="post">' + \
'<label for="name">Name</label> <input type="text" name="name" value="' + board['name'] + '"><br>' + \
'<label for="anonymous">Anonymous</label> <input type="text" name="anonymous" value="' + board['settings']['anonymous'] + '"><br>' + \
'<label for="forced_anonymous">Forced anonymous</label> <input type="radio" name="forced_anonymous" value="0"'
if not board['settings']['forced_anonymous']:
page += ' checked'
page += '>No <input type="radio" name="forced_anonymous" value="1"'
if board['settings']['forced_anonymous']:
page += ' checked'
page += '>Yes<br>' + \
'<label for="disable_subject">Disable subject</label> <input type="radio" name="disable_subject" value="0"'
if not board['settings']['disable_subject']:
page += ' checked'
page += '>No <input type="radio" name="disable_subject" value="1"'
if board['settings']['disable_subject']:
page += ' checked'
page += '>Yes<br>' + \
'<label for="postarea_extra_html_top">HTML to include above posting area</label> <textarea name="postarea_extra_html_top" rows="10" cols="80">' + board['settings']['postarea_extra_html_top'] + '</textarea><br>' + \
'<label for="submit"> </label> <input type="submit" value="Update Options">' + \
'</form>'
else:
page += 'Click a board to view/change its options:' + boardlist('board')
elif path_split[2] == 'addboard':
if not administrator:
return
action_taken = False
board_dir = ''
try:
if self.formdata['name'] != '':
board_dir = self.formdata['dir']
except:
pass
if board_dir != '':
action_taken = True
board_exists = FetchOne('SELECT * FROM `boards` WHERE `dir` = \'' + _mysql.escape_string(board_dir) + '\' LIMIT 1')
if not board_exists:
os.mkdir(Settings.ROOT_DIR + board_dir)
os.mkdir(Settings.ROOT_DIR + board_dir + '/res')
os.mkdir(Settings.ROOT_DIR + board_dir + '/src')
os.mkdir(Settings.ROOT_DIR + board_dir + '/thumb')
if os.path.exists(Settings.ROOT_DIR + board_dir) and os.path.isdir(Settings.ROOT_DIR + board_dir):
UpdateDb('INSERT INTO `boards` (`dir`, `name`) VALUES (\'' + _mysql.escape_string(board_dir) + '\', \'' + _mysql.escape_string(self.formdata['name']) + '\')')
board = setBoard(board_dir)
f = open(Settings.ROOT_DIR + board['dir'] + '/.htaccess', 'w')
try:
f.write('DirectoryIndex index.html')
finally:
f.close()
regenerateFrontPages()
page += 'Board added'
logAction(staff_account['username'], 'Added board /' + board['dir'] + '/')
else:
page += 'There was a problem while making the directories'
else:
page += 'There is already a board with that directory'
if not action_taken:
page += '<form action="' + Settings.CGI_URL + 'manage/addboard" method="post">' + \
'<label for="dir">Directory</label> <input type="text" name="dir"><br>' + \
'<label for="name">Name</label> <input type="text" name="name"><br>' + \
'<label for="submit"> </label> <input type="submit" name="submit" value="Add board">' + \
'</form>'
elif path_split[2] == 'logs':
if staff_account['rights'] != '0':
return
page += '<table border="1"><tr><th>Date</th><th>Staff Account</th><th>Action</th></tr>'
logs = FetchAll('SELECT * FROM `logs` ORDER BY `timestamp` DESC')
for log in logs:
page += '<tr><td>' + formatTimestamp(log['timestamp']) + '</td><td>' + log['staff'] + '</td><td>' + log['action'] + '</td></tr>'
page += '</table>'
elif path_split[2] == 'logout':
page += 'Logging out...<meta http-equiv="refresh" content="0;url=' + Settings.CGI_URL + 'manage">'
setCookie(self, 'pyib_manage', '', domain='THIS')
setCookie(self, 'pyib_staff', '')
else:
page += "I'll think of something to put on the manage home."
if not skiptemplate:
template_values = {
'title': 'Manage',
'validated': validated,
'page': page,
'navbar': False,
}
if validated:
template_values.update({
'username': staff_account['username'],
'rights': staff_account['rights'],
'administrator': administrator,
'added': formatTimestamp(staff_account['added']),
})
self.output += renderTemplate('manage.html', template_values)
def logAction(staff, action):
InsertDb("INSERT INTO `logs` (`timestamp`, `staff`, `action`) VALUES (" + str(timestamp()) + ", '" + _mysql.escape_string(staff) + "\', \'" + _mysql.escape_string(action) + "\')")
def boardlist(action):
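    """Return HTML links to every board, each pointing at the given manage action."""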
page = ''
boards = FetchAll('SELECT * FROM `boards` ORDER BY `dir`')
for board in boards:
page += '<br><a href="' + Settings.CGI_URL + 'manage/' + action + '/' + board['dir'] + '">/' + board['dir'] + '/ - ' + board['name'] + '</a>'
return page
|
Our approach is to accelerate the transformation of your ideas into software solutions by using agile methods and a mastery of Java, cloud computing, mobile technologies and Big Data.
Scub is a digital service enterprise that aims to develop custom applications to solve your biggest and most pressing problems.
Our job is to create innovative digital solutions with you and to accompany you in every step of your digital projects (design, layout, architecture, realization, implementation and maintenance). Our software factory, Scub Foundation, lets us base our projects on agile methods and industrialized development.
Since 2010, we have also been the publisher of Square, a free CRM for the insurance business.
Our goal is to reduce your time to market through our experience, our knowledge of Java, our software factory and agile methodologies.
Project completed using agile methods in less than six months.
Development of front office export tracking and paperless customs documentation flows / web portals for declarations with electronic signatures.
Skills transfer and co-development (training, coaching of teams, code review, projects monitoring...).
Agile development of a contract management portal connecting 15,000 opticians with the MGEN.
Management control, ergonomics, development and maintenance of the application and servers.
Development of a contact management solution (VIPs, Journalists, Shareholders, Suppliers ...), greeting management, monitoring of actions, case management for the holding company.
Realization and implementation of the CRM, Square, e-commerce website, mobile websites. Integration with GED, telecommunications, business software and price comparators.
Technology strategies, methodology, technical expertise, performance and industrialization.
Realization of turnkey projects or team reinforcement (specifications, prototyping, development ...).
We can create and run a specialized team for your projects from our Angoulême and Bordeaux offices.
We can train your staff in the latest Java technologies, in industrialization and in agile methods.
Do you want to create a startup?
Many companies face significant inertia that prevents the rapid integration of new technologies and processes. Our software factory, Scub Foundation, allows our customers to quickly produce more efficient solutions and reduce their time to market.
To help you become more agile, Scub Foundation can improve your development process by reducing complexity through industrialization and reducing uncertainties with agile methods.
We are currently focusing our R&D on Big Data, the Cloud and Blockchains.
We can help you to integrate these new technologies in your company or in your solutions.
Square Predict is a project with several partners, including a large insurance company and three research laboratories (LIPN, LIPADE and LARIS). The goal is to build a platform for insurers to make predictions from their own data cross-referenced with data available on the Internet.
We are working on a monitoring solution to analyze blockchains, to profile their users and identify specific behaviors through a user configurable artificial intelligence engine.
Do you want to talk about innovation?
|
class Graph1(object):
def __init__(self):
    """Init the graph object."""
self.graph = {}
def add_node(self, val):
"""adds a node to the graph"""
if val in self.graph:
raise ValueError('graph already has the node')
self.graph[val] = []
def add_edge(self, val1, val2):
"""adds edges to the graph"""
if val1 not in self.graph:
self.add_node(val1)
if val2 not in self.graph:
self.add_node(val2)
if val2 in self.graph[val1]:
    self.graph[val1].remove(val2)
self.graph[val1].append(val2)
def del_node(self, val):
"""del node from graph and edges that are from or to node"""
if val in self.graph:
del self.graph[val]
for key in self.graph:
if val in self.graph[key]:
self.graph[key].remove(val)
else:
raise ValueError('graph does not have the node')
def del_edge(self, val1, val2):
"""del an edge"""
if val1 not in self.graph or val2 not in self.graph:
raise ValueError('graph does not have one of the nodes')
if val2 not in self.graph[val1]:
raise ValueError('graph does not have edge')
self.graph[val1].remove(val2)
def has_node(self, val):
"""check to see if graph has node"""
if val in self.graph:
return True
return False
def edges(self):
"""ouputs all neighbors of val"""
pair = ()
output = []
for key in self.graph:
for neigh in self.graph[key]:
pair = ()
pair = pair + (key, neigh)
output.append(pair)
return output
def adjacent(self, val1, val2):
"""Check to see if val1 is adjacent to val2"""
if val1 not in self.graph or val2 not in self.graph:
raise ValueError('graph does not have one of the nodes')
if val2 in self.graph[val1]:
return True
return False
def neighbors(self, val):
"""Outputs all neighbors of val"""
output = []
for neigh in self.graph[val]:
output.append(neigh)
return output
def nodes(self):
"""Returns a list of all nodes in graphs"""
output = []
for key in self.graph:
output.append(key)
return output
def depth_first_traversal(self, val1, output=None):
    """Retrieves nodes ordered by a depth-first search"""
    if val1 not in self.graph:
        raise ValueError('This node is not in the graph')
    # use None instead of a mutable default list, and only append val1 if it
    # has not been visited yet (recursive calls receive nodes the caller
    # already appended)
    if output is None:
        output = []
    if val1 not in output:
        output.append(val1)
    neighbors = self.graph[val1]
    for x in range(len(neighbors)):
        if neighbors[x] not in output:
            output.append(neighbors[x])
            output = self.depth_first_traversal(neighbors[x], output)
    return output
def breadth_first_traversal(self, val):
"""Retrieves nodes ordered by a breadth search criteria"""
output = []
done = False
if val not in self.graph:
raise ValueError('This node is not in the graph')
output.append(val)
iterator = 0
while not done:
neighbors = self.graph[val]
sample_size = len(output)
for x in range(len(neighbors)):
if neighbors[x] not in output:
output.append(neighbors[x])
if sample_size == len(output) and iterator >= len(output) - 1:
done = True
else:
iterator += 1
val = output[iterator]
return output
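# Minimal usage sketch (hypothetical values, not part of the original module).
# Traversal and listing order depend on dict insertion order, so the results
# shown in the comments may vary.
if __name__ == '__main__':
    g = Graph1()
    g.add_edge('a', 'b')
    g.add_edge('a', 'c')
    g.add_edge('b', 'd')
    print(g.nodes())                       # e.g. ['a', 'b', 'c', 'd']
    print(g.edges())                       # e.g. [('a', 'b'), ('a', 'c'), ('b', 'd')]
    print(g.adjacent('a', 'b'))            # True
    print(g.depth_first_traversal('a'))    # e.g. ['a', 'b', 'd', 'c']
    print(g.breadth_first_traversal('a'))  # e.g. ['a', 'b', 'c', 'd']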
|
Britain’s first decimal coins were designed by the sculptor Christopher Ironside FSIA FRBS OBE. Christopher died on 13 July 1992. We asked his widow, Jean, what it was like to live through that momentous time.
The designing of the decimal coins started in 1962. No official Government announcement had been made on when to go decimal, but the Royal Mint must have felt that it was likely to happen and wished to be prepared.
Sir Robin Darwin, a member of The Royal Mint Advisory Committee and Rector of the Royal College of Art, came up with a plan by which various artistic bodies, including the Royal Academy, the Royal Institute of British Architects, the Faculty of the Royal Designers for Industry and the RCA, should invite artists of their choice to submit designs for a new decimal currency.
My husband Christopher, who taught life drawing at the RCA and had designed a few medals in the past for the Mint, joined the combined RDI/RCA team which duly won the competition.
All this took place in total secrecy. Most instructions were given over the telephone – the occasional letters were marked ‘STRICTLY CONFIDENTIAL’ and Christopher was forbidden to speak to anyone. This made life rather difficult at home because in the end the secrecy was to span from 1962 to 1968.
With a teenage daughter at the top of the house, an elderly mother on the ground floor and two babies arriving in quick succession, there was little room at home for a discreet place to work. Christopher occupied one end of our drawing room, throwing a large cloth over his desk each time friends called. Plaster casts were stacked neatly out of sight on a balcony.
Our efforts at secrecy failed when after dinner one night two friends took it into their heads to stroll onto the balcony while Christopher and I were downstairs making coffee.
“You’re designing the decimals!” they cried.
Another hazard of working in the living area was the children. At six o’clock one morning Kate climbed out of her cot and was discovered sitting happily at her father’s desk, digging his tools into a finished plaster. The Mint had to wait a further two days for that one. If an amended drawing or plaster was needed in a hurry for the design committee, a large Mint car would arrive and a chap in a peaked cap would solemnly bear the plaster away in a flat white box rather like a modern takeaway pizza.
In 1966 all these preparations came to a halt. The country was on the verge of a General Election and James Callaghan as Chancellor of the Exchequer had a financial statement to make.
A few days later, having metaphorically picked himself up, Christopher decided he had to enter the open competition and do more and better designs. Thus began months and months of work.
By this time Virginia, Christopher’s eldest daughter, had flown the nest, we had moved house and he at last had a separate workroom. All social life came to a standstill. Apart from two days teaching, which he needed for income, he worked all day, every day, producing endless versions of lions, Britannias, dragons, coats of arms, St Georges and roses. Mealtimes were devoted to discussions of fresh ideas or to heraldry.
Once when I disturbed him with a mug of tea, I was faced with 15 St Georges. Which did I like best, St George naked or with armour? Why that one and not this one?
He finally discarded hundreds of drawings and selected three sets: royal, regional and popular. Then, when I thought life could begin again, he decided there should be a fourth avant-garde set. By the entry date he was exhausted. All the designs were displayed anonymously and the Advisory Committee invited to judge.
The only thing I remember about that day was sitting in the car outside a house where a party was going on. Christopher dashed in and minutes later returned saying, “It seems I’ve got the tails”. I cannot remember if we actually went to the party in the end. The Mint, however, never actually said he had won the competition. “Your designs have been chosen for development…” was the phrase. Then the modifications began.
Christopher felt strongly that coins are public property. If an artist paints a picture, the public can choose to buy it or reject it, but coins are thrust upon people and he wanted to do his utmost to make them generally acceptable. This was no time to be a prima donna.
However much he might privately like a particular design, other considerations were paramount, but it was difficult. Jack James’ right-hand man, Alan Dowling, would telephone to report the varied views of the Committee. This would lead to a rise in Christopher’s blood pressure as he was faced with a self-cancelling brief.
On one occasion he was asked to make Britannia lean back a fraction more and lift the trident forward but not to touch the lettering. Impossible. Steam came out of Christopher’s ears. Hours later, when I brought in the regulation mug of tea, the floor was covered with Britannias. “I’ve done it! Look at this,” he cried. “Not exactly what they wanted but they will never know!” And they never did.
In the end, after months of toing and froing, Christopher finally managed to attend a Royal Mint Advisory Committee meeting. I believe this had not been done before as it was feared designers would become tongue-tied in the face of an eminent gathering which included Sir Kenneth Clark, Sir Anthony Wagner and John Betjeman and which was chaired by His Royal Highness Prince Philip.
Possibly by now, The Mint realised that Christopher’s tongue was seldom tied.
He found the meetings he had with the Committee very helpful. He could pull out a pad of paper and demonstrate what happened to some of their suggestions. Thus time was saved.
One recurring problem was Garter King of Arms, who had to be satisfied with the accuracy of the heraldry. Christopher used to call on him for clearance from time to time which led to the saying in our house, ‘If only Garter could be more elastic’.
Year in, year out, the secrecy prevailed. Christopher supposed he was now designing the coins but he did not know.
At one point, when answering the telephone to Alan Dowling, I said in desperation, “Has Christopher won or not?” He paused for a moment.
This exchange sealed our affectionate later enjoyment of Sir Humphrey in the programme Yes Minister.
When our last child, a son, was born in 1968, Christopher added the name Decimus to Christian Adrian. The Deputy Master of the Mint, by now known to us as Jolly Jack, became his godfather.
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/gugu/w/calibre/src/calibre/gui2/convert/fb2_input.ui'
#
# Created: Thu Jul 19 23:32:30 2012
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(400, 300)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
spacerItem = QtGui.QSpacerItem(20, 213, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.gridLayout.addItem(spacerItem, 1, 0, 1, 1)
self.opt_no_inline_fb2_toc = QtGui.QCheckBox(Form)
self.opt_no_inline_fb2_toc.setObjectName(_fromUtf8("opt_no_inline_fb2_toc"))
self.gridLayout.addWidget(self.opt_no_inline_fb2_toc, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_("Form"))
self.opt_no_inline_fb2_toc.setText(_("Do not insert a &Table of Contents at the beginning of the book."))
|
#!/usr/bin/env python
# Copyright 2008, 2009 Hannes Hochreiner
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
# These lines are only needed if you don't put the script directly into
# the installation directory
import sys
# Unix
sys.path.append('/usr/share/inkscape/extensions')
# OS X
sys.path.append('/Applications/Inkscape.app/Contents/Resources/extensions')
# Windows
sys.path.append(r'C:\Program Files\Inkscape\share\extensions')
# We will use the inkex module with the predefined Effect base class.
import inkex
def propStrToList(prop_str):
    # avoid shadowing the built-ins 'str' and 'list'
    props = []
    for prop in prop_str.split(";"):
        if len(prop) != 0:
            props.append(prop.strip())
    return props

def listToPropStr(props):
    prop_str = ""
    for prop in props:
        prop_str += " " + prop + ";"
    return prop_str[1:]
class JessyInk_Uninstall(inkex.Effect):
def __init__(self):
# Call the base class constructor.
inkex.Effect.__init__(self)
self.OptionParser.add_option('--tab', action = 'store', type = 'string', dest = 'what')
self.OptionParser.add_option('--remove_script', action = 'store', type = 'inkbool', dest = 'remove_script', default = True)
self.OptionParser.add_option('--remove_effects', action = 'store', type = 'inkbool', dest = 'remove_effects', default = True)
self.OptionParser.add_option('--remove_masterSlide', action = 'store', type = 'inkbool', dest = 'remove_masterSlide', default = True)
self.OptionParser.add_option('--remove_transitions', action = 'store', type = 'inkbool', dest = 'remove_transitions', default = True)
self.OptionParser.add_option('--remove_autoTexts', action = 'store', type = 'inkbool', dest = 'remove_autoTexts', default = True)
self.OptionParser.add_option('--remove_views', action = 'store', type = 'inkbool', dest = 'remove_views', default = True)
inkex.NSS[u"jessyink"] = u"https://launchpad.net/jessyink"
def effect(self):
# Remove script, if so desired.
if self.options.remove_script:
# Find and delete script node.
for node in self.document.xpath("//svg:script[@id='JessyInk']", namespaces=inkex.NSS):
node.getparent().remove(node)
# Remove "jessyInkInit()" in the "onload" attribute, if present.
if self.document.getroot().get("onload"):
propList = propStrToList(self.document.getroot().get("onload"))
else:
propList = []
# filter instead of removing items while iterating over the same list
propList = [prop for prop in propList if prop != "jessyInkInit()"]
if len(propList) > 0:
self.document.getroot().set("onload", listToPropStr(propList))
else:
if self.document.getroot().get("onload"):
del self.document.getroot().attrib["onload"]
# Remove effect attributes, if so desired.
if self.options.remove_effects:
for node in self.document.xpath("//*[@jessyink:effectIn]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}effectIn"]
for node in self.document.xpath("//*[@jessyink:effectOut]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}effectOut"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_effectIn]", namespaces=inkex.NSS):
del node.attrib["jessyInk_effectIn"]
for node in self.document.xpath("//*[@jessyInk_effectOut]", namespaces=inkex.NSS):
del node.attrib["jessyInk_effectOut"]
# Remove master slide assignment, if so desired.
if self.options.remove_masterSlide:
for node in self.document.xpath("//*[@jessyink:masterSlide]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}masterSlide"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_masterSlide]", namespaces=inkex.NSS):
del node.attrib["jessyInk_masterSlide"]
# Remove transitions, if so desired.
if self.options.remove_transitions:
for node in self.document.xpath("//*[@jessyink:transitionIn]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}transitionIn"]
for node in self.document.xpath("//*[@jessyink:transitionOut]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}transitionOut"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_transitionIn]", namespaces=inkex.NSS):
del node.attrib["jessyInk_transitionIn"]
for node in self.document.xpath("//*[@jessyInk_transitionOut]", namespaces=inkex.NSS):
del node.attrib["jessyInk_transitionOut"]
# Remove auto texts, if so desired.
if self.options.remove_autoTexts:
for node in self.document.xpath("//*[@jessyink:autoText]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}autoText"]
# Remove old style attributes as well.
for node in self.document.xpath("//*[@jessyInk_autoText]", namespaces=inkex.NSS):
del node.attrib["jessyInk_autoText"]
# Remove views, if so desired.
if self.options.remove_views:
for node in self.document.xpath("//*[@jessyink:view]", namespaces=inkex.NSS):
del node.attrib["{" + inkex.NSS["jessyink"] + "}view"]
# Create effect instance.
effect = JessyInk_Uninstall()
effect.affect()
|
Ghost Ranch and the Faraway Nearby from Craig Varjabedian on Vimeo.
In seven years exploring and backpacking O'Keeffe country Varjabedian retraces the artist's footsteps, reinterpreting Ghost Ranch's iconic scenery in 110 breathtaking black-and-white photographs.
Ghost Ranch and the Faraway Nearby handsomely reproduces [Varjabedian's] black and white photographs that portray the parched New Mexican soil under tumultuous skies that spoke to and so captured Georgia O'Keeffe's creative imagination.
Ghost Ranch is perhaps best known as the longtime home of American artist Georgia O'Keeffe, who captured some of its most stunning scenery in her paintings. For more than twenty years, award-winning fine art photographer Craig Varjabedian has explored and captured the red cliffs and sweeping plains of this fabled 21,000-acre area in northern New Mexico. In Ghost Ranch and the Faraway Nearby, he shares over 90 new duotone photographs capturing its evanescent light.
These images reach beyond familiar ideas associated with the Ranch — such as its renown as a site of personal renewal and transformation — into Varjabedian's singular vision of his subject and its ties to ideas of identity, place, and perception.
While walking home on a snowy Michigan day in 1970 after taking pictures for his high school newspaper, fourteen-year-old Craig Varjabedian passed by an art gallery. Inside he saw a man with a thick white beard hanging pictures on the wall of a gallery. The teenager was awestruck by images of sky and stone, mountains and rivers, trees and thunderclouds. Seeing the young onlooker, the old man invited him in. Soon they were talking about cameras and photography.
The white-haired man was Ansel Adams; and this chance meeting sealed Varjabedian’s future.
After studying art and photography at the University of Michigan and Rochester Institute of Technology, Varjabedian went to New Mexico to finish his thesis. There, a friend took him to see Ghost Ranch, an inspirational mecca for artists, poets, painters, and photographers, including Ansel Adams, who was a frequent visitor, and Georgia O’Keeffe, who lived and painted there for more than fifty years.
I know exactly how he feels! I had the good fortune to be introduced to the mesmerizing beauty of northern New Mexico as a child, and if I don't get to visit several times a year to soak in its gentle grandeur and recharge my soul, well, let's just say I get a bit cranky. And, because Georgia O'Keeffe lived in and began to develop her unique style near my hometown in the Texas Panhandle, I was introduced to her work through the local museum at a young age.
Growing up in that ocean of land and sky, O'Keeffe's art was not abstract to me in any way - she was just trying to get people to see the beauty - often small, simple and taken for granted - that surrounds us daily.
So, take the time to slow down, breathe deeply and see, really see the beauty of Ghost Ranch and the Faraway Nearby.
And if you want to savor some Georgia O'Keeffe at the same time, then you will want the must-have and newly published Georgia O'Keeffe Words l Works, Volume 1, that is in our Art & Photography Collection.
Want to get up close and personal, then also check out Day Hikes in the Santa Fe Area in our Places Collection.
Craig Varjabedian's photographs of the American West illuminate his profound connection with the region and its people. His finely detailed images resonate with an authenticity that reveals the inseparable ties between identity, place and the act of perceiving.
For Varjabedian, the art of photography is a receptive process driven by the openness to the revelation each subject offers, rather than the desire to manipulate form or catalog detail. He achieves this intensely personal vision by capturing and suspending on film those decisive moments in which the elements and the ineffable spirit of a moment come together in exceptional and often startling ways.
"The remarkable photographs by Craig Varjabedian are not only beautiful, but also extremely valuable documents of architecture, culture and lifestyle. From intimate portraits to expansive landscapes, Varjabedian's images, made primarily in black and white, celebrate the drama ad potency inherent in each subject's relationship to the photographer, said Beaumont Newhall, preeminent 20th-century photographic historian and author of History of Photography: 1939 to the Present.
"The one thing that never changes is that moment of recognition when I feel the play of light, shadow, and texture resolve itself into something amazing," Vajabedian explains. Through this process, he offers viewers a new way of seeing — one that transcends mundane perception and expands our awareness of the potential of every moment.
In 1991, he co-produced an Emmy Award-winning film about his work entitled En Divina Luz: The Penitente Moradas of New Mexico. Photographs from the film were published in a book by the same name, with an essay by Pulitzer Prize-nominated author Michael Wallis. Other books by Craig Varjabedian include Landscape Dreams: A New Mexico Portrait.
|
from nipype.interfaces.afni import preprocess
from CPAC.pipeline import nipype_pipeline_engine as pe
import nipype.algorithms.rapidart as ra
import nipype.interfaces.afni as afni
import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as nio
import nipype.interfaces.utility as util
from CPAC.sca.utils import *
from CPAC.utils.utils import extract_one_d
from CPAC.utils.datasource import resample_func_roi, \
create_roi_mask_dataflow, create_spatial_map_dataflow
from CPAC.timeseries.timeseries_analysis import get_roi_timeseries, \
get_spatial_map_timeseries
def create_sca(name_sca='sca'):
"""
Map of the correlations of the Region of Interest (Seed in native or MNI space) with the rest of brain voxels.
The map is normalized to contain Z-scores, mapped in standard space and treated with spatial smoothing.
Parameters
----------
name_sca : a string
Name of the SCA workflow
Returns
-------
sca_workflow : workflow
Seed Based Correlation Analysis Workflow
Notes
-----
`Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/sca/sca.py>`_
Workflow Inputs::
inputspec.rest_res_filt : string (existing nifti file)
Band-passed image with global signal, white matter, CSF and
motion regression. Recommended bandpass filter: (0.001, 0.1)
inputspec.timeseries_one_d : string (existing nifti file)
1D 3dTcorr1D compatible timeseries file. 1D file can be timeseries
from a mask or from a parcellation containing ROIs
Workflow Outputs::
outputspec.correlation_file : string (nifti file)
Correlations of the functional file and the input time series
outputspec.Z_score : string (nifti file)
Fisher Z transformed correlations of the seed
SCA Workflow Procedure:
1. Compute the Pearson correlation between the input timeseries 1D file and the input functional file,
using 3dTcorr1D. The input timeseries can be a 1D file containing parcellation ROIs
or a 3D mask
2. Compute the Fisher Z score of the correlation computed in the step above. If a mask is provided,
a single Z score file is returned; otherwise z-scores for all ROIs are returned as a list of
nifti files
.. exec::
from CPAC.sca import create_sca
wf = create_sca()
wf.write_graph(
graph2use='orig',
dotfilename='./images/generated/sca.dot'
)
Workflow:
.. image:: ../../images/generated/sca.png
:width: 500
Detailed Workflow:
.. image:: ../../images/generated/sca_detailed.png
:width: 500
Examples
--------
>>> sca_w = create_sca("sca_wf")
>>> sca_w.inputs.inputspec.functional_file = '/home/data/subject/func/rest_bandpassed.nii.gz'
>>> sca_w.inputs.inputspec.timeseries_one_d = '/home/data/subject/func/ts.1D'
>>> sca_w.run() # doctest: +SKIP
"""
from CPAC.utils.utils import get_roi_num_list
sca = pe.Workflow(name=name_sca)
inputNode = pe.Node(util.IdentityInterface(fields=['timeseries_one_d',
'functional_file',]),
name='inputspec')
outputNode = pe.Node(util.IdentityInterface(fields=[
'correlation_stack',
'correlation_files',
'Z_score',
]),
name='outputspec')
# 2. Compute voxel-wise correlation with Seed Timeseries
corr = pe.Node(interface=preprocess.TCorr1D(),
name='3dTCorr1D', mem_gb=3.0)
corr.inputs.pearson = True
corr.inputs.outputtype = 'NIFTI_GZ'
sca.connect(inputNode, 'timeseries_one_d',
corr, 'y_1d')
sca.connect(inputNode, 'functional_file',
corr, 'xset')
# Transform the sub-bricks into volumes
try:
concat = pe.Node(interface=preprocess.TCat(), name='3dTCat')
except AttributeError:
from nipype.interfaces.afni import utils as afni_utils
concat = pe.Node(interface=afni_utils.TCat(), name='3dTCat')
concat.inputs.outputtype = 'NIFTI_GZ'
# also write out volumes as individual files
#split = pe.Node(interface=fsl.Split(), name='split_raw_volumes_sca')
#split.inputs.dimension = 't'
#split.inputs.out_base_name = 'sca_'
#get_roi_num_list = pe.Node(util.Function(input_names=['timeseries_file',
# 'prefix'],
# output_names=['roi_list'],
# function=get_roi_num_list),
# name='get_roi_num_list')
#get_roi_num_list.inputs.prefix = "sca"
#sca.connect(inputNode, 'timeseries_one_d', get_roi_num_list,
# 'timeseries_file')
#rename_rois = pe.MapNode(interface=util.Rename(), name='output_rois',
# iterfield=['in_file', 'format_string'])
#rename_rois.inputs.keep_ext = True
#sca.connect(split, 'out_files', rename_rois, 'in_file')
#sca.connect(get_roi_num_list, 'roi_list', rename_rois, 'format_string')
sca.connect(corr, 'out_file', concat, 'in_files')
#sca.connect(concat, 'out_file', split, 'in_file')
sca.connect(concat, 'out_file',
outputNode, 'correlation_stack')
#sca.connect(rename_rois, 'out_file', outputNode,
# 'correlation_files')
return sca
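# ---------------------------------------------------------------------
# A minimal NumPy sketch (illustrative only, not part of the C-PAC
# workflow above) of what the 3dTcorr1D step and the Fisher Z transform
# described in the docstring compute conceptually. The function and
# argument names are assumptions made for this example, not C-PAC or
# AFNI API.
def _sketch_seed_correlation(func_data, seed_ts):
    """Voxel-wise Pearson correlation of a (voxels, timepoints) array
    with a (timepoints,) seed timeseries; returns (r, fisher_z)."""
    import numpy as np
    func = func_data - func_data.mean(axis=1, keepdims=True)
    seed = seed_ts - seed_ts.mean()
    r = (func @ seed) / (np.linalg.norm(func, axis=1)
                         * np.linalg.norm(seed))
    # Fisher Z transform: z = arctanh(r) = 0.5 * ln((1 + r) / (1 - r))
    return r, np.arctanh(r)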
def create_temporal_reg(wflow_name='temporal_reg', which='SR'):
"""
Temporal multiple regression workflow
Provides a spatial map of parameter estimates corresponding to each
provided timeseries in a timeseries.txt file as regressors
Parameters
----------
wflow_name : a string
Name of the temporal regression workflow
which: a string
SR: Spatial Regression, RT: ROI Timeseries
NOTE: If you set which = 'RT', the output of this workflow will be
renamed based on the header information provided in the
timeseries.txt file.
If you run the temporal regression workflow manually, don't set
which = 'RT' unless you provide a timeseries.txt file with a header
containing the names of the timeseries.
Returns
-------
wflow : workflow
temporal multiple regression Workflow
Notes
-----
`Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/sca/sca.py>`_
Workflow Inputs::
inputspec.subject_rest : string (existing nifti file)
Band-passed image with global signal, white matter, CSF and
motion regression. Recommended bandpass filter: (0.001, 0.1)
inputspec.subject_timeseries : string (existing txt file)
Text file containing the timeseries to be regressed on the subject's
functional file.
Timeseries are organized by columns, timepoints by rows
inputspec.subject_mask : string (existing nifti file)
path to subject functional mask
inputspec.demean : Boolean
control whether to demean model and data
inputspec.normalize : Boolean
control whether to normalize the input timeseries to unit standard deviation
Workflow Outputs::
outputspec.temp_reg_map : string (nifti file)
GLM parameter estimate image for each timeseries in the input file
outputspec.temp_reg_map_zstat : string (nifti file)
Normalized version of the GLM parameter estimates
Temporal Regression Workflow Procedure:
Enter all timeseries into a general linear model and regress them
against the subject's functional file to get spatial maps of voxels
showing activation patterns related to those in the timeseries.
.. exec::
from CPAC.sca import create_temporal_reg
wf = create_temporal_reg()
wf.write_graph(
graph2use='orig',
dotfilename='./images/generated/create_temporal_regression.dot'
)
Workflow:
.. image:: ../../images/generated/create_temporal_regression.png
:width: 500
Detailed Workflow:
.. image:: ../../images/generated/create_temporal_regression_detailed.png
:width: 500
References
----------
`http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/DualRegression/UserGuide <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/DualRegression/UserGuide>`_
Examples
--------
>>> tr_wf = create_temporal_reg('temporal_regression')
>>> tr_wf.inputs.inputspec.subject_rest = '/home/data/subject/func/rest_bandpassed.nii.gz'
>>> tr_wf.inputs.inputspec.subject_timeseries = '/home/data/subject/func/timeseries.txt'
>>> tr_wf.inputs.inputspec.subject_mask = '/home/data/spatialmaps/spatial_map.nii.gz'
>>> tr_wf.inputs.inputspec.demean = True
>>> tr_wf.inputs.inputspec.normalize = True
>>> tr_wf.run() # doctest: +SKIP
"""
wflow = pe.Workflow(name=wflow_name)
inputNode = pe.Node(util.IdentityInterface
(fields=['subject_rest',
'subject_timeseries',
'subject_mask',
'demean',
'normalize']),
name='inputspec')
outputNode = pe.Node(util.IdentityInterface
(fields=['temp_reg_map',
'temp_reg_map_files',
'temp_reg_map_z',
'temp_reg_map_z_files']),
name='outputspec')
check_timeseries = pe.Node(util.Function(input_names=['in_file'],
output_names=['out_file'],
function=check_ts),
name='check_timeseries')
wflow.connect(inputNode, 'subject_timeseries',
check_timeseries, 'in_file')
temporalReg = pe.Node(interface=fsl.GLM(), name='temporal_regression',
mem_gb=4.0)
temporalReg.inputs.out_file = 'temp_reg_map.nii.gz'
temporalReg.inputs.out_z_name = 'temp_reg_map_z.nii.gz'
wflow.connect(inputNode, 'subject_rest', temporalReg, 'in_file')
wflow.connect(check_timeseries, 'out_file', temporalReg, 'design')
wflow.connect(inputNode, 'demean', temporalReg, 'demean')
wflow.connect(inputNode, 'normalize', temporalReg, 'des_norm')
wflow.connect(inputNode, 'subject_mask', temporalReg, 'mask')
wflow.connect(temporalReg, 'out_file', outputNode, 'temp_reg_map')
wflow.connect(temporalReg, 'out_z', outputNode, 'temp_reg_map_z')
'''
split = pe.Node(interface=fsl.Split(), name='split_raw_volumes')
split.inputs.dimension = 't'
split.inputs.out_base_name = 'temp_reg_map_'
wflow.connect(temporalReg, 'out_file', split, 'in_file')
split_zstat = pe.Node(interface=fsl.Split(), name='split_zstat_volumes')
split_zstat.inputs.dimension = 't'
split_zstat.inputs.out_base_name = 'temp_reg_map_z_'
wflow.connect(temporalReg, 'out_z',
split_zstat, 'in_file')
if which == 'SR':
wflow.connect(split, 'out_files',
outputNode, 'temp_reg_map_files')
wflow.connect(split_zstat, 'out_files',
outputNode, 'temp_reg_map_z_files')
elif which == 'RT':
map_roi_imports = ['import os', 'import numpy as np']
# get roi order and send to output node for raw outputs
get_roi_order = pe.Node(util.Function(input_names=['maps',
'timeseries'],
output_names=['labels',
'maps'],
function=map_to_roi,
imports=map_roi_imports),
name='get_roi_order')
wflow.connect(split, 'out_files', get_roi_order, 'maps')
wflow.connect(inputNode, 'subject_timeseries',
get_roi_order, 'timeseries')
rename_maps = pe.MapNode(interface=util.Rename(),
name='rename_maps',
iterfield=['in_file',
'format_string'])
rename_maps.inputs.keep_ext = True
wflow.connect(get_roi_order, 'labels', rename_maps, 'format_string')
wflow.connect(get_roi_order, 'maps', rename_maps, 'in_file')
wflow.connect(rename_maps, 'out_file',
outputNode, 'temp_reg_map_files')
# get roi order and send to output node for z-stat outputs
get_roi_order_zstat = pe.Node(util.Function(input_names=['maps',
'timeseries'],
output_names=['labels',
'maps'],
function=map_to_roi,
imports=map_roi_imports),
name='get_roi_order_zstat')
wflow.connect(split_zstat, 'out_files', get_roi_order_zstat, 'maps')
wflow.connect(inputNode, 'subject_timeseries',
get_roi_order_zstat, 'timeseries')
rename_maps_zstat = pe.MapNode(interface=util.Rename(),
name='rename_maps_zstat',
iterfield=['in_file',
'format_string'])
rename_maps_zstat.inputs.keep_ext = True
wflow.connect(get_roi_order_zstat, 'labels',
rename_maps_zstat, 'format_string')
wflow.connect(get_roi_order_zstat, 'maps',
rename_maps_zstat, 'in_file')
wflow.connect(rename_maps_zstat, 'out_file',
outputNode, 'temp_reg_map_z_files')
'''
return wflow
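# A minimal NumPy sketch (illustrative only) of the multiple regression
# that the fsl.GLM node above performs: the design matrix holds one
# column per input timeseries, and a parameter estimate is obtained by
# ordinary least squares at every voxel. The demean/normalize flags
# mirror the workflow's 'demean' and 'des_norm' inputs; all names here
# are assumptions for the example, not FSL API.
def _sketch_temporal_regression(func_data, design, demean=True,
                                normalize=True):
    """func_data: (voxels, timepoints); design: (timepoints, regressors).
    Returns betas of shape (voxels, regressors)."""
    import numpy as np
    X = design.astype(float)
    Y = func_data.astype(float)
    if demean:
        X = X - X.mean(axis=0, keepdims=True)
        Y = Y - Y.mean(axis=1, keepdims=True)
    if normalize:
        X = X / X.std(axis=0, keepdims=True)  # unit-variance regressors
    # Least-squares solve of Y.T ~ X @ B, one column of B per voxel.
    betas, _, _, _ = np.linalg.lstsq(X, Y.T, rcond=None)
    return betas.T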
def SCA_AVG(wf, cfg, strat_pool, pipe_num, opt=None):
'''Run Seed-Based Correlation Analysis.
Node Block:
{"name": "SCA_AVG",
"config": ["seed_based_correlation_analysis"],
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": [["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"]],
"outputs": ["desc-MeanSCA_timeseries",
"desc-MeanSCA_correlations",
"atlas_name"]}
'''
# same workflow, except to run TSE and send it to the resource
# pool so that it will not get sent to SCA
resample_functional_roi_for_sca = pe.Node(
util.Function(input_names=['in_func',
'in_roi',
'realignment',
'identity_matrix'],
output_names=['out_func', 'out_roi'],
function=resample_func_roi,
as_module=True),
name=f'resample_functional_roi_for_sca_{pipe_num}')
resample_functional_roi_for_sca.inputs.realignment = \
cfg.timeseries_extraction['realignment']
resample_functional_roi_for_sca.inputs.identity_matrix = \
cfg.registration_workflows['functional_registration'][
'func_registration_to_template']['FNIRT_pipelines']['identity_matrix']
roi_dataflow_for_sca = create_roi_mask_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['Avg'],
f'roi_dataflow_for_sca_{pipe_num}'
)
roi_dataflow_for_sca.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)
roi_timeseries_for_sca = get_roi_timeseries(
f'roi_timeseries_for_sca_{pipe_num}')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
# resample the input functional file to roi
wf.connect(node, out,
resample_functional_roi_for_sca, 'in_func')
wf.connect(roi_dataflow_for_sca, 'outputspec.out_file',
resample_functional_roi_for_sca, 'in_roi')
# connect it to the roi_timeseries
wf.connect(resample_functional_roi_for_sca, 'out_roi',
roi_timeseries_for_sca, 'input_roi.roi')
wf.connect(resample_functional_roi_for_sca, 'out_func',
roi_timeseries_for_sca, 'inputspec.rest')
sca_roi = create_sca(f'sca_roi_{pipe_num}')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out, sca_roi, 'inputspec.functional_file')
wf.connect(roi_timeseries_for_sca, 'outputspec.roi_csv',
#('outputspec.roi_outputs', extract_one_d),
sca_roi, 'inputspec.timeseries_one_d')
outputs = {
'desc-MeanSCA_timeseries':
(roi_timeseries_for_sca, 'outputspec.roi_csv'),
#('outputspec.roi_outputs',
# extract_one_d)),
'desc-MeanSCA_correlations':
(sca_roi, 'outputspec.correlation_stack'),
'atlas_name': (roi_dataflow_for_sca, 'outputspec.out_name')
}
return (wf, outputs)
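# A minimal NumPy sketch (illustrative only) of the mean-ROI timeseries
# extraction that get_roi_timeseries provides upstream of the SCA node
# block above: average the functional signal over every labelled atlas
# region. Names are assumptions for the example, not C-PAC API.
def _sketch_mean_roi_timeseries(func_data, atlas_labels):
    """func_data: (voxels, timepoints); atlas_labels: (voxels,) integer
    labels with 0 as background. Returns {label: mean timeseries}."""
    import numpy as np
    return {int(label): func_data[atlas_labels == label].mean(axis=0)
            for label in np.unique(atlas_labels) if label != 0}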
def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
'''Run Dual Regression - spatial regression and then temporal regression.
Node Block:
{"name": "dual_regression",
"config": ["seed_based_correlation_analysis"],
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": [["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"],
"space-template_desc-bold_mask"],
"outputs": ["desc-DualReg_correlations",
"desc-DualReg_statmap",
"atlas_name"]}
'''
resample_spatial_map_to_native_space_for_dr = pe.Node(
interface=fsl.FLIRT(),
name=f'resample_spatial_map_to_native_space_for_DR_{pipe_num}'
)
resample_spatial_map_to_native_space_for_dr.inputs.set(
interp='nearestneighbour',
apply_xfm=True,
in_matrix_file=cfg.registration_workflows['functional_registration'][
    'func_registration_to_template']['FNIRT_pipelines'][
    'identity_matrix']
)
spatial_map_dataflow_for_dr = create_spatial_map_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['DualReg'],
f'spatial_map_dataflow_for_DR_{pipe_num}'
)
spatial_map_dataflow_for_dr.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)
spatial_map_timeseries_for_dr = get_spatial_map_timeseries(
f'spatial_map_timeseries_for_DR_{pipe_num}'
)
spatial_map_timeseries_for_dr.inputs.inputspec.demean = True
# resample the input functional file and functional mask
# to spatial map
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out,
resample_spatial_map_to_native_space_for_dr, 'reference')
wf.connect(node, out,
spatial_map_timeseries_for_dr, 'inputspec.subject_rest')
wf.connect(spatial_map_dataflow_for_dr, 'select_spatial_map.out_file',
resample_spatial_map_to_native_space_for_dr, 'in_file')
# connect it to the spatial_map_timeseries
wf.connect(resample_spatial_map_to_native_space_for_dr, 'out_file',
spatial_map_timeseries_for_dr, 'inputspec.spatial_map'
)
dr_temp_reg = create_temporal_reg(f'temporal_regression_{pipe_num}')
dr_temp_reg.inputs.inputspec.normalize = \
cfg.seed_based_correlation_analysis['norm_timeseries_for_DR']
dr_temp_reg.inputs.inputspec.demean = True
wf.connect(spatial_map_timeseries_for_dr, 'outputspec.subject_timeseries',
dr_temp_reg, 'inputspec.subject_timeseries')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out, dr_temp_reg, 'inputspec.subject_rest')
node, out = strat_pool.get_data("space-template_desc-bold_mask")
wf.connect(node, out, dr_temp_reg, 'inputspec.subject_mask')
outputs = {
'desc-DualReg_correlations':
(dr_temp_reg, 'outputspec.temp_reg_map'),
'desc-DualReg_statmap':
(dr_temp_reg, 'outputspec.temp_reg_map_z'),
'atlas_name':
(spatial_map_dataflow_for_dr, 'select_spatial_map.out_name')
}
return (wf, outputs)
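# A minimal NumPy sketch (illustrative only) of the two dual-regression
# stages wired together above: stage 1 (spatial regression) estimates a
# subject-specific timeseries for each group spatial map, and stage 2
# (temporal regression) regresses those timeseries back onto the data
# to obtain subject-specific spatial maps. Names are assumptions, not
# FSL or C-PAC API.
def _sketch_dual_regression(func_data, group_maps):
    """func_data: (voxels, timepoints); group_maps: (voxels, components).
    Returns (timeseries, subject_maps)."""
    import numpy as np
    # Stage 1: spatial regression, one least-squares fit per timepoint.
    ts, _, _, _ = np.linalg.lstsq(group_maps, func_data, rcond=None)
    # Stage 2: temporal regression, one least-squares fit per voxel.
    maps, _, _, _ = np.linalg.lstsq(ts.T, func_data.T, rcond=None)
    return ts, maps.T  # (components, timepoints), (voxels, components)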
def multiple_regression(wf, cfg, strat_pool, pipe_num, opt=None):
'''Run Multiple Regression.
Node Block:
{"name": "multiple_regression",
"config": ["seed_based_correlation_analysis"],
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": [["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"],
"space-template_desc-bold_mask"],
"outputs": ["desc-MultReg_correlations",
"desc-MultReg_statmap",
"atlas_name"]}
'''
# same workflow, except to run TSE and send it to the resource
# pool so that it will not get sent to SCA
resample_functional_roi_for_multreg = pe.Node(
util.Function(input_names=['in_func',
'in_roi',
'realignment',
'identity_matrix'],
output_names=['out_func',
'out_roi'],
function=resample_func_roi,
as_module=True),
name=f'resample_functional_roi_for_multreg_{pipe_num}')
resample_functional_roi_for_multreg.inputs.realignment = \
cfg.timeseries_extraction['realignment']
resample_functional_roi_for_multreg.inputs.identity_matrix = \
cfg.registration_workflows['functional_registration'][
'func_registration_to_template']['FNIRT_pipelines']['identity_matrix']
roi_dataflow_for_multreg = create_roi_mask_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['MultReg'],
f'roi_dataflow_for_mult_reg_{pipe_num}')
roi_dataflow_for_multreg.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)
roi_timeseries_for_multreg = get_roi_timeseries(
f'roi_timeseries_for_mult_reg_{pipe_num}')
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
# resample the input functional file to roi
wf.connect(node, out, resample_functional_roi_for_multreg, 'in_func')
wf.connect(roi_dataflow_for_multreg,
'outputspec.out_file',
resample_functional_roi_for_multreg,
'in_roi')
# connect it to the roi_timeseries
wf.connect(resample_functional_roi_for_multreg,
'out_roi',
roi_timeseries_for_multreg,
'input_roi.roi')
wf.connect(resample_functional_roi_for_multreg,
'out_func',
roi_timeseries_for_multreg,
'inputspec.rest')
sc_temp_reg = create_temporal_reg(
f'temporal_regression_sca_{pipe_num}',
which='RT')
sc_temp_reg.inputs.inputspec.normalize = \
cfg.seed_based_correlation_analysis['norm_timeseries_for_DR']
sc_temp_reg.inputs.inputspec.demean = True
node, out = strat_pool.get_data(["space-template_desc-cleaned_bold",
"space-template_desc-brain_bold",
"space-template_desc-motion_bold",
"space-template_desc-preproc_bold",
"space-template_bold"])
wf.connect(node, out, sc_temp_reg, 'inputspec.subject_rest')
wf.connect(roi_timeseries_for_multreg, 'outputspec.roi_csv',
#('outputspec.roi_outputs', extract_one_d),
sc_temp_reg, 'inputspec.subject_timeseries')
node, out = strat_pool.get_data('space-template_desc-bold_mask')
wf.connect(node, out, sc_temp_reg, 'inputspec.subject_mask')
outputs = {
'desc-MultReg_correlations':
(sc_temp_reg, 'outputspec.temp_reg_map'),
'desc-MultReg_statmap':
(sc_temp_reg, 'outputspec.temp_reg_map_z'),
'atlas_name': (roi_dataflow_for_multreg, 'outputspec.out_name')
}
return (wf, outputs)
|
Don't be surprised to find people in soaking-wet clothes wherever you go on Samui Island during Songkran. While my hostel had a charm of its own, the other hostel on the island was one of the coolest I've seen. Beer bars have long been the 'face' of Pattaya, and plenty of visitors come just to get a taste of them, while many more end up lifelong 'addicts'. Having fun on a getaway is not merely something to look forward to; it is also necessary if you want your holiday to be more memorable and engaging. More importantly, this is the time of year when visibility is at its best and the air is smoothest. Package holiday options can include airfare as well as hotels and a variety of attractions. You can gather useful information about places to eat from online travel sites, where other diners leave reviews that can save you time and money by steering you away from places with bad reputations. And Vietnamese driving habits are probably the worst I have ever encountered; road accidents are common there, and traveling long distances can test the patience of a saint.
Photography, always a favorite with visitors, can also be enjoyed during the many fun activities on offer. According to Tariq Saraj Chaudhry, a Christian advocate who assists the UNHCR office in Bangkok in reviewing asylum-seeker applications, life back in Pakistan can be miserable for returnees. There is so much information out there on Internet marketing that it can seem overwhelming. That is why you should do it online. As the name suggests, these loans are offered for the benefit of people with bad credit who need cash urgently. We want you to sit back and enjoy the rewards of investment property, confident that your property is being managed, bought, or sold professionally on your behalf. When you are considering buying medicine and food, alongside the question of quality there is also the question of safety. Either RSVP, or you won't get the tour you want.
So it was something of a challenge to take good photos that day. We booked a local tour around Phuket. Great for Bangkok – Phuket – Ko Samui flights. Khao Sok is located in Surat Thani province, just a few hours' drive from Phuket, a popular holiday destination in Thailand. An exploration of northeastern Thailand is an inspiration. And with a retirement visa, it is relatively easy to settle in Thailand. I suppose Wei Jie, who was up ahead, was wondering why we were taking so long to catch up. A home is among the basic needs of every person, which is why people have been building houses since prehistoric times to keep themselves and their families safe. In online shops, the price tag sits there just like the price stickers placed on the packets. I want to add that it wasn't as though nothing was happening in New York; it was actually quite good, with promoters bringing in great artists and some using really good sound.
I finished it with an hour to spare today, which was great. As Barbara Tuchman said, 'Books are the carriers of civilization. Without books, history is silent, literature dumb, science crippled, thought and speculation at a standstill.' None of us would disagree with her famous saying. A midrange hotel in Singapore is typically located near the Singapore River, where most of the sightseeing spots, such as monuments, museums, skyscrapers, and observation decks, are found. Porter services are also free. Women around the world are looking for reduced prices on this kind of swimwear, so I decided to write this short article about finding bargains and good value online. Around Leigong Mountain, many groups wear short skirts in layers; they refer to themselves as 'Gannao'. Major area employers also call Puget Sound home, many just a short commute from our Seattle apartments. In the overwhelming majority of cases, using anti-spyware software and running regular scans should prevent serious consequences from any spyware that infects your system. Bangkok is a big city with an average transportation system. The site features a bridge and seven churches from Roman times, a castle, and the town wall.
|